diff --git a/.hyperlint/.vale.ini b/.hyperlint/.vale.ini new file mode 100644 index 000000000..ec494b46d --- /dev/null +++ b/.hyperlint/.vale.ini @@ -0,0 +1,7 @@ +StylesPath = styles +MinAlertLevel = suggestion +Vocab = hyperlint +SkippedScopes = script, style, pre, figure, code, code-block + +[*] +BasedOnStyles = Vale, hyperlint diff --git a/.hyperlint/styles/config/vocabularies/hyperlint/accept.txt b/.hyperlint/styles/config/vocabularies/hyperlint/accept.txt new file mode 100644 index 000000000..c41930e01 --- /dev/null +++ b/.hyperlint/styles/config/vocabularies/hyperlint/accept.txt @@ -0,0 +1,15 @@ +validator +[Pp]ydantic +validators +namespace +Hyperlint +preprocess +tokenization +tokenizer +API +APIs +SDKs +SDK +[Aa]sync +[Ss]ync +[Ll]ogfire diff --git a/.hyperlint/styles/hyperlint/repeatedWords.yml b/.hyperlint/styles/hyperlint/repeatedWords.yml new file mode 100644 index 000000000..00d024b0a --- /dev/null +++ b/.hyperlint/styles/hyperlint/repeatedWords.yml @@ -0,0 +1,6 @@ +extends: repetition +message: "'%s' is repeated, did you mean to repeat this word?" +level: error +alpha: true +tokens: + - '[^\s]+' diff --git a/docs/guides/advanced/creating_write_tokens.md b/docs/guides/advanced/creating_write_tokens.md index 9c8630334..969f8234b 100644 --- a/docs/guides/advanced/creating_write_tokens.md +++ b/docs/guides/advanced/creating_write_tokens.md @@ -7,8 +7,8 @@ You can create a write token by following these steps: 1. Open the **Logfire** web interface at [logfire.pydantic.dev](https://logfire.pydantic.dev). 2. Select your project from the **Projects** section on the left hand side of the page. -3. Click on the ⚙️ **Settings** tab on the top right corner of the page. -4. Select the **{} Write tokens** tab on the left hand menu. +3. Click on the ⚙️ **Settings** tab in the top right corner of the page. +4. Select the **{} Write tokens** tab from the left hand menu. 5. Click on the **Create write token** button. After creating the write token, you'll see a dialog with the token value. diff --git a/docs/guides/advanced/index.md b/docs/guides/advanced/index.md index 4211e0a0a..2b2b90733 100644 --- a/docs/guides/advanced/index.md +++ b/docs/guides/advanced/index.md @@ -3,3 +3,4 @@ * **[Testing](testing.md):** Verify your application's logging and span tracking with Logfire's testing utilities, ensuring accurate data capture and observability. * **[Backfill](backfill.md):** Recover lost data and bulk load historical data into Logfire with the `logfire backfill` command, ensuring data continuity. * **[Creating Write Tokens](creating_write_tokens.md):** Generate and manage multiple write tokens for different services. +* **[Using Read Tokens](query_api.md):** Generate and manage read tokens for programmatic querying of your Logfire data. diff --git a/docs/guides/advanced/query_api.md b/docs/guides/advanced/query_api.md new file mode 100644 index 000000000..53d9e78d1 --- /dev/null +++ b/docs/guides/advanced/query_api.md @@ -0,0 +1,222 @@ +Logfire provides a web API for programmatically running arbitrary SQL queries against the data in your Logfire projects. +This API can be used to retrieve data for export, analysis, or integration with other tools, allowing you to leverage +your data in a variety of ways. + +The API is available at `https://logfire-api.pydantic.dev/v1/query` and requires a **read token** for authentication. +Read tokens can be generated from the Logfire web interface and provide secure access to your data. 
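+
+For example, once you have a read token (see [below](#how-to-create-a-read-token)), running a query is a single
+authenticated `GET` request. Here's a minimal sketch using `httpx`; the SQL and the `<your_read_token>` placeholder
+are illustrative:
+
+```python
+import httpx
+
+response = httpx.get(
+    'https://logfire-api.pydantic.dev/v1/query',
+    params={'sql': 'SELECT start_timestamp FROM records LIMIT 1'},
+    headers={'Authorization': 'Bearer <your_read_token>'},
+)
+response.raise_for_status()
+print(response.json())  # JSON is the default response format
+```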
+
+The API can return data in various formats, including JSON, Apache Arrow, and CSV, to suit your needs.
+See [here](#additional-configuration) for more details about the available response formats.
+
+## How to Create a Read Token
+
+If you've set up Logfire following the [first steps guide](../first_steps/index.md), you can generate read tokens from
+the Logfire web interface, which you can use to access the Logfire Query API.
+
+To create a read token:
+
+1. Open the **Logfire** web interface at [logfire.pydantic.dev](https://logfire.pydantic.dev).
+2. Select your project from the **Projects** section on the left-hand side of the page.
+3. Click on the ⚙️ **Settings** tab in the top right corner of the page.
+4. Select the **Read tokens** tab from the left-hand menu.
+5. Click on the **Create read token** button.
+
+After creating the read token, you'll see a dialog with the token value.
+**Copy this value and store it securely; it will not be shown again.**
+
+## Using the Read Clients
+
+While you can [make direct HTTP requests](#making-direct-http-requests) to Logfire's querying API,
+we provide Python clients that simplify the process of interacting with the API.
+
+Logfire provides both synchronous and asynchronous clients.
+These clients are currently experimental, meaning we might introduce breaking changes in the future.
+To use these clients, you can import them from the `experimental` namespace:
+
+```python
+from logfire.experimental.query_client import AsyncLogfireQueryClient, LogfireQueryClient
+```
+
+!!! note "Additional required dependencies"
+
+    To use the query clients provided in `logfire.experimental.query_client`, you need to install `httpx`.
+
+    If you want to retrieve Arrow-format responses, you will also need to install `pyarrow`.
+
+### Client Usage Examples
+
+The `AsyncLogfireQueryClient` allows for asynchronous interaction with the Logfire API.
+If blocking I/O is acceptable and you want to avoid the complexities of asynchronous programming,
+you can use the plain `LogfireQueryClient`.
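+
+Both clients take your read token as their first argument. As a minimal sketch (assuming the token is stored in an
+environment variable; the name `LOGFIRE_READ_TOKEN` is an illustrative convention, not something the client reads
+automatically):
+
+```python
+import os
+
+from logfire.experimental.query_client import LogfireQueryClient
+
+# The client doesn't read this variable itself; we pass the token explicitly.
+token = os.environ['LOGFIRE_READ_TOKEN']
+
+with LogfireQueryClient(read_token=token) as client:
+    results = client.query_json(sql='SELECT start_timestamp FROM records LIMIT 1')
+    print(results)
+```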
+
+Here's an example of how to use these clients:
+
+=== "Async"
+
+    ```python
+    from io import StringIO
+
+    import polars as pl
+    from logfire.experimental.query_client import AsyncLogfireQueryClient
+
+
+    async def main():
+        query = """
+        SELECT start_timestamp
+        FROM records
+        LIMIT 1
+        """
+
+        async with AsyncLogfireQueryClient(read_token='<your_read_token>') as client:
+            # Load data as JSON, in column-oriented format
+            json_cols = await client.query_json(sql=query)
+            print(json_cols)
+
+            # Load data as JSON, in row-oriented format
+            json_rows = await client.query_json_rows(sql=query)
+            print(json_rows)
+
+            # Retrieve data in arrow format, and load into a polars DataFrame
+            # Note that JSON columns such as `attributes` will be returned as
+            # JSON-serialized strings
+            df_from_arrow = pl.from_arrow(await client.query_arrow(sql=query))
+            print(df_from_arrow)
+
+            # Retrieve data in CSV format, and load into a polars DataFrame
+            # Note that JSON columns such as `attributes` will be returned as
+            # JSON-serialized strings
+            df_from_csv = pl.read_csv(StringIO(await client.query_csv(sql=query)))
+            print(df_from_csv)
+
+
+    if __name__ == '__main__':
+        import asyncio
+
+        asyncio.run(main())
+    ```
+
+=== "Sync"
+
+    ```python
+    from io import StringIO
+
+    import polars as pl
+    from logfire.experimental.query_client import LogfireQueryClient
+
+
+    def main():
+        query = """
+        SELECT start_timestamp
+        FROM records
+        LIMIT 1
+        """
+
+        with LogfireQueryClient(read_token='<your_read_token>') as client:
+            # Load data as JSON, in column-oriented format
+            json_cols = client.query_json(sql=query)
+            print(json_cols)
+
+            # Load data as JSON, in row-oriented format
+            json_rows = client.query_json_rows(sql=query)
+            print(json_rows)
+
+            # Retrieve data in arrow format, and load into a polars DataFrame
+            # Note that JSON columns such as `attributes` will be returned as
+            # JSON-serialized strings
+            df_from_arrow = pl.from_arrow(client.query_arrow(sql=query))  # type: ignore
+            print(df_from_arrow)
+
+            # Retrieve data in CSV format, and load into a polars DataFrame
+            # Note that JSON columns such as `attributes` will be returned as
+            # JSON-serialized strings
+            df_from_csv = pl.read_csv(StringIO(client.query_csv(sql=query)))
+            print(df_from_csv)
+
+
+    if __name__ == '__main__':
+        main()
+    ```
+
+## Making Direct HTTP Requests
+
+If you prefer not to use the provided clients, you can make direct HTTP requests to the Logfire API using any HTTP
+client library, such as `requests` in Python. Below are the general steps and an example to guide you:
+
+### General Steps to Make a Direct HTTP Request
+
+1. **Set the Endpoint URL**: The base URL for the Logfire API is `https://logfire-api.pydantic.dev`.
+
+2. **Add Authentication**: Include the read token in your request headers to authenticate.
+   The header key should be `Authorization` with the value `Bearer <your_read_token>`.
+
+3. **Define the SQL Query**: Write the SQL query you want to execute.
+
+4. **Send the Request**: Use an HTTP GET request to the `/v1/query` endpoint with the SQL query as a query parameter.
+
+**Note:** You can provide additional query parameters to control the behavior of your requests, as shown in the sketch below.
+You can also use the `Accept` header to specify the desired format for the response data (JSON, Arrow, or CSV).
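+
+For instance, the following sketch requests CSV output via the `Accept` header and applies the `limit` and
+`min_timestamp` parameters described under [Additional Configuration](#additional-configuration); the token and
+timestamp values are illustrative:
+
+```python
+import requests
+
+response = requests.get(
+    'https://logfire-api.pydantic.dev/v1/query',
+    params={
+        'sql': 'SELECT start_timestamp FROM records',
+        'limit': 10,  # cap the number of rows returned
+        'min_timestamp': '2024-08-01T00:00:00+00:00',  # ISO-format lower bound
+    },
+    headers={
+        'Authorization': 'Bearer <your_read_token>',
+        'Accept': 'text/csv',  # ask for CSV instead of the default JSON
+    },
+)
+response.raise_for_status()
+print(response.text)
+```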
+
+### Example: Using Python `requests` Library
+
+```python
+import requests
+
+# Define the base URL and your read token
+base_url = 'https://logfire-api.pydantic.dev'
+read_token = '<your_read_token>'
+
+# Set the headers for authentication
+headers = {
+    'Authorization': f'Bearer {read_token}',
+    'Content-Type': 'application/json'
+}
+
+# Define your SQL query
+query = """
+SELECT start_timestamp
+FROM records
+LIMIT 1
+"""
+
+# Prepare the query parameters for the GET request
+params = {
+    'sql': query
+}
+
+# Send the GET request to the Logfire API
+response = requests.get(f'{base_url}/v1/query', params=params, headers=headers)
+
+# Check the response status
+if response.status_code == 200:
+    print("Query Successful!")
+    print(response.json())
+else:
+    print(f"Failed to execute query. Status code: {response.status_code}")
+    print(response.text)
+```
+
+### Additional Configuration
+
+The Logfire API supports various query parameters and response formats to give you flexibility in how you retrieve your data:
+
+- **Response Format**: Use the `Accept` header to specify the response format. Supported values include:
+    - `application/json`: Returns the data in JSON format. By default, this will be column-oriented unless specified otherwise with the `json_rows` parameter.
+    - `application/vnd.apache.arrow.stream`: Returns the data in Apache Arrow format, suitable for high-performance data processing.
+    - `text/csv`: Returns the data in CSV format, which is easy to use with many data tools.
+
+    If no `Accept` header is provided, the default response format is JSON.
+
+- **Query Parameters**:
+    - **`min_timestamp`**: An optional ISO-format timestamp to filter records with `start_timestamp` greater than this value for the `records` table or `recorded_timestamp` greater than this value for the `metrics` table. The same filtering can also be done manually within the query itself.
+    - **`max_timestamp`**: Similar to `min_timestamp`, but serves as an upper bound for filtering `start_timestamp` in the `records` table or `recorded_timestamp` in the `metrics` table. The same filtering can also be done manually within the query itself.
+    - **`limit`**: An optional parameter to limit the number of rows returned by the query. If not specified, **the default limit is 500**. The maximum allowed value is 10,000.
+    - **`json_rows`**: Only affects JSON responses. If set to `true`, the JSON response will be row-oriented; otherwise, it will be column-oriented. (The Python clients expose this as their `row_oriented` argument.)
+
+All query parameters are optional and can be used in any combination to tailor the API response to your needs.
+
+### Important Notes
+
+- **Experimental Feature**: The query clients are under the `experimental` namespace, indicating that the API may change in future versions.
+- **Environment Configuration**: Remember to securely store your read token in environment variables or a secure vault for production use.
+
+With read tokens, you have the flexibility to integrate Logfire into your workflow, whether using Python scripts, data analysis tools, or other systems.
diff --git a/docs/guides/onboarding_checklist/index.md b/docs/guides/onboarding_checklist/index.md
index 54b4fa9dd..ebaf789a2 100644
--- a/docs/guides/onboarding_checklist/index.md
+++ b/docs/guides/onboarding_checklist/index.md
@@ -8,7 +8,7 @@
 fix bugs, analyze user behavior, and make data-driven decisions.
 
 !!! note
     If you aren't familiar with traces and spans, you might want to review
-    [this section of the First Steps guide](../first_steps/index.md#opentelemetry-concepts).
+ [this section of the First Steps guide](../first_steps/index.md#tracing-with-spans). #### Logfire Onboarding Checklist diff --git a/logfire/experimental/__init__.py b/logfire/experimental/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/logfire/experimental/query_client.py b/logfire/experimental/query_client.py new file mode 100644 index 000000000..da2e2269e --- /dev/null +++ b/logfire/experimental/query_client.py @@ -0,0 +1,344 @@ +from __future__ import annotations + +from datetime import datetime +from types import TracebackType +from typing import TYPE_CHECKING, Any, Generic, Literal, TypedDict, TypeVar + +try: + from httpx import AsyncClient, Client, Response, Timeout + from httpx._client import BaseClient +except ImportError as e: # pragma: no cover + raise ImportError('httpx is required to use the Logfire query clients') from e + +if TYPE_CHECKING: + from pyarrow import Table # type: ignore + +DEFAULT_TIMEOUT = Timeout(30.0) # queries might typically be slower than the 5s default from AsyncClient + + +class QueryExecutionError(RuntimeError): + """Raised when the query execution fails on the server.""" + + pass + + +class QueryRequestError(RuntimeError): + """Raised when the query request is invalid.""" + + pass + + +class ColumnDetails(TypedDict): + """The details of a column in the row-oriented JSON-format query results.""" + + name: str + datatype: Any + bit_settings: str + + +class ColumnData(ColumnDetails): + """The data of a column in the column-oriented JSON-format query results.""" + + values: list[Any] + + +class QueryResults(TypedDict): + """The (column-oriented) results of a JSON-format query.""" + + columns: list[ColumnData] + + +class RowQueryResults(TypedDict): + """The row-oriented results of a JSON-format query.""" + + columns: list[ColumnDetails] + rows: list[dict[str, Any]] + + +T = TypeVar('T', bound=BaseClient) +S = TypeVar('S', bound='LogfireQueryClient') +R = TypeVar('R', bound='AsyncLogfireQueryClient') + + +class _BaseLogfireQueryClient(Generic[T]): + def __init__(self, base_url: str, read_token: str, timeout: Timeout, client: type[T], **client_kwargs: Any): + self.base_url = base_url + self.read_token = read_token + self.timeout = timeout + self.client: T = client( + timeout=timeout, base_url=base_url, headers={'authorization': read_token}, **client_kwargs + ) + + def build_query_params( + self, + sql: str, + min_timestamp: datetime | None = None, + max_timestamp: datetime | None = None, + limit: int | None = None, + row_oriented: bool = False, + ) -> dict[str, str]: + params: dict[str, str] = {'sql': sql} + if limit is not None: + params['limit'] = str(limit) + if row_oriented: + params['json_rows'] = 'true' + if min_timestamp: + params['min_timestamp'] = min_timestamp.isoformat() + if max_timestamp: + params['max_timestamp'] = max_timestamp.isoformat() + return params + + def handle_response_errors(self, response: Response) -> None: + if response.status_code == 400: # pragma: no cover + raise QueryExecutionError(response.json()) + if response.status_code == 422: # pragma: no cover + raise QueryRequestError(response.json()) + assert response.status_code == 200, response.content + + +class LogfireQueryClient(_BaseLogfireQueryClient[Client]): + """A synchronous client for querying Logfire data.""" + + def __init__( + self, + read_token: str, + base_url: str = 'https://logfire-api.pydantic.dev/', + timeout: Timeout = DEFAULT_TIMEOUT, + **client_kwargs: Any, + ): + super().__init__(base_url, read_token, timeout, Client, **client_kwargs) 
+ + def __enter__(self: S) -> S: + self.client.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + self.client.__exit__(exc_type, exc_value, traceback) + + def query_json( + self, + sql: str, + min_timestamp: datetime | None = None, + max_timestamp: datetime | None = None, + limit: int | None = None, + ) -> QueryResults: + """Query Logfire data and return the results as a column-oriented dictionary.""" + response = self._query( + accept='application/json', + sql=sql, + min_timestamp=min_timestamp, + max_timestamp=max_timestamp, + limit=limit, + row_oriented=False, + ) + return response.json() + + def query_json_rows( + self, + sql: str, + min_timestamp: datetime | None = None, + max_timestamp: datetime | None = None, + limit: int | None = None, + ) -> RowQueryResults: + """Query Logfire data and return the results as a row-oriented dictionary.""" + response = self._query( + accept='application/json', + sql=sql, + min_timestamp=min_timestamp, + max_timestamp=max_timestamp, + limit=limit, + row_oriented=True, + ) + return response.json() + + def query_arrow( # type: ignore + self, + sql: str, + min_timestamp: datetime | None = None, + max_timestamp: datetime | None = None, + limit: int | None = None, + ) -> Table: + """Query Logfire data and return the results as a pyarrow Table. + + Note that pyarrow must be installed for this method to succeed. + + You can use `polars.from_arrow(result)` to convert the returned table to a polars DataFrame. + """ + try: + import pyarrow + except ImportError as e: # pragma: no cover + raise ImportError('pyarrow is required to use the query_arrow method') from e + + response = self._query( + accept='application/vnd.apache.arrow.stream', + sql=sql, + min_timestamp=min_timestamp, + max_timestamp=max_timestamp, + limit=limit, + ) + with pyarrow.ipc.open_stream(response.content) as reader: # type: ignore + arrow_table: Table = reader.read_all() # type: ignore + return arrow_table # type: ignore + + def query_csv( + self, + sql: str, + min_timestamp: datetime | None = None, + max_timestamp: datetime | None = None, + limit: int | None = None, + ) -> str: + """Query Logfire data and return the results as a CSV-format string. + + Use `polars.read_csv(StringIO(result))` to convert the returned CSV to a polars DataFrame. 
+ """ + response = self._query( + accept='text/csv', + sql=sql, + min_timestamp=min_timestamp, + max_timestamp=max_timestamp, + limit=limit, + ) + return response.text + + def _query( + self, + accept: Literal['application/json', 'application/vnd.apache.arrow.stream', 'text/csv'], + sql: str, + min_timestamp: datetime | None = None, + max_timestamp: datetime | None = None, + limit: int | None = None, + row_oriented: bool = False, + ) -> Response: + params = self.build_query_params(sql, min_timestamp, max_timestamp, limit, row_oriented) + response = self.client.get('/v1/query', headers={'accept': accept}, params=params) + self.handle_response_errors(response) + return response + + +class AsyncLogfireQueryClient(_BaseLogfireQueryClient[AsyncClient]): + """An asynchronous client for querying Logfire data.""" + + def __init__( + self, + read_token: str, + base_url: str = 'https://logfire-api.pydantic.dev/', + timeout: Timeout = DEFAULT_TIMEOUT, + **async_client_kwargs: Any, + ): + super().__init__(base_url, read_token, timeout, AsyncClient, **async_client_kwargs) + + async def __aenter__(self: R) -> R: + await self.client.__aenter__() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + await self.client.__aexit__(exc_type, exc_value, traceback) + + async def query_json( + self, + sql: str, + min_timestamp: datetime | None = None, + max_timestamp: datetime | None = None, + limit: int | None = None, + ) -> QueryResults: + """Query Logfire data and return the results as a column-oriented dictionary.""" + response = await self._query( + accept='application/json', + sql=sql, + min_timestamp=min_timestamp, + max_timestamp=max_timestamp, + limit=limit, + row_oriented=False, + ) + return response.json() + + async def query_json_rows( + self, + sql: str, + min_timestamp: datetime | None = None, + max_timestamp: datetime | None = None, + limit: int | None = None, + ) -> RowQueryResults: + """Query Logfire data and return the results as a row-oriented dictionary.""" + response = await self._query( + accept='application/json', + sql=sql, + min_timestamp=min_timestamp, + max_timestamp=max_timestamp, + limit=limit, + row_oriented=True, + ) + return response.json() + + async def query_arrow( # type: ignore + self, + sql: str, + min_timestamp: datetime | None = None, + max_timestamp: datetime | None = None, + limit: int | None = None, + ) -> Table: + """Query Logfire data and return the results as a pyarrow Table. + + Note that pyarrow must be installed for this method to succeed. + + You can use `polars.from_arrow(result)` to convert the returned table to a polars DataFrame. + """ + try: + import pyarrow + except ImportError as e: # pragma: no cover + raise ImportError('pyarrow is required to use the query_arrow method') from e + + response = await self._query( + accept='application/vnd.apache.arrow.stream', + sql=sql, + min_timestamp=min_timestamp, + max_timestamp=max_timestamp, + limit=limit, + ) + with pyarrow.ipc.open_stream(response.content) as reader: # type: ignore + arrow_table: Table = reader.read_all() # type: ignore + return arrow_table # type: ignore + + async def query_csv( + self, + sql: str, + min_timestamp: datetime | None = None, + max_timestamp: datetime | None = None, + limit: int | None = None, + ) -> str: + """Query Logfire data and return the results as a CSV-format string. 
+ + Use `polars.read_csv(StringIO(result))` to convert the returned CSV to a polars DataFrame. + """ + response = await self._query( + accept='text/csv', + sql=sql, + min_timestamp=min_timestamp, + max_timestamp=max_timestamp, + limit=limit, + ) + return response.text + + async def _query( + self, + accept: Literal['application/json', 'application/vnd.apache.arrow.stream', 'text/csv'], + sql: str, + min_timestamp: datetime | None = None, + max_timestamp: datetime | None = None, + limit: int | None = None, + row_oriented: bool = False, + ) -> Response: + params = self.build_query_params(sql, min_timestamp, max_timestamp, limit, row_oriented) + response = await self.client.get('/v1/query', headers={'accept': accept}, params=params) + self.handle_response_errors(response) + return response diff --git a/mkdocs.yml b/mkdocs.yml index cbc2b831e..be22261ef 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -92,6 +92,7 @@ nav: - Testing: guides/advanced/testing.md - Backfill: guides/advanced/backfill.md - Creating Write Tokens: guides/advanced/creating_write_tokens.md + - Query API: guides/advanced/query_api.md - Integrations: - Integrations: integrations/index.md - OpenTelemetry: diff --git a/pyproject.toml b/pyproject.toml index 3956d66cf..055a68ca2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -140,6 +140,8 @@ dev-dependencies = [ "testcontainers", "mysql-connector-python~=8.0", "griffe==0.48.0", + "pyarrow>=17.0.0", + "pytest-recording>=0.13.2", ] [tool.rye.scripts] diff --git a/requirements-dev.lock b/requirements-dev.lock index 430637436..304271d6d 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -6,7 +6,6 @@ # features: [] # all-features: false # with-sources: false -# generate-hashes: false -e file:. aiohappyeyeballs==2.4.0 @@ -29,6 +28,8 @@ asgiref==3.8.1 # via opentelemetry-instrumentation-asgi asttokens==2.4.1 # via inline-snapshot +async-timeout==4.0.3 + # via asyncpg asyncpg==0.29.0 attrs==24.2.0 # via aiohttp @@ -41,7 +42,7 @@ black==24.8.0 blinker==1.8.2 # via flask celery==5.4.0 -certifi==2024.7.4 +certifi==2024.8.30 # via httpcore # via httpx # via requests @@ -159,7 +160,7 @@ mdurl==0.1.2 mergedeep==1.3.4 # via mkdocs # via mkdocs-get-deps -mkdocs==1.6.0 +mkdocs==1.6.1 # via mkdocs-autorefs # via mkdocs-material # via mkdocstrings @@ -187,7 +188,8 @@ nodeenv==1.9.1 # via pyright numpy==2.1.0 # via pandas -openai==1.42.0 + # via pyarrow +openai==1.43.0 opentelemetry-api==1.27.0 # via opentelemetry-exporter-otlp-proto-http # via opentelemetry-instrumentation @@ -329,12 +331,13 @@ psycopg==3.2.1 psycopg-binary==3.2.1 # via psycopg psycopg2-binary==2.9.9 -pydantic @ git+https://github.com/pydantic/pydantic@27411c4d10e6132ce05160818d265ed22898b0fd +pyarrow==17.0.0 +pydantic @ git+https://github.com/pydantic/pydantic@111eb01ea3808e5283f8951265c35f47855f0faa # via anthropic # via fastapi # via openai # via sqlmodel -pydantic-core==2.23.0 +pydantic-core==2.23.1 # via pydantic pygments==2.18.0 # via mkdocs-material @@ -347,8 +350,10 @@ pyright==1.1.378 pytest==8.3.2 # via pytest-django # via pytest-pretty + # via pytest-recording pytest-django==4.8.0 pytest-pretty==1.2.0 +pytest-recording==0.13.2 python-dateutil==2.9.0.post0 # via celery # via ghp-import @@ -362,6 +367,7 @@ pyyaml==6.0.2 # via pre-commit # via pymdown-extensions # via pyyaml-env-tag + # via vcrpy pyyaml-env-tag==0.1 # via mkdocs redis==5.0.8 @@ -378,7 +384,7 @@ rich==13.8.0 # via inline-snapshot # via logfire # via pytest-pretty -ruff==0.6.2 +ruff==0.6.3 setuptools==74.0.0 # via 
opentelemetry-instrumentation six==1.16.0 @@ -429,6 +435,8 @@ urllib3==2.2.2 # via docker # via requests # via testcontainers +vcrpy==6.0.1 + # via pytest-recording vine==5.1.0 # via amqp # via celery @@ -449,7 +457,9 @@ wrapt==1.16.0 # via opentelemetry-instrumentation-redis # via opentelemetry-instrumentation-sqlalchemy # via testcontainers + # via vcrpy yarl==1.9.4 # via aiohttp + # via vcrpy zipp==3.20.1 # via importlib-metadata diff --git a/requirements.lock b/requirements.lock index 1025b3f90..e058770be 100644 --- a/requirements.lock +++ b/requirements.lock @@ -6,10 +6,9 @@ # features: [] # all-features: false # with-sources: false -# generate-hashes: false -e file:. -certifi==2024.7.4 +certifi==2024.8.30 # via requests charset-normalizer==3.3.2 # via requests diff --git a/tests/aaa_query_client/README.md b/tests/aaa_query_client/README.md new file mode 100644 index 000000000..f0b7fd842 --- /dev/null +++ b/tests/aaa_query_client/README.md @@ -0,0 +1,4 @@ +This folder starts with `aaa_` in order to make sure these tests run first. + +I don't know why, but they don't pass when they aren't the first tests to run. +Maybe it's related to instrumenting httpx in some of the tests? diff --git a/tests/aaa_query_client/cassettes/test_query_client/test_query_params_async.yaml b/tests/aaa_query_client/cassettes/test_query_client/test_query_params_async.yaml new file mode 100644 index 000000000..18b3c1b92 --- /dev/null +++ b/tests/aaa_query_client/cassettes/test_query_client/test_query_params_async.yaml @@ -0,0 +1,183 @@ +interactions: +- request: + body: '' + headers: + accept: + - text/csv + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - localhost:8000 + user-agent: + - python-httpx/0.27.2 + method: GET + uri: http://localhost:8000/v1/query?sql=%0A%20%20%20%20%20%20%20%20SELECT%20is_exception%2C%20count%28%2A%29%0A%20%20%20%20%20%20%20%20FROM%20records%0A%20%20%20%20%20%20%20%20GROUP%20BY%20is_exception%0A%20%20%20%20%20%20%20%20ORDER%20BY%20is_exception%0A%20%20%20%20%20%20%20%20 + response: + body: + string: !!binary | + H4sIAILu0WYC/8ssjk+tSE4tKMnMz9NJzi/NK9HQ0uRKS8wpTtUxNucqKSpN1THkAgAPkIWfJgAA + AA== + headers: + access-control-expose-headers: + - traceresponse + content-disposition: + - attachment; filename="query_results.csv" + content-encoding: + - gzip + content-type: + - text/csv; charset=utf-8 + date: + - Fri, 30 Aug 2024 16:08:34 GMT + server: + - uvicorn + traceresponse: + - 00-263b6bfa64dc44be3f58f4b0e850030a-dce4ce1f1c21487f-01 + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-api-version: + - IrAe09QtPMFmR9tG0nG94l9GFfbLxjXec2/mMxKUkoI= + x-logfire-project: + - e2e-test/test-e2e-project + status: + code: 200 + message: OK +- request: + body: '' + headers: + accept: + - text/csv + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - localhost:8000 + user-agent: + - python-httpx/0.27.2 + method: GET + uri: http://localhost:8000/v1/query?sql=%0A%20%20%20%20%20%20%20%20SELECT%20is_exception%2C%20count%28%2A%29%0A%20%20%20%20%20%20%20%20FROM%20records%0A%20%20%20%20%20%20%20%20GROUP%20BY%20is_exception%0A%20%20%20%20%20%20%20%20ORDER%20BY%20is_exception%0A%20%20%20%20%20%20%20%20&min_timestamp=2030-01-01T00%3A00%3A00%2B00%3A00 + response: + body: + string: !!binary | + H4sIAILu0WYC/8ssjk+tSE4tKMnMz9NJzi/NK9HQ0uQCAFnBUz8WAAAA + headers: + access-control-expose-headers: + - traceresponse + content-disposition: + - attachment; filename="query_results.csv" + content-encoding: + - gzip + content-type: + - text/csv; 
charset=utf-8 + date: + - Fri, 30 Aug 2024 16:08:34 GMT + server: + - uvicorn + traceresponse: + - 00-4cd967a98d7da5aefb74931b5e1791da-f374614f494b132f-01 + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-api-version: + - IrAe09QtPMFmR9tG0nG94l9GFfbLxjXec2/mMxKUkoI= + x-logfire-project: + - e2e-test/test-e2e-project + status: + code: 200 + message: OK +- request: + body: '' + headers: + accept: + - text/csv + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - localhost:8000 + user-agent: + - python-httpx/0.27.2 + method: GET + uri: http://localhost:8000/v1/query?sql=%0A%20%20%20%20%20%20%20%20SELECT%20is_exception%2C%20count%28%2A%29%0A%20%20%20%20%20%20%20%20FROM%20records%0A%20%20%20%20%20%20%20%20GROUP%20BY%20is_exception%0A%20%20%20%20%20%20%20%20ORDER%20BY%20is_exception%0A%20%20%20%20%20%20%20%20&max_timestamp=2020-01-01T00%3A00%3A00%2B00%3A00 + response: + body: + string: !!binary | + H4sIAILu0WYC/8ssjk+tSE4tKMnMz9NJzi/NK9HQ0uQCAFnBUz8WAAAA + headers: + access-control-expose-headers: + - traceresponse + content-disposition: + - attachment; filename="query_results.csv" + content-encoding: + - gzip + content-type: + - text/csv; charset=utf-8 + date: + - Fri, 30 Aug 2024 16:08:34 GMT + server: + - uvicorn + traceresponse: + - 00-38a1ad9bb2426224dd941d777ae1a568-8be52888bfe0205a-01 + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-api-version: + - IrAe09QtPMFmR9tG0nG94l9GFfbLxjXec2/mMxKUkoI= + x-logfire-project: + - e2e-test/test-e2e-project + status: + code: 200 + message: OK +- request: + body: '' + headers: + accept: + - text/csv + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - localhost:8000 + user-agent: + - python-httpx/0.27.2 + method: GET + uri: http://localhost:8000/v1/query?sql=%0A%20%20%20%20%20%20%20%20SELECT%20is_exception%2C%20count%28%2A%29%0A%20%20%20%20%20%20%20%20FROM%20records%0A%20%20%20%20%20%20%20%20GROUP%20BY%20is_exception%0A%20%20%20%20%20%20%20%20ORDER%20BY%20is_exception%0A%20%20%20%20%20%20%20%20&limit=1 + response: + body: + string: !!binary | + H4sIAILu0WYC/8ssjk+tSE4tKMnMz9NJzi/NK9HQ0uRKS8wpTtUxNucCANUZtHYfAAAA + headers: + access-control-expose-headers: + - traceresponse + content-disposition: + - attachment; filename="query_results.csv" + content-encoding: + - gzip + content-type: + - text/csv; charset=utf-8 + date: + - Fri, 30 Aug 2024 16:08:34 GMT + server: + - uvicorn + traceresponse: + - 00-9c016048fd844e49b095fe52567b9f94-b35101f536d6b5b9-01 + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-api-version: + - IrAe09QtPMFmR9tG0nG94l9GFfbLxjXec2/mMxKUkoI= + x-logfire-project: + - e2e-test/test-e2e-project + status: + code: 200 + message: OK +version: 1 diff --git a/tests/aaa_query_client/cassettes/test_query_client/test_query_params_sync.yaml b/tests/aaa_query_client/cassettes/test_query_client/test_query_params_sync.yaml new file mode 100644 index 000000000..b0b46507e --- /dev/null +++ b/tests/aaa_query_client/cassettes/test_query_client/test_query_params_sync.yaml @@ -0,0 +1,183 @@ +interactions: +- request: + body: '' + headers: + accept: + - text/csv + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - localhost:8000 + user-agent: + - python-httpx/0.27.2 + method: GET + uri: 
http://localhost:8000/v1/query?sql=%0A%20%20%20%20%20%20%20%20SELECT%20is_exception%2C%20count%28%2A%29%0A%20%20%20%20%20%20%20%20FROM%20records%0A%20%20%20%20%20%20%20%20GROUP%20BY%20is_exception%0A%20%20%20%20%20%20%20%20ORDER%20BY%20is_exception%0A%20%20%20%20%20%20%20%20 + response: + body: + string: !!binary | + H4sIAILu0WYC/8ssjk+tSE4tKMnMz9NJzi/NK9HQ0uRKS8wpTtUxNucqKSpN1THkAgAPkIWfJgAA + AA== + headers: + access-control-expose-headers: + - traceresponse + content-disposition: + - attachment; filename="query_results.csv" + content-encoding: + - gzip + content-type: + - text/csv; charset=utf-8 + date: + - Fri, 30 Aug 2024 16:08:33 GMT + server: + - uvicorn + traceresponse: + - 00-2a27c780dccdf9cc4ba19a4c570a4749-d467e08ff341a4d4-01 + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-api-version: + - IrAe09QtPMFmR9tG0nG94l9GFfbLxjXec2/mMxKUkoI= + x-logfire-project: + - e2e-test/test-e2e-project + status: + code: 200 + message: OK +- request: + body: '' + headers: + accept: + - text/csv + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - localhost:8000 + user-agent: + - python-httpx/0.27.2 + method: GET + uri: http://localhost:8000/v1/query?sql=%0A%20%20%20%20%20%20%20%20SELECT%20is_exception%2C%20count%28%2A%29%0A%20%20%20%20%20%20%20%20FROM%20records%0A%20%20%20%20%20%20%20%20GROUP%20BY%20is_exception%0A%20%20%20%20%20%20%20%20ORDER%20BY%20is_exception%0A%20%20%20%20%20%20%20%20&min_timestamp=2030-01-01T00%3A00%3A00%2B00%3A00 + response: + body: + string: !!binary | + H4sIAILu0WYC/8ssjk+tSE4tKMnMz9NJzi/NK9HQ0uQCAFnBUz8WAAAA + headers: + access-control-expose-headers: + - traceresponse + content-disposition: + - attachment; filename="query_results.csv" + content-encoding: + - gzip + content-type: + - text/csv; charset=utf-8 + date: + - Fri, 30 Aug 2024 16:08:34 GMT + server: + - uvicorn + traceresponse: + - 00-c192219a87ea208c88e25bdd7d495d07-ea35167c0637309f-01 + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-api-version: + - IrAe09QtPMFmR9tG0nG94l9GFfbLxjXec2/mMxKUkoI= + x-logfire-project: + - e2e-test/test-e2e-project + status: + code: 200 + message: OK +- request: + body: '' + headers: + accept: + - text/csv + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - localhost:8000 + user-agent: + - python-httpx/0.27.2 + method: GET + uri: http://localhost:8000/v1/query?sql=%0A%20%20%20%20%20%20%20%20SELECT%20is_exception%2C%20count%28%2A%29%0A%20%20%20%20%20%20%20%20FROM%20records%0A%20%20%20%20%20%20%20%20GROUP%20BY%20is_exception%0A%20%20%20%20%20%20%20%20ORDER%20BY%20is_exception%0A%20%20%20%20%20%20%20%20&max_timestamp=2020-01-01T00%3A00%3A00%2B00%3A00 + response: + body: + string: !!binary | + H4sIAILu0WYC/8ssjk+tSE4tKMnMz9NJzi/NK9HQ0uQCAFnBUz8WAAAA + headers: + access-control-expose-headers: + - traceresponse + content-disposition: + - attachment; filename="query_results.csv" + content-encoding: + - gzip + content-type: + - text/csv; charset=utf-8 + date: + - Fri, 30 Aug 2024 16:08:34 GMT + server: + - uvicorn + traceresponse: + - 00-bb49963ce29cb55d4207f8ee8813c2b6-3026334f415e266b-01 + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-api-version: + - IrAe09QtPMFmR9tG0nG94l9GFfbLxjXec2/mMxKUkoI= + x-logfire-project: + - e2e-test/test-e2e-project + status: + code: 200 + message: OK +- request: + body: '' + headers: + accept: + - text/csv + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - localhost:8000 + user-agent: + - python-httpx/0.27.2 + method: GET + uri: 
http://localhost:8000/v1/query?sql=%0A%20%20%20%20%20%20%20%20SELECT%20is_exception%2C%20count%28%2A%29%0A%20%20%20%20%20%20%20%20FROM%20records%0A%20%20%20%20%20%20%20%20GROUP%20BY%20is_exception%0A%20%20%20%20%20%20%20%20ORDER%20BY%20is_exception%0A%20%20%20%20%20%20%20%20&limit=1 + response: + body: + string: !!binary | + H4sIAILu0WYC/8ssjk+tSE4tKMnMz9NJzi/NK9HQ0uRKS8wpTtUxNucCANUZtHYfAAAA + headers: + access-control-expose-headers: + - traceresponse + content-disposition: + - attachment; filename="query_results.csv" + content-encoding: + - gzip + content-type: + - text/csv; charset=utf-8 + date: + - Fri, 30 Aug 2024 16:08:34 GMT + server: + - uvicorn + traceresponse: + - 00-3ba834af7af03f6cbf82744a266bbde7-3104fd2517724cc1-01 + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-api-version: + - IrAe09QtPMFmR9tG0nG94l9GFfbLxjXec2/mMxKUkoI= + x-logfire-project: + - e2e-test/test-e2e-project + status: + code: 200 + message: OK +version: 1 diff --git a/tests/aaa_query_client/cassettes/test_query_client/test_read_async.yaml b/tests/aaa_query_client/cassettes/test_query_client/test_read_async.yaml new file mode 100644 index 000000000..c516f5dcb --- /dev/null +++ b/tests/aaa_query_client/cassettes/test_query_client/test_read_async.yaml @@ -0,0 +1,177 @@ +interactions: +- request: + body: '' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - localhost:8000 + user-agent: + - python-httpx/0.27.2 + method: GET + uri: http://localhost:8000/v1/query?sql=%0A%20%20%20%20%20%20%20%20SELECT%20kind%2C%20message%2C%20is_exception%2C%20tags%0A%20%20%20%20%20%20%20%20FROM%20records%0A%20%20%20%20%20%20%20%20ORDER%20BY%20is_exception%2C%20message%0A%20%20%20%20%20%20%20%20LIMIT%202%0A%20%20%20%20%20%20%20%20 + response: + body: + string: '{"columns":[{"name":"kind","datatype":"String","bit_settings":"","values":["log","log"]},{"name":"message","datatype":"String","bit_settings":"","values":["about + to raise an error","aha 0"]},{"name":"is_exception","datatype":"Boolean","bit_settings":"","values":[false,false]},{"name":"tags","datatype":{"List":"String"},"bit_settings":"","values":[{"name":"","datatype":"String","bit_settings":"","values":[]},{"name":"","datatype":"String","bit_settings":"","values":["tag1","tag2"]}]}]}' + headers: + access-control-expose-headers: + - traceresponse + content-length: + - '489' + content-type: + - application/json + date: + - Fri, 30 Aug 2024 16:08:33 GMT + server: + - uvicorn + traceresponse: + - 00-111f83c2f365c871897bb1384294057b-46f4c321c092c0f0-01 + x-api-version: + - IrAe09QtPMFmR9tG0nG94l9GFfbLxjXec2/mMxKUkoI= + x-logfire-project: + - e2e-test/test-e2e-project + status: + code: 200 + message: OK +- request: + body: '' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - localhost:8000 + user-agent: + - python-httpx/0.27.2 + method: GET + uri: http://localhost:8000/v1/query?sql=%0A%20%20%20%20%20%20%20%20SELECT%20kind%2C%20message%2C%20is_exception%2C%20tags%0A%20%20%20%20%20%20%20%20FROM%20records%0A%20%20%20%20%20%20%20%20ORDER%20BY%20is_exception%2C%20message%0A%20%20%20%20%20%20%20%20LIMIT%202%0A%20%20%20%20%20%20%20%20&json_rows=true + response: + body: + string: 
'{"columns":[{"name":"kind","datatype":"String","bit_settings":""},{"name":"message","datatype":"String","bit_settings":""},{"name":"is_exception","datatype":"Boolean","bit_settings":""},{"name":"tags","datatype":{"List":"String"},"bit_settings":""}],"rows":[{"kind":"log","message":"about + to raise an error","is_exception":false,"tags":[]},{"kind":"log","message":"aha + 0","is_exception":false,"tags":["tag1","tag2"]}]}' + headers: + access-control-expose-headers: + - traceresponse + content-length: + - '418' + content-type: + - application/json + date: + - Fri, 30 Aug 2024 16:08:33 GMT + server: + - uvicorn + traceresponse: + - 00-01774ae97cf3730cbb5eeec847b6866f-f2162caac72e9305-01 + x-api-version: + - IrAe09QtPMFmR9tG0nG94l9GFfbLxjXec2/mMxKUkoI= + x-logfire-project: + - e2e-test/test-e2e-project + status: + code: 200 + message: OK +- request: + body: '' + headers: + accept: + - text/csv + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - localhost:8000 + user-agent: + - python-httpx/0.27.2 + method: GET + uri: http://localhost:8000/v1/query?sql=%0A%20%20%20%20%20%20%20%20SELECT%20kind%2C%20message%2C%20is_exception%2C%20tags%0A%20%20%20%20%20%20%20%20FROM%20records%0A%20%20%20%20%20%20%20%20ORDER%20BY%20is_exception%2C%20message%0A%20%20%20%20%20%20%20%20LIMIT%202%0A%20%20%20%20%20%20%20%20 + response: + body: + string: !!binary | + H4sIAILu0WYC/y2LQQqAMAzA7ntF6bkH9TsypGqdw7nKOsHnK+othGSLeaZdzDgIRRvkmuSoUTNV + DuaSBuJRzwpVoXA0Ac4gpWihhZMJ9f6LVobmV9gjPneLSC90iB7dDVkJgRRqAAAA + headers: + access-control-expose-headers: + - traceresponse + content-disposition: + - attachment; filename="query_results.csv" + content-encoding: + - gzip + content-type: + - text/csv; charset=utf-8 + date: + - Fri, 30 Aug 2024 16:08:33 GMT + server: + - uvicorn + traceresponse: + - 00-a32ae9361f54d7b36e49d11ec53224f7-d12b8d090b639e15-01 + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-api-version: + - IrAe09QtPMFmR9tG0nG94l9GFfbLxjXec2/mMxKUkoI= + x-logfire-project: + - e2e-test/test-e2e-project + status: + code: 200 + message: OK +- request: + body: '' + headers: + accept: + - application/vnd.apache.arrow.stream + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - localhost:8000 + user-agent: + - python-httpx/0.27.2 + method: GET + uri: http://localhost:8000/v1/query?sql=%0A%20%20%20%20%20%20%20%20SELECT%20kind%2C%20message%2C%20is_exception%2C%20tags%0A%20%20%20%20%20%20%20%20FROM%20records%0A%20%20%20%20%20%20%20%20ORDER%20BY%20is_exception%2C%20message%0A%20%20%20%20%20%20%20%20LIMIT%202%0A%20%20%20%20%20%20%20%20 + response: + body: + string: !!binary | + H4sIAILu0WYC/8VVvU7DMBC+tKYEFEGHlEaoQwYeoDAxMPRBkJAhVomgDUqC1IGhD8LAwEMw5hF4 + AN4lnJ1zSEz5a6rg6Hxn333nL3dOm+d5vrAA+iDHLjgoO8Bwpj2LgY0PAKO5kDeUJ5RzWj9gHg+1 + o/JYg1M5o0iMi75HFPfD7xYaYIz7Eh+mYgaUK+XTRNqvdUxPYwLcl3aYXIjFlbhLw2gOiq+Lsgf7 + Kout+K868xnx26hnIkn4VGCcp+Ikrk+4M8mtCHf7ZNuqKkUdilw9tBnOun7Sy8oa3YTzAH4YSCV/ + IbyDPA5goE6Q+UcoWTW4w2ocdM9cjLapZ0fEzSKIVbH16FZs7ZuQ1u+xXBG7kj9sZnTXxN1G00bn + 5pX778AhdmBY1t//TKzLjHoHVO8OBWxV7LJtDdfON/1iRr+0X98bzwCONN6qxy+NdWasNSmdb9Ix + 8IY/o7Xd0v1Zu/8byjOEen1/O/hldJ/6aeTHPEyEz+e+iOMo5tfcH7fJ/7/r99Xdb+t8/R3Zf8Th + f9UxykmT3x+p3wHsMJvMiAcAAA== + headers: + access-control-expose-headers: + - traceresponse + content-encoding: + - gzip + content-type: + - application/vnd.apache.arrow.stream + date: + - Fri, 30 Aug 2024 16:08:33 GMT + server: + - uvicorn + traceresponse: + - 00-85fd4620af9948cc03a0689541041639-c2135003cf616261-01 + transfer-encoding: + - chunked 
+ vary: + - Accept-Encoding + x-api-version: + - IrAe09QtPMFmR9tG0nG94l9GFfbLxjXec2/mMxKUkoI= + x-logfire-project: + - e2e-test/test-e2e-project + status: + code: 200 + message: OK +version: 1 diff --git a/tests/aaa_query_client/cassettes/test_query_client/test_read_sync.yaml b/tests/aaa_query_client/cassettes/test_query_client/test_read_sync.yaml new file mode 100644 index 000000000..ba2cfc63e --- /dev/null +++ b/tests/aaa_query_client/cassettes/test_query_client/test_read_sync.yaml @@ -0,0 +1,177 @@ +interactions: +- request: + body: '' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - localhost:8000 + user-agent: + - python-httpx/0.27.2 + method: GET + uri: http://localhost:8000/v1/query?sql=%0A%20%20%20%20%20%20%20%20SELECT%20kind%2C%20message%2C%20is_exception%2C%20tags%0A%20%20%20%20%20%20%20%20FROM%20records%0A%20%20%20%20%20%20%20%20ORDER%20BY%20is_exception%2C%20message%0A%20%20%20%20%20%20%20%20LIMIT%202%0A%20%20%20%20%20%20%20%20 + response: + body: + string: '{"columns":[{"name":"kind","datatype":"String","bit_settings":"","values":["log","log"]},{"name":"message","datatype":"String","bit_settings":"","values":["about + to raise an error","aha 0"]},{"name":"is_exception","datatype":"Boolean","bit_settings":"","values":[false,false]},{"name":"tags","datatype":{"List":"String"},"bit_settings":"","values":[{"name":"","datatype":"String","bit_settings":"","values":[]},{"name":"","datatype":"String","bit_settings":"","values":["tag1","tag2"]}]}]}' + headers: + access-control-expose-headers: + - traceresponse + content-length: + - '489' + content-type: + - application/json + date: + - Fri, 30 Aug 2024 16:08:33 GMT + server: + - uvicorn + traceresponse: + - 00-dd22d7beb04189279d2242ac2fef8095-49128b549ecfc52d-01 + x-api-version: + - IrAe09QtPMFmR9tG0nG94l9GFfbLxjXec2/mMxKUkoI= + x-logfire-project: + - e2e-test/test-e2e-project + status: + code: 200 + message: OK +- request: + body: '' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - localhost:8000 + user-agent: + - python-httpx/0.27.2 + method: GET + uri: http://localhost:8000/v1/query?sql=%0A%20%20%20%20%20%20%20%20SELECT%20kind%2C%20message%2C%20is_exception%2C%20tags%0A%20%20%20%20%20%20%20%20FROM%20records%0A%20%20%20%20%20%20%20%20ORDER%20BY%20is_exception%2C%20message%0A%20%20%20%20%20%20%20%20LIMIT%202%0A%20%20%20%20%20%20%20%20&json_rows=true + response: + body: + string: '{"columns":[{"name":"kind","datatype":"String","bit_settings":""},{"name":"message","datatype":"String","bit_settings":""},{"name":"is_exception","datatype":"Boolean","bit_settings":""},{"name":"tags","datatype":{"List":"String"},"bit_settings":""}],"rows":[{"kind":"log","message":"about + to raise an error","is_exception":false,"tags":[]},{"kind":"log","message":"aha + 0","is_exception":false,"tags":["tag1","tag2"]}]}' + headers: + access-control-expose-headers: + - traceresponse + content-length: + - '418' + content-type: + - application/json + date: + - Fri, 30 Aug 2024 16:08:33 GMT + server: + - uvicorn + traceresponse: + - 00-33b92a1f94a4c2ad1bf14a5edd368639-d98eac524aac98d5-01 + x-api-version: + - IrAe09QtPMFmR9tG0nG94l9GFfbLxjXec2/mMxKUkoI= + x-logfire-project: + - e2e-test/test-e2e-project + status: + code: 200 + message: OK +- request: + body: '' + headers: + accept: + - text/csv + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - localhost:8000 + user-agent: + - python-httpx/0.27.2 + 
method: GET + uri: http://localhost:8000/v1/query?sql=%0A%20%20%20%20%20%20%20%20SELECT%20kind%2C%20message%2C%20is_exception%2C%20tags%0A%20%20%20%20%20%20%20%20FROM%20records%0A%20%20%20%20%20%20%20%20ORDER%20BY%20is_exception%2C%20message%0A%20%20%20%20%20%20%20%20LIMIT%202%0A%20%20%20%20%20%20%20%20 + response: + body: + string: !!binary | + H4sIAIHu0WYC/y2LQQqAMAzA7ntF6bkH9TsypGqdw7nKOsHnK+othGSLeaZdzDgIRRvkmuSoUTNV + DuaSBuJRzwpVoXA0Ac4gpWihhZMJ9f6LVobmV9gjPneLSC90iB7dDVkJgRRqAAAA + headers: + access-control-expose-headers: + - traceresponse + content-disposition: + - attachment; filename="query_results.csv" + content-encoding: + - gzip + content-type: + - text/csv; charset=utf-8 + date: + - Fri, 30 Aug 2024 16:08:33 GMT + server: + - uvicorn + traceresponse: + - 00-468d6e0df3c8d689f0bb285aa656917b-ca1f009dcf9c3351-01 + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-api-version: + - IrAe09QtPMFmR9tG0nG94l9GFfbLxjXec2/mMxKUkoI= + x-logfire-project: + - e2e-test/test-e2e-project + status: + code: 200 + message: OK +- request: + body: '' + headers: + accept: + - application/vnd.apache.arrow.stream + accept-encoding: + - gzip, deflate + connection: + - keep-alive + host: + - localhost:8000 + user-agent: + - python-httpx/0.27.2 + method: GET + uri: http://localhost:8000/v1/query?sql=%0A%20%20%20%20%20%20%20%20SELECT%20kind%2C%20message%2C%20is_exception%2C%20tags%0A%20%20%20%20%20%20%20%20FROM%20records%0A%20%20%20%20%20%20%20%20ORDER%20BY%20is_exception%2C%20message%0A%20%20%20%20%20%20%20%20LIMIT%202%0A%20%20%20%20%20%20%20%20 + response: + body: + string: !!binary | + H4sIAIHu0WYC/8VVvU7DMBC+tKYEFEGHlEaoQwYeoDAxMPRBkJAhVomgDUqC1IGhD8LAwEMw5hF4 + AN4lnJ1zSEz5a6rg6Hxn333nL3dOm+d5vrAA+iDHLjgoO8Bwpj2LgY0PAKO5kDeUJ5RzWj9gHg+1 + o/JYg1M5o0iMi75HFPfD7xYaYIz7Eh+mYgaUK+XTRNqvdUxPYwLcl3aYXIjFlbhLw2gOiq+Lsgf7 + Kout+K868xnx26hnIkn4VGCcp+Ikrk+4M8mtCHf7ZNuqKkUdilw9tBnOun7Sy8oa3YTzAH4YSCV/ + IbyDPA5goE6Q+UcoWTW4w2ocdM9cjLapZ0fEzSKIVbH16FZs7ZuQ1u+xXBG7kj9sZnTXxN1G00bn + 5pX778AhdmBY1t//TKzLjHoHVO8OBWxV7LJtDdfON/1iRr+0X98bzwCONN6qxy+NdWasNSmdb9Ix + 8IY/o7Xd0v1Zu/8byjOEen1/O/hldJ/6aeTHPEyEz+e+iOMo5tfcH7fJ/7/r99Xdb+t8/R3Zf8Th + f9UxykmT3x+p3wHsMJvMiAcAAA== + headers: + access-control-expose-headers: + - traceresponse + content-encoding: + - gzip + content-type: + - application/vnd.apache.arrow.stream + date: + - Fri, 30 Aug 2024 16:08:33 GMT + server: + - uvicorn + traceresponse: + - 00-8c98be1079ee2d0c36962e600b3f1076-29be5898366fd76f-01 + transfer-encoding: + - chunked + vary: + - Accept-Encoding + x-api-version: + - IrAe09QtPMFmR9tG0nG94l9GFfbLxjXec2/mMxKUkoI= + x-logfire-project: + - e2e-test/test-e2e-project + status: + code: 200 + message: OK +version: 1 diff --git a/tests/aaa_query_client/test_query_client.py b/tests/aaa_query_client/test_query_client.py new file mode 100644 index 000000000..fc5075641 --- /dev/null +++ b/tests/aaa_query_client/test_query_client.py @@ -0,0 +1,259 @@ +import sys +from datetime import datetime, timezone + +import pytest +from inline_snapshot import snapshot + +from logfire.experimental.query_client import AsyncLogfireQueryClient, LogfireQueryClient + +# This file is intended to be updated by the Logfire developers, with the development platform running locally. +# To update, set the `CLIENT_BASE_URL` and `CLIENT_READ_TOKEN` values to match the local development environment, +# and run the tests with `--record-mode=rewrite --inline-snapshot=fix` to update the cassettes and snapshots. 
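+# The `vcr_config` fixture in tests/conftest.py filters out the `authorization` header,
+# so the read token below is not recorded into the cassettes.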
+CLIENT_BASE_URL = 'http://localhost:8000/' +CLIENT_READ_TOKEN = '6qdcmMdvHhyqy6sjhmSW08q1J5VCMRfLl23yNbdz3YGn' +pytestmark = [ + pytest.mark.vcr(), + pytest.mark.skipif( + sys.version_info < (3, 10), + reason='vcr is not compatible with latest urllib3 on python<3.10, ' + 'see https://github.com/kevin1024/vcrpy/issues/688.', + ), +] + + +def test_read_sync(): + with LogfireQueryClient(read_token=CLIENT_READ_TOKEN, base_url=CLIENT_BASE_URL) as client: + sql = """ + SELECT kind, message, is_exception, tags + FROM records + ORDER BY is_exception, message + LIMIT 2 + """ + assert client.query_json(sql) == snapshot( + { + 'columns': [ + { + 'name': 'kind', + 'datatype': 'String', + 'bit_settings': '', + 'values': ['log', 'log'], + }, + { + 'name': 'message', + 'datatype': 'String', + 'bit_settings': '', + 'values': ['about to raise an error', 'aha 0'], + }, + { + 'name': 'is_exception', + 'datatype': 'Boolean', + 'bit_settings': '', + 'values': [False, False], + }, + { + 'name': 'tags', + 'datatype': {'List': 'String'}, + 'bit_settings': '', + 'values': [ + {'name': '', 'datatype': 'String', 'bit_settings': '', 'values': []}, + { + 'name': '', + 'datatype': 'String', + 'bit_settings': '', + 'values': ['tag1', 'tag2'], + }, + ], + }, + ] + } + ) + assert client.query_json_rows(sql) == snapshot( + { + 'columns': [ + {'name': 'kind', 'datatype': 'String', 'bit_settings': ''}, + {'name': 'message', 'datatype': 'String', 'bit_settings': ''}, + {'name': 'is_exception', 'datatype': 'Boolean', 'bit_settings': ''}, + {'name': 'tags', 'datatype': {'List': 'String'}, 'bit_settings': ''}, + ], + 'rows': [ + { + 'kind': 'log', + 'message': 'about to raise an error', + 'is_exception': False, + 'tags': [], + }, + { + 'kind': 'log', + 'message': 'aha 0', + 'is_exception': False, + 'tags': ['tag1', 'tag2'], + }, + ], + } + ) + assert client.query_csv(sql) == snapshot("""\ +kind,message,is_exception,tags +log,about to raise an error,false,[] +log,aha 0,false,"[""tag1"",""tag2""]" +""") + assert client.query_arrow(sql).to_pylist() == snapshot( # type: ignore + [ + { + 'kind': 'log', + 'message': 'about to raise an error', + 'is_exception': False, + 'tags': [], + }, + { + 'kind': 'log', + 'message': 'aha 0', + 'is_exception': False, + 'tags': ['tag1', 'tag2'], + }, + ] + ) + + +@pytest.mark.anyio +async def test_read_async(): + async with AsyncLogfireQueryClient(read_token=CLIENT_READ_TOKEN, base_url=CLIENT_BASE_URL) as client: + sql = """ + SELECT kind, message, is_exception, tags + FROM records + ORDER BY is_exception, message + LIMIT 2 + """ + assert await client.query_json(sql) == snapshot( + { + 'columns': [ + { + 'name': 'kind', + 'datatype': 'String', + 'bit_settings': '', + 'values': ['log', 'log'], + }, + { + 'name': 'message', + 'datatype': 'String', + 'bit_settings': '', + 'values': ['about to raise an error', 'aha 0'], + }, + { + 'name': 'is_exception', + 'datatype': 'Boolean', + 'bit_settings': '', + 'values': [False, False], + }, + { + 'name': 'tags', + 'datatype': {'List': 'String'}, + 'bit_settings': '', + 'values': [ + {'name': '', 'datatype': 'String', 'bit_settings': '', 'values': []}, + { + 'name': '', + 'datatype': 'String', + 'bit_settings': '', + 'values': ['tag1', 'tag2'], + }, + ], + }, + ] + } + ) + assert await client.query_json_rows(sql) == snapshot( + { + 'columns': [ + {'name': 'kind', 'datatype': 'String', 'bit_settings': ''}, + {'name': 'message', 'datatype': 'String', 'bit_settings': ''}, + {'name': 'is_exception', 'datatype': 'Boolean', 'bit_settings': ''}, + {'name': 'tags', 
'datatype': {'List': 'String'}, 'bit_settings': ''}, + ], + 'rows': [ + { + 'kind': 'log', + 'message': 'about to raise an error', + 'is_exception': False, + 'tags': [], + }, + { + 'kind': 'log', + 'message': 'aha 0', + 'is_exception': False, + 'tags': ['tag1', 'tag2'], + }, + ], + } + ) + assert await client.query_csv(sql) == snapshot("""\ +kind,message,is_exception,tags +log,about to raise an error,false,[] +log,aha 0,false,"[""tag1"",""tag2""]" +""") + assert (await client.query_arrow(sql)).to_pylist() == snapshot( # type: ignore + [ + { + 'kind': 'log', + 'message': 'about to raise an error', + 'is_exception': False, + 'tags': [], + }, + { + 'kind': 'log', + 'message': 'aha 0', + 'is_exception': False, + 'tags': ['tag1', 'tag2'], + }, + ] + ) + + +def test_query_params_sync(): + with LogfireQueryClient(read_token=CLIENT_READ_TOKEN, base_url=CLIENT_BASE_URL) as client: + sql = """ + SELECT is_exception, count(*) + FROM records + GROUP BY is_exception + ORDER BY is_exception + """ + assert client.query_csv(sql) == snapshot("""\ +is_exception,count(*) +false,37 +true,1 +""") + assert client.query_csv(sql, min_timestamp=datetime(2030, 1, 1, tzinfo=timezone.utc)) == snapshot("""\ +is_exception,count(*) +""") + assert client.query_csv(sql, max_timestamp=datetime(2020, 1, 1, tzinfo=timezone.utc)) == snapshot("""\ +is_exception,count(*) +""") + assert client.query_csv(sql, limit=1) == snapshot("""\ +is_exception,count(*) +false,37 +""") + + +@pytest.mark.anyio +async def test_query_params_async(): + async with AsyncLogfireQueryClient(read_token=CLIENT_READ_TOKEN, base_url=CLIENT_BASE_URL) as client: + sql = """ + SELECT is_exception, count(*) + FROM records + GROUP BY is_exception + ORDER BY is_exception + """ + assert await client.query_csv(sql) == snapshot("""\ +is_exception,count(*) +false,37 +true,1 +""") + assert await client.query_csv(sql, min_timestamp=datetime(2030, 1, 1, tzinfo=timezone.utc)) == snapshot("""\ +is_exception,count(*) +""") + assert await client.query_csv(sql, max_timestamp=datetime(2020, 1, 1, tzinfo=timezone.utc)) == snapshot("""\ +is_exception,count(*) +""") + assert await client.query_csv(sql, limit=1) == snapshot("""\ +is_exception,count(*) +false,37 +""") diff --git a/tests/conftest.py b/tests/conftest.py index 1b5420cbb..aa234e9ec 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -112,3 +112,8 @@ def default_credentials(tmp_path: Path) -> Path: """ ) return auth_file + + +@pytest.fixture(scope='module') +def vcr_config(): + return {'filter_headers': ['authorization']}