From 762488ecc8e55fb18e05b90a7cb353bea9ee6a00 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 20 May 2024 18:14:07 +0000 Subject: [PATCH 01/39] chore(internal): version bump (#54) --- .github/workflows/ci.yml | 16 +++------------- CONTRIBUTING.md | 2 +- SECURITY.md | 27 +++++++++++++++++++++++++++ requirements-dev.lock | 4 ++-- requirements.lock | 4 ++-- scripts/format | 2 +- scripts/lint | 4 ++++ scripts/test | 1 - src/dataherald/_models.py | 20 ++++++++++++++++---- tests/test_models.py | 8 ++++---- tests/test_transform.py | 22 ++++++++++++---------- 11 files changed, 72 insertions(+), 38 deletions(-) create mode 100644 SECURITY.md diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 737c50e..6fcd6ae 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -25,20 +25,10 @@ jobs: RYE_INSTALL_OPTION: '--yes' - name: Install dependencies - run: | - rye sync --all-features - - - name: Run ruff - run: | - rye run check:ruff + run: rye sync --all-features - - name: Run type checking - run: | - rye run typecheck - - - name: Ensure importable - run: | - rye run python -c 'import dataherald' + - name: Run lints + run: ./scripts/lint test: name: test runs-on: ubuntu-latest diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b114bb7..8fbabb7 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -59,7 +59,7 @@ If you’d like to use the repository from source, you can either install from g To install via git: ```bash -pip install git+ssh://git@github.com/Dataherald/dataherald-python.git +pip install git+ssh://git@github.com/Dataherald/dataherald-python#main.git ``` Alternatively, you can build from source and install the wheel file: diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..376105a --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,27 @@ +# Security Policy + +## Reporting Security Issues + +This SDK is generated by [Stainless Software Inc](http://stainlessapi.com). Stainless takes security seriously, and encourages you to report any security vulnerability promptly so that appropriate action can be taken. + +To report a security issue, please contact the Stainless team at security@stainlessapi.com. + +## Responsible Disclosure + +We appreciate the efforts of security researchers and individuals who help us maintain the security of +SDKs we generate. If you believe you have found a security vulnerability, please adhere to responsible +disclosure practices by allowing us a reasonable amount of time to investigate and address the issue +before making any information public. + +## Reporting Non-SDK Related Security Issues + +If you encounter security issues that are not directly related to SDKs but pertain to the services +or products provided by Dataherald please follow the respective company's security reporting guidelines. + +### Dataherald Terms and Policies + +Please contact support@dataherald.com for any questions or concerns regarding security of our services. + +--- + +Thank you for helping us keep the SDKs and systems they interact with secure. 
diff --git a/requirements-dev.lock b/requirements-dev.lock index 55f0d6f..e1f5de8 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -59,9 +59,9 @@ pluggy==1.3.0 # via pytest py==1.11.0 # via pytest -pydantic==2.4.2 +pydantic==2.7.1 # via dataherald -pydantic-core==2.10.1 +pydantic-core==2.18.2 # via pydantic pyright==1.1.359 pytest==7.1.1 diff --git a/requirements.lock b/requirements.lock index 4988d32..e329365 100644 --- a/requirements.lock +++ b/requirements.lock @@ -29,9 +29,9 @@ httpx==0.25.2 idna==3.4 # via anyio # via httpx -pydantic==2.4.2 +pydantic==2.7.1 # via dataherald -pydantic-core==2.10.1 +pydantic-core==2.18.2 # via pydantic sniffio==1.3.0 # via anyio diff --git a/scripts/format b/scripts/format index 2a9ea46..667ec2d 100755 --- a/scripts/format +++ b/scripts/format @@ -4,5 +4,5 @@ set -e cd "$(dirname "$0")/.." +echo "==> Running formatters" rye run format - diff --git a/scripts/lint b/scripts/lint index 0cc68b5..335bfec 100755 --- a/scripts/lint +++ b/scripts/lint @@ -4,5 +4,9 @@ set -e cd "$(dirname "$0")/.." +echo "==> Running lints" rye run lint +echo "==> Making sure it imports" +rye run python -c 'import dataherald' + diff --git a/scripts/test b/scripts/test index be01d04..b3ace90 100755 --- a/scripts/test +++ b/scripts/test @@ -52,6 +52,5 @@ else echo fi -# Run tests echo "==> Running tests" rye run pytest "$@" diff --git a/src/dataherald/_models.py b/src/dataherald/_models.py index ff3f54e..75c68cc 100644 --- a/src/dataherald/_models.py +++ b/src/dataherald/_models.py @@ -62,7 +62,7 @@ from ._constants import RAW_RESPONSE_HEADER if TYPE_CHECKING: - from pydantic_core.core_schema import ModelField, ModelFieldsSchema + from pydantic_core.core_schema import ModelField, LiteralSchema, ModelFieldsSchema __all__ = ["BaseModel", "GenericModel"] @@ -251,7 +251,9 @@ def model_dump( exclude_defaults: bool = False, exclude_none: bool = False, round_trip: bool = False, - warnings: bool = True, + warnings: bool | Literal["none", "warn", "error"] = True, + context: dict[str, Any] | None = None, + serialize_as_any: bool = False, ) -> dict[str, Any]: """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump @@ -279,6 +281,10 @@ def model_dump( raise ValueError("round_trip is only supported in Pydantic v2") if warnings != True: raise ValueError("warnings is only supported in Pydantic v2") + if context is not None: + raise ValueError("context is only supported in Pydantic v2") + if serialize_as_any != False: + raise ValueError("serialize_as_any is only supported in Pydantic v2") return super().dict( # pyright: ignore[reportDeprecated] include=include, exclude=exclude, @@ -300,7 +306,9 @@ def model_dump_json( exclude_defaults: bool = False, exclude_none: bool = False, round_trip: bool = False, - warnings: bool = True, + warnings: bool | Literal["none", "warn", "error"] = True, + context: dict[str, Any] | None = None, + serialize_as_any: bool = False, ) -> str: """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json @@ -324,6 +332,10 @@ def model_dump_json( raise ValueError("round_trip is only supported in Pydantic v2") if warnings != True: raise ValueError("warnings is only supported in Pydantic v2") + if context is not None: + raise ValueError("context is only supported in Pydantic v2") + if serialize_as_any != False: + raise ValueError("serialize_as_any is only supported in Pydantic v2") return super().json( # type: ignore[reportDeprecated] indent=indent, include=include, @@ -550,7 +562,7 @@ def 
_build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any, field_schema = field["schema"] if field_schema["type"] == "literal": - for entry in field_schema["expected"]: + for entry in cast("LiteralSchema", field_schema)["expected"]: if isinstance(entry, str): mapping[entry] = variant else: diff --git a/tests/test_models.py b/tests/test_models.py index 3ca967e..ef3c150 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -31,7 +31,7 @@ class NestedModel(BaseModel): # mismatched types m = NestedModel.construct(nested="hello!") - assert m.nested == "hello!" + assert cast(Any, m.nested) == "hello!" def test_optional_nested_model() -> None: @@ -48,7 +48,7 @@ class NestedModel(BaseModel): # mismatched types m3 = NestedModel.construct(nested={"foo"}) assert isinstance(cast(Any, m3.nested), set) - assert m3.nested == {"foo"} + assert cast(Any, m3.nested) == {"foo"} def test_list_nested_model() -> None: @@ -323,7 +323,7 @@ class Model(BaseModel): assert len(m.items) == 2 assert isinstance(m.items[0], Submodel1) assert m.items[0].level == -1 - assert m.items[1] == 156 + assert cast(Any, m.items[1]) == 156 def test_union_of_lists() -> None: @@ -355,7 +355,7 @@ class Model(BaseModel): assert len(m.items) == 2 assert isinstance(m.items[0], SubModel1) assert m.items[0].level == -1 - assert m.items[1] == 156 + assert cast(Any, m.items[1]) == 156 def test_dict_of_union() -> None: diff --git a/tests/test_transform.py b/tests/test_transform.py index 0586324..3d8a5eb 100644 --- a/tests/test_transform.py +++ b/tests/test_transform.py @@ -260,20 +260,22 @@ class MyModel(BaseModel): @parametrize @pytest.mark.asyncio async def test_pydantic_model_to_dictionary(use_async: bool) -> None: - assert await transform(MyModel(foo="hi!"), Any, use_async) == {"foo": "hi!"} - assert await transform(MyModel.construct(foo="hi!"), Any, use_async) == {"foo": "hi!"} + assert cast(Any, await transform(MyModel(foo="hi!"), Any, use_async)) == {"foo": "hi!"} + assert cast(Any, await transform(MyModel.construct(foo="hi!"), Any, use_async)) == {"foo": "hi!"} @parametrize @pytest.mark.asyncio async def test_pydantic_empty_model(use_async: bool) -> None: - assert await transform(MyModel.construct(), Any, use_async) == {} + assert cast(Any, await transform(MyModel.construct(), Any, use_async)) == {} @parametrize @pytest.mark.asyncio async def test_pydantic_unknown_field(use_async: bool) -> None: - assert await transform(MyModel.construct(my_untyped_field=True), Any, use_async) == {"my_untyped_field": True} + assert cast(Any, await transform(MyModel.construct(my_untyped_field=True), Any, use_async)) == { + "my_untyped_field": True + } @parametrize @@ -285,7 +287,7 @@ async def test_pydantic_mismatched_types(use_async: bool) -> None: params = await transform(model, Any, use_async) else: params = await transform(model, Any, use_async) - assert params == {"foo": True} + assert cast(Any, params) == {"foo": True} @parametrize @@ -297,7 +299,7 @@ async def test_pydantic_mismatched_object_type(use_async: bool) -> None: params = await transform(model, Any, use_async) else: params = await transform(model, Any, use_async) - assert params == {"foo": {"hello": "world"}} + assert cast(Any, params) == {"foo": {"hello": "world"}} class ModelNestedObjects(BaseModel): @@ -309,7 +311,7 @@ class ModelNestedObjects(BaseModel): async def test_pydantic_nested_objects(use_async: bool) -> None: model = ModelNestedObjects.construct(nested={"foo": "stainless"}) assert isinstance(model.nested, MyModel) - assert await transform(model, 
Any, use_async) == {"nested": {"foo": "stainless"}} + assert cast(Any, await transform(model, Any, use_async)) == {"nested": {"foo": "stainless"}} class ModelWithDefaultField(BaseModel): @@ -325,19 +327,19 @@ async def test_pydantic_default_field(use_async: bool) -> None: model = ModelWithDefaultField.construct() assert model.with_none_default is None assert model.with_str_default == "foo" - assert await transform(model, Any, use_async) == {} + assert cast(Any, await transform(model, Any, use_async)) == {} # should be included when the default value is explicitly given model = ModelWithDefaultField.construct(with_none_default=None, with_str_default="foo") assert model.with_none_default is None assert model.with_str_default == "foo" - assert await transform(model, Any, use_async) == {"with_none_default": None, "with_str_default": "foo"} + assert cast(Any, await transform(model, Any, use_async)) == {"with_none_default": None, "with_str_default": "foo"} # should be included when a non-default value is explicitly given model = ModelWithDefaultField.construct(with_none_default="bar", with_str_default="baz") assert model.with_none_default == "bar" assert model.with_str_default == "baz" - assert await transform(model, Any, use_async) == {"with_none_default": "bar", "with_str_default": "baz"} + assert cast(Any, await transform(model, Any, use_async)) == {"with_none_default": "bar", "with_str_default": "baz"} class TypedDictIterableUnion(TypedDict): From 3f06cb70c1ef852ec71f9b4763f991ffcf3e1562 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 19:41:07 +0000 Subject: [PATCH 02/39] chore: rebuild project due to codegen change (#56) --- .devcontainer/Dockerfile | 2 +- .github/workflows/ci.yml | 9 +- .github/workflows/publish-pypi.yml | 6 +- .github/workflows/release-doctor.yml | 2 + .gitignore | 1 + CONTRIBUTING.md | 52 ++-- README.md | 57 ++-- bin/publish-pypi | 3 + pyproject.toml | 51 ++-- requirements-dev.lock | 38 +-- requirements.lock | 12 +- scripts/bootstrap | 2 +- scripts/mock | 4 +- scripts/test | 3 + src/dataherald/_base_client.py | 270 +++++++++++------- src/dataherald/_compat.py | 43 ++- src/dataherald/_files.py | 12 +- src/dataherald/_models.py | 65 ++++- src/dataherald/_response.py | 20 +- src/dataherald/_types.py | 15 +- src/dataherald/_utils/__init__.py | 5 + src/dataherald/_utils/_proxy.py | 3 +- src/dataherald/_utils/_reflection.py | 42 +++ src/dataherald/_utils/_sync.py | 19 +- src/dataherald/_utils/_transform.py | 9 +- src/dataherald/_utils/_utils.py | 45 +-- .../database_connections.py | 26 +- .../resources/database_connections/drivers.py | 26 +- src/dataherald/resources/engine.py | 26 +- src/dataherald/resources/finetunings.py | 26 +- src/dataherald/resources/generations.py | 26 +- src/dataherald/resources/golden_sqls.py | 30 +- src/dataherald/resources/heartbeat.py | 26 +- .../resources/instructions/first.py | 26 +- .../resources/instructions/instructions.py | 26 +- src/dataherald/resources/nl_generations.py | 26 +- src/dataherald/resources/prompts/prompts.py | 26 +- .../resources/prompts/sql_generations.py | 26 +- .../sql_generations/nl_generations.py | 26 +- .../sql_generations/sql_generations.py | 26 +- .../resources/table_descriptions.py | 32 ++- .../database_connection_list_response.py | 3 +- .../driver_list_response.py | 3 +- .../types/finetuning_list_response.py | 3 +- .../types/generation_list_response.py | 3 +- .../types/golden_sql_list_response.py | 3 +- 
.../types/golden_sql_upload_response.py | 3 +- .../types/instruction_list_response.py | 3 +- .../types/nl_generation_list_response.py | 3 +- src/dataherald/types/prompt_list_response.py | 3 +- .../types/sql_generation_execute_response.py | 3 +- .../types/sql_generation_list_response.py | 3 +- .../types/table_description_list_response.py | 3 +- ...table_description_sync_schemas_response.py | 3 +- .../prompts/test_sql_generations.py | 84 +++--- .../sql_generations/test_nl_generations.py | 44 +-- .../test_database_connections.py | 144 +++++----- tests/api_resources/test_finetunings.py | 76 ++--- tests/api_resources/test_generations.py | 96 +++---- tests/api_resources/test_golden_sqls.py | 140 ++++----- tests/api_resources/test_instructions.py | 100 +++---- tests/api_resources/test_nl_generations.py | 64 ++--- tests/api_resources/test_prompts.py | 48 ++-- tests/api_resources/test_sql_generations.py | 76 ++--- .../api_resources/test_table_descriptions.py | 152 +++++----- tests/conftest.py | 14 +- tests/test_client.py | 179 +++++++++++- tests/test_deepcopy.py | 3 +- tests/test_models.py | 23 +- tests/test_response.py | 101 ++++++- tests/test_transform.py | 15 + tests/test_utils/test_typing.py | 15 +- tests/utils.py | 10 +- 73 files changed, 1720 insertions(+), 893 deletions(-) create mode 100644 src/dataherald/_utils/_reflection.py diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index dd93962..ac9a2e7 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -3,7 +3,7 @@ FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT} USER vscode -RUN curl -sSf https://rye-up.com/get | RYE_VERSION="0.24.0" RYE_INSTALL_OPTION="--yes" bash +RUN curl -sSf https://rye.astral.sh/get | RYE_VERSION="0.35.0" RYE_INSTALL_OPTION="--yes" bash ENV PATH=/home/vscode/.rye/shims:$PATH RUN echo "[[ -d .venv ]] && source .venv/bin/activate" >> /home/vscode/.bashrc diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6fcd6ae..4029396 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,6 +6,7 @@ on: pull_request: branches: - main + - next jobs: lint: @@ -18,10 +19,10 @@ jobs: - name: Install Rye run: | - curl -sSf https://rye-up.com/get | bash + curl -sSf https://rye.astral.sh/get | bash echo "$HOME/.rye/shims" >> $GITHUB_PATH env: - RYE_VERSION: 0.24.0 + RYE_VERSION: '0.35.0' RYE_INSTALL_OPTION: '--yes' - name: Install dependencies @@ -38,10 +39,10 @@ jobs: - name: Install Rye run: | - curl -sSf https://rye-up.com/get | bash + curl -sSf https://rye.astral.sh/get | bash echo "$HOME/.rye/shims" >> $GITHUB_PATH env: - RYE_VERSION: 0.24.0 + RYE_VERSION: '0.35.0' RYE_INSTALL_OPTION: '--yes' - name: Bootstrap diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index eabffab..e5879e9 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -18,11 +18,11 @@ jobs: - name: Install Rye run: | - curl -sSf https://rye-up.com/get | bash + curl -sSf https://rye.astral.sh/get | bash echo "$HOME/.rye/shims" >> $GITHUB_PATH env: - RYE_VERSION: 0.24.0 - RYE_INSTALL_OPTION: "--yes" + RYE_VERSION: '0.35.0' + RYE_INSTALL_OPTION: '--yes' - name: Publish to PyPI run: | diff --git a/.github/workflows/release-doctor.yml b/.github/workflows/release-doctor.yml index 75ece6f..43b48d8 100644 --- a/.github/workflows/release-doctor.yml +++ b/.github/workflows/release-doctor.yml @@ -1,6 +1,8 @@ name: Release Doctor on: pull_request: + branches: + - main workflow_dispatch: jobs: diff --git a/.gitignore 
b/.gitignore index 0f9a66a..8779740 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +.prism.log .vscode _dev diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 8fbabb7..da496a4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,9 +2,13 @@ ### With Rye -We use [Rye](https://rye-up.com/) to manage dependencies so we highly recommend [installing it](https://rye-up.com/guide/installation/) as it will automatically provision a Python environment with the expected Python version. +We use [Rye](https://rye.astral.sh/) to manage dependencies because it will automatically provision a Python environment with the expected Python version. To set it up, run: -After installing Rye, you'll just have to run this command: +```sh +$ ./scripts/bootstrap +``` + +Or [install Rye manually](https://rye.astral.sh/guide/installation/) and run: ```sh $ rye sync --all-features @@ -31,25 +35,25 @@ $ pip install -r requirements-dev.lock ## Modifying/Adding code -Most of the SDK is generated code, and any modified code will be overridden on the next generation. The -`src/dataherald/lib/` and `examples/` directories are exceptions and will never be overridden. +Most of the SDK is generated code. Modifications to code will be persisted between generations, but may +result in merge conflicts between manual patches and changes from the generator. The generator will never +modify the contents of the `src/dataherald/lib/` and `examples/` directories. ## Adding and running examples -All files in the `examples/` directory are not modified by the Stainless generator and can be freely edited or -added to. +All files in the `examples/` directory are not modified by the generator and can be freely edited or added to. -```bash +```py # add an example to examples/<your-example>.py #!/usr/bin/env -S rye run python … ``` -``` -chmod +x examples/<your-example>.py +```sh +$ chmod +x examples/<your-example>.py # run the example against your api -./examples/<your-example>.py +$ ./examples/<your-example>.py ``` ## Using the repository from source @@ -58,8 +62,8 @@ If you’d like to use the repository from source, you can either install from g To install via git: -```bash -pip install git+ssh://git@github.com/Dataherald/dataherald-python#main.git +```sh +$ pip install git+ssh://git@github.com/Dataherald/dataherald-python#main.git ``` Alternatively, you can build from source and install the wheel file: @@ -68,29 +72,29 @@ Building this package will create two files in the `dist/` directory, a `.tar.gz To create a distributable version of the library, all you have to do is run this command: -```bash -rye build +```sh +$ rye build # or -python -m build +$ python -m build ``` Then to install: ```sh -pip install ./path-to-wheel-file.whl +$ pip install ./path-to-wheel-file.whl ``` ## Running tests Most tests require you to [set up a mock server](https://github.com/stoplightio/prism) against the OpenAPI spec to run the tests. 
-```bash +```sh # you will need npm installed -npx prism mock path/to/your/openapi.yml +$ npx prism mock path/to/your/openapi.yml ``` -```bash -rye run pytest +```sh +$ ./scripts/test ``` ## Linting and formatting @@ -100,14 +104,14 @@ This repository uses [ruff](https://github.com/astral-sh/ruff) and To lint: -```bash -rye run lint +```sh +$ ./scripts/lint ``` To format and fix all ruff issues automatically: -```bash -rye run format +```sh +$ ./scripts/format ``` ## Publishing and releases diff --git a/README.md b/README.md index f155e5c..89e7f5d 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [![PyPI version](https://img.shields.io/pypi/v/dataherald.svg)](https://pypi.org/project/dataherald/) -The Dataherald Python library provides convenient access to the Dataherald REST API from any Python 3.7+ +The Dataherald Python library provides convenient access to the Dataherald REST API from any Python 3.8+ application. The library includes type definitions for all request params and response fields, and offers both synchronous and asynchronous clients powered by [httpx](https://github.com/encode/httpx). @@ -10,7 +10,7 @@ It is generated with [Stainless](https://www.stainlessapi.com/). ## Documentation -The REST API documentation can be found [on dataherald.readthedocs.io](https://dataherald.readthedocs.io/en/latest/). The full API of this library can be found in [api.md](api.md). +The REST API documentation can be found on [dataherald.readthedocs.io](https://dataherald.readthedocs.io/en/latest/). The full API of this library can be found in [api.md](api.md). ## Installation @@ -32,8 +32,8 @@ client = Dataherald( ) db_connection_response = client.database_connections.create( - alias="string", - connection_uri="string", + alias="alias", + connection_uri="connection_uri", ) print(db_connection_response.id) ``` @@ -59,8 +59,8 @@ client = AsyncDataherald( async def main() -> None: db_connection_response = await client.database_connections.create( - alias="string", - connection_uri="string", + alias="alias", + connection_uri="connection_uri", ) print(db_connection_response.id) @@ -96,8 +96,8 @@ client = Dataherald() try: client.database_connections.create( - alias="string", - connection_uri="string", + alias="alias", + connection_uri="connection_uri", ) except dataherald.APIConnectionError as e: print("The server could not be reached") @@ -142,8 +142,8 @@ client = Dataherald( # Or, configure per-request: client.with_options(max_retries=5).database_connections.create( - alias="string", - connection_uri="string", + alias="alias", + connection_uri="connection_uri", ) ``` @@ -168,8 +168,8 @@ client = Dataherald( # Override per-request: client.with_options(timeout=5.0).database_connections.create( - alias="string", - connection_uri="string", + alias="alias", + connection_uri="connection_uri", ) ``` @@ -210,8 +210,8 @@ from dataherald import Dataherald client = Dataherald() response = client.database_connections.with_raw_response.create( - alias="string", - connection_uri="string", + alias="alias", + connection_uri="connection_uri", ) print(response.headers.get('X-My-Header')) @@ -231,8 +231,8 @@ To stream the response body, use `.with_streaming_response` instead, which requi ```python with client.database_connections.with_streaming_response.create( - alias="string", - connection_uri="string", + alias="alias", + connection_uri="connection_uri", ) as response: print(response.headers.get("X-My-Header")) @@ -283,7 +283,7 @@ You can directly override the [httpx 
client](https://www.python-httpx.org/api/#c - Support for proxies - Custom transports -- Additional [advanced](https://www.python-httpx.org/advanced/#client-instances) functionality +- Additional [advanced](https://www.python-httpx.org/advanced/clients/) functionality ```python from dataherald import Dataherald, DefaultHttpxClient @@ -298,6 +298,12 @@ client = Dataherald( ) ``` +You can also customize the client on a per-request basis by using `with_options()`: + +```python +client.with_options(http_client=DefaultHttpxClient(...)) +``` + ### Managing HTTP resources By default the library closes underlying HTTP connections whenever the client is [garbage collected](https://docs.python.org/3/reference/datamodel.html#object.__del__). You can manually close the client using the `.close()` method if desired, or with a context manager that closes when exiting. @@ -314,6 +320,21 @@ We take backwards-compatibility seriously and work hard to ensure you can rely o We are keen for your feedback; please open an [issue](https://www.github.com/Dataherald/dataherald-python/issues) with questions, bugs, or suggestions. +### Determining the installed version + +If you've upgraded to the latest version but aren't seeing any new features you were expecting then your python environment is likely still using an older version. + +You can determine the version that is being used at runtime with: + +```py +import dataherald +print(dataherald.__version__) +``` + ## Requirements -Python 3.7 or higher. +Python 3.8 or higher. + +## Contributing + +See [the contributing documentation](./CONTRIBUTING.md). diff --git a/bin/publish-pypi b/bin/publish-pypi index 826054e..05bfccb 100644 --- a/bin/publish-pypi +++ b/bin/publish-pypi @@ -3,4 +3,7 @@ set -eux mkdir -p dist rye build --clean +# Patching importlib-metadata version until upstream library version is updated +# https://github.com/pypa/twine/issues/977#issuecomment-2189800841 +"$HOME/.rye/self/bin/python3" -m pip install 'importlib-metadata==7.2.1' rye publish --yes --token=$PYPI_TOKEN diff --git a/pyproject.toml b/pyproject.toml index d292a93..d1e3d28 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,13 +15,11 @@ dependencies = [ "distro>=1.7.0, <2", "sniffio", "cached-property; python_version < '3.8'", - ] -requires-python = ">= 3.7" +requires-python = ">= 3.8" classifiers = [ "Typing :: Typed", "Intended Audience :: Developers", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", @@ -36,8 +34,6 @@ classifiers = [ "License :: OSI Approved :: Apache Software License" ] - - [project.urls] Homepage = "https://github.com/Dataherald/dataherald-python" Repository = "https://github.com/Dataherald/dataherald-python" @@ -58,7 +54,7 @@ dev-dependencies = [ "nox", "dirty-equals>=0.6.0", "importlib-metadata>=6.7.0", - + "rich>=13.7.1", ] [tool.rye.scripts] @@ -66,18 +62,21 @@ format = { chain = [ "format:ruff", "format:docs", "fix:ruff", + # run formatting again to fix any inconsistencies when imports are stripped + "format:ruff", ]} -"format:black" = "black ." "format:docs" = "python scripts/utils/ruffen-docs.py README.md api.md" "format:ruff" = "ruff format" -"format:isort" = "isort ." "lint" = { chain = [ "check:ruff", "typecheck", + "check:importable", ]} -"check:ruff" = "ruff ." -"fix:ruff" = "ruff --fix ." +"check:ruff" = "ruff check ." +"fix:ruff" = "ruff check --fix ." 
+ +"check:importable" = "python -c 'import dataherald'" typecheck = { chain = [ "typecheck:pyright", @@ -99,6 +98,21 @@ include = [ [tool.hatch.build.targets.wheel] packages = ["src/dataherald"] +[tool.hatch.build.targets.sdist] +# Basically everything except hidden files/directories (such as .github, .devcontainers, .python-version, etc) +include = [ + "/*.toml", + "/*.json", + "/*.lock", + "/*.md", + "/mypy.ini", + "/noxfile.py", + "bin/*", + "examples/*", + "src/*", + "tests/*", +] + [tool.hatch.metadata.hooks.fancy-pypi-readme] content-type = "text/markdown" @@ -110,10 +124,6 @@ path = "README.md" pattern = '\[(.+?)\]\(((?!https?://)\S+?)\)' replacement = '[\1](https://github.com/Dataherald/dataherald-python/tree/main/\g<2>)' -[tool.black] -line-length = 120 -target-version = ["py37"] - [tool.pytest.ini_options] testpaths = ["tests"] addopts = "--tb=short" @@ -128,7 +138,7 @@ filterwarnings = [ # there are a couple of flags that are still disabled by # default in strict mode as they are experimental and niche. typeCheckingMode = "strict" -pythonVersion = "3.7" +pythonVersion = "3.8" exclude = [ "_dev", @@ -146,6 +156,11 @@ reportPrivateUsage = false line-length = 120 output-format = "grouped" target-version = "py37" + +[tool.ruff.format] +docstring-code-format = true + +[tool.ruff.lint] select = [ # isort "I", @@ -174,10 +189,6 @@ unfixable = [ "T201", "T203", ] -ignore-init-module-imports = true - -[tool.ruff.format] -docstring-code-format = true [tool.ruff.lint.flake8-tidy-imports.banned-api] "functools.lru_cache".msg = "This function does not retain type information for the wrapped function's arguments; The `lru_cache` function from `_utils` should be used instead" @@ -189,7 +200,7 @@ combine-as-imports = true extra-standard-library = ["typing_extensions"] known-first-party = ["dataherald", "tests"] -[tool.ruff.per-file-ignores] +[tool.ruff.lint.per-file-ignores] "bin/**.py" = ["T201", "T203"] "scripts/**.py" = ["T201", "T203"] "tests/**.py" = ["T201", "T203"] diff --git a/requirements-dev.lock b/requirements-dev.lock index e1f5de8..6fb7f60 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -6,17 +6,16 @@ # features: [] # all-features: true # with-sources: false +# generate-hashes: false -e file:. 
annotated-types==0.6.0 # via pydantic -anyio==4.1.0 +anyio==4.4.0 # via dataherald # via httpx argcomplete==3.1.2 # via nox -attrs==23.1.0 - # via pytest certifi==2023.7.22 # via httpcore # via httpx @@ -27,8 +26,9 @@ distlib==0.3.7 # via virtualenv distro==1.8.0 # via dataherald -exceptiongroup==1.1.3 +exceptiongroup==1.2.2 # via anyio + # via pytest filelock==3.12.4 # via virtualenv h11==0.14.0 @@ -44,7 +44,11 @@ idna==3.4 importlib-metadata==7.0.0 iniconfig==2.0.0 # via pytest -mypy==1.7.1 +markdown-it-py==3.0.0 + # via rich +mdurl==0.1.2 + # via markdown-it-py +mypy==1.13.0 mypy-extensions==1.0.0 # via mypy nodeenv==1.8.0 @@ -55,24 +59,25 @@ packaging==23.2 # via pytest platformdirs==3.11.0 # via virtualenv -pluggy==1.3.0 +pluggy==1.5.0 # via pytest -py==1.11.0 - # via pytest -pydantic==2.7.1 +pydantic==2.9.2 # via dataherald -pydantic-core==2.18.2 +pydantic-core==2.23.4 # via pydantic -pyright==1.1.359 -pytest==7.1.1 +pygments==2.18.0 + # via rich +pyright==1.1.380 +pytest==8.3.3 # via pytest-asyncio -pytest-asyncio==0.21.1 +pytest-asyncio==0.24.0 python-dateutil==2.8.2 # via time-machine pytz==2023.3.post1 # via dirty-equals respx==0.20.2 -ruff==0.1.9 +rich==13.7.1 +ruff==0.6.9 setuptools==68.2.2 # via nodeenv six==1.16.0 @@ -82,10 +87,11 @@ sniffio==1.3.0 # via dataherald # via httpx time-machine==2.9.0 -tomli==2.0.1 +tomli==2.0.2 # via mypy # via pytest -typing-extensions==4.8.0 +typing-extensions==4.12.2 + # via anyio # via dataherald # via mypy # via pydantic diff --git a/requirements.lock b/requirements.lock index e329365..04d3a03 100644 --- a/requirements.lock +++ b/requirements.lock @@ -6,11 +6,12 @@ # features: [] # all-features: true # with-sources: false +# generate-hashes: false -e file:. annotated-types==0.6.0 # via pydantic -anyio==4.1.0 +anyio==4.4.0 # via dataherald # via httpx certifi==2023.7.22 @@ -18,7 +19,7 @@ certifi==2023.7.22 # via httpx distro==1.8.0 # via dataherald -exceptiongroup==1.1.3 +exceptiongroup==1.2.2 # via anyio h11==0.14.0 # via httpcore @@ -29,15 +30,16 @@ httpx==0.25.2 idna==3.4 # via anyio # via httpx -pydantic==2.7.1 +pydantic==2.9.2 # via dataherald -pydantic-core==2.18.2 +pydantic-core==2.23.4 # via pydantic sniffio==1.3.0 # via anyio # via dataherald # via httpx -typing-extensions==4.8.0 +typing-extensions==4.12.2 + # via anyio # via dataherald # via pydantic # via pydantic-core diff --git a/scripts/bootstrap b/scripts/bootstrap index 29df07e..8c5c60e 100755 --- a/scripts/bootstrap +++ b/scripts/bootstrap @@ -16,4 +16,4 @@ echo "==> Installing Python dependencies…" # experimental uv support makes installations significantly faster rye config --set-bool behavior.use-uv=true -rye sync +rye sync --all-features diff --git a/scripts/mock b/scripts/mock index fe89a1d..d2814ae 100755 --- a/scripts/mock +++ b/scripts/mock @@ -21,7 +21,7 @@ echo "==> Starting mock server with URL ${URL}" # Run prism mock on the given spec if [ "$1" == "--daemon" ]; then - npm exec --package=@stoplight/prism-cli@~5.8 -- prism mock "$URL" &> .prism.log & + npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL" &> .prism.log & # Wait for server to come online echo -n "Waiting for server" @@ -37,5 +37,5 @@ if [ "$1" == "--daemon" ]; then echo else - npm exec --package=@stoplight/prism-cli@~5.8 -- prism mock "$URL" + npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL" fi diff --git a/scripts/test b/scripts/test index b3ace90..4fa5698 100755 --- a/scripts/test +++ b/scripts/test @@ -54,3 +54,6 @@ fi echo "==> Running tests" rye run pytest 
"$@" + +echo "==> Running Pydantic v1 tests" +rye run nox -s test-pydantic-v1 -- "$@" diff --git a/src/dataherald/_base_client.py b/src/dataherald/_base_client.py index b7986af..1bd954c 100644 --- a/src/dataherald/_base_client.py +++ b/src/dataherald/_base_client.py @@ -1,5 +1,6 @@ from __future__ import annotations +import sys import json import time import uuid @@ -58,9 +59,10 @@ HttpxSendArgs, AsyncTransport, RequestOptions, + HttpxRequestFiles, ModelBuilderProtocol, ) -from ._utils import is_dict, is_list, is_given, lru_cache, is_mapping +from ._utils import is_dict, is_list, asyncify, is_given, lru_cache, is_mapping from ._compat import model_copy, model_dump from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type from ._response import ( @@ -123,16 +125,14 @@ def __init__( self, *, url: URL, - ) -> None: - ... + ) -> None: ... @overload def __init__( self, *, params: Query, - ) -> None: - ... + ) -> None: ... def __init__( self, @@ -143,6 +143,12 @@ def __init__( self.url = url self.params = params + @override + def __repr__(self) -> str: + if self.url: + return f"{self.__class__.__name__}(url={self.url})" + return f"{self.__class__.__name__}(params={self.params})" + class BasePage(GenericModel, Generic[_T]): """ @@ -165,8 +171,7 @@ def has_next_page(self) -> bool: return False return self.next_page_info() is not None - def next_page_info(self) -> Optional[PageInfo]: - ... + def next_page_info(self) -> Optional[PageInfo]: ... def _get_page_items(self) -> Iterable[_T]: # type: ignore[empty-body] ... @@ -358,6 +363,7 @@ def __init__( self._custom_query = custom_query or {} self._strict_response_validation = _strict_response_validation self._idempotency_header = None + self._platform: Platform | None = None if max_retries is None: # pyright: ignore[reportUnnecessaryComparison] raise TypeError( @@ -400,14 +406,7 @@ def _make_status_error( ) -> _exceptions.APIStatusError: raise NotImplementedError() - def _remaining_retries( - self, - remaining_retries: Optional[int], - options: FinalRequestOptions, - ) -> int: - return remaining_retries if remaining_retries is not None else options.get_max_retries(self.max_retries) - - def _build_headers(self, options: FinalRequestOptions) -> httpx.Headers: + def _build_headers(self, options: FinalRequestOptions, *, retries_taken: int = 0) -> httpx.Headers: custom_headers = options.headers or {} headers_dict = _merge_mappings(self.default_headers, custom_headers) self._validate_headers(headers_dict, custom_headers) @@ -419,6 +418,11 @@ def _build_headers(self, options: FinalRequestOptions) -> httpx.Headers: if idempotency_header and options.method.lower() != "get" and idempotency_header not in headers: headers[idempotency_header] = options.idempotency_key or self._idempotency_key() + # Don't set the retry count header if it was already set or removed by the caller. We check + # `custom_headers`, which can contain `Omit()`, instead of `headers` to account for the removal case. 
+ if "x-stainless-retry-count" not in (header.lower() for header in custom_headers): + headers["x-stainless-retry-count"] = str(retries_taken) + return headers def _prepare_url(self, url: str) -> URL: @@ -440,6 +444,8 @@ def _make_sse_decoder(self) -> SSEDecoder | SSEBytesDecoder: def _build_request( self, options: FinalRequestOptions, + *, + retries_taken: int = 0, ) -> httpx.Request: if log.isEnabledFor(logging.DEBUG): log.debug("Request options: %s", model_dump(options, exclude_unset=True)) @@ -455,9 +461,10 @@ def _build_request( else: raise RuntimeError(f"Unexpected JSON data type, {type(json_data)}, cannot merge with `extra_body`") - headers = self._build_headers(options) - params = _merge_mappings(self._custom_query, options.params) + headers = self._build_headers(options, retries_taken=retries_taken) + params = _merge_mappings(self.default_query, options.params) content_type = headers.get("Content-Type") + files = options.files # If the given Content-Type header is multipart/form-data then it # has to be removed so that httpx can generate the header with @@ -471,7 +478,7 @@ def _build_request( headers.pop("Content-Type") # As we are now sending multipart/form-data instead of application/json - # we need to tell httpx to use it, https://www.python-httpx.org/advanced/#multipart-file-encoding + # we need to tell httpx to use it, https://www.python-httpx.org/advanced/clients/#multipart-file-encoding if json_data: if not is_dict(json_data): raise TypeError( @@ -479,19 +486,33 @@ def _build_request( ) kwargs["data"] = self._serialize_multipartform(json_data) + # httpx determines whether or not to send a "multipart/form-data" + # request based on the truthiness of the "files" argument. + # This gets around that issue by generating a dict value that + # evaluates to true. + # + # https://github.com/encode/httpx/discussions/2399#discussioncomment-3814186 + if not files: + files = cast(HttpxRequestFiles, ForceMultipartDict()) + + prepared_url = self._prepare_url(options.url) + if "_" in prepared_url.host: + # work around https://github.com/encode/httpx/discussions/2880 + kwargs["extensions"] = {"sni_hostname": prepared_url.host.replace("_", "-")} + # TODO: report this error to httpx return self._client.build_request( # pyright: ignore[reportUnknownMemberType] headers=headers, timeout=self.timeout if isinstance(options.timeout, NotGiven) else options.timeout, method=options.method, - url=self._prepare_url(options.url), + url=prepared_url, # the `Query` type that we use is incompatible with qs' # `Params` type as it needs to be typed as `Mapping[str, object]` # so that passing a `TypedDict` doesn't cause an error. 
# https://github.com/microsoft/pyright/issues/3526#event-6715453066 params=self.qs.stringify(cast(Mapping[str, Any], params)) if params else None, json=json_data, - files=options.files, + files=files, **kwargs, ) @@ -592,6 +613,12 @@ def default_headers(self) -> dict[str, str | Omit]: **self._custom_headers, } + @property + def default_query(self) -> dict[str, object]: + return { + **self._custom_query, + } + def _validate_headers( self, headers: Headers, # noqa: ARG002 @@ -616,7 +643,10 @@ def base_url(self, url: URL | str) -> None: self._base_url = self._enforce_trailing_slash(url if isinstance(url, URL) else URL(url)) def platform_headers(self) -> Dict[str, str]: - return platform_headers(self._version) + # the actual implementation is in a separate `lru_cache` decorated + # function because adding `lru_cache` to methods will leak memory + # https://github.com/python/cpython/issues/88476 + return platform_headers(self._version, platform=self._platform) def _parse_retry_after_header(self, response_headers: Optional[httpx.Headers] = None) -> float | None: """Returns a float of the number of seconds (not milliseconds) to wait after retrying, or None if unspecified. @@ -665,7 +695,8 @@ def _calculate_retry_timeout( if retry_after is not None and 0 < retry_after <= 60: return retry_after - nb_retries = max_retries - remaining_retries + # Also cap retry count to 1000 to avoid any potential overflows with `pow` + nb_retries = min(max_retries - remaining_retries, 1000) # Apply exponential backoff, but not more than the max. sleep_seconds = min(INITIAL_RETRY_DELAY * pow(2.0, nb_retries), MAX_RETRY_DELAY) @@ -858,9 +889,9 @@ def __exit__( def _prepare_options( self, options: FinalRequestOptions, # noqa: ARG002 - ) -> None: + ) -> FinalRequestOptions: """Hook for mutating the given options""" - return None + return options def _prepare_request( self, @@ -882,8 +913,7 @@ def request( *, stream: Literal[True], stream_cls: Type[_StreamT], - ) -> _StreamT: - ... + ) -> _StreamT: ... @overload def request( @@ -893,8 +923,7 @@ def request( remaining_retries: Optional[int] = None, *, stream: Literal[False] = False, - ) -> ResponseT: - ... + ) -> ResponseT: ... @overload def request( @@ -905,8 +934,7 @@ def request( *, stream: bool = False, stream_cls: Type[_StreamT] | None = None, - ) -> ResponseT | _StreamT: - ... + ) -> ResponseT | _StreamT: ... 
def request( self, @@ -917,12 +945,17 @@ def request( stream: bool = False, stream_cls: type[_StreamT] | None = None, ) -> ResponseT | _StreamT: + if remaining_retries is not None: + retries_taken = options.get_max_retries(self.max_retries) - remaining_retries + else: + retries_taken = 0 + return self._request( cast_to=cast_to, options=options, stream=stream, stream_cls=stream_cls, - remaining_retries=remaining_retries, + retries_taken=retries_taken, ) def _request( @@ -930,15 +963,20 @@ def _request( *, cast_to: Type[ResponseT], options: FinalRequestOptions, - remaining_retries: int | None, + retries_taken: int, stream: bool, stream_cls: type[_StreamT] | None, ) -> ResponseT | _StreamT: + # create a copy of the options we were given so that if the + # options are mutated later & we then retry, the retries are + # given the original options + input_options = model_copy(options) + cast_to = self._maybe_override_cast_to(cast_to, options) - self._prepare_options(options) + options = self._prepare_options(options) - retries = self._remaining_retries(remaining_retries, options) - request = self._build_request(options) + remaining_retries = options.get_max_retries(self.max_retries) - retries_taken + request = self._build_request(options, retries_taken=retries_taken) self._prepare_request(request) kwargs: HttpxSendArgs = {} @@ -956,11 +994,11 @@ def _request( except httpx.TimeoutException as err: log.debug("Encountered httpx.TimeoutException", exc_info=True) - if retries > 0: + if remaining_retries > 0: return self._retry_request( - options, + input_options, cast_to, - retries, + retries_taken=retries_taken, stream=stream, stream_cls=stream_cls, response_headers=None, @@ -971,11 +1009,11 @@ def _request( except Exception as err: log.debug("Encountered Exception", exc_info=True) - if retries > 0: + if remaining_retries > 0: return self._retry_request( - options, + input_options, cast_to, - retries, + retries_taken=retries_taken, stream=stream, stream_cls=stream_cls, response_headers=None, @@ -998,13 +1036,13 @@ def _request( except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code log.debug("Encountered httpx.HTTPStatusError", exc_info=True) - if retries > 0 and self._should_retry(err.response): + if remaining_retries > 0 and self._should_retry(err.response): err.response.close() return self._retry_request( - options, + input_options, cast_to, - retries, - err.response.headers, + retries_taken=retries_taken, + response_headers=err.response.headers, stream=stream, stream_cls=stream_cls, ) @@ -1023,25 +1061,26 @@ def _request( response=response, stream=stream, stream_cls=stream_cls, + retries_taken=retries_taken, ) def _retry_request( self, options: FinalRequestOptions, cast_to: Type[ResponseT], - remaining_retries: int, - response_headers: httpx.Headers | None, *, + retries_taken: int, + response_headers: httpx.Headers | None, stream: bool, stream_cls: type[_StreamT] | None, ) -> ResponseT | _StreamT: - remaining = remaining_retries - 1 - if remaining == 1: + remaining_retries = options.get_max_retries(self.max_retries) - retries_taken + if remaining_retries == 1: log.debug("1 retry left") else: - log.debug("%i retries left", remaining) + log.debug("%i retries left", remaining_retries) - timeout = self._calculate_retry_timeout(remaining, options, response_headers) + timeout = self._calculate_retry_timeout(remaining_retries, options, response_headers) log.info("Retrying request to %s in %f seconds", options.url, timeout) # In a synchronous context we are blocking the entire thread. 
Up to the library user to run the client in a @@ -1051,7 +1090,7 @@ def _retry_request( return self._request( options=options, cast_to=cast_to, - remaining_retries=remaining, + retries_taken=retries_taken + 1, stream=stream, stream_cls=stream_cls, ) @@ -1064,6 +1103,7 @@ def _process_response( response: httpx.Response, stream: bool, stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, + retries_taken: int = 0, ) -> ResponseT: origin = get_origin(cast_to) or cast_to @@ -1081,6 +1121,7 @@ def _process_response( stream=stream, stream_cls=stream_cls, options=options, + retries_taken=retries_taken, ), ) @@ -1094,6 +1135,7 @@ def _process_response( stream=stream, stream_cls=stream_cls, options=options, + retries_taken=retries_taken, ) if bool(response.request.headers.get(RAW_RESPONSE_HEADER)): return cast(ResponseT, api_response) @@ -1126,8 +1168,7 @@ def get( cast_to: Type[ResponseT], options: RequestOptions = {}, stream: Literal[False] = False, - ) -> ResponseT: - ... + ) -> ResponseT: ... @overload def get( @@ -1138,8 +1179,7 @@ def get( options: RequestOptions = {}, stream: Literal[True], stream_cls: type[_StreamT], - ) -> _StreamT: - ... + ) -> _StreamT: ... @overload def get( @@ -1150,8 +1190,7 @@ def get( options: RequestOptions = {}, stream: bool, stream_cls: type[_StreamT] | None = None, - ) -> ResponseT | _StreamT: - ... + ) -> ResponseT | _StreamT: ... def get( self, @@ -1177,8 +1216,7 @@ def post( options: RequestOptions = {}, files: RequestFiles | None = None, stream: Literal[False] = False, - ) -> ResponseT: - ... + ) -> ResponseT: ... @overload def post( @@ -1191,8 +1229,7 @@ def post( files: RequestFiles | None = None, stream: Literal[True], stream_cls: type[_StreamT], - ) -> _StreamT: - ... + ) -> _StreamT: ... @overload def post( @@ -1205,8 +1242,7 @@ def post( files: RequestFiles | None = None, stream: bool, stream_cls: type[_StreamT] | None = None, - ) -> ResponseT | _StreamT: - ... + ) -> ResponseT | _StreamT: ... def post( self, @@ -1416,9 +1452,9 @@ async def __aexit__( async def _prepare_options( self, options: FinalRequestOptions, # noqa: ARG002 - ) -> None: + ) -> FinalRequestOptions: """Hook for mutating the given options""" - return None + return options async def _prepare_request( self, @@ -1439,8 +1475,7 @@ async def request( *, stream: Literal[False] = False, remaining_retries: Optional[int] = None, - ) -> ResponseT: - ... + ) -> ResponseT: ... @overload async def request( @@ -1451,8 +1486,7 @@ async def request( stream: Literal[True], stream_cls: type[_AsyncStreamT], remaining_retries: Optional[int] = None, - ) -> _AsyncStreamT: - ... + ) -> _AsyncStreamT: ... @overload async def request( @@ -1463,8 +1497,7 @@ async def request( stream: bool, stream_cls: type[_AsyncStreamT] | None = None, remaining_retries: Optional[int] = None, - ) -> ResponseT | _AsyncStreamT: - ... + ) -> ResponseT | _AsyncStreamT: ... 
async def request( self, @@ -1475,12 +1508,17 @@ async def request( stream_cls: type[_AsyncStreamT] | None = None, remaining_retries: Optional[int] = None, ) -> ResponseT | _AsyncStreamT: + if remaining_retries is not None: + retries_taken = options.get_max_retries(self.max_retries) - remaining_retries + else: + retries_taken = 0 + return await self._request( cast_to=cast_to, options=options, stream=stream, stream_cls=stream_cls, - remaining_retries=remaining_retries, + retries_taken=retries_taken, ) async def _request( @@ -1490,13 +1528,23 @@ async def _request( *, stream: bool, stream_cls: type[_AsyncStreamT] | None, - remaining_retries: int | None, + retries_taken: int, ) -> ResponseT | _AsyncStreamT: + if self._platform is None: + # `get_platform` can make blocking IO calls so we + # execute it earlier while we are in an async context + self._platform = await asyncify(get_platform)() + + # create a copy of the options we were given so that if the + # options are mutated later & we then retry, the retries are + # given the original options + input_options = model_copy(options) + cast_to = self._maybe_override_cast_to(cast_to, options) - await self._prepare_options(options) + options = await self._prepare_options(options) - retries = self._remaining_retries(remaining_retries, options) - request = self._build_request(options) + remaining_retries = options.get_max_retries(self.max_retries) - retries_taken + request = self._build_request(options, retries_taken=retries_taken) await self._prepare_request(request) kwargs: HttpxSendArgs = {} @@ -1512,11 +1560,11 @@ async def _request( except httpx.TimeoutException as err: log.debug("Encountered httpx.TimeoutException", exc_info=True) - if retries > 0: + if remaining_retries > 0: return await self._retry_request( - options, + input_options, cast_to, - retries, + retries_taken=retries_taken, stream=stream, stream_cls=stream_cls, response_headers=None, @@ -1527,11 +1575,11 @@ async def _request( except Exception as err: log.debug("Encountered Exception", exc_info=True) - if retries > 0: + if remaining_retries > 0: return await self._retry_request( - options, + input_options, cast_to, - retries, + retries_taken=retries_taken, stream=stream, stream_cls=stream_cls, response_headers=None, @@ -1549,13 +1597,13 @@ async def _request( except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code log.debug("Encountered httpx.HTTPStatusError", exc_info=True) - if retries > 0 and self._should_retry(err.response): + if remaining_retries > 0 and self._should_retry(err.response): await err.response.aclose() return await self._retry_request( - options, + input_options, cast_to, - retries, - err.response.headers, + retries_taken=retries_taken, + response_headers=err.response.headers, stream=stream, stream_cls=stream_cls, ) @@ -1574,25 +1622,26 @@ async def _request( response=response, stream=stream, stream_cls=stream_cls, + retries_taken=retries_taken, ) async def _retry_request( self, options: FinalRequestOptions, cast_to: Type[ResponseT], - remaining_retries: int, - response_headers: httpx.Headers | None, *, + retries_taken: int, + response_headers: httpx.Headers | None, stream: bool, stream_cls: type[_AsyncStreamT] | None, ) -> ResponseT | _AsyncStreamT: - remaining = remaining_retries - 1 - if remaining == 1: + remaining_retries = options.get_max_retries(self.max_retries) - retries_taken + if remaining_retries == 1: log.debug("1 retry left") else: - log.debug("%i retries left", remaining) + log.debug("%i retries left", remaining_retries) - timeout 
= self._calculate_retry_timeout(remaining, options, response_headers) + timeout = self._calculate_retry_timeout(remaining_retries, options, response_headers) log.info("Retrying request to %s in %f seconds", options.url, timeout) await anyio.sleep(timeout) @@ -1600,7 +1649,7 @@ async def _retry_request( return await self._request( options=options, cast_to=cast_to, - remaining_retries=remaining, + retries_taken=retries_taken + 1, stream=stream, stream_cls=stream_cls, ) @@ -1613,6 +1662,7 @@ async def _process_response( response: httpx.Response, stream: bool, stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, + retries_taken: int = 0, ) -> ResponseT: origin = get_origin(cast_to) or cast_to @@ -1630,6 +1680,7 @@ async def _process_response( stream=stream, stream_cls=stream_cls, options=options, + retries_taken=retries_taken, ), ) @@ -1643,6 +1694,7 @@ async def _process_response( stream=stream, stream_cls=stream_cls, options=options, + retries_taken=retries_taken, ) if bool(response.request.headers.get(RAW_RESPONSE_HEADER)): return cast(ResponseT, api_response) @@ -1665,8 +1717,7 @@ async def get( cast_to: Type[ResponseT], options: RequestOptions = {}, stream: Literal[False] = False, - ) -> ResponseT: - ... + ) -> ResponseT: ... @overload async def get( @@ -1677,8 +1728,7 @@ async def get( options: RequestOptions = {}, stream: Literal[True], stream_cls: type[_AsyncStreamT], - ) -> _AsyncStreamT: - ... + ) -> _AsyncStreamT: ... @overload async def get( @@ -1689,8 +1739,7 @@ async def get( options: RequestOptions = {}, stream: bool, stream_cls: type[_AsyncStreamT] | None = None, - ) -> ResponseT | _AsyncStreamT: - ... + ) -> ResponseT | _AsyncStreamT: ... async def get( self, @@ -1714,8 +1763,7 @@ async def post( files: RequestFiles | None = None, options: RequestOptions = {}, stream: Literal[False] = False, - ) -> ResponseT: - ... + ) -> ResponseT: ... @overload async def post( @@ -1728,8 +1776,7 @@ async def post( options: RequestOptions = {}, stream: Literal[True], stream_cls: type[_AsyncStreamT], - ) -> _AsyncStreamT: - ... + ) -> _AsyncStreamT: ... @overload async def post( @@ -1742,8 +1789,7 @@ async def post( options: RequestOptions = {}, stream: bool, stream_cls: type[_AsyncStreamT] | None = None, - ) -> ResponseT | _AsyncStreamT: - ... + ) -> ResponseT | _AsyncStreamT: ... 
async def post( self, @@ -1848,6 +1894,11 @@ def make_request_options( return options +class ForceMultipartDict(Dict[str, None]): + def __bool__(self) -> bool: + return True + + class OtherPlatform: def __init__(self, name: str) -> None: self.name = name @@ -1915,11 +1966,11 @@ def get_platform() -> Platform: @lru_cache(maxsize=None) -def platform_headers(version: str) -> Dict[str, str]: +def platform_headers(version: str, *, platform: Platform | None) -> Dict[str, str]: return { "X-Stainless-Lang": "python", "X-Stainless-Package-Version": version, - "X-Stainless-OS": str(get_platform()), + "X-Stainless-OS": str(platform or get_platform()), "X-Stainless-Arch": str(get_architecture()), "X-Stainless-Runtime": get_python_runtime(), "X-Stainless-Runtime-Version": get_python_version(), @@ -1954,7 +2005,6 @@ def get_python_version() -> str: def get_architecture() -> Arch: try: - python_bitness, _ = platform.architecture() machine = platform.machine().lower() except Exception: return "unknown" @@ -1970,7 +2020,7 @@ def get_architecture() -> Arch: return "x64" # TODO: untested - if python_bitness == "32bit": + if sys.maxsize <= 2**32: return "x32" if machine: diff --git a/src/dataherald/_compat.py b/src/dataherald/_compat.py index 74c7639..4794129 100644 --- a/src/dataherald/_compat.py +++ b/src/dataherald/_compat.py @@ -2,12 +2,12 @@ from typing import TYPE_CHECKING, Any, Union, Generic, TypeVar, Callable, cast, overload from datetime import date, datetime -from typing_extensions import Self +from typing_extensions import Self, Literal import pydantic from pydantic.fields import FieldInfo -from ._types import StrBytesIntFloat +from ._types import IncEx, StrBytesIntFloat _T = TypeVar("_T") _ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel) @@ -118,10 +118,10 @@ def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]: return model.__fields__ # type: ignore -def model_copy(model: _ModelT) -> _ModelT: +def model_copy(model: _ModelT, *, deep: bool = False) -> _ModelT: if PYDANTIC_V2: - return model.model_copy() - return model.copy() # type: ignore + return model.model_copy(deep=deep) + return model.copy(deep=deep) # type: ignore def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str: @@ -133,17 +133,24 @@ def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str: def model_dump( model: pydantic.BaseModel, *, + exclude: IncEx | None = None, exclude_unset: bool = False, exclude_defaults: bool = False, + warnings: bool = True, + mode: Literal["json", "python"] = "python", ) -> dict[str, Any]: - if PYDANTIC_V2: + if PYDANTIC_V2 or hasattr(model, "model_dump"): return model.model_dump( + mode=mode, + exclude=exclude, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, + warnings=warnings, ) return cast( "dict[str, Any]", model.dict( # pyright: ignore[reportDeprecated, reportUnnecessaryCast] + exclude=exclude, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, ), @@ -159,22 +166,19 @@ def model_parse(model: type[_ModelT], data: Any) -> _ModelT: # generic models if TYPE_CHECKING: - class GenericModel(pydantic.BaseModel): - ... + class GenericModel(pydantic.BaseModel): ... else: if PYDANTIC_V2: # there no longer needs to be a distinction in v2 but # we still have to create our own subclass to avoid # inconsistent MRO ordering errors - class GenericModel(pydantic.BaseModel): - ... + class GenericModel(pydantic.BaseModel): ... 
else: import pydantic.generics - class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): - ... + class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ... # cached properties @@ -193,26 +197,21 @@ class typed_cached_property(Generic[_T]): func: Callable[[Any], _T] attrname: str | None - def __init__(self, func: Callable[[Any], _T]) -> None: - ... + def __init__(self, func: Callable[[Any], _T]) -> None: ... @overload - def __get__(self, instance: None, owner: type[Any] | None = None) -> Self: - ... + def __get__(self, instance: None, owner: type[Any] | None = None) -> Self: ... @overload - def __get__(self, instance: object, owner: type[Any] | None = None) -> _T: - ... + def __get__(self, instance: object, owner: type[Any] | None = None) -> _T: ... def __get__(self, instance: object, owner: type[Any] | None = None) -> _T | Self: raise NotImplementedError() - def __set_name__(self, owner: type[Any], name: str) -> None: - ... + def __set_name__(self, owner: type[Any], name: str) -> None: ... # __set__ is not defined at runtime, but @cached_property is designed to be settable - def __set__(self, instance: object, value: _T) -> None: - ... + def __set__(self, instance: object, value: _T) -> None: ... else: try: from functools import cached_property as cached_property diff --git a/src/dataherald/_files.py b/src/dataherald/_files.py index 0d2022a..715cc20 100644 --- a/src/dataherald/_files.py +++ b/src/dataherald/_files.py @@ -39,13 +39,11 @@ def assert_is_file_content(obj: object, *, key: str | None = None) -> None: @overload -def to_httpx_files(files: None) -> None: - ... +def to_httpx_files(files: None) -> None: ... @overload -def to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: - ... +def to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: ... def to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles | None: @@ -83,13 +81,11 @@ def _read_file_content(file: FileContent) -> HttpxFileContent: @overload -async def async_to_httpx_files(files: None) -> None: - ... +async def async_to_httpx_files(files: None) -> None: ... @overload -async def async_to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: - ... +async def async_to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: ... async def async_to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles | None: diff --git a/src/dataherald/_models.py b/src/dataherald/_models.py index 75c68cc..6cb469e 100644 --- a/src/dataherald/_models.py +++ b/src/dataherald/_models.py @@ -10,6 +10,7 @@ ClassVar, Protocol, Required, + ParamSpec, TypedDict, TypeGuard, final, @@ -36,6 +37,7 @@ PropertyInfo, is_list, is_given, + json_safe, lru_cache, is_mapping, parse_date, @@ -67,6 +69,9 @@ __all__ = ["BaseModel", "GenericModel"] _T = TypeVar("_T") +_BaseModelT = TypeVar("_BaseModelT", bound="BaseModel") + +P = ParamSpec("P") @runtime_checkable @@ -172,7 +177,7 @@ def __str__(self) -> str: # Based on https://github.com/samuelcolvin/pydantic/issues/1168#issuecomment-817742836. 
@classmethod @override - def construct( + def construct( # pyright: ignore[reportIncompatibleMethodOverride] cls: Type[ModelT], _fields_set: set[str] | None = None, **values: object, @@ -244,8 +249,8 @@ def model_dump( self, *, mode: Literal["json", "python"] | str = "python", - include: IncEx = None, - exclude: IncEx = None, + include: IncEx | None = None, + exclude: IncEx | None = None, by_alias: bool = False, exclude_unset: bool = False, exclude_defaults: bool = False, @@ -275,8 +280,8 @@ def model_dump( Returns: A dictionary representation of the model. """ - if mode != "python": - raise ValueError("mode is only supported in Pydantic v2") + if mode not in {"json", "python"}: + raise ValueError("mode must be either 'json' or 'python'") if round_trip != False: raise ValueError("round_trip is only supported in Pydantic v2") if warnings != True: @@ -285,7 +290,7 @@ def model_dump( raise ValueError("context is only supported in Pydantic v2") if serialize_as_any != False: raise ValueError("serialize_as_any is only supported in Pydantic v2") - return super().dict( # pyright: ignore[reportDeprecated] + dumped = super().dict( # pyright: ignore[reportDeprecated] include=include, exclude=exclude, by_alias=by_alias, @@ -294,13 +299,15 @@ def model_dump( exclude_none=exclude_none, ) + return cast(dict[str, Any], json_safe(dumped)) if mode == "json" else dumped + @override def model_dump_json( self, *, indent: int | None = None, - include: IncEx = None, - exclude: IncEx = None, + include: IncEx | None = None, + exclude: IncEx | None = None, by_alias: bool = False, exclude_unset: bool = False, exclude_defaults: bool = False, @@ -376,9 +383,43 @@ def is_basemodel(type_: type) -> bool: def is_basemodel_type(type_: type) -> TypeGuard[type[BaseModel] | type[GenericModel]]: origin = get_origin(type_) or type_ + if not inspect.isclass(origin): + return False return issubclass(origin, BaseModel) or issubclass(origin, GenericModel) +def build( + base_model_cls: Callable[P, _BaseModelT], + *args: P.args, + **kwargs: P.kwargs, +) -> _BaseModelT: + """Construct a BaseModel class without validation. + + This is useful for cases where you need to instantiate a `BaseModel` + from an API response as this provides type-safe params which isn't supported + by helpers like `construct_type()`. + + ```py + build(MyModel, my_field_a="foo", my_field_b=123) + ``` + """ + if args: + raise TypeError( + "Received positional arguments which are not supported; Keyword arguments must be used instead", + ) + + return cast(_BaseModelT, construct_type(type_=base_model_cls, value=kwargs)) + + +def construct_type_unchecked(*, value: object, type_: type[_T]) -> _T: + """Loose coercion to the expected type with construction of nested values. + + Note: the returned value from this function is not guaranteed to match the + given type. + """ + return cast(_T, construct_type(value=value, type_=type_)) + + def construct_type(*, value: object, type_: object) -> object: """Loose coercion to the expected type with construction of nested values. @@ -616,6 +657,14 @@ def validate_type(*, type_: type[_T], value: object) -> _T: return cast(_T, _validate_non_model_type(type_=type_, value=value)) +def set_pydantic_config(typ: Any, config: pydantic.ConfigDict) -> None: + """Add a pydantic config for the given type. + + Note: this is a no-op on Pydantic v1. 
+ """ + setattr(typ, "__pydantic_config__", config) # noqa: B010 + + # our use of subclasssing here causes weirdness for type checkers, # so we just pretend that we don't subclass if TYPE_CHECKING: diff --git a/src/dataherald/_response.py b/src/dataherald/_response.py index eb4d761..f1aa94f 100644 --- a/src/dataherald/_response.py +++ b/src/dataherald/_response.py @@ -55,6 +55,9 @@ class BaseAPIResponse(Generic[R]): http_response: httpx.Response + retries_taken: int + """The number of retries made. If no retries happened this will be `0`""" + def __init__( self, *, @@ -64,6 +67,7 @@ def __init__( stream: bool, stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, options: FinalRequestOptions, + retries_taken: int = 0, ) -> None: self._cast_to = cast_to self._client = client @@ -72,6 +76,7 @@ def __init__( self._stream_cls = stream_cls self._options = options self.http_response = raw + self.retries_taken = retries_taken @property def headers(self) -> httpx.Headers: @@ -187,6 +192,9 @@ def _parse(self, *, to: type[_T] | None = None) -> R | _T: if cast_to == float: return cast(R, float(response.text)) + if cast_to == bool: + return cast(R, response.text.lower() == "true") + origin = get_origin(cast_to) or cast_to if origin == APIResponse: @@ -257,12 +265,10 @@ def _parse(self, *, to: type[_T] | None = None) -> R | _T: class APIResponse(BaseAPIResponse[R]): @overload - def parse(self, *, to: type[_T]) -> _T: - ... + def parse(self, *, to: type[_T]) -> _T: ... @overload - def parse(self) -> R: - ... + def parse(self) -> R: ... def parse(self, *, to: type[_T] | None = None) -> R | _T: """Returns the rich python representation of this response's data. @@ -361,12 +367,10 @@ def iter_lines(self) -> Iterator[str]: class AsyncAPIResponse(BaseAPIResponse[R]): @overload - async def parse(self, *, to: type[_T]) -> _T: - ... + async def parse(self, *, to: type[_T]) -> _T: ... @overload - async def parse(self) -> R: - ... + async def parse(self) -> R: ... async def parse(self, *, to: type[_T] | None = None) -> R | _T: """Returns the rich python representation of this response's data. diff --git a/src/dataherald/_types.py b/src/dataherald/_types.py index 59f65c1..bb13593 100644 --- a/src/dataherald/_types.py +++ b/src/dataherald/_types.py @@ -16,7 +16,7 @@ Optional, Sequence, ) -from typing_extensions import Literal, Protocol, TypeAlias, TypedDict, override, runtime_checkable +from typing_extensions import Set, Literal, Protocol, TypeAlias, TypedDict, override, runtime_checkable import httpx import pydantic @@ -111,8 +111,7 @@ class NotGiven: For example: ```py - def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: - ... + def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ... get(timeout=1) # 1s timeout @@ -162,16 +161,14 @@ def build( *, response: Response, data: object, - ) -> _T: - ... + ) -> _T: ... Headers = Mapping[str, Union[str, Omit]] class HeadersLikeProtocol(Protocol): - def get(self, __key: str) -> str | None: - ... + def get(self, __key: str) -> str | None: ... 
HeadersLike = Union[Headers, HeadersLikeProtocol] @@ -196,7 +193,9 @@ def get(self, __key: str) -> str | None: # Note: copied from Pydantic # https://github.com/pydantic/pydantic/blob/32ea570bf96e84234d2992e1ddf40ab8a565925a/pydantic/main.py#L49 -IncEx: TypeAlias = "set[int] | set[str] | dict[int, Any] | dict[str, Any] | None" +IncEx: TypeAlias = Union[ + Set[int], Set[str], Mapping[int, Union["IncEx", Literal[True]]], Mapping[str, Union["IncEx", Literal[True]]] +] PostParser = Callable[[Any], Any] diff --git a/src/dataherald/_utils/__init__.py b/src/dataherald/_utils/__init__.py index 31b5b22..a7cff3c 100644 --- a/src/dataherald/_utils/__init__.py +++ b/src/dataherald/_utils/__init__.py @@ -6,6 +6,7 @@ is_list as is_list, is_given as is_given, is_tuple as is_tuple, + json_safe as json_safe, lru_cache as lru_cache, is_mapping as is_mapping, is_tuple_t as is_tuple_t, @@ -49,3 +50,7 @@ maybe_transform as maybe_transform, async_maybe_transform as async_maybe_transform, ) +from ._reflection import ( + function_has_argument as function_has_argument, + assert_signatures_in_sync as assert_signatures_in_sync, +) diff --git a/src/dataherald/_utils/_proxy.py b/src/dataherald/_utils/_proxy.py index c46a62a..ffd883e 100644 --- a/src/dataherald/_utils/_proxy.py +++ b/src/dataherald/_utils/_proxy.py @@ -59,5 +59,4 @@ def __as_proxied__(self) -> T: return cast(T, self) @abstractmethod - def __load__(self) -> T: - ... + def __load__(self) -> T: ... diff --git a/src/dataherald/_utils/_reflection.py b/src/dataherald/_utils/_reflection.py new file mode 100644 index 0000000..89aa712 --- /dev/null +++ b/src/dataherald/_utils/_reflection.py @@ -0,0 +1,42 @@ +from __future__ import annotations + +import inspect +from typing import Any, Callable + + +def function_has_argument(func: Callable[..., Any], arg_name: str) -> bool: + """Returns whether or not the given function has a specific parameter""" + sig = inspect.signature(func) + return arg_name in sig.parameters + + +def assert_signatures_in_sync( + source_func: Callable[..., Any], + check_func: Callable[..., Any], + *, + exclude_params: set[str] = set(), +) -> None: + """Ensure that the signature of the second function matches the first.""" + + check_sig = inspect.signature(check_func) + source_sig = inspect.signature(source_func) + + errors: list[str] = [] + + for name, source_param in source_sig.parameters.items(): + if name in exclude_params: + continue + + custom_param = check_sig.parameters.get(name) + if not custom_param: + errors.append(f"the `{name}` param is missing") + continue + + if custom_param.annotation != source_param.annotation: + errors.append( + f"types for the `{name}` param are do not match; source={repr(source_param.annotation)} checking={repr(custom_param.annotation)}" + ) + continue + + if errors: + raise AssertionError(f"{len(errors)} errors encountered when comparing signatures:\n\n" + "\n\n".join(errors)) diff --git a/src/dataherald/_utils/_sync.py b/src/dataherald/_utils/_sync.py index 595924e..d0d8103 100644 --- a/src/dataherald/_utils/_sync.py +++ b/src/dataherald/_utils/_sync.py @@ -7,6 +7,8 @@ import anyio import anyio.to_thread +from ._reflection import function_has_argument + T_Retval = TypeVar("T_Retval") T_ParamSpec = ParamSpec("T_ParamSpec") @@ -59,6 +61,21 @@ def do_work(arg1, arg2, kwarg1="", kwarg2="") -> str: async def wrapper(*args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs) -> T_Retval: partial_f = functools.partial(function, *args, **kwargs) - return await anyio.to_thread.run_sync(partial_f, 
cancellable=cancellable, limiter=limiter) + + # In `v4.1.0` anyio added the `abandon_on_cancel` argument and deprecated the old + # `cancellable` argument, so we need to use the new `abandon_on_cancel` to avoid + # surfacing deprecation warnings. + if function_has_argument(anyio.to_thread.run_sync, "abandon_on_cancel"): + return await anyio.to_thread.run_sync( + partial_f, + abandon_on_cancel=cancellable, + limiter=limiter, + ) + + return await anyio.to_thread.run_sync( + partial_f, + cancellable=cancellable, + limiter=limiter, + ) return wrapper diff --git a/src/dataherald/_utils/_transform.py b/src/dataherald/_utils/_transform.py index 47e262a..d7c0534 100644 --- a/src/dataherald/_utils/_transform.py +++ b/src/dataherald/_utils/_transform.py @@ -173,6 +173,11 @@ def _transform_recursive( # Iterable[T] or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) ): + # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually + # intended as an iterable, so we don't transform it. + if isinstance(data, dict): + return cast(object, data) + inner_type = extract_type_arg(stripped_type, 0) return [_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data] @@ -186,7 +191,7 @@ def _transform_recursive( return data if isinstance(data, pydantic.BaseModel): - return model_dump(data, exclude_unset=True) + return model_dump(data, exclude_unset=True, mode="json") annotated_type = _get_annotated_type(annotation) if annotated_type is None: @@ -324,7 +329,7 @@ async def _async_transform_recursive( return data if isinstance(data, pydantic.BaseModel): - return model_dump(data, exclude_unset=True) + return model_dump(data, exclude_unset=True, mode="json") annotated_type = _get_annotated_type(annotation) if annotated_type is None: diff --git a/src/dataherald/_utils/_utils.py b/src/dataherald/_utils/_utils.py index 17904ce..e5811bb 100644 --- a/src/dataherald/_utils/_utils.py +++ b/src/dataherald/_utils/_utils.py @@ -16,11 +16,12 @@ overload, ) from pathlib import Path +from datetime import date, datetime from typing_extensions import TypeGuard import sniffio -from .._types import Headers, NotGiven, FileTypes, NotGivenOr, HeadersLike +from .._types import NotGiven, FileTypes, NotGivenOr, HeadersLike from .._compat import parse_date as parse_date, parse_datetime as parse_datetime _T = TypeVar("_T") @@ -211,20 +212,17 @@ def required_args(*variants: Sequence[str]) -> Callable[[CallableT], CallableT]: Example usage: ```py @overload - def foo(*, a: str) -> str: - ... + def foo(*, a: str) -> str: ... @overload - def foo(*, b: bool) -> str: - ... + def foo(*, b: bool) -> str: ... # This enforces the same constraints that a static type checker would # i.e. that either a or b must be passed to the function @required_args(["a"], ["b"]) - def foo(*, a: str | None = None, b: bool | None = None) -> str: - ... + def foo(*, a: str | None = None, b: bool | None = None) -> str: ... ``` """ @@ -286,18 +284,15 @@ def wrapper(*args: object, **kwargs: object) -> object: @overload -def strip_not_given(obj: None) -> None: - ... +def strip_not_given(obj: None) -> None: ... @overload -def strip_not_given(obj: Mapping[_K, _V | NotGiven]) -> dict[_K, _V]: - ... +def strip_not_given(obj: Mapping[_K, _V | NotGiven]) -> dict[_K, _V]: ... @overload -def strip_not_given(obj: object) -> object: - ... +def strip_not_given(obj: object) -> object: ... 
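As context for the `_utils.py` helpers being reformatted here, `strip_not_given` pairs with the `NotGiven` sentinel from `_types.py` to drop omitted keyword arguments before a request body is assembled; its runtime implementation follows directly below. A small illustrative sketch, using only the module paths shown in this patch (the example keys are arbitrary):

```py
from dataherald._types import NotGiven
from dataherald._utils._utils import strip_not_given  # module path as it appears in this patch

# Values that are the NotGiven sentinel should be stripped from the mapping,
# while explicitly provided values (including None) are expected to be kept.
body = strip_not_given({"evaluate": True, "finetuning_id": NotGiven(), "metadata": None})
print(body)  # expected: {'evaluate': True, 'metadata': None}
```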
def strip_not_given(obj: object | None) -> object: @@ -369,13 +364,13 @@ def file_from_path(path: str) -> FileTypes: def get_required_header(headers: HeadersLike, header: str) -> str: lower_header = header.lower() - if isinstance(headers, Mapping): - headers = cast(Headers, headers) - for k, v in headers.items(): + if is_mapping_t(headers): + # mypy doesn't understand the type narrowing here + for k, v in headers.items(): # type: ignore if k.lower() == lower_header and isinstance(v, str): return v - """ to deal with the case where the header looks like Stainless-Event-Id """ + # to deal with the case where the header looks like Stainless-Event-Id intercaps_header = re.sub(r"([^\w])(\w)", lambda pat: pat.group(1) + pat.group(2).upper(), header.capitalize()) for normalized_header in [header, lower_header, header.upper(), intercaps_header]: @@ -401,3 +396,19 @@ def lru_cache(*, maxsize: int | None = 128) -> Callable[[CallableT], CallableT]: maxsize=maxsize, ) return cast(Any, wrapper) # type: ignore[no-any-return] + + +def json_safe(data: object) -> object: + """Translates a mapping / sequence recursively in the same fashion + as `pydantic` v2's `model_dump(mode="json")`. + """ + if is_mapping(data): + return {json_safe(key): json_safe(value) for key, value in data.items()} + + if is_iterable(data) and not isinstance(data, (str, bytes, bytearray)): + return [json_safe(item) for item in data] + + if isinstance(data, (datetime, date)): + return data.isoformat() + + return data diff --git a/src/dataherald/resources/database_connections/database_connections.py b/src/dataherald/resources/database_connections/database_connections.py index 1f707aa..06dab31 100644 --- a/src/dataherald/resources/database_connections/database_connections.py +++ b/src/dataherald/resources/database_connections/database_connections.py @@ -28,9 +28,7 @@ async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from ..._base_client import ( - make_request_options, -) +from ..._base_client import make_request_options from ...types.db_connection_response import DBConnectionResponse from ...types.database_connection_list_response import DatabaseConnectionListResponse @@ -44,10 +42,21 @@ def drivers(self) -> DriversResource: @cached_property def with_raw_response(self) -> DatabaseConnectionsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return DatabaseConnectionsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> DatabaseConnectionsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return DatabaseConnectionsResourceWithStreamingResponse(self) def create( @@ -211,10 +220,21 @@ def drivers(self) -> AsyncDriversResource: @cached_property def with_raw_response(self) -> AsyncDatabaseConnectionsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. 
+ + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return AsyncDatabaseConnectionsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> AsyncDatabaseConnectionsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return AsyncDatabaseConnectionsResourceWithStreamingResponse(self) async def create( diff --git a/src/dataherald/resources/database_connections/drivers.py b/src/dataherald/resources/database_connections/drivers.py index 5fc1477..c289f7a 100644 --- a/src/dataherald/resources/database_connections/drivers.py +++ b/src/dataherald/resources/database_connections/drivers.py @@ -13,9 +13,7 @@ async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from ..._base_client import ( - make_request_options, -) +from ..._base_client import make_request_options from ...types.database_connections.driver_list_response import DriverListResponse __all__ = ["DriversResource", "AsyncDriversResource"] @@ -24,10 +22,21 @@ class DriversResource(SyncAPIResource): @cached_property def with_raw_response(self) -> DriversResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return DriversResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> DriversResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return DriversResourceWithStreamingResponse(self) def list( @@ -53,10 +62,21 @@ def list( class AsyncDriversResource(AsyncAPIResource): @cached_property def with_raw_response(self) -> AsyncDriversResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return AsyncDriversResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> AsyncDriversResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return AsyncDriversResourceWithStreamingResponse(self) async def list( diff --git a/src/dataherald/resources/engine.py b/src/dataherald/resources/engine.py index 8434f25..593f683 100644 --- a/src/dataherald/resources/engine.py +++ b/src/dataherald/resources/engine.py @@ -13,9 +13,7 @@ async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from .._base_client import ( - make_request_options, -) +from .._base_client import make_request_options __all__ = ["EngineResource", "AsyncEngineResource"] @@ -23,10 +21,21 @@ class EngineResource(SyncAPIResource): @cached_property def with_raw_response(self) -> EngineResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return EngineResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> EngineResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return EngineResourceWithStreamingResponse(self) def heartbeat( @@ -52,10 +61,21 @@ def heartbeat( class AsyncEngineResource(AsyncAPIResource): @cached_property def with_raw_response(self) -> AsyncEngineResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return AsyncEngineResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> AsyncEngineResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return AsyncEngineResourceWithStreamingResponse(self) async def heartbeat( diff --git a/src/dataherald/resources/finetunings.py b/src/dataherald/resources/finetunings.py index 56f0491..95b39fe 100644 --- a/src/dataherald/resources/finetunings.py +++ b/src/dataherald/resources/finetunings.py @@ -20,9 +20,7 @@ async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from .._base_client import ( - make_request_options, -) +from .._base_client import make_request_options from ..types.finetuning_response import FinetuningResponse from ..types.finetuning_list_response import FinetuningListResponse @@ -32,10 +30,21 @@ class FinetuningsResource(SyncAPIResource): @cached_property def with_raw_response(self) -> FinetuningsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return FinetuningsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> FinetuningsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return FinetuningsResourceWithStreamingResponse(self) def create( @@ -190,10 +199,21 @@ def cancel( class AsyncFinetuningsResource(AsyncAPIResource): @cached_property def with_raw_response(self) -> AsyncFinetuningsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return AsyncFinetuningsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> AsyncFinetuningsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return AsyncFinetuningsResourceWithStreamingResponse(self) async def create( diff --git a/src/dataherald/resources/generations.py b/src/dataherald/resources/generations.py index 28d4aa9..3b07c0c 100644 --- a/src/dataherald/resources/generations.py +++ b/src/dataherald/resources/generations.py @@ -20,9 +20,7 @@ async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from .._base_client import ( - make_request_options, -) +from .._base_client import make_request_options from ..types.generation_response import GenerationResponse from ..types.generation_list_response import GenerationListResponse from ..types.shared.nl_generation_response import NlGenerationResponse @@ -33,10 +31,21 @@ class GenerationsResource(SyncAPIResource): @cached_property def with_raw_response(self) -> GenerationsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return GenerationsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> GenerationsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return GenerationsResourceWithStreamingResponse(self) def create( @@ -268,10 +277,21 @@ def sql_generation( class AsyncGenerationsResource(AsyncAPIResource): @cached_property def with_raw_response(self) -> AsyncGenerationsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return AsyncGenerationsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> AsyncGenerationsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return AsyncGenerationsResourceWithStreamingResponse(self) async def create( diff --git a/src/dataherald/resources/golden_sqls.py b/src/dataherald/resources/golden_sqls.py index 3993c1d..e295066 100644 --- a/src/dataherald/resources/golden_sqls.py +++ b/src/dataherald/resources/golden_sqls.py @@ -20,9 +20,7 @@ async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from .._base_client import ( - make_request_options, -) +from .._base_client import make_request_options from ..types.golden_sql_list_response import GoldenSqlListResponse from ..types.golden_sql_upload_response import GoldenSqlUploadResponse from ..types.shared.golden_sql_response import GoldenSqlResponse @@ -33,10 +31,21 @@ class GoldenSqlsResource(SyncAPIResource): @cached_property def with_raw_response(self) -> GoldenSqlsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return GoldenSqlsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> GoldenSqlsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return GoldenSqlsResourceWithStreamingResponse(self) def retrieve( @@ -178,7 +187,7 @@ def upload( """ return self._post( "/api/golden-sqls", - body=maybe_transform(body, golden_sql_upload_params.GoldenSqlUploadParams), + body=maybe_transform(body, Iterable[golden_sql_upload_params.Body]), options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -189,10 +198,21 @@ def upload( class AsyncGoldenSqlsResource(AsyncAPIResource): @cached_property def with_raw_response(self) -> AsyncGoldenSqlsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return AsyncGoldenSqlsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> AsyncGoldenSqlsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return AsyncGoldenSqlsResourceWithStreamingResponse(self) async def retrieve( @@ -334,7 +354,7 @@ async def upload( """ return await self._post( "/api/golden-sqls", - body=await async_maybe_transform(body, golden_sql_upload_params.GoldenSqlUploadParams), + body=await async_maybe_transform(body, Iterable[golden_sql_upload_params.Body]), options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), diff --git a/src/dataherald/resources/heartbeat.py b/src/dataherald/resources/heartbeat.py index 59081dc..bac54b7 100644 --- a/src/dataherald/resources/heartbeat.py +++ b/src/dataherald/resources/heartbeat.py @@ -13,9 +13,7 @@ async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from .._base_client import ( - make_request_options, -) +from .._base_client import make_request_options __all__ = ["HeartbeatResource", "AsyncHeartbeatResource"] @@ -23,10 +21,21 @@ class HeartbeatResource(SyncAPIResource): @cached_property def with_raw_response(self) -> HeartbeatResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return HeartbeatResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> HeartbeatResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return HeartbeatResourceWithStreamingResponse(self) def retrieve( @@ -52,10 +61,21 @@ def retrieve( class AsyncHeartbeatResource(AsyncAPIResource): @cached_property def with_raw_response(self) -> AsyncHeartbeatResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return AsyncHeartbeatResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> AsyncHeartbeatResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return AsyncHeartbeatResourceWithStreamingResponse(self) async def retrieve( diff --git a/src/dataherald/resources/instructions/first.py b/src/dataherald/resources/instructions/first.py index 2b42a36..7cd7355 100644 --- a/src/dataherald/resources/instructions/first.py +++ b/src/dataherald/resources/instructions/first.py @@ -13,9 +13,7 @@ async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from ..._base_client import ( - make_request_options, -) +from ..._base_client import make_request_options from ...types.shared.instruction_response import InstructionResponse __all__ = ["FirstResource", "AsyncFirstResource"] @@ -24,10 +22,21 @@ class FirstResource(SyncAPIResource): @cached_property def with_raw_response(self) -> FirstResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return FirstResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> FirstResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return FirstResourceWithStreamingResponse(self) def retrieve( @@ -53,10 +62,21 @@ def retrieve( class AsyncFirstResource(AsyncAPIResource): @cached_property def with_raw_response(self) -> AsyncFirstResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return AsyncFirstResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> AsyncFirstResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return AsyncFirstResourceWithStreamingResponse(self) async def retrieve( diff --git a/src/dataherald/resources/instructions/instructions.py b/src/dataherald/resources/instructions/instructions.py index 6be856e..3c5a15b 100644 --- a/src/dataherald/resources/instructions/instructions.py +++ b/src/dataherald/resources/instructions/instructions.py @@ -26,9 +26,7 @@ async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from ..._base_client import ( - make_request_options, -) +from ..._base_client import make_request_options from ...types.instruction_list_response import InstructionListResponse from ...types.shared.instruction_response import InstructionResponse @@ -42,10 +40,21 @@ def first(self) -> FirstResource: @cached_property def with_raw_response(self) -> InstructionsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. 
+ + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return InstructionsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> InstructionsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return InstructionsResourceWithStreamingResponse(self) def create( @@ -244,10 +253,21 @@ def first(self) -> AsyncFirstResource: @cached_property def with_raw_response(self) -> AsyncInstructionsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return AsyncInstructionsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> AsyncInstructionsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return AsyncInstructionsResourceWithStreamingResponse(self) async def create( diff --git a/src/dataherald/resources/nl_generations.py b/src/dataherald/resources/nl_generations.py index a835a21..0d5e313 100644 --- a/src/dataherald/resources/nl_generations.py +++ b/src/dataherald/resources/nl_generations.py @@ -18,9 +18,7 @@ async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from .._base_client import ( - make_request_options, -) +from .._base_client import make_request_options from ..types.nl_generation_list_response import NlGenerationListResponse from ..types.shared.nl_generation_response import NlGenerationResponse @@ -30,10 +28,21 @@ class NlGenerationsResource(SyncAPIResource): @cached_property def with_raw_response(self) -> NlGenerationsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return NlGenerationsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> NlGenerationsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return NlGenerationsResourceWithStreamingResponse(self) def create( @@ -160,10 +169,21 @@ def list( class AsyncNlGenerationsResource(AsyncAPIResource): @cached_property def with_raw_response(self) -> AsyncNlGenerationsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return AsyncNlGenerationsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> AsyncNlGenerationsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return AsyncNlGenerationsResourceWithStreamingResponse(self) async def create( diff --git a/src/dataherald/resources/prompts/prompts.py b/src/dataherald/resources/prompts/prompts.py index e585a86..178a556 100644 --- a/src/dataherald/resources/prompts/prompts.py +++ b/src/dataherald/resources/prompts/prompts.py @@ -18,9 +18,7 @@ async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from ..._base_client import ( - make_request_options, -) +from ..._base_client import make_request_options from .sql_generations import ( SqlGenerationsResource, AsyncSqlGenerationsResource, @@ -42,10 +40,21 @@ def sql_generations(self) -> SqlGenerationsResource: @cached_property def with_raw_response(self) -> PromptsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return PromptsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> PromptsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return PromptsResourceWithStreamingResponse(self) def create( @@ -176,10 +185,21 @@ def sql_generations(self) -> AsyncSqlGenerationsResource: @cached_property def with_raw_response(self) -> AsyncPromptsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return AsyncPromptsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> AsyncPromptsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return AsyncPromptsResourceWithStreamingResponse(self) async def create( diff --git a/src/dataherald/resources/prompts/sql_generations.py b/src/dataherald/resources/prompts/sql_generations.py index fba331a..276c6df 100644 --- a/src/dataherald/resources/prompts/sql_generations.py +++ b/src/dataherald/resources/prompts/sql_generations.py @@ -17,9 +17,7 @@ async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from ..._base_client import ( - make_request_options, -) +from ..._base_client import make_request_options from ...types.prompts import ( sql_generation_create_params, sql_generation_retrieve_params, @@ -34,10 +32,21 @@ class SqlGenerationsResource(SyncAPIResource): @cached_property def with_raw_response(self) -> SqlGenerationsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. 
+ + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return SqlGenerationsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> SqlGenerationsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return SqlGenerationsResourceWithStreamingResponse(self) def create( @@ -185,10 +194,21 @@ def nl_generations( class AsyncSqlGenerationsResource(AsyncAPIResource): @cached_property def with_raw_response(self) -> AsyncSqlGenerationsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return AsyncSqlGenerationsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> AsyncSqlGenerationsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return AsyncSqlGenerationsResourceWithStreamingResponse(self) async def create( diff --git a/src/dataherald/resources/sql_generations/nl_generations.py b/src/dataherald/resources/sql_generations/nl_generations.py index a6c70b0..ff3f6b3 100644 --- a/src/dataherald/resources/sql_generations/nl_generations.py +++ b/src/dataherald/resources/sql_generations/nl_generations.py @@ -17,9 +17,7 @@ async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from ..._base_client import ( - make_request_options, -) +from ..._base_client import make_request_options from ...types.sql_generations import nl_generation_create_params, nl_generation_retrieve_params from ...types.shared.nl_generation_response import NlGenerationResponse @@ -29,10 +27,21 @@ class NlGenerationsResource(SyncAPIResource): @cached_property def with_raw_response(self) -> NlGenerationsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return NlGenerationsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> NlGenerationsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return NlGenerationsResourceWithStreamingResponse(self) def create( @@ -130,10 +139,21 @@ def retrieve( class AsyncNlGenerationsResource(AsyncAPIResource): @cached_property def with_raw_response(self) -> AsyncNlGenerationsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. 
+ + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return AsyncNlGenerationsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> AsyncNlGenerationsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return AsyncNlGenerationsResourceWithStreamingResponse(self) async def create( diff --git a/src/dataherald/resources/sql_generations/sql_generations.py b/src/dataherald/resources/sql_generations/sql_generations.py index eb81141..57e1d1b 100644 --- a/src/dataherald/resources/sql_generations/sql_generations.py +++ b/src/dataherald/resources/sql_generations/sql_generations.py @@ -22,9 +22,7 @@ async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from ..._base_client import ( - make_request_options, -) +from ..._base_client import make_request_options from .nl_generations import ( NlGenerationsResource, AsyncNlGenerationsResource, @@ -47,10 +45,21 @@ def nl_generations(self) -> NlGenerationsResource: @cached_property def with_raw_response(self) -> SqlGenerationsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return SqlGenerationsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> SqlGenerationsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return SqlGenerationsResourceWithStreamingResponse(self) def create( @@ -223,10 +232,21 @@ def nl_generations(self) -> AsyncNlGenerationsResource: @cached_property def with_raw_response(self) -> AsyncSqlGenerationsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return AsyncSqlGenerationsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> AsyncSqlGenerationsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return AsyncSqlGenerationsResourceWithStreamingResponse(self) async def create( diff --git a/src/dataherald/resources/table_descriptions.py b/src/dataherald/resources/table_descriptions.py index 643ae6d..99400cc 100644 --- a/src/dataherald/resources/table_descriptions.py +++ b/src/dataherald/resources/table_descriptions.py @@ -24,9 +24,7 @@ async_to_raw_response_wrapper, async_to_streamed_response_wrapper, ) -from .._base_client import ( - make_request_options, -) +from .._base_client import make_request_options from ..types.table_description_response import TableDescriptionResponse from ..types.table_description_list_response import TableDescriptionListResponse from ..types.table_description_sync_schemas_response import TableDescriptionSyncSchemasResponse @@ -37,10 +35,21 @@ class TableDescriptionsResource(SyncAPIResource): @cached_property def with_raw_response(self) -> TableDescriptionsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return TableDescriptionsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> TableDescriptionsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return TableDescriptionsResourceWithStreamingResponse(self) def retrieve( @@ -189,7 +198,7 @@ def sync_schemas( """ return self._post( "/api/table-descriptions/sync-schemas", - body=maybe_transform(body, table_description_sync_schemas_params.TableDescriptionSyncSchemasParams), + body=maybe_transform(body, Iterable[table_description_sync_schemas_params.Body]), options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), @@ -200,10 +209,21 @@ def sync_schemas( class AsyncTableDescriptionsResource(AsyncAPIResource): @cached_property def with_raw_response(self) -> AsyncTableDescriptionsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return the + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/Dataherald/dataherald-python#accessing-raw-response-data-eg-headers + """ return AsyncTableDescriptionsResourceWithRawResponse(self) @cached_property def with_streaming_response(self) -> AsyncTableDescriptionsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/Dataherald/dataherald-python#with_streaming_response + """ return AsyncTableDescriptionsResourceWithStreamingResponse(self) async def retrieve( @@ -352,9 +372,7 @@ async def sync_schemas( """ return await self._post( "/api/table-descriptions/sync-schemas", - body=await async_maybe_transform( - body, table_description_sync_schemas_params.TableDescriptionSyncSchemasParams - ), + body=await async_maybe_transform(body, Iterable[table_description_sync_schemas_params.Body]), options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), diff --git a/src/dataherald/types/database_connection_list_response.py b/src/dataherald/types/database_connection_list_response.py index 3c06152..c18a474 100644 --- a/src/dataherald/types/database_connection_list_response.py +++ b/src/dataherald/types/database_connection_list_response.py @@ -1,9 +1,10 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. from typing import List +from typing_extensions import TypeAlias from .db_connection_response import DBConnectionResponse __all__ = ["DatabaseConnectionListResponse"] -DatabaseConnectionListResponse = List[DBConnectionResponse] +DatabaseConnectionListResponse: TypeAlias = List[DBConnectionResponse] diff --git a/src/dataherald/types/database_connections/driver_list_response.py b/src/dataherald/types/database_connections/driver_list_response.py index e8b1b3b..d18ecdf 100644 --- a/src/dataherald/types/database_connections/driver_list_response.py +++ b/src/dataherald/types/database_connections/driver_list_response.py @@ -1,9 +1,10 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. from typing import List +from typing_extensions import TypeAlias from .driver_response import DriverResponse __all__ = ["DriverListResponse"] -DriverListResponse = List[DriverResponse] +DriverListResponse: TypeAlias = List[DriverResponse] diff --git a/src/dataherald/types/finetuning_list_response.py b/src/dataherald/types/finetuning_list_response.py index 5d248d0..352499a 100644 --- a/src/dataherald/types/finetuning_list_response.py +++ b/src/dataherald/types/finetuning_list_response.py @@ -1,9 +1,10 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. from typing import List +from typing_extensions import TypeAlias from .finetuning_response import FinetuningResponse __all__ = ["FinetuningListResponse"] -FinetuningListResponse = List[FinetuningResponse] +FinetuningListResponse: TypeAlias = List[FinetuningResponse] diff --git a/src/dataherald/types/generation_list_response.py b/src/dataherald/types/generation_list_response.py index 1413ba6..f0873b8 100644 --- a/src/dataherald/types/generation_list_response.py +++ b/src/dataherald/types/generation_list_response.py @@ -1,9 +1,10 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
from typing import List +from typing_extensions import TypeAlias from .generation_list_item import GenerationListItem __all__ = ["GenerationListResponse"] -GenerationListResponse = List[GenerationListItem] +GenerationListResponse: TypeAlias = List[GenerationListItem] diff --git a/src/dataherald/types/golden_sql_list_response.py b/src/dataherald/types/golden_sql_list_response.py index fdc6898..e616814 100644 --- a/src/dataherald/types/golden_sql_list_response.py +++ b/src/dataherald/types/golden_sql_list_response.py @@ -1,9 +1,10 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. from typing import List +from typing_extensions import TypeAlias from .shared.golden_sql_response import GoldenSqlResponse __all__ = ["GoldenSqlListResponse"] -GoldenSqlListResponse = List[GoldenSqlResponse] +GoldenSqlListResponse: TypeAlias = List[GoldenSqlResponse] diff --git a/src/dataherald/types/golden_sql_upload_response.py b/src/dataherald/types/golden_sql_upload_response.py index 0b3818e..199e481 100644 --- a/src/dataherald/types/golden_sql_upload_response.py +++ b/src/dataherald/types/golden_sql_upload_response.py @@ -1,9 +1,10 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. from typing import List +from typing_extensions import TypeAlias from .shared.golden_sql_response import GoldenSqlResponse __all__ = ["GoldenSqlUploadResponse"] -GoldenSqlUploadResponse = List[GoldenSqlResponse] +GoldenSqlUploadResponse: TypeAlias = List[GoldenSqlResponse] diff --git a/src/dataherald/types/instruction_list_response.py b/src/dataherald/types/instruction_list_response.py index 4b8bf33..3dfd9b9 100644 --- a/src/dataherald/types/instruction_list_response.py +++ b/src/dataherald/types/instruction_list_response.py @@ -1,9 +1,10 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. from typing import List +from typing_extensions import TypeAlias from .shared.instruction_response import InstructionResponse __all__ = ["InstructionListResponse"] -InstructionListResponse = List[InstructionResponse] +InstructionListResponse: TypeAlias = List[InstructionResponse] diff --git a/src/dataherald/types/nl_generation_list_response.py b/src/dataherald/types/nl_generation_list_response.py index c543129..ce92832 100644 --- a/src/dataherald/types/nl_generation_list_response.py +++ b/src/dataherald/types/nl_generation_list_response.py @@ -1,9 +1,10 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. from typing import List +from typing_extensions import TypeAlias from .shared.nl_generation_response import NlGenerationResponse __all__ = ["NlGenerationListResponse"] -NlGenerationListResponse = List[NlGenerationResponse] +NlGenerationListResponse: TypeAlias = List[NlGenerationResponse] diff --git a/src/dataherald/types/prompt_list_response.py b/src/dataherald/types/prompt_list_response.py index 3cfc419..15128d6 100644 --- a/src/dataherald/types/prompt_list_response.py +++ b/src/dataherald/types/prompt_list_response.py @@ -1,9 +1,10 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
from typing import List +from typing_extensions import TypeAlias from .prompt_response import PromptResponse __all__ = ["PromptListResponse"] -PromptListResponse = List[PromptResponse] +PromptListResponse: TypeAlias = List[PromptResponse] diff --git a/src/dataherald/types/sql_generation_execute_response.py b/src/dataherald/types/sql_generation_execute_response.py index 19925d6..765b7aa 100644 --- a/src/dataherald/types/sql_generation_execute_response.py +++ b/src/dataherald/types/sql_generation_execute_response.py @@ -1,7 +1,8 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. from typing import List +from typing_extensions import TypeAlias __all__ = ["SqlGenerationExecuteResponse"] -SqlGenerationExecuteResponse = List[object] +SqlGenerationExecuteResponse: TypeAlias = List[object] diff --git a/src/dataherald/types/sql_generation_list_response.py b/src/dataherald/types/sql_generation_list_response.py index 2403e8e..824d9e9 100644 --- a/src/dataherald/types/sql_generation_list_response.py +++ b/src/dataherald/types/sql_generation_list_response.py @@ -1,9 +1,10 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. from typing import List +from typing_extensions import TypeAlias from .shared.sql_generation_response import SqlGenerationResponse __all__ = ["SqlGenerationListResponse"] -SqlGenerationListResponse = List[SqlGenerationResponse] +SqlGenerationListResponse: TypeAlias = List[SqlGenerationResponse] diff --git a/src/dataherald/types/table_description_list_response.py b/src/dataherald/types/table_description_list_response.py index e6211c0..b765cc4 100644 --- a/src/dataherald/types/table_description_list_response.py +++ b/src/dataherald/types/table_description_list_response.py @@ -1,9 +1,10 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. from typing import List +from typing_extensions import TypeAlias from .table_description_response import TableDescriptionResponse __all__ = ["TableDescriptionListResponse"] -TableDescriptionListResponse = List[TableDescriptionResponse] +TableDescriptionListResponse: TypeAlias = List[TableDescriptionResponse] diff --git a/src/dataherald/types/table_description_sync_schemas_response.py b/src/dataherald/types/table_description_sync_schemas_response.py index 60c9f31..c40dd1f 100644 --- a/src/dataherald/types/table_description_sync_schemas_response.py +++ b/src/dataherald/types/table_description_sync_schemas_response.py @@ -1,9 +1,10 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
from typing import List +from typing_extensions import TypeAlias from .table_description_response import TableDescriptionResponse __all__ = ["TableDescriptionSyncSchemasResponse"] -TableDescriptionSyncSchemasResponse = List[TableDescriptionResponse] +TableDescriptionSyncSchemasResponse: TypeAlias = List[TableDescriptionResponse] diff --git a/tests/api_resources/prompts/test_sql_generations.py b/tests/api_resources/prompts/test_sql_generations.py index 6b35c85..4fa34ca 100644 --- a/tests/api_resources/prompts/test_sql_generations.py +++ b/tests/api_resources/prompts/test_sql_generations.py @@ -20,26 +20,26 @@ class TestSqlGenerations: @parametrize def test_method_create(self, client: Dataherald) -> None: sql_generation = client.prompts.sql_generations.create( - "string", + id="id", ) assert_matches_type(SqlGenerationResponse, sql_generation, path=["response"]) @parametrize def test_method_create_with_all_params(self, client: Dataherald) -> None: sql_generation = client.prompts.sql_generations.create( - "string", + id="id", evaluate=True, - finetuning_id="string", + finetuning_id="finetuning_id", low_latency_mode=True, metadata={}, - sql="string", + sql="sql", ) assert_matches_type(SqlGenerationResponse, sql_generation, path=["response"]) @parametrize def test_raw_response_create(self, client: Dataherald) -> None: response = client.prompts.sql_generations.with_raw_response.create( - "string", + id="id", ) assert response.is_closed is True @@ -50,7 +50,7 @@ def test_raw_response_create(self, client: Dataherald) -> None: @parametrize def test_streaming_response_create(self, client: Dataherald) -> None: with client.prompts.sql_generations.with_streaming_response.create( - "string", + id="id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -64,22 +64,22 @@ def test_streaming_response_create(self, client: Dataherald) -> None: def test_path_params_create(self, client: Dataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): client.prompts.sql_generations.with_raw_response.create( - "", + id="", ) @parametrize def test_method_retrieve(self, client: Dataherald) -> None: sql_generation = client.prompts.sql_generations.retrieve( - "string", + id="id", ) assert_matches_type(object, sql_generation, path=["response"]) @parametrize def test_method_retrieve_with_all_params(self, client: Dataherald) -> None: sql_generation = client.prompts.sql_generations.retrieve( - "string", + id="id", ascend=True, - order="string", + order="order", page=0, page_size=0, ) @@ -88,7 +88,7 @@ def test_method_retrieve_with_all_params(self, client: Dataherald) -> None: @parametrize def test_raw_response_retrieve(self, client: Dataherald) -> None: response = client.prompts.sql_generations.with_raw_response.retrieve( - "string", + id="id", ) assert response.is_closed is True @@ -99,7 +99,7 @@ def test_raw_response_retrieve(self, client: Dataherald) -> None: @parametrize def test_streaming_response_retrieve(self, client: Dataherald) -> None: with client.prompts.sql_generations.with_streaming_response.retrieve( - "string", + id="id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -113,13 +113,13 @@ def test_streaming_response_retrieve(self, client: Dataherald) -> None: def test_path_params_retrieve(self, client: Dataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): 
client.prompts.sql_generations.with_raw_response.retrieve( - "", + id="", ) @parametrize def test_method_nl_generations(self, client: Dataherald) -> None: sql_generation = client.prompts.sql_generations.nl_generations( - "string", + id="id", sql_generation={}, ) assert_matches_type(NlGenerationResponse, sql_generation, path=["response"]) @@ -127,13 +127,13 @@ def test_method_nl_generations(self, client: Dataherald) -> None: @parametrize def test_method_nl_generations_with_all_params(self, client: Dataherald) -> None: sql_generation = client.prompts.sql_generations.nl_generations( - "string", + id="id", sql_generation={ - "finetuning_id": "string", - "low_latency_mode": True, "evaluate": True, - "sql": "string", + "finetuning_id": "finetuning_id", + "low_latency_mode": True, "metadata": {}, + "sql": "sql", }, max_rows=0, metadata={}, @@ -143,7 +143,7 @@ def test_method_nl_generations_with_all_params(self, client: Dataherald) -> None @parametrize def test_raw_response_nl_generations(self, client: Dataherald) -> None: response = client.prompts.sql_generations.with_raw_response.nl_generations( - "string", + id="id", sql_generation={}, ) @@ -155,7 +155,7 @@ def test_raw_response_nl_generations(self, client: Dataherald) -> None: @parametrize def test_streaming_response_nl_generations(self, client: Dataherald) -> None: with client.prompts.sql_generations.with_streaming_response.nl_generations( - "string", + id="id", sql_generation={}, ) as response: assert not response.is_closed @@ -170,7 +170,7 @@ def test_streaming_response_nl_generations(self, client: Dataherald) -> None: def test_path_params_nl_generations(self, client: Dataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): client.prompts.sql_generations.with_raw_response.nl_generations( - "", + id="", sql_generation={}, ) @@ -181,26 +181,26 @@ class TestAsyncSqlGenerations: @parametrize async def test_method_create(self, async_client: AsyncDataherald) -> None: sql_generation = await async_client.prompts.sql_generations.create( - "string", + id="id", ) assert_matches_type(SqlGenerationResponse, sql_generation, path=["response"]) @parametrize async def test_method_create_with_all_params(self, async_client: AsyncDataherald) -> None: sql_generation = await async_client.prompts.sql_generations.create( - "string", + id="id", evaluate=True, - finetuning_id="string", + finetuning_id="finetuning_id", low_latency_mode=True, metadata={}, - sql="string", + sql="sql", ) assert_matches_type(SqlGenerationResponse, sql_generation, path=["response"]) @parametrize async def test_raw_response_create(self, async_client: AsyncDataherald) -> None: response = await async_client.prompts.sql_generations.with_raw_response.create( - "string", + id="id", ) assert response.is_closed is True @@ -211,7 +211,7 @@ async def test_raw_response_create(self, async_client: AsyncDataherald) -> None: @parametrize async def test_streaming_response_create(self, async_client: AsyncDataherald) -> None: async with async_client.prompts.sql_generations.with_streaming_response.create( - "string", + id="id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -225,22 +225,22 @@ async def test_streaming_response_create(self, async_client: AsyncDataherald) -> async def test_path_params_create(self, async_client: AsyncDataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): await 
async_client.prompts.sql_generations.with_raw_response.create( - "", + id="", ) @parametrize async def test_method_retrieve(self, async_client: AsyncDataherald) -> None: sql_generation = await async_client.prompts.sql_generations.retrieve( - "string", + id="id", ) assert_matches_type(object, sql_generation, path=["response"]) @parametrize async def test_method_retrieve_with_all_params(self, async_client: AsyncDataherald) -> None: sql_generation = await async_client.prompts.sql_generations.retrieve( - "string", + id="id", ascend=True, - order="string", + order="order", page=0, page_size=0, ) @@ -249,7 +249,7 @@ async def test_method_retrieve_with_all_params(self, async_client: AsyncDatahera @parametrize async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> None: response = await async_client.prompts.sql_generations.with_raw_response.retrieve( - "string", + id="id", ) assert response.is_closed is True @@ -260,7 +260,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> Non @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncDataherald) -> None: async with async_client.prompts.sql_generations.with_streaming_response.retrieve( - "string", + id="id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -274,13 +274,13 @@ async def test_streaming_response_retrieve(self, async_client: AsyncDataherald) async def test_path_params_retrieve(self, async_client: AsyncDataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): await async_client.prompts.sql_generations.with_raw_response.retrieve( - "", + id="", ) @parametrize async def test_method_nl_generations(self, async_client: AsyncDataherald) -> None: sql_generation = await async_client.prompts.sql_generations.nl_generations( - "string", + id="id", sql_generation={}, ) assert_matches_type(NlGenerationResponse, sql_generation, path=["response"]) @@ -288,13 +288,13 @@ async def test_method_nl_generations(self, async_client: AsyncDataherald) -> Non @parametrize async def test_method_nl_generations_with_all_params(self, async_client: AsyncDataherald) -> None: sql_generation = await async_client.prompts.sql_generations.nl_generations( - "string", + id="id", sql_generation={ - "finetuning_id": "string", - "low_latency_mode": True, "evaluate": True, - "sql": "string", + "finetuning_id": "finetuning_id", + "low_latency_mode": True, "metadata": {}, + "sql": "sql", }, max_rows=0, metadata={}, @@ -304,7 +304,7 @@ async def test_method_nl_generations_with_all_params(self, async_client: AsyncDa @parametrize async def test_raw_response_nl_generations(self, async_client: AsyncDataherald) -> None: response = await async_client.prompts.sql_generations.with_raw_response.nl_generations( - "string", + id="id", sql_generation={}, ) @@ -316,7 +316,7 @@ async def test_raw_response_nl_generations(self, async_client: AsyncDataherald) @parametrize async def test_streaming_response_nl_generations(self, async_client: AsyncDataherald) -> None: async with async_client.prompts.sql_generations.with_streaming_response.nl_generations( - "string", + id="id", sql_generation={}, ) as response: assert not response.is_closed @@ -331,6 +331,6 @@ async def test_streaming_response_nl_generations(self, async_client: AsyncDatahe async def test_path_params_nl_generations(self, async_client: AsyncDataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` 
but received ''"): await async_client.prompts.sql_generations.with_raw_response.nl_generations( - "", + id="", sql_generation={}, ) diff --git a/tests/api_resources/sql_generations/test_nl_generations.py b/tests/api_resources/sql_generations/test_nl_generations.py index c87690d..13d8a0d 100644 --- a/tests/api_resources/sql_generations/test_nl_generations.py +++ b/tests/api_resources/sql_generations/test_nl_generations.py @@ -20,14 +20,14 @@ class TestNlGenerations: @parametrize def test_method_create(self, client: Dataherald) -> None: nl_generation = client.sql_generations.nl_generations.create( - "string", + id="id", ) assert_matches_type(NlGenerationResponse, nl_generation, path=["response"]) @parametrize def test_method_create_with_all_params(self, client: Dataherald) -> None: nl_generation = client.sql_generations.nl_generations.create( - "string", + id="id", max_rows=0, metadata={}, ) @@ -36,7 +36,7 @@ def test_method_create_with_all_params(self, client: Dataherald) -> None: @parametrize def test_raw_response_create(self, client: Dataherald) -> None: response = client.sql_generations.nl_generations.with_raw_response.create( - "string", + id="id", ) assert response.is_closed is True @@ -47,7 +47,7 @@ def test_raw_response_create(self, client: Dataherald) -> None: @parametrize def test_streaming_response_create(self, client: Dataherald) -> None: with client.sql_generations.nl_generations.with_streaming_response.create( - "string", + id="id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -61,22 +61,22 @@ def test_streaming_response_create(self, client: Dataherald) -> None: def test_path_params_create(self, client: Dataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): client.sql_generations.nl_generations.with_raw_response.create( - "", + id="", ) @parametrize def test_method_retrieve(self, client: Dataherald) -> None: nl_generation = client.sql_generations.nl_generations.retrieve( - "string", + id="id", ) assert_matches_type(object, nl_generation, path=["response"]) @parametrize def test_method_retrieve_with_all_params(self, client: Dataherald) -> None: nl_generation = client.sql_generations.nl_generations.retrieve( - "string", + id="id", ascend=True, - order="string", + order="order", page=0, page_size=0, ) @@ -85,7 +85,7 @@ def test_method_retrieve_with_all_params(self, client: Dataherald) -> None: @parametrize def test_raw_response_retrieve(self, client: Dataherald) -> None: response = client.sql_generations.nl_generations.with_raw_response.retrieve( - "string", + id="id", ) assert response.is_closed is True @@ -96,7 +96,7 @@ def test_raw_response_retrieve(self, client: Dataherald) -> None: @parametrize def test_streaming_response_retrieve(self, client: Dataherald) -> None: with client.sql_generations.nl_generations.with_streaming_response.retrieve( - "string", + id="id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -110,7 +110,7 @@ def test_streaming_response_retrieve(self, client: Dataherald) -> None: def test_path_params_retrieve(self, client: Dataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): client.sql_generations.nl_generations.with_raw_response.retrieve( - "", + id="", ) @@ -120,14 +120,14 @@ class TestAsyncNlGenerations: @parametrize async def test_method_create(self, async_client: AsyncDataherald) -> 
None: nl_generation = await async_client.sql_generations.nl_generations.create( - "string", + id="id", ) assert_matches_type(NlGenerationResponse, nl_generation, path=["response"]) @parametrize async def test_method_create_with_all_params(self, async_client: AsyncDataherald) -> None: nl_generation = await async_client.sql_generations.nl_generations.create( - "string", + id="id", max_rows=0, metadata={}, ) @@ -136,7 +136,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncDataherald @parametrize async def test_raw_response_create(self, async_client: AsyncDataherald) -> None: response = await async_client.sql_generations.nl_generations.with_raw_response.create( - "string", + id="id", ) assert response.is_closed is True @@ -147,7 +147,7 @@ async def test_raw_response_create(self, async_client: AsyncDataherald) -> None: @parametrize async def test_streaming_response_create(self, async_client: AsyncDataherald) -> None: async with async_client.sql_generations.nl_generations.with_streaming_response.create( - "string", + id="id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -161,22 +161,22 @@ async def test_streaming_response_create(self, async_client: AsyncDataherald) -> async def test_path_params_create(self, async_client: AsyncDataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): await async_client.sql_generations.nl_generations.with_raw_response.create( - "", + id="", ) @parametrize async def test_method_retrieve(self, async_client: AsyncDataherald) -> None: nl_generation = await async_client.sql_generations.nl_generations.retrieve( - "string", + id="id", ) assert_matches_type(object, nl_generation, path=["response"]) @parametrize async def test_method_retrieve_with_all_params(self, async_client: AsyncDataherald) -> None: nl_generation = await async_client.sql_generations.nl_generations.retrieve( - "string", + id="id", ascend=True, - order="string", + order="order", page=0, page_size=0, ) @@ -185,7 +185,7 @@ async def test_method_retrieve_with_all_params(self, async_client: AsyncDatahera @parametrize async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> None: response = await async_client.sql_generations.nl_generations.with_raw_response.retrieve( - "string", + id="id", ) assert response.is_closed is True @@ -196,7 +196,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> Non @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncDataherald) -> None: async with async_client.sql_generations.nl_generations.with_streaming_response.retrieve( - "string", + id="id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -210,5 +210,5 @@ async def test_streaming_response_retrieve(self, async_client: AsyncDataherald) async def test_path_params_retrieve(self, async_client: AsyncDataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): await async_client.sql_generations.nl_generations.with_raw_response.retrieve( - "", + id="", ) diff --git a/tests/api_resources/test_database_connections.py b/tests/api_resources/test_database_connections.py index eb14191..1fe8ebc 100644 --- a/tests/api_resources/test_database_connections.py +++ b/tests/api_resources/test_database_connections.py @@ -23,23 +23,23 @@ class TestDatabaseConnections: @parametrize def 
test_method_create(self, client: Dataherald) -> None: database_connection = client.database_connections.create( - alias="string", - connection_uri="string", + alias="alias", + connection_uri="connection_uri", ) assert_matches_type(DBConnectionResponse, database_connection, path=["response"]) @parametrize def test_method_create_with_all_params(self, client: Dataherald) -> None: database_connection = client.database_connections.create( - alias="string", - connection_uri="string", - bigquery_credential_file_content={}, + alias="alias", + connection_uri="connection_uri", + bigquery_credential_file_content="string", metadata={}, - sqlite_file_path="string", + sqlite_file_path="sqlite_file_path", ssh_settings={ - "host": "string", - "username": "string", - "password": "string", + "host": "host", + "password": "password", + "username": "username", }, use_ssh=True, ) @@ -48,8 +48,8 @@ def test_method_create_with_all_params(self, client: Dataherald) -> None: @parametrize def test_raw_response_create(self, client: Dataherald) -> None: response = client.database_connections.with_raw_response.create( - alias="string", - connection_uri="string", + alias="alias", + connection_uri="connection_uri", ) assert response.is_closed is True @@ -60,8 +60,8 @@ def test_raw_response_create(self, client: Dataherald) -> None: @parametrize def test_streaming_response_create(self, client: Dataherald) -> None: with client.database_connections.with_streaming_response.create( - alias="string", - connection_uri="string", + alias="alias", + connection_uri="connection_uri", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -74,14 +74,14 @@ def test_streaming_response_create(self, client: Dataherald) -> None: @parametrize def test_method_retrieve(self, client: Dataherald) -> None: database_connection = client.database_connections.retrieve( - "string", + "id", ) assert_matches_type(DBConnectionResponse, database_connection, path=["response"]) @parametrize def test_raw_response_retrieve(self, client: Dataherald) -> None: response = client.database_connections.with_raw_response.retrieve( - "string", + "id", ) assert response.is_closed is True @@ -92,7 +92,7 @@ def test_raw_response_retrieve(self, client: Dataherald) -> None: @parametrize def test_streaming_response_retrieve(self, client: Dataherald) -> None: with client.database_connections.with_streaming_response.retrieve( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -112,25 +112,25 @@ def test_path_params_retrieve(self, client: Dataherald) -> None: @parametrize def test_method_update(self, client: Dataherald) -> None: database_connection = client.database_connections.update( - "string", - alias="string", - connection_uri="string", + id="id", + alias="alias", + connection_uri="connection_uri", ) assert_matches_type(DBConnectionResponse, database_connection, path=["response"]) @parametrize def test_method_update_with_all_params(self, client: Dataherald) -> None: database_connection = client.database_connections.update( - "string", - alias="string", - connection_uri="string", - bigquery_credential_file_content={}, + id="id", + alias="alias", + connection_uri="connection_uri", + bigquery_credential_file_content="string", metadata={}, - sqlite_file_path="string", + sqlite_file_path="sqlite_file_path", ssh_settings={ - "host": "string", - "username": "string", - "password": "string", + "host": "host", + "password": 
"password", + "username": "username", }, use_ssh=True, ) @@ -139,9 +139,9 @@ def test_method_update_with_all_params(self, client: Dataherald) -> None: @parametrize def test_raw_response_update(self, client: Dataherald) -> None: response = client.database_connections.with_raw_response.update( - "string", - alias="string", - connection_uri="string", + id="id", + alias="alias", + connection_uri="connection_uri", ) assert response.is_closed is True @@ -152,9 +152,9 @@ def test_raw_response_update(self, client: Dataherald) -> None: @parametrize def test_streaming_response_update(self, client: Dataherald) -> None: with client.database_connections.with_streaming_response.update( - "string", - alias="string", - connection_uri="string", + id="id", + alias="alias", + connection_uri="connection_uri", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -168,9 +168,9 @@ def test_streaming_response_update(self, client: Dataherald) -> None: def test_path_params_update(self, client: Dataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): client.database_connections.with_raw_response.update( - "", - alias="string", - connection_uri="string", + id="", + alias="alias", + connection_uri="connection_uri", ) @parametrize @@ -205,23 +205,23 @@ class TestAsyncDatabaseConnections: @parametrize async def test_method_create(self, async_client: AsyncDataherald) -> None: database_connection = await async_client.database_connections.create( - alias="string", - connection_uri="string", + alias="alias", + connection_uri="connection_uri", ) assert_matches_type(DBConnectionResponse, database_connection, path=["response"]) @parametrize async def test_method_create_with_all_params(self, async_client: AsyncDataherald) -> None: database_connection = await async_client.database_connections.create( - alias="string", - connection_uri="string", - bigquery_credential_file_content={}, + alias="alias", + connection_uri="connection_uri", + bigquery_credential_file_content="string", metadata={}, - sqlite_file_path="string", + sqlite_file_path="sqlite_file_path", ssh_settings={ - "host": "string", - "username": "string", - "password": "string", + "host": "host", + "password": "password", + "username": "username", }, use_ssh=True, ) @@ -230,8 +230,8 @@ async def test_method_create_with_all_params(self, async_client: AsyncDataherald @parametrize async def test_raw_response_create(self, async_client: AsyncDataherald) -> None: response = await async_client.database_connections.with_raw_response.create( - alias="string", - connection_uri="string", + alias="alias", + connection_uri="connection_uri", ) assert response.is_closed is True @@ -242,8 +242,8 @@ async def test_raw_response_create(self, async_client: AsyncDataherald) -> None: @parametrize async def test_streaming_response_create(self, async_client: AsyncDataherald) -> None: async with async_client.database_connections.with_streaming_response.create( - alias="string", - connection_uri="string", + alias="alias", + connection_uri="connection_uri", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -256,14 +256,14 @@ async def test_streaming_response_create(self, async_client: AsyncDataherald) -> @parametrize async def test_method_retrieve(self, async_client: AsyncDataherald) -> None: database_connection = await async_client.database_connections.retrieve( - "string", + "id", ) 
assert_matches_type(DBConnectionResponse, database_connection, path=["response"]) @parametrize async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> None: response = await async_client.database_connections.with_raw_response.retrieve( - "string", + "id", ) assert response.is_closed is True @@ -274,7 +274,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> Non @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncDataherald) -> None: async with async_client.database_connections.with_streaming_response.retrieve( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -294,25 +294,25 @@ async def test_path_params_retrieve(self, async_client: AsyncDataherald) -> None @parametrize async def test_method_update(self, async_client: AsyncDataherald) -> None: database_connection = await async_client.database_connections.update( - "string", - alias="string", - connection_uri="string", + id="id", + alias="alias", + connection_uri="connection_uri", ) assert_matches_type(DBConnectionResponse, database_connection, path=["response"]) @parametrize async def test_method_update_with_all_params(self, async_client: AsyncDataherald) -> None: database_connection = await async_client.database_connections.update( - "string", - alias="string", - connection_uri="string", - bigquery_credential_file_content={}, + id="id", + alias="alias", + connection_uri="connection_uri", + bigquery_credential_file_content="string", metadata={}, - sqlite_file_path="string", + sqlite_file_path="sqlite_file_path", ssh_settings={ - "host": "string", - "username": "string", - "password": "string", + "host": "host", + "password": "password", + "username": "username", }, use_ssh=True, ) @@ -321,9 +321,9 @@ async def test_method_update_with_all_params(self, async_client: AsyncDataherald @parametrize async def test_raw_response_update(self, async_client: AsyncDataherald) -> None: response = await async_client.database_connections.with_raw_response.update( - "string", - alias="string", - connection_uri="string", + id="id", + alias="alias", + connection_uri="connection_uri", ) assert response.is_closed is True @@ -334,9 +334,9 @@ async def test_raw_response_update(self, async_client: AsyncDataherald) -> None: @parametrize async def test_streaming_response_update(self, async_client: AsyncDataherald) -> None: async with async_client.database_connections.with_streaming_response.update( - "string", - alias="string", - connection_uri="string", + id="id", + alias="alias", + connection_uri="connection_uri", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -350,9 +350,9 @@ async def test_streaming_response_update(self, async_client: AsyncDataherald) -> async def test_path_params_update(self, async_client: AsyncDataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): await async_client.database_connections.with_raw_response.update( - "", - alias="string", - connection_uri="string", + id="", + alias="alias", + connection_uri="connection_uri", ) @parametrize diff --git a/tests/api_resources/test_finetunings.py b/tests/api_resources/test_finetunings.py index fc617c2..36728f7 100644 --- a/tests/api_resources/test_finetunings.py +++ b/tests/api_resources/test_finetunings.py @@ -23,22 +23,22 @@ class TestFinetunings: @parametrize def test_method_create(self, client: 
Dataherald) -> None: finetuning = client.finetunings.create( - alias="string", + alias="alias", base_llm={}, - db_connection_id="string", + db_connection_id="db_connection_id", ) assert_matches_type(FinetuningResponse, finetuning, path=["response"]) @parametrize def test_method_create_with_all_params(self, client: Dataherald) -> None: finetuning = client.finetunings.create( - alias="string", + alias="alias", base_llm={ - "model_provider": "string", - "model_name": "string", + "model_name": "model_name", "model_parameters": {"foo": "string"}, + "model_provider": "model_provider", }, - db_connection_id="string", + db_connection_id="db_connection_id", golden_sqls=["string", "string", "string"], metadata={}, ) @@ -47,9 +47,9 @@ def test_method_create_with_all_params(self, client: Dataherald) -> None: @parametrize def test_raw_response_create(self, client: Dataherald) -> None: response = client.finetunings.with_raw_response.create( - alias="string", + alias="alias", base_llm={}, - db_connection_id="string", + db_connection_id="db_connection_id", ) assert response.is_closed is True @@ -60,9 +60,9 @@ def test_raw_response_create(self, client: Dataherald) -> None: @parametrize def test_streaming_response_create(self, client: Dataherald) -> None: with client.finetunings.with_streaming_response.create( - alias="string", + alias="alias", base_llm={}, - db_connection_id="string", + db_connection_id="db_connection_id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -75,14 +75,14 @@ def test_streaming_response_create(self, client: Dataherald) -> None: @parametrize def test_method_retrieve(self, client: Dataherald) -> None: finetuning = client.finetunings.retrieve( - "string", + "id", ) assert_matches_type(FinetuningResponse, finetuning, path=["response"]) @parametrize def test_raw_response_retrieve(self, client: Dataherald) -> None: response = client.finetunings.with_raw_response.retrieve( - "string", + "id", ) assert response.is_closed is True @@ -93,7 +93,7 @@ def test_raw_response_retrieve(self, client: Dataherald) -> None: @parametrize def test_streaming_response_retrieve(self, client: Dataherald) -> None: with client.finetunings.with_streaming_response.retrieve( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -113,14 +113,14 @@ def test_path_params_retrieve(self, client: Dataherald) -> None: @parametrize def test_method_list(self, client: Dataherald) -> None: finetuning = client.finetunings.list( - db_connection_id="string", + db_connection_id="db_connection_id", ) assert_matches_type(FinetuningListResponse, finetuning, path=["response"]) @parametrize def test_raw_response_list(self, client: Dataherald) -> None: response = client.finetunings.with_raw_response.list( - db_connection_id="string", + db_connection_id="db_connection_id", ) assert response.is_closed is True @@ -131,7 +131,7 @@ def test_raw_response_list(self, client: Dataherald) -> None: @parametrize def test_streaming_response_list(self, client: Dataherald) -> None: with client.finetunings.with_streaming_response.list( - db_connection_id="string", + db_connection_id="db_connection_id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -144,14 +144,14 @@ def test_streaming_response_list(self, client: Dataherald) -> None: @parametrize def test_method_cancel(self, client: Dataherald) -> None: finetuning = 
client.finetunings.cancel( - "string", + "id", ) assert_matches_type(FinetuningResponse, finetuning, path=["response"]) @parametrize def test_raw_response_cancel(self, client: Dataherald) -> None: response = client.finetunings.with_raw_response.cancel( - "string", + "id", ) assert response.is_closed is True @@ -162,7 +162,7 @@ def test_raw_response_cancel(self, client: Dataherald) -> None: @parametrize def test_streaming_response_cancel(self, client: Dataherald) -> None: with client.finetunings.with_streaming_response.cancel( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -186,22 +186,22 @@ class TestAsyncFinetunings: @parametrize async def test_method_create(self, async_client: AsyncDataherald) -> None: finetuning = await async_client.finetunings.create( - alias="string", + alias="alias", base_llm={}, - db_connection_id="string", + db_connection_id="db_connection_id", ) assert_matches_type(FinetuningResponse, finetuning, path=["response"]) @parametrize async def test_method_create_with_all_params(self, async_client: AsyncDataherald) -> None: finetuning = await async_client.finetunings.create( - alias="string", + alias="alias", base_llm={ - "model_provider": "string", - "model_name": "string", + "model_name": "model_name", "model_parameters": {"foo": "string"}, + "model_provider": "model_provider", }, - db_connection_id="string", + db_connection_id="db_connection_id", golden_sqls=["string", "string", "string"], metadata={}, ) @@ -210,9 +210,9 @@ async def test_method_create_with_all_params(self, async_client: AsyncDataherald @parametrize async def test_raw_response_create(self, async_client: AsyncDataherald) -> None: response = await async_client.finetunings.with_raw_response.create( - alias="string", + alias="alias", base_llm={}, - db_connection_id="string", + db_connection_id="db_connection_id", ) assert response.is_closed is True @@ -223,9 +223,9 @@ async def test_raw_response_create(self, async_client: AsyncDataherald) -> None: @parametrize async def test_streaming_response_create(self, async_client: AsyncDataherald) -> None: async with async_client.finetunings.with_streaming_response.create( - alias="string", + alias="alias", base_llm={}, - db_connection_id="string", + db_connection_id="db_connection_id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -238,14 +238,14 @@ async def test_streaming_response_create(self, async_client: AsyncDataherald) -> @parametrize async def test_method_retrieve(self, async_client: AsyncDataherald) -> None: finetuning = await async_client.finetunings.retrieve( - "string", + "id", ) assert_matches_type(FinetuningResponse, finetuning, path=["response"]) @parametrize async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> None: response = await async_client.finetunings.with_raw_response.retrieve( - "string", + "id", ) assert response.is_closed is True @@ -256,7 +256,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> Non @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncDataherald) -> None: async with async_client.finetunings.with_streaming_response.retrieve( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -276,14 +276,14 @@ async def test_path_params_retrieve(self, async_client: AsyncDataherald) -> None @parametrize 
async def test_method_list(self, async_client: AsyncDataherald) -> None: finetuning = await async_client.finetunings.list( - db_connection_id="string", + db_connection_id="db_connection_id", ) assert_matches_type(FinetuningListResponse, finetuning, path=["response"]) @parametrize async def test_raw_response_list(self, async_client: AsyncDataherald) -> None: response = await async_client.finetunings.with_raw_response.list( - db_connection_id="string", + db_connection_id="db_connection_id", ) assert response.is_closed is True @@ -294,7 +294,7 @@ async def test_raw_response_list(self, async_client: AsyncDataherald) -> None: @parametrize async def test_streaming_response_list(self, async_client: AsyncDataherald) -> None: async with async_client.finetunings.with_streaming_response.list( - db_connection_id="string", + db_connection_id="db_connection_id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -307,14 +307,14 @@ async def test_streaming_response_list(self, async_client: AsyncDataherald) -> N @parametrize async def test_method_cancel(self, async_client: AsyncDataherald) -> None: finetuning = await async_client.finetunings.cancel( - "string", + "id", ) assert_matches_type(FinetuningResponse, finetuning, path=["response"]) @parametrize async def test_raw_response_cancel(self, async_client: AsyncDataherald) -> None: response = await async_client.finetunings.with_raw_response.cancel( - "string", + "id", ) assert response.is_closed is True @@ -325,7 +325,7 @@ async def test_raw_response_cancel(self, async_client: AsyncDataherald) -> None: @parametrize async def test_streaming_response_cancel(self, async_client: AsyncDataherald) -> None: async with async_client.finetunings.with_streaming_response.cancel( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" diff --git a/tests/api_resources/test_generations.py b/tests/api_resources/test_generations.py index 9904963..303140d 100644 --- a/tests/api_resources/test_generations.py +++ b/tests/api_resources/test_generations.py @@ -24,14 +24,14 @@ class TestGenerations: @parametrize def test_method_create(self, client: Dataherald) -> None: generation = client.generations.create( - "string", + "id", ) assert_matches_type(GenerationResponse, generation, path=["response"]) @parametrize def test_raw_response_create(self, client: Dataherald) -> None: response = client.generations.with_raw_response.create( - "string", + "id", ) assert response.is_closed is True @@ -42,7 +42,7 @@ def test_raw_response_create(self, client: Dataherald) -> None: @parametrize def test_streaming_response_create(self, client: Dataherald) -> None: with client.generations.with_streaming_response.create( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -62,14 +62,14 @@ def test_path_params_create(self, client: Dataherald) -> None: @parametrize def test_method_retrieve(self, client: Dataherald) -> None: generation = client.generations.retrieve( - "string", + "id", ) assert_matches_type(GenerationResponse, generation, path=["response"]) @parametrize def test_raw_response_retrieve(self, client: Dataherald) -> None: response = client.generations.with_raw_response.retrieve( - "string", + "id", ) assert response.is_closed is True @@ -80,7 +80,7 @@ def test_raw_response_retrieve(self, client: Dataherald) -> None: @parametrize def 
test_streaming_response_retrieve(self, client: Dataherald) -> None: with client.generations.with_streaming_response.retrieve( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -100,23 +100,23 @@ def test_path_params_retrieve(self, client: Dataherald) -> None: @parametrize def test_method_update(self, client: Dataherald) -> None: generation = client.generations.update( - "string", + id="id", ) assert_matches_type(GenerationResponse, generation, path=["response"]) @parametrize def test_method_update_with_all_params(self, client: Dataherald) -> None: generation = client.generations.update( - "string", + id="id", generation_status="INITIALIZED", - message="string", + message="message", ) assert_matches_type(GenerationResponse, generation, path=["response"]) @parametrize def test_raw_response_update(self, client: Dataherald) -> None: response = client.generations.with_raw_response.update( - "string", + id="id", ) assert response.is_closed is True @@ -127,7 +127,7 @@ def test_raw_response_update(self, client: Dataherald) -> None: @parametrize def test_streaming_response_update(self, client: Dataherald) -> None: with client.generations.with_streaming_response.update( - "string", + id="id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -141,7 +141,7 @@ def test_streaming_response_update(self, client: Dataherald) -> None: def test_path_params_update(self, client: Dataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): client.generations.with_raw_response.update( - "", + id="", ) @parametrize @@ -153,7 +153,7 @@ def test_method_list(self, client: Dataherald) -> None: def test_method_list_with_all_params(self, client: Dataherald) -> None: generation = client.generations.list( ascend=True, - order="string", + order="order", page=0, page_size=0, ) @@ -182,14 +182,14 @@ def test_streaming_response_list(self, client: Dataherald) -> None: @parametrize def test_method_nl_generation(self, client: Dataherald) -> None: generation = client.generations.nl_generation( - "string", + "id", ) assert_matches_type(NlGenerationResponse, generation, path=["response"]) @parametrize def test_raw_response_nl_generation(self, client: Dataherald) -> None: response = client.generations.with_raw_response.nl_generation( - "string", + "id", ) assert response.is_closed is True @@ -200,7 +200,7 @@ def test_raw_response_nl_generation(self, client: Dataherald) -> None: @parametrize def test_streaming_response_nl_generation(self, client: Dataherald) -> None: with client.generations.with_streaming_response.nl_generation( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -220,16 +220,16 @@ def test_path_params_nl_generation(self, client: Dataherald) -> None: @parametrize def test_method_sql_generation(self, client: Dataherald) -> None: generation = client.generations.sql_generation( - "string", - sql="string", + id="id", + sql="sql", ) assert_matches_type(GenerationResponse, generation, path=["response"]) @parametrize def test_raw_response_sql_generation(self, client: Dataherald) -> None: response = client.generations.with_raw_response.sql_generation( - "string", - sql="string", + id="id", + sql="sql", ) assert response.is_closed is True @@ -240,8 +240,8 @@ def test_raw_response_sql_generation(self, client: Dataherald) -> 
None: @parametrize def test_streaming_response_sql_generation(self, client: Dataherald) -> None: with client.generations.with_streaming_response.sql_generation( - "string", - sql="string", + id="id", + sql="sql", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -255,8 +255,8 @@ def test_streaming_response_sql_generation(self, client: Dataherald) -> None: def test_path_params_sql_generation(self, client: Dataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): client.generations.with_raw_response.sql_generation( - "", - sql="string", + id="", + sql="sql", ) @@ -266,14 +266,14 @@ class TestAsyncGenerations: @parametrize async def test_method_create(self, async_client: AsyncDataherald) -> None: generation = await async_client.generations.create( - "string", + "id", ) assert_matches_type(GenerationResponse, generation, path=["response"]) @parametrize async def test_raw_response_create(self, async_client: AsyncDataherald) -> None: response = await async_client.generations.with_raw_response.create( - "string", + "id", ) assert response.is_closed is True @@ -284,7 +284,7 @@ async def test_raw_response_create(self, async_client: AsyncDataherald) -> None: @parametrize async def test_streaming_response_create(self, async_client: AsyncDataherald) -> None: async with async_client.generations.with_streaming_response.create( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -304,14 +304,14 @@ async def test_path_params_create(self, async_client: AsyncDataherald) -> None: @parametrize async def test_method_retrieve(self, async_client: AsyncDataherald) -> None: generation = await async_client.generations.retrieve( - "string", + "id", ) assert_matches_type(GenerationResponse, generation, path=["response"]) @parametrize async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> None: response = await async_client.generations.with_raw_response.retrieve( - "string", + "id", ) assert response.is_closed is True @@ -322,7 +322,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> Non @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncDataherald) -> None: async with async_client.generations.with_streaming_response.retrieve( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -342,23 +342,23 @@ async def test_path_params_retrieve(self, async_client: AsyncDataherald) -> None @parametrize async def test_method_update(self, async_client: AsyncDataherald) -> None: generation = await async_client.generations.update( - "string", + id="id", ) assert_matches_type(GenerationResponse, generation, path=["response"]) @parametrize async def test_method_update_with_all_params(self, async_client: AsyncDataherald) -> None: generation = await async_client.generations.update( - "string", + id="id", generation_status="INITIALIZED", - message="string", + message="message", ) assert_matches_type(GenerationResponse, generation, path=["response"]) @parametrize async def test_raw_response_update(self, async_client: AsyncDataherald) -> None: response = await async_client.generations.with_raw_response.update( - "string", + id="id", ) assert response.is_closed is True @@ -369,7 +369,7 @@ async def test_raw_response_update(self, async_client: AsyncDataherald) -> 
None: @parametrize async def test_streaming_response_update(self, async_client: AsyncDataherald) -> None: async with async_client.generations.with_streaming_response.update( - "string", + id="id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -383,7 +383,7 @@ async def test_streaming_response_update(self, async_client: AsyncDataherald) -> async def test_path_params_update(self, async_client: AsyncDataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): await async_client.generations.with_raw_response.update( - "", + id="", ) @parametrize @@ -395,7 +395,7 @@ async def test_method_list(self, async_client: AsyncDataherald) -> None: async def test_method_list_with_all_params(self, async_client: AsyncDataherald) -> None: generation = await async_client.generations.list( ascend=True, - order="string", + order="order", page=0, page_size=0, ) @@ -424,14 +424,14 @@ async def test_streaming_response_list(self, async_client: AsyncDataherald) -> N @parametrize async def test_method_nl_generation(self, async_client: AsyncDataherald) -> None: generation = await async_client.generations.nl_generation( - "string", + "id", ) assert_matches_type(NlGenerationResponse, generation, path=["response"]) @parametrize async def test_raw_response_nl_generation(self, async_client: AsyncDataherald) -> None: response = await async_client.generations.with_raw_response.nl_generation( - "string", + "id", ) assert response.is_closed is True @@ -442,7 +442,7 @@ async def test_raw_response_nl_generation(self, async_client: AsyncDataherald) - @parametrize async def test_streaming_response_nl_generation(self, async_client: AsyncDataherald) -> None: async with async_client.generations.with_streaming_response.nl_generation( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -462,16 +462,16 @@ async def test_path_params_nl_generation(self, async_client: AsyncDataherald) -> @parametrize async def test_method_sql_generation(self, async_client: AsyncDataherald) -> None: generation = await async_client.generations.sql_generation( - "string", - sql="string", + id="id", + sql="sql", ) assert_matches_type(GenerationResponse, generation, path=["response"]) @parametrize async def test_raw_response_sql_generation(self, async_client: AsyncDataherald) -> None: response = await async_client.generations.with_raw_response.sql_generation( - "string", - sql="string", + id="id", + sql="sql", ) assert response.is_closed is True @@ -482,8 +482,8 @@ async def test_raw_response_sql_generation(self, async_client: AsyncDataherald) @parametrize async def test_streaming_response_sql_generation(self, async_client: AsyncDataherald) -> None: async with async_client.generations.with_streaming_response.sql_generation( - "string", - sql="string", + id="id", + sql="sql", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -497,6 +497,6 @@ async def test_streaming_response_sql_generation(self, async_client: AsyncDatahe async def test_path_params_sql_generation(self, async_client: AsyncDataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): await async_client.generations.with_raw_response.sql_generation( - "", - sql="string", + id="", + sql="sql", ) diff --git a/tests/api_resources/test_golden_sqls.py 
b/tests/api_resources/test_golden_sqls.py index c3f76f4..ce37aa4 100644 --- a/tests/api_resources/test_golden_sqls.py +++ b/tests/api_resources/test_golden_sqls.py @@ -24,14 +24,14 @@ class TestGoldenSqls: @parametrize def test_method_retrieve(self, client: Dataherald) -> None: golden_sql = client.golden_sqls.retrieve( - "string", + "id", ) assert_matches_type(GoldenSqlResponse, golden_sql, path=["response"]) @parametrize def test_raw_response_retrieve(self, client: Dataherald) -> None: response = client.golden_sqls.with_raw_response.retrieve( - "string", + "id", ) assert response.is_closed is True @@ -42,7 +42,7 @@ def test_raw_response_retrieve(self, client: Dataherald) -> None: @parametrize def test_streaming_response_retrieve(self, client: Dataherald) -> None: with client.golden_sqls.with_streaming_response.retrieve( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -68,8 +68,8 @@ def test_method_list(self, client: Dataherald) -> None: def test_method_list_with_all_params(self, client: Dataherald) -> None: golden_sql = client.golden_sqls.list( ascend=True, - db_connection_id="string", - order="string", + db_connection_id="db_connection_id", + order="order", page=0, page_size=0, ) @@ -98,14 +98,14 @@ def test_streaming_response_list(self, client: Dataherald) -> None: @parametrize def test_method_delete(self, client: Dataherald) -> None: golden_sql = client.golden_sqls.delete( - "string", + "id", ) assert_matches_type(object, golden_sql, path=["response"]) @parametrize def test_raw_response_delete(self, client: Dataherald) -> None: response = client.golden_sqls.with_raw_response.delete( - "string", + "id", ) assert response.is_closed is True @@ -116,7 +116,7 @@ def test_raw_response_delete(self, client: Dataherald) -> None: @parametrize def test_streaming_response_delete(self, client: Dataherald) -> None: with client.golden_sqls.with_streaming_response.delete( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -138,19 +138,19 @@ def test_method_upload(self, client: Dataherald) -> None: golden_sql = client.golden_sqls.upload( body=[ { - "db_connection_id": "string", - "prompt_text": "string", - "sql": "string", + "db_connection_id": "db_connection_id", + "prompt_text": "prompt_text", + "sql": "sql", }, { - "db_connection_id": "string", - "prompt_text": "string", - "sql": "string", + "db_connection_id": "db_connection_id", + "prompt_text": "prompt_text", + "sql": "sql", }, { - "db_connection_id": "string", - "prompt_text": "string", - "sql": "string", + "db_connection_id": "db_connection_id", + "prompt_text": "prompt_text", + "sql": "sql", }, ], ) @@ -161,19 +161,19 @@ def test_raw_response_upload(self, client: Dataherald) -> None: response = client.golden_sqls.with_raw_response.upload( body=[ { - "db_connection_id": "string", - "prompt_text": "string", - "sql": "string", + "db_connection_id": "db_connection_id", + "prompt_text": "prompt_text", + "sql": "sql", }, { - "db_connection_id": "string", - "prompt_text": "string", - "sql": "string", + "db_connection_id": "db_connection_id", + "prompt_text": "prompt_text", + "sql": "sql", }, { - "db_connection_id": "string", - "prompt_text": "string", - "sql": "string", + "db_connection_id": "db_connection_id", + "prompt_text": "prompt_text", + "sql": "sql", }, ], ) @@ -188,19 +188,19 @@ def test_streaming_response_upload(self, client: Dataherald) -> None: with 
client.golden_sqls.with_streaming_response.upload( body=[ { - "db_connection_id": "string", - "prompt_text": "string", - "sql": "string", + "db_connection_id": "db_connection_id", + "prompt_text": "prompt_text", + "sql": "sql", }, { - "db_connection_id": "string", - "prompt_text": "string", - "sql": "string", + "db_connection_id": "db_connection_id", + "prompt_text": "prompt_text", + "sql": "sql", }, { - "db_connection_id": "string", - "prompt_text": "string", - "sql": "string", + "db_connection_id": "db_connection_id", + "prompt_text": "prompt_text", + "sql": "sql", }, ], ) as response: @@ -219,14 +219,14 @@ class TestAsyncGoldenSqls: @parametrize async def test_method_retrieve(self, async_client: AsyncDataherald) -> None: golden_sql = await async_client.golden_sqls.retrieve( - "string", + "id", ) assert_matches_type(GoldenSqlResponse, golden_sql, path=["response"]) @parametrize async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> None: response = await async_client.golden_sqls.with_raw_response.retrieve( - "string", + "id", ) assert response.is_closed is True @@ -237,7 +237,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> Non @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncDataherald) -> None: async with async_client.golden_sqls.with_streaming_response.retrieve( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -263,8 +263,8 @@ async def test_method_list(self, async_client: AsyncDataherald) -> None: async def test_method_list_with_all_params(self, async_client: AsyncDataherald) -> None: golden_sql = await async_client.golden_sqls.list( ascend=True, - db_connection_id="string", - order="string", + db_connection_id="db_connection_id", + order="order", page=0, page_size=0, ) @@ -293,14 +293,14 @@ async def test_streaming_response_list(self, async_client: AsyncDataherald) -> N @parametrize async def test_method_delete(self, async_client: AsyncDataherald) -> None: golden_sql = await async_client.golden_sqls.delete( - "string", + "id", ) assert_matches_type(object, golden_sql, path=["response"]) @parametrize async def test_raw_response_delete(self, async_client: AsyncDataherald) -> None: response = await async_client.golden_sqls.with_raw_response.delete( - "string", + "id", ) assert response.is_closed is True @@ -311,7 +311,7 @@ async def test_raw_response_delete(self, async_client: AsyncDataherald) -> None: @parametrize async def test_streaming_response_delete(self, async_client: AsyncDataherald) -> None: async with async_client.golden_sqls.with_streaming_response.delete( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -333,19 +333,19 @@ async def test_method_upload(self, async_client: AsyncDataherald) -> None: golden_sql = await async_client.golden_sqls.upload( body=[ { - "db_connection_id": "string", - "prompt_text": "string", - "sql": "string", + "db_connection_id": "db_connection_id", + "prompt_text": "prompt_text", + "sql": "sql", }, { - "db_connection_id": "string", - "prompt_text": "string", - "sql": "string", + "db_connection_id": "db_connection_id", + "prompt_text": "prompt_text", + "sql": "sql", }, { - "db_connection_id": "string", - "prompt_text": "string", - "sql": "string", + "db_connection_id": "db_connection_id", + "prompt_text": "prompt_text", + "sql": "sql", }, ], ) @@ -356,19 +356,19 @@ async def 
test_raw_response_upload(self, async_client: AsyncDataherald) -> None: response = await async_client.golden_sqls.with_raw_response.upload( body=[ { - "db_connection_id": "string", - "prompt_text": "string", - "sql": "string", + "db_connection_id": "db_connection_id", + "prompt_text": "prompt_text", + "sql": "sql", }, { - "db_connection_id": "string", - "prompt_text": "string", - "sql": "string", + "db_connection_id": "db_connection_id", + "prompt_text": "prompt_text", + "sql": "sql", }, { - "db_connection_id": "string", - "prompt_text": "string", - "sql": "string", + "db_connection_id": "db_connection_id", + "prompt_text": "prompt_text", + "sql": "sql", }, ], ) @@ -383,19 +383,19 @@ async def test_streaming_response_upload(self, async_client: AsyncDataherald) -> async with async_client.golden_sqls.with_streaming_response.upload( body=[ { - "db_connection_id": "string", - "prompt_text": "string", - "sql": "string", + "db_connection_id": "db_connection_id", + "prompt_text": "prompt_text", + "sql": "sql", }, { - "db_connection_id": "string", - "prompt_text": "string", - "sql": "string", + "db_connection_id": "db_connection_id", + "prompt_text": "prompt_text", + "sql": "sql", }, { - "db_connection_id": "string", - "prompt_text": "string", - "sql": "string", + "db_connection_id": "db_connection_id", + "prompt_text": "prompt_text", + "sql": "sql", }, ], ) as response: diff --git a/tests/api_resources/test_instructions.py b/tests/api_resources/test_instructions.py index 36a76dc..85f7b93 100644 --- a/tests/api_resources/test_instructions.py +++ b/tests/api_resources/test_instructions.py @@ -23,15 +23,15 @@ class TestInstructions: @parametrize def test_method_create(self, client: Dataherald) -> None: instruction = client.instructions.create( - instruction="string", + instruction="instruction", ) assert_matches_type(InstructionResponse, instruction, path=["response"]) @parametrize def test_method_create_with_all_params(self, client: Dataherald) -> None: instruction = client.instructions.create( - instruction="string", - db_connection_id="string", + instruction="instruction", + db_connection_id="db_connection_id", metadata={}, ) assert_matches_type(InstructionResponse, instruction, path=["response"]) @@ -39,7 +39,7 @@ def test_method_create_with_all_params(self, client: Dataherald) -> None: @parametrize def test_raw_response_create(self, client: Dataherald) -> None: response = client.instructions.with_raw_response.create( - instruction="string", + instruction="instruction", ) assert response.is_closed is True @@ -50,7 +50,7 @@ def test_raw_response_create(self, client: Dataherald) -> None: @parametrize def test_streaming_response_create(self, client: Dataherald) -> None: with client.instructions.with_streaming_response.create( - instruction="string", + instruction="instruction", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -63,14 +63,14 @@ def test_streaming_response_create(self, client: Dataherald) -> None: @parametrize def test_method_retrieve(self, client: Dataherald) -> None: instruction = client.instructions.retrieve( - "string", + "id", ) assert_matches_type(InstructionResponse, instruction, path=["response"]) @parametrize def test_raw_response_retrieve(self, client: Dataherald) -> None: response = client.instructions.with_raw_response.retrieve( - "string", + "id", ) assert response.is_closed is True @@ -81,7 +81,7 @@ def test_raw_response_retrieve(self, client: Dataherald) -> None: @parametrize def 
test_streaming_response_retrieve(self, client: Dataherald) -> None: with client.instructions.with_streaming_response.retrieve( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -101,17 +101,17 @@ def test_path_params_retrieve(self, client: Dataherald) -> None: @parametrize def test_method_update(self, client: Dataherald) -> None: instruction = client.instructions.update( - "string", - instruction="string", + id="id", + instruction="instruction", ) assert_matches_type(InstructionResponse, instruction, path=["response"]) @parametrize def test_method_update_with_all_params(self, client: Dataherald) -> None: instruction = client.instructions.update( - "string", - instruction="string", - db_connection_id="string", + id="id", + instruction="instruction", + db_connection_id="db_connection_id", metadata={}, ) assert_matches_type(InstructionResponse, instruction, path=["response"]) @@ -119,8 +119,8 @@ def test_method_update_with_all_params(self, client: Dataherald) -> None: @parametrize def test_raw_response_update(self, client: Dataherald) -> None: response = client.instructions.with_raw_response.update( - "string", - instruction="string", + id="id", + instruction="instruction", ) assert response.is_closed is True @@ -131,8 +131,8 @@ def test_raw_response_update(self, client: Dataherald) -> None: @parametrize def test_streaming_response_update(self, client: Dataherald) -> None: with client.instructions.with_streaming_response.update( - "string", - instruction="string", + id="id", + instruction="instruction", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -146,21 +146,21 @@ def test_streaming_response_update(self, client: Dataherald) -> None: def test_path_params_update(self, client: Dataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): client.instructions.with_raw_response.update( - "", - instruction="string", + id="", + instruction="instruction", ) @parametrize def test_method_list(self, client: Dataherald) -> None: instruction = client.instructions.list( - db_connection_id="string", + db_connection_id="db_connection_id", ) assert_matches_type(InstructionListResponse, instruction, path=["response"]) @parametrize def test_raw_response_list(self, client: Dataherald) -> None: response = client.instructions.with_raw_response.list( - db_connection_id="string", + db_connection_id="db_connection_id", ) assert response.is_closed is True @@ -171,7 +171,7 @@ def test_raw_response_list(self, client: Dataherald) -> None: @parametrize def test_streaming_response_list(self, client: Dataherald) -> None: with client.instructions.with_streaming_response.list( - db_connection_id="string", + db_connection_id="db_connection_id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -184,14 +184,14 @@ def test_streaming_response_list(self, client: Dataherald) -> None: @parametrize def test_method_delete(self, client: Dataherald) -> None: instruction = client.instructions.delete( - "string", + "id", ) assert_matches_type(object, instruction, path=["response"]) @parametrize def test_raw_response_delete(self, client: Dataherald) -> None: response = client.instructions.with_raw_response.delete( - "string", + "id", ) assert response.is_closed is True @@ -202,7 +202,7 @@ def test_raw_response_delete(self, client: Dataherald) -> None: 
@parametrize def test_streaming_response_delete(self, client: Dataherald) -> None: with client.instructions.with_streaming_response.delete( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -226,15 +226,15 @@ class TestAsyncInstructions: @parametrize async def test_method_create(self, async_client: AsyncDataherald) -> None: instruction = await async_client.instructions.create( - instruction="string", + instruction="instruction", ) assert_matches_type(InstructionResponse, instruction, path=["response"]) @parametrize async def test_method_create_with_all_params(self, async_client: AsyncDataherald) -> None: instruction = await async_client.instructions.create( - instruction="string", - db_connection_id="string", + instruction="instruction", + db_connection_id="db_connection_id", metadata={}, ) assert_matches_type(InstructionResponse, instruction, path=["response"]) @@ -242,7 +242,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncDataherald @parametrize async def test_raw_response_create(self, async_client: AsyncDataherald) -> None: response = await async_client.instructions.with_raw_response.create( - instruction="string", + instruction="instruction", ) assert response.is_closed is True @@ -253,7 +253,7 @@ async def test_raw_response_create(self, async_client: AsyncDataherald) -> None: @parametrize async def test_streaming_response_create(self, async_client: AsyncDataherald) -> None: async with async_client.instructions.with_streaming_response.create( - instruction="string", + instruction="instruction", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -266,14 +266,14 @@ async def test_streaming_response_create(self, async_client: AsyncDataherald) -> @parametrize async def test_method_retrieve(self, async_client: AsyncDataherald) -> None: instruction = await async_client.instructions.retrieve( - "string", + "id", ) assert_matches_type(InstructionResponse, instruction, path=["response"]) @parametrize async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> None: response = await async_client.instructions.with_raw_response.retrieve( - "string", + "id", ) assert response.is_closed is True @@ -284,7 +284,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> Non @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncDataherald) -> None: async with async_client.instructions.with_streaming_response.retrieve( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -304,17 +304,17 @@ async def test_path_params_retrieve(self, async_client: AsyncDataherald) -> None @parametrize async def test_method_update(self, async_client: AsyncDataherald) -> None: instruction = await async_client.instructions.update( - "string", - instruction="string", + id="id", + instruction="instruction", ) assert_matches_type(InstructionResponse, instruction, path=["response"]) @parametrize async def test_method_update_with_all_params(self, async_client: AsyncDataherald) -> None: instruction = await async_client.instructions.update( - "string", - instruction="string", - db_connection_id="string", + id="id", + instruction="instruction", + db_connection_id="db_connection_id", metadata={}, ) assert_matches_type(InstructionResponse, instruction, path=["response"]) @@ -322,8 +322,8 @@ async def 
test_method_update_with_all_params(self, async_client: AsyncDataherald @parametrize async def test_raw_response_update(self, async_client: AsyncDataherald) -> None: response = await async_client.instructions.with_raw_response.update( - "string", - instruction="string", + id="id", + instruction="instruction", ) assert response.is_closed is True @@ -334,8 +334,8 @@ async def test_raw_response_update(self, async_client: AsyncDataherald) -> None: @parametrize async def test_streaming_response_update(self, async_client: AsyncDataherald) -> None: async with async_client.instructions.with_streaming_response.update( - "string", - instruction="string", + id="id", + instruction="instruction", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -349,21 +349,21 @@ async def test_streaming_response_update(self, async_client: AsyncDataherald) -> async def test_path_params_update(self, async_client: AsyncDataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): await async_client.instructions.with_raw_response.update( - "", - instruction="string", + id="", + instruction="instruction", ) @parametrize async def test_method_list(self, async_client: AsyncDataherald) -> None: instruction = await async_client.instructions.list( - db_connection_id="string", + db_connection_id="db_connection_id", ) assert_matches_type(InstructionListResponse, instruction, path=["response"]) @parametrize async def test_raw_response_list(self, async_client: AsyncDataherald) -> None: response = await async_client.instructions.with_raw_response.list( - db_connection_id="string", + db_connection_id="db_connection_id", ) assert response.is_closed is True @@ -374,7 +374,7 @@ async def test_raw_response_list(self, async_client: AsyncDataherald) -> None: @parametrize async def test_streaming_response_list(self, async_client: AsyncDataherald) -> None: async with async_client.instructions.with_streaming_response.list( - db_connection_id="string", + db_connection_id="db_connection_id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -387,14 +387,14 @@ async def test_streaming_response_list(self, async_client: AsyncDataherald) -> N @parametrize async def test_method_delete(self, async_client: AsyncDataherald) -> None: instruction = await async_client.instructions.delete( - "string", + "id", ) assert_matches_type(object, instruction, path=["response"]) @parametrize async def test_raw_response_delete(self, async_client: AsyncDataherald) -> None: response = await async_client.instructions.with_raw_response.delete( - "string", + "id", ) assert response.is_closed is True @@ -405,7 +405,7 @@ async def test_raw_response_delete(self, async_client: AsyncDataherald) -> None: @parametrize async def test_streaming_response_delete(self, async_client: AsyncDataherald) -> None: async with async_client.instructions.with_streaming_response.delete( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" diff --git a/tests/api_resources/test_nl_generations.py b/tests/api_resources/test_nl_generations.py index 70c4c9b..917c831 100644 --- a/tests/api_resources/test_nl_generations.py +++ b/tests/api_resources/test_nl_generations.py @@ -23,8 +23,8 @@ def test_method_create(self, client: Dataherald) -> None: nl_generation = client.nl_generations.create( sql_generation={ "prompt": { - 
"text": "string", - "db_connection_id": "string", + "db_connection_id": "db_connection_id", + "text": "text", } }, ) @@ -34,15 +34,15 @@ def test_method_create(self, client: Dataherald) -> None: def test_method_create_with_all_params(self, client: Dataherald) -> None: nl_generation = client.nl_generations.create( sql_generation={ - "finetuning_id": "string", - "evaluate": True, - "sql": "string", - "metadata": {}, "prompt": { - "text": "string", - "db_connection_id": "string", + "db_connection_id": "db_connection_id", + "text": "text", "metadata": {}, }, + "evaluate": True, + "finetuning_id": "finetuning_id", + "metadata": {}, + "sql": "sql", }, max_rows=0, metadata={}, @@ -54,8 +54,8 @@ def test_raw_response_create(self, client: Dataherald) -> None: response = client.nl_generations.with_raw_response.create( sql_generation={ "prompt": { - "text": "string", - "db_connection_id": "string", + "db_connection_id": "db_connection_id", + "text": "text", } }, ) @@ -70,8 +70,8 @@ def test_streaming_response_create(self, client: Dataherald) -> None: with client.nl_generations.with_streaming_response.create( sql_generation={ "prompt": { - "text": "string", - "db_connection_id": "string", + "db_connection_id": "db_connection_id", + "text": "text", } }, ) as response: @@ -86,14 +86,14 @@ def test_streaming_response_create(self, client: Dataherald) -> None: @parametrize def test_method_retrieve(self, client: Dataherald) -> None: nl_generation = client.nl_generations.retrieve( - "string", + "id", ) assert_matches_type(NlGenerationResponse, nl_generation, path=["response"]) @parametrize def test_raw_response_retrieve(self, client: Dataherald) -> None: response = client.nl_generations.with_raw_response.retrieve( - "string", + "id", ) assert response.is_closed is True @@ -104,7 +104,7 @@ def test_raw_response_retrieve(self, client: Dataherald) -> None: @parametrize def test_streaming_response_retrieve(self, client: Dataherald) -> None: with client.nl_generations.with_streaming_response.retrieve( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -130,7 +130,7 @@ def test_method_list(self, client: Dataherald) -> None: def test_method_list_with_all_params(self, client: Dataherald) -> None: nl_generation = client.nl_generations.list( ascend=True, - order="string", + order="order", page=0, page_size=0, ) @@ -165,8 +165,8 @@ async def test_method_create(self, async_client: AsyncDataherald) -> None: nl_generation = await async_client.nl_generations.create( sql_generation={ "prompt": { - "text": "string", - "db_connection_id": "string", + "db_connection_id": "db_connection_id", + "text": "text", } }, ) @@ -176,15 +176,15 @@ async def test_method_create(self, async_client: AsyncDataherald) -> None: async def test_method_create_with_all_params(self, async_client: AsyncDataherald) -> None: nl_generation = await async_client.nl_generations.create( sql_generation={ - "finetuning_id": "string", - "evaluate": True, - "sql": "string", - "metadata": {}, "prompt": { - "text": "string", - "db_connection_id": "string", + "db_connection_id": "db_connection_id", + "text": "text", "metadata": {}, }, + "evaluate": True, + "finetuning_id": "finetuning_id", + "metadata": {}, + "sql": "sql", }, max_rows=0, metadata={}, @@ -196,8 +196,8 @@ async def test_raw_response_create(self, async_client: AsyncDataherald) -> None: response = await async_client.nl_generations.with_raw_response.create( sql_generation={ "prompt": { - "text": "string", - 
"db_connection_id": "string", + "db_connection_id": "db_connection_id", + "text": "text", } }, ) @@ -212,8 +212,8 @@ async def test_streaming_response_create(self, async_client: AsyncDataherald) -> async with async_client.nl_generations.with_streaming_response.create( sql_generation={ "prompt": { - "text": "string", - "db_connection_id": "string", + "db_connection_id": "db_connection_id", + "text": "text", } }, ) as response: @@ -228,14 +228,14 @@ async def test_streaming_response_create(self, async_client: AsyncDataherald) -> @parametrize async def test_method_retrieve(self, async_client: AsyncDataherald) -> None: nl_generation = await async_client.nl_generations.retrieve( - "string", + "id", ) assert_matches_type(NlGenerationResponse, nl_generation, path=["response"]) @parametrize async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> None: response = await async_client.nl_generations.with_raw_response.retrieve( - "string", + "id", ) assert response.is_closed is True @@ -246,7 +246,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> Non @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncDataherald) -> None: async with async_client.nl_generations.with_streaming_response.retrieve( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -272,7 +272,7 @@ async def test_method_list(self, async_client: AsyncDataherald) -> None: async def test_method_list_with_all_params(self, async_client: AsyncDataherald) -> None: nl_generation = await async_client.nl_generations.list( ascend=True, - order="string", + order="order", page=0, page_size=0, ) diff --git a/tests/api_resources/test_prompts.py b/tests/api_resources/test_prompts.py index 387b6ce..c31885d 100644 --- a/tests/api_resources/test_prompts.py +++ b/tests/api_resources/test_prompts.py @@ -20,16 +20,16 @@ class TestPrompts: @parametrize def test_method_create(self, client: Dataherald) -> None: prompt = client.prompts.create( - db_connection_id="string", - text="string", + db_connection_id="db_connection_id", + text="text", ) assert_matches_type(PromptResponse, prompt, path=["response"]) @parametrize def test_method_create_with_all_params(self, client: Dataherald) -> None: prompt = client.prompts.create( - db_connection_id="string", - text="string", + db_connection_id="db_connection_id", + text="text", metadata={}, ) assert_matches_type(PromptResponse, prompt, path=["response"]) @@ -37,8 +37,8 @@ def test_method_create_with_all_params(self, client: Dataherald) -> None: @parametrize def test_raw_response_create(self, client: Dataherald) -> None: response = client.prompts.with_raw_response.create( - db_connection_id="string", - text="string", + db_connection_id="db_connection_id", + text="text", ) assert response.is_closed is True @@ -49,8 +49,8 @@ def test_raw_response_create(self, client: Dataherald) -> None: @parametrize def test_streaming_response_create(self, client: Dataherald) -> None: with client.prompts.with_streaming_response.create( - db_connection_id="string", - text="string", + db_connection_id="db_connection_id", + text="text", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -63,14 +63,14 @@ def test_streaming_response_create(self, client: Dataherald) -> None: @parametrize def test_method_retrieve(self, client: Dataherald) -> None: prompt = client.prompts.retrieve( - "string", + "id", ) 
assert_matches_type(PromptResponse, prompt, path=["response"]) @parametrize def test_raw_response_retrieve(self, client: Dataherald) -> None: response = client.prompts.with_raw_response.retrieve( - "string", + "id", ) assert response.is_closed is True @@ -81,7 +81,7 @@ def test_raw_response_retrieve(self, client: Dataherald) -> None: @parametrize def test_streaming_response_retrieve(self, client: Dataherald) -> None: with client.prompts.with_streaming_response.retrieve( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -107,7 +107,7 @@ def test_method_list(self, client: Dataherald) -> None: def test_method_list_with_all_params(self, client: Dataherald) -> None: prompt = client.prompts.list( ascend=True, - order="string", + order="order", page=0, page_size=0, ) @@ -140,16 +140,16 @@ class TestAsyncPrompts: @parametrize async def test_method_create(self, async_client: AsyncDataherald) -> None: prompt = await async_client.prompts.create( - db_connection_id="string", - text="string", + db_connection_id="db_connection_id", + text="text", ) assert_matches_type(PromptResponse, prompt, path=["response"]) @parametrize async def test_method_create_with_all_params(self, async_client: AsyncDataherald) -> None: prompt = await async_client.prompts.create( - db_connection_id="string", - text="string", + db_connection_id="db_connection_id", + text="text", metadata={}, ) assert_matches_type(PromptResponse, prompt, path=["response"]) @@ -157,8 +157,8 @@ async def test_method_create_with_all_params(self, async_client: AsyncDataherald @parametrize async def test_raw_response_create(self, async_client: AsyncDataherald) -> None: response = await async_client.prompts.with_raw_response.create( - db_connection_id="string", - text="string", + db_connection_id="db_connection_id", + text="text", ) assert response.is_closed is True @@ -169,8 +169,8 @@ async def test_raw_response_create(self, async_client: AsyncDataherald) -> None: @parametrize async def test_streaming_response_create(self, async_client: AsyncDataherald) -> None: async with async_client.prompts.with_streaming_response.create( - db_connection_id="string", - text="string", + db_connection_id="db_connection_id", + text="text", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -183,14 +183,14 @@ async def test_streaming_response_create(self, async_client: AsyncDataherald) -> @parametrize async def test_method_retrieve(self, async_client: AsyncDataherald) -> None: prompt = await async_client.prompts.retrieve( - "string", + "id", ) assert_matches_type(PromptResponse, prompt, path=["response"]) @parametrize async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> None: response = await async_client.prompts.with_raw_response.retrieve( - "string", + "id", ) assert response.is_closed is True @@ -201,7 +201,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> Non @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncDataherald) -> None: async with async_client.prompts.with_streaming_response.retrieve( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -227,7 +227,7 @@ async def test_method_list(self, async_client: AsyncDataherald) -> None: async def test_method_list_with_all_params(self, async_client: AsyncDataherald) -> None: prompt = await 
async_client.prompts.list( ascend=True, - order="string", + order="order", page=0, page_size=0, ) diff --git a/tests/api_resources/test_sql_generations.py b/tests/api_resources/test_sql_generations.py index aaa7187..03fbc80 100644 --- a/tests/api_resources/test_sql_generations.py +++ b/tests/api_resources/test_sql_generations.py @@ -25,8 +25,8 @@ class TestSqlGenerations: def test_method_create(self, client: Dataherald) -> None: sql_generation = client.sql_generations.create( prompt={ - "text": "string", - "db_connection_id": "string", + "db_connection_id": "db_connection_id", + "text": "text", }, ) assert_matches_type(SqlGenerationResponse, sql_generation, path=["response"]) @@ -35,14 +35,14 @@ def test_method_create(self, client: Dataherald) -> None: def test_method_create_with_all_params(self, client: Dataherald) -> None: sql_generation = client.sql_generations.create( prompt={ - "text": "string", - "db_connection_id": "string", + "db_connection_id": "db_connection_id", + "text": "text", "metadata": {}, }, evaluate=True, - finetuning_id="string", + finetuning_id="finetuning_id", metadata={}, - sql="string", + sql="sql", ) assert_matches_type(SqlGenerationResponse, sql_generation, path=["response"]) @@ -50,8 +50,8 @@ def test_method_create_with_all_params(self, client: Dataherald) -> None: def test_raw_response_create(self, client: Dataherald) -> None: response = client.sql_generations.with_raw_response.create( prompt={ - "text": "string", - "db_connection_id": "string", + "db_connection_id": "db_connection_id", + "text": "text", }, ) @@ -64,8 +64,8 @@ def test_raw_response_create(self, client: Dataherald) -> None: def test_streaming_response_create(self, client: Dataherald) -> None: with client.sql_generations.with_streaming_response.create( prompt={ - "text": "string", - "db_connection_id": "string", + "db_connection_id": "db_connection_id", + "text": "text", }, ) as response: assert not response.is_closed @@ -79,14 +79,14 @@ def test_streaming_response_create(self, client: Dataherald) -> None: @parametrize def test_method_retrieve(self, client: Dataherald) -> None: sql_generation = client.sql_generations.retrieve( - "string", + "id", ) assert_matches_type(SqlGenerationResponse, sql_generation, path=["response"]) @parametrize def test_raw_response_retrieve(self, client: Dataherald) -> None: response = client.sql_generations.with_raw_response.retrieve( - "string", + "id", ) assert response.is_closed is True @@ -97,7 +97,7 @@ def test_raw_response_retrieve(self, client: Dataherald) -> None: @parametrize def test_streaming_response_retrieve(self, client: Dataherald) -> None: with client.sql_generations.with_streaming_response.retrieve( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -123,7 +123,7 @@ def test_method_list(self, client: Dataherald) -> None: def test_method_list_with_all_params(self, client: Dataherald) -> None: sql_generation = client.sql_generations.list( ascend=True, - order="string", + order="order", page=0, page_size=0, ) @@ -152,14 +152,14 @@ def test_streaming_response_list(self, client: Dataherald) -> None: @parametrize def test_method_execute(self, client: Dataherald) -> None: sql_generation = client.sql_generations.execute( - "string", + id="id", ) assert_matches_type(SqlGenerationExecuteResponse, sql_generation, path=["response"]) @parametrize def test_method_execute_with_all_params(self, client: Dataherald) -> None: sql_generation = client.sql_generations.execute( - 
"string", + id="id", max_rows=0, ) assert_matches_type(SqlGenerationExecuteResponse, sql_generation, path=["response"]) @@ -167,7 +167,7 @@ def test_method_execute_with_all_params(self, client: Dataherald) -> None: @parametrize def test_raw_response_execute(self, client: Dataherald) -> None: response = client.sql_generations.with_raw_response.execute( - "string", + id="id", ) assert response.is_closed is True @@ -178,7 +178,7 @@ def test_raw_response_execute(self, client: Dataherald) -> None: @parametrize def test_streaming_response_execute(self, client: Dataherald) -> None: with client.sql_generations.with_streaming_response.execute( - "string", + id="id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -192,7 +192,7 @@ def test_streaming_response_execute(self, client: Dataherald) -> None: def test_path_params_execute(self, client: Dataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): client.sql_generations.with_raw_response.execute( - "", + id="", ) @@ -203,8 +203,8 @@ class TestAsyncSqlGenerations: async def test_method_create(self, async_client: AsyncDataherald) -> None: sql_generation = await async_client.sql_generations.create( prompt={ - "text": "string", - "db_connection_id": "string", + "db_connection_id": "db_connection_id", + "text": "text", }, ) assert_matches_type(SqlGenerationResponse, sql_generation, path=["response"]) @@ -213,14 +213,14 @@ async def test_method_create(self, async_client: AsyncDataherald) -> None: async def test_method_create_with_all_params(self, async_client: AsyncDataherald) -> None: sql_generation = await async_client.sql_generations.create( prompt={ - "text": "string", - "db_connection_id": "string", + "db_connection_id": "db_connection_id", + "text": "text", "metadata": {}, }, evaluate=True, - finetuning_id="string", + finetuning_id="finetuning_id", metadata={}, - sql="string", + sql="sql", ) assert_matches_type(SqlGenerationResponse, sql_generation, path=["response"]) @@ -228,8 +228,8 @@ async def test_method_create_with_all_params(self, async_client: AsyncDataherald async def test_raw_response_create(self, async_client: AsyncDataherald) -> None: response = await async_client.sql_generations.with_raw_response.create( prompt={ - "text": "string", - "db_connection_id": "string", + "db_connection_id": "db_connection_id", + "text": "text", }, ) @@ -242,8 +242,8 @@ async def test_raw_response_create(self, async_client: AsyncDataherald) -> None: async def test_streaming_response_create(self, async_client: AsyncDataherald) -> None: async with async_client.sql_generations.with_streaming_response.create( prompt={ - "text": "string", - "db_connection_id": "string", + "db_connection_id": "db_connection_id", + "text": "text", }, ) as response: assert not response.is_closed @@ -257,14 +257,14 @@ async def test_streaming_response_create(self, async_client: AsyncDataherald) -> @parametrize async def test_method_retrieve(self, async_client: AsyncDataherald) -> None: sql_generation = await async_client.sql_generations.retrieve( - "string", + "id", ) assert_matches_type(SqlGenerationResponse, sql_generation, path=["response"]) @parametrize async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> None: response = await async_client.sql_generations.with_raw_response.retrieve( - "string", + "id", ) assert response.is_closed is True @@ -275,7 +275,7 @@ async def test_raw_response_retrieve(self, async_client: 
AsyncDataherald) -> Non @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncDataherald) -> None: async with async_client.sql_generations.with_streaming_response.retrieve( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -301,7 +301,7 @@ async def test_method_list(self, async_client: AsyncDataherald) -> None: async def test_method_list_with_all_params(self, async_client: AsyncDataherald) -> None: sql_generation = await async_client.sql_generations.list( ascend=True, - order="string", + order="order", page=0, page_size=0, ) @@ -330,14 +330,14 @@ async def test_streaming_response_list(self, async_client: AsyncDataherald) -> N @parametrize async def test_method_execute(self, async_client: AsyncDataherald) -> None: sql_generation = await async_client.sql_generations.execute( - "string", + id="id", ) assert_matches_type(SqlGenerationExecuteResponse, sql_generation, path=["response"]) @parametrize async def test_method_execute_with_all_params(self, async_client: AsyncDataherald) -> None: sql_generation = await async_client.sql_generations.execute( - "string", + id="id", max_rows=0, ) assert_matches_type(SqlGenerationExecuteResponse, sql_generation, path=["response"]) @@ -345,7 +345,7 @@ async def test_method_execute_with_all_params(self, async_client: AsyncDataheral @parametrize async def test_raw_response_execute(self, async_client: AsyncDataherald) -> None: response = await async_client.sql_generations.with_raw_response.execute( - "string", + id="id", ) assert response.is_closed is True @@ -356,7 +356,7 @@ async def test_raw_response_execute(self, async_client: AsyncDataherald) -> None @parametrize async def test_streaming_response_execute(self, async_client: AsyncDataherald) -> None: async with async_client.sql_generations.with_streaming_response.execute( - "string", + id="id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -370,5 +370,5 @@ async def test_streaming_response_execute(self, async_client: AsyncDataherald) - async def test_path_params_execute(self, async_client: AsyncDataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): await async_client.sql_generations.with_raw_response.execute( - "", + id="", ) diff --git a/tests/api_resources/test_table_descriptions.py b/tests/api_resources/test_table_descriptions.py index 38765a9..88ab64e 100644 --- a/tests/api_resources/test_table_descriptions.py +++ b/tests/api_resources/test_table_descriptions.py @@ -24,14 +24,14 @@ class TestTableDescriptions: @parametrize def test_method_retrieve(self, client: Dataherald) -> None: table_description = client.table_descriptions.retrieve( - "string", + "id", ) assert_matches_type(TableDescriptionResponse, table_description, path=["response"]) @parametrize def test_raw_response_retrieve(self, client: Dataherald) -> None: response = client.table_descriptions.with_raw_response.retrieve( - "string", + "id", ) assert response.is_closed is True @@ -42,7 +42,7 @@ def test_raw_response_retrieve(self, client: Dataherald) -> None: @parametrize def test_streaming_response_retrieve(self, client: Dataherald) -> None: with client.table_descriptions.with_streaming_response.retrieve( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -62,44 +62,44 @@ def 
test_path_params_retrieve(self, client: Dataherald) -> None: @parametrize def test_method_update(self, client: Dataherald) -> None: table_description = client.table_descriptions.update( - "string", + id="id", ) assert_matches_type(TableDescriptionResponse, table_description, path=["response"]) @parametrize def test_method_update_with_all_params(self, client: Dataherald) -> None: table_description = client.table_descriptions.update( - "string", + id="id", columns=[ { - "name": "string", - "description": "string", - "is_primary_key": True, - "data_type": "string", - "low_cardinality": True, "categories": ["string", "string", "string"], + "data_type": "data_type", + "description": "description", "foreign_key": {}, - }, - { - "name": "string", - "description": "string", "is_primary_key": True, - "data_type": "string", "low_cardinality": True, - "categories": ["string", "string", "string"], - "foreign_key": {}, + "name": "name", }, { - "name": "string", - "description": "string", + "categories": ["string", "string", "string"], + "data_type": "data_type", + "description": "description", + "foreign_key": {}, "is_primary_key": True, - "data_type": "string", "low_cardinality": True, + "name": "name", + }, + { "categories": ["string", "string", "string"], + "data_type": "data_type", + "description": "description", "foreign_key": {}, + "is_primary_key": True, + "low_cardinality": True, + "name": "name", }, ], - description="string", + description="description", examples=[{}, {}, {}], metadata={}, ) @@ -108,7 +108,7 @@ def test_method_update_with_all_params(self, client: Dataherald) -> None: @parametrize def test_raw_response_update(self, client: Dataherald) -> None: response = client.table_descriptions.with_raw_response.update( - "string", + id="id", ) assert response.is_closed is True @@ -119,7 +119,7 @@ def test_raw_response_update(self, client: Dataherald) -> None: @parametrize def test_streaming_response_update(self, client: Dataherald) -> None: with client.table_descriptions.with_streaming_response.update( - "string", + id="id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -133,28 +133,28 @@ def test_streaming_response_update(self, client: Dataherald) -> None: def test_path_params_update(self, client: Dataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): client.table_descriptions.with_raw_response.update( - "", + id="", ) @parametrize def test_method_list(self, client: Dataherald) -> None: table_description = client.table_descriptions.list( - db_connection_id="string", + db_connection_id="db_connection_id", ) assert_matches_type(TableDescriptionListResponse, table_description, path=["response"]) @parametrize def test_method_list_with_all_params(self, client: Dataherald) -> None: table_description = client.table_descriptions.list( - db_connection_id="string", - table_name="string", + db_connection_id="db_connection_id", + table_name="table_name", ) assert_matches_type(TableDescriptionListResponse, table_description, path=["response"]) @parametrize def test_raw_response_list(self, client: Dataherald) -> None: response = client.table_descriptions.with_raw_response.list( - db_connection_id="string", + db_connection_id="db_connection_id", ) assert response.is_closed is True @@ -165,7 +165,7 @@ def test_raw_response_list(self, client: Dataherald) -> None: @parametrize def test_streaming_response_list(self, client: Dataherald) -> None: with 
client.table_descriptions.with_streaming_response.list( - db_connection_id="string", + db_connection_id="db_connection_id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -178,14 +178,22 @@ def test_streaming_response_list(self, client: Dataherald) -> None: @parametrize def test_method_sync_schemas(self, client: Dataherald) -> None: table_description = client.table_descriptions.sync_schemas( - body=[{"db_connection_id": "string"}, {"db_connection_id": "string"}, {"db_connection_id": "string"}], + body=[ + {"db_connection_id": "db_connection_id"}, + {"db_connection_id": "db_connection_id"}, + {"db_connection_id": "db_connection_id"}, + ], ) assert_matches_type(TableDescriptionSyncSchemasResponse, table_description, path=["response"]) @parametrize def test_raw_response_sync_schemas(self, client: Dataherald) -> None: response = client.table_descriptions.with_raw_response.sync_schemas( - body=[{"db_connection_id": "string"}, {"db_connection_id": "string"}, {"db_connection_id": "string"}], + body=[ + {"db_connection_id": "db_connection_id"}, + {"db_connection_id": "db_connection_id"}, + {"db_connection_id": "db_connection_id"}, + ], ) assert response.is_closed is True @@ -196,7 +204,11 @@ def test_raw_response_sync_schemas(self, client: Dataherald) -> None: @parametrize def test_streaming_response_sync_schemas(self, client: Dataherald) -> None: with client.table_descriptions.with_streaming_response.sync_schemas( - body=[{"db_connection_id": "string"}, {"db_connection_id": "string"}, {"db_connection_id": "string"}], + body=[ + {"db_connection_id": "db_connection_id"}, + {"db_connection_id": "db_connection_id"}, + {"db_connection_id": "db_connection_id"}, + ], ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -213,14 +225,14 @@ class TestAsyncTableDescriptions: @parametrize async def test_method_retrieve(self, async_client: AsyncDataherald) -> None: table_description = await async_client.table_descriptions.retrieve( - "string", + "id", ) assert_matches_type(TableDescriptionResponse, table_description, path=["response"]) @parametrize async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> None: response = await async_client.table_descriptions.with_raw_response.retrieve( - "string", + "id", ) assert response.is_closed is True @@ -231,7 +243,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncDataherald) -> Non @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncDataherald) -> None: async with async_client.table_descriptions.with_streaming_response.retrieve( - "string", + "id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -251,44 +263,44 @@ async def test_path_params_retrieve(self, async_client: AsyncDataherald) -> None @parametrize async def test_method_update(self, async_client: AsyncDataherald) -> None: table_description = await async_client.table_descriptions.update( - "string", + id="id", ) assert_matches_type(TableDescriptionResponse, table_description, path=["response"]) @parametrize async def test_method_update_with_all_params(self, async_client: AsyncDataherald) -> None: table_description = await async_client.table_descriptions.update( - "string", + id="id", columns=[ { - "name": "string", - "description": "string", - "is_primary_key": True, - "data_type": "string", - "low_cardinality": True, "categories": 
["string", "string", "string"], + "data_type": "data_type", + "description": "description", "foreign_key": {}, - }, - { - "name": "string", - "description": "string", "is_primary_key": True, - "data_type": "string", "low_cardinality": True, - "categories": ["string", "string", "string"], - "foreign_key": {}, + "name": "name", }, { - "name": "string", - "description": "string", + "categories": ["string", "string", "string"], + "data_type": "data_type", + "description": "description", + "foreign_key": {}, "is_primary_key": True, - "data_type": "string", "low_cardinality": True, + "name": "name", + }, + { "categories": ["string", "string", "string"], + "data_type": "data_type", + "description": "description", "foreign_key": {}, + "is_primary_key": True, + "low_cardinality": True, + "name": "name", }, ], - description="string", + description="description", examples=[{}, {}, {}], metadata={}, ) @@ -297,7 +309,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncDataherald @parametrize async def test_raw_response_update(self, async_client: AsyncDataherald) -> None: response = await async_client.table_descriptions.with_raw_response.update( - "string", + id="id", ) assert response.is_closed is True @@ -308,7 +320,7 @@ async def test_raw_response_update(self, async_client: AsyncDataherald) -> None: @parametrize async def test_streaming_response_update(self, async_client: AsyncDataherald) -> None: async with async_client.table_descriptions.with_streaming_response.update( - "string", + id="id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -322,28 +334,28 @@ async def test_streaming_response_update(self, async_client: AsyncDataherald) -> async def test_path_params_update(self, async_client: AsyncDataherald) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"): await async_client.table_descriptions.with_raw_response.update( - "", + id="", ) @parametrize async def test_method_list(self, async_client: AsyncDataherald) -> None: table_description = await async_client.table_descriptions.list( - db_connection_id="string", + db_connection_id="db_connection_id", ) assert_matches_type(TableDescriptionListResponse, table_description, path=["response"]) @parametrize async def test_method_list_with_all_params(self, async_client: AsyncDataherald) -> None: table_description = await async_client.table_descriptions.list( - db_connection_id="string", - table_name="string", + db_connection_id="db_connection_id", + table_name="table_name", ) assert_matches_type(TableDescriptionListResponse, table_description, path=["response"]) @parametrize async def test_raw_response_list(self, async_client: AsyncDataherald) -> None: response = await async_client.table_descriptions.with_raw_response.list( - db_connection_id="string", + db_connection_id="db_connection_id", ) assert response.is_closed is True @@ -354,7 +366,7 @@ async def test_raw_response_list(self, async_client: AsyncDataherald) -> None: @parametrize async def test_streaming_response_list(self, async_client: AsyncDataherald) -> None: async with async_client.table_descriptions.with_streaming_response.list( - db_connection_id="string", + db_connection_id="db_connection_id", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -367,14 +379,22 @@ async def test_streaming_response_list(self, async_client: AsyncDataherald) -> N @parametrize async def 
test_method_sync_schemas(self, async_client: AsyncDataherald) -> None: table_description = await async_client.table_descriptions.sync_schemas( - body=[{"db_connection_id": "string"}, {"db_connection_id": "string"}, {"db_connection_id": "string"}], + body=[ + {"db_connection_id": "db_connection_id"}, + {"db_connection_id": "db_connection_id"}, + {"db_connection_id": "db_connection_id"}, + ], ) assert_matches_type(TableDescriptionSyncSchemasResponse, table_description, path=["response"]) @parametrize async def test_raw_response_sync_schemas(self, async_client: AsyncDataherald) -> None: response = await async_client.table_descriptions.with_raw_response.sync_schemas( - body=[{"db_connection_id": "string"}, {"db_connection_id": "string"}, {"db_connection_id": "string"}], + body=[ + {"db_connection_id": "db_connection_id"}, + {"db_connection_id": "db_connection_id"}, + {"db_connection_id": "db_connection_id"}, + ], ) assert response.is_closed is True @@ -385,7 +405,11 @@ async def test_raw_response_sync_schemas(self, async_client: AsyncDataherald) -> @parametrize async def test_streaming_response_sync_schemas(self, async_client: AsyncDataherald) -> None: async with async_client.table_descriptions.with_streaming_response.sync_schemas( - body=[{"db_connection_id": "string"}, {"db_connection_id": "string"}, {"db_connection_id": "string"}], + body=[ + {"db_connection_id": "db_connection_id"}, + {"db_connection_id": "db_connection_id"}, + {"db_connection_id": "db_connection_id"}, + ], ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" diff --git a/tests/conftest.py b/tests/conftest.py index 1455033..52f2afc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,11 +1,11 @@ from __future__ import annotations import os -import asyncio import logging from typing import TYPE_CHECKING, Iterator, AsyncIterator import pytest +from pytest_asyncio import is_async_test from dataherald import Dataherald, AsyncDataherald @@ -17,11 +17,13 @@ logging.getLogger("dataherald").setLevel(logging.DEBUG) -@pytest.fixture(scope="session") -def event_loop() -> Iterator[asyncio.AbstractEventLoop]: - loop = asyncio.new_event_loop() - yield loop - loop.close() +# automatically add `pytest.mark.asyncio()` to all of our async tests +# so we don't have to add that boilerplate everywhere +def pytest_collection_modifyitems(items: list[pytest.Function]) -> None: + pytest_asyncio_tests = (item for item in items if is_async_test(item)) + session_scope_marker = pytest.mark.asyncio(loop_scope="session") + for async_test in pytest_asyncio_tests: + async_test.add_marker(session_scope_marker, append=False) base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") diff --git a/tests/test_client.py b/tests/test_client.py index ef4bda3..ded0989 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -10,6 +10,7 @@ import tracemalloc from typing import Any, Union, cast from unittest import mock +from typing_extensions import Literal import httpx import pytest @@ -17,6 +18,7 @@ from pydantic import ValidationError from dataherald import Dataherald, AsyncDataherald, APIResponseValidationError +from dataherald._types import Omit from dataherald._models import BaseModel, FinalRequestOptions from dataherald._constants import RAW_RESPONSE_HEADER from dataherald._exceptions import APIStatusError, APITimeoutError, APIResponseValidationError @@ -699,6 +701,7 @@ class Model(BaseModel): [3, "", 0.5], [2, "", 0.5 * 2.0], [1, "", 0.5 * 4.0], + [-1100, "", 7.8], 
# test large number potentially overflowing ], ) @mock.patch("time.time", mock.MagicMock(return_value=1696004797)) @@ -718,7 +721,7 @@ def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter) -> No with pytest.raises(APITimeoutError): self.client.post( "/api/database-connections", - body=cast(object, dict(alias="string", connection_uri="string")), + body=cast(object, dict(alias="alias", connection_uri="connection_uri")), cast_to=httpx.Response, options={"headers": {RAW_RESPONSE_HEADER: "stream"}}, ) @@ -733,13 +736,94 @@ def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter) -> Non with pytest.raises(APIStatusError): self.client.post( "/api/database-connections", - body=cast(object, dict(alias="string", connection_uri="string")), + body=cast(object, dict(alias="alias", connection_uri="connection_uri")), cast_to=httpx.Response, options={"headers": {RAW_RESPONSE_HEADER: "stream"}}, ) assert _get_open_connections(self.client) == 0 + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("dataherald._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + @pytest.mark.parametrize("failure_mode", ["status", "exception"]) + def test_retries_taken( + self, + client: Dataherald, + failures_before_success: int, + failure_mode: Literal["status", "exception"], + respx_mock: MockRouter, + ) -> None: + client = client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + if failure_mode == "exception": + raise RuntimeError("oops") + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.post("/api/database-connections").mock(side_effect=retry_handler) + + response = client.database_connections.with_raw_response.create(alias="alias", connection_uri="connection_uri") + + assert response.retries_taken == failures_before_success + assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("dataherald._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + def test_omit_retry_count_header( + self, client: Dataherald, failures_before_success: int, respx_mock: MockRouter + ) -> None: + client = client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.post("/api/database-connections").mock(side_effect=retry_handler) + + response = client.database_connections.with_raw_response.create( + alias="alias", connection_uri="connection_uri", extra_headers={"x-stainless-retry-count": Omit()} + ) + + assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0 + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("dataherald._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + def test_overwrite_retry_count_header( + self, client: Dataherald, failures_before_success: int, respx_mock: MockRouter + ) -> None: + client = client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal 
nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.post("/api/database-connections").mock(side_effect=retry_handler) + + response = client.database_connections.with_raw_response.create( + alias="alias", connection_uri="connection_uri", extra_headers={"x-stainless-retry-count": "42"} + ) + + assert response.http_request.headers.get("x-stainless-retry-count") == "42" + class TestAsyncDataherald: client = AsyncDataherald(base_url=base_url, api_key=api_key, _strict_response_validation=True) @@ -1396,6 +1480,7 @@ class Model(BaseModel): [3, "", 0.5], [2, "", 0.5 * 2.0], [1, "", 0.5 * 4.0], + [-1100, "", 7.8], # test large number potentially overflowing ], ) @mock.patch("time.time", mock.MagicMock(return_value=1696004797)) @@ -1416,7 +1501,7 @@ async def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter) with pytest.raises(APITimeoutError): await self.client.post( "/api/database-connections", - body=cast(object, dict(alias="string", connection_uri="string")), + body=cast(object, dict(alias="alias", connection_uri="connection_uri")), cast_to=httpx.Response, options={"headers": {RAW_RESPONSE_HEADER: "stream"}}, ) @@ -1431,9 +1516,95 @@ async def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter) with pytest.raises(APIStatusError): await self.client.post( "/api/database-connections", - body=cast(object, dict(alias="string", connection_uri="string")), + body=cast(object, dict(alias="alias", connection_uri="connection_uri")), cast_to=httpx.Response, options={"headers": {RAW_RESPONSE_HEADER: "stream"}}, ) assert _get_open_connections(self.client) == 0 + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("dataherald._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + @pytest.mark.asyncio + @pytest.mark.parametrize("failure_mode", ["status", "exception"]) + async def test_retries_taken( + self, + async_client: AsyncDataherald, + failures_before_success: int, + failure_mode: Literal["status", "exception"], + respx_mock: MockRouter, + ) -> None: + client = async_client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + if failure_mode == "exception": + raise RuntimeError("oops") + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.post("/api/database-connections").mock(side_effect=retry_handler) + + response = await client.database_connections.with_raw_response.create( + alias="alias", connection_uri="connection_uri" + ) + + assert response.retries_taken == failures_before_success + assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("dataherald._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + @pytest.mark.asyncio + async def test_omit_retry_count_header( + self, async_client: AsyncDataherald, failures_before_success: int, respx_mock: MockRouter + ) -> None: + client = async_client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + return httpx.Response(500) + return httpx.Response(200) + + 
respx_mock.post("/api/database-connections").mock(side_effect=retry_handler) + + response = await client.database_connections.with_raw_response.create( + alias="alias", connection_uri="connection_uri", extra_headers={"x-stainless-retry-count": Omit()} + ) + + assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0 + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("dataherald._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + @pytest.mark.asyncio + async def test_overwrite_retry_count_header( + self, async_client: AsyncDataherald, failures_before_success: int, respx_mock: MockRouter + ) -> None: + client = async_client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.post("/api/database-connections").mock(side_effect=retry_handler) + + response = await client.database_connections.with_raw_response.create( + alias="alias", connection_uri="connection_uri", extra_headers={"x-stainless-retry-count": "42"} + ) + + assert response.http_request.headers.get("x-stainless-retry-count") == "42" diff --git a/tests/test_deepcopy.py b/tests/test_deepcopy.py index dfefa7b..4937261 100644 --- a/tests/test_deepcopy.py +++ b/tests/test_deepcopy.py @@ -41,8 +41,7 @@ def test_nested_list() -> None: assert_different_identities(obj1[1], obj2[1]) -class MyObject: - ... +class MyObject: ... def test_ignores_other_types() -> None: diff --git a/tests/test_models.py b/tests/test_models.py index ef3c150..0246894 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -245,7 +245,7 @@ class Model(BaseModel): assert m.foo is True m = Model.construct(foo="CARD_HOLDER") - assert m.foo is "CARD_HOLDER" + assert m.foo == "CARD_HOLDER" m = Model.construct(foo={"bar": False}) assert isinstance(m.foo, Submodel1) @@ -520,19 +520,15 @@ class Model(BaseModel): assert m3.to_dict(exclude_none=True) == {} assert m3.to_dict(exclude_defaults=True) == {} - if PYDANTIC_V2: - - class Model2(BaseModel): - created_at: datetime + class Model2(BaseModel): + created_at: datetime - time_str = "2024-03-21T11:39:01.275859" - m4 = Model2.construct(created_at=time_str) - assert m4.to_dict(mode="python") == {"created_at": datetime.fromisoformat(time_str)} - assert m4.to_dict(mode="json") == {"created_at": time_str} - else: - with pytest.raises(ValueError, match="mode is only supported in Pydantic v2"): - m.to_dict(mode="json") + time_str = "2024-03-21T11:39:01.275859" + m4 = Model2.construct(created_at=time_str) + assert m4.to_dict(mode="python") == {"created_at": datetime.fromisoformat(time_str)} + assert m4.to_dict(mode="json") == {"created_at": time_str} + if not PYDANTIC_V2: with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"): m.to_dict(warnings=False) @@ -558,9 +554,6 @@ class Model(BaseModel): assert m3.model_dump(exclude_none=True) == {} if not PYDANTIC_V2: - with pytest.raises(ValueError, match="mode is only supported in Pydantic v2"): - m.model_dump(mode="json") - with pytest.raises(ValueError, match="round_trip is only supported in Pydantic v2"): m.model_dump(round_trip=True) diff --git a/tests/test_response.py b/tests/test_response.py index 277d954..22b5cd1 100644 --- a/tests/test_response.py +++ b/tests/test_response.py @@ -1,5 +1,5 @@ import json -from typing 
import List, cast +from typing import Any, List, Union, cast from typing_extensions import Annotated import httpx @@ -19,16 +19,13 @@ from dataherald._base_client import FinalRequestOptions -class ConcreteBaseAPIResponse(APIResponse[bytes]): - ... +class ConcreteBaseAPIResponse(APIResponse[bytes]): ... -class ConcreteAPIResponse(APIResponse[List[str]]): - ... +class ConcreteAPIResponse(APIResponse[List[str]]): ... -class ConcreteAsyncAPIResponse(APIResponse[httpx.Response]): - ... +class ConcreteAsyncAPIResponse(APIResponse[httpx.Response]): ... def test_extract_response_type_direct_classes() -> None: @@ -56,8 +53,7 @@ def test_extract_response_type_binary_response() -> None: assert extract_response_type(AsyncBinaryAPIResponse) == bytes -class PydanticModel(pydantic.BaseModel): - ... +class PydanticModel(pydantic.BaseModel): ... def test_response_parse_mismatched_basemodel(client: Dataherald) -> None: @@ -192,3 +188,90 @@ async def test_async_response_parse_annotated_type(async_client: AsyncDataherald ) assert obj.foo == "hello!" assert obj.bar == 2 + + +@pytest.mark.parametrize( + "content, expected", + [ + ("false", False), + ("true", True), + ("False", False), + ("True", True), + ("TrUe", True), + ("FalSe", False), + ], +) +def test_response_parse_bool(client: Dataherald, content: str, expected: bool) -> None: + response = APIResponse( + raw=httpx.Response(200, content=content), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + result = response.parse(to=bool) + assert result is expected + + +@pytest.mark.parametrize( + "content, expected", + [ + ("false", False), + ("true", True), + ("False", False), + ("True", True), + ("TrUe", True), + ("FalSe", False), + ], +) +async def test_async_response_parse_bool(client: AsyncDataherald, content: str, expected: bool) -> None: + response = AsyncAPIResponse( + raw=httpx.Response(200, content=content), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + result = await response.parse(to=bool) + assert result is expected + + +class OtherModel(BaseModel): + a: str + + +@pytest.mark.parametrize("client", [False], indirect=True) # loose validation +def test_response_parse_expect_model_union_non_json_content(client: Dataherald) -> None: + response = APIResponse( + raw=httpx.Response(200, content=b"foo", headers={"Content-Type": "application/text"}), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + obj = response.parse(to=cast(Any, Union[CustomModel, OtherModel])) + assert isinstance(obj, str) + assert obj == "foo" + + +@pytest.mark.asyncio +@pytest.mark.parametrize("async_client", [False], indirect=True) # loose validation +async def test_async_response_parse_expect_model_union_non_json_content(async_client: AsyncDataherald) -> None: + response = AsyncAPIResponse( + raw=httpx.Response(200, content=b"foo", headers={"Content-Type": "application/text"}), + client=async_client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + obj = await response.parse(to=cast(Any, Union[CustomModel, OtherModel])) + assert isinstance(obj, str) + assert obj == "foo" diff --git a/tests/test_transform.py b/tests/test_transform.py index 3d8a5eb..e901181 100644 --- a/tests/test_transform.py +++ b/tests/test_transform.py @@ -177,17 
+177,32 @@ class DateDict(TypedDict, total=False): foo: Annotated[date, PropertyInfo(format="iso8601")] +class DatetimeModel(BaseModel): + foo: datetime + + +class DateModel(BaseModel): + foo: Optional[date] + + @parametrize @pytest.mark.asyncio async def test_iso8601_format(use_async: bool) -> None: dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00") + tz = "Z" if PYDANTIC_V2 else "+00:00" assert await transform({"foo": dt}, DatetimeDict, use_async) == {"foo": "2023-02-23T14:16:36.337692+00:00"} # type: ignore[comparison-overlap] + assert await transform(DatetimeModel(foo=dt), Any, use_async) == {"foo": "2023-02-23T14:16:36.337692" + tz} # type: ignore[comparison-overlap] dt = dt.replace(tzinfo=None) assert await transform({"foo": dt}, DatetimeDict, use_async) == {"foo": "2023-02-23T14:16:36.337692"} # type: ignore[comparison-overlap] + assert await transform(DatetimeModel(foo=dt), Any, use_async) == {"foo": "2023-02-23T14:16:36.337692"} # type: ignore[comparison-overlap] assert await transform({"foo": None}, DateDict, use_async) == {"foo": None} # type: ignore[comparison-overlap] + assert await transform(DateModel(foo=None), Any, use_async) == {"foo": None} # type: ignore assert await transform({"foo": date.fromisoformat("2023-02-23")}, DateDict, use_async) == {"foo": "2023-02-23"} # type: ignore[comparison-overlap] + assert await transform(DateModel(foo=date.fromisoformat("2023-02-23")), DateDict, use_async) == { + "foo": "2023-02-23" + } # type: ignore[comparison-overlap] @parametrize diff --git a/tests/test_utils/test_typing.py b/tests/test_utils/test_typing.py index e330962..56995e6 100644 --- a/tests/test_utils/test_typing.py +++ b/tests/test_utils/test_typing.py @@ -9,24 +9,19 @@ _T3 = TypeVar("_T3") -class BaseGeneric(Generic[_T]): - ... +class BaseGeneric(Generic[_T]): ... -class SubclassGeneric(BaseGeneric[_T]): - ... +class SubclassGeneric(BaseGeneric[_T]): ... -class BaseGenericMultipleTypeArgs(Generic[_T, _T2, _T3]): - ... +class BaseGenericMultipleTypeArgs(Generic[_T, _T2, _T3]): ... -class SubclassGenericMultipleTypeArgs(BaseGenericMultipleTypeArgs[_T, _T2, _T3]): - ... +class SubclassGenericMultipleTypeArgs(BaseGenericMultipleTypeArgs[_T, _T2, _T3]): ... -class SubclassDifferentOrderGenericMultipleTypeArgs(BaseGenericMultipleTypeArgs[_T2, _T, _T3]): - ... +class SubclassDifferentOrderGenericMultipleTypeArgs(BaseGenericMultipleTypeArgs[_T2, _T, _T3]): ... 
def test_extract_type_var() -> None: diff --git a/tests/utils.py b/tests/utils.py index dad87d7..42e498f 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -8,7 +8,7 @@ from datetime import date, datetime from typing_extensions import Literal, get_args, get_origin, assert_type -from dataherald._types import NoneType +from dataherald._types import Omit, NoneType from dataherald._utils import ( is_dict, is_list, @@ -139,11 +139,15 @@ def _assert_list_type(type_: type[object], value: object) -> None: @contextlib.contextmanager -def update_env(**new_env: str) -> Iterator[None]: +def update_env(**new_env: str | Omit) -> Iterator[None]: old = os.environ.copy() try: - os.environ.update(new_env) + for name, value in new_env.items(): + if isinstance(value, Omit): + os.environ.pop(name, None) + else: + os.environ[name] = value yield None finally: From eaf13302cfbaf6c7ed61717be1ce9359d716cbdf Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 6 Nov 2024 15:51:38 +0000 Subject: [PATCH 03/39] chore: rebuild project due to codegen change (#57) --- tests/test_client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_client.py b/tests/test_client.py index ded0989..bdd297c 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -701,7 +701,7 @@ class Model(BaseModel): [3, "", 0.5], [2, "", 0.5 * 2.0], [1, "", 0.5 * 4.0], - [-1100, "", 7.8], # test large number potentially overflowing + [-1100, "", 8], # test large number potentially overflowing ], ) @mock.patch("time.time", mock.MagicMock(return_value=1696004797)) @@ -1480,7 +1480,7 @@ class Model(BaseModel): [3, "", 0.5], [2, "", 0.5 * 2.0], [1, "", 0.5 * 4.0], - [-1100, "", 7.8], # test large number potentially overflowing + [-1100, "", 8], # test large number potentially overflowing ], ) @mock.patch("time.time", mock.MagicMock(return_value=1696004797)) From 2e4560f29d5e27c88dbcfd80aaa84fca1e64133b Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 12:11:04 +0000 Subject: [PATCH 04/39] chore: rebuild project due to codegen change (#58) --- src/dataherald/_utils/_transform.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/dataherald/_utils/_transform.py b/src/dataherald/_utils/_transform.py index d7c0534..a6b62ca 100644 --- a/src/dataherald/_utils/_transform.py +++ b/src/dataherald/_utils/_transform.py @@ -316,6 +316,11 @@ async def _async_transform_recursive( # Iterable[T] or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) ): + # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually + # intended as an iterable, so we don't transform it. 
+ if isinstance(data, dict): + return cast(object, data) + inner_type = extract_type_arg(stripped_type, 0) return [await _async_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data] From 7728427bca7e453c0897e599c2cb6e80b558e80e Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 18 Nov 2024 10:29:16 +0000 Subject: [PATCH 05/39] chore: rebuild project due to codegen change (#59) --- tests/api_resources/test_finetunings.py | 4 +- tests/api_resources/test_golden_sqls.py | 72 ++-------------- .../api_resources/test_table_descriptions.py | 84 +++---------------- 3 files changed, 20 insertions(+), 140 deletions(-) diff --git a/tests/api_resources/test_finetunings.py b/tests/api_resources/test_finetunings.py index 36728f7..8a67eb4 100644 --- a/tests/api_resources/test_finetunings.py +++ b/tests/api_resources/test_finetunings.py @@ -39,7 +39,7 @@ def test_method_create_with_all_params(self, client: Dataherald) -> None: "model_provider": "model_provider", }, db_connection_id="db_connection_id", - golden_sqls=["string", "string", "string"], + golden_sqls=["string"], metadata={}, ) assert_matches_type(FinetuningResponse, finetuning, path=["response"]) @@ -202,7 +202,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncDataherald "model_provider": "model_provider", }, db_connection_id="db_connection_id", - golden_sqls=["string", "string", "string"], + golden_sqls=["string"], metadata={}, ) assert_matches_type(FinetuningResponse, finetuning, path=["response"]) diff --git a/tests/api_resources/test_golden_sqls.py b/tests/api_resources/test_golden_sqls.py index ce37aa4..9c7f3f5 100644 --- a/tests/api_resources/test_golden_sqls.py +++ b/tests/api_resources/test_golden_sqls.py @@ -141,17 +141,7 @@ def test_method_upload(self, client: Dataherald) -> None: "db_connection_id": "db_connection_id", "prompt_text": "prompt_text", "sql": "sql", - }, - { - "db_connection_id": "db_connection_id", - "prompt_text": "prompt_text", - "sql": "sql", - }, - { - "db_connection_id": "db_connection_id", - "prompt_text": "prompt_text", - "sql": "sql", - }, + } ], ) assert_matches_type(GoldenSqlUploadResponse, golden_sql, path=["response"]) @@ -164,17 +154,7 @@ def test_raw_response_upload(self, client: Dataherald) -> None: "db_connection_id": "db_connection_id", "prompt_text": "prompt_text", "sql": "sql", - }, - { - "db_connection_id": "db_connection_id", - "prompt_text": "prompt_text", - "sql": "sql", - }, - { - "db_connection_id": "db_connection_id", - "prompt_text": "prompt_text", - "sql": "sql", - }, + } ], ) @@ -191,17 +171,7 @@ def test_streaming_response_upload(self, client: Dataherald) -> None: "db_connection_id": "db_connection_id", "prompt_text": "prompt_text", "sql": "sql", - }, - { - "db_connection_id": "db_connection_id", - "prompt_text": "prompt_text", - "sql": "sql", - }, - { - "db_connection_id": "db_connection_id", - "prompt_text": "prompt_text", - "sql": "sql", - }, + } ], ) as response: assert not response.is_closed @@ -336,17 +306,7 @@ async def test_method_upload(self, async_client: AsyncDataherald) -> None: "db_connection_id": "db_connection_id", "prompt_text": "prompt_text", "sql": "sql", - }, - { - "db_connection_id": "db_connection_id", - "prompt_text": "prompt_text", - "sql": "sql", - }, - { - "db_connection_id": "db_connection_id", - "prompt_text": "prompt_text", - "sql": "sql", - }, + } ], ) assert_matches_type(GoldenSqlUploadResponse, golden_sql, path=["response"]) @@ -359,17 
+319,7 @@ async def test_raw_response_upload(self, async_client: AsyncDataherald) -> None: "db_connection_id": "db_connection_id", "prompt_text": "prompt_text", "sql": "sql", - }, - { - "db_connection_id": "db_connection_id", - "prompt_text": "prompt_text", - "sql": "sql", - }, - { - "db_connection_id": "db_connection_id", - "prompt_text": "prompt_text", - "sql": "sql", - }, + } ], ) @@ -386,17 +336,7 @@ async def test_streaming_response_upload(self, async_client: AsyncDataherald) -> "db_connection_id": "db_connection_id", "prompt_text": "prompt_text", "sql": "sql", - }, - { - "db_connection_id": "db_connection_id", - "prompt_text": "prompt_text", - "sql": "sql", - }, - { - "db_connection_id": "db_connection_id", - "prompt_text": "prompt_text", - "sql": "sql", - }, + } ], ) as response: assert not response.is_closed diff --git a/tests/api_resources/test_table_descriptions.py b/tests/api_resources/test_table_descriptions.py index 88ab64e..2f761e7 100644 --- a/tests/api_resources/test_table_descriptions.py +++ b/tests/api_resources/test_table_descriptions.py @@ -72,35 +72,17 @@ def test_method_update_with_all_params(self, client: Dataherald) -> None: id="id", columns=[ { - "categories": ["string", "string", "string"], + "categories": ["string"], "data_type": "data_type", "description": "description", "foreign_key": {}, "is_primary_key": True, "low_cardinality": True, "name": "name", - }, - { - "categories": ["string", "string", "string"], - "data_type": "data_type", - "description": "description", - "foreign_key": {}, - "is_primary_key": True, - "low_cardinality": True, - "name": "name", - }, - { - "categories": ["string", "string", "string"], - "data_type": "data_type", - "description": "description", - "foreign_key": {}, - "is_primary_key": True, - "low_cardinality": True, - "name": "name", - }, + } ], description="description", - examples=[{}, {}, {}], + examples=[{}], metadata={}, ) assert_matches_type(TableDescriptionResponse, table_description, path=["response"]) @@ -178,22 +160,14 @@ def test_streaming_response_list(self, client: Dataherald) -> None: @parametrize def test_method_sync_schemas(self, client: Dataherald) -> None: table_description = client.table_descriptions.sync_schemas( - body=[ - {"db_connection_id": "db_connection_id"}, - {"db_connection_id": "db_connection_id"}, - {"db_connection_id": "db_connection_id"}, - ], + body=[{"db_connection_id": "db_connection_id"}], ) assert_matches_type(TableDescriptionSyncSchemasResponse, table_description, path=["response"]) @parametrize def test_raw_response_sync_schemas(self, client: Dataherald) -> None: response = client.table_descriptions.with_raw_response.sync_schemas( - body=[ - {"db_connection_id": "db_connection_id"}, - {"db_connection_id": "db_connection_id"}, - {"db_connection_id": "db_connection_id"}, - ], + body=[{"db_connection_id": "db_connection_id"}], ) assert response.is_closed is True @@ -204,11 +178,7 @@ def test_raw_response_sync_schemas(self, client: Dataherald) -> None: @parametrize def test_streaming_response_sync_schemas(self, client: Dataherald) -> None: with client.table_descriptions.with_streaming_response.sync_schemas( - body=[ - {"db_connection_id": "db_connection_id"}, - {"db_connection_id": "db_connection_id"}, - {"db_connection_id": "db_connection_id"}, - ], + body=[{"db_connection_id": "db_connection_id"}], ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -273,35 +243,17 @@ async def test_method_update_with_all_params(self, 
async_client: AsyncDataherald id="id", columns=[ { - "categories": ["string", "string", "string"], + "categories": ["string"], "data_type": "data_type", "description": "description", "foreign_key": {}, "is_primary_key": True, "low_cardinality": True, "name": "name", - }, - { - "categories": ["string", "string", "string"], - "data_type": "data_type", - "description": "description", - "foreign_key": {}, - "is_primary_key": True, - "low_cardinality": True, - "name": "name", - }, - { - "categories": ["string", "string", "string"], - "data_type": "data_type", - "description": "description", - "foreign_key": {}, - "is_primary_key": True, - "low_cardinality": True, - "name": "name", - }, + } ], description="description", - examples=[{}, {}, {}], + examples=[{}], metadata={}, ) assert_matches_type(TableDescriptionResponse, table_description, path=["response"]) @@ -379,22 +331,14 @@ async def test_streaming_response_list(self, async_client: AsyncDataherald) -> N @parametrize async def test_method_sync_schemas(self, async_client: AsyncDataherald) -> None: table_description = await async_client.table_descriptions.sync_schemas( - body=[ - {"db_connection_id": "db_connection_id"}, - {"db_connection_id": "db_connection_id"}, - {"db_connection_id": "db_connection_id"}, - ], + body=[{"db_connection_id": "db_connection_id"}], ) assert_matches_type(TableDescriptionSyncSchemasResponse, table_description, path=["response"]) @parametrize async def test_raw_response_sync_schemas(self, async_client: AsyncDataherald) -> None: response = await async_client.table_descriptions.with_raw_response.sync_schemas( - body=[ - {"db_connection_id": "db_connection_id"}, - {"db_connection_id": "db_connection_id"}, - {"db_connection_id": "db_connection_id"}, - ], + body=[{"db_connection_id": "db_connection_id"}], ) assert response.is_closed is True @@ -405,11 +349,7 @@ async def test_raw_response_sync_schemas(self, async_client: AsyncDataherald) -> @parametrize async def test_streaming_response_sync_schemas(self, async_client: AsyncDataherald) -> None: async with async_client.table_descriptions.with_streaming_response.sync_schemas( - body=[ - {"db_connection_id": "db_connection_id"}, - {"db_connection_id": "db_connection_id"}, - {"db_connection_id": "db_connection_id"}, - ], + body=[{"db_connection_id": "db_connection_id"}], ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" From 6742d13cf6e40582d84e19aa085ef36f6359fb4c Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 18 Nov 2024 12:48:31 +0000 Subject: [PATCH 06/39] chore: rebuild project due to codegen change (#60) --- pyproject.toml | 1 + requirements-dev.lock | 1 + src/dataherald/_utils/_sync.py | 90 +++++++++++++++------------------- tests/test_client.py | 38 ++++++++++++++ 4 files changed, 80 insertions(+), 50 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index d1e3d28..0a872a8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,6 +55,7 @@ dev-dependencies = [ "dirty-equals>=0.6.0", "importlib-metadata>=6.7.0", "rich>=13.7.1", + "nest_asyncio==1.6.0" ] [tool.rye.scripts] diff --git a/requirements-dev.lock b/requirements-dev.lock index 6fb7f60..646708b 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -51,6 +51,7 @@ mdurl==0.1.2 mypy==1.13.0 mypy-extensions==1.0.0 # via mypy +nest-asyncio==1.6.0 nodeenv==1.8.0 # via pyright nox==2023.4.22 diff --git a/src/dataherald/_utils/_sync.py 
b/src/dataherald/_utils/_sync.py index d0d8103..8b3aaf2 100644 --- a/src/dataherald/_utils/_sync.py +++ b/src/dataherald/_utils/_sync.py @@ -1,56 +1,62 @@ from __future__ import annotations +import sys +import asyncio import functools -from typing import TypeVar, Callable, Awaitable +import contextvars +from typing import Any, TypeVar, Callable, Awaitable from typing_extensions import ParamSpec -import anyio -import anyio.to_thread - -from ._reflection import function_has_argument - T_Retval = TypeVar("T_Retval") T_ParamSpec = ParamSpec("T_ParamSpec") -# copied from `asyncer`, https://github.com/tiangolo/asyncer -def asyncify( - function: Callable[T_ParamSpec, T_Retval], - *, - cancellable: bool = False, - limiter: anyio.CapacityLimiter | None = None, -) -> Callable[T_ParamSpec, Awaitable[T_Retval]]: +if sys.version_info >= (3, 9): + to_thread = asyncio.to_thread +else: + # backport of https://docs.python.org/3/library/asyncio-task.html#asyncio.to_thread + # for Python 3.8 support + async def to_thread( + func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs + ) -> Any: + """Asynchronously run function *func* in a separate thread. + + Any *args and **kwargs supplied for this function are directly passed + to *func*. Also, the current :class:`contextvars.Context` is propagated, + allowing context variables from the main thread to be accessed in the + separate thread. + + Returns a coroutine that can be awaited to get the eventual result of *func*. + """ + loop = asyncio.events.get_running_loop() + ctx = contextvars.copy_context() + func_call = functools.partial(ctx.run, func, *args, **kwargs) + return await loop.run_in_executor(None, func_call) + + +# inspired by `asyncer`, https://github.com/tiangolo/asyncer +def asyncify(function: Callable[T_ParamSpec, T_Retval]) -> Callable[T_ParamSpec, Awaitable[T_Retval]]: """ Take a blocking function and create an async one that receives the same - positional and keyword arguments, and that when called, calls the original function - in a worker thread using `anyio.to_thread.run_sync()`. Internally, - `asyncer.asyncify()` uses the same `anyio.to_thread.run_sync()`, but it supports - keyword arguments additional to positional arguments and it adds better support for - autocompletion and inline errors for the arguments of the function called and the - return value. - - If the `cancellable` option is enabled and the task waiting for its completion is - cancelled, the thread will still run its course but its return value (or any raised - exception) will be ignored. + positional and keyword arguments. For python version 3.9 and above, it uses + asyncio.to_thread to run the function in a separate thread. For python version + 3.8, it uses locally defined copy of the asyncio.to_thread function which was + introduced in python 3.9. - Use it like this: + Usage: - ```Python - def do_work(arg1, arg2, kwarg1="", kwarg2="") -> str: - # Do work - return "Some result" + ```python + def blocking_func(arg1, arg2, kwarg1=None): + # blocking code + return result - result = await to_thread.asyncify(do_work)("spam", "ham", kwarg1="a", kwarg2="b") - print(result) + result = asyncify(blocking_function)(arg1, arg2, kwarg1=value1) ``` ## Arguments `function`: a blocking regular callable (e.g. 
a function) - `cancellable`: `True` to allow cancellation of the operation - `limiter`: capacity limiter to use to limit the total amount of threads running - (if omitted, the default limiter is used) ## Return @@ -60,22 +66,6 @@ def do_work(arg1, arg2, kwarg1="", kwarg2="") -> str: """ async def wrapper(*args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs) -> T_Retval: - partial_f = functools.partial(function, *args, **kwargs) - - # In `v4.1.0` anyio added the `abandon_on_cancel` argument and deprecated the old - # `cancellable` argument, so we need to use the new `abandon_on_cancel` to avoid - # surfacing deprecation warnings. - if function_has_argument(anyio.to_thread.run_sync, "abandon_on_cancel"): - return await anyio.to_thread.run_sync( - partial_f, - abandon_on_cancel=cancellable, - limiter=limiter, - ) - - return await anyio.to_thread.run_sync( - partial_f, - cancellable=cancellable, - limiter=limiter, - ) + return await to_thread(function, *args, **kwargs) return wrapper diff --git a/tests/test_client.py b/tests/test_client.py index bdd297c..0ab254b 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -4,11 +4,14 @@ import gc import os +import sys import json import asyncio import inspect +import subprocess import tracemalloc from typing import Any, Union, cast +from textwrap import dedent from unittest import mock from typing_extensions import Literal @@ -1608,3 +1611,38 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: ) assert response.http_request.headers.get("x-stainless-retry-count") == "42" + + def test_get_platform(self) -> None: + # A previous implementation of asyncify could leave threads unterminated when + # used with nest_asyncio. + # + # Since nest_asyncio.apply() is global and cannot be un-applied, this + # test is run in a separate process to avoid affecting other tests. 
+ test_code = dedent(""" + import asyncio + import nest_asyncio + import threading + + from dataherald._utils import asyncify + from dataherald._base_client import get_platform + + async def test_main() -> None: + result = await asyncify(get_platform)() + print(result) + for thread in threading.enumerate(): + print(thread.name) + + nest_asyncio.apply() + asyncio.run(test_main()) + """) + with subprocess.Popen( + [sys.executable, "-c", test_code], + text=True, + ) as process: + try: + process.wait(2) + if process.returncode: + raise AssertionError("calling get_platform using asyncify resulted in a non-zero exit code") + except subprocess.TimeoutExpired as e: + process.kill() + raise AssertionError("calling get_platform using asyncify resulted in a hung process") from e From 6fedf36fd30a8a1ccbcb8633f85d999f0b38ec34 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 22 Nov 2024 11:28:30 +0000 Subject: [PATCH 07/39] chore(internal): fix compat model_dump method when warnings are passed (#61) --- src/dataherald/_compat.py | 3 ++- tests/test_models.py | 8 ++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/src/dataherald/_compat.py b/src/dataherald/_compat.py index 4794129..df173f8 100644 --- a/src/dataherald/_compat.py +++ b/src/dataherald/_compat.py @@ -145,7 +145,8 @@ def model_dump( exclude=exclude, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, - warnings=warnings, + # warnings are not supported in Pydantic v1 + warnings=warnings if PYDANTIC_V2 else True, ) return cast( "dict[str, Any]", diff --git a/tests/test_models.py b/tests/test_models.py index 0246894..d8b77a6 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -561,6 +561,14 @@ class Model(BaseModel): m.model_dump(warnings=False) +def test_compat_method_no_error_for_warnings() -> None: + class Model(BaseModel): + foo: Optional[str] + + m = Model(foo="hello") + assert isinstance(model_dump(m, warnings=False), dict) + + def test_to_json() -> None: class Model(BaseModel): foo: Optional[str] = Field(alias="FOO", default=None) From 175f6b8f5e544c979425b1a4c37be9a04162ceb8 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 26 Nov 2024 05:08:28 +0000 Subject: [PATCH 08/39] docs: add info log level to readme (#62) --- README.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 89e7f5d..9e1dff7 100644 --- a/README.md +++ b/README.md @@ -183,12 +183,14 @@ Note that requests that time out are [retried twice by default](#retries). We use the standard library [`logging`](https://docs.python.org/3/library/logging.html) module. -You can enable logging by setting the environment variable `DATAHERALD_LOG` to `debug`. +You can enable logging by setting the environment variable `DATAHERALD_LOG` to `info`. ```shell -$ export DATAHERALD_LOG=debug +$ export DATAHERALD_LOG=info ``` +Or to `debug` for more verbose logging. + ### How to tell whether `None` means `null` or missing In an API response, a field may be explicitly `null`, or missing entirely; in either case, its value is `None` in this library. 
You can differentiate the two cases with `.model_fields_set`: From 540b49a21e53183ac7a5eb670b375e3ded2ecf34 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 26 Nov 2024 05:21:47 +0000 Subject: [PATCH 09/39] chore: remove now unused `cached-property` dep (#63) --- pyproject.toml | 1 - src/dataherald/_compat.py | 5 +---- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0a872a8..3c273a2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,6 @@ dependencies = [ "anyio>=3.5.0, <5", "distro>=1.7.0, <2", "sniffio", - "cached-property; python_version < '3.8'", ] requires-python = ">= 3.8" classifiers = [ diff --git a/src/dataherald/_compat.py b/src/dataherald/_compat.py index df173f8..92d9ee6 100644 --- a/src/dataherald/_compat.py +++ b/src/dataherald/_compat.py @@ -214,9 +214,6 @@ def __set_name__(self, owner: type[Any], name: str) -> None: ... # __set__ is not defined at runtime, but @cached_property is designed to be settable def __set__(self, instance: object, value: _T) -> None: ... else: - try: - from functools import cached_property as cached_property - except ImportError: - from cached_property import cached_property as cached_property + from functools import cached_property as cached_property typed_cached_property = cached_property From 5ba70006e6e4c0addb3461eaec8424d45d12c3c5 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 28 Nov 2024 05:05:15 +0000 Subject: [PATCH 10/39] chore(internal): codegen related update (#64) --- mypy.ini | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mypy.ini b/mypy.ini index cb50a8a..bf6cd2f 100644 --- a/mypy.ini +++ b/mypy.ini @@ -5,7 +5,10 @@ show_error_codes = True # Exclude _files.py because mypy isn't smart enough to apply # the correct type narrowing and as this is an internal module # it's fine to just use Pyright. -exclude = ^(src/dataherald/_files\.py|_dev/.*\.py)$ +# +# We also exclude our `tests` as mypy doesn't always infer +# types correctly and Pyright will still catch any type errors. +exclude = ^(src/dataherald/_files\.py|_dev/.*\.py|tests/.*)$ strict_equality = True implicit_reexport = True From 9ee84bd044e4fd133b94e0346dff772ea62ec152 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 28 Nov 2024 18:47:04 +0000 Subject: [PATCH 11/39] fix(client): compat with new httpx 0.28.0 release (#65) --- src/dataherald/_base_client.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/src/dataherald/_base_client.py b/src/dataherald/_base_client.py index 1bd954c..e550cd6 100644 --- a/src/dataherald/_base_client.py +++ b/src/dataherald/_base_client.py @@ -792,6 +792,7 @@ def __init__( custom_query: Mapping[str, object] | None = None, _strict_response_validation: bool, ) -> None: + kwargs: dict[str, Any] = {} if limits is not None: warnings.warn( "The `connection_pool_limits` argument is deprecated. The `http_client` argument should be passed instead", @@ -804,6 +805,7 @@ def __init__( limits = DEFAULT_CONNECTION_LIMITS if transport is not None: + kwargs["transport"] = transport warnings.warn( "The `transport` argument is deprecated. 
The `http_client` argument should be passed instead", category=DeprecationWarning, @@ -813,6 +815,7 @@ def __init__( raise ValueError("The `http_client` argument is mutually exclusive with `transport`") if proxies is not None: + kwargs["proxies"] = proxies warnings.warn( "The `proxies` argument is deprecated. The `http_client` argument should be passed instead", category=DeprecationWarning, @@ -856,10 +859,9 @@ def __init__( base_url=base_url, # cast to a valid type because mypy doesn't understand our type narrowing timeout=cast(Timeout, timeout), - proxies=proxies, - transport=transport, limits=limits, follow_redirects=True, + **kwargs, # type: ignore ) def is_closed(self) -> bool: @@ -1358,6 +1360,7 @@ def __init__( custom_headers: Mapping[str, str] | None = None, custom_query: Mapping[str, object] | None = None, ) -> None: + kwargs: dict[str, Any] = {} if limits is not None: warnings.warn( "The `connection_pool_limits` argument is deprecated. The `http_client` argument should be passed instead", @@ -1370,6 +1373,7 @@ def __init__( limits = DEFAULT_CONNECTION_LIMITS if transport is not None: + kwargs["transport"] = transport warnings.warn( "The `transport` argument is deprecated. The `http_client` argument should be passed instead", category=DeprecationWarning, @@ -1379,6 +1383,7 @@ def __init__( raise ValueError("The `http_client` argument is mutually exclusive with `transport`") if proxies is not None: + kwargs["proxies"] = proxies warnings.warn( "The `proxies` argument is deprecated. The `http_client` argument should be passed instead", category=DeprecationWarning, @@ -1422,10 +1427,9 @@ def __init__( base_url=base_url, # cast to a valid type because mypy doesn't understand our type narrowing timeout=cast(Timeout, timeout), - proxies=proxies, - transport=transport, limits=limits, follow_redirects=True, + **kwargs, # type: ignore ) def is_closed(self) -> bool: From 9d5db21aae350d5eb3fa5a52cb72a8fcab17f0c2 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 3 Dec 2024 05:14:15 +0000 Subject: [PATCH 12/39] chore(internal): bump pyright (#66) --- requirements-dev.lock | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements-dev.lock b/requirements-dev.lock index 646708b..3de4e82 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -68,7 +68,7 @@ pydantic-core==2.23.4 # via pydantic pygments==2.18.0 # via rich -pyright==1.1.380 +pyright==1.1.389 pytest==8.3.3 # via pytest-asyncio pytest-asyncio==0.24.0 @@ -97,6 +97,7 @@ typing-extensions==4.12.2 # via mypy # via pydantic # via pydantic-core + # via pyright virtualenv==20.24.5 # via nox zipp==3.17.0 From 3070bff7d602bd234433ad802c92b9fba210a852 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 4 Dec 2024 05:22:42 +0000 Subject: [PATCH 13/39] chore: make the `Omit` type public (#67) --- src/dataherald/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/dataherald/__init__.py b/src/dataherald/__init__.py index 1682151..fca965c 100644 --- a/src/dataherald/__init__.py +++ b/src/dataherald/__init__.py @@ -1,7 +1,7 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. from . 
import types -from ._types import NOT_GIVEN, NoneType, NotGiven, Transport, ProxiesTypes +from ._types import NOT_GIVEN, Omit, NoneType, NotGiven, Transport, ProxiesTypes from ._utils import file_from_path from ._client import ( ENVIRONMENTS, @@ -47,6 +47,7 @@ "ProxiesTypes", "NotGiven", "NOT_GIVEN", + "Omit", "DataheraldError", "APIError", "APIStatusError", From 13d4ec203e3b194ec38c33b8582288e344a16f19 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 10 Dec 2024 05:11:47 +0000 Subject: [PATCH 14/39] chore(internal): bump pydantic dependency (#68) --- requirements-dev.lock | 4 ++-- requirements.lock | 4 ++-- src/dataherald/_types.py | 6 ++---- 3 files changed, 6 insertions(+), 8 deletions(-) diff --git a/requirements-dev.lock b/requirements-dev.lock index 3de4e82..12c537d 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -62,9 +62,9 @@ platformdirs==3.11.0 # via virtualenv pluggy==1.5.0 # via pytest -pydantic==2.9.2 +pydantic==2.10.3 # via dataherald -pydantic-core==2.23.4 +pydantic-core==2.27.1 # via pydantic pygments==2.18.0 # via rich diff --git a/requirements.lock b/requirements.lock index 04d3a03..ae9bfcd 100644 --- a/requirements.lock +++ b/requirements.lock @@ -30,9 +30,9 @@ httpx==0.25.2 idna==3.4 # via anyio # via httpx -pydantic==2.9.2 +pydantic==2.10.3 # via dataherald -pydantic-core==2.23.4 +pydantic-core==2.27.1 # via pydantic sniffio==1.3.0 # via anyio diff --git a/src/dataherald/_types.py b/src/dataherald/_types.py index bb13593..ef776a2 100644 --- a/src/dataherald/_types.py +++ b/src/dataherald/_types.py @@ -192,10 +192,8 @@ def get(self, __key: str) -> str | None: ... StrBytesIntFloat = Union[str, bytes, int, float] # Note: copied from Pydantic -# https://github.com/pydantic/pydantic/blob/32ea570bf96e84234d2992e1ddf40ab8a565925a/pydantic/main.py#L49 -IncEx: TypeAlias = Union[ - Set[int], Set[str], Mapping[int, Union["IncEx", Literal[True]]], Mapping[str, Union["IncEx", Literal[True]]] -] +# https://github.com/pydantic/pydantic/blob/6f31f8f68ef011f84357330186f603ff295312fd/pydantic/main.py#L79 +IncEx: TypeAlias = Union[Set[int], Set[str], Mapping[int, Union["IncEx", bool]], Mapping[str, Union["IncEx", bool]]] PostParser = Callable[[Any], Any] From f0129cf790bb9849d6c64bae68b912756006e9da Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 10 Dec 2024 05:16:15 +0000 Subject: [PATCH 15/39] docs(readme): fix http client proxies example (#69) --- README.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 9e1dff7..b655fbf 100644 --- a/README.md +++ b/README.md @@ -283,18 +283,19 @@ can also get all the extra fields on the Pydantic model as a dict with You can directly override the [httpx client](https://www.python-httpx.org/api/#client) to customize it for your use case, including: -- Support for proxies -- Custom transports +- Support for [proxies](https://www.python-httpx.org/advanced/proxies/) +- Custom [transports](https://www.python-httpx.org/advanced/transports/) - Additional [advanced](https://www.python-httpx.org/advanced/clients/) functionality ```python +import httpx from dataherald import Dataherald, DefaultHttpxClient client = Dataherald( # Or use the `DATAHERALD_BASE_URL` env var base_url="http://my.test.server.example.com:8083", http_client=DefaultHttpxClient( - proxies="http://my.test.proxy.example.com", + proxy="http://my.test.proxy.example.com", 
transport=httpx.HTTPTransport(local_address="0.0.0.0"), ), ) From 2a732e03fafe75f1b0ab7812176259a9c311f2bc Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 13 Dec 2024 05:22:15 +0000 Subject: [PATCH 16/39] chore(internal): bump pyright (#70) --- requirements-dev.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.lock b/requirements-dev.lock index 12c537d..18fea86 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -68,7 +68,7 @@ pydantic-core==2.27.1 # via pydantic pygments==2.18.0 # via rich -pyright==1.1.389 +pyright==1.1.390 pytest==8.3.3 # via pytest-asyncio pytest-asyncio==0.24.0 From 911abd4190cbc49d96fa291c0be303440fc651e7 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 13 Dec 2024 05:23:06 +0000 Subject: [PATCH 17/39] chore(internal): add support for TypeAliasType (#71) --- pyproject.toml | 2 +- src/dataherald/_models.py | 3 +++ src/dataherald/_response.py | 20 ++++++++++---------- src/dataherald/_utils/__init__.py | 1 + src/dataherald/_utils/_typing.py | 31 ++++++++++++++++++++++++++++++- tests/test_models.py | 18 +++++++++++++++++- tests/utils.py | 4 ++++ 7 files changed, 66 insertions(+), 13 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3c273a2..d5cee2f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,7 @@ authors = [ dependencies = [ "httpx>=0.23.0, <1", "pydantic>=1.9.0, <3", - "typing-extensions>=4.7, <5", + "typing-extensions>=4.10, <5", "anyio>=3.5.0, <5", "distro>=1.7.0, <2", "sniffio", diff --git a/src/dataherald/_models.py b/src/dataherald/_models.py index 6cb469e..7a547ce 100644 --- a/src/dataherald/_models.py +++ b/src/dataherald/_models.py @@ -46,6 +46,7 @@ strip_not_given, extract_type_arg, is_annotated_type, + is_type_alias_type, strip_annotated_type, ) from ._compat import ( @@ -428,6 +429,8 @@ def construct_type(*, value: object, type_: object) -> object: # we allow `object` as the input type because otherwise, passing things like # `Literal['value']` will be reported as a type error by type checkers type_ = cast("type[object]", type_) + if is_type_alias_type(type_): + type_ = type_.__value__ # type: ignore[unreachable] # unwrap `Annotated[T, ...]` -> `T` if is_annotated_type(type_): diff --git a/src/dataherald/_response.py b/src/dataherald/_response.py index f1aa94f..934f5cf 100644 --- a/src/dataherald/_response.py +++ b/src/dataherald/_response.py @@ -25,7 +25,7 @@ import pydantic from ._types import NoneType -from ._utils import is_given, extract_type_arg, is_annotated_type, extract_type_var_from_base +from ._utils import is_given, extract_type_arg, is_annotated_type, is_type_alias_type, extract_type_var_from_base from ._models import BaseModel, is_basemodel from ._constants import RAW_RESPONSE_HEADER, OVERRIDE_CAST_TO_HEADER from ._streaming import Stream, AsyncStream, is_stream_class_type, extract_stream_chunk_type @@ -126,9 +126,15 @@ def __repr__(self) -> str: ) def _parse(self, *, to: type[_T] | None = None) -> R | _T: + cast_to = to if to is not None else self._cast_to + + # unwrap `TypeAlias('Name', T)` -> `T` + if is_type_alias_type(cast_to): + cast_to = cast_to.__value__ # type: ignore[unreachable] + # unwrap `Annotated[T, ...]` -> `T` - if to and is_annotated_type(to): - to = extract_type_arg(to, 0) + if cast_to and is_annotated_type(cast_to): + cast_to = extract_type_arg(cast_to, 0) if self._is_sse_stream: if to: @@ -164,18 +170,12 
@@ def _parse(self, *, to: type[_T] | None = None) -> R | _T: return cast( R, stream_cls( - cast_to=self._cast_to, + cast_to=cast_to, response=self.http_response, client=cast(Any, self._client), ), ) - cast_to = to if to is not None else self._cast_to - - # unwrap `Annotated[T, ...]` -> `T` - if is_annotated_type(cast_to): - cast_to = extract_type_arg(cast_to, 0) - if cast_to is NoneType: return cast(R, None) diff --git a/src/dataherald/_utils/__init__.py b/src/dataherald/_utils/__init__.py index a7cff3c..d4fda26 100644 --- a/src/dataherald/_utils/__init__.py +++ b/src/dataherald/_utils/__init__.py @@ -39,6 +39,7 @@ is_iterable_type as is_iterable_type, is_required_type as is_required_type, is_annotated_type as is_annotated_type, + is_type_alias_type as is_type_alias_type, strip_annotated_type as strip_annotated_type, extract_type_var_from_base as extract_type_var_from_base, ) diff --git a/src/dataherald/_utils/_typing.py b/src/dataherald/_utils/_typing.py index c036991..278749b 100644 --- a/src/dataherald/_utils/_typing.py +++ b/src/dataherald/_utils/_typing.py @@ -1,8 +1,17 @@ from __future__ import annotations +import sys +import typing +import typing_extensions from typing import Any, TypeVar, Iterable, cast from collections import abc as _c_abc -from typing_extensions import Required, Annotated, get_args, get_origin +from typing_extensions import ( + TypeIs, + Required, + Annotated, + get_args, + get_origin, +) from .._types import InheritsGeneric from .._compat import is_union as _is_union @@ -36,6 +45,26 @@ def is_typevar(typ: type) -> bool: return type(typ) == TypeVar # type: ignore +_TYPE_ALIAS_TYPES: tuple[type[typing_extensions.TypeAliasType], ...] = (typing_extensions.TypeAliasType,) +if sys.version_info >= (3, 12): + _TYPE_ALIAS_TYPES = (*_TYPE_ALIAS_TYPES, typing.TypeAliasType) + + +def is_type_alias_type(tp: Any, /) -> TypeIs[typing_extensions.TypeAliasType]: + """Return whether the provided argument is an instance of `TypeAliasType`. + + ```python + type Int = int + is_type_alias_type(Int) + # > True + Str = TypeAliasType("Str", str) + is_type_alias_type(Str) + # > True + ``` + """ + return isinstance(tp, _TYPE_ALIAS_TYPES) + + # Extracts T from Annotated[T, ...] 
or from Required[Annotated[T, ...]] def strip_annotated_type(typ: type) -> type: if is_required_type(typ) or is_annotated_type(typ): diff --git a/tests/test_models.py b/tests/test_models.py index d8b77a6..57c66fb 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -1,7 +1,7 @@ import json from typing import Any, Dict, List, Union, Optional, cast from datetime import datetime, timezone -from typing_extensions import Literal, Annotated +from typing_extensions import Literal, Annotated, TypeAliasType import pytest import pydantic @@ -828,3 +828,19 @@ class B(BaseModel): # if the discriminator details object stays the same between invocations then # we hit the cache assert UnionType.__discriminator__ is discriminator + + +@pytest.mark.skipif(not PYDANTIC_V2, reason="TypeAliasType is not supported in Pydantic v1") +def test_type_alias_type() -> None: + Alias = TypeAliasType("Alias", str) + + class Model(BaseModel): + alias: Alias + union: Union[int, Alias] + + m = construct_type(value={"alias": "foo", "union": "bar"}, type_=Model) + assert isinstance(m, Model) + assert isinstance(m.alias, str) + assert m.alias == "foo" + assert isinstance(m.union, str) + assert m.union == "bar" diff --git a/tests/utils.py b/tests/utils.py index 42e498f..9e9c0c1 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -16,6 +16,7 @@ is_union_type, extract_type_arg, is_annotated_type, + is_type_alias_type, ) from dataherald._compat import PYDANTIC_V2, field_outer_type, get_model_fields from dataherald._models import BaseModel @@ -51,6 +52,9 @@ def assert_matches_type( path: list[str], allow_none: bool = False, ) -> None: + if is_type_alias_type(type_): + type_ = type_.__value__ + # unwrap `Annotated[T, ...]` -> `T` if is_annotated_type(type_): type_ = extract_type_arg(type_, 0) From 6ddf080b63f5fb373d1fd32ec4919baada704b64 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 14 Dec 2024 05:26:00 +0000 Subject: [PATCH 18/39] chore(internal): codegen related update (#72) --- src/dataherald/_client.py | 190 ++++++++++++++++++++------------------ 1 file changed, 100 insertions(+), 90 deletions(-) diff --git a/src/dataherald/_client.py b/src/dataherald/_client.py index b0077d0..38ada33 100644 --- a/src/dataherald/_client.py +++ b/src/dataherald/_client.py @@ -8,7 +8,7 @@ import httpx -from . import resources, _exceptions +from . 
import _exceptions from ._qs import Querystring from ._types import ( NOT_GIVEN, @@ -24,6 +24,7 @@ get_async_library, ) from ._version import __version__ +from .resources import engine, heartbeat, finetunings, generations, golden_sqls, nl_generations, table_descriptions from ._streaming import Stream as Stream, AsyncStream as AsyncStream from ._exceptions import APIStatusError, DataheraldError from ._base_client import ( @@ -31,6 +32,10 @@ SyncAPIClient, AsyncAPIClient, ) +from .resources.prompts import prompts +from .resources.instructions import instructions +from .resources.sql_generations import sql_generations +from .resources.database_connections import database_connections __all__ = [ "ENVIRONMENTS", @@ -38,7 +43,6 @@ "Transport", "ProxiesTypes", "RequestOptions", - "resources", "Dataherald", "AsyncDataherald", "Client", @@ -52,17 +56,17 @@ class Dataherald(SyncAPIClient): - database_connections: resources.DatabaseConnectionsResource - finetunings: resources.FinetuningsResource - golden_sqls: resources.GoldenSqlsResource - instructions: resources.InstructionsResource - generations: resources.GenerationsResource - prompts: resources.PromptsResource - sql_generations: resources.SqlGenerationsResource - nl_generations: resources.NlGenerationsResource - table_descriptions: resources.TableDescriptionsResource - heartbeat: resources.HeartbeatResource - engine: resources.EngineResource + database_connections: database_connections.DatabaseConnectionsResource + finetunings: finetunings.FinetuningsResource + golden_sqls: golden_sqls.GoldenSqlsResource + instructions: instructions.InstructionsResource + generations: generations.GenerationsResource + prompts: prompts.PromptsResource + sql_generations: sql_generations.SqlGenerationsResource + nl_generations: nl_generations.NlGenerationsResource + table_descriptions: table_descriptions.TableDescriptionsResource + heartbeat: heartbeat.HeartbeatResource + engine: engine.EngineResource with_raw_response: DataheraldWithRawResponse with_streaming_response: DataheraldWithStreamedResponse @@ -144,17 +148,17 @@ def __init__( _strict_response_validation=_strict_response_validation, ) - self.database_connections = resources.DatabaseConnectionsResource(self) - self.finetunings = resources.FinetuningsResource(self) - self.golden_sqls = resources.GoldenSqlsResource(self) - self.instructions = resources.InstructionsResource(self) - self.generations = resources.GenerationsResource(self) - self.prompts = resources.PromptsResource(self) - self.sql_generations = resources.SqlGenerationsResource(self) - self.nl_generations = resources.NlGenerationsResource(self) - self.table_descriptions = resources.TableDescriptionsResource(self) - self.heartbeat = resources.HeartbeatResource(self) - self.engine = resources.EngineResource(self) + self.database_connections = database_connections.DatabaseConnectionsResource(self) + self.finetunings = finetunings.FinetuningsResource(self) + self.golden_sqls = golden_sqls.GoldenSqlsResource(self) + self.instructions = instructions.InstructionsResource(self) + self.generations = generations.GenerationsResource(self) + self.prompts = prompts.PromptsResource(self) + self.sql_generations = sql_generations.SqlGenerationsResource(self) + self.nl_generations = nl_generations.NlGenerationsResource(self) + self.table_descriptions = table_descriptions.TableDescriptionsResource(self) + self.heartbeat = heartbeat.HeartbeatResource(self) + self.engine = engine.EngineResource(self) self.with_raw_response = DataheraldWithRawResponse(self) 
self.with_streaming_response = DataheraldWithStreamedResponse(self) @@ -266,17 +270,17 @@ def _make_status_error( class AsyncDataherald(AsyncAPIClient): - database_connections: resources.AsyncDatabaseConnectionsResource - finetunings: resources.AsyncFinetuningsResource - golden_sqls: resources.AsyncGoldenSqlsResource - instructions: resources.AsyncInstructionsResource - generations: resources.AsyncGenerationsResource - prompts: resources.AsyncPromptsResource - sql_generations: resources.AsyncSqlGenerationsResource - nl_generations: resources.AsyncNlGenerationsResource - table_descriptions: resources.AsyncTableDescriptionsResource - heartbeat: resources.AsyncHeartbeatResource - engine: resources.AsyncEngineResource + database_connections: database_connections.AsyncDatabaseConnectionsResource + finetunings: finetunings.AsyncFinetuningsResource + golden_sqls: golden_sqls.AsyncGoldenSqlsResource + instructions: instructions.AsyncInstructionsResource + generations: generations.AsyncGenerationsResource + prompts: prompts.AsyncPromptsResource + sql_generations: sql_generations.AsyncSqlGenerationsResource + nl_generations: nl_generations.AsyncNlGenerationsResource + table_descriptions: table_descriptions.AsyncTableDescriptionsResource + heartbeat: heartbeat.AsyncHeartbeatResource + engine: engine.AsyncEngineResource with_raw_response: AsyncDataheraldWithRawResponse with_streaming_response: AsyncDataheraldWithStreamedResponse @@ -358,17 +362,17 @@ def __init__( _strict_response_validation=_strict_response_validation, ) - self.database_connections = resources.AsyncDatabaseConnectionsResource(self) - self.finetunings = resources.AsyncFinetuningsResource(self) - self.golden_sqls = resources.AsyncGoldenSqlsResource(self) - self.instructions = resources.AsyncInstructionsResource(self) - self.generations = resources.AsyncGenerationsResource(self) - self.prompts = resources.AsyncPromptsResource(self) - self.sql_generations = resources.AsyncSqlGenerationsResource(self) - self.nl_generations = resources.AsyncNlGenerationsResource(self) - self.table_descriptions = resources.AsyncTableDescriptionsResource(self) - self.heartbeat = resources.AsyncHeartbeatResource(self) - self.engine = resources.AsyncEngineResource(self) + self.database_connections = database_connections.AsyncDatabaseConnectionsResource(self) + self.finetunings = finetunings.AsyncFinetuningsResource(self) + self.golden_sqls = golden_sqls.AsyncGoldenSqlsResource(self) + self.instructions = instructions.AsyncInstructionsResource(self) + self.generations = generations.AsyncGenerationsResource(self) + self.prompts = prompts.AsyncPromptsResource(self) + self.sql_generations = sql_generations.AsyncSqlGenerationsResource(self) + self.nl_generations = nl_generations.AsyncNlGenerationsResource(self) + self.table_descriptions = table_descriptions.AsyncTableDescriptionsResource(self) + self.heartbeat = heartbeat.AsyncHeartbeatResource(self) + self.engine = engine.AsyncEngineResource(self) self.with_raw_response = AsyncDataheraldWithRawResponse(self) self.with_streaming_response = AsyncDataheraldWithStreamedResponse(self) @@ -481,70 +485,76 @@ def _make_status_error( class DataheraldWithRawResponse: def __init__(self, client: Dataherald) -> None: - self.database_connections = resources.DatabaseConnectionsResourceWithRawResponse(client.database_connections) - self.finetunings = resources.FinetuningsResourceWithRawResponse(client.finetunings) - self.golden_sqls = resources.GoldenSqlsResourceWithRawResponse(client.golden_sqls) - self.instructions = 
resources.InstructionsResourceWithRawResponse(client.instructions) - self.generations = resources.GenerationsResourceWithRawResponse(client.generations) - self.prompts = resources.PromptsResourceWithRawResponse(client.prompts) - self.sql_generations = resources.SqlGenerationsResourceWithRawResponse(client.sql_generations) - self.nl_generations = resources.NlGenerationsResourceWithRawResponse(client.nl_generations) - self.table_descriptions = resources.TableDescriptionsResourceWithRawResponse(client.table_descriptions) - self.heartbeat = resources.HeartbeatResourceWithRawResponse(client.heartbeat) - self.engine = resources.EngineResourceWithRawResponse(client.engine) + self.database_connections = database_connections.DatabaseConnectionsResourceWithRawResponse( + client.database_connections + ) + self.finetunings = finetunings.FinetuningsResourceWithRawResponse(client.finetunings) + self.golden_sqls = golden_sqls.GoldenSqlsResourceWithRawResponse(client.golden_sqls) + self.instructions = instructions.InstructionsResourceWithRawResponse(client.instructions) + self.generations = generations.GenerationsResourceWithRawResponse(client.generations) + self.prompts = prompts.PromptsResourceWithRawResponse(client.prompts) + self.sql_generations = sql_generations.SqlGenerationsResourceWithRawResponse(client.sql_generations) + self.nl_generations = nl_generations.NlGenerationsResourceWithRawResponse(client.nl_generations) + self.table_descriptions = table_descriptions.TableDescriptionsResourceWithRawResponse(client.table_descriptions) + self.heartbeat = heartbeat.HeartbeatResourceWithRawResponse(client.heartbeat) + self.engine = engine.EngineResourceWithRawResponse(client.engine) class AsyncDataheraldWithRawResponse: def __init__(self, client: AsyncDataherald) -> None: - self.database_connections = resources.AsyncDatabaseConnectionsResourceWithRawResponse( + self.database_connections = database_connections.AsyncDatabaseConnectionsResourceWithRawResponse( client.database_connections ) - self.finetunings = resources.AsyncFinetuningsResourceWithRawResponse(client.finetunings) - self.golden_sqls = resources.AsyncGoldenSqlsResourceWithRawResponse(client.golden_sqls) - self.instructions = resources.AsyncInstructionsResourceWithRawResponse(client.instructions) - self.generations = resources.AsyncGenerationsResourceWithRawResponse(client.generations) - self.prompts = resources.AsyncPromptsResourceWithRawResponse(client.prompts) - self.sql_generations = resources.AsyncSqlGenerationsResourceWithRawResponse(client.sql_generations) - self.nl_generations = resources.AsyncNlGenerationsResourceWithRawResponse(client.nl_generations) - self.table_descriptions = resources.AsyncTableDescriptionsResourceWithRawResponse(client.table_descriptions) - self.heartbeat = resources.AsyncHeartbeatResourceWithRawResponse(client.heartbeat) - self.engine = resources.AsyncEngineResourceWithRawResponse(client.engine) + self.finetunings = finetunings.AsyncFinetuningsResourceWithRawResponse(client.finetunings) + self.golden_sqls = golden_sqls.AsyncGoldenSqlsResourceWithRawResponse(client.golden_sqls) + self.instructions = instructions.AsyncInstructionsResourceWithRawResponse(client.instructions) + self.generations = generations.AsyncGenerationsResourceWithRawResponse(client.generations) + self.prompts = prompts.AsyncPromptsResourceWithRawResponse(client.prompts) + self.sql_generations = sql_generations.AsyncSqlGenerationsResourceWithRawResponse(client.sql_generations) + self.nl_generations = 
nl_generations.AsyncNlGenerationsResourceWithRawResponse(client.nl_generations) + self.table_descriptions = table_descriptions.AsyncTableDescriptionsResourceWithRawResponse( + client.table_descriptions + ) + self.heartbeat = heartbeat.AsyncHeartbeatResourceWithRawResponse(client.heartbeat) + self.engine = engine.AsyncEngineResourceWithRawResponse(client.engine) class DataheraldWithStreamedResponse: def __init__(self, client: Dataherald) -> None: - self.database_connections = resources.DatabaseConnectionsResourceWithStreamingResponse( + self.database_connections = database_connections.DatabaseConnectionsResourceWithStreamingResponse( client.database_connections ) - self.finetunings = resources.FinetuningsResourceWithStreamingResponse(client.finetunings) - self.golden_sqls = resources.GoldenSqlsResourceWithStreamingResponse(client.golden_sqls) - self.instructions = resources.InstructionsResourceWithStreamingResponse(client.instructions) - self.generations = resources.GenerationsResourceWithStreamingResponse(client.generations) - self.prompts = resources.PromptsResourceWithStreamingResponse(client.prompts) - self.sql_generations = resources.SqlGenerationsResourceWithStreamingResponse(client.sql_generations) - self.nl_generations = resources.NlGenerationsResourceWithStreamingResponse(client.nl_generations) - self.table_descriptions = resources.TableDescriptionsResourceWithStreamingResponse(client.table_descriptions) - self.heartbeat = resources.HeartbeatResourceWithStreamingResponse(client.heartbeat) - self.engine = resources.EngineResourceWithStreamingResponse(client.engine) + self.finetunings = finetunings.FinetuningsResourceWithStreamingResponse(client.finetunings) + self.golden_sqls = golden_sqls.GoldenSqlsResourceWithStreamingResponse(client.golden_sqls) + self.instructions = instructions.InstructionsResourceWithStreamingResponse(client.instructions) + self.generations = generations.GenerationsResourceWithStreamingResponse(client.generations) + self.prompts = prompts.PromptsResourceWithStreamingResponse(client.prompts) + self.sql_generations = sql_generations.SqlGenerationsResourceWithStreamingResponse(client.sql_generations) + self.nl_generations = nl_generations.NlGenerationsResourceWithStreamingResponse(client.nl_generations) + self.table_descriptions = table_descriptions.TableDescriptionsResourceWithStreamingResponse( + client.table_descriptions + ) + self.heartbeat = heartbeat.HeartbeatResourceWithStreamingResponse(client.heartbeat) + self.engine = engine.EngineResourceWithStreamingResponse(client.engine) class AsyncDataheraldWithStreamedResponse: def __init__(self, client: AsyncDataherald) -> None: - self.database_connections = resources.AsyncDatabaseConnectionsResourceWithStreamingResponse( + self.database_connections = database_connections.AsyncDatabaseConnectionsResourceWithStreamingResponse( client.database_connections ) - self.finetunings = resources.AsyncFinetuningsResourceWithStreamingResponse(client.finetunings) - self.golden_sqls = resources.AsyncGoldenSqlsResourceWithStreamingResponse(client.golden_sqls) - self.instructions = resources.AsyncInstructionsResourceWithStreamingResponse(client.instructions) - self.generations = resources.AsyncGenerationsResourceWithStreamingResponse(client.generations) - self.prompts = resources.AsyncPromptsResourceWithStreamingResponse(client.prompts) - self.sql_generations = resources.AsyncSqlGenerationsResourceWithStreamingResponse(client.sql_generations) - self.nl_generations = 
resources.AsyncNlGenerationsResourceWithStreamingResponse(client.nl_generations) - self.table_descriptions = resources.AsyncTableDescriptionsResourceWithStreamingResponse( + self.finetunings = finetunings.AsyncFinetuningsResourceWithStreamingResponse(client.finetunings) + self.golden_sqls = golden_sqls.AsyncGoldenSqlsResourceWithStreamingResponse(client.golden_sqls) + self.instructions = instructions.AsyncInstructionsResourceWithStreamingResponse(client.instructions) + self.generations = generations.AsyncGenerationsResourceWithStreamingResponse(client.generations) + self.prompts = prompts.AsyncPromptsResourceWithStreamingResponse(client.prompts) + self.sql_generations = sql_generations.AsyncSqlGenerationsResourceWithStreamingResponse(client.sql_generations) + self.nl_generations = nl_generations.AsyncNlGenerationsResourceWithStreamingResponse(client.nl_generations) + self.table_descriptions = table_descriptions.AsyncTableDescriptionsResourceWithStreamingResponse( client.table_descriptions ) - self.heartbeat = resources.AsyncHeartbeatResourceWithStreamingResponse(client.heartbeat) - self.engine = resources.AsyncEngineResourceWithStreamingResponse(client.engine) + self.heartbeat = heartbeat.AsyncHeartbeatResourceWithStreamingResponse(client.heartbeat) + self.engine = engine.AsyncEngineResourceWithStreamingResponse(client.engine) Client = Dataherald From d8c41ed0a8cf93d7839a61ab572da71262f0c0db Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 14 Dec 2024 05:26:53 +0000 Subject: [PATCH 19/39] chore(internal): codegen related update (#73) --- src/dataherald/_client.py | 190 ++++++++++++++++++-------------------- 1 file changed, 90 insertions(+), 100 deletions(-) diff --git a/src/dataherald/_client.py b/src/dataherald/_client.py index 38ada33..b0077d0 100644 --- a/src/dataherald/_client.py +++ b/src/dataherald/_client.py @@ -8,7 +8,7 @@ import httpx -from . import _exceptions +from . 
import resources, _exceptions from ._qs import Querystring from ._types import ( NOT_GIVEN, @@ -24,7 +24,6 @@ get_async_library, ) from ._version import __version__ -from .resources import engine, heartbeat, finetunings, generations, golden_sqls, nl_generations, table_descriptions from ._streaming import Stream as Stream, AsyncStream as AsyncStream from ._exceptions import APIStatusError, DataheraldError from ._base_client import ( @@ -32,10 +31,6 @@ SyncAPIClient, AsyncAPIClient, ) -from .resources.prompts import prompts -from .resources.instructions import instructions -from .resources.sql_generations import sql_generations -from .resources.database_connections import database_connections __all__ = [ "ENVIRONMENTS", @@ -43,6 +38,7 @@ "Transport", "ProxiesTypes", "RequestOptions", + "resources", "Dataherald", "AsyncDataherald", "Client", @@ -56,17 +52,17 @@ class Dataherald(SyncAPIClient): - database_connections: database_connections.DatabaseConnectionsResource - finetunings: finetunings.FinetuningsResource - golden_sqls: golden_sqls.GoldenSqlsResource - instructions: instructions.InstructionsResource - generations: generations.GenerationsResource - prompts: prompts.PromptsResource - sql_generations: sql_generations.SqlGenerationsResource - nl_generations: nl_generations.NlGenerationsResource - table_descriptions: table_descriptions.TableDescriptionsResource - heartbeat: heartbeat.HeartbeatResource - engine: engine.EngineResource + database_connections: resources.DatabaseConnectionsResource + finetunings: resources.FinetuningsResource + golden_sqls: resources.GoldenSqlsResource + instructions: resources.InstructionsResource + generations: resources.GenerationsResource + prompts: resources.PromptsResource + sql_generations: resources.SqlGenerationsResource + nl_generations: resources.NlGenerationsResource + table_descriptions: resources.TableDescriptionsResource + heartbeat: resources.HeartbeatResource + engine: resources.EngineResource with_raw_response: DataheraldWithRawResponse with_streaming_response: DataheraldWithStreamedResponse @@ -148,17 +144,17 @@ def __init__( _strict_response_validation=_strict_response_validation, ) - self.database_connections = database_connections.DatabaseConnectionsResource(self) - self.finetunings = finetunings.FinetuningsResource(self) - self.golden_sqls = golden_sqls.GoldenSqlsResource(self) - self.instructions = instructions.InstructionsResource(self) - self.generations = generations.GenerationsResource(self) - self.prompts = prompts.PromptsResource(self) - self.sql_generations = sql_generations.SqlGenerationsResource(self) - self.nl_generations = nl_generations.NlGenerationsResource(self) - self.table_descriptions = table_descriptions.TableDescriptionsResource(self) - self.heartbeat = heartbeat.HeartbeatResource(self) - self.engine = engine.EngineResource(self) + self.database_connections = resources.DatabaseConnectionsResource(self) + self.finetunings = resources.FinetuningsResource(self) + self.golden_sqls = resources.GoldenSqlsResource(self) + self.instructions = resources.InstructionsResource(self) + self.generations = resources.GenerationsResource(self) + self.prompts = resources.PromptsResource(self) + self.sql_generations = resources.SqlGenerationsResource(self) + self.nl_generations = resources.NlGenerationsResource(self) + self.table_descriptions = resources.TableDescriptionsResource(self) + self.heartbeat = resources.HeartbeatResource(self) + self.engine = resources.EngineResource(self) self.with_raw_response = DataheraldWithRawResponse(self) 
self.with_streaming_response = DataheraldWithStreamedResponse(self) @@ -270,17 +266,17 @@ def _make_status_error( class AsyncDataherald(AsyncAPIClient): - database_connections: database_connections.AsyncDatabaseConnectionsResource - finetunings: finetunings.AsyncFinetuningsResource - golden_sqls: golden_sqls.AsyncGoldenSqlsResource - instructions: instructions.AsyncInstructionsResource - generations: generations.AsyncGenerationsResource - prompts: prompts.AsyncPromptsResource - sql_generations: sql_generations.AsyncSqlGenerationsResource - nl_generations: nl_generations.AsyncNlGenerationsResource - table_descriptions: table_descriptions.AsyncTableDescriptionsResource - heartbeat: heartbeat.AsyncHeartbeatResource - engine: engine.AsyncEngineResource + database_connections: resources.AsyncDatabaseConnectionsResource + finetunings: resources.AsyncFinetuningsResource + golden_sqls: resources.AsyncGoldenSqlsResource + instructions: resources.AsyncInstructionsResource + generations: resources.AsyncGenerationsResource + prompts: resources.AsyncPromptsResource + sql_generations: resources.AsyncSqlGenerationsResource + nl_generations: resources.AsyncNlGenerationsResource + table_descriptions: resources.AsyncTableDescriptionsResource + heartbeat: resources.AsyncHeartbeatResource + engine: resources.AsyncEngineResource with_raw_response: AsyncDataheraldWithRawResponse with_streaming_response: AsyncDataheraldWithStreamedResponse @@ -362,17 +358,17 @@ def __init__( _strict_response_validation=_strict_response_validation, ) - self.database_connections = database_connections.AsyncDatabaseConnectionsResource(self) - self.finetunings = finetunings.AsyncFinetuningsResource(self) - self.golden_sqls = golden_sqls.AsyncGoldenSqlsResource(self) - self.instructions = instructions.AsyncInstructionsResource(self) - self.generations = generations.AsyncGenerationsResource(self) - self.prompts = prompts.AsyncPromptsResource(self) - self.sql_generations = sql_generations.AsyncSqlGenerationsResource(self) - self.nl_generations = nl_generations.AsyncNlGenerationsResource(self) - self.table_descriptions = table_descriptions.AsyncTableDescriptionsResource(self) - self.heartbeat = heartbeat.AsyncHeartbeatResource(self) - self.engine = engine.AsyncEngineResource(self) + self.database_connections = resources.AsyncDatabaseConnectionsResource(self) + self.finetunings = resources.AsyncFinetuningsResource(self) + self.golden_sqls = resources.AsyncGoldenSqlsResource(self) + self.instructions = resources.AsyncInstructionsResource(self) + self.generations = resources.AsyncGenerationsResource(self) + self.prompts = resources.AsyncPromptsResource(self) + self.sql_generations = resources.AsyncSqlGenerationsResource(self) + self.nl_generations = resources.AsyncNlGenerationsResource(self) + self.table_descriptions = resources.AsyncTableDescriptionsResource(self) + self.heartbeat = resources.AsyncHeartbeatResource(self) + self.engine = resources.AsyncEngineResource(self) self.with_raw_response = AsyncDataheraldWithRawResponse(self) self.with_streaming_response = AsyncDataheraldWithStreamedResponse(self) @@ -485,76 +481,70 @@ def _make_status_error( class DataheraldWithRawResponse: def __init__(self, client: Dataherald) -> None: - self.database_connections = database_connections.DatabaseConnectionsResourceWithRawResponse( - client.database_connections - ) - self.finetunings = finetunings.FinetuningsResourceWithRawResponse(client.finetunings) - self.golden_sqls = golden_sqls.GoldenSqlsResourceWithRawResponse(client.golden_sqls) - 
self.instructions = instructions.InstructionsResourceWithRawResponse(client.instructions) - self.generations = generations.GenerationsResourceWithRawResponse(client.generations) - self.prompts = prompts.PromptsResourceWithRawResponse(client.prompts) - self.sql_generations = sql_generations.SqlGenerationsResourceWithRawResponse(client.sql_generations) - self.nl_generations = nl_generations.NlGenerationsResourceWithRawResponse(client.nl_generations) - self.table_descriptions = table_descriptions.TableDescriptionsResourceWithRawResponse(client.table_descriptions) - self.heartbeat = heartbeat.HeartbeatResourceWithRawResponse(client.heartbeat) - self.engine = engine.EngineResourceWithRawResponse(client.engine) + self.database_connections = resources.DatabaseConnectionsResourceWithRawResponse(client.database_connections) + self.finetunings = resources.FinetuningsResourceWithRawResponse(client.finetunings) + self.golden_sqls = resources.GoldenSqlsResourceWithRawResponse(client.golden_sqls) + self.instructions = resources.InstructionsResourceWithRawResponse(client.instructions) + self.generations = resources.GenerationsResourceWithRawResponse(client.generations) + self.prompts = resources.PromptsResourceWithRawResponse(client.prompts) + self.sql_generations = resources.SqlGenerationsResourceWithRawResponse(client.sql_generations) + self.nl_generations = resources.NlGenerationsResourceWithRawResponse(client.nl_generations) + self.table_descriptions = resources.TableDescriptionsResourceWithRawResponse(client.table_descriptions) + self.heartbeat = resources.HeartbeatResourceWithRawResponse(client.heartbeat) + self.engine = resources.EngineResourceWithRawResponse(client.engine) class AsyncDataheraldWithRawResponse: def __init__(self, client: AsyncDataherald) -> None: - self.database_connections = database_connections.AsyncDatabaseConnectionsResourceWithRawResponse( + self.database_connections = resources.AsyncDatabaseConnectionsResourceWithRawResponse( client.database_connections ) - self.finetunings = finetunings.AsyncFinetuningsResourceWithRawResponse(client.finetunings) - self.golden_sqls = golden_sqls.AsyncGoldenSqlsResourceWithRawResponse(client.golden_sqls) - self.instructions = instructions.AsyncInstructionsResourceWithRawResponse(client.instructions) - self.generations = generations.AsyncGenerationsResourceWithRawResponse(client.generations) - self.prompts = prompts.AsyncPromptsResourceWithRawResponse(client.prompts) - self.sql_generations = sql_generations.AsyncSqlGenerationsResourceWithRawResponse(client.sql_generations) - self.nl_generations = nl_generations.AsyncNlGenerationsResourceWithRawResponse(client.nl_generations) - self.table_descriptions = table_descriptions.AsyncTableDescriptionsResourceWithRawResponse( - client.table_descriptions - ) - self.heartbeat = heartbeat.AsyncHeartbeatResourceWithRawResponse(client.heartbeat) - self.engine = engine.AsyncEngineResourceWithRawResponse(client.engine) + self.finetunings = resources.AsyncFinetuningsResourceWithRawResponse(client.finetunings) + self.golden_sqls = resources.AsyncGoldenSqlsResourceWithRawResponse(client.golden_sqls) + self.instructions = resources.AsyncInstructionsResourceWithRawResponse(client.instructions) + self.generations = resources.AsyncGenerationsResourceWithRawResponse(client.generations) + self.prompts = resources.AsyncPromptsResourceWithRawResponse(client.prompts) + self.sql_generations = resources.AsyncSqlGenerationsResourceWithRawResponse(client.sql_generations) + self.nl_generations = 
resources.AsyncNlGenerationsResourceWithRawResponse(client.nl_generations) + self.table_descriptions = resources.AsyncTableDescriptionsResourceWithRawResponse(client.table_descriptions) + self.heartbeat = resources.AsyncHeartbeatResourceWithRawResponse(client.heartbeat) + self.engine = resources.AsyncEngineResourceWithRawResponse(client.engine) class DataheraldWithStreamedResponse: def __init__(self, client: Dataherald) -> None: - self.database_connections = database_connections.DatabaseConnectionsResourceWithStreamingResponse( + self.database_connections = resources.DatabaseConnectionsResourceWithStreamingResponse( client.database_connections ) - self.finetunings = finetunings.FinetuningsResourceWithStreamingResponse(client.finetunings) - self.golden_sqls = golden_sqls.GoldenSqlsResourceWithStreamingResponse(client.golden_sqls) - self.instructions = instructions.InstructionsResourceWithStreamingResponse(client.instructions) - self.generations = generations.GenerationsResourceWithStreamingResponse(client.generations) - self.prompts = prompts.PromptsResourceWithStreamingResponse(client.prompts) - self.sql_generations = sql_generations.SqlGenerationsResourceWithStreamingResponse(client.sql_generations) - self.nl_generations = nl_generations.NlGenerationsResourceWithStreamingResponse(client.nl_generations) - self.table_descriptions = table_descriptions.TableDescriptionsResourceWithStreamingResponse( - client.table_descriptions - ) - self.heartbeat = heartbeat.HeartbeatResourceWithStreamingResponse(client.heartbeat) - self.engine = engine.EngineResourceWithStreamingResponse(client.engine) + self.finetunings = resources.FinetuningsResourceWithStreamingResponse(client.finetunings) + self.golden_sqls = resources.GoldenSqlsResourceWithStreamingResponse(client.golden_sqls) + self.instructions = resources.InstructionsResourceWithStreamingResponse(client.instructions) + self.generations = resources.GenerationsResourceWithStreamingResponse(client.generations) + self.prompts = resources.PromptsResourceWithStreamingResponse(client.prompts) + self.sql_generations = resources.SqlGenerationsResourceWithStreamingResponse(client.sql_generations) + self.nl_generations = resources.NlGenerationsResourceWithStreamingResponse(client.nl_generations) + self.table_descriptions = resources.TableDescriptionsResourceWithStreamingResponse(client.table_descriptions) + self.heartbeat = resources.HeartbeatResourceWithStreamingResponse(client.heartbeat) + self.engine = resources.EngineResourceWithStreamingResponse(client.engine) class AsyncDataheraldWithStreamedResponse: def __init__(self, client: AsyncDataherald) -> None: - self.database_connections = database_connections.AsyncDatabaseConnectionsResourceWithStreamingResponse( + self.database_connections = resources.AsyncDatabaseConnectionsResourceWithStreamingResponse( client.database_connections ) - self.finetunings = finetunings.AsyncFinetuningsResourceWithStreamingResponse(client.finetunings) - self.golden_sqls = golden_sqls.AsyncGoldenSqlsResourceWithStreamingResponse(client.golden_sqls) - self.instructions = instructions.AsyncInstructionsResourceWithStreamingResponse(client.instructions) - self.generations = generations.AsyncGenerationsResourceWithStreamingResponse(client.generations) - self.prompts = prompts.AsyncPromptsResourceWithStreamingResponse(client.prompts) - self.sql_generations = sql_generations.AsyncSqlGenerationsResourceWithStreamingResponse(client.sql_generations) - self.nl_generations = 
nl_generations.AsyncNlGenerationsResourceWithStreamingResponse(client.nl_generations) - self.table_descriptions = table_descriptions.AsyncTableDescriptionsResourceWithStreamingResponse( + self.finetunings = resources.AsyncFinetuningsResourceWithStreamingResponse(client.finetunings) + self.golden_sqls = resources.AsyncGoldenSqlsResourceWithStreamingResponse(client.golden_sqls) + self.instructions = resources.AsyncInstructionsResourceWithStreamingResponse(client.instructions) + self.generations = resources.AsyncGenerationsResourceWithStreamingResponse(client.generations) + self.prompts = resources.AsyncPromptsResourceWithStreamingResponse(client.prompts) + self.sql_generations = resources.AsyncSqlGenerationsResourceWithStreamingResponse(client.sql_generations) + self.nl_generations = resources.AsyncNlGenerationsResourceWithStreamingResponse(client.nl_generations) + self.table_descriptions = resources.AsyncTableDescriptionsResourceWithStreamingResponse( client.table_descriptions ) - self.heartbeat = heartbeat.AsyncHeartbeatResourceWithStreamingResponse(client.heartbeat) - self.engine = engine.AsyncEngineResourceWithStreamingResponse(client.engine) + self.heartbeat = resources.AsyncHeartbeatResourceWithStreamingResponse(client.heartbeat) + self.engine = resources.AsyncEngineResourceWithStreamingResponse(client.engine) Client = Dataherald From b6bb66d23275d077c6f2defcf076dc9b23c270bf Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 05:10:40 +0000 Subject: [PATCH 20/39] chore(internal): codegen related update (#74) --- README.md | 10 ++ src/dataherald/_client.py | 190 ++++++++++++++++++++------------------ 2 files changed, 110 insertions(+), 90 deletions(-) diff --git a/README.md b/README.md index b655fbf..8556333 100644 --- a/README.md +++ b/README.md @@ -311,6 +311,16 @@ client.with_options(http_client=DefaultHttpxClient(...)) By default the library closes underlying HTTP connections whenever the client is [garbage collected](https://docs.python.org/3/reference/datamodel.html#object.__del__). You can manually close the client using the `.close()` method if desired, or with a context manager that closes when exiting. +```py +from dataherald import Dataherald + +with Dataherald() as client: + # make requests here + ... + +# HTTP client is now closed +``` + ## Versioning This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions, though certain backwards-incompatible changes may be released as minor versions: diff --git a/src/dataherald/_client.py b/src/dataherald/_client.py index b0077d0..38ada33 100644 --- a/src/dataherald/_client.py +++ b/src/dataherald/_client.py @@ -8,7 +8,7 @@ import httpx -from . import resources, _exceptions +from . 
import _exceptions from ._qs import Querystring from ._types import ( NOT_GIVEN, @@ -24,6 +24,7 @@ get_async_library, ) from ._version import __version__ +from .resources import engine, heartbeat, finetunings, generations, golden_sqls, nl_generations, table_descriptions from ._streaming import Stream as Stream, AsyncStream as AsyncStream from ._exceptions import APIStatusError, DataheraldError from ._base_client import ( @@ -31,6 +32,10 @@ SyncAPIClient, AsyncAPIClient, ) +from .resources.prompts import prompts +from .resources.instructions import instructions +from .resources.sql_generations import sql_generations +from .resources.database_connections import database_connections __all__ = [ "ENVIRONMENTS", @@ -38,7 +43,6 @@ "Transport", "ProxiesTypes", "RequestOptions", - "resources", "Dataherald", "AsyncDataherald", "Client", @@ -52,17 +56,17 @@ class Dataherald(SyncAPIClient): - database_connections: resources.DatabaseConnectionsResource - finetunings: resources.FinetuningsResource - golden_sqls: resources.GoldenSqlsResource - instructions: resources.InstructionsResource - generations: resources.GenerationsResource - prompts: resources.PromptsResource - sql_generations: resources.SqlGenerationsResource - nl_generations: resources.NlGenerationsResource - table_descriptions: resources.TableDescriptionsResource - heartbeat: resources.HeartbeatResource - engine: resources.EngineResource + database_connections: database_connections.DatabaseConnectionsResource + finetunings: finetunings.FinetuningsResource + golden_sqls: golden_sqls.GoldenSqlsResource + instructions: instructions.InstructionsResource + generations: generations.GenerationsResource + prompts: prompts.PromptsResource + sql_generations: sql_generations.SqlGenerationsResource + nl_generations: nl_generations.NlGenerationsResource + table_descriptions: table_descriptions.TableDescriptionsResource + heartbeat: heartbeat.HeartbeatResource + engine: engine.EngineResource with_raw_response: DataheraldWithRawResponse with_streaming_response: DataheraldWithStreamedResponse @@ -144,17 +148,17 @@ def __init__( _strict_response_validation=_strict_response_validation, ) - self.database_connections = resources.DatabaseConnectionsResource(self) - self.finetunings = resources.FinetuningsResource(self) - self.golden_sqls = resources.GoldenSqlsResource(self) - self.instructions = resources.InstructionsResource(self) - self.generations = resources.GenerationsResource(self) - self.prompts = resources.PromptsResource(self) - self.sql_generations = resources.SqlGenerationsResource(self) - self.nl_generations = resources.NlGenerationsResource(self) - self.table_descriptions = resources.TableDescriptionsResource(self) - self.heartbeat = resources.HeartbeatResource(self) - self.engine = resources.EngineResource(self) + self.database_connections = database_connections.DatabaseConnectionsResource(self) + self.finetunings = finetunings.FinetuningsResource(self) + self.golden_sqls = golden_sqls.GoldenSqlsResource(self) + self.instructions = instructions.InstructionsResource(self) + self.generations = generations.GenerationsResource(self) + self.prompts = prompts.PromptsResource(self) + self.sql_generations = sql_generations.SqlGenerationsResource(self) + self.nl_generations = nl_generations.NlGenerationsResource(self) + self.table_descriptions = table_descriptions.TableDescriptionsResource(self) + self.heartbeat = heartbeat.HeartbeatResource(self) + self.engine = engine.EngineResource(self) self.with_raw_response = DataheraldWithRawResponse(self) 
self.with_streaming_response = DataheraldWithStreamedResponse(self) @@ -266,17 +270,17 @@ def _make_status_error( class AsyncDataherald(AsyncAPIClient): - database_connections: resources.AsyncDatabaseConnectionsResource - finetunings: resources.AsyncFinetuningsResource - golden_sqls: resources.AsyncGoldenSqlsResource - instructions: resources.AsyncInstructionsResource - generations: resources.AsyncGenerationsResource - prompts: resources.AsyncPromptsResource - sql_generations: resources.AsyncSqlGenerationsResource - nl_generations: resources.AsyncNlGenerationsResource - table_descriptions: resources.AsyncTableDescriptionsResource - heartbeat: resources.AsyncHeartbeatResource - engine: resources.AsyncEngineResource + database_connections: database_connections.AsyncDatabaseConnectionsResource + finetunings: finetunings.AsyncFinetuningsResource + golden_sqls: golden_sqls.AsyncGoldenSqlsResource + instructions: instructions.AsyncInstructionsResource + generations: generations.AsyncGenerationsResource + prompts: prompts.AsyncPromptsResource + sql_generations: sql_generations.AsyncSqlGenerationsResource + nl_generations: nl_generations.AsyncNlGenerationsResource + table_descriptions: table_descriptions.AsyncTableDescriptionsResource + heartbeat: heartbeat.AsyncHeartbeatResource + engine: engine.AsyncEngineResource with_raw_response: AsyncDataheraldWithRawResponse with_streaming_response: AsyncDataheraldWithStreamedResponse @@ -358,17 +362,17 @@ def __init__( _strict_response_validation=_strict_response_validation, ) - self.database_connections = resources.AsyncDatabaseConnectionsResource(self) - self.finetunings = resources.AsyncFinetuningsResource(self) - self.golden_sqls = resources.AsyncGoldenSqlsResource(self) - self.instructions = resources.AsyncInstructionsResource(self) - self.generations = resources.AsyncGenerationsResource(self) - self.prompts = resources.AsyncPromptsResource(self) - self.sql_generations = resources.AsyncSqlGenerationsResource(self) - self.nl_generations = resources.AsyncNlGenerationsResource(self) - self.table_descriptions = resources.AsyncTableDescriptionsResource(self) - self.heartbeat = resources.AsyncHeartbeatResource(self) - self.engine = resources.AsyncEngineResource(self) + self.database_connections = database_connections.AsyncDatabaseConnectionsResource(self) + self.finetunings = finetunings.AsyncFinetuningsResource(self) + self.golden_sqls = golden_sqls.AsyncGoldenSqlsResource(self) + self.instructions = instructions.AsyncInstructionsResource(self) + self.generations = generations.AsyncGenerationsResource(self) + self.prompts = prompts.AsyncPromptsResource(self) + self.sql_generations = sql_generations.AsyncSqlGenerationsResource(self) + self.nl_generations = nl_generations.AsyncNlGenerationsResource(self) + self.table_descriptions = table_descriptions.AsyncTableDescriptionsResource(self) + self.heartbeat = heartbeat.AsyncHeartbeatResource(self) + self.engine = engine.AsyncEngineResource(self) self.with_raw_response = AsyncDataheraldWithRawResponse(self) self.with_streaming_response = AsyncDataheraldWithStreamedResponse(self) @@ -481,70 +485,76 @@ def _make_status_error( class DataheraldWithRawResponse: def __init__(self, client: Dataherald) -> None: - self.database_connections = resources.DatabaseConnectionsResourceWithRawResponse(client.database_connections) - self.finetunings = resources.FinetuningsResourceWithRawResponse(client.finetunings) - self.golden_sqls = resources.GoldenSqlsResourceWithRawResponse(client.golden_sqls) - self.instructions = 
resources.InstructionsResourceWithRawResponse(client.instructions) - self.generations = resources.GenerationsResourceWithRawResponse(client.generations) - self.prompts = resources.PromptsResourceWithRawResponse(client.prompts) - self.sql_generations = resources.SqlGenerationsResourceWithRawResponse(client.sql_generations) - self.nl_generations = resources.NlGenerationsResourceWithRawResponse(client.nl_generations) - self.table_descriptions = resources.TableDescriptionsResourceWithRawResponse(client.table_descriptions) - self.heartbeat = resources.HeartbeatResourceWithRawResponse(client.heartbeat) - self.engine = resources.EngineResourceWithRawResponse(client.engine) + self.database_connections = database_connections.DatabaseConnectionsResourceWithRawResponse( + client.database_connections + ) + self.finetunings = finetunings.FinetuningsResourceWithRawResponse(client.finetunings) + self.golden_sqls = golden_sqls.GoldenSqlsResourceWithRawResponse(client.golden_sqls) + self.instructions = instructions.InstructionsResourceWithRawResponse(client.instructions) + self.generations = generations.GenerationsResourceWithRawResponse(client.generations) + self.prompts = prompts.PromptsResourceWithRawResponse(client.prompts) + self.sql_generations = sql_generations.SqlGenerationsResourceWithRawResponse(client.sql_generations) + self.nl_generations = nl_generations.NlGenerationsResourceWithRawResponse(client.nl_generations) + self.table_descriptions = table_descriptions.TableDescriptionsResourceWithRawResponse(client.table_descriptions) + self.heartbeat = heartbeat.HeartbeatResourceWithRawResponse(client.heartbeat) + self.engine = engine.EngineResourceWithRawResponse(client.engine) class AsyncDataheraldWithRawResponse: def __init__(self, client: AsyncDataherald) -> None: - self.database_connections = resources.AsyncDatabaseConnectionsResourceWithRawResponse( + self.database_connections = database_connections.AsyncDatabaseConnectionsResourceWithRawResponse( client.database_connections ) - self.finetunings = resources.AsyncFinetuningsResourceWithRawResponse(client.finetunings) - self.golden_sqls = resources.AsyncGoldenSqlsResourceWithRawResponse(client.golden_sqls) - self.instructions = resources.AsyncInstructionsResourceWithRawResponse(client.instructions) - self.generations = resources.AsyncGenerationsResourceWithRawResponse(client.generations) - self.prompts = resources.AsyncPromptsResourceWithRawResponse(client.prompts) - self.sql_generations = resources.AsyncSqlGenerationsResourceWithRawResponse(client.sql_generations) - self.nl_generations = resources.AsyncNlGenerationsResourceWithRawResponse(client.nl_generations) - self.table_descriptions = resources.AsyncTableDescriptionsResourceWithRawResponse(client.table_descriptions) - self.heartbeat = resources.AsyncHeartbeatResourceWithRawResponse(client.heartbeat) - self.engine = resources.AsyncEngineResourceWithRawResponse(client.engine) + self.finetunings = finetunings.AsyncFinetuningsResourceWithRawResponse(client.finetunings) + self.golden_sqls = golden_sqls.AsyncGoldenSqlsResourceWithRawResponse(client.golden_sqls) + self.instructions = instructions.AsyncInstructionsResourceWithRawResponse(client.instructions) + self.generations = generations.AsyncGenerationsResourceWithRawResponse(client.generations) + self.prompts = prompts.AsyncPromptsResourceWithRawResponse(client.prompts) + self.sql_generations = sql_generations.AsyncSqlGenerationsResourceWithRawResponse(client.sql_generations) + self.nl_generations = 
nl_generations.AsyncNlGenerationsResourceWithRawResponse(client.nl_generations) + self.table_descriptions = table_descriptions.AsyncTableDescriptionsResourceWithRawResponse( + client.table_descriptions + ) + self.heartbeat = heartbeat.AsyncHeartbeatResourceWithRawResponse(client.heartbeat) + self.engine = engine.AsyncEngineResourceWithRawResponse(client.engine) class DataheraldWithStreamedResponse: def __init__(self, client: Dataherald) -> None: - self.database_connections = resources.DatabaseConnectionsResourceWithStreamingResponse( + self.database_connections = database_connections.DatabaseConnectionsResourceWithStreamingResponse( client.database_connections ) - self.finetunings = resources.FinetuningsResourceWithStreamingResponse(client.finetunings) - self.golden_sqls = resources.GoldenSqlsResourceWithStreamingResponse(client.golden_sqls) - self.instructions = resources.InstructionsResourceWithStreamingResponse(client.instructions) - self.generations = resources.GenerationsResourceWithStreamingResponse(client.generations) - self.prompts = resources.PromptsResourceWithStreamingResponse(client.prompts) - self.sql_generations = resources.SqlGenerationsResourceWithStreamingResponse(client.sql_generations) - self.nl_generations = resources.NlGenerationsResourceWithStreamingResponse(client.nl_generations) - self.table_descriptions = resources.TableDescriptionsResourceWithStreamingResponse(client.table_descriptions) - self.heartbeat = resources.HeartbeatResourceWithStreamingResponse(client.heartbeat) - self.engine = resources.EngineResourceWithStreamingResponse(client.engine) + self.finetunings = finetunings.FinetuningsResourceWithStreamingResponse(client.finetunings) + self.golden_sqls = golden_sqls.GoldenSqlsResourceWithStreamingResponse(client.golden_sqls) + self.instructions = instructions.InstructionsResourceWithStreamingResponse(client.instructions) + self.generations = generations.GenerationsResourceWithStreamingResponse(client.generations) + self.prompts = prompts.PromptsResourceWithStreamingResponse(client.prompts) + self.sql_generations = sql_generations.SqlGenerationsResourceWithStreamingResponse(client.sql_generations) + self.nl_generations = nl_generations.NlGenerationsResourceWithStreamingResponse(client.nl_generations) + self.table_descriptions = table_descriptions.TableDescriptionsResourceWithStreamingResponse( + client.table_descriptions + ) + self.heartbeat = heartbeat.HeartbeatResourceWithStreamingResponse(client.heartbeat) + self.engine = engine.EngineResourceWithStreamingResponse(client.engine) class AsyncDataheraldWithStreamedResponse: def __init__(self, client: AsyncDataherald) -> None: - self.database_connections = resources.AsyncDatabaseConnectionsResourceWithStreamingResponse( + self.database_connections = database_connections.AsyncDatabaseConnectionsResourceWithStreamingResponse( client.database_connections ) - self.finetunings = resources.AsyncFinetuningsResourceWithStreamingResponse(client.finetunings) - self.golden_sqls = resources.AsyncGoldenSqlsResourceWithStreamingResponse(client.golden_sqls) - self.instructions = resources.AsyncInstructionsResourceWithStreamingResponse(client.instructions) - self.generations = resources.AsyncGenerationsResourceWithStreamingResponse(client.generations) - self.prompts = resources.AsyncPromptsResourceWithStreamingResponse(client.prompts) - self.sql_generations = resources.AsyncSqlGenerationsResourceWithStreamingResponse(client.sql_generations) - self.nl_generations = 
resources.AsyncNlGenerationsResourceWithStreamingResponse(client.nl_generations) - self.table_descriptions = resources.AsyncTableDescriptionsResourceWithStreamingResponse( + self.finetunings = finetunings.AsyncFinetuningsResourceWithStreamingResponse(client.finetunings) + self.golden_sqls = golden_sqls.AsyncGoldenSqlsResourceWithStreamingResponse(client.golden_sqls) + self.instructions = instructions.AsyncInstructionsResourceWithStreamingResponse(client.instructions) + self.generations = generations.AsyncGenerationsResourceWithStreamingResponse(client.generations) + self.prompts = prompts.AsyncPromptsResourceWithStreamingResponse(client.prompts) + self.sql_generations = sql_generations.AsyncSqlGenerationsResourceWithStreamingResponse(client.sql_generations) + self.nl_generations = nl_generations.AsyncNlGenerationsResourceWithStreamingResponse(client.nl_generations) + self.table_descriptions = table_descriptions.AsyncTableDescriptionsResourceWithStreamingResponse( client.table_descriptions ) - self.heartbeat = resources.AsyncHeartbeatResourceWithStreamingResponse(client.heartbeat) - self.engine = resources.AsyncEngineResourceWithStreamingResponse(client.engine) + self.heartbeat = heartbeat.AsyncHeartbeatResourceWithStreamingResponse(client.heartbeat) + self.engine = engine.AsyncEngineResourceWithStreamingResponse(client.engine) Client = Dataherald From 4b98a9631848d80f30c22aee9c3c0817770aee28 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 05:14:24 +0000 Subject: [PATCH 21/39] chore(internal): codegen related update (#75) --- README.md | 10 -- src/dataherald/_client.py | 190 ++++++++++++++++++-------------------- 2 files changed, 90 insertions(+), 110 deletions(-) diff --git a/README.md b/README.md index 8556333..b655fbf 100644 --- a/README.md +++ b/README.md @@ -311,16 +311,6 @@ client.with_options(http_client=DefaultHttpxClient(...)) By default the library closes underlying HTTP connections whenever the client is [garbage collected](https://docs.python.org/3/reference/datamodel.html#object.__del__). You can manually close the client using the `.close()` method if desired, or with a context manager that closes when exiting. -```py -from dataherald import Dataherald - -with Dataherald() as client: - # make requests here - ... - -# HTTP client is now closed -``` - ## Versioning This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions, though certain backwards-incompatible changes may be released as minor versions: diff --git a/src/dataherald/_client.py b/src/dataherald/_client.py index 38ada33..b0077d0 100644 --- a/src/dataherald/_client.py +++ b/src/dataherald/_client.py @@ -8,7 +8,7 @@ import httpx -from . import _exceptions +from . 
import resources, _exceptions from ._qs import Querystring from ._types import ( NOT_GIVEN, @@ -24,7 +24,6 @@ get_async_library, ) from ._version import __version__ -from .resources import engine, heartbeat, finetunings, generations, golden_sqls, nl_generations, table_descriptions from ._streaming import Stream as Stream, AsyncStream as AsyncStream from ._exceptions import APIStatusError, DataheraldError from ._base_client import ( @@ -32,10 +31,6 @@ SyncAPIClient, AsyncAPIClient, ) -from .resources.prompts import prompts -from .resources.instructions import instructions -from .resources.sql_generations import sql_generations -from .resources.database_connections import database_connections __all__ = [ "ENVIRONMENTS", @@ -43,6 +38,7 @@ "Transport", "ProxiesTypes", "RequestOptions", + "resources", "Dataherald", "AsyncDataherald", "Client", @@ -56,17 +52,17 @@ class Dataherald(SyncAPIClient): - database_connections: database_connections.DatabaseConnectionsResource - finetunings: finetunings.FinetuningsResource - golden_sqls: golden_sqls.GoldenSqlsResource - instructions: instructions.InstructionsResource - generations: generations.GenerationsResource - prompts: prompts.PromptsResource - sql_generations: sql_generations.SqlGenerationsResource - nl_generations: nl_generations.NlGenerationsResource - table_descriptions: table_descriptions.TableDescriptionsResource - heartbeat: heartbeat.HeartbeatResource - engine: engine.EngineResource + database_connections: resources.DatabaseConnectionsResource + finetunings: resources.FinetuningsResource + golden_sqls: resources.GoldenSqlsResource + instructions: resources.InstructionsResource + generations: resources.GenerationsResource + prompts: resources.PromptsResource + sql_generations: resources.SqlGenerationsResource + nl_generations: resources.NlGenerationsResource + table_descriptions: resources.TableDescriptionsResource + heartbeat: resources.HeartbeatResource + engine: resources.EngineResource with_raw_response: DataheraldWithRawResponse with_streaming_response: DataheraldWithStreamedResponse @@ -148,17 +144,17 @@ def __init__( _strict_response_validation=_strict_response_validation, ) - self.database_connections = database_connections.DatabaseConnectionsResource(self) - self.finetunings = finetunings.FinetuningsResource(self) - self.golden_sqls = golden_sqls.GoldenSqlsResource(self) - self.instructions = instructions.InstructionsResource(self) - self.generations = generations.GenerationsResource(self) - self.prompts = prompts.PromptsResource(self) - self.sql_generations = sql_generations.SqlGenerationsResource(self) - self.nl_generations = nl_generations.NlGenerationsResource(self) - self.table_descriptions = table_descriptions.TableDescriptionsResource(self) - self.heartbeat = heartbeat.HeartbeatResource(self) - self.engine = engine.EngineResource(self) + self.database_connections = resources.DatabaseConnectionsResource(self) + self.finetunings = resources.FinetuningsResource(self) + self.golden_sqls = resources.GoldenSqlsResource(self) + self.instructions = resources.InstructionsResource(self) + self.generations = resources.GenerationsResource(self) + self.prompts = resources.PromptsResource(self) + self.sql_generations = resources.SqlGenerationsResource(self) + self.nl_generations = resources.NlGenerationsResource(self) + self.table_descriptions = resources.TableDescriptionsResource(self) + self.heartbeat = resources.HeartbeatResource(self) + self.engine = resources.EngineResource(self) self.with_raw_response = DataheraldWithRawResponse(self) 
self.with_streaming_response = DataheraldWithStreamedResponse(self) @@ -270,17 +266,17 @@ def _make_status_error( class AsyncDataherald(AsyncAPIClient): - database_connections: database_connections.AsyncDatabaseConnectionsResource - finetunings: finetunings.AsyncFinetuningsResource - golden_sqls: golden_sqls.AsyncGoldenSqlsResource - instructions: instructions.AsyncInstructionsResource - generations: generations.AsyncGenerationsResource - prompts: prompts.AsyncPromptsResource - sql_generations: sql_generations.AsyncSqlGenerationsResource - nl_generations: nl_generations.AsyncNlGenerationsResource - table_descriptions: table_descriptions.AsyncTableDescriptionsResource - heartbeat: heartbeat.AsyncHeartbeatResource - engine: engine.AsyncEngineResource + database_connections: resources.AsyncDatabaseConnectionsResource + finetunings: resources.AsyncFinetuningsResource + golden_sqls: resources.AsyncGoldenSqlsResource + instructions: resources.AsyncInstructionsResource + generations: resources.AsyncGenerationsResource + prompts: resources.AsyncPromptsResource + sql_generations: resources.AsyncSqlGenerationsResource + nl_generations: resources.AsyncNlGenerationsResource + table_descriptions: resources.AsyncTableDescriptionsResource + heartbeat: resources.AsyncHeartbeatResource + engine: resources.AsyncEngineResource with_raw_response: AsyncDataheraldWithRawResponse with_streaming_response: AsyncDataheraldWithStreamedResponse @@ -362,17 +358,17 @@ def __init__( _strict_response_validation=_strict_response_validation, ) - self.database_connections = database_connections.AsyncDatabaseConnectionsResource(self) - self.finetunings = finetunings.AsyncFinetuningsResource(self) - self.golden_sqls = golden_sqls.AsyncGoldenSqlsResource(self) - self.instructions = instructions.AsyncInstructionsResource(self) - self.generations = generations.AsyncGenerationsResource(self) - self.prompts = prompts.AsyncPromptsResource(self) - self.sql_generations = sql_generations.AsyncSqlGenerationsResource(self) - self.nl_generations = nl_generations.AsyncNlGenerationsResource(self) - self.table_descriptions = table_descriptions.AsyncTableDescriptionsResource(self) - self.heartbeat = heartbeat.AsyncHeartbeatResource(self) - self.engine = engine.AsyncEngineResource(self) + self.database_connections = resources.AsyncDatabaseConnectionsResource(self) + self.finetunings = resources.AsyncFinetuningsResource(self) + self.golden_sqls = resources.AsyncGoldenSqlsResource(self) + self.instructions = resources.AsyncInstructionsResource(self) + self.generations = resources.AsyncGenerationsResource(self) + self.prompts = resources.AsyncPromptsResource(self) + self.sql_generations = resources.AsyncSqlGenerationsResource(self) + self.nl_generations = resources.AsyncNlGenerationsResource(self) + self.table_descriptions = resources.AsyncTableDescriptionsResource(self) + self.heartbeat = resources.AsyncHeartbeatResource(self) + self.engine = resources.AsyncEngineResource(self) self.with_raw_response = AsyncDataheraldWithRawResponse(self) self.with_streaming_response = AsyncDataheraldWithStreamedResponse(self) @@ -485,76 +481,70 @@ def _make_status_error( class DataheraldWithRawResponse: def __init__(self, client: Dataherald) -> None: - self.database_connections = database_connections.DatabaseConnectionsResourceWithRawResponse( - client.database_connections - ) - self.finetunings = finetunings.FinetuningsResourceWithRawResponse(client.finetunings) - self.golden_sqls = golden_sqls.GoldenSqlsResourceWithRawResponse(client.golden_sqls) - 
self.instructions = instructions.InstructionsResourceWithRawResponse(client.instructions) - self.generations = generations.GenerationsResourceWithRawResponse(client.generations) - self.prompts = prompts.PromptsResourceWithRawResponse(client.prompts) - self.sql_generations = sql_generations.SqlGenerationsResourceWithRawResponse(client.sql_generations) - self.nl_generations = nl_generations.NlGenerationsResourceWithRawResponse(client.nl_generations) - self.table_descriptions = table_descriptions.TableDescriptionsResourceWithRawResponse(client.table_descriptions) - self.heartbeat = heartbeat.HeartbeatResourceWithRawResponse(client.heartbeat) - self.engine = engine.EngineResourceWithRawResponse(client.engine) + self.database_connections = resources.DatabaseConnectionsResourceWithRawResponse(client.database_connections) + self.finetunings = resources.FinetuningsResourceWithRawResponse(client.finetunings) + self.golden_sqls = resources.GoldenSqlsResourceWithRawResponse(client.golden_sqls) + self.instructions = resources.InstructionsResourceWithRawResponse(client.instructions) + self.generations = resources.GenerationsResourceWithRawResponse(client.generations) + self.prompts = resources.PromptsResourceWithRawResponse(client.prompts) + self.sql_generations = resources.SqlGenerationsResourceWithRawResponse(client.sql_generations) + self.nl_generations = resources.NlGenerationsResourceWithRawResponse(client.nl_generations) + self.table_descriptions = resources.TableDescriptionsResourceWithRawResponse(client.table_descriptions) + self.heartbeat = resources.HeartbeatResourceWithRawResponse(client.heartbeat) + self.engine = resources.EngineResourceWithRawResponse(client.engine) class AsyncDataheraldWithRawResponse: def __init__(self, client: AsyncDataherald) -> None: - self.database_connections = database_connections.AsyncDatabaseConnectionsResourceWithRawResponse( + self.database_connections = resources.AsyncDatabaseConnectionsResourceWithRawResponse( client.database_connections ) - self.finetunings = finetunings.AsyncFinetuningsResourceWithRawResponse(client.finetunings) - self.golden_sqls = golden_sqls.AsyncGoldenSqlsResourceWithRawResponse(client.golden_sqls) - self.instructions = instructions.AsyncInstructionsResourceWithRawResponse(client.instructions) - self.generations = generations.AsyncGenerationsResourceWithRawResponse(client.generations) - self.prompts = prompts.AsyncPromptsResourceWithRawResponse(client.prompts) - self.sql_generations = sql_generations.AsyncSqlGenerationsResourceWithRawResponse(client.sql_generations) - self.nl_generations = nl_generations.AsyncNlGenerationsResourceWithRawResponse(client.nl_generations) - self.table_descriptions = table_descriptions.AsyncTableDescriptionsResourceWithRawResponse( - client.table_descriptions - ) - self.heartbeat = heartbeat.AsyncHeartbeatResourceWithRawResponse(client.heartbeat) - self.engine = engine.AsyncEngineResourceWithRawResponse(client.engine) + self.finetunings = resources.AsyncFinetuningsResourceWithRawResponse(client.finetunings) + self.golden_sqls = resources.AsyncGoldenSqlsResourceWithRawResponse(client.golden_sqls) + self.instructions = resources.AsyncInstructionsResourceWithRawResponse(client.instructions) + self.generations = resources.AsyncGenerationsResourceWithRawResponse(client.generations) + self.prompts = resources.AsyncPromptsResourceWithRawResponse(client.prompts) + self.sql_generations = resources.AsyncSqlGenerationsResourceWithRawResponse(client.sql_generations) + self.nl_generations = 
resources.AsyncNlGenerationsResourceWithRawResponse(client.nl_generations) + self.table_descriptions = resources.AsyncTableDescriptionsResourceWithRawResponse(client.table_descriptions) + self.heartbeat = resources.AsyncHeartbeatResourceWithRawResponse(client.heartbeat) + self.engine = resources.AsyncEngineResourceWithRawResponse(client.engine) class DataheraldWithStreamedResponse: def __init__(self, client: Dataherald) -> None: - self.database_connections = database_connections.DatabaseConnectionsResourceWithStreamingResponse( + self.database_connections = resources.DatabaseConnectionsResourceWithStreamingResponse( client.database_connections ) - self.finetunings = finetunings.FinetuningsResourceWithStreamingResponse(client.finetunings) - self.golden_sqls = golden_sqls.GoldenSqlsResourceWithStreamingResponse(client.golden_sqls) - self.instructions = instructions.InstructionsResourceWithStreamingResponse(client.instructions) - self.generations = generations.GenerationsResourceWithStreamingResponse(client.generations) - self.prompts = prompts.PromptsResourceWithStreamingResponse(client.prompts) - self.sql_generations = sql_generations.SqlGenerationsResourceWithStreamingResponse(client.sql_generations) - self.nl_generations = nl_generations.NlGenerationsResourceWithStreamingResponse(client.nl_generations) - self.table_descriptions = table_descriptions.TableDescriptionsResourceWithStreamingResponse( - client.table_descriptions - ) - self.heartbeat = heartbeat.HeartbeatResourceWithStreamingResponse(client.heartbeat) - self.engine = engine.EngineResourceWithStreamingResponse(client.engine) + self.finetunings = resources.FinetuningsResourceWithStreamingResponse(client.finetunings) + self.golden_sqls = resources.GoldenSqlsResourceWithStreamingResponse(client.golden_sqls) + self.instructions = resources.InstructionsResourceWithStreamingResponse(client.instructions) + self.generations = resources.GenerationsResourceWithStreamingResponse(client.generations) + self.prompts = resources.PromptsResourceWithStreamingResponse(client.prompts) + self.sql_generations = resources.SqlGenerationsResourceWithStreamingResponse(client.sql_generations) + self.nl_generations = resources.NlGenerationsResourceWithStreamingResponse(client.nl_generations) + self.table_descriptions = resources.TableDescriptionsResourceWithStreamingResponse(client.table_descriptions) + self.heartbeat = resources.HeartbeatResourceWithStreamingResponse(client.heartbeat) + self.engine = resources.EngineResourceWithStreamingResponse(client.engine) class AsyncDataheraldWithStreamedResponse: def __init__(self, client: AsyncDataherald) -> None: - self.database_connections = database_connections.AsyncDatabaseConnectionsResourceWithStreamingResponse( + self.database_connections = resources.AsyncDatabaseConnectionsResourceWithStreamingResponse( client.database_connections ) - self.finetunings = finetunings.AsyncFinetuningsResourceWithStreamingResponse(client.finetunings) - self.golden_sqls = golden_sqls.AsyncGoldenSqlsResourceWithStreamingResponse(client.golden_sqls) - self.instructions = instructions.AsyncInstructionsResourceWithStreamingResponse(client.instructions) - self.generations = generations.AsyncGenerationsResourceWithStreamingResponse(client.generations) - self.prompts = prompts.AsyncPromptsResourceWithStreamingResponse(client.prompts) - self.sql_generations = sql_generations.AsyncSqlGenerationsResourceWithStreamingResponse(client.sql_generations) - self.nl_generations = 
nl_generations.AsyncNlGenerationsResourceWithStreamingResponse(client.nl_generations) - self.table_descriptions = table_descriptions.AsyncTableDescriptionsResourceWithStreamingResponse( + self.finetunings = resources.AsyncFinetuningsResourceWithStreamingResponse(client.finetunings) + self.golden_sqls = resources.AsyncGoldenSqlsResourceWithStreamingResponse(client.golden_sqls) + self.instructions = resources.AsyncInstructionsResourceWithStreamingResponse(client.instructions) + self.generations = resources.AsyncGenerationsResourceWithStreamingResponse(client.generations) + self.prompts = resources.AsyncPromptsResourceWithStreamingResponse(client.prompts) + self.sql_generations = resources.AsyncSqlGenerationsResourceWithStreamingResponse(client.sql_generations) + self.nl_generations = resources.AsyncNlGenerationsResourceWithStreamingResponse(client.nl_generations) + self.table_descriptions = resources.AsyncTableDescriptionsResourceWithStreamingResponse( client.table_descriptions ) - self.heartbeat = heartbeat.AsyncHeartbeatResourceWithStreamingResponse(client.heartbeat) - self.engine = engine.AsyncEngineResourceWithStreamingResponse(client.engine) + self.heartbeat = resources.AsyncHeartbeatResourceWithStreamingResponse(client.heartbeat) + self.engine = resources.AsyncEngineResourceWithStreamingResponse(client.engine) Client = Dataherald From 2ab650352fdc1f7a49f0bbd553b53ad47a81dce7 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 05:25:17 +0000 Subject: [PATCH 22/39] chore(internal): codegen related update (#76) --- README.md | 10 ++ src/dataherald/_client.py | 190 ++++++++++++++++++++------------------ 2 files changed, 110 insertions(+), 90 deletions(-) diff --git a/README.md b/README.md index b655fbf..8556333 100644 --- a/README.md +++ b/README.md @@ -311,6 +311,16 @@ client.with_options(http_client=DefaultHttpxClient(...)) By default the library closes underlying HTTP connections whenever the client is [garbage collected](https://docs.python.org/3/reference/datamodel.html#object.__del__). You can manually close the client using the `.close()` method if desired, or with a context manager that closes when exiting. +```py +from dataherald import Dataherald + +with Dataherald() as client: + # make requests here + ... + +# HTTP client is now closed +``` + ## Versioning This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions, though certain backwards-incompatible changes may be released as minor versions: diff --git a/src/dataherald/_client.py b/src/dataherald/_client.py index b0077d0..38ada33 100644 --- a/src/dataherald/_client.py +++ b/src/dataherald/_client.py @@ -8,7 +8,7 @@ import httpx -from . import resources, _exceptions +from . 
import _exceptions from ._qs import Querystring from ._types import ( NOT_GIVEN, @@ -24,6 +24,7 @@ get_async_library, ) from ._version import __version__ +from .resources import engine, heartbeat, finetunings, generations, golden_sqls, nl_generations, table_descriptions from ._streaming import Stream as Stream, AsyncStream as AsyncStream from ._exceptions import APIStatusError, DataheraldError from ._base_client import ( @@ -31,6 +32,10 @@ SyncAPIClient, AsyncAPIClient, ) +from .resources.prompts import prompts +from .resources.instructions import instructions +from .resources.sql_generations import sql_generations +from .resources.database_connections import database_connections __all__ = [ "ENVIRONMENTS", @@ -38,7 +43,6 @@ "Transport", "ProxiesTypes", "RequestOptions", - "resources", "Dataherald", "AsyncDataherald", "Client", @@ -52,17 +56,17 @@ class Dataherald(SyncAPIClient): - database_connections: resources.DatabaseConnectionsResource - finetunings: resources.FinetuningsResource - golden_sqls: resources.GoldenSqlsResource - instructions: resources.InstructionsResource - generations: resources.GenerationsResource - prompts: resources.PromptsResource - sql_generations: resources.SqlGenerationsResource - nl_generations: resources.NlGenerationsResource - table_descriptions: resources.TableDescriptionsResource - heartbeat: resources.HeartbeatResource - engine: resources.EngineResource + database_connections: database_connections.DatabaseConnectionsResource + finetunings: finetunings.FinetuningsResource + golden_sqls: golden_sqls.GoldenSqlsResource + instructions: instructions.InstructionsResource + generations: generations.GenerationsResource + prompts: prompts.PromptsResource + sql_generations: sql_generations.SqlGenerationsResource + nl_generations: nl_generations.NlGenerationsResource + table_descriptions: table_descriptions.TableDescriptionsResource + heartbeat: heartbeat.HeartbeatResource + engine: engine.EngineResource with_raw_response: DataheraldWithRawResponse with_streaming_response: DataheraldWithStreamedResponse @@ -144,17 +148,17 @@ def __init__( _strict_response_validation=_strict_response_validation, ) - self.database_connections = resources.DatabaseConnectionsResource(self) - self.finetunings = resources.FinetuningsResource(self) - self.golden_sqls = resources.GoldenSqlsResource(self) - self.instructions = resources.InstructionsResource(self) - self.generations = resources.GenerationsResource(self) - self.prompts = resources.PromptsResource(self) - self.sql_generations = resources.SqlGenerationsResource(self) - self.nl_generations = resources.NlGenerationsResource(self) - self.table_descriptions = resources.TableDescriptionsResource(self) - self.heartbeat = resources.HeartbeatResource(self) - self.engine = resources.EngineResource(self) + self.database_connections = database_connections.DatabaseConnectionsResource(self) + self.finetunings = finetunings.FinetuningsResource(self) + self.golden_sqls = golden_sqls.GoldenSqlsResource(self) + self.instructions = instructions.InstructionsResource(self) + self.generations = generations.GenerationsResource(self) + self.prompts = prompts.PromptsResource(self) + self.sql_generations = sql_generations.SqlGenerationsResource(self) + self.nl_generations = nl_generations.NlGenerationsResource(self) + self.table_descriptions = table_descriptions.TableDescriptionsResource(self) + self.heartbeat = heartbeat.HeartbeatResource(self) + self.engine = engine.EngineResource(self) self.with_raw_response = DataheraldWithRawResponse(self) 
self.with_streaming_response = DataheraldWithStreamedResponse(self) @@ -266,17 +270,17 @@ def _make_status_error( class AsyncDataherald(AsyncAPIClient): - database_connections: resources.AsyncDatabaseConnectionsResource - finetunings: resources.AsyncFinetuningsResource - golden_sqls: resources.AsyncGoldenSqlsResource - instructions: resources.AsyncInstructionsResource - generations: resources.AsyncGenerationsResource - prompts: resources.AsyncPromptsResource - sql_generations: resources.AsyncSqlGenerationsResource - nl_generations: resources.AsyncNlGenerationsResource - table_descriptions: resources.AsyncTableDescriptionsResource - heartbeat: resources.AsyncHeartbeatResource - engine: resources.AsyncEngineResource + database_connections: database_connections.AsyncDatabaseConnectionsResource + finetunings: finetunings.AsyncFinetuningsResource + golden_sqls: golden_sqls.AsyncGoldenSqlsResource + instructions: instructions.AsyncInstructionsResource + generations: generations.AsyncGenerationsResource + prompts: prompts.AsyncPromptsResource + sql_generations: sql_generations.AsyncSqlGenerationsResource + nl_generations: nl_generations.AsyncNlGenerationsResource + table_descriptions: table_descriptions.AsyncTableDescriptionsResource + heartbeat: heartbeat.AsyncHeartbeatResource + engine: engine.AsyncEngineResource with_raw_response: AsyncDataheraldWithRawResponse with_streaming_response: AsyncDataheraldWithStreamedResponse @@ -358,17 +362,17 @@ def __init__( _strict_response_validation=_strict_response_validation, ) - self.database_connections = resources.AsyncDatabaseConnectionsResource(self) - self.finetunings = resources.AsyncFinetuningsResource(self) - self.golden_sqls = resources.AsyncGoldenSqlsResource(self) - self.instructions = resources.AsyncInstructionsResource(self) - self.generations = resources.AsyncGenerationsResource(self) - self.prompts = resources.AsyncPromptsResource(self) - self.sql_generations = resources.AsyncSqlGenerationsResource(self) - self.nl_generations = resources.AsyncNlGenerationsResource(self) - self.table_descriptions = resources.AsyncTableDescriptionsResource(self) - self.heartbeat = resources.AsyncHeartbeatResource(self) - self.engine = resources.AsyncEngineResource(self) + self.database_connections = database_connections.AsyncDatabaseConnectionsResource(self) + self.finetunings = finetunings.AsyncFinetuningsResource(self) + self.golden_sqls = golden_sqls.AsyncGoldenSqlsResource(self) + self.instructions = instructions.AsyncInstructionsResource(self) + self.generations = generations.AsyncGenerationsResource(self) + self.prompts = prompts.AsyncPromptsResource(self) + self.sql_generations = sql_generations.AsyncSqlGenerationsResource(self) + self.nl_generations = nl_generations.AsyncNlGenerationsResource(self) + self.table_descriptions = table_descriptions.AsyncTableDescriptionsResource(self) + self.heartbeat = heartbeat.AsyncHeartbeatResource(self) + self.engine = engine.AsyncEngineResource(self) self.with_raw_response = AsyncDataheraldWithRawResponse(self) self.with_streaming_response = AsyncDataheraldWithStreamedResponse(self) @@ -481,70 +485,76 @@ def _make_status_error( class DataheraldWithRawResponse: def __init__(self, client: Dataherald) -> None: - self.database_connections = resources.DatabaseConnectionsResourceWithRawResponse(client.database_connections) - self.finetunings = resources.FinetuningsResourceWithRawResponse(client.finetunings) - self.golden_sqls = resources.GoldenSqlsResourceWithRawResponse(client.golden_sqls) - self.instructions = 
resources.InstructionsResourceWithRawResponse(client.instructions) - self.generations = resources.GenerationsResourceWithRawResponse(client.generations) - self.prompts = resources.PromptsResourceWithRawResponse(client.prompts) - self.sql_generations = resources.SqlGenerationsResourceWithRawResponse(client.sql_generations) - self.nl_generations = resources.NlGenerationsResourceWithRawResponse(client.nl_generations) - self.table_descriptions = resources.TableDescriptionsResourceWithRawResponse(client.table_descriptions) - self.heartbeat = resources.HeartbeatResourceWithRawResponse(client.heartbeat) - self.engine = resources.EngineResourceWithRawResponse(client.engine) + self.database_connections = database_connections.DatabaseConnectionsResourceWithRawResponse( + client.database_connections + ) + self.finetunings = finetunings.FinetuningsResourceWithRawResponse(client.finetunings) + self.golden_sqls = golden_sqls.GoldenSqlsResourceWithRawResponse(client.golden_sqls) + self.instructions = instructions.InstructionsResourceWithRawResponse(client.instructions) + self.generations = generations.GenerationsResourceWithRawResponse(client.generations) + self.prompts = prompts.PromptsResourceWithRawResponse(client.prompts) + self.sql_generations = sql_generations.SqlGenerationsResourceWithRawResponse(client.sql_generations) + self.nl_generations = nl_generations.NlGenerationsResourceWithRawResponse(client.nl_generations) + self.table_descriptions = table_descriptions.TableDescriptionsResourceWithRawResponse(client.table_descriptions) + self.heartbeat = heartbeat.HeartbeatResourceWithRawResponse(client.heartbeat) + self.engine = engine.EngineResourceWithRawResponse(client.engine) class AsyncDataheraldWithRawResponse: def __init__(self, client: AsyncDataherald) -> None: - self.database_connections = resources.AsyncDatabaseConnectionsResourceWithRawResponse( + self.database_connections = database_connections.AsyncDatabaseConnectionsResourceWithRawResponse( client.database_connections ) - self.finetunings = resources.AsyncFinetuningsResourceWithRawResponse(client.finetunings) - self.golden_sqls = resources.AsyncGoldenSqlsResourceWithRawResponse(client.golden_sqls) - self.instructions = resources.AsyncInstructionsResourceWithRawResponse(client.instructions) - self.generations = resources.AsyncGenerationsResourceWithRawResponse(client.generations) - self.prompts = resources.AsyncPromptsResourceWithRawResponse(client.prompts) - self.sql_generations = resources.AsyncSqlGenerationsResourceWithRawResponse(client.sql_generations) - self.nl_generations = resources.AsyncNlGenerationsResourceWithRawResponse(client.nl_generations) - self.table_descriptions = resources.AsyncTableDescriptionsResourceWithRawResponse(client.table_descriptions) - self.heartbeat = resources.AsyncHeartbeatResourceWithRawResponse(client.heartbeat) - self.engine = resources.AsyncEngineResourceWithRawResponse(client.engine) + self.finetunings = finetunings.AsyncFinetuningsResourceWithRawResponse(client.finetunings) + self.golden_sqls = golden_sqls.AsyncGoldenSqlsResourceWithRawResponse(client.golden_sqls) + self.instructions = instructions.AsyncInstructionsResourceWithRawResponse(client.instructions) + self.generations = generations.AsyncGenerationsResourceWithRawResponse(client.generations) + self.prompts = prompts.AsyncPromptsResourceWithRawResponse(client.prompts) + self.sql_generations = sql_generations.AsyncSqlGenerationsResourceWithRawResponse(client.sql_generations) + self.nl_generations = 
nl_generations.AsyncNlGenerationsResourceWithRawResponse(client.nl_generations) + self.table_descriptions = table_descriptions.AsyncTableDescriptionsResourceWithRawResponse( + client.table_descriptions + ) + self.heartbeat = heartbeat.AsyncHeartbeatResourceWithRawResponse(client.heartbeat) + self.engine = engine.AsyncEngineResourceWithRawResponse(client.engine) class DataheraldWithStreamedResponse: def __init__(self, client: Dataherald) -> None: - self.database_connections = resources.DatabaseConnectionsResourceWithStreamingResponse( + self.database_connections = database_connections.DatabaseConnectionsResourceWithStreamingResponse( client.database_connections ) - self.finetunings = resources.FinetuningsResourceWithStreamingResponse(client.finetunings) - self.golden_sqls = resources.GoldenSqlsResourceWithStreamingResponse(client.golden_sqls) - self.instructions = resources.InstructionsResourceWithStreamingResponse(client.instructions) - self.generations = resources.GenerationsResourceWithStreamingResponse(client.generations) - self.prompts = resources.PromptsResourceWithStreamingResponse(client.prompts) - self.sql_generations = resources.SqlGenerationsResourceWithStreamingResponse(client.sql_generations) - self.nl_generations = resources.NlGenerationsResourceWithStreamingResponse(client.nl_generations) - self.table_descriptions = resources.TableDescriptionsResourceWithStreamingResponse(client.table_descriptions) - self.heartbeat = resources.HeartbeatResourceWithStreamingResponse(client.heartbeat) - self.engine = resources.EngineResourceWithStreamingResponse(client.engine) + self.finetunings = finetunings.FinetuningsResourceWithStreamingResponse(client.finetunings) + self.golden_sqls = golden_sqls.GoldenSqlsResourceWithStreamingResponse(client.golden_sqls) + self.instructions = instructions.InstructionsResourceWithStreamingResponse(client.instructions) + self.generations = generations.GenerationsResourceWithStreamingResponse(client.generations) + self.prompts = prompts.PromptsResourceWithStreamingResponse(client.prompts) + self.sql_generations = sql_generations.SqlGenerationsResourceWithStreamingResponse(client.sql_generations) + self.nl_generations = nl_generations.NlGenerationsResourceWithStreamingResponse(client.nl_generations) + self.table_descriptions = table_descriptions.TableDescriptionsResourceWithStreamingResponse( + client.table_descriptions + ) + self.heartbeat = heartbeat.HeartbeatResourceWithStreamingResponse(client.heartbeat) + self.engine = engine.EngineResourceWithStreamingResponse(client.engine) class AsyncDataheraldWithStreamedResponse: def __init__(self, client: AsyncDataherald) -> None: - self.database_connections = resources.AsyncDatabaseConnectionsResourceWithStreamingResponse( + self.database_connections = database_connections.AsyncDatabaseConnectionsResourceWithStreamingResponse( client.database_connections ) - self.finetunings = resources.AsyncFinetuningsResourceWithStreamingResponse(client.finetunings) - self.golden_sqls = resources.AsyncGoldenSqlsResourceWithStreamingResponse(client.golden_sqls) - self.instructions = resources.AsyncInstructionsResourceWithStreamingResponse(client.instructions) - self.generations = resources.AsyncGenerationsResourceWithStreamingResponse(client.generations) - self.prompts = resources.AsyncPromptsResourceWithStreamingResponse(client.prompts) - self.sql_generations = resources.AsyncSqlGenerationsResourceWithStreamingResponse(client.sql_generations) - self.nl_generations = 
resources.AsyncNlGenerationsResourceWithStreamingResponse(client.nl_generations) - self.table_descriptions = resources.AsyncTableDescriptionsResourceWithStreamingResponse( + self.finetunings = finetunings.AsyncFinetuningsResourceWithStreamingResponse(client.finetunings) + self.golden_sqls = golden_sqls.AsyncGoldenSqlsResourceWithStreamingResponse(client.golden_sqls) + self.instructions = instructions.AsyncInstructionsResourceWithStreamingResponse(client.instructions) + self.generations = generations.AsyncGenerationsResourceWithStreamingResponse(client.generations) + self.prompts = prompts.AsyncPromptsResourceWithStreamingResponse(client.prompts) + self.sql_generations = sql_generations.AsyncSqlGenerationsResourceWithStreamingResponse(client.sql_generations) + self.nl_generations = nl_generations.AsyncNlGenerationsResourceWithStreamingResponse(client.nl_generations) + self.table_descriptions = table_descriptions.AsyncTableDescriptionsResourceWithStreamingResponse( client.table_descriptions ) - self.heartbeat = resources.AsyncHeartbeatResourceWithStreamingResponse(client.heartbeat) - self.engine = resources.AsyncEngineResourceWithStreamingResponse(client.engine) + self.heartbeat = heartbeat.AsyncHeartbeatResourceWithStreamingResponse(client.heartbeat) + self.engine = engine.AsyncEngineResourceWithStreamingResponse(client.engine) Client = Dataherald From 694418c48ff2842cb2c41699729baee90754488d Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 05:29:45 +0000 Subject: [PATCH 23/39] chore(internal): codegen related update (#77) --- README.md | 10 -- src/dataherald/_client.py | 190 ++++++++++++++++++-------------------- 2 files changed, 90 insertions(+), 110 deletions(-) diff --git a/README.md b/README.md index 8556333..b655fbf 100644 --- a/README.md +++ b/README.md @@ -311,16 +311,6 @@ client.with_options(http_client=DefaultHttpxClient(...)) By default the library closes underlying HTTP connections whenever the client is [garbage collected](https://docs.python.org/3/reference/datamodel.html#object.__del__). You can manually close the client using the `.close()` method if desired, or with a context manager that closes when exiting. -```py -from dataherald import Dataherald - -with Dataherald() as client: - # make requests here - ... - -# HTTP client is now closed -``` - ## Versioning This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions, though certain backwards-incompatible changes may be released as minor versions: diff --git a/src/dataherald/_client.py b/src/dataherald/_client.py index 38ada33..b0077d0 100644 --- a/src/dataherald/_client.py +++ b/src/dataherald/_client.py @@ -8,7 +8,7 @@ import httpx -from . import _exceptions +from . 
import resources, _exceptions from ._qs import Querystring from ._types import ( NOT_GIVEN, @@ -24,7 +24,6 @@ get_async_library, ) from ._version import __version__ -from .resources import engine, heartbeat, finetunings, generations, golden_sqls, nl_generations, table_descriptions from ._streaming import Stream as Stream, AsyncStream as AsyncStream from ._exceptions import APIStatusError, DataheraldError from ._base_client import ( @@ -32,10 +31,6 @@ SyncAPIClient, AsyncAPIClient, ) -from .resources.prompts import prompts -from .resources.instructions import instructions -from .resources.sql_generations import sql_generations -from .resources.database_connections import database_connections __all__ = [ "ENVIRONMENTS", @@ -43,6 +38,7 @@ "Transport", "ProxiesTypes", "RequestOptions", + "resources", "Dataherald", "AsyncDataherald", "Client", @@ -56,17 +52,17 @@ class Dataherald(SyncAPIClient): - database_connections: database_connections.DatabaseConnectionsResource - finetunings: finetunings.FinetuningsResource - golden_sqls: golden_sqls.GoldenSqlsResource - instructions: instructions.InstructionsResource - generations: generations.GenerationsResource - prompts: prompts.PromptsResource - sql_generations: sql_generations.SqlGenerationsResource - nl_generations: nl_generations.NlGenerationsResource - table_descriptions: table_descriptions.TableDescriptionsResource - heartbeat: heartbeat.HeartbeatResource - engine: engine.EngineResource + database_connections: resources.DatabaseConnectionsResource + finetunings: resources.FinetuningsResource + golden_sqls: resources.GoldenSqlsResource + instructions: resources.InstructionsResource + generations: resources.GenerationsResource + prompts: resources.PromptsResource + sql_generations: resources.SqlGenerationsResource + nl_generations: resources.NlGenerationsResource + table_descriptions: resources.TableDescriptionsResource + heartbeat: resources.HeartbeatResource + engine: resources.EngineResource with_raw_response: DataheraldWithRawResponse with_streaming_response: DataheraldWithStreamedResponse @@ -148,17 +144,17 @@ def __init__( _strict_response_validation=_strict_response_validation, ) - self.database_connections = database_connections.DatabaseConnectionsResource(self) - self.finetunings = finetunings.FinetuningsResource(self) - self.golden_sqls = golden_sqls.GoldenSqlsResource(self) - self.instructions = instructions.InstructionsResource(self) - self.generations = generations.GenerationsResource(self) - self.prompts = prompts.PromptsResource(self) - self.sql_generations = sql_generations.SqlGenerationsResource(self) - self.nl_generations = nl_generations.NlGenerationsResource(self) - self.table_descriptions = table_descriptions.TableDescriptionsResource(self) - self.heartbeat = heartbeat.HeartbeatResource(self) - self.engine = engine.EngineResource(self) + self.database_connections = resources.DatabaseConnectionsResource(self) + self.finetunings = resources.FinetuningsResource(self) + self.golden_sqls = resources.GoldenSqlsResource(self) + self.instructions = resources.InstructionsResource(self) + self.generations = resources.GenerationsResource(self) + self.prompts = resources.PromptsResource(self) + self.sql_generations = resources.SqlGenerationsResource(self) + self.nl_generations = resources.NlGenerationsResource(self) + self.table_descriptions = resources.TableDescriptionsResource(self) + self.heartbeat = resources.HeartbeatResource(self) + self.engine = resources.EngineResource(self) self.with_raw_response = DataheraldWithRawResponse(self) 
self.with_streaming_response = DataheraldWithStreamedResponse(self) @@ -270,17 +266,17 @@ def _make_status_error( class AsyncDataherald(AsyncAPIClient): - database_connections: database_connections.AsyncDatabaseConnectionsResource - finetunings: finetunings.AsyncFinetuningsResource - golden_sqls: golden_sqls.AsyncGoldenSqlsResource - instructions: instructions.AsyncInstructionsResource - generations: generations.AsyncGenerationsResource - prompts: prompts.AsyncPromptsResource - sql_generations: sql_generations.AsyncSqlGenerationsResource - nl_generations: nl_generations.AsyncNlGenerationsResource - table_descriptions: table_descriptions.AsyncTableDescriptionsResource - heartbeat: heartbeat.AsyncHeartbeatResource - engine: engine.AsyncEngineResource + database_connections: resources.AsyncDatabaseConnectionsResource + finetunings: resources.AsyncFinetuningsResource + golden_sqls: resources.AsyncGoldenSqlsResource + instructions: resources.AsyncInstructionsResource + generations: resources.AsyncGenerationsResource + prompts: resources.AsyncPromptsResource + sql_generations: resources.AsyncSqlGenerationsResource + nl_generations: resources.AsyncNlGenerationsResource + table_descriptions: resources.AsyncTableDescriptionsResource + heartbeat: resources.AsyncHeartbeatResource + engine: resources.AsyncEngineResource with_raw_response: AsyncDataheraldWithRawResponse with_streaming_response: AsyncDataheraldWithStreamedResponse @@ -362,17 +358,17 @@ def __init__( _strict_response_validation=_strict_response_validation, ) - self.database_connections = database_connections.AsyncDatabaseConnectionsResource(self) - self.finetunings = finetunings.AsyncFinetuningsResource(self) - self.golden_sqls = golden_sqls.AsyncGoldenSqlsResource(self) - self.instructions = instructions.AsyncInstructionsResource(self) - self.generations = generations.AsyncGenerationsResource(self) - self.prompts = prompts.AsyncPromptsResource(self) - self.sql_generations = sql_generations.AsyncSqlGenerationsResource(self) - self.nl_generations = nl_generations.AsyncNlGenerationsResource(self) - self.table_descriptions = table_descriptions.AsyncTableDescriptionsResource(self) - self.heartbeat = heartbeat.AsyncHeartbeatResource(self) - self.engine = engine.AsyncEngineResource(self) + self.database_connections = resources.AsyncDatabaseConnectionsResource(self) + self.finetunings = resources.AsyncFinetuningsResource(self) + self.golden_sqls = resources.AsyncGoldenSqlsResource(self) + self.instructions = resources.AsyncInstructionsResource(self) + self.generations = resources.AsyncGenerationsResource(self) + self.prompts = resources.AsyncPromptsResource(self) + self.sql_generations = resources.AsyncSqlGenerationsResource(self) + self.nl_generations = resources.AsyncNlGenerationsResource(self) + self.table_descriptions = resources.AsyncTableDescriptionsResource(self) + self.heartbeat = resources.AsyncHeartbeatResource(self) + self.engine = resources.AsyncEngineResource(self) self.with_raw_response = AsyncDataheraldWithRawResponse(self) self.with_streaming_response = AsyncDataheraldWithStreamedResponse(self) @@ -485,76 +481,70 @@ def _make_status_error( class DataheraldWithRawResponse: def __init__(self, client: Dataherald) -> None: - self.database_connections = database_connections.DatabaseConnectionsResourceWithRawResponse( - client.database_connections - ) - self.finetunings = finetunings.FinetuningsResourceWithRawResponse(client.finetunings) - self.golden_sqls = golden_sqls.GoldenSqlsResourceWithRawResponse(client.golden_sqls) - 
self.instructions = instructions.InstructionsResourceWithRawResponse(client.instructions) - self.generations = generations.GenerationsResourceWithRawResponse(client.generations) - self.prompts = prompts.PromptsResourceWithRawResponse(client.prompts) - self.sql_generations = sql_generations.SqlGenerationsResourceWithRawResponse(client.sql_generations) - self.nl_generations = nl_generations.NlGenerationsResourceWithRawResponse(client.nl_generations) - self.table_descriptions = table_descriptions.TableDescriptionsResourceWithRawResponse(client.table_descriptions) - self.heartbeat = heartbeat.HeartbeatResourceWithRawResponse(client.heartbeat) - self.engine = engine.EngineResourceWithRawResponse(client.engine) + self.database_connections = resources.DatabaseConnectionsResourceWithRawResponse(client.database_connections) + self.finetunings = resources.FinetuningsResourceWithRawResponse(client.finetunings) + self.golden_sqls = resources.GoldenSqlsResourceWithRawResponse(client.golden_sqls) + self.instructions = resources.InstructionsResourceWithRawResponse(client.instructions) + self.generations = resources.GenerationsResourceWithRawResponse(client.generations) + self.prompts = resources.PromptsResourceWithRawResponse(client.prompts) + self.sql_generations = resources.SqlGenerationsResourceWithRawResponse(client.sql_generations) + self.nl_generations = resources.NlGenerationsResourceWithRawResponse(client.nl_generations) + self.table_descriptions = resources.TableDescriptionsResourceWithRawResponse(client.table_descriptions) + self.heartbeat = resources.HeartbeatResourceWithRawResponse(client.heartbeat) + self.engine = resources.EngineResourceWithRawResponse(client.engine) class AsyncDataheraldWithRawResponse: def __init__(self, client: AsyncDataherald) -> None: - self.database_connections = database_connections.AsyncDatabaseConnectionsResourceWithRawResponse( + self.database_connections = resources.AsyncDatabaseConnectionsResourceWithRawResponse( client.database_connections ) - self.finetunings = finetunings.AsyncFinetuningsResourceWithRawResponse(client.finetunings) - self.golden_sqls = golden_sqls.AsyncGoldenSqlsResourceWithRawResponse(client.golden_sqls) - self.instructions = instructions.AsyncInstructionsResourceWithRawResponse(client.instructions) - self.generations = generations.AsyncGenerationsResourceWithRawResponse(client.generations) - self.prompts = prompts.AsyncPromptsResourceWithRawResponse(client.prompts) - self.sql_generations = sql_generations.AsyncSqlGenerationsResourceWithRawResponse(client.sql_generations) - self.nl_generations = nl_generations.AsyncNlGenerationsResourceWithRawResponse(client.nl_generations) - self.table_descriptions = table_descriptions.AsyncTableDescriptionsResourceWithRawResponse( - client.table_descriptions - ) - self.heartbeat = heartbeat.AsyncHeartbeatResourceWithRawResponse(client.heartbeat) - self.engine = engine.AsyncEngineResourceWithRawResponse(client.engine) + self.finetunings = resources.AsyncFinetuningsResourceWithRawResponse(client.finetunings) + self.golden_sqls = resources.AsyncGoldenSqlsResourceWithRawResponse(client.golden_sqls) + self.instructions = resources.AsyncInstructionsResourceWithRawResponse(client.instructions) + self.generations = resources.AsyncGenerationsResourceWithRawResponse(client.generations) + self.prompts = resources.AsyncPromptsResourceWithRawResponse(client.prompts) + self.sql_generations = resources.AsyncSqlGenerationsResourceWithRawResponse(client.sql_generations) + self.nl_generations = 
resources.AsyncNlGenerationsResourceWithRawResponse(client.nl_generations) + self.table_descriptions = resources.AsyncTableDescriptionsResourceWithRawResponse(client.table_descriptions) + self.heartbeat = resources.AsyncHeartbeatResourceWithRawResponse(client.heartbeat) + self.engine = resources.AsyncEngineResourceWithRawResponse(client.engine) class DataheraldWithStreamedResponse: def __init__(self, client: Dataherald) -> None: - self.database_connections = database_connections.DatabaseConnectionsResourceWithStreamingResponse( + self.database_connections = resources.DatabaseConnectionsResourceWithStreamingResponse( client.database_connections ) - self.finetunings = finetunings.FinetuningsResourceWithStreamingResponse(client.finetunings) - self.golden_sqls = golden_sqls.GoldenSqlsResourceWithStreamingResponse(client.golden_sqls) - self.instructions = instructions.InstructionsResourceWithStreamingResponse(client.instructions) - self.generations = generations.GenerationsResourceWithStreamingResponse(client.generations) - self.prompts = prompts.PromptsResourceWithStreamingResponse(client.prompts) - self.sql_generations = sql_generations.SqlGenerationsResourceWithStreamingResponse(client.sql_generations) - self.nl_generations = nl_generations.NlGenerationsResourceWithStreamingResponse(client.nl_generations) - self.table_descriptions = table_descriptions.TableDescriptionsResourceWithStreamingResponse( - client.table_descriptions - ) - self.heartbeat = heartbeat.HeartbeatResourceWithStreamingResponse(client.heartbeat) - self.engine = engine.EngineResourceWithStreamingResponse(client.engine) + self.finetunings = resources.FinetuningsResourceWithStreamingResponse(client.finetunings) + self.golden_sqls = resources.GoldenSqlsResourceWithStreamingResponse(client.golden_sqls) + self.instructions = resources.InstructionsResourceWithStreamingResponse(client.instructions) + self.generations = resources.GenerationsResourceWithStreamingResponse(client.generations) + self.prompts = resources.PromptsResourceWithStreamingResponse(client.prompts) + self.sql_generations = resources.SqlGenerationsResourceWithStreamingResponse(client.sql_generations) + self.nl_generations = resources.NlGenerationsResourceWithStreamingResponse(client.nl_generations) + self.table_descriptions = resources.TableDescriptionsResourceWithStreamingResponse(client.table_descriptions) + self.heartbeat = resources.HeartbeatResourceWithStreamingResponse(client.heartbeat) + self.engine = resources.EngineResourceWithStreamingResponse(client.engine) class AsyncDataheraldWithStreamedResponse: def __init__(self, client: AsyncDataherald) -> None: - self.database_connections = database_connections.AsyncDatabaseConnectionsResourceWithStreamingResponse( + self.database_connections = resources.AsyncDatabaseConnectionsResourceWithStreamingResponse( client.database_connections ) - self.finetunings = finetunings.AsyncFinetuningsResourceWithStreamingResponse(client.finetunings) - self.golden_sqls = golden_sqls.AsyncGoldenSqlsResourceWithStreamingResponse(client.golden_sqls) - self.instructions = instructions.AsyncInstructionsResourceWithStreamingResponse(client.instructions) - self.generations = generations.AsyncGenerationsResourceWithStreamingResponse(client.generations) - self.prompts = prompts.AsyncPromptsResourceWithStreamingResponse(client.prompts) - self.sql_generations = sql_generations.AsyncSqlGenerationsResourceWithStreamingResponse(client.sql_generations) - self.nl_generations = 
nl_generations.AsyncNlGenerationsResourceWithStreamingResponse(client.nl_generations) - self.table_descriptions = table_descriptions.AsyncTableDescriptionsResourceWithStreamingResponse( + self.finetunings = resources.AsyncFinetuningsResourceWithStreamingResponse(client.finetunings) + self.golden_sqls = resources.AsyncGoldenSqlsResourceWithStreamingResponse(client.golden_sqls) + self.instructions = resources.AsyncInstructionsResourceWithStreamingResponse(client.instructions) + self.generations = resources.AsyncGenerationsResourceWithStreamingResponse(client.generations) + self.prompts = resources.AsyncPromptsResourceWithStreamingResponse(client.prompts) + self.sql_generations = resources.AsyncSqlGenerationsResourceWithStreamingResponse(client.sql_generations) + self.nl_generations = resources.AsyncNlGenerationsResourceWithStreamingResponse(client.nl_generations) + self.table_descriptions = resources.AsyncTableDescriptionsResourceWithStreamingResponse( client.table_descriptions ) - self.heartbeat = heartbeat.AsyncHeartbeatResourceWithStreamingResponse(client.heartbeat) - self.engine = engine.AsyncEngineResourceWithStreamingResponse(client.engine) + self.heartbeat = resources.AsyncHeartbeatResourceWithStreamingResponse(client.heartbeat) + self.engine = resources.AsyncEngineResourceWithStreamingResponse(client.engine) Client = Dataherald From dcf8544e5f5edd785eb6cb6768106d44877a5c70 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 05:05:48 +0000 Subject: [PATCH 24/39] chore(internal): codegen related update (#78) --- README.md | 10 ++ src/dataherald/_client.py | 190 ++++++++++++++++++++------------------ tests/test_client.py | 8 +- 3 files changed, 114 insertions(+), 94 deletions(-) diff --git a/README.md b/README.md index b655fbf..8556333 100644 --- a/README.md +++ b/README.md @@ -311,6 +311,16 @@ client.with_options(http_client=DefaultHttpxClient(...)) By default the library closes underlying HTTP connections whenever the client is [garbage collected](https://docs.python.org/3/reference/datamodel.html#object.__del__). You can manually close the client using the `.close()` method if desired, or with a context manager that closes when exiting. +```py +from dataherald import Dataherald + +with Dataherald() as client: + # make requests here + ... + +# HTTP client is now closed +``` + ## Versioning This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions, though certain backwards-incompatible changes may be released as minor versions: diff --git a/src/dataherald/_client.py b/src/dataherald/_client.py index b0077d0..38ada33 100644 --- a/src/dataherald/_client.py +++ b/src/dataherald/_client.py @@ -8,7 +8,7 @@ import httpx -from . import resources, _exceptions +from . 
import _exceptions from ._qs import Querystring from ._types import ( NOT_GIVEN, @@ -24,6 +24,7 @@ get_async_library, ) from ._version import __version__ +from .resources import engine, heartbeat, finetunings, generations, golden_sqls, nl_generations, table_descriptions from ._streaming import Stream as Stream, AsyncStream as AsyncStream from ._exceptions import APIStatusError, DataheraldError from ._base_client import ( @@ -31,6 +32,10 @@ SyncAPIClient, AsyncAPIClient, ) +from .resources.prompts import prompts +from .resources.instructions import instructions +from .resources.sql_generations import sql_generations +from .resources.database_connections import database_connections __all__ = [ "ENVIRONMENTS", @@ -38,7 +43,6 @@ "Transport", "ProxiesTypes", "RequestOptions", - "resources", "Dataherald", "AsyncDataherald", "Client", @@ -52,17 +56,17 @@ class Dataherald(SyncAPIClient): - database_connections: resources.DatabaseConnectionsResource - finetunings: resources.FinetuningsResource - golden_sqls: resources.GoldenSqlsResource - instructions: resources.InstructionsResource - generations: resources.GenerationsResource - prompts: resources.PromptsResource - sql_generations: resources.SqlGenerationsResource - nl_generations: resources.NlGenerationsResource - table_descriptions: resources.TableDescriptionsResource - heartbeat: resources.HeartbeatResource - engine: resources.EngineResource + database_connections: database_connections.DatabaseConnectionsResource + finetunings: finetunings.FinetuningsResource + golden_sqls: golden_sqls.GoldenSqlsResource + instructions: instructions.InstructionsResource + generations: generations.GenerationsResource + prompts: prompts.PromptsResource + sql_generations: sql_generations.SqlGenerationsResource + nl_generations: nl_generations.NlGenerationsResource + table_descriptions: table_descriptions.TableDescriptionsResource + heartbeat: heartbeat.HeartbeatResource + engine: engine.EngineResource with_raw_response: DataheraldWithRawResponse with_streaming_response: DataheraldWithStreamedResponse @@ -144,17 +148,17 @@ def __init__( _strict_response_validation=_strict_response_validation, ) - self.database_connections = resources.DatabaseConnectionsResource(self) - self.finetunings = resources.FinetuningsResource(self) - self.golden_sqls = resources.GoldenSqlsResource(self) - self.instructions = resources.InstructionsResource(self) - self.generations = resources.GenerationsResource(self) - self.prompts = resources.PromptsResource(self) - self.sql_generations = resources.SqlGenerationsResource(self) - self.nl_generations = resources.NlGenerationsResource(self) - self.table_descriptions = resources.TableDescriptionsResource(self) - self.heartbeat = resources.HeartbeatResource(self) - self.engine = resources.EngineResource(self) + self.database_connections = database_connections.DatabaseConnectionsResource(self) + self.finetunings = finetunings.FinetuningsResource(self) + self.golden_sqls = golden_sqls.GoldenSqlsResource(self) + self.instructions = instructions.InstructionsResource(self) + self.generations = generations.GenerationsResource(self) + self.prompts = prompts.PromptsResource(self) + self.sql_generations = sql_generations.SqlGenerationsResource(self) + self.nl_generations = nl_generations.NlGenerationsResource(self) + self.table_descriptions = table_descriptions.TableDescriptionsResource(self) + self.heartbeat = heartbeat.HeartbeatResource(self) + self.engine = engine.EngineResource(self) self.with_raw_response = DataheraldWithRawResponse(self) 
self.with_streaming_response = DataheraldWithStreamedResponse(self) @@ -266,17 +270,17 @@ def _make_status_error( class AsyncDataherald(AsyncAPIClient): - database_connections: resources.AsyncDatabaseConnectionsResource - finetunings: resources.AsyncFinetuningsResource - golden_sqls: resources.AsyncGoldenSqlsResource - instructions: resources.AsyncInstructionsResource - generations: resources.AsyncGenerationsResource - prompts: resources.AsyncPromptsResource - sql_generations: resources.AsyncSqlGenerationsResource - nl_generations: resources.AsyncNlGenerationsResource - table_descriptions: resources.AsyncTableDescriptionsResource - heartbeat: resources.AsyncHeartbeatResource - engine: resources.AsyncEngineResource + database_connections: database_connections.AsyncDatabaseConnectionsResource + finetunings: finetunings.AsyncFinetuningsResource + golden_sqls: golden_sqls.AsyncGoldenSqlsResource + instructions: instructions.AsyncInstructionsResource + generations: generations.AsyncGenerationsResource + prompts: prompts.AsyncPromptsResource + sql_generations: sql_generations.AsyncSqlGenerationsResource + nl_generations: nl_generations.AsyncNlGenerationsResource + table_descriptions: table_descriptions.AsyncTableDescriptionsResource + heartbeat: heartbeat.AsyncHeartbeatResource + engine: engine.AsyncEngineResource with_raw_response: AsyncDataheraldWithRawResponse with_streaming_response: AsyncDataheraldWithStreamedResponse @@ -358,17 +362,17 @@ def __init__( _strict_response_validation=_strict_response_validation, ) - self.database_connections = resources.AsyncDatabaseConnectionsResource(self) - self.finetunings = resources.AsyncFinetuningsResource(self) - self.golden_sqls = resources.AsyncGoldenSqlsResource(self) - self.instructions = resources.AsyncInstructionsResource(self) - self.generations = resources.AsyncGenerationsResource(self) - self.prompts = resources.AsyncPromptsResource(self) - self.sql_generations = resources.AsyncSqlGenerationsResource(self) - self.nl_generations = resources.AsyncNlGenerationsResource(self) - self.table_descriptions = resources.AsyncTableDescriptionsResource(self) - self.heartbeat = resources.AsyncHeartbeatResource(self) - self.engine = resources.AsyncEngineResource(self) + self.database_connections = database_connections.AsyncDatabaseConnectionsResource(self) + self.finetunings = finetunings.AsyncFinetuningsResource(self) + self.golden_sqls = golden_sqls.AsyncGoldenSqlsResource(self) + self.instructions = instructions.AsyncInstructionsResource(self) + self.generations = generations.AsyncGenerationsResource(self) + self.prompts = prompts.AsyncPromptsResource(self) + self.sql_generations = sql_generations.AsyncSqlGenerationsResource(self) + self.nl_generations = nl_generations.AsyncNlGenerationsResource(self) + self.table_descriptions = table_descriptions.AsyncTableDescriptionsResource(self) + self.heartbeat = heartbeat.AsyncHeartbeatResource(self) + self.engine = engine.AsyncEngineResource(self) self.with_raw_response = AsyncDataheraldWithRawResponse(self) self.with_streaming_response = AsyncDataheraldWithStreamedResponse(self) @@ -481,70 +485,76 @@ def _make_status_error( class DataheraldWithRawResponse: def __init__(self, client: Dataherald) -> None: - self.database_connections = resources.DatabaseConnectionsResourceWithRawResponse(client.database_connections) - self.finetunings = resources.FinetuningsResourceWithRawResponse(client.finetunings) - self.golden_sqls = resources.GoldenSqlsResourceWithRawResponse(client.golden_sqls) - self.instructions = 
resources.InstructionsResourceWithRawResponse(client.instructions) - self.generations = resources.GenerationsResourceWithRawResponse(client.generations) - self.prompts = resources.PromptsResourceWithRawResponse(client.prompts) - self.sql_generations = resources.SqlGenerationsResourceWithRawResponse(client.sql_generations) - self.nl_generations = resources.NlGenerationsResourceWithRawResponse(client.nl_generations) - self.table_descriptions = resources.TableDescriptionsResourceWithRawResponse(client.table_descriptions) - self.heartbeat = resources.HeartbeatResourceWithRawResponse(client.heartbeat) - self.engine = resources.EngineResourceWithRawResponse(client.engine) + self.database_connections = database_connections.DatabaseConnectionsResourceWithRawResponse( + client.database_connections + ) + self.finetunings = finetunings.FinetuningsResourceWithRawResponse(client.finetunings) + self.golden_sqls = golden_sqls.GoldenSqlsResourceWithRawResponse(client.golden_sqls) + self.instructions = instructions.InstructionsResourceWithRawResponse(client.instructions) + self.generations = generations.GenerationsResourceWithRawResponse(client.generations) + self.prompts = prompts.PromptsResourceWithRawResponse(client.prompts) + self.sql_generations = sql_generations.SqlGenerationsResourceWithRawResponse(client.sql_generations) + self.nl_generations = nl_generations.NlGenerationsResourceWithRawResponse(client.nl_generations) + self.table_descriptions = table_descriptions.TableDescriptionsResourceWithRawResponse(client.table_descriptions) + self.heartbeat = heartbeat.HeartbeatResourceWithRawResponse(client.heartbeat) + self.engine = engine.EngineResourceWithRawResponse(client.engine) class AsyncDataheraldWithRawResponse: def __init__(self, client: AsyncDataherald) -> None: - self.database_connections = resources.AsyncDatabaseConnectionsResourceWithRawResponse( + self.database_connections = database_connections.AsyncDatabaseConnectionsResourceWithRawResponse( client.database_connections ) - self.finetunings = resources.AsyncFinetuningsResourceWithRawResponse(client.finetunings) - self.golden_sqls = resources.AsyncGoldenSqlsResourceWithRawResponse(client.golden_sqls) - self.instructions = resources.AsyncInstructionsResourceWithRawResponse(client.instructions) - self.generations = resources.AsyncGenerationsResourceWithRawResponse(client.generations) - self.prompts = resources.AsyncPromptsResourceWithRawResponse(client.prompts) - self.sql_generations = resources.AsyncSqlGenerationsResourceWithRawResponse(client.sql_generations) - self.nl_generations = resources.AsyncNlGenerationsResourceWithRawResponse(client.nl_generations) - self.table_descriptions = resources.AsyncTableDescriptionsResourceWithRawResponse(client.table_descriptions) - self.heartbeat = resources.AsyncHeartbeatResourceWithRawResponse(client.heartbeat) - self.engine = resources.AsyncEngineResourceWithRawResponse(client.engine) + self.finetunings = finetunings.AsyncFinetuningsResourceWithRawResponse(client.finetunings) + self.golden_sqls = golden_sqls.AsyncGoldenSqlsResourceWithRawResponse(client.golden_sqls) + self.instructions = instructions.AsyncInstructionsResourceWithRawResponse(client.instructions) + self.generations = generations.AsyncGenerationsResourceWithRawResponse(client.generations) + self.prompts = prompts.AsyncPromptsResourceWithRawResponse(client.prompts) + self.sql_generations = sql_generations.AsyncSqlGenerationsResourceWithRawResponse(client.sql_generations) + self.nl_generations = 
nl_generations.AsyncNlGenerationsResourceWithRawResponse(client.nl_generations) + self.table_descriptions = table_descriptions.AsyncTableDescriptionsResourceWithRawResponse( + client.table_descriptions + ) + self.heartbeat = heartbeat.AsyncHeartbeatResourceWithRawResponse(client.heartbeat) + self.engine = engine.AsyncEngineResourceWithRawResponse(client.engine) class DataheraldWithStreamedResponse: def __init__(self, client: Dataherald) -> None: - self.database_connections = resources.DatabaseConnectionsResourceWithStreamingResponse( + self.database_connections = database_connections.DatabaseConnectionsResourceWithStreamingResponse( client.database_connections ) - self.finetunings = resources.FinetuningsResourceWithStreamingResponse(client.finetunings) - self.golden_sqls = resources.GoldenSqlsResourceWithStreamingResponse(client.golden_sqls) - self.instructions = resources.InstructionsResourceWithStreamingResponse(client.instructions) - self.generations = resources.GenerationsResourceWithStreamingResponse(client.generations) - self.prompts = resources.PromptsResourceWithStreamingResponse(client.prompts) - self.sql_generations = resources.SqlGenerationsResourceWithStreamingResponse(client.sql_generations) - self.nl_generations = resources.NlGenerationsResourceWithStreamingResponse(client.nl_generations) - self.table_descriptions = resources.TableDescriptionsResourceWithStreamingResponse(client.table_descriptions) - self.heartbeat = resources.HeartbeatResourceWithStreamingResponse(client.heartbeat) - self.engine = resources.EngineResourceWithStreamingResponse(client.engine) + self.finetunings = finetunings.FinetuningsResourceWithStreamingResponse(client.finetunings) + self.golden_sqls = golden_sqls.GoldenSqlsResourceWithStreamingResponse(client.golden_sqls) + self.instructions = instructions.InstructionsResourceWithStreamingResponse(client.instructions) + self.generations = generations.GenerationsResourceWithStreamingResponse(client.generations) + self.prompts = prompts.PromptsResourceWithStreamingResponse(client.prompts) + self.sql_generations = sql_generations.SqlGenerationsResourceWithStreamingResponse(client.sql_generations) + self.nl_generations = nl_generations.NlGenerationsResourceWithStreamingResponse(client.nl_generations) + self.table_descriptions = table_descriptions.TableDescriptionsResourceWithStreamingResponse( + client.table_descriptions + ) + self.heartbeat = heartbeat.HeartbeatResourceWithStreamingResponse(client.heartbeat) + self.engine = engine.EngineResourceWithStreamingResponse(client.engine) class AsyncDataheraldWithStreamedResponse: def __init__(self, client: AsyncDataherald) -> None: - self.database_connections = resources.AsyncDatabaseConnectionsResourceWithStreamingResponse( + self.database_connections = database_connections.AsyncDatabaseConnectionsResourceWithStreamingResponse( client.database_connections ) - self.finetunings = resources.AsyncFinetuningsResourceWithStreamingResponse(client.finetunings) - self.golden_sqls = resources.AsyncGoldenSqlsResourceWithStreamingResponse(client.golden_sqls) - self.instructions = resources.AsyncInstructionsResourceWithStreamingResponse(client.instructions) - self.generations = resources.AsyncGenerationsResourceWithStreamingResponse(client.generations) - self.prompts = resources.AsyncPromptsResourceWithStreamingResponse(client.prompts) - self.sql_generations = resources.AsyncSqlGenerationsResourceWithStreamingResponse(client.sql_generations) - self.nl_generations = 
resources.AsyncNlGenerationsResourceWithStreamingResponse(client.nl_generations) - self.table_descriptions = resources.AsyncTableDescriptionsResourceWithStreamingResponse( + self.finetunings = finetunings.AsyncFinetuningsResourceWithStreamingResponse(client.finetunings) + self.golden_sqls = golden_sqls.AsyncGoldenSqlsResourceWithStreamingResponse(client.golden_sqls) + self.instructions = instructions.AsyncInstructionsResourceWithStreamingResponse(client.instructions) + self.generations = generations.AsyncGenerationsResourceWithStreamingResponse(client.generations) + self.prompts = prompts.AsyncPromptsResourceWithStreamingResponse(client.prompts) + self.sql_generations = sql_generations.AsyncSqlGenerationsResourceWithStreamingResponse(client.sql_generations) + self.nl_generations = nl_generations.AsyncNlGenerationsResourceWithStreamingResponse(client.nl_generations) + self.table_descriptions = table_descriptions.AsyncTableDescriptionsResourceWithStreamingResponse( client.table_descriptions ) - self.heartbeat = resources.AsyncHeartbeatResourceWithStreamingResponse(client.heartbeat) - self.engine = resources.AsyncEngineResourceWithStreamingResponse(client.engine) + self.heartbeat = heartbeat.AsyncHeartbeatResourceWithStreamingResponse(client.heartbeat) + self.engine = engine.AsyncEngineResourceWithStreamingResponse(client.engine) Client = Dataherald diff --git a/tests/test_client.py b/tests/test_client.py index 0ab254b..e603042 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -345,11 +345,11 @@ def test_default_query_option(self) -> None: FinalRequestOptions( method="get", url="/foo", - params={"foo": "baz", "query_param": "overriden"}, + params={"foo": "baz", "query_param": "overridden"}, ) ) url = httpx.URL(request.url) - assert dict(url.params) == {"foo": "baz", "query_param": "overriden"} + assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} def test_request_extra_json(self) -> None: request = self.client._build_request( @@ -1119,11 +1119,11 @@ def test_default_query_option(self) -> None: FinalRequestOptions( method="get", url="/foo", - params={"foo": "baz", "query_param": "overriden"}, + params={"foo": "baz", "query_param": "overridden"}, ) ) url = httpx.URL(request.url) - assert dict(url.params) == {"foo": "baz", "query_param": "overriden"} + assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} def test_request_extra_json(self) -> None: request = self.client._build_request( From 2dbb7674a7c3db79fec9ca141b4283c2f050c918 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 05:06:16 +0000 Subject: [PATCH 25/39] chore(internal): codegen related update (#79) --- tests/test_client.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_client.py b/tests/test_client.py index e603042..0ab254b 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -345,11 +345,11 @@ def test_default_query_option(self) -> None: FinalRequestOptions( method="get", url="/foo", - params={"foo": "baz", "query_param": "overridden"}, + params={"foo": "baz", "query_param": "overriden"}, ) ) url = httpx.URL(request.url) - assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} + assert dict(url.params) == {"foo": "baz", "query_param": "overriden"} def test_request_extra_json(self) -> None: request = self.client._build_request( @@ -1119,11 +1119,11 @@ def test_default_query_option(self) -> None: FinalRequestOptions( method="get", 
url="/foo", - params={"foo": "baz", "query_param": "overridden"}, + params={"foo": "baz", "query_param": "overriden"}, ) ) url = httpx.URL(request.url) - assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} + assert dict(url.params) == {"foo": "baz", "query_param": "overriden"} def test_request_extra_json(self) -> None: request = self.client._build_request( From 412f142303414a7fde0763aa72cdc74c31a8998d Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 05:06:49 +0000 Subject: [PATCH 26/39] chore(internal): codegen related update (#80) --- tests/test_client.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_client.py b/tests/test_client.py index 0ab254b..e603042 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -345,11 +345,11 @@ def test_default_query_option(self) -> None: FinalRequestOptions( method="get", url="/foo", - params={"foo": "baz", "query_param": "overriden"}, + params={"foo": "baz", "query_param": "overridden"}, ) ) url = httpx.URL(request.url) - assert dict(url.params) == {"foo": "baz", "query_param": "overriden"} + assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} def test_request_extra_json(self) -> None: request = self.client._build_request( @@ -1119,11 +1119,11 @@ def test_default_query_option(self) -> None: FinalRequestOptions( method="get", url="/foo", - params={"foo": "baz", "query_param": "overriden"}, + params={"foo": "baz", "query_param": "overridden"}, ) ) url = httpx.URL(request.url) - assert dict(url.params) == {"foo": "baz", "query_param": "overriden"} + assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} def test_request_extra_json(self) -> None: request = self.client._build_request( From 031e5b84710ee455ce19eba2b8d5148134b4fe31 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 05:19:12 +0000 Subject: [PATCH 27/39] chore(internal): codegen related update (#81) --- tests/test_client.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_client.py b/tests/test_client.py index e603042..0ab254b 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -345,11 +345,11 @@ def test_default_query_option(self) -> None: FinalRequestOptions( method="get", url="/foo", - params={"foo": "baz", "query_param": "overridden"}, + params={"foo": "baz", "query_param": "overriden"}, ) ) url = httpx.URL(request.url) - assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} + assert dict(url.params) == {"foo": "baz", "query_param": "overriden"} def test_request_extra_json(self) -> None: request = self.client._build_request( @@ -1119,11 +1119,11 @@ def test_default_query_option(self) -> None: FinalRequestOptions( method="get", url="/foo", - params={"foo": "baz", "query_param": "overridden"}, + params={"foo": "baz", "query_param": "overriden"}, ) ) url = httpx.URL(request.url) - assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} + assert dict(url.params) == {"foo": "baz", "query_param": "overriden"} def test_request_extra_json(self) -> None: request = self.client._build_request( From 476ccee978ece6c6e855f63e680f30aeaa4c3e18 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 05:21:24 +0000 Subject: [PATCH 28/39] chore(internal): fix some typos (#82) --- 
tests/test_client.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_client.py b/tests/test_client.py index 0ab254b..e603042 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -345,11 +345,11 @@ def test_default_query_option(self) -> None: FinalRequestOptions( method="get", url="/foo", - params={"foo": "baz", "query_param": "overriden"}, + params={"foo": "baz", "query_param": "overridden"}, ) ) url = httpx.URL(request.url) - assert dict(url.params) == {"foo": "baz", "query_param": "overriden"} + assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} def test_request_extra_json(self) -> None: request = self.client._build_request( @@ -1119,11 +1119,11 @@ def test_default_query_option(self) -> None: FinalRequestOptions( method="get", url="/foo", - params={"foo": "baz", "query_param": "overriden"}, + params={"foo": "baz", "query_param": "overridden"}, ) ) url = httpx.URL(request.url) - assert dict(url.params) == {"foo": "baz", "query_param": "overriden"} + assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} def test_request_extra_json(self) -> None: request = self.client._build_request( From 5bcb54b4145ad3cbb0fa753f509db7838237e869 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 05:24:20 +0000 Subject: [PATCH 29/39] chore(internal): codegen related update (#83) --- tests/test_client.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_client.py b/tests/test_client.py index e603042..0ab254b 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -345,11 +345,11 @@ def test_default_query_option(self) -> None: FinalRequestOptions( method="get", url="/foo", - params={"foo": "baz", "query_param": "overridden"}, + params={"foo": "baz", "query_param": "overriden"}, ) ) url = httpx.URL(request.url) - assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} + assert dict(url.params) == {"foo": "baz", "query_param": "overriden"} def test_request_extra_json(self) -> None: request = self.client._build_request( @@ -1119,11 +1119,11 @@ def test_default_query_option(self) -> None: FinalRequestOptions( method="get", url="/foo", - params={"foo": "baz", "query_param": "overridden"}, + params={"foo": "baz", "query_param": "overriden"}, ) ) url = httpx.URL(request.url) - assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} + assert dict(url.params) == {"foo": "baz", "query_param": "overriden"} def test_request_extra_json(self) -> None: request = self.client._build_request( From f9b8c6a812e0dc7607caccda8c45d7170d77af5e Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 05:24:49 +0000 Subject: [PATCH 30/39] chore(internal): codegen related update (#84) --- tests/test_client.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_client.py b/tests/test_client.py index 0ab254b..e603042 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -345,11 +345,11 @@ def test_default_query_option(self) -> None: FinalRequestOptions( method="get", url="/foo", - params={"foo": "baz", "query_param": "overriden"}, + params={"foo": "baz", "query_param": "overridden"}, ) ) url = httpx.URL(request.url) - assert dict(url.params) == {"foo": "baz", "query_param": "overriden"} + assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} def 
test_request_extra_json(self) -> None: request = self.client._build_request( @@ -1119,11 +1119,11 @@ def test_default_query_option(self) -> None: FinalRequestOptions( method="get", url="/foo", - params={"foo": "baz", "query_param": "overriden"}, + params={"foo": "baz", "query_param": "overridden"}, ) ) url = httpx.URL(request.url) - assert dict(url.params) == {"foo": "baz", "query_param": "overriden"} + assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} def test_request_extra_json(self) -> None: request = self.client._build_request( From f34930770a07b9553337b9a214883092035f442a Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 05:25:21 +0000 Subject: [PATCH 31/39] chore(internal): codegen related update (#85) --- tests/test_client.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_client.py b/tests/test_client.py index e603042..0ab254b 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -345,11 +345,11 @@ def test_default_query_option(self) -> None: FinalRequestOptions( method="get", url="/foo", - params={"foo": "baz", "query_param": "overridden"}, + params={"foo": "baz", "query_param": "overriden"}, ) ) url = httpx.URL(request.url) - assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} + assert dict(url.params) == {"foo": "baz", "query_param": "overriden"} def test_request_extra_json(self) -> None: request = self.client._build_request( @@ -1119,11 +1119,11 @@ def test_default_query_option(self) -> None: FinalRequestOptions( method="get", url="/foo", - params={"foo": "baz", "query_param": "overridden"}, + params={"foo": "baz", "query_param": "overriden"}, ) ) url = httpx.URL(request.url) - assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} + assert dict(url.params) == {"foo": "baz", "query_param": "overriden"} def test_request_extra_json(self) -> None: request = self.client._build_request( From 57946cb1c1f04f1dc9b9bc64d9406267a29efb2f Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 05:25:49 +0000 Subject: [PATCH 32/39] chore(internal): codegen related update (#86) --- tests/test_client.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_client.py b/tests/test_client.py index 0ab254b..e603042 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -345,11 +345,11 @@ def test_default_query_option(self) -> None: FinalRequestOptions( method="get", url="/foo", - params={"foo": "baz", "query_param": "overriden"}, + params={"foo": "baz", "query_param": "overridden"}, ) ) url = httpx.URL(request.url) - assert dict(url.params) == {"foo": "baz", "query_param": "overriden"} + assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} def test_request_extra_json(self) -> None: request = self.client._build_request( @@ -1119,11 +1119,11 @@ def test_default_query_option(self) -> None: FinalRequestOptions( method="get", url="/foo", - params={"foo": "baz", "query_param": "overriden"}, + params={"foo": "baz", "query_param": "overridden"}, ) ) url = httpx.URL(request.url) - assert dict(url.params) == {"foo": "baz", "query_param": "overriden"} + assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} def test_request_extra_json(self) -> None: request = self.client._build_request( From 7dbadda36b312ae2e93433aa2cbb0d41c4055705 Mon Sep 17 00:00:00 2001 From: 
"stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 2 Jan 2025 05:05:25 +0000 Subject: [PATCH 33/39] chore(internal): codegen related update (#87) --- LICENSE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LICENSE b/LICENSE index fb24d69..dcb1385 100644 --- a/LICENSE +++ b/LICENSE @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2024 Dataherald + Copyright 2025 Dataherald Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. From cc7a47330003f951306e3e313722c2a5020e8a3e Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 7 Jan 2025 05:11:25 +0000 Subject: [PATCH 34/39] chore: add missing isclass check (#88) --- src/dataherald/_models.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/dataherald/_models.py b/src/dataherald/_models.py index 7a547ce..d56ea1d 100644 --- a/src/dataherald/_models.py +++ b/src/dataherald/_models.py @@ -488,7 +488,11 @@ def construct_type(*, value: object, type_: object) -> object: _, items_type = get_args(type_) # Dict[_, items_type] return {key: construct_type(value=item, type_=items_type) for key, item in value.items()} - if not is_literal_type(type_) and (issubclass(origin, BaseModel) or issubclass(origin, GenericModel)): + if ( + not is_literal_type(type_) + and inspect.isclass(origin) + and (issubclass(origin, BaseModel) or issubclass(origin, GenericModel)) + ): if is_list(value): return [cast(Any, type_).construct(**entry) if is_mapping(entry) else entry for entry in value] From c37a99ced506fa058866003013a9a5410871d0c8 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 8 Jan 2025 05:04:49 +0000 Subject: [PATCH 35/39] chore(internal): bump httpx dependency (#89) --- pyproject.toml | 2 +- requirements-dev.lock | 5 ++--- requirements.lock | 3 +-- 3 files changed, 4 insertions(+), 6 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index d5cee2f..977f0ad 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,7 +54,7 @@ dev-dependencies = [ "dirty-equals>=0.6.0", "importlib-metadata>=6.7.0", "rich>=13.7.1", - "nest_asyncio==1.6.0" + "nest_asyncio==1.6.0", ] [tool.rye.scripts] diff --git a/requirements-dev.lock b/requirements-dev.lock index 18fea86..061ce54 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -35,7 +35,7 @@ h11==0.14.0 # via httpcore httpcore==1.0.2 # via httpx -httpx==0.25.2 +httpx==0.28.1 # via dataherald # via respx idna==3.4 @@ -76,7 +76,7 @@ python-dateutil==2.8.2 # via time-machine pytz==2023.3.post1 # via dirty-equals -respx==0.20.2 +respx==0.22.0 rich==13.7.1 ruff==0.6.9 setuptools==68.2.2 @@ -86,7 +86,6 @@ six==1.16.0 sniffio==1.3.0 # via anyio # via dataherald - # via httpx time-machine==2.9.0 tomli==2.0.2 # via mypy diff --git a/requirements.lock b/requirements.lock index ae9bfcd..e323cc8 100644 --- a/requirements.lock +++ b/requirements.lock @@ -25,7 +25,7 @@ h11==0.14.0 # via httpcore httpcore==1.0.2 # via httpx -httpx==0.25.2 +httpx==0.28.1 # via dataherald idna==3.4 # via anyio @@ -37,7 +37,6 @@ pydantic-core==2.27.1 sniffio==1.3.0 # via anyio # via dataherald - # via httpx typing-extensions==4.12.2 # via anyio # via dataherald From 071db7c123939fc97e425f143040b32c941f49a7 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" 
<142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 8 Jan 2025 05:08:37 +0000 Subject: [PATCH 36/39] fix(client): only call .close() when needed (#90) --- src/dataherald/_base_client.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/dataherald/_base_client.py b/src/dataherald/_base_client.py index e550cd6..c9ca736 100644 --- a/src/dataherald/_base_client.py +++ b/src/dataherald/_base_client.py @@ -767,6 +767,9 @@ def __init__(self, **kwargs: Any) -> None: class SyncHttpxClientWrapper(DefaultHttpxClient): def __del__(self) -> None: + if self.is_closed: + return + try: self.close() except Exception: @@ -1334,6 +1337,9 @@ def __init__(self, **kwargs: Any) -> None: class AsyncHttpxClientWrapper(DefaultAsyncHttpxClient): def __del__(self) -> None: + if self.is_closed: + return + try: # TODO(someday): support non asyncio runtimes here asyncio.get_running_loop().create_task(self.aclose()) From e3b409cb56dda58af9dca833b74a802682e5d9db Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 9 Jan 2025 05:07:32 +0000 Subject: [PATCH 37/39] docs: fix typos (#91) --- README.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 8556333..3a6c3ed 100644 --- a/README.md +++ b/README.md @@ -110,7 +110,7 @@ except dataherald.APIStatusError as e: print(e.response) ``` -Error codes are as followed: +Error codes are as follows: | Status Code | Error Type | | ----------- | -------------------------- | @@ -253,8 +253,7 @@ If you need to access undocumented endpoints, params, or response properties, th #### Undocumented endpoints To make requests to undocumented endpoints, you can make requests using `client.get`, `client.post`, and other -http verbs. Options on the client will be respected (such as retries) will be respected when making this -request. +http verbs. Options on the client will be respected (such as retries) when making this request. ```py import httpx From ac962fb29ea9fbb91e308f580e726a3eb778fc6a Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 9 Jan 2025 05:09:01 +0000 Subject: [PATCH 38/39] chore(internal): codegen related update (#92) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 3a6c3ed..1fd0152 100644 --- a/README.md +++ b/README.md @@ -325,7 +325,7 @@ with Dataherald() as client: This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions, though certain backwards-incompatible changes may be released as minor versions: 1. Changes that only affect static types, without breaking runtime behavior. -2. Changes to library internals which are technically public but not intended or documented for external use. _(Please open a GitHub issue to let us know if you are relying on such internals)_. +2. Changes to library internals which are technically public but not intended or documented for external use. _(Please open a GitHub issue to let us know if you are relying on such internals.)_ 3. Changes that we do not expect to impact the vast majority of users in practice. We take backwards-compatibility seriously and work hard to ensure you can rely on a smooth upgrade experience. 
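[Note on patch 36 above, "fix(client): only call .close() when needed" — the change is easiest to read as a guard on `__del__`: only attempt to close the underlying httpx client when it is still open, and never raise during finalization. The following is a minimal standalone sketch of that pattern against plain `httpx`; the class name is illustrative and this is not the SDK's actual `SyncHttpxClientWrapper`.]

```py
# Minimal sketch of the guard applied in patch 36 (illustration only).
# httpx.Client exposes `.is_closed` and `.close()`, which is all the guard needs.
import httpx


class ClosingClientWrapper(httpx.Client):
    def __del__(self) -> None:
        if self.is_closed:
            # Already closed (e.g. via a `with` block); nothing left to do.
            return
        try:
            self.close()
        except Exception:
            # __del__ can run late in interpreter shutdown; never let it raise.
            pass
```

[The async wrapper in the same patch applies the identical `is_closed` check before scheduling `aclose()` on the running event loop.]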
From b814470396003af743efd26eeed144453a43bedc Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 9 Jan 2025 05:09:28 +0000 Subject: [PATCH 39/39] release: 0.20.1 --- .release-please-manifest.json | 2 +- CHANGELOG.md | 53 +++++++++++++++++++++++++++++++++++ pyproject.toml | 2 +- src/dataherald/_version.py | 2 +- 4 files changed, 56 insertions(+), 3 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 0c2ecec..461342f 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.20.0" + ".": "0.20.1" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 756a833..f145be5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,58 @@ # Changelog +## 0.20.1 (2025-01-09) + +Full Changelog: [v0.20.0...v0.20.1](https://github.com/Dataherald/dataherald-python/compare/v0.20.0...v0.20.1) + +### Bug Fixes + +* **client:** compat with new httpx 0.28.0 release ([#65](https://github.com/Dataherald/dataherald-python/issues/65)) ([9ee84bd](https://github.com/Dataherald/dataherald-python/commit/9ee84bd044e4fd133b94e0346dff772ea62ec152)) +* **client:** only call .close() when needed ([#90](https://github.com/Dataherald/dataherald-python/issues/90)) ([071db7c](https://github.com/Dataherald/dataherald-python/commit/071db7c123939fc97e425f143040b32c941f49a7)) + + +### Chores + +* add missing isclass check ([#88](https://github.com/Dataherald/dataherald-python/issues/88)) ([cc7a473](https://github.com/Dataherald/dataherald-python/commit/cc7a47330003f951306e3e313722c2a5020e8a3e)) +* **internal:** add support for TypeAliasType ([#71](https://github.com/Dataherald/dataherald-python/issues/71)) ([911abd4](https://github.com/Dataherald/dataherald-python/commit/911abd4190cbc49d96fa291c0be303440fc651e7)) +* **internal:** bump httpx dependency ([#89](https://github.com/Dataherald/dataherald-python/issues/89)) ([c37a99c](https://github.com/Dataherald/dataherald-python/commit/c37a99ced506fa058866003013a9a5410871d0c8)) +* **internal:** bump pydantic dependency ([#68](https://github.com/Dataherald/dataherald-python/issues/68)) ([13d4ec2](https://github.com/Dataherald/dataherald-python/commit/13d4ec203e3b194ec38c33b8582288e344a16f19)) +* **internal:** bump pyright ([#66](https://github.com/Dataherald/dataherald-python/issues/66)) ([9d5db21](https://github.com/Dataherald/dataherald-python/commit/9d5db21aae350d5eb3fa5a52cb72a8fcab17f0c2)) +* **internal:** bump pyright ([#70](https://github.com/Dataherald/dataherald-python/issues/70)) ([2a732e0](https://github.com/Dataherald/dataherald-python/commit/2a732e03fafe75f1b0ab7812176259a9c311f2bc)) +* **internal:** codegen related update ([#64](https://github.com/Dataherald/dataherald-python/issues/64)) ([5ba7000](https://github.com/Dataherald/dataherald-python/commit/5ba70006e6e4c0addb3461eaec8424d45d12c3c5)) +* **internal:** codegen related update ([#72](https://github.com/Dataherald/dataherald-python/issues/72)) ([6ddf080](https://github.com/Dataherald/dataherald-python/commit/6ddf080b63f5fb373d1fd32ec4919baada704b64)) +* **internal:** codegen related update ([#73](https://github.com/Dataherald/dataherald-python/issues/73)) ([d8c41ed](https://github.com/Dataherald/dataherald-python/commit/d8c41ed0a8cf93d7839a61ab572da71262f0c0db)) +* **internal:** codegen related update ([#74](https://github.com/Dataherald/dataherald-python/issues/74)) 
([b6bb66d](https://github.com/Dataherald/dataherald-python/commit/b6bb66d23275d077c6f2defcf076dc9b23c270bf)) +* **internal:** codegen related update ([#75](https://github.com/Dataherald/dataherald-python/issues/75)) ([4b98a96](https://github.com/Dataherald/dataherald-python/commit/4b98a9631848d80f30c22aee9c3c0817770aee28)) +* **internal:** codegen related update ([#76](https://github.com/Dataherald/dataherald-python/issues/76)) ([2ab6503](https://github.com/Dataherald/dataherald-python/commit/2ab650352fdc1f7a49f0bbd553b53ad47a81dce7)) +* **internal:** codegen related update ([#77](https://github.com/Dataherald/dataherald-python/issues/77)) ([694418c](https://github.com/Dataherald/dataherald-python/commit/694418c48ff2842cb2c41699729baee90754488d)) +* **internal:** codegen related update ([#78](https://github.com/Dataherald/dataherald-python/issues/78)) ([dcf8544](https://github.com/Dataherald/dataherald-python/commit/dcf8544e5f5edd785eb6cb6768106d44877a5c70)) +* **internal:** codegen related update ([#79](https://github.com/Dataherald/dataherald-python/issues/79)) ([2dbb767](https://github.com/Dataherald/dataherald-python/commit/2dbb7674a7c3db79fec9ca141b4283c2f050c918)) +* **internal:** codegen related update ([#80](https://github.com/Dataherald/dataherald-python/issues/80)) ([412f142](https://github.com/Dataherald/dataherald-python/commit/412f142303414a7fde0763aa72cdc74c31a8998d)) +* **internal:** codegen related update ([#81](https://github.com/Dataherald/dataherald-python/issues/81)) ([031e5b8](https://github.com/Dataherald/dataherald-python/commit/031e5b84710ee455ce19eba2b8d5148134b4fe31)) +* **internal:** codegen related update ([#83](https://github.com/Dataherald/dataherald-python/issues/83)) ([5bcb54b](https://github.com/Dataherald/dataherald-python/commit/5bcb54b4145ad3cbb0fa753f509db7838237e869)) +* **internal:** codegen related update ([#84](https://github.com/Dataherald/dataherald-python/issues/84)) ([f9b8c6a](https://github.com/Dataherald/dataherald-python/commit/f9b8c6a812e0dc7607caccda8c45d7170d77af5e)) +* **internal:** codegen related update ([#85](https://github.com/Dataherald/dataherald-python/issues/85)) ([f349307](https://github.com/Dataherald/dataherald-python/commit/f34930770a07b9553337b9a214883092035f442a)) +* **internal:** codegen related update ([#86](https://github.com/Dataherald/dataherald-python/issues/86)) ([57946cb](https://github.com/Dataherald/dataherald-python/commit/57946cb1c1f04f1dc9b9bc64d9406267a29efb2f)) +* **internal:** codegen related update ([#87](https://github.com/Dataherald/dataherald-python/issues/87)) ([7dbadda](https://github.com/Dataherald/dataherald-python/commit/7dbadda36b312ae2e93433aa2cbb0d41c4055705)) +* **internal:** codegen related update ([#92](https://github.com/Dataherald/dataherald-python/issues/92)) ([ac962fb](https://github.com/Dataherald/dataherald-python/commit/ac962fb29ea9fbb91e308f580e726a3eb778fc6a)) +* **internal:** fix compat model_dump method when warnings are passed ([#61](https://github.com/Dataherald/dataherald-python/issues/61)) ([6fedf36](https://github.com/Dataherald/dataherald-python/commit/6fedf36fd30a8a1ccbcb8633f85d999f0b38ec34)) +* **internal:** fix some typos ([#82](https://github.com/Dataherald/dataherald-python/issues/82)) ([476ccee](https://github.com/Dataherald/dataherald-python/commit/476ccee978ece6c6e855f63e680f30aeaa4c3e18)) +* **internal:** version bump ([#54](https://github.com/Dataherald/dataherald-python/issues/54)) 
([762488e](https://github.com/Dataherald/dataherald-python/commit/762488ecc8e55fb18e05b90a7cb353bea9ee6a00)) +* make the `Omit` type public ([#67](https://github.com/Dataherald/dataherald-python/issues/67)) ([3070bff](https://github.com/Dataherald/dataherald-python/commit/3070bff7d602bd234433ad802c92b9fba210a852)) +* rebuild project due to codegen change ([#56](https://github.com/Dataherald/dataherald-python/issues/56)) ([3f06cb7](https://github.com/Dataherald/dataherald-python/commit/3f06cb70c1ef852ec71f9b4763f991ffcf3e1562)) +* rebuild project due to codegen change ([#57](https://github.com/Dataherald/dataherald-python/issues/57)) ([eaf1330](https://github.com/Dataherald/dataherald-python/commit/eaf13302cfbaf6c7ed61717be1ce9359d716cbdf)) +* rebuild project due to codegen change ([#58](https://github.com/Dataherald/dataherald-python/issues/58)) ([2e4560f](https://github.com/Dataherald/dataherald-python/commit/2e4560f29d5e27c88dbcfd80aaa84fca1e64133b)) +* rebuild project due to codegen change ([#59](https://github.com/Dataherald/dataherald-python/issues/59)) ([7728427](https://github.com/Dataherald/dataherald-python/commit/7728427bca7e453c0897e599c2cb6e80b558e80e)) +* rebuild project due to codegen change ([#60](https://github.com/Dataherald/dataherald-python/issues/60)) ([6742d13](https://github.com/Dataherald/dataherald-python/commit/6742d13cf6e40582d84e19aa085ef36f6359fb4c)) +* remove now unused `cached-property` dep ([#63](https://github.com/Dataherald/dataherald-python/issues/63)) ([540b49a](https://github.com/Dataherald/dataherald-python/commit/540b49a21e53183ac7a5eb670b375e3ded2ecf34)) + + +### Documentation + +* add info log level to readme ([#62](https://github.com/Dataherald/dataherald-python/issues/62)) ([175f6b8](https://github.com/Dataherald/dataherald-python/commit/175f6b8f5e544c979425b1a4c37be9a04162ceb8)) +* fix typos ([#91](https://github.com/Dataherald/dataherald-python/issues/91)) ([e3b409c](https://github.com/Dataherald/dataherald-python/commit/e3b409cb56dda58af9dca833b74a802682e5d9db)) +* **readme:** fix http client proxies example ([#69](https://github.com/Dataherald/dataherald-python/issues/69)) ([f0129cf](https://github.com/Dataherald/dataherald-python/commit/f0129cf790bb9849d6c64bae68b912756006e9da)) + ## 0.20.0 (2024-05-06) Full Changelog: [v0.19.0...v0.20.0](https://github.com/Dataherald/dataherald-python/compare/v0.19.0...v0.20.0) diff --git a/pyproject.toml b/pyproject.toml index 977f0ad..f414fbb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "dataherald" -version = "0.20.0" +version = "0.20.1" description = "The official Python library for the Dataherald API" dynamic = ["readme"] license = "Apache-2.0" diff --git a/src/dataherald/_version.py b/src/dataherald/_version.py index e157f48..6bdd8c9 100644 --- a/src/dataherald/_version.py +++ b/src/dataherald/_version.py @@ -1,4 +1,4 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. __title__ = "dataherald" -__version__ = "0.20.0" # x-release-please-version +__version__ = "0.20.1" # x-release-please-version
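[Appendix note on patch 34, "chore: add missing isclass check" — the reason `construct_type` needs `inspect.isclass(origin)` before `issubclass(origin, BaseModel)` is that some typing constructs resolve to an origin that is not a class at all, and `issubclass()` raises `TypeError` when handed a non-class. A small stdlib-only sketch of the failure mode, with illustrative names rather than the SDK's own code:]

```py
# Why the inspect.isclass() guard matters (illustration, not the SDK's code):
# issubclass() raises TypeError when its first argument is not a class, which
# is exactly what happens for special typing forms such as Literal.
import inspect
from typing import List, Literal, get_origin

for type_ in (List[int], Literal["a", "b"]):
    origin = get_origin(type_) or type_
    if inspect.isclass(origin) and issubclass(origin, list):
        print(f"{type_!r}: origin {origin!r} is a list subclass")
    else:
        # Without the isclass() guard, issubclass(Literal, ...) would raise here.
        print(f"{type_!r}: skipped, origin {origin!r} is not a usable class")
```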