Skip to content

Commit

Permalink
Merge pull request #31 from Dataherald/release-please--branches--main…
Browse files Browse the repository at this point in the history
…--changes--next

release: 0.15.0
  • Loading branch information
jcjc712 authored Jan 24, 2024
2 parents 883b146 + 7ff4a75 commit 63fc9e1
Show file tree
Hide file tree
Showing 52 changed files with 1,138 additions and 736 deletions.
2 changes: 1 addition & 1 deletion .release-please-manifest.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
{
".": "0.14.0"
".": "0.15.0"
}
2 changes: 1 addition & 1 deletion .stats.yml
Original file line number Diff line number Diff line change
@@ -1 +1 @@
configured_endpoints: 45
configured_endpoints: 46
12 changes: 12 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,17 @@
# Changelog

## 0.15.0 (2024-01-24)

Full Changelog: [v0.14.0...v0.15.0](https://github.com/Dataherald/dataherald-python/compare/v0.14.0...v0.15.0)

### Features

* **api:** OpenAPI spec update ([#30](https://github.com/Dataherald/dataherald-python/issues/30)) ([d5328bf](https://github.com/Dataherald/dataherald-python/commit/d5328bfab08dc97760ce01661c3f83665bafd389))
* **api:** OpenAPI spec update ([#32](https://github.com/Dataherald/dataherald-python/issues/32)) ([e443463](https://github.com/Dataherald/dataherald-python/commit/e443463f508753125e7b48582a4432156959b898))
* **api:** OpenAPI spec update ([#33](https://github.com/Dataherald/dataherald-python/issues/33)) ([1c8a887](https://github.com/Dataherald/dataherald-python/commit/1c8a88761d37f060a0835735fabe8e75980ae2d7))
* **api:** OpenAPI spec update ([#34](https://github.com/Dataherald/dataherald-python/issues/34)) ([6e1ac9c](https://github.com/Dataherald/dataherald-python/commit/6e1ac9cf083f2382260f44f5f2627cbbc6b3d8f2))
* **api:** OpenAPI spec update ([#35](https://github.com/Dataherald/dataherald-python/issues/35)) ([317b743](https://github.com/Dataherald/dataherald-python/commit/317b74340bebc295024fcadb0a4ff3aeebefe06e))

## 0.14.0 (2024-01-16)

Full Changelog: [v0.13.0...v0.14.0](https://github.com/Dataherald/dataherald-python/compare/v0.13.0...v0.14.0)
Expand Down
35 changes: 28 additions & 7 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,10 @@ client = Dataherald(
environment="staging",
)

db_connection_response = client.database_connections.create()
db_connection_response = client.database_connections.create(
alias="string",
connection_uri="string",
)
print(db_connection_response.id)
```

Expand All @@ -58,7 +61,10 @@ client = AsyncDataherald(


async def main() -> None:
db_connection_response = await client.database_connections.create()
db_connection_response = await client.database_connections.create(
alias="string",
connection_uri="string",
)
print(db_connection_response.id)


Expand Down Expand Up @@ -92,7 +98,10 @@ from dataherald import Dataherald
client = Dataherald()

try:
client.database_connections.create()
client.database_connections.create(
alias="string",
connection_uri="string",
)
except dataherald.APIConnectionError as e:
print("The server could not be reached")
print(e.__cause__) # an underlying Exception, likely raised within httpx.
Expand Down Expand Up @@ -135,7 +144,10 @@ client = Dataherald(
)

# Or, configure per-request:
client.with_options(max_retries=5).database_connections.create()
client.with_options(max_retries=5).database_connections.create(
alias="string",
connection_uri="string",
)
```

### Timeouts
Expand All @@ -158,7 +170,10 @@ client = Dataherald(
)

# Override per-request:
client.with_options(timeout=5 * 1000).database_connections.create()
client.with_options(timeout=5 * 1000).database_connections.create(
alias="string",
connection_uri="string",
)
```

On timeout, an `APITimeoutError` is thrown.
Expand Down Expand Up @@ -197,7 +212,10 @@ The "raw" Response object can be accessed by prefixing `.with_raw_response.` to
from dataherald import Dataherald

client = Dataherald()
response = client.database_connections.with_raw_response.create()
response = client.database_connections.with_raw_response.create(
alias="string",
connection_uri="string",
)
print(response.headers.get('X-My-Header'))

database_connection = response.parse() # get the object that `database_connections.create()` would have returned
Expand All @@ -215,7 +233,10 @@ The above interface eagerly reads the full response body when you make the reque
To stream the response body, use `.with_streaming_response` instead, which requires a context manager and only reads the response body once you call `.read()`, `.text()`, `.json()`, `.iter_bytes()`, `.iter_text()`, `.iter_lines()` or `.parse()`. In the async client, these are async methods.

```python
with client.database_connections.with_streaming_response.create() as response:
with client.database_connections.with_streaming_response.create(
alias="string",
connection_uri="string",
) as response:
print(response.headers.get("X-My-Header"))

for line in response.iter_lines():
Expand Down
1 change: 1 addition & 0 deletions api.md
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,7 @@ from dataherald.types import InstructionListResponse, InstructionDeleteResponse
Methods:

- <code title="post /api/instructions">client.instructions.<a href="./src/dataherald/resources/instructions/instructions.py">create</a>(\*\*<a href="src/dataherald/types/instruction_create_params.py">params</a>) -> <a href="./src/dataherald/types/shared/instruction_response.py">InstructionResponse</a></code>
- <code title="get /api/instructions/{id}">client.instructions.<a href="./src/dataherald/resources/instructions/instructions.py">retrieve</a>(id) -> <a href="./src/dataherald/types/shared/instruction_response.py">InstructionResponse</a></code>
- <code title="put /api/instructions/{id}">client.instructions.<a href="./src/dataherald/resources/instructions/instructions.py">update</a>(id, \*\*<a href="src/dataherald/types/instruction_update_params.py">params</a>) -> <a href="./src/dataherald/types/shared/instruction_response.py">InstructionResponse</a></code>
- <code title="get /api/instructions">client.instructions.<a href="./src/dataherald/resources/instructions/instructions.py">list</a>(\*\*<a href="src/dataherald/types/instruction_list_params.py">params</a>) -> <a href="./src/dataherald/types/instruction_list_response.py">InstructionListResponse</a></code>
- <code title="delete /api/instructions/{id}">client.instructions.<a href="./src/dataherald/resources/instructions/instructions.py">delete</a>(id) -> <a href="./src/dataherald/types/instruction_delete_response.py">object</a></code>
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "dataherald"
version = "0.14.0"
version = "0.15.0"
description = "The official Python library for the Dataherald API"
readme = "README.md"
license = "Apache-2.0"
Expand Down
68 changes: 41 additions & 27 deletions src/dataherald/_base_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,9 @@
from ._constants import (
DEFAULT_LIMITS,
DEFAULT_TIMEOUT,
MAX_RETRY_DELAY,
DEFAULT_MAX_RETRIES,
INITIAL_RETRY_DELAY,
RAW_RESPONSE_HEADER,
OVERRIDE_CAST_TO_HEADER,
)
Expand Down Expand Up @@ -589,47 +591,57 @@ def base_url(self, url: URL | str) -> None:
def platform_headers(self) -> Dict[str, str]:
return platform_headers(self._version)

def _parse_retry_after_header(self, response_headers: Optional[httpx.Headers] = None) -> float | None:
"""Returns a float of the number of seconds (not milliseconds) to wait before retrying, or None if unspecified.
About the Retry-After header: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After
See also https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After#syntax
"""
if response_headers is None:
return None

# First, try the non-standard `retry-after-ms` header for milliseconds,
# which is more precise than integer-seconds `retry-after`
try:
retry_ms_header = response_headers.get("retry-after-ms", None)
return float(retry_ms_header) / 1000
except (TypeError, ValueError):
pass

# Next, try parsing `retry-after` header as seconds (allowing nonstandard floats).
retry_header = response_headers.get("retry-after")
try:
# note: the spec indicates that this should only ever be an integer
# but if someone sends a float there's no reason for us to not respect it
return float(retry_header)
except (TypeError, ValueError):
pass

# Last, try parsing `retry-after` as a date.
retry_date_tuple = email.utils.parsedate_tz(retry_header)
if retry_date_tuple is None:
return None

retry_date = email.utils.mktime_tz(retry_date_tuple)
return float(retry_date - time.time())

def _calculate_retry_timeout(
self,
remaining_retries: int,
options: FinalRequestOptions,
response_headers: Optional[httpx.Headers] = None,
) -> float:
max_retries = options.get_max_retries(self.max_retries)
try:
# About the Retry-After header: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After
#
# <http-date>". See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After#syntax for
# details.
if response_headers is not None:
retry_header = response_headers.get("retry-after")
try:
# note: the spec indicates that this should only ever be an integer
# but if someone sends a float there's no reason for us to not respect it
retry_after = float(retry_header)
except Exception:
retry_date_tuple = email.utils.parsedate_tz(retry_header)
if retry_date_tuple is None:
retry_after = -1
else:
retry_date = email.utils.mktime_tz(retry_date_tuple)
retry_after = int(retry_date - time.time())
else:
retry_after = -1

except Exception:
retry_after = -1

# If the API asks us to wait a certain amount of time (and it's a reasonable amount), just do what it says.
if 0 < retry_after <= 60:
retry_after = self._parse_retry_after_header(response_headers)
if retry_after is not None and 0 < retry_after <= 60:
return retry_after

initial_retry_delay = 0.5
max_retry_delay = 8.0
nb_retries = max_retries - remaining_retries

# Apply exponential backoff, but not more than the max.
sleep_seconds = min(initial_retry_delay * pow(2.0, nb_retries), max_retry_delay)
sleep_seconds = min(INITIAL_RETRY_DELAY * pow(2.0, nb_retries), MAX_RETRY_DELAY)

# Apply some jitter, plus-or-minus half a second.
jitter = 1 - 0.25 * random()
Expand Down Expand Up @@ -764,6 +776,7 @@ def __init__(
proxies=proxies,
transport=transport,
limits=limits,
follow_redirects=True,
)

def is_closed(self) -> bool:
Expand Down Expand Up @@ -1292,6 +1305,7 @@ def __init__(
proxies=proxies,
transport=transport,
limits=limits,
follow_redirects=True,
)

def is_closed(self) -> bool:
Expand Down
39 changes: 38 additions & 1 deletion src/dataherald/_compat.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,15 @@
from __future__ import annotations

from typing import TYPE_CHECKING, Any, Union, TypeVar, cast
from typing import TYPE_CHECKING, Any, Union, Generic, TypeVar, Callable, cast, overload
from datetime import date, datetime
from typing_extensions import Self

import pydantic
from pydantic.fields import FieldInfo

from ._types import StrBytesIntFloat

_T = TypeVar("_T")
_ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel)

# --------------- Pydantic v2 compatibility ---------------
Expand Down Expand Up @@ -178,8 +180,43 @@ class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel):
# cached properties
if TYPE_CHECKING:
cached_property = property

# we define a separate type (copied from typeshed)
# that represents that `cached_property` is `set`able
# at runtime, which differs from `@property`.
#
# this is a separate type as editors likely special case
# `@property` and we don't want to cause issues just to have
# more helpful internal types.

class typed_cached_property(Generic[_T]):
func: Callable[[Any], _T]
attrname: str | None

def __init__(self, func: Callable[[Any], _T]) -> None:
...

@overload
def __get__(self, instance: None, owner: type[Any] | None = None) -> Self:
...

@overload
def __get__(self, instance: object, owner: type[Any] | None = None) -> _T:
...

def __get__(self, instance: object, owner: type[Any] | None = None) -> _T | Self:
raise NotImplementedError()

def __set_name__(self, owner: type[Any], name: str) -> None:
...

# __set__ is not defined at runtime, but @cached_property is designed to be settable
def __set__(self, instance: object, value: _T) -> None:
...
else:
try:
from functools import cached_property as cached_property
except ImportError:
from cached_property import cached_property as cached_property

typed_cached_property = cached_property
3 changes: 3 additions & 0 deletions src/dataherald/_constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,6 @@
DEFAULT_TIMEOUT = httpx.Timeout(timeout=60.0, connect=5.0)
DEFAULT_MAX_RETRIES = 2
DEFAULT_LIMITS = httpx.Limits(max_connections=100, max_keepalive_connections=20)

INITIAL_RETRY_DELAY = 0.5
MAX_RETRY_DELAY = 8.0
1 change: 1 addition & 0 deletions src/dataherald/_utils/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
from ._sync import asyncify as asyncify
from ._proxy import LazyProxy as LazyProxy
from ._utils import (
flatten as flatten,
Expand Down
20 changes: 2 additions & 18 deletions src/dataherald/_utils/_proxy.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from abc import ABC, abstractmethod
from typing import Generic, TypeVar, Iterable, cast
from typing_extensions import ClassVar, override
from typing_extensions import override

T = TypeVar("T")

Expand All @@ -13,11 +13,6 @@ class LazyProxy(Generic[T], ABC):
This includes forwarding attribute access and other methods.
"""

should_cache: ClassVar[bool] = False

def __init__(self) -> None:
self.__proxied: T | None = None

# Note: we have to special case proxies that themselves return proxies
# to support using a proxy as a catch-all for any random access, e.g. `proxy.foo.bar.baz`

Expand Down Expand Up @@ -57,18 +52,7 @@ def __class__(self) -> type:
return proxied.__class__

def __get_proxied__(self) -> T:
if not self.should_cache:
return self.__load__()

proxied = self.__proxied
if proxied is not None:
return proxied

self.__proxied = proxied = self.__load__()
return proxied

def __set_proxied__(self, value: T) -> None:
self.__proxied = value
return self.__load__()

def __as_proxied__(self) -> T:
"""Helper method that returns the current proxy, typed as the loaded object"""
Expand Down
Loading

0 comments on commit 63fc9e1

Please sign in to comment.