Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Nc/1oct/py prepare multipart #1054

Merged
merged 19 commits (branch names omitted in this capture) on
Oct 2, 2024
Merged
295 changes: 269 additions & 26 deletions python/langsmith/client.py

Large diffs are not rendered by default.

10 changes: 10 additions & 0 deletions python/langsmith/schemas.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
List,
Optional,
Protocol,
Tuple,
Union,
runtime_checkable,
)
Expand Down Expand Up @@ -43,6 +44,9 @@
SCORE_TYPE = Union[StrictBool, StrictInt, StrictFloat, None]
VALUE_TYPE = Union[Dict, str, None]

Attachments = Dict[str, Tuple[str, bytes]]
"""Attachments associated with the run. Each entry is a tuple of (mime_type, bytes)."""


class ExampleBase(BaseModel):
"""Example base model."""
Expand Down Expand Up @@ -318,6 +322,9 @@ class Run(RunBase):
""" # noqa: E501
in_dataset: Optional[bool] = None
"""Whether this run is in a dataset."""
attachments: Attachments = Field(default_factory=dict)
nfcampos marked this conversation as resolved.
Show resolved Hide resolved
"""Attachments associated with the run.
Each entry is a tuple of (mime_type, bytes)."""
_host_url: Optional[str] = PrivateAttr(default=None)

def __init__(self, _host_url: Optional[str] = None, **kwargs: Any) -> None:
Expand Down Expand Up @@ -376,6 +383,7 @@ class RunLikeDict(TypedDict, total=False):
output_attachments: Optional[dict]
trace_id: UUID
dotted_order: str
attachments: Attachments


class RunWithAnnotationQueueInfo(RunBase):
Expand Down Expand Up @@ -637,6 +645,8 @@ class AnnotationQueue(BaseModel):
class BatchIngestConfig(TypedDict, total=False):
"""Configuration for batch ingestion."""

use_multipart_endpoint: bool
"""Whether to use the multipart endpoint for batch ingestion."""
scale_up_qsize_trigger: int
"""The queue size threshold that triggers scaling up."""
scale_up_nthreads_limit: int
Expand Down
3 changes: 0 additions & 3 deletions python/langsmith/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -146,9 +146,6 @@ def raise_for_status_with_text(
except requests.HTTPError as e:
raise requests.HTTPError(str(e), response.text) from e # type: ignore[call-arg]

except httpx.HTTPError as e:
raise httpx.HTTPError(str(e), response.text) from e # type: ignore[call-arg]


def get_enum_value(enu: Union[enum.Enum, str]) -> str:
"""Get the value of a string enum."""
Expand Down
33 changes: 31 additions & 2 deletions python/poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions python/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ pydantic = [
requests = "^2"
orjson = "^3.9.14"
httpx = ">=0.23.0,<1"
requests-toolbelt = "^1.0.0"

[tool.poetry.group.dev.dependencies]
pytest = "^7.3.1"
Expand All @@ -59,6 +60,7 @@ pytest-rerunfailures = "^14.0"
pytest-socket = "^0.7.0"
pyperf = "^2.7.0"
py-spy = "^0.3.14"
multipart = "^1.0.0"

[tool.poetry.group.lint.dependencies]
openai = "^1.10"
Expand Down
12 changes: 10 additions & 2 deletions python/tests/integration_tests/test_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -615,7 +615,10 @@ def test_create_chat_example(
langchain_client.delete_dataset(dataset_id=dataset.id)


def test_batch_ingest_runs(langchain_client: Client) -> None:
@pytest.mark.parametrize("use_multipart_endpoint", [True, False])
def test_batch_ingest_runs(
langchain_client: Client, use_multipart_endpoint: bool
) -> None:
_session = "__test_batch_ingest_runs"
trace_id = uuid4()
trace_id_2 = uuid4()
Expand Down Expand Up @@ -669,7 +672,12 @@ def test_batch_ingest_runs(langchain_client: Client) -> None:
"outputs": {"output1": 4, "output2": 5},
},
]
langchain_client.batch_ingest_runs(create=runs_to_create, update=runs_to_update)
if use_multipart_endpoint:
langchain_client.multipart_ingest_runs(
create=runs_to_create, update=runs_to_update
)
else:
langchain_client.batch_ingest_runs(create=runs_to_create, update=runs_to_update)
runs = []
wait = 4
for _ in range(15):
Expand Down
Loading
Loading