From e93e3bd45fcef320c4fa70eed3987d852e9c96e5 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Fri, 13 Dec 2024 11:17:39 +0000
Subject: [PATCH 1/3] chore(internal): remove some duplicated imports (#1946)

---
 src/openai/resources/beta/beta.py            | 20 +++++++++----------
 src/openai/resources/beta/threads/threads.py | 17 ++++++++--------
 .../resources/fine_tuning/fine_tuning.py     |  5 ++---
 3 files changed, 19 insertions(+), 23 deletions(-)

diff --git a/src/openai/resources/beta/beta.py b/src/openai/resources/beta/beta.py
index a7d3e707c8..5079c989a5 100644
--- a/src/openai/resources/beta/beta.py
+++ b/src/openai/resources/beta/beta.py
@@ -2,14 +2,6 @@
 
 from __future__ import annotations
 
-from .threads import (
-    Threads,
-    AsyncThreads,
-    ThreadsWithRawResponse,
-    AsyncThreadsWithRawResponse,
-    ThreadsWithStreamingResponse,
-    AsyncThreadsWithStreamingResponse,
-)
 from ..._compat import cached_property
 from .chat.chat import Chat, AsyncChat
 from .assistants import (
@@ -21,7 +13,15 @@
     AsyncAssistantsWithStreamingResponse,
 )
 from ..._resource import SyncAPIResource, AsyncAPIResource
-from .vector_stores import (
+from .threads.threads import (
+    Threads,
+    AsyncThreads,
+    ThreadsWithRawResponse,
+    AsyncThreadsWithRawResponse,
+    ThreadsWithStreamingResponse,
+    AsyncThreadsWithStreamingResponse,
+)
+from .vector_stores.vector_stores import (
     VectorStores,
     AsyncVectorStores,
     VectorStoresWithRawResponse,
@@ -29,8 +29,6 @@
     VectorStoresWithStreamingResponse,
     AsyncVectorStoresWithStreamingResponse,
 )
-from .threads.threads import Threads, AsyncThreads
-from .vector_stores.vector_stores import VectorStores, AsyncVectorStores
 
 __all__ = ["Beta", "AsyncBeta"]
 
diff --git a/src/openai/resources/beta/threads/threads.py b/src/openai/resources/beta/threads/threads.py
index 058ba71a17..e45090abb0 100644
--- a/src/openai/resources/beta/threads/threads.py
+++ b/src/openai/resources/beta/threads/threads.py
@@ -9,14 +9,6 @@
 import httpx
 
 from .... import _legacy_response
-from .runs import (
-    Runs,
-    AsyncRuns,
-    RunsWithRawResponse,
-    AsyncRunsWithRawResponse,
-    RunsWithStreamingResponse,
-    AsyncRunsWithStreamingResponse,
-)
 from .messages import (
     Messages,
     AsyncMessages,
@@ -31,7 +23,14 @@
     maybe_transform,
     async_maybe_transform,
 )
-from .runs.runs import Runs, AsyncRuns
+from .runs.runs import (
+    Runs,
+    AsyncRuns,
+    RunsWithRawResponse,
+    AsyncRunsWithRawResponse,
+    RunsWithStreamingResponse,
+    AsyncRunsWithStreamingResponse,
+)
 from ...._compat import cached_property
 from ...._resource import SyncAPIResource, AsyncAPIResource
 from ...._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper
diff --git a/src/openai/resources/fine_tuning/fine_tuning.py b/src/openai/resources/fine_tuning/fine_tuning.py
index c386de3c2a..d2bce87c48 100644
--- a/src/openai/resources/fine_tuning/fine_tuning.py
+++ b/src/openai/resources/fine_tuning/fine_tuning.py
@@ -2,7 +2,8 @@
 
 from __future__ import annotations
 
-from .jobs import (
+from ..._compat import cached_property
+from .jobs.jobs import (
     Jobs,
     AsyncJobs,
     JobsWithRawResponse,
@@ -10,8 +11,6 @@
     JobsWithStreamingResponse,
     AsyncJobsWithStreamingResponse,
 )
-from ..._compat import cached_property
-from .jobs.jobs import Jobs, AsyncJobs
 from ..._resource import SyncAPIResource, AsyncAPIResource
 
 __all__ = ["FineTuning", "AsyncFineTuning"]

From 2e2531d944a0c4ff748f9fb0b1c9a015f029dd2f Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Fri, 13 Dec 2024 14:01:59 +0000
Subject: [PATCH 2/3] chore(internal): updated imports (#1948)

---
 src/openai/_client.py | 212 +++++++++++++++++++++---------------------
 1 file changed, 104 insertions(+), 108 deletions(-)

diff --git a/src/openai/_client.py b/src/openai/_client.py
index d3ee6cf0f1..5419e88f06 100644
--- a/src/openai/_client.py
+++ b/src/openai/_client.py
@@ -8,7 +8,7 @@
 
 import httpx
 
-from . import resources, _exceptions
+from . import _exceptions
 from ._qs import Querystring
 from ._types import (
     NOT_GIVEN,
@@ -25,6 +25,7 @@
     get_async_library,
 )
 from ._version import __version__
+from .resources import files, images, models, batches, embeddings, completions, moderations
 from ._streaming import Stream as Stream, AsyncStream as AsyncStream
 from ._exceptions import OpenAIError, APIStatusError
 from ._base_client import (
@@ -32,33 +33,28 @@
     SyncAPIClient,
     AsyncAPIClient,
 )
+from .resources.beta import beta
+from .resources.chat import chat
+from .resources.audio import audio
+from .resources.uploads import uploads
+from .resources.fine_tuning import fine_tuning
 
-__all__ = [
-    "Timeout",
-    "Transport",
-    "ProxiesTypes",
-    "RequestOptions",
-    "resources",
-    "OpenAI",
-    "AsyncOpenAI",
-    "Client",
-    "AsyncClient",
-]
+__all__ = ["Timeout", "Transport", "ProxiesTypes", "RequestOptions", "OpenAI", "AsyncOpenAI", "Client", "AsyncClient"]
 
 
 class OpenAI(SyncAPIClient):
-    completions: resources.Completions
-    chat: resources.Chat
-    embeddings: resources.Embeddings
-    files: resources.Files
-    images: resources.Images
-    audio: resources.Audio
-    moderations: resources.Moderations
-    models: resources.Models
-    fine_tuning: resources.FineTuning
-    beta: resources.Beta
-    batches: resources.Batches
-    uploads: resources.Uploads
+    completions: completions.Completions
+    chat: chat.Chat
+    embeddings: embeddings.Embeddings
+    files: files.Files
+    images: images.Images
+    audio: audio.Audio
+    moderations: moderations.Moderations
+    models: models.Models
+    fine_tuning: fine_tuning.FineTuning
+    beta: beta.Beta
+    batches: batches.Batches
+    uploads: uploads.Uploads
     with_raw_response: OpenAIWithRawResponse
     with_streaming_response: OpenAIWithStreamedResponse
 
@@ -133,18 +129,18 @@ def __init__(
 
         self._default_stream_cls = Stream
 
-        self.completions = resources.Completions(self)
-        self.chat = resources.Chat(self)
-        self.embeddings = resources.Embeddings(self)
-        self.files = resources.Files(self)
-        self.images = resources.Images(self)
-        self.audio = resources.Audio(self)
-        self.moderations = resources.Moderations(self)
-        self.models = resources.Models(self)
-        self.fine_tuning = resources.FineTuning(self)
-        self.beta = resources.Beta(self)
-        self.batches = resources.Batches(self)
-        self.uploads = resources.Uploads(self)
+        self.completions = completions.Completions(self)
+        self.chat = chat.Chat(self)
+        self.embeddings = embeddings.Embeddings(self)
+        self.files = files.Files(self)
+        self.images = images.Images(self)
+        self.audio = audio.Audio(self)
+        self.moderations = moderations.Moderations(self)
+        self.models = models.Models(self)
+        self.fine_tuning = fine_tuning.FineTuning(self)
+        self.beta = beta.Beta(self)
+        self.batches = batches.Batches(self)
+        self.uploads = uploads.Uploads(self)
         self.with_raw_response = OpenAIWithRawResponse(self)
         self.with_streaming_response = OpenAIWithStreamedResponse(self)
 
@@ -261,18 +257,18 @@ def _make_status_error(
 
 
 class AsyncOpenAI(AsyncAPIClient):
-    completions: resources.AsyncCompletions
-    chat: resources.AsyncChat
-    embeddings: resources.AsyncEmbeddings
-    files: resources.AsyncFiles
-    images: resources.AsyncImages
-    audio: resources.AsyncAudio
-    moderations: resources.AsyncModerations
-    models: resources.AsyncModels
-    fine_tuning: resources.AsyncFineTuning
-    beta: resources.AsyncBeta
-    batches: resources.AsyncBatches
-    uploads: resources.AsyncUploads
+    completions: completions.AsyncCompletions
+    chat: chat.AsyncChat
+    embeddings: embeddings.AsyncEmbeddings
+    files: files.AsyncFiles
+    images: images.AsyncImages
+    audio: audio.AsyncAudio
+    moderations: moderations.AsyncModerations
+    models: models.AsyncModels
+    fine_tuning: fine_tuning.AsyncFineTuning
+    beta: beta.AsyncBeta
+    batches: batches.AsyncBatches
+    uploads: uploads.AsyncUploads
     with_raw_response: AsyncOpenAIWithRawResponse
     with_streaming_response: AsyncOpenAIWithStreamedResponse
 
@@ -347,18 +343,18 @@ def __init__(
 
         self._default_stream_cls = AsyncStream
 
-        self.completions = resources.AsyncCompletions(self)
-        self.chat = resources.AsyncChat(self)
-        self.embeddings = resources.AsyncEmbeddings(self)
-        self.files = resources.AsyncFiles(self)
-        self.images = resources.AsyncImages(self)
-        self.audio = resources.AsyncAudio(self)
-        self.moderations = resources.AsyncModerations(self)
-        self.models = resources.AsyncModels(self)
-        self.fine_tuning = resources.AsyncFineTuning(self)
-        self.beta = resources.AsyncBeta(self)
-        self.batches = resources.AsyncBatches(self)
-        self.uploads = resources.AsyncUploads(self)
+        self.completions = completions.AsyncCompletions(self)
+        self.chat = chat.AsyncChat(self)
+        self.embeddings = embeddings.AsyncEmbeddings(self)
+        self.files = files.AsyncFiles(self)
+        self.images = images.AsyncImages(self)
+        self.audio = audio.AsyncAudio(self)
+        self.moderations = moderations.AsyncModerations(self)
+        self.models = models.AsyncModels(self)
+        self.fine_tuning = fine_tuning.AsyncFineTuning(self)
+        self.beta = beta.AsyncBeta(self)
+        self.batches = batches.AsyncBatches(self)
+        self.uploads = uploads.AsyncUploads(self)
         self.with_raw_response = AsyncOpenAIWithRawResponse(self)
         self.with_streaming_response = AsyncOpenAIWithStreamedResponse(self)
 
@@ -476,66 +472,66 @@ def _make_status_error(
 
 class OpenAIWithRawResponse:
     def __init__(self, client: OpenAI) -> None:
-        self.completions = resources.CompletionsWithRawResponse(client.completions)
-        self.chat = resources.ChatWithRawResponse(client.chat)
-        self.embeddings = resources.EmbeddingsWithRawResponse(client.embeddings)
-        self.files = resources.FilesWithRawResponse(client.files)
-        self.images = resources.ImagesWithRawResponse(client.images)
-        self.audio = resources.AudioWithRawResponse(client.audio)
-        self.moderations = resources.ModerationsWithRawResponse(client.moderations)
-        self.models = resources.ModelsWithRawResponse(client.models)
-        self.fine_tuning = resources.FineTuningWithRawResponse(client.fine_tuning)
-        self.beta = resources.BetaWithRawResponse(client.beta)
-        self.batches = resources.BatchesWithRawResponse(client.batches)
-        self.uploads = resources.UploadsWithRawResponse(client.uploads)
+        self.completions = completions.CompletionsWithRawResponse(client.completions)
+        self.chat = chat.ChatWithRawResponse(client.chat)
+        self.embeddings = embeddings.EmbeddingsWithRawResponse(client.embeddings)
+        self.files = files.FilesWithRawResponse(client.files)
+        self.images = images.ImagesWithRawResponse(client.images)
+        self.audio = audio.AudioWithRawResponse(client.audio)
+        self.moderations = moderations.ModerationsWithRawResponse(client.moderations)
+        self.models = models.ModelsWithRawResponse(client.models)
+        self.fine_tuning = fine_tuning.FineTuningWithRawResponse(client.fine_tuning)
+        self.beta = beta.BetaWithRawResponse(client.beta)
+        self.batches = batches.BatchesWithRawResponse(client.batches)
+        self.uploads = uploads.UploadsWithRawResponse(client.uploads)
 
 
 class AsyncOpenAIWithRawResponse:
     def __init__(self, client: AsyncOpenAI) -> None:
-        self.completions = resources.AsyncCompletionsWithRawResponse(client.completions)
-        self.chat = resources.AsyncChatWithRawResponse(client.chat)
-        self.embeddings = resources.AsyncEmbeddingsWithRawResponse(client.embeddings)
-        self.files = resources.AsyncFilesWithRawResponse(client.files)
-        self.images = resources.AsyncImagesWithRawResponse(client.images)
-        self.audio = resources.AsyncAudioWithRawResponse(client.audio)
-        self.moderations = resources.AsyncModerationsWithRawResponse(client.moderations)
-        self.models = resources.AsyncModelsWithRawResponse(client.models)
-        self.fine_tuning = resources.AsyncFineTuningWithRawResponse(client.fine_tuning)
-        self.beta = resources.AsyncBetaWithRawResponse(client.beta)
-        self.batches = resources.AsyncBatchesWithRawResponse(client.batches)
-        self.uploads = resources.AsyncUploadsWithRawResponse(client.uploads)
+        self.completions = completions.AsyncCompletionsWithRawResponse(client.completions)
+        self.chat = chat.AsyncChatWithRawResponse(client.chat)
+        self.embeddings = embeddings.AsyncEmbeddingsWithRawResponse(client.embeddings)
+        self.files = files.AsyncFilesWithRawResponse(client.files)
+        self.images = images.AsyncImagesWithRawResponse(client.images)
+        self.audio = audio.AsyncAudioWithRawResponse(client.audio)
+        self.moderations = moderations.AsyncModerationsWithRawResponse(client.moderations)
+        self.models = models.AsyncModelsWithRawResponse(client.models)
+        self.fine_tuning = fine_tuning.AsyncFineTuningWithRawResponse(client.fine_tuning)
+        self.beta = beta.AsyncBetaWithRawResponse(client.beta)
+        self.batches = batches.AsyncBatchesWithRawResponse(client.batches)
+        self.uploads = uploads.AsyncUploadsWithRawResponse(client.uploads)
 
 
 class OpenAIWithStreamedResponse:
    def __init__(self, client: OpenAI) -> None:
-        self.completions = resources.CompletionsWithStreamingResponse(client.completions)
-        self.chat = resources.ChatWithStreamingResponse(client.chat)
-        self.embeddings = resources.EmbeddingsWithStreamingResponse(client.embeddings)
-        self.files = resources.FilesWithStreamingResponse(client.files)
-        self.images = resources.ImagesWithStreamingResponse(client.images)
-        self.audio = resources.AudioWithStreamingResponse(client.audio)
-        self.moderations = resources.ModerationsWithStreamingResponse(client.moderations)
-        self.models = resources.ModelsWithStreamingResponse(client.models)
-        self.fine_tuning = resources.FineTuningWithStreamingResponse(client.fine_tuning)
-        self.beta = resources.BetaWithStreamingResponse(client.beta)
-        self.batches = resources.BatchesWithStreamingResponse(client.batches)
-        self.uploads = resources.UploadsWithStreamingResponse(client.uploads)
+        self.completions = completions.CompletionsWithStreamingResponse(client.completions)
+        self.chat = chat.ChatWithStreamingResponse(client.chat)
+        self.embeddings = embeddings.EmbeddingsWithStreamingResponse(client.embeddings)
+        self.files = files.FilesWithStreamingResponse(client.files)
+        self.images = images.ImagesWithStreamingResponse(client.images)
+        self.audio = audio.AudioWithStreamingResponse(client.audio)
+        self.moderations = moderations.ModerationsWithStreamingResponse(client.moderations)
+        self.models = models.ModelsWithStreamingResponse(client.models)
+        self.fine_tuning = fine_tuning.FineTuningWithStreamingResponse(client.fine_tuning)
+        self.beta = beta.BetaWithStreamingResponse(client.beta)
+        self.batches = batches.BatchesWithStreamingResponse(client.batches)
+        self.uploads = uploads.UploadsWithStreamingResponse(client.uploads)
 
 
 class AsyncOpenAIWithStreamedResponse:
     def __init__(self, client: AsyncOpenAI) -> None:
-        self.completions = resources.AsyncCompletionsWithStreamingResponse(client.completions)
-        self.chat = resources.AsyncChatWithStreamingResponse(client.chat)
-        self.embeddings = resources.AsyncEmbeddingsWithStreamingResponse(client.embeddings)
-        self.files = resources.AsyncFilesWithStreamingResponse(client.files)
-        self.images = resources.AsyncImagesWithStreamingResponse(client.images)
-        self.audio = resources.AsyncAudioWithStreamingResponse(client.audio)
-        self.moderations = resources.AsyncModerationsWithStreamingResponse(client.moderations)
-        self.models = resources.AsyncModelsWithStreamingResponse(client.models)
-        self.fine_tuning = resources.AsyncFineTuningWithStreamingResponse(client.fine_tuning)
-        self.beta = resources.AsyncBetaWithStreamingResponse(client.beta)
-        self.batches = resources.AsyncBatchesWithStreamingResponse(client.batches)
-        self.uploads = resources.AsyncUploadsWithStreamingResponse(client.uploads)
+        self.completions = completions.AsyncCompletionsWithStreamingResponse(client.completions)
+        self.chat = chat.AsyncChatWithStreamingResponse(client.chat)
+        self.embeddings = embeddings.AsyncEmbeddingsWithStreamingResponse(client.embeddings)
+        self.files = files.AsyncFilesWithStreamingResponse(client.files)
+        self.images = images.AsyncImagesWithStreamingResponse(client.images)
+        self.audio = audio.AsyncAudioWithStreamingResponse(client.audio)
+        self.moderations = moderations.AsyncModerationsWithStreamingResponse(client.moderations)
+        self.models = models.AsyncModelsWithStreamingResponse(client.models)
+        self.fine_tuning = fine_tuning.AsyncFineTuningWithStreamingResponse(client.fine_tuning)
+        self.beta = beta.AsyncBetaWithStreamingResponse(client.beta)
+        self.batches = batches.AsyncBatchesWithStreamingResponse(client.batches)
+        self.uploads = uploads.AsyncUploadsWithStreamingResponse(client.uploads)
 
 
 Client = OpenAI

From e94d98e9bf97a5d2d02d79d58f2abdbab26ff2bd Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Fri, 13 Dec 2024 14:02:30 +0000
Subject: [PATCH 3/3] release: 1.57.4

---
 .release-please-manifest.json | 2 +-
 CHANGELOG.md                  | 9 +++++++++
 pyproject.toml                | 2 +-
 src/openai/_version.py        | 2 +-
 4 files changed, 12 insertions(+), 3 deletions(-)

diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 58e64c502c..f9ae229e1a 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "1.57.3"
+  ".": "1.57.4"
 }
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 80ed457618..02b7d0271d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,14 @@
 # Changelog
 
+## 1.57.4 (2024-12-13)
+
+Full Changelog: [v1.57.3...v1.57.4](https://github.com/openai/openai-python/compare/v1.57.3...v1.57.4)
+
+### Chores
+
+* **internal:** remove some duplicated imports ([#1946](https://github.com/openai/openai-python/issues/1946)) ([f94fddd](https://github.com/openai/openai-python/commit/f94fddd377015764b3c82919fdf956f619447b77))
+* **internal:** updated imports ([#1948](https://github.com/openai/openai-python/issues/1948)) ([13971fc](https://github.com/openai/openai-python/commit/13971fc450106746c0ae02ab931e68b770ee105e))
+
 ## 1.57.3 (2024-12-12)
 
 Full Changelog: [v1.57.2...v1.57.3](https://github.com/openai/openai-python/compare/v1.57.2...v1.57.3)
diff --git a/pyproject.toml b/pyproject.toml
index a6a0868405..e03d4e798f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "openai"
-version = "1.57.3"
+version = "1.57.4"
 description = "The official Python library for the openai API"
 dynamic = ["readme"]
 license = "Apache-2.0"
diff --git a/src/openai/_version.py b/src/openai/_version.py
index 94ba432279..5b82015017 100644
--- a/src/openai/_version.py
+++ b/src/openai/_version.py
@@ -1,4 +1,4 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
 __title__ = "openai"
-__version__ = "1.57.3"  # x-release-please-version
+__version__ = "1.57.4"  # x-release-please-version