Skip to content

Commit

Permalink
fmt
Browse files — browse the repository at this point in the history
  • Branch information: baskaryan committed Sep 3, 2024
1 parent 615f8b0 commit 5f5287c
Show file tree
Hide file tree
Showing 8 changed files with 60 additions and 92 deletions.
20 changes: 8 additions & 12 deletions libs/partners/openai/langchain_openai/chat_models/azure.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,16 +31,15 @@
PydanticToolsParser,
)
from langchain_core.outputs import ChatResult
from pydantic import BaseModel, Field, SecretStr, model_validator
from langchain_core.runnables import Runnable, RunnableMap, RunnablePassthrough
from langchain_core.tools import BaseTool
from langchain_core.utils import from_env, secret_from_env
from langchain_core.utils.function_calling import convert_to_openai_tool
from langchain_core.utils.pydantic import is_basemodel_subclass

from langchain_openai.chat_models.base import BaseChatOpenAI
from pydantic import BaseModel, Field, SecretStr, model_validator
from typing_extensions import Self

from langchain_openai.chat_models.base import BaseChatOpenAI

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -604,19 +603,15 @@ def validate_environment(self) -> Self:
"Or you can equivalently specify:\n\n"
'base_url="https://xxx.openai.azure.com/openai/deployments/my-deployment"'
)
client_params = {
client_params: dict = {
"api_version": self.openai_api_version,
"azure_endpoint": self.azure_endpoint,
"azure_deployment": self.deployment_name,
"api_key": (
self.openai_api_key.get_secret_value()
if self.openai_api_key
else None
self.openai_api_key.get_secret_value() if self.openai_api_key else None
),
"azure_ad_token": (
self.azure_ad_token.get_secret_value()
if self.azure_ad_token
else None
self.azure_ad_token.get_secret_value() if self.azure_ad_token else None
),
"azure_ad_token_provider": self.azure_ad_token_provider,
"organization": self.openai_organization,
Expand All @@ -628,12 +623,13 @@ def validate_environment(self) -> Self:
}
if not (self.client or None):
sync_specific = {"http_client": self.http_client}
self.root_client = openai.AzureOpenAI(**client_params, **sync_specific)
self.root_client = openai.AzureOpenAI(**client_params, **sync_specific) # type: ignore[arg-type]
self.client = self.root_client.chat.completions
if not (self.async_client or None):
async_specific = {"http_client": self.http_async_client}
self.root_async_client = openai.AsyncAzureOpenAI(
**client_params, **async_specific
**client_params,
**async_specific, # type: ignore[arg-type]
)
self.async_client = self.root_async_client.chat.completions
return self
Expand Down
34 changes: 12 additions & 22 deletions libs/partners/openai/langchain_openai/chat_models/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,11 +73,10 @@
parse_tool_call,
)
from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
from pydantic import BaseModel, Field, model_validator, SecretStr
from langchain_core.runnables import Runnable, RunnableMap, RunnablePassthrough, chain
from langchain_core.runnables.config import run_in_executor
from langchain_core.tools import BaseTool
from langchain_core.utils import get_from_dict_or_env, get_pydantic_field_names
from langchain_core.utils import get_pydantic_field_names
from langchain_core.utils.function_calling import (
convert_to_openai_function,
convert_to_openai_tool,
Expand All @@ -88,10 +87,9 @@
is_basemodel_subclass,
)
from langchain_core.utils.utils import build_extra_kwargs, from_env, secret_from_env
from pydantic import ConfigDict
from pydantic import BaseModel, ConfigDict, Field, SecretStr, model_validator
from typing_extensions import Self


logger = logging.getLogger(__name__)


Expand Down Expand Up @@ -361,8 +359,7 @@ class BaseChatOpenAI(BaseChatModel):
model_kwargs: Dict[str, Any] = Field(default_factory=dict)
"""Holds any model parameters valid for `create` call not explicitly specified."""
openai_api_key: Optional[SecretStr] = Field(
alias="api_key",
default_factory=secret_from_env("OPENAI_API_KEY", default=None),
alias="api_key", default_factory=secret_from_env("OPENAI_API_KEY", default=None)
)
openai_api_base: Optional[str] = Field(default=None, alias="base_url")
"""Base URL path for API requests, leave blank if not using a proxy or service
Expand Down Expand Up @@ -431,7 +428,7 @@ class BaseChatOpenAI(BaseChatModel):
include_response_headers: bool = False
"""Whether to include response headers in the output message response_metadata."""

model_config = ConfigDict(populate_by_name=True,)
model_config = ConfigDict(populate_by_name=True)

@model_validator(mode="before")
@classmethod
Expand All @@ -458,14 +455,10 @@ def validate_environment(self) -> Self:
or os.getenv("OPENAI_ORG_ID")
or os.getenv("OPENAI_ORGANIZATION")
)
self.openai_api_base = self.openai_api_base or os.getenv(
"OPENAI_API_BASE"
)
client_params = {
self.openai_api_base = self.openai_api_base or os.getenv("OPENAI_API_BASE")
client_params: dict = {
"api_key": (
self.openai_api_key.get_secret_value()
if self.openai_api_key
else None
self.openai_api_key.get_secret_value() if self.openai_api_key else None
),
"organization": self.openai_organization,
"base_url": self.openai_api_base,
Expand All @@ -474,9 +467,7 @@ def validate_environment(self) -> Self:
"default_headers": self.default_headers,
"default_query": self.default_query,
}
if self.openai_proxy and (
self.http_client or self.http_async_client
):
if self.openai_proxy and (self.http_client or self.http_async_client):
openai_proxy = self.openai_proxy
http_client = self.http_client
http_async_client = self.http_async_client
Expand All @@ -496,7 +487,7 @@ def validate_environment(self) -> Self:
) from e
self.http_client = httpx.Client(proxy=self.openai_proxy)
sync_specific = {"http_client": self.http_client}
self.root_client = openai.OpenAI(**client_params, **sync_specific)
self.root_client = openai.OpenAI(**client_params, **sync_specific) # type: ignore[arg-type]
self.client = self.root_client.chat.completions
if not (self.async_client or None):
if self.openai_proxy and not self.http_async_client:
Expand All @@ -507,12 +498,11 @@ def validate_environment(self) -> Self:
"Could not import httpx python package. "
"Please install it with `pip install httpx`."
) from e
self.http_async_client = httpx.AsyncClient(
proxy=self.openai_proxy
)
self.http_async_client = httpx.AsyncClient(proxy=self.openai_proxy)
async_specific = {"http_client": self.http_async_client}
self.root_async_client = openai.AsyncOpenAI(
**client_params, **async_specific
**client_params,
**async_specific, # type: ignore[arg-type]
)
self.async_client = self.root_async_client.chat.completions
return self
Expand Down
29 changes: 13 additions & 16 deletions libs/partners/openai/langchain_openai/embeddings/azure.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,14 @@

from __future__ import annotations

from typing import Callable, Dict, Optional, Union
from typing import Callable, Optional, Union

import openai
from pydantic import Field, SecretStr, root_validator, model_validator
from langchain_core.utils import from_env, secret_from_env
from pydantic import Field, SecretStr, model_validator
from typing_extensions import Self, cast

from langchain_openai.embeddings.base import OpenAIEmbeddings
from typing_extensions import Self



class AzureOpenAIEmbeddings(OpenAIEmbeddings):
Expand Down Expand Up @@ -163,7 +162,7 @@ def validate_environment(self) -> Self:
openai_api_base = self.openai_api_base
if openai_api_base and self.validate_base_url:
if "/openai" not in openai_api_base:
self.openai_api_base += "/openai"
self.openai_api_base = cast(str, self.openai_api_base) + "/openai"
raise ValueError(
"As of openai>=1.0.0, Azure endpoints should be specified via "
"the `azure_endpoint` param not `openai_api_base` "
Expand All @@ -177,19 +176,15 @@ def validate_environment(self) -> Self:
"Instead use `deployment` (or alias `azure_deployment`) "
"and `azure_endpoint`."
)
client_params = {
client_params: dict = {
"api_version": self.openai_api_version,
"azure_endpoint": self.azure_endpoint,
"azure_deployment": self.deployment,
"api_key": (
self.openai_api_key.get_secret_value()
if self.openai_api_key
else None
self.openai_api_key.get_secret_value() if self.openai_api_key else None
),
"azure_ad_token": (
self.azure_ad_token.get_secret_value()
if self.azure_ad_token
else None
self.azure_ad_token.get_secret_value() if self.azure_ad_token else None
),
"azure_ad_token_provider": self.azure_ad_token_provider,
"organization": self.openai_organization,
Expand All @@ -200,14 +195,16 @@ def validate_environment(self) -> Self:
"default_query": self.default_query,
}
if not (self.client or None):
sync_specific = {"http_client": self.http_client}
sync_specific: dict = {"http_client": self.http_client}
self.client = openai.AzureOpenAI(
**client_params, **sync_specific
**client_params, # type: ignore[arg-type]
**sync_specific,
).embeddings
if not (self.async_client or None):
async_specific = {"http_client": self.http_async_client}
async_specific: dict = {"http_client": self.http_async_client}
self.async_client = openai.AsyncAzureOpenAI(
**client_params, **async_specific
**client_params, # type: ignore[arg-type]
**async_specific,
).embeddings
return self

Expand Down
28 changes: 9 additions & 19 deletions libs/partners/openai/langchain_openai/embeddings/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,13 +20,10 @@
import openai
import tiktoken
from langchain_core.embeddings import Embeddings
from pydantic import BaseModel, Field, SecretStr, root_validator, model_validator
from langchain_core.utils import from_env, get_pydantic_field_names, secret_from_env
from pydantic import ConfigDict
from pydantic import BaseModel, ConfigDict, Field, SecretStr, model_validator
from typing_extensions import Self



logger = logging.getLogger(__name__)


Expand Down Expand Up @@ -267,7 +264,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
"""Whether to check the token length of inputs and automatically split inputs
longer than embedding_ctx_length."""

model_config = ConfigDict(extra="forbid",populate_by_name=True,)
model_config = ConfigDict(extra="forbid", populate_by_name=True)

@model_validator(mode="before")
@classmethod
Expand Down Expand Up @@ -304,11 +301,9 @@ def validate_environment(self) -> Self:
"If you are using Azure, "
"please use the `AzureOpenAIEmbeddings` class."
)
client_params = {
client_params: dict = {
"api_key": (
self.openai_api_key.get_secret_value()
if self.openai_api_key
else None
self.openai_api_key.get_secret_value() if self.openai_api_key else None
),
"organization": self.openai_organization,
"base_url": self.openai_api_base,
Expand All @@ -318,9 +313,7 @@ def validate_environment(self) -> Self:
"default_query": self.default_query,
}

if self.openai_proxy and (
self.http_client or self.http_async_client
):
if self.openai_proxy and (self.http_client or self.http_async_client):
openai_proxy = self.openai_proxy
http_client = self.http_client
http_async_client = self.http_async_client
Expand All @@ -340,9 +333,7 @@ def validate_environment(self) -> Self:
) from e
self.http_client = httpx.Client(proxy=self.openai_proxy)
sync_specific = {"http_client": self.http_client}
self.client = openai.OpenAI(
**client_params, **sync_specific
).embeddings
self.client = openai.OpenAI(**client_params, **sync_specific).embeddings # type: ignore[arg-type]
if not (self.async_client or None):
if self.openai_proxy and not self.http_async_client:
try:
Expand All @@ -352,12 +343,11 @@ def validate_environment(self) -> Self:
"Could not import httpx python package. "
"Please install it with `pip install httpx`."
) from e
self.http_async_client = httpx.AsyncClient(
proxy=self.openai_proxy
)
self.http_async_client = httpx.AsyncClient(proxy=self.openai_proxy)
async_specific = {"http_client": self.http_async_client}
self.async_client = openai.AsyncOpenAI(
**client_params, **async_specific
**client_params,
**async_specific, # type: ignore[arg-type]
).embeddings
return self

Expand Down
15 changes: 8 additions & 7 deletions libs/partners/openai/langchain_openai/llms/azure.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,11 @@

import openai
from langchain_core.language_models import LangSmithParams
from pydantic import Field, SecretStr, root_validator, model_validator
from langchain_core.utils import from_env, secret_from_env
from pydantic import Field, SecretStr, model_validator
from typing_extensions import Self, cast

from langchain_openai.llms.base import BaseOpenAI
from typing_extensions import Self


logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -117,7 +116,7 @@ def validate_environment(self) -> Self:
if openai_api_base and self.validate_base_url:
if "/openai" not in openai_api_base:
self.openai_api_base = (
self.openai_api_base.rstrip("/") + "/openai"
cast(str, self.openai_api_base).rstrip("/") + "/openai"
)
raise ValueError(
"As of openai>=1.0.0, Azure endpoints should be specified via "
Expand All @@ -133,7 +132,7 @@ def validate_environment(self) -> Self:
"and `azure_endpoint`."
)
self.deployment_name = None
client_params = {
client_params: dict = {
"api_version": self.openai_api_version,
"azure_endpoint": self.azure_endpoint,
"azure_deployment": self.deployment_name,
Expand All @@ -154,12 +153,14 @@ def validate_environment(self) -> Self:
if not (self.client or None):
sync_specific = {"http_client": self.http_client}
self.client = openai.AzureOpenAI(
**client_params, **sync_specific
**client_params,
**sync_specific, # type: ignore[arg-type]
).completions
if not (self.async_client or None):
async_specific = {"http_client": self.http_async_client}
self.async_client = openai.AsyncAzureOpenAI(
**client_params, **async_specific
**client_params,
**async_specific, # type: ignore[arg-type]
).completions

return self
Expand Down
Loading

0 comments on commit 5f5287c

Please sign in to comment.