Commit

update
baskaryan committed Dec 5, 2023
1 parent bf7b59e commit ee1478b
Showing 130 changed files with 3,901 additions and 761 deletions.
4 changes: 1 addition & 3 deletions .scripts/community_split/script_integrations.sh
@@ -130,7 +130,6 @@ git grep -l 'from langchain.tools.base' | xargs sed -i '' 's/from langchain.tool
git grep -l 'from langchain_community.llms.openai' | xargs sed -i '' 's/from langchain_community.llms.openai/from langchain_openai.llm/g'
git grep -l 'from langchain_community.chat_models.openai' | xargs sed -i '' 's/from langchain_community.chat_models.openai/from langchain_openai.chat_model/g'
git grep -l 'from langchain_community.embeddings.openai' | xargs sed -i '' 's/from langchain_community.embeddings.openai/from langchain_openai.embedding/g'
git grep -l 'from langchain.utils.json_schema' | xargs sed -i '' 's/from langchain.utils.json_schema/from langchain_core.utils.json_schema/g'

cd ..

@@ -152,11 +151,10 @@ mv community/langchain_community/embeddings/azure_openai.py partners/openai/lang
cp langchain/langchain/utils/openai.py partners/openai/langchain_openai/utils.py
cp langchain/langchain/utils/openai_functions.py partners/openai/langchain_openai/functions.py

git add partners core

git grep -l 'from langchain.utils.json_schema' | xargs sed -i '' 's/from langchain.utils.json_schema/from langchain_core.utils.json_schema/g'

git add partners core

rm community/langchain_community/chat_models/base.py
rm community/langchain_community/llms/base.py
rm community/langchain_community/tools/base.py
137 changes: 136 additions & 1 deletion libs/community/langchain_community/adapters/openai.py
@@ -25,6 +25,7 @@
SystemMessage,
ToolMessage,
)
from langchain_core.pydantic_v1 import BaseModel
from typing_extensions import Literal


@@ -38,6 +39,29 @@ async def aenumerate(
i += 1


class IndexableBaseModel(BaseModel):
"""Allows a BaseModel to return its fields by string variable indexing"""

def __getitem__(self, item: str) -> Any:
return getattr(self, item)


class Choice(IndexableBaseModel):
message: dict


class ChatCompletions(IndexableBaseModel):
choices: List[Choice]


class ChoiceChunk(IndexableBaseModel):
delta: dict


class ChatCompletionChunk(IndexableBaseModel):
choices: List[ChoiceChunk]


def convert_dict_to_message(_dict: Mapping[str, Any]) -> BaseMessage:
"""Convert a dictionary to a LangChain message.
@@ -129,7 +153,7 @@ def convert_openai_messages(messages: Sequence[Dict[str, Any]]) -> List[BaseMess
return [convert_dict_to_message(m) for m in messages]


def _convert_message_chunk_to_delta(chunk: BaseMessageChunk, i: int) -> Dict[str, Any]:
def _convert_message_chunk(chunk: BaseMessageChunk, i: int) -> dict:
_dict: Dict[str, Any] = {}
if isinstance(chunk, AIMessageChunk):
if i == 0:
@@ -148,6 +172,11 @@ def _convert_message_chunk_to_delta(chunk: BaseMessageChunk, i: int) -> Dict[str
# This only happens at the end of streams, and OpenAI returns as empty dict
if _dict == {"content": ""}:
_dict = {}
return _dict


def _convert_message_chunk_to_delta(chunk: BaseMessageChunk, i: int) -> Dict[str, Any]:
_dict = _convert_message_chunk(chunk, i)
return {"choices": [{"delta": _dict}]}
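The split gives a reusable helper, _convert_message_chunk, that builds an OpenAI-style delta dict, which _convert_message_chunk_to_delta then wraps in a choices list. A rough sketch of the resulting shape (the helper is private, and the exact delta contents for the first chunk are an assumption based on this diff):

from langchain_core.messages import AIMessageChunk
from langchain_community.adapters.openai import _convert_message_chunk_to_delta

chunk = AIMessageChunk(content="Hel")
# For the first chunk (i == 0) the delta carries the assistant role as well as the content,
# e.g. {"choices": [{"delta": {"role": "assistant", "content": "Hel"}}]}
print(_convert_message_chunk_to_delta(chunk, 0))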


@@ -262,3 +291,109 @@ def convert_messages_for_finetuning(
for session in sessions
if _has_assistant_message(session)
]


class Completions:
"""Completion."""

@overload
@staticmethod
def create(
messages: Sequence[Dict[str, Any]],
*,
provider: str = "ChatOpenAI",
stream: Literal[False] = False,
**kwargs: Any,
) -> ChatCompletions:
...

@overload
@staticmethod
def create(
messages: Sequence[Dict[str, Any]],
*,
provider: str = "ChatOpenAI",
stream: Literal[True],
**kwargs: Any,
) -> Iterable:
...

@staticmethod
def create(
messages: Sequence[Dict[str, Any]],
*,
provider: str = "ChatOpenAI",
stream: bool = False,
**kwargs: Any,
) -> Union[ChatCompletions, Iterable]:
models = importlib.import_module("langchain.chat_models")
model_cls = getattr(models, provider)
model_config = model_cls(**kwargs)
converted_messages = convert_openai_messages(messages)
if not stream:
result = model_config.invoke(converted_messages)
return ChatCompletions(
choices=[Choice(message=convert_message_to_dict(result))]
)
else:
return (
ChatCompletionChunk(
choices=[ChoiceChunk(delta=_convert_message_chunk(c, i))]
)
for i, c in enumerate(model_config.stream(converted_messages))
)

@overload
@staticmethod
async def acreate(
messages: Sequence[Dict[str, Any]],
*,
provider: str = "ChatOpenAI",
stream: Literal[False] = False,
**kwargs: Any,
) -> ChatCompletions:
...

@overload
@staticmethod
async def acreate(
messages: Sequence[Dict[str, Any]],
*,
provider: str = "ChatOpenAI",
stream: Literal[True],
**kwargs: Any,
) -> AsyncIterator:
...

@staticmethod
async def acreate(
messages: Sequence[Dict[str, Any]],
*,
provider: str = "ChatOpenAI",
stream: bool = False,
**kwargs: Any,
) -> Union[ChatCompletions, AsyncIterator]:
models = importlib.import_module("langchain.chat_models")
model_cls = getattr(models, provider)
model_config = model_cls(**kwargs)
converted_messages = convert_openai_messages(messages)
if not stream:
result = await model_config.ainvoke(converted_messages)
return ChatCompletions(
choices=[Choice(message=convert_message_to_dict(result))]
)
else:
return (
ChatCompletionChunk(
choices=[ChoiceChunk(delta=_convert_message_chunk(c, i))]
)
async for i, c in aenumerate(model_config.astream(converted_messages))
)


class Chat:
def __init__(self) -> None:
self.completions = Completions()


chat = Chat()
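Taken together, these additions give langchain_community.adapters.openai an openai-client-style surface backed by LangChain chat models. A brief usage sketch (the temperature kwarg and a configured OpenAI API key are assumptions, not part of this diff):

from langchain_community.adapters.openai import chat

messages = [{"role": "user", "content": "Say hello"}]

# Non-streaming: returns a ChatCompletions object whose fields can be read
# by attribute or, thanks to IndexableBaseModel, by string indexing.
completion = chat.completions.create(messages=messages, provider="ChatOpenAI", temperature=0)
print(completion.choices[0].message["content"])
print(completion["choices"][0]["message"]["content"])  # same value, dict-style access

# Streaming: returns a generator of ChatCompletionChunk objects carrying OpenAI-style deltas.
for chunk in chat.completions.create(messages=messages, provider="ChatOpenAI", stream=True):
    print(chunk["choices"][0]["delta"].get("content", ""), end="")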
4 changes: 4 additions & 0 deletions libs/community/langchain_community/agent_toolkits/__init__.py
@@ -34,6 +34,7 @@
from langchain_community.agent_toolkits.json.base import create_json_agent
from langchain_community.agent_toolkits.json.toolkit import JsonToolkit
from langchain_community.agent_toolkits.multion.toolkit import MultionToolkit
from langchain_community.agent_toolkits.nasa.toolkit import NasaToolkit
from langchain_community.agent_toolkits.nla.toolkit import NLAToolkit
from langchain_community.agent_toolkits.office365.toolkit import O365Toolkit
from langchain_community.agent_toolkits.openapi.base import create_openapi_agent
@@ -49,6 +50,7 @@
from langchain_community.agent_toolkits.spark_sql.toolkit import SparkSQLToolkit
from langchain_community.agent_toolkits.sql.base import create_sql_agent
from langchain_community.agent_toolkits.sql.toolkit import SQLDatabaseToolkit
from langchain_community.agent_toolkits.steam.toolkit import SteamToolkit
from langchain_community.agent_toolkits.vectorstore.base import (
create_vectorstore_agent,
create_vectorstore_router_agent,
@@ -94,12 +96,14 @@ def __getattr__(name: str) -> Any:
"JiraToolkit",
"JsonToolkit",
"MultionToolkit",
"NasaToolkit",
"NLAToolkit",
"O365Toolkit",
"OpenAPIToolkit",
"PlayWrightBrowserToolkit",
"PowerBIToolkit",
"SlackToolkit",
"SteamToolkit",
"SQLDatabaseToolkit",
"SparkSQLToolkit",
"VectorStoreInfo",
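With NasaToolkit and SteamToolkit now imported and listed in __all__, they can be pulled straight from the package top level; a minimal sketch, since their constructor arguments are not shown in this diff:

from langchain_community.agent_toolkits import NasaToolkit, SteamToolkit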
@@ -5,7 +5,6 @@
from langchain_core.messages import SystemMessage
from langchain_core.prompts.chat import MessagesPlaceholder
from langchain_core.tools import BaseTool
from langchain_openai.chat_model import ChatOpenAI

from langchain_community.agents.agent import AgentExecutor
from langchain_community.agents.openai_functions_agent.agent_token_buffer_memory import (
@@ -57,8 +56,6 @@ def create_conversational_retrieval_agent(
An agent executor initialized appropriately
"""

if not isinstance(llm, ChatOpenAI):
raise ValueError("Only supported with ChatOpenAI models.")
if remember_intermediate_steps:
memory: BaseMemory = AgentTokenBufferMemory(
memory_key=memory_key, llm=llm, max_token_limit=max_token_limit
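Dropping the isinstance check (and the ChatOpenAI import) means create_conversational_retrieval_agent no longer insists on an OpenAI model. An illustrative sketch follows; the import paths and the choice of ChatAnthropic are assumptions, since this excerpt does not show the file's location:

# Illustrative only: adjust the first import to wherever create_conversational_retrieval_agent
# actually lives; the path below is an assumption.
from langchain_community.agents.agent_toolkits.conversational_retrieval.openai_functions import (
    create_conversational_retrieval_agent,
)
from langchain_community.chat_models import ChatAnthropic  # any chat model, not just ChatOpenAI

llm = ChatAnthropic()  # before this change, non-OpenAI models raised a ValueError here
tools = []  # hypothetical: supply real retrieval tools here
agent_executor = create_conversational_retrieval_agent(llm=llm, tools=tools)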
