diff --git a/cookbook/advanced_rag_eval.ipynb b/cookbook/advanced_rag_eval.ipynb index 45d424b452d4c..02e86817b29ce 100644 --- a/cookbook/advanced_rag_eval.ipynb +++ b/cookbook/advanced_rag_eval.ipynb @@ -520,7 +520,7 @@ "source": [ "import re\n", "\n", - "from langchain.schema import Document\n", + "from langchain_core.documents import Document\n", "from langchain_core.runnables import RunnableLambda\n", "\n", "\n", diff --git a/cookbook/apache_kafka_message_handling.ipynb b/cookbook/apache_kafka_message_handling.ipynb index 616c12ac68039..36a0c07e965bd 100644 --- a/cookbook/apache_kafka_message_handling.ipynb +++ b/cookbook/apache_kafka_message_handling.ipynb @@ -167,7 +167,7 @@ "from langchain.llms import LlamaCpp\n", "from langchain.memory import ConversationTokenBufferMemory\n", "from langchain.prompts import PromptTemplate, load_prompt\n", - "from langchain.schema import SystemMessage\n", + "from langchain_core.messages import SystemMessage\n", "from langchain_experimental.chat_models import Llama2Chat\n", "from quixstreams import Application, State, message_key\n", "\n", diff --git a/cookbook/custom_agent_with_plugin_retrieval.ipynb b/cookbook/custom_agent_with_plugin_retrieval.ipynb index 9131599da0fab..ac7545bcdbbcc 100644 --- a/cookbook/custom_agent_with_plugin_retrieval.ipynb +++ b/cookbook/custom_agent_with_plugin_retrieval.ipynb @@ -42,9 +42,9 @@ ")\n", "from langchain.chains import LLMChain\n", "from langchain.prompts import StringPromptTemplate\n", - "from langchain.schema import AgentAction, AgentFinish\n", "from langchain_community.agent_toolkits import NLAToolkit\n", "from langchain_community.tools.plugin import AIPlugin\n", + "from langchain_core.agents import AgentAction, AgentFinish\n", "from langchain_openai import OpenAI" ] }, @@ -114,8 +114,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import FAISS\n", + "from langchain_core.documents import Document\n", 
"from langchain_openai import OpenAIEmbeddings" ] }, diff --git a/cookbook/custom_agent_with_plugin_retrieval_using_plugnplai.ipynb b/cookbook/custom_agent_with_plugin_retrieval_using_plugnplai.ipynb index 30fc61712da6b..7dfa363ece945 100644 --- a/cookbook/custom_agent_with_plugin_retrieval_using_plugnplai.ipynb +++ b/cookbook/custom_agent_with_plugin_retrieval_using_plugnplai.ipynb @@ -67,9 +67,9 @@ ")\n", "from langchain.chains import LLMChain\n", "from langchain.prompts import StringPromptTemplate\n", - "from langchain.schema import AgentAction, AgentFinish\n", "from langchain_community.agent_toolkits import NLAToolkit\n", "from langchain_community.tools.plugin import AIPlugin\n", + "from langchain_core.agents import AgentAction, AgentFinish\n", "from langchain_openai import OpenAI" ] }, @@ -138,8 +138,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import FAISS\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings" ] }, diff --git a/cookbook/custom_agent_with_tool_retrieval.ipynb b/cookbook/custom_agent_with_tool_retrieval.ipynb index 7981a13716ba0..73105ea68e39e 100644 --- a/cookbook/custom_agent_with_tool_retrieval.ipynb +++ b/cookbook/custom_agent_with_tool_retrieval.ipynb @@ -40,8 +40,8 @@ ")\n", "from langchain.chains import LLMChain\n", "from langchain.prompts import StringPromptTemplate\n", - "from langchain.schema import AgentAction, AgentFinish\n", "from langchain_community.utilities import SerpAPIWrapper\n", + "from langchain_core.agents import AgentAction, AgentFinish\n", "from langchain_openai import OpenAI" ] }, @@ -103,8 +103,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import FAISS\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings" ] }, diff --git 
a/cookbook/custom_multi_action_agent.ipynb b/cookbook/custom_multi_action_agent.ipynb index 271c4c0d81610..c37a5bf9dd420 100644 --- a/cookbook/custom_multi_action_agent.ipynb +++ b/cookbook/custom_multi_action_agent.ipynb @@ -72,7 +72,7 @@ "source": [ "from typing import Any, List, Tuple, Union\n", "\n", - "from langchain.schema import AgentAction, AgentFinish\n", + "from langchain_core.agents import AgentAction, AgentFinish\n", "\n", "\n", "class FakeAgent(BaseMultiActionAgent):\n", diff --git a/cookbook/forward_looking_retrieval_augmented_generation.ipynb b/cookbook/forward_looking_retrieval_augmented_generation.ipynb index 0abfe0bfeff60..4406c1812db08 100644 --- a/cookbook/forward_looking_retrieval_augmented_generation.ipynb +++ b/cookbook/forward_looking_retrieval_augmented_generation.ipynb @@ -73,8 +73,9 @@ " AsyncCallbackManagerForRetrieverRun,\n", " CallbackManagerForRetrieverRun,\n", ")\n", - "from langchain.schema import BaseRetriever, Document\n", "from langchain_community.utilities import GoogleSerperAPIWrapper\n", + "from langchain_core.documents import Document\n", + "from langchain_core.retrievers import BaseRetriever\n", "from langchain_openai import ChatOpenAI, OpenAI" ] }, diff --git a/cookbook/openai_functions_retrieval_qa.ipynb b/cookbook/openai_functions_retrieval_qa.ipynb index 648b28b5e2c17..621e997088e32 100644 --- a/cookbook/openai_functions_retrieval_qa.ipynb +++ b/cookbook/openai_functions_retrieval_qa.ipynb @@ -358,7 +358,7 @@ "\n", "from langchain.chains.openai_functions import create_qa_with_structure_chain\n", "from langchain.prompts.chat import ChatPromptTemplate, HumanMessagePromptTemplate\n", - "from langchain.schema import HumanMessage, SystemMessage\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "from pydantic import BaseModel, Field" ] }, diff --git a/cookbook/sales_agent_with_context.ipynb b/cookbook/sales_agent_with_context.ipynb index 158329a5f09e6..e125046af9228 100644 --- 
a/cookbook/sales_agent_with_context.ipynb +++ b/cookbook/sales_agent_with_context.ipynb @@ -51,10 +51,10 @@ "from langchain.chains.base import Chain\n", "from langchain.prompts import PromptTemplate\n", "from langchain.prompts.base import StringPromptTemplate\n", - "from langchain.schema import AgentAction, AgentFinish\n", "from langchain.text_splitter import CharacterTextSplitter\n", "from langchain_community.llms import BaseLLM\n", "from langchain_community.vectorstores import Chroma\n", + "from langchain_core.agents import AgentAction, AgentFinish\n", "from langchain_openai import ChatOpenAI, OpenAI, OpenAIEmbeddings\n", "from pydantic import BaseModel, Field" ] diff --git a/cookbook/wikibase_agent.ipynb b/cookbook/wikibase_agent.ipynb index 692193b0229df..13c4063cf7852 100644 --- a/cookbook/wikibase_agent.ipynb +++ b/cookbook/wikibase_agent.ipynb @@ -401,7 +401,7 @@ ")\n", "from langchain.chains import LLMChain\n", "from langchain.prompts import StringPromptTemplate\n", - "from langchain.schema import AgentAction, AgentFinish" + "from langchain_core.agents import AgentAction, AgentFinish" ] }, { diff --git a/docs/docs/expression_language/cookbook/multiple_chains.ipynb b/docs/docs/expression_language/cookbook/multiple_chains.ipynb index 60f87c3764a20..8abe267107318 100644 --- a/docs/docs/expression_language/cookbook/multiple_chains.ipynb +++ b/docs/docs/expression_language/cookbook/multiple_chains.ipynb @@ -47,7 +47,7 @@ "source": [ "from operator import itemgetter\n", "\n", - "from langchain.schema import StrOutputParser\n", + "from langchain_core.output_parsers import StrOutputParser\n", "from langchain_core.prompts import ChatPromptTemplate\n", "from langchain_openai import ChatOpenAI\n", "\n", diff --git a/docs/docs/expression_language/cookbook/retrieval.ipynb b/docs/docs/expression_language/cookbook/retrieval.ipynb index e7708ca675bfc..89df3b6a0cf05 100644 --- a/docs/docs/expression_language/cookbook/retrieval.ipynb +++ 
b/docs/docs/expression_language/cookbook/retrieval.ipynb @@ -169,8 +169,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import format_document\n", "from langchain_core.messages import AIMessage, HumanMessage, get_buffer_string\n", + "from langchain_core.prompts import format_document\n", "from langchain_core.runnables import RunnableParallel" ] }, diff --git a/docs/docs/expression_language/how_to/binding.ipynb b/docs/docs/expression_language/how_to/binding.ipynb index 375b863585b0d..fe25f1a3bc936 100644 --- a/docs/docs/expression_language/how_to/binding.ipynb +++ b/docs/docs/expression_language/how_to/binding.ipynb @@ -29,7 +29,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import StrOutputParser\n", + "from langchain_core.output_parsers import StrOutputParser\n", "from langchain_core.prompts import ChatPromptTemplate\n", "from langchain_core.runnables import RunnablePassthrough\n", "from langchain_openai import ChatOpenAI" diff --git a/docs/docs/expression_language/streaming.ipynb b/docs/docs/expression_language/streaming.ipynb index d597c4d825fb3..10254e6918fb4 100644 --- a/docs/docs/expression_language/streaming.ipynb +++ b/docs/docs/expression_language/streaming.ipynb @@ -68,7 +68,7 @@ "source": [ "# Showing the example using anthropic, but you can use\n", "# your favorite chat model!\n", - "from langchain.chat_models import ChatAnthropic\n", + "from langchain_community.chat_models import ChatAnthropic\n", "\n", "model = ChatAnthropic()\n", "\n", diff --git a/docs/docs/guides/evaluation/trajectory/custom.ipynb b/docs/docs/guides/evaluation/trajectory/custom.ipynb index c6be21a279366..6afb6ef4bebb1 100644 --- a/docs/docs/guides/evaluation/trajectory/custom.ipynb +++ b/docs/docs/guides/evaluation/trajectory/custom.ipynb @@ -35,7 +35,7 @@ "\n", "from langchain.chains import LLMChain\n", "from langchain.evaluation import AgentTrajectoryEvaluator\n", - "from langchain.schema import AgentAction\n", + "from 
langchain_core.agents import AgentAction\n", "from langchain_openai import ChatOpenAI\n", "\n", "\n", diff --git a/docs/docs/guides/privacy/presidio_data_anonymization/qa_privacy_protection.ipynb b/docs/docs/guides/privacy/presidio_data_anonymization/qa_privacy_protection.ipynb index 8c7f4574ef3af..1bf0b77ab4857 100644 --- a/docs/docs/guides/privacy/presidio_data_anonymization/qa_privacy_protection.ipynb +++ b/docs/docs/guides/privacy/presidio_data_anonymization/qa_privacy_protection.ipynb @@ -90,7 +90,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", + "from langchain_core.documents import Document\n", "\n", "documents = [Document(page_content=document_content)]" ] @@ -879,7 +879,7 @@ "outputs": [], "source": [ "from langchain.prompts.prompt import PromptTemplate\n", - "from langchain.schema import format_document\n", + "from langchain_core.prompts import format_document\n", "\n", "DEFAULT_DOCUMENT_PROMPT = PromptTemplate.from_template(template=\"{page_content}\")\n", "\n", diff --git a/docs/docs/integrations/callbacks/labelstudio.ipynb b/docs/docs/integrations/callbacks/labelstudio.ipynb index 91507b0b046ab..6170b7c05521b 100644 --- a/docs/docs/integrations/callbacks/labelstudio.ipynb +++ b/docs/docs/integrations/callbacks/labelstudio.ipynb @@ -242,7 +242,7 @@ "outputs": [], "source": [ "from langchain.callbacks import LabelStudioCallbackHandler\n", - "from langchain.schema import HumanMessage, SystemMessage\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "from langchain_openai import ChatOpenAI\n", "\n", "chat_llm = ChatOpenAI(\n", diff --git a/docs/docs/integrations/callbacks/llmonitor.md b/docs/docs/integrations/callbacks/llmonitor.md index 266332a7e3438..a3513356b9c59 100644 --- a/docs/docs/integrations/callbacks/llmonitor.md +++ b/docs/docs/integrations/callbacks/llmonitor.md @@ -53,7 +53,7 @@ Example: ```python from langchain_openai import ChatOpenAI -from langchain.schema import 
SystemMessage, HumanMessage +from langchain_core.messages import SystemMessage, HumanMessage from langchain.agents import OpenAIFunctionsAgent, AgentExecutor, tool from langchain.callbacks import LLMonitorCallbackHandler diff --git a/docs/docs/integrations/callbacks/trubrics.ipynb b/docs/docs/integrations/callbacks/trubrics.ipynb index 0469e313d0ebe..d4a6419e38400 100644 --- a/docs/docs/integrations/callbacks/trubrics.ipynb +++ b/docs/docs/integrations/callbacks/trubrics.ipynb @@ -267,7 +267,7 @@ "outputs": [], "source": [ "from langchain.callbacks import TrubricsCallbackHandler\n", - "from langchain.schema import HumanMessage, SystemMessage\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "from langchain_openai import ChatOpenAI" ] }, diff --git a/docs/docs/integrations/chat/anthropic_functions.ipynb b/docs/docs/integrations/chat/anthropic_functions.ipynb index 6b2a031f4565f..e91547c074ea8 100644 --- a/docs/docs/integrations/chat/anthropic_functions.ipynb +++ b/docs/docs/integrations/chat/anthropic_functions.ipynb @@ -83,7 +83,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import HumanMessage" + "from langchain_core.messages import HumanMessage" ] }, { diff --git a/docs/docs/integrations/chat/anyscale.ipynb b/docs/docs/integrations/chat/anyscale.ipynb index 29718174655a9..98cac216ad353 100644 --- a/docs/docs/integrations/chat/anyscale.ipynb +++ b/docs/docs/integrations/chat/anyscale.ipynb @@ -109,7 +109,7 @@ "source": [ "import asyncio\n", "\n", - "from langchain.schema import HumanMessage, SystemMessage\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "\n", "messages = [\n", " SystemMessage(content=\"You are a helpful AI that shares everything you know.\"),\n", diff --git a/docs/docs/integrations/chat/azure_chat_openai.ipynb b/docs/docs/integrations/chat/azure_chat_openai.ipynb index 399edd6ebf73f..57e677340c61b 100644 --- a/docs/docs/integrations/chat/azure_chat_openai.ipynb +++ 
b/docs/docs/integrations/chat/azure_chat_openai.ipynb @@ -31,7 +31,7 @@ "source": [ "import os\n", "\n", - "from langchain.schema import HumanMessage\n", + "from langchain_core.messages import HumanMessage\n", "from langchain_openai import AzureChatOpenAI" ] }, diff --git a/docs/docs/integrations/chat/azureml_chat_endpoint.ipynb b/docs/docs/integrations/chat/azureml_chat_endpoint.ipynb index 6fe4c869f4f94..0bca033c6afc7 100644 --- a/docs/docs/integrations/chat/azureml_chat_endpoint.ipynb +++ b/docs/docs/integrations/chat/azureml_chat_endpoint.ipynb @@ -74,11 +74,11 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", "from langchain_community.chat_models.azureml_endpoint import (\n", " AzureMLEndpointApiType,\n", " LlamaChatContentFormatter,\n", - ")" + ")\n", + "from langchain_core.messages import HumanMessage" ] }, { @@ -105,8 +105,8 @@ } ], "source": [ - "from langchain.schema import HumanMessage\n", "from langchain_community.chat_models.azureml_endpoint import LlamaContentFormatter\n", + "from langchain_core.messages import HumanMessage\n", "\n", "chat = AzureMLChatOnlineEndpoint(\n", " endpoint_url=\"https://..inference.ml.azure.com/score\",\n", diff --git a/docs/docs/integrations/chat/baichuan.ipynb b/docs/docs/integrations/chat/baichuan.ipynb index 3b184b953fa2d..8b46b88b2b0f0 100644 --- a/docs/docs/integrations/chat/baichuan.ipynb +++ b/docs/docs/integrations/chat/baichuan.ipynb @@ -29,8 +29,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", - "from langchain_community.chat_models import ChatBaichuan" + "from langchain_community.chat_models import ChatBaichuan\n", + "from langchain_core.messages import HumanMessage" ] }, { diff --git a/docs/docs/integrations/chat/bedrock.ipynb b/docs/docs/integrations/chat/bedrock.ipynb index add4051b7c8b1..927a78b58591d 100644 --- a/docs/docs/integrations/chat/bedrock.ipynb +++ b/docs/docs/integrations/chat/bedrock.ipynb @@ -47,8 +47,8 @@ }, 
"outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", - "from langchain_community.chat_models import BedrockChat" + "from langchain_community.chat_models import BedrockChat\n", + "from langchain_core.messages import HumanMessage" ] }, { diff --git a/docs/docs/integrations/chat/deepinfra.ipynb b/docs/docs/integrations/chat/deepinfra.ipynb index 0f88097a20ac7..121e8eaabf8f0 100644 --- a/docs/docs/integrations/chat/deepinfra.ipynb +++ b/docs/docs/integrations/chat/deepinfra.ipynb @@ -68,8 +68,8 @@ }, "outputs": [], "source": [ - "from langchain.chat_models import ChatDeepInfra\n", - "from langchain.schema import HumanMessage" + "from langchain_community.chat_models import ChatDeepInfra\n", + "from langchain_core.messages import HumanMessage" ] }, { @@ -216,7 +216,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.1" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/docs/docs/integrations/chat/ernie.ipynb b/docs/docs/integrations/chat/ernie.ipynb index fd467c22276e4..cd0e121b06b73 100644 --- a/docs/docs/integrations/chat/ernie.ipynb +++ b/docs/docs/integrations/chat/ernie.ipynb @@ -76,8 +76,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", "from langchain_community.chat_models import ErnieBotChat\n", + "from langchain_core.messages import HumanMessage\n", "\n", "chat = ErnieBotChat(\n", " ernie_client_id=\"YOUR_CLIENT_ID\", ernie_client_secret=\"YOUR_CLIENT_SECRET\"\n", diff --git a/docs/docs/integrations/chat/everlyai.ipynb b/docs/docs/integrations/chat/everlyai.ipynb index e4e548e85b201..3c08355cf7040 100644 --- a/docs/docs/integrations/chat/everlyai.ipynb +++ b/docs/docs/integrations/chat/everlyai.ipynb @@ -73,8 +73,8 @@ } ], "source": [ - "from langchain.schema import HumanMessage, SystemMessage\n", "from langchain_community.chat_models import ChatEverlyAI\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "\n", 
"messages = [\n", " SystemMessage(content=\"You are a helpful AI that shares everything you know.\"),\n", @@ -127,8 +127,8 @@ ], "source": [ "from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n", - "from langchain.schema import HumanMessage, SystemMessage\n", "from langchain_community.chat_models import ChatEverlyAI\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "\n", "messages = [\n", " SystemMessage(content=\"You are a humorous AI that delights people.\"),\n", @@ -185,8 +185,8 @@ ], "source": [ "from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n", - "from langchain.schema import HumanMessage, SystemMessage\n", "from langchain_community.chat_models import ChatEverlyAI\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "\n", "messages = [\n", " SystemMessage(content=\"You are a humorous AI that delights people.\"),\n", diff --git a/docs/docs/integrations/chat/fireworks.ipynb b/docs/docs/integrations/chat/fireworks.ipynb index 4be0d01d78d81..58afd0a65e5d0 100644 --- a/docs/docs/integrations/chat/fireworks.ipynb +++ b/docs/docs/integrations/chat/fireworks.ipynb @@ -37,8 +37,8 @@ "source": [ "import os\n", "\n", - "from langchain.schema import HumanMessage, SystemMessage\n", - "from langchain_community.chat_models.fireworks import ChatFireworks" + "from langchain_community.chat_models.fireworks import ChatFireworks\n", + "from langchain_core.messages import HumanMessage, SystemMessage" ] }, { diff --git a/docs/docs/integrations/chat/gigachat.ipynb b/docs/docs/integrations/chat/gigachat.ipynb index f4838512ed2db..e33b0fa2cda35 100644 --- a/docs/docs/integrations/chat/gigachat.ipynb +++ b/docs/docs/integrations/chat/gigachat.ipynb @@ -75,7 +75,7 @@ } ], "source": [ - "from langchain.schema import HumanMessage, SystemMessage\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "\n", "messages = [\n", " SystemMessage(\n", diff --git 
a/docs/docs/integrations/chat/gpt_router.ipynb b/docs/docs/integrations/chat/gpt_router.ipynb index 35fd32d7469e8..967bf5bbddb5a 100644 --- a/docs/docs/integrations/chat/gpt_router.ipynb +++ b/docs/docs/integrations/chat/gpt_router.ipynb @@ -70,9 +70,9 @@ }, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", "from langchain_community.chat_models import GPTRouter\n", - "from langchain_community.chat_models.gpt_router import GPTRouterModel" + "from langchain_community.chat_models.gpt_router import GPTRouterModel\n", + "from langchain_core.messages import HumanMessage" ] }, { diff --git a/docs/docs/integrations/chat/jinachat.ipynb b/docs/docs/integrations/chat/jinachat.ipynb index 672018f477507..08b247c7698c6 100644 --- a/docs/docs/integrations/chat/jinachat.ipynb +++ b/docs/docs/integrations/chat/jinachat.ipynb @@ -24,8 +24,8 @@ " HumanMessagePromptTemplate,\n", " SystemMessagePromptTemplate,\n", ")\n", - "from langchain.schema import HumanMessage, SystemMessage\n", - "from langchain_community.chat_models import JinaChat" + "from langchain_community.chat_models import JinaChat\n", + "from langchain_core.messages import HumanMessage, SystemMessage" ] }, { diff --git a/docs/docs/integrations/chat/konko.ipynb b/docs/docs/integrations/chat/konko.ipynb index 3582e21d2302e..293aa4eb5b02d 100644 --- a/docs/docs/integrations/chat/konko.ipynb +++ b/docs/docs/integrations/chat/konko.ipynb @@ -40,8 +40,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import HumanMessage, SystemMessage\n", - "from langchain_community.chat_models import ChatKonko" + "from langchain_community.chat_models import ChatKonko\n", + "from langchain_core.messages import HumanMessage, SystemMessage" ] }, { diff --git a/docs/docs/integrations/chat/litellm.ipynb b/docs/docs/integrations/chat/litellm.ipynb index 1de1f3b95402e..6ab12ff186d01 100644 --- a/docs/docs/integrations/chat/litellm.ipynb +++ b/docs/docs/integrations/chat/litellm.ipynb @@ -32,8 +32,8 @@ }, "outputs": 
[], "source": [ - "from langchain.schema import HumanMessage\n", - "from langchain_community.chat_models import ChatLiteLLM" + "from langchain_community.chat_models import ChatLiteLLM\n", + "from langchain_core.messages import HumanMessage" ] }, { diff --git a/docs/docs/integrations/chat/litellm_router.ipynb b/docs/docs/integrations/chat/litellm_router.ipynb index 46e7da49e92e8..4f31928526b0c 100644 --- a/docs/docs/integrations/chat/litellm_router.ipynb +++ b/docs/docs/integrations/chat/litellm_router.ipynb @@ -38,8 +38,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", "from langchain_community.chat_models import ChatLiteLLMRouter\n", + "from langchain_core.messages import HumanMessage\n", "from litellm import Router" ] }, diff --git a/docs/docs/integrations/chat/llama2_chat.ipynb b/docs/docs/integrations/chat/llama2_chat.ipynb index de1e773e34bc3..debb538f394c5 100644 --- a/docs/docs/integrations/chat/llama2_chat.ipynb +++ b/docs/docs/integrations/chat/llama2_chat.ipynb @@ -54,7 +54,7 @@ " HumanMessagePromptTemplate,\n", " MessagesPlaceholder,\n", ")\n", - "from langchain.schema import SystemMessage\n", + "from langchain_core.messages import SystemMessage\n", "\n", "template_messages = [\n", " SystemMessage(content=\"You are a helpful assistant.\"),\n", diff --git a/docs/docs/integrations/chat/minimax.ipynb b/docs/docs/integrations/chat/minimax.ipynb index 9b0735f7f27fe..3411b0beb065b 100644 --- a/docs/docs/integrations/chat/minimax.ipynb +++ b/docs/docs/integrations/chat/minimax.ipynb @@ -39,8 +39,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", - "from langchain_community.chat_models import MiniMaxChat" + "from langchain_community.chat_models import MiniMaxChat\n", + "from langchain_core.messages import HumanMessage" ] }, { diff --git a/docs/docs/integrations/chat/ollama.ipynb b/docs/docs/integrations/chat/ollama.ipynb index d0df5b4b99d8a..3e4279a95c16e 100644 --- 
a/docs/docs/integrations/chat/ollama.ipynb +++ b/docs/docs/integrations/chat/ollama.ipynb @@ -278,7 +278,7 @@ } ], "source": [ - "from langchain.schema import HumanMessage\n", + "from langchain_core.messages import HumanMessage\n", "\n", "messages = [\n", " HumanMessage(\n", @@ -313,8 +313,8 @@ "source": [ "import json\n", "\n", - "from langchain.schema import HumanMessage\n", "from langchain_community.chat_models import ChatOllama\n", + "from langchain_core.messages import HumanMessage\n", "from langchain_core.output_parsers import StrOutputParser\n", "from langchain_core.prompts import ChatPromptTemplate\n", "\n", @@ -463,8 +463,8 @@ } ], "source": [ - "from langchain.schema import HumanMessage\n", "from langchain_community.chat_models import ChatOllama\n", + "from langchain_core.messages import HumanMessage\n", "\n", "llm = ChatOllama(model=\"bakllava\", temperature=0)\n", "\n", diff --git a/docs/docs/integrations/chat/ollama_functions.ipynb b/docs/docs/integrations/chat/ollama_functions.ipynb index 707b8d74ccaff..8a2e2826e9d01 100644 --- a/docs/docs/integrations/chat/ollama_functions.ipynb +++ b/docs/docs/integrations/chat/ollama_functions.ipynb @@ -102,7 +102,7 @@ } ], "source": [ - "from langchain.schema import HumanMessage\n", + "from langchain_core.messages import HumanMessage\n", "\n", "model.invoke(\"what is the weather in Boston?\")" ] diff --git a/docs/docs/integrations/chat/openai.ipynb b/docs/docs/integrations/chat/openai.ipynb index bdfc034267eaf..d5fe3e76605cf 100644 --- a/docs/docs/integrations/chat/openai.ipynb +++ b/docs/docs/integrations/chat/openai.ipynb @@ -34,7 +34,7 @@ " HumanMessagePromptTemplate,\n", " SystemMessagePromptTemplate,\n", ")\n", - "from langchain.schema import HumanMessage, SystemMessage\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "from langchain_openai import ChatOpenAI" ] }, diff --git a/docs/docs/integrations/chat/promptlayer_chatopenai.ipynb 
b/docs/docs/integrations/chat/promptlayer_chatopenai.ipynb index de2cc76d707a0..7924442a89b18 100644 --- a/docs/docs/integrations/chat/promptlayer_chatopenai.ipynb +++ b/docs/docs/integrations/chat/promptlayer_chatopenai.ipynb @@ -62,8 +62,8 @@ "source": [ "import os\n", "\n", - "from langchain.schema import HumanMessage\n", - "from langchain_community.chat_models import PromptLayerChatOpenAI" + "from langchain_community.chat_models import PromptLayerChatOpenAI\n", + "from langchain_core.messages import HumanMessage" ] }, { diff --git a/docs/docs/integrations/chat/sparkllm.ipynb b/docs/docs/integrations/chat/sparkllm.ipynb index 4fe68f9a2a709..8e62af8baed08 100644 --- a/docs/docs/integrations/chat/sparkllm.ipynb +++ b/docs/docs/integrations/chat/sparkllm.ipynb @@ -30,8 +30,8 @@ "outputs": [], "source": [ "\"\"\"For basic init and call\"\"\"\n", - "from langchain.chat_models import ChatSparkLLM\n", - "from langchain.schema import HumanMessage\n", + "from langchain_community.chat_models import ChatSparkLLM\n", + "from langchain_core.messages import HumanMessage\n", "\n", "chat = ChatSparkLLM(\n", " spark_app_id=\"\", spark_api_key=\"\", spark_api_secret=\"\"\n", diff --git a/docs/docs/integrations/chat/tencent_hunyuan.ipynb b/docs/docs/integrations/chat/tencent_hunyuan.ipynb index d184784f66f0b..4906f52678945 100644 --- a/docs/docs/integrations/chat/tencent_hunyuan.ipynb +++ b/docs/docs/integrations/chat/tencent_hunyuan.ipynb @@ -36,8 +36,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", - "from langchain_community.chat_models import ChatHunyuan" + "from langchain_community.chat_models import ChatHunyuan\n", + "from langchain_core.messages import HumanMessage" ] }, { diff --git a/docs/docs/integrations/chat/tongyi.ipynb b/docs/docs/integrations/chat/tongyi.ipynb index 4bfb8646c1770..a80f876ac32e3 100644 --- a/docs/docs/integrations/chat/tongyi.ipynb +++ b/docs/docs/integrations/chat/tongyi.ipynb @@ -100,8 +100,8 @@ } ], "source": [ - 
"from langchain.schema import HumanMessage\n", "from langchain_community.chat_models.tongyi import ChatTongyi\n", + "from langchain_core.messages import HumanMessage\n", "\n", "chatLLM = ChatTongyi(\n", " streaming=True,\n", @@ -128,7 +128,7 @@ } ], "source": [ - "from langchain.schema import HumanMessage, SystemMessage\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "\n", "messages = [\n", " SystemMessage(\n", diff --git a/docs/docs/integrations/chat/vllm.ipynb b/docs/docs/integrations/chat/vllm.ipynb index 88333bcb01aa3..ef03e1d8d13d1 100644 --- a/docs/docs/integrations/chat/vllm.ipynb +++ b/docs/docs/integrations/chat/vllm.ipynb @@ -36,7 +36,7 @@ " HumanMessagePromptTemplate,\n", " SystemMessagePromptTemplate,\n", ")\n", - "from langchain.schema import HumanMessage, SystemMessage\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "from langchain_openai import ChatOpenAI" ] }, diff --git a/docs/docs/integrations/chat/volcengine_maas.ipynb b/docs/docs/integrations/chat/volcengine_maas.ipynb index a82c8ce2e76bd..3cead3bac24d2 100644 --- a/docs/docs/integrations/chat/volcengine_maas.ipynb +++ b/docs/docs/integrations/chat/volcengine_maas.ipynb @@ -48,8 +48,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", - "from langchain_community.chat_models import VolcEngineMaasChat" + "from langchain_community.chat_models import VolcEngineMaasChat\n", + "from langchain_core.messages import HumanMessage" ] }, { diff --git a/docs/docs/integrations/chat/yandex.ipynb b/docs/docs/integrations/chat/yandex.ipynb index 5a389fa84e711..403a97dae713c 100644 --- a/docs/docs/integrations/chat/yandex.ipynb +++ b/docs/docs/integrations/chat/yandex.ipynb @@ -58,8 +58,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import HumanMessage, SystemMessage\n", - "from langchain_community.chat_models import ChatYandexGPT" + "from langchain_community.chat_models import ChatYandexGPT\n", + "from 
langchain_core.messages import HumanMessage, SystemMessage" ] }, { diff --git a/docs/docs/integrations/chat_loaders/discord.ipynb b/docs/docs/integrations/chat_loaders/discord.ipynb index b4eb0263b15ea..4425ce3deb397 100644 --- a/docs/docs/integrations/chat_loaders/discord.ipynb +++ b/docs/docs/integrations/chat_loaders/discord.ipynb @@ -79,8 +79,8 @@ "import re\n", "from typing import Iterator, List\n", "\n", - "from langchain.schema import BaseMessage, HumanMessage\n", "from langchain_community.chat_loaders import base as chat_loaders\n", + "from langchain_core.messages import BaseMessage, HumanMessage\n", "\n", "logger = logging.getLogger()\n", "\n", diff --git a/docs/docs/integrations/chat_loaders/twitter.ipynb b/docs/docs/integrations/chat_loaders/twitter.ipynb index af80f142c209c..e906af7e67b0b 100644 --- a/docs/docs/integrations/chat_loaders/twitter.ipynb +++ b/docs/docs/integrations/chat_loaders/twitter.ipynb @@ -22,7 +22,7 @@ "import json\n", "\n", "from langchain.adapters.openai import convert_message_to_dict\n", - "from langchain.schema import AIMessage" + "from langchain_core.messages import AIMessage" ] }, { diff --git a/docs/docs/integrations/chat_loaders/wechat.ipynb b/docs/docs/integrations/chat_loaders/wechat.ipynb index bb81d8cc8876d..0a0146a495475 100644 --- a/docs/docs/integrations/chat_loaders/wechat.ipynb +++ b/docs/docs/integrations/chat_loaders/wechat.ipynb @@ -78,8 +78,8 @@ "import re\n", "from typing import Iterator, List\n", "\n", - "from langchain.schema import BaseMessage, HumanMessage\n", "from langchain_community.chat_loaders import base as chat_loaders\n", + "from langchain_core.messages import BaseMessage, HumanMessage\n", "\n", "logger = logging.getLogger()\n", "\n", diff --git a/docs/docs/integrations/document_loaders/tensorflow_datasets.ipynb b/docs/docs/integrations/document_loaders/tensorflow_datasets.ipynb index 390597bbd82a4..61c9d22f0bc0d 100644 --- a/docs/docs/integrations/document_loaders/tensorflow_datasets.ipynb +++ 
b/docs/docs/integrations/document_loaders/tensorflow_datasets.ipynb @@ -198,8 +198,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.document_loaders import TensorflowDatasetLoader\n", + "from langchain_core.documents import Document\n", "\n", "loader = TensorflowDatasetLoader(\n", " dataset_name=\"mlqa/en\",\n", diff --git a/docs/docs/integrations/document_transformers/doctran_extract_properties.ipynb b/docs/docs/integrations/document_transformers/doctran_extract_properties.ipynb index bba3221ec346b..56d5240cf0e38 100644 --- a/docs/docs/integrations/document_transformers/doctran_extract_properties.ipynb +++ b/docs/docs/integrations/document_transformers/doctran_extract_properties.ipynb @@ -32,8 +32,8 @@ "source": [ "import json\n", "\n", - "from langchain.schema import Document\n", - "from langchain_community.document_transformers import DoctranPropertyExtractor" + "from langchain_community.document_transformers import DoctranPropertyExtractor\n", + "from langchain_core.documents import Document" ] }, { diff --git a/docs/docs/integrations/document_transformers/doctran_interrogate_document.ipynb b/docs/docs/integrations/document_transformers/doctran_interrogate_document.ipynb index 1f52616b23c97..1d6fdd07f5b3f 100644 --- a/docs/docs/integrations/document_transformers/doctran_interrogate_document.ipynb +++ b/docs/docs/integrations/document_transformers/doctran_interrogate_document.ipynb @@ -30,8 +30,8 @@ "source": [ "import json\n", "\n", - "from langchain.schema import Document\n", - "from langchain_community.document_transformers import DoctranQATransformer" + "from langchain_community.document_transformers import DoctranQATransformer\n", + "from langchain_core.documents import Document" ] }, { diff --git a/docs/docs/integrations/document_transformers/doctran_translate_document.ipynb b/docs/docs/integrations/document_transformers/doctran_translate_document.ipynb index 7f1e93c111195..6f46135b5a45f 
100644 --- a/docs/docs/integrations/document_transformers/doctran_translate_document.ipynb +++ b/docs/docs/integrations/document_transformers/doctran_translate_document.ipynb @@ -28,8 +28,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", - "from langchain_community.document_transformers import DoctranTextTranslator" + "from langchain_community.document_transformers import DoctranTextTranslator\n", + "from langchain_core.documents import Document" ] }, { diff --git a/docs/docs/integrations/document_transformers/google_translate.ipynb b/docs/docs/integrations/document_transformers/google_translate.ipynb index 67a84aa7f9aa3..cfe74de583150 100644 --- a/docs/docs/integrations/document_transformers/google_translate.ipynb +++ b/docs/docs/integrations/document_transformers/google_translate.ipynb @@ -31,8 +31,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", - "from langchain_community.document_transformers import GoogleTranslateTransformer" + "from langchain_community.document_transformers import GoogleTranslateTransformer\n", + "from langchain_core.documents import Document" ] }, { diff --git a/docs/docs/integrations/document_transformers/openai_metadata_tagger.ipynb b/docs/docs/integrations/document_transformers/openai_metadata_tagger.ipynb index 11a4b2ae960e3..dbe8341274bed 100644 --- a/docs/docs/integrations/document_transformers/openai_metadata_tagger.ipynb +++ b/docs/docs/integrations/document_transformers/openai_metadata_tagger.ipynb @@ -21,10 +21,10 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.document_transformers.openai_functions import (\n", " create_metadata_tagger,\n", ")\n", + "from langchain_core.documents import Document\n", "from langchain_openai import ChatOpenAI" ] }, diff --git a/docs/docs/integrations/llms/azure_ml.ipynb b/docs/docs/integrations/llms/azure_ml.ipynb index 
9d066bddb3cc9..b2adb40a84b52 100644 --- a/docs/docs/integrations/llms/azure_ml.ipynb +++ b/docs/docs/integrations/llms/azure_ml.ipynb @@ -70,11 +70,11 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", "from langchain_community.llms.azureml_endpoint import (\n", " AzureMLEndpointApiType,\n", " LlamaContentFormatter,\n", ")\n", + "from langchain_core.messages import HumanMessage\n", "\n", "llm = AzureMLOnlineEndpoint(\n", " endpoint_url=\"https://..inference.ml.azure.com/score\",\n", @@ -117,11 +117,11 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import HumanMessage\n", "from langchain_community.llms.azureml_endpoint import (\n", " AzureMLEndpointApiType,\n", " LlamaContentFormatter,\n", ")\n", + "from langchain_core.messages import HumanMessage\n", "\n", "llm = AzureMLOnlineEndpoint(\n", " endpoint_url=\"https://..inference.ml.azure.com/v1/completions\",\n", diff --git a/docs/docs/integrations/llms/javelin.ipynb b/docs/docs/integrations/llms/javelin.ipynb index 935dd0c6e1074..c5bcc247d2dc2 100644 --- a/docs/docs/integrations/llms/javelin.ipynb +++ b/docs/docs/integrations/llms/javelin.ipynb @@ -180,8 +180,8 @@ } ], "source": [ - "from langchain.schema import HumanMessage, SystemMessage\n", "from langchain_community.chat_models import ChatJavelinAIGateway\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "\n", "messages = [\n", " SystemMessage(\n", diff --git a/docs/docs/integrations/memory/zep_memory.ipynb b/docs/docs/integrations/memory/zep_memory.ipynb index e1018e1235963..86379dd7851d3 100644 --- a/docs/docs/integrations/memory/zep_memory.ipynb +++ b/docs/docs/integrations/memory/zep_memory.ipynb @@ -52,8 +52,8 @@ "from langchain.agents import AgentType, Tool, initialize_agent\n", "from langchain.memory import ZepMemory\n", "from langchain.retrievers import ZepRetriever\n", - "from langchain.schema import AIMessage, HumanMessage\n", "from langchain_community.utilities 
import WikipediaAPIWrapper\n", + "from langchain_core.messages import AIMessage, HumanMessage\n", "from langchain_openai import OpenAI\n", "\n", "# Set this to your Zep server URL\n", diff --git a/docs/docs/integrations/providers/arthur_tracking.ipynb b/docs/docs/integrations/providers/arthur_tracking.ipynb index 74114875f373e..e7db9365dddb5 100644 --- a/docs/docs/integrations/providers/arthur_tracking.ipynb +++ b/docs/docs/integrations/providers/arthur_tracking.ipynb @@ -28,7 +28,7 @@ "source": [ "from langchain.callbacks import ArthurCallbackHandler\n", "from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n", - "from langchain.schema import HumanMessage\n", + "from langchain_core.messages import HumanMessage\n", "from langchain_openai import ChatOpenAI" ] }, diff --git a/docs/docs/integrations/providers/cohere.mdx b/docs/docs/integrations/providers/cohere.mdx index 729874a25d8f0..48c091d07125a 100644 --- a/docs/docs/integrations/providers/cohere.mdx +++ b/docs/docs/integrations/providers/cohere.mdx @@ -27,7 +27,7 @@ Get a [Cohere api key](https://dashboard.cohere.ai/) and set it as an environmen ```python from langchain_community.chat_models import ChatCohere -from langchain.schema import HumanMessage +from langchain_core.messages import HumanMessage chat = ChatCohere() messages = [HumanMessage(content="knock knock")] print(chat(messages)) diff --git a/docs/docs/integrations/providers/flyte.mdx b/docs/docs/integrations/providers/flyte.mdx index 1e7dbb748ca22..1e75bea5c7cf4 100644 --- a/docs/docs/integrations/providers/flyte.mdx +++ b/docs/docs/integrations/providers/flyte.mdx @@ -30,7 +30,7 @@ from langchain.callbacks import FlyteCallbackHandler from langchain.chains import LLMChain from langchain_openai import ChatOpenAI from langchain.prompts import PromptTemplate -from langchain.schema import HumanMessage +from langchain_core.messages import HumanMessage ``` Set up the necessary environment variables to utilize the OpenAI API and 
Serp API: diff --git a/docs/docs/integrations/providers/javelin_ai_gateway.mdx b/docs/docs/integrations/providers/javelin_ai_gateway.mdx index d4f4ceeec0295..41e7f6fe45be9 100644 --- a/docs/docs/integrations/providers/javelin_ai_gateway.mdx +++ b/docs/docs/integrations/providers/javelin_ai_gateway.mdx @@ -66,7 +66,7 @@ print(embeddings.embed_documents(["hello"])) ## Chat Example ```python from langchain_community.chat_models import ChatJavelinAIGateway -from langchain.schema import HumanMessage, SystemMessage +from langchain_core.messages import HumanMessage, SystemMessage messages = [ SystemMessage( diff --git a/docs/docs/integrations/providers/konko.mdx b/docs/docs/integrations/providers/konko.mdx index 5e474d4ae001d..47f3f088f87d7 100644 --- a/docs/docs/integrations/providers/konko.mdx +++ b/docs/docs/integrations/providers/konko.mdx @@ -55,7 +55,7 @@ See a usage [example](/docs/integrations/chat/konko). - **ChatCompletion with Mistral-7B:** ```python - from langchain.schema import HumanMessage + from langchain_core.messages import HumanMessage from langchain_community.chat_models import ChatKonko chat_instance = ChatKonko(max_tokens=10, model = 'mistralai/mistral-7b-instruct-v0.1') msg = HumanMessage(content="Hi") diff --git a/docs/docs/integrations/providers/log10.mdx b/docs/docs/integrations/providers/log10.mdx index bd5890bb1f97a..38ef1fc763077 100644 --- a/docs/docs/integrations/providers/log10.mdx +++ b/docs/docs/integrations/providers/log10.mdx @@ -18,7 +18,7 @@ Integration with log10 is a simple one-line `log10_callback` integration as show ```python from langchain_openai import ChatOpenAI -from langchain.schema import HumanMessage +from langchain_core.messages import HumanMessage from log10.langchain import Log10Callback from log10.llm import Log10Config @@ -43,7 +43,7 @@ llm = ChatOpenAI(model_name="gpt-3.5-turbo", callbacks=[log10_callback]) from langchain_openai import OpenAI from langchain_community.chat_models import ChatAnthropic from 
langchain_openai import ChatOpenAI -from langchain.schema import HumanMessage +from langchain_core.messages import HumanMessage from log10.langchain import Log10Callback from log10.llm import Log10Config diff --git a/docs/docs/integrations/providers/mlflow.mdx b/docs/docs/integrations/providers/mlflow.mdx index 5219f0b7a9c85..791b976f388cd 100644 --- a/docs/docs/integrations/providers/mlflow.mdx +++ b/docs/docs/integrations/providers/mlflow.mdx @@ -100,7 +100,7 @@ print(embeddings.embed_documents(["hello"])) ```python from langchain_community.chat_models import ChatMlflow -from langchain.schema import HumanMessage, SystemMessage +from langchain_core.messages import HumanMessage, SystemMessage chat = ChatMlflow( target_uri="http://127.0.0.1:5000", diff --git a/docs/docs/integrations/providers/mlflow_ai_gateway.mdx b/docs/docs/integrations/providers/mlflow_ai_gateway.mdx index dccabba494506..a18f4a28e681b 100644 --- a/docs/docs/integrations/providers/mlflow_ai_gateway.mdx +++ b/docs/docs/integrations/providers/mlflow_ai_gateway.mdx @@ -113,7 +113,7 @@ print(embeddings.embed_documents(["hello"])) ```python from langchain_community.chat_models import ChatMLflowAIGateway -from langchain.schema import HumanMessage, SystemMessage +from langchain_core.messages import HumanMessage, SystemMessage chat = ChatMLflowAIGateway( gateway_uri="http://127.0.0.1:5000", diff --git a/docs/docs/integrations/retrievers/activeloop.ipynb b/docs/docs/integrations/retrievers/activeloop.ipynb index 42b71c4a0ca7f..e703ecabf06de 100644 --- a/docs/docs/integrations/retrievers/activeloop.ipynb +++ b/docs/docs/integrations/retrievers/activeloop.ipynb @@ -276,7 +276,7 @@ "from langchain.chains.openai_functions import (\n", " create_structured_output_chain,\n", ")\n", - "from langchain.schema import HumanMessage, SystemMessage\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate\n", "from 
langchain_openai import ChatOpenAI\n", "from pydantic import BaseModel, Field" diff --git a/docs/docs/integrations/retrievers/bm25.ipynb b/docs/docs/integrations/retrievers/bm25.ipynb index 311605411406c..d7416048d85f5 100644 --- a/docs/docs/integrations/retrievers/bm25.ipynb +++ b/docs/docs/integrations/retrievers/bm25.ipynb @@ -81,7 +81,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", + "from langchain_core.documents import Document\n", "\n", "retriever = BM25Retriever.from_documents(\n", " [\n", diff --git a/docs/docs/integrations/retrievers/fleet_context.ipynb b/docs/docs/integrations/retrievers/fleet_context.ipynb index 736184a83666d..0609d6cba2f72 100644 --- a/docs/docs/integrations/retrievers/fleet_context.ipynb +++ b/docs/docs/integrations/retrievers/fleet_context.ipynb @@ -34,8 +34,8 @@ "\n", "import pandas as pd\n", "from langchain.retrievers import MultiVectorRetriever\n", - "from langchain.schema import Document\n", "from langchain_community.vectorstores import FAISS\n", + "from langchain_core.documents import Document\n", "from langchain_core.stores import BaseStore\n", "from langchain_core.vectorstores import VectorStore\n", "from langchain_openai import OpenAIEmbeddings\n", @@ -194,7 +194,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import StrOutputParser\n", + "from langchain_core.output_parsers import StrOutputParser\n", "from langchain_core.prompts import ChatPromptTemplate\n", "from langchain_core.runnables import RunnablePassthrough\n", "from langchain_openai import ChatOpenAI\n", diff --git a/docs/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query.ipynb index 8b4f7aeed99b6..64cdc8f670261 100644 --- a/docs/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/activeloop_deeplake_self_query.ipynb @@ 
-83,8 +83,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import DeepLake\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "embeddings = OpenAIEmbeddings()" diff --git a/docs/docs/integrations/retrievers/self_query/astradb.ipynb b/docs/docs/integrations/retrievers/self_query/astradb.ipynb index 43386e6a94b47..aa8e81b5e148b 100644 --- a/docs/docs/integrations/retrievers/self_query/astradb.ipynb +++ b/docs/docs/integrations/retrievers/self_query/astradb.ipynb @@ -84,8 +84,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain.vectorstores import AstraDB\n", + "from langchain_core.documents import Document\n", "\n", "docs = [\n", " Document(\n", diff --git a/docs/docs/integrations/retrievers/self_query/chroma_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/chroma_self_query.ipynb index 7341495e8a079..8e316f146d97d 100644 --- a/docs/docs/integrations/retrievers/self_query/chroma_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/chroma_self_query.ipynb @@ -87,8 +87,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import Chroma\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "embeddings = OpenAIEmbeddings()" diff --git a/docs/docs/integrations/retrievers/self_query/dashvector.ipynb b/docs/docs/integrations/retrievers/self_query/dashvector.ipynb index 4f3e0b09ec87a..7f58c757d9418 100644 --- a/docs/docs/integrations/retrievers/self_query/dashvector.ipynb +++ b/docs/docs/integrations/retrievers/self_query/dashvector.ipynb @@ -92,9 +92,9 @@ }, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.embeddings import DashScopeEmbeddings\n", "from langchain_community.vectorstores 
import DashVector\n", + "from langchain_core.documents import Document\n", "\n", "embeddings = DashScopeEmbeddings()\n", "\n", diff --git a/docs/docs/integrations/retrievers/self_query/elasticsearch_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/elasticsearch_self_query.ipynb index 047f48f8e21c3..6bef6db0d0c24 100644 --- a/docs/docs/integrations/retrievers/self_query/elasticsearch_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/elasticsearch_self_query.ipynb @@ -60,8 +60,8 @@ "import getpass\n", "import os\n", "\n", - "from langchain.schema import Document\n", "from langchain_community.vectorstores import ElasticsearchStore\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass(\"OpenAI API Key:\")\n", diff --git a/docs/docs/integrations/retrievers/self_query/milvus_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/milvus_self_query.ipynb index d5d9775ec8343..2a66e110406c5 100644 --- a/docs/docs/integrations/retrievers/self_query/milvus_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/milvus_self_query.ipynb @@ -67,8 +67,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import Milvus\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "embeddings = OpenAIEmbeddings()" diff --git a/docs/docs/integrations/retrievers/self_query/mongodb_atlas.ipynb b/docs/docs/integrations/retrievers/self_query/mongodb_atlas.ipynb index 1ab8ae718685e..cfe0aa6a79e44 100644 --- a/docs/docs/integrations/retrievers/self_query/mongodb_atlas.ipynb +++ b/docs/docs/integrations/retrievers/self_query/mongodb_atlas.ipynb @@ -57,8 +57,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import 
MongoDBAtlasVectorSearch\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "from pymongo import MongoClient\n", "\n", diff --git a/docs/docs/integrations/retrievers/self_query/myscale_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/myscale_self_query.ipynb index a886efe8f57f9..3fb8c27ea47ab 100644 --- a/docs/docs/integrations/retrievers/self_query/myscale_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/myscale_self_query.ipynb @@ -78,8 +78,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import MyScale\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "embeddings = OpenAIEmbeddings()" diff --git a/docs/docs/integrations/retrievers/self_query/opensearch_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/opensearch_self_query.ipynb index 7779ccc51927f..1f46e5e2d25c7 100644 --- a/docs/docs/integrations/retrievers/self_query/opensearch_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/opensearch_self_query.ipynb @@ -59,8 +59,8 @@ "import getpass\n", "import os\n", "\n", - "from langchain.schema import Document\n", "from langchain_community.vectorstores import OpenSearchVectorSearch\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass(\"OpenAI API Key:\")\n", diff --git a/docs/docs/integrations/retrievers/self_query/pgvector_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/pgvector_self_query.ipynb index b647510936277..8daf192f59319 100644 --- a/docs/docs/integrations/retrievers/self_query/pgvector_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/pgvector_self_query.ipynb @@ -67,8 +67,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from 
langchain_community.vectorstores import PGVector\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "collection = \"Name of your collection\"\n", diff --git a/docs/docs/integrations/retrievers/self_query/pinecone.ipynb b/docs/docs/integrations/retrievers/self_query/pinecone.ipynb index 576cd7e53b74c..fe1ceaa988ba0 100644 --- a/docs/docs/integrations/retrievers/self_query/pinecone.ipynb +++ b/docs/docs/integrations/retrievers/self_query/pinecone.ipynb @@ -77,7 +77,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", + "from langchain_community.vectorstores import Pinecone\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "from langchain_pinecone import PineconeVectorStore\n", "\n", diff --git a/docs/docs/integrations/retrievers/self_query/qdrant_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/qdrant_self_query.ipynb index f7915707ba594..063fa3573d7a5 100644 --- a/docs/docs/integrations/retrievers/self_query/qdrant_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/qdrant_self_query.ipynb @@ -70,8 +70,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import Qdrant\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "embeddings = OpenAIEmbeddings()" diff --git a/docs/docs/integrations/retrievers/self_query/redis_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/redis_self_query.ipynb index 42c57603e7c21..0d5adf0ce0c4c 100644 --- a/docs/docs/integrations/retrievers/self_query/redis_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/redis_self_query.ipynb @@ -67,8 +67,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import Redis\n", + "from 
langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "embeddings = OpenAIEmbeddings()" diff --git a/docs/docs/integrations/retrievers/self_query/supabase_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/supabase_self_query.ipynb index fe16a03a79090..7477cfec580b5 100644 --- a/docs/docs/integrations/retrievers/self_query/supabase_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/supabase_self_query.ipynb @@ -217,8 +217,8 @@ "source": [ "import os\n", "\n", - "from langchain.schema import Document\n", "from langchain_community.vectorstores import SupabaseVectorStore\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "from supabase.client import Client, create_client\n", "\n", diff --git a/docs/docs/integrations/retrievers/self_query/timescalevector_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/timescalevector_self_query.ipynb index acac5be8e2b1d..9dc762d025ed7 100644 --- a/docs/docs/integrations/retrievers/self_query/timescalevector_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/timescalevector_self_query.ipynb @@ -143,8 +143,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores.timescalevector import TimescaleVector\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "embeddings = OpenAIEmbeddings()" diff --git a/docs/docs/integrations/retrievers/self_query/vectara_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/vectara_self_query.ipynb index d8171be1cafc4..cedc3eb510326 100644 --- a/docs/docs/integrations/retrievers/self_query/vectara_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/vectara_self_query.ipynb @@ -89,11 +89,11 @@ "from langchain.chains import ConversationalRetrievalChain\n", "from 
langchain.chains.query_constructor.base import AttributeInfo\n", "from langchain.retrievers.self_query.base import SelfQueryRetriever\n", - "from langchain.schema import Document\n", "from langchain.text_splitter import CharacterTextSplitter\n", "from langchain_community.document_loaders import TextLoader\n", "from langchain_community.embeddings import FakeEmbeddings\n", "from langchain_community.vectorstores import Vectara\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAI" ] }, diff --git a/docs/docs/integrations/retrievers/self_query/weaviate_self_query.ipynb b/docs/docs/integrations/retrievers/self_query/weaviate_self_query.ipynb index 1e50c9ad21a77..ed29277217cbc 100644 --- a/docs/docs/integrations/retrievers/self_query/weaviate_self_query.ipynb +++ b/docs/docs/integrations/retrievers/self_query/weaviate_self_query.ipynb @@ -45,8 +45,8 @@ }, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import Weaviate\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "embeddings = OpenAIEmbeddings()" diff --git a/docs/docs/integrations/retrievers/tf_idf.ipynb b/docs/docs/integrations/retrievers/tf_idf.ipynb index 9576f8b0dfe8e..3e8d2eb4bd5fd 100644 --- a/docs/docs/integrations/retrievers/tf_idf.ipynb +++ b/docs/docs/integrations/retrievers/tf_idf.ipynb @@ -73,7 +73,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", + "from langchain_core.documents import Document\n", "\n", "retriever = TFIDFRetriever.from_documents(\n", " [\n", diff --git a/docs/docs/integrations/retrievers/weaviate-hybrid.ipynb b/docs/docs/integrations/retrievers/weaviate-hybrid.ipynb index 43c24d0930058..fdcad48d659c7 100644 --- a/docs/docs/integrations/retrievers/weaviate-hybrid.ipynb +++ b/docs/docs/integrations/retrievers/weaviate-hybrid.ipynb @@ -74,7 +74,7 @@ ], "source": [ "from 
langchain.retrievers.weaviate_hybrid_search import WeaviateHybridSearchRetriever\n", - "from langchain.schema import Document" + "from langchain_core.documents import Document" ] }, { diff --git a/docs/docs/integrations/retrievers/zep_memorystore.ipynb b/docs/docs/integrations/retrievers/zep_memorystore.ipynb index 8aeca6c1add0c..95a4de10efe25 100644 --- a/docs/docs/integrations/retrievers/zep_memorystore.ipynb +++ b/docs/docs/integrations/retrievers/zep_memorystore.ipynb @@ -63,7 +63,7 @@ "from uuid import uuid4\n", "\n", "from langchain.memory import ZepMemory\n", - "from langchain.schema import AIMessage, HumanMessage\n", + "from langchain_core.messages import AIMessage, HumanMessage\n", "\n", "# Set this to your Zep server URL\n", "ZEP_API_URL = \"http://localhost:8000\"" diff --git a/docs/docs/integrations/toolkits/cogniswitch.ipynb b/docs/docs/integrations/toolkits/cogniswitch.ipynb index 836f425cf6055..e39168cb477c2 100644 --- a/docs/docs/integrations/toolkits/cogniswitch.ipynb +++ b/docs/docs/integrations/toolkits/cogniswitch.ipynb @@ -8,13 +8,19 @@ "## Cogniswitch Tools\n", "\n", "**Use CogniSwitch to build production ready applications that can consume, organize and retrieve knowledge flawlessly. Using the framework of your choice, in this case Langchain CogniSwitch helps alleviate the stress of decision making when it comes to, choosing the right storage and retrieval formats. It also eradicates reliability issues and hallucinations when it comes to responses that are generated. 
Get started by interacting with your knowledge in just two simple steps.**\n", - "\n", - "visit [https://www.cogniswitch.ai/developer to register](https://www.cogniswitch.ai/developer?utm_source=langchain&utm_medium=langchainbuild&utm_id=dev).\n\n", - "**Registration:** \n\n", - "- Signup with your email and verify your registration \n\n", - "- You will get a mail with a platform token and oauth token for using the services.\n\n\n", "\n", - "**step 1: Instantiate the toolkit and get the tools:**\n\n", + "visit [https://www.cogniswitch.ai/developer to register](https://www.cogniswitch.ai/developer?utm_source=langchain&utm_medium=langchainbuild&utm_id=dev).\n", + "\n", + "**Registration:** \n", + "\n", + "- Signup with your email and verify your registration \n", + "\n", + "- You will get a mail with a platform token and oauth token for using the services.\n", + "\n", + "\n", + "\n", + "**step 1: Instantiate the toolkit and get the tools:**\n", + "\n", "- Instantiate the cogniswitch toolkit with the cogniswitch token, openAI API key and OAuth token and get the tools. 
\n", "\n", "**step 2: Instantiate the agent with the tools and llm:**\n", @@ -61,8 +67,8 @@ "import os\n", "\n", "from langchain.agents.agent_toolkits import create_conversational_retrieval_agent\n", - "from langchain.chat_models import ChatOpenAI\n", - "from langchain_community.agent_toolkits import CogniswitchToolkit" + "from langchain_community.agent_toolkits import CogniswitchToolkit\n", + "from langchain_openai import ChatOpenAI" ] }, { diff --git a/docs/docs/integrations/toolkits/connery.ipynb b/docs/docs/integrations/toolkits/connery.ipynb index 184b934b6353b..335fbcf34e364 100644 --- a/docs/docs/integrations/toolkits/connery.ipynb +++ b/docs/docs/integrations/toolkits/connery.ipynb @@ -74,8 +74,8 @@ "import os\n", "\n", "from langchain.agents import AgentType, initialize_agent\n", - "from langchain.chat_models import ChatOpenAI\n", "from langchain_community.agent_toolkits.connery import ConneryToolkit\n", + "from langchain_community.chat_models import ChatOpenAI\n", "from langchain_community.tools.connery import ConneryService\n", "\n", "# Specify your Connery Runner credentials.\n", diff --git a/docs/docs/integrations/toolkits/robocorp.ipynb b/docs/docs/integrations/toolkits/robocorp.ipynb index 2dc39ea75e62c..a5ba7b3fb4966 100644 --- a/docs/docs/integrations/toolkits/robocorp.ipynb +++ b/docs/docs/integrations/toolkits/robocorp.ipynb @@ -131,8 +131,8 @@ ], "source": [ "from langchain.agents import AgentExecutor, OpenAIFunctionsAgent\n", - "from langchain.chat_models import ChatOpenAI\n", "from langchain_core.messages import SystemMessage\n", + "from langchain_openai import ChatOpenAI\n", "from langchain_robocorp import ActionServerToolkit\n", "\n", "# Initialize LLM chat model\n", diff --git a/docs/docs/integrations/tools/connery.ipynb b/docs/docs/integrations/tools/connery.ipynb index a5c08296937c3..43228f56f38d8 100644 --- a/docs/docs/integrations/tools/connery.ipynb +++ b/docs/docs/integrations/tools/connery.ipynb @@ -49,8 +49,8 @@ "import os\n", "\n", 
"from langchain.agents import AgentType, initialize_agent\n", - "from langchain.chat_models import ChatOpenAI\n", "from langchain_community.tools.connery import ConneryService\n", + "from langchain_openai import ChatOpenAI\n", "\n", "# Specify your Connery Runner credentials.\n", "os.environ[\"CONNERY_RUNNER_URL\"] = \"\"\n", diff --git a/docs/docs/integrations/tools/exa_search.ipynb b/docs/docs/integrations/tools/exa_search.ipynb index 5043225331399..8347c2cb5b87f 100644 --- a/docs/docs/integrations/tools/exa_search.ipynb +++ b/docs/docs/integrations/tools/exa_search.ipynb @@ -204,7 +204,7 @@ "outputs": [], "source": [ "from langchain.agents import AgentExecutor, OpenAIFunctionsAgent\n", - "from langchain.schema import SystemMessage\n", + "from langchain_core.messages import SystemMessage\n", "from langchain_openai import ChatOpenAI\n", "\n", "llm = ChatOpenAI(temperature=0)\n", @@ -393,7 +393,7 @@ "outputs": [], "source": [ "from langchain.agents import AgentExecutor, OpenAIFunctionsAgent\n", - "from langchain.schema import SystemMessage\n", + "from langchain_core.messages import SystemMessage\n", "from langchain_openai import ChatOpenAI\n", "\n", "llm = ChatOpenAI(temperature=0, model=\"gpt-4\")\n", diff --git a/docs/docs/integrations/vectorstores/astradb.ipynb b/docs/docs/integrations/vectorstores/astradb.ipynb index fe980dd137762..92f2b3d0f369f 100644 --- a/docs/docs/integrations/vectorstores/astradb.ipynb +++ b/docs/docs/integrations/vectorstores/astradb.ipynb @@ -91,9 +91,9 @@ "from datasets import (\n", " load_dataset,\n", ")\n", - "from langchain.schema import Document\n", "from langchain.text_splitter import RecursiveCharacterTextSplitter\n", "from langchain_community.document_loaders import PyPDFLoader\n", + "from langchain_core.documents import Document\n", "from langchain_core.output_parsers import StrOutputParser\n", "from langchain_core.prompts import ChatPromptTemplate\n", "from langchain_core.runnables import RunnablePassthrough\n", diff --git 
a/docs/docs/integrations/vectorstores/cassandra.ipynb b/docs/docs/integrations/vectorstores/cassandra.ipynb index 524f76a1052c9..b18376c73e875 100644 --- a/docs/docs/integrations/vectorstores/cassandra.ipynb +++ b/docs/docs/integrations/vectorstores/cassandra.ipynb @@ -74,9 +74,9 @@ "from datasets import (\n", " load_dataset,\n", ")\n", - "from langchain.schema import Document\n", "from langchain.text_splitter import RecursiveCharacterTextSplitter\n", "from langchain_community.document_loaders import PyPDFLoader\n", + "from langchain_core.documents import Document\n", "from langchain_core.output_parsers import StrOutputParser\n", "from langchain_core.prompts import ChatPromptTemplate\n", "from langchain_core.runnables import RunnablePassthrough\n", diff --git a/docs/docs/integrations/vectorstores/faiss.ipynb b/docs/docs/integrations/vectorstores/faiss.ipynb index 54894e4c66e75..12f3f1e45d871 100644 --- a/docs/docs/integrations/vectorstores/faiss.ipynb +++ b/docs/docs/integrations/vectorstores/faiss.ipynb @@ -437,7 +437,7 @@ } ], "source": [ - "from langchain.schema import Document\n", + "from langchain_core.documents import Document\n", "\n", "list_of_documents = [\n", " Document(page_content=\"foo\", metadata=dict(page=1)),\n", diff --git a/docs/docs/integrations/vectorstores/faiss_async.ipynb b/docs/docs/integrations/vectorstores/faiss_async.ipynb index a4663ec85a58b..38f94c4f53c11 100644 --- a/docs/docs/integrations/vectorstores/faiss_async.ipynb +++ b/docs/docs/integrations/vectorstores/faiss_async.ipynb @@ -288,7 +288,7 @@ } ], "source": [ - "from langchain.schema import Document\n", + "from langchain_core.documents import Document\n", "\n", "list_of_documents = [\n", " Document(page_content=\"foo\", metadata=dict(page=1)),\n", diff --git a/docs/docs/modules/callbacks/async_callbacks.ipynb b/docs/docs/modules/callbacks/async_callbacks.ipynb index 244fc1e7763c4..a1326a5cc612a 100644 --- a/docs/docs/modules/callbacks/async_callbacks.ipynb +++ 
b/docs/docs/modules/callbacks/async_callbacks.ipynb @@ -62,7 +62,8 @@ "from typing import Any, Dict, List\n", "\n", "from langchain.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler\n", - "from langchain.schema import HumanMessage, LLMResult\n", + "from langchain_core.messages import HumanMessage\n", + "from langchain_core.outputs import LLMResult\n", "from langchain_openai import ChatOpenAI\n", "\n", "\n", diff --git a/docs/docs/modules/callbacks/custom_callbacks.ipynb b/docs/docs/modules/callbacks/custom_callbacks.ipynb index 185a249dd90aa..8adaafd6e0386 100644 --- a/docs/docs/modules/callbacks/custom_callbacks.ipynb +++ b/docs/docs/modules/callbacks/custom_callbacks.ipynb @@ -53,7 +53,7 @@ ], "source": [ "from langchain.callbacks.base import BaseCallbackHandler\n", - "from langchain.schema import HumanMessage\n", + "from langchain_core.messages import HumanMessage\n", "from langchain_openai import ChatOpenAI\n", "\n", "\n", diff --git a/docs/docs/modules/callbacks/multiple_callbacks.ipynb b/docs/docs/modules/callbacks/multiple_callbacks.ipynb index 208c52922596f..252f660c15f4f 100644 --- a/docs/docs/modules/callbacks/multiple_callbacks.ipynb +++ b/docs/docs/modules/callbacks/multiple_callbacks.ipynb @@ -128,7 +128,7 @@ "\n", "from langchain.agents import AgentType, initialize_agent, load_tools\n", "from langchain.callbacks.base import BaseCallbackHandler\n", - "from langchain.schema import AgentAction\n", + "from langchain_core.agents import AgentAction\n", "from langchain_openai import OpenAI\n", "\n", "\n", diff --git a/docs/docs/modules/data_connection/indexing.ipynb b/docs/docs/modules/data_connection/indexing.ipynb index 45a5d92a42bc9..7767f931ddb36 100644 --- a/docs/docs/modules/data_connection/indexing.ipynb +++ b/docs/docs/modules/data_connection/indexing.ipynb @@ -91,8 +91,8 @@ "outputs": [], "source": [ "from langchain.indexes import SQLRecordManager, index\n", - "from langchain.schema import Document\n", "from langchain_community.vectorstores import ElasticsearchStore\n", +
"from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings" ] }, diff --git a/docs/docs/modules/data_connection/retrievers/index.mdx b/docs/docs/modules/data_connection/retrievers/index.mdx index 4f5ca07284144..40e3efa229e94 100644 --- a/docs/docs/modules/data_connection/retrievers/index.mdx +++ b/docs/docs/modules/data_connection/retrievers/index.mdx @@ -49,7 +49,7 @@ Since retrievers are `Runnable`'s, we can easily compose them with other `Runnab ```python from langchain_openai import ChatOpenAI from langchain_core.prompts import ChatPromptTemplate -from langchain.schema import StrOutputParser +from langchain_core.output_parsers import StrOutputParser from langchain_core.runnables import RunnablePassthrough template = """Answer the question based only on the following context: diff --git a/docs/docs/modules/data_connection/retrievers/self_query.ipynb b/docs/docs/modules/data_connection/retrievers/self_query.ipynb index 2b44db886f0ae..973a55ad699d0 100644 --- a/docs/docs/modules/data_connection/retrievers/self_query.ipynb +++ b/docs/docs/modules/data_connection/retrievers/self_query.ipynb @@ -40,8 +40,8 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import Document\n", "from langchain_community.vectorstores import Chroma\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings\n", "\n", "docs = [\n", diff --git a/docs/docs/modules/data_connection/retrievers/time_weighted_vectorstore.ipynb b/docs/docs/modules/data_connection/retrievers/time_weighted_vectorstore.ipynb index 3f739f0f300f7..bc334cb6401c7 100644 --- a/docs/docs/modules/data_connection/retrievers/time_weighted_vectorstore.ipynb +++ b/docs/docs/modules/data_connection/retrievers/time_weighted_vectorstore.ipynb @@ -30,8 +30,8 @@ "import faiss\n", "from langchain.docstore import InMemoryDocstore\n", "from langchain.retrievers import TimeWeightedVectorStoreRetriever\n", - "from langchain.schema 
import Document\n", "from langchain_community.vectorstores import FAISS\n", + "from langchain_core.documents import Document\n", "from langchain_openai import OpenAIEmbeddings" ] }, diff --git a/docs/docs/modules/memory/adding_memory.ipynb b/docs/docs/modules/memory/adding_memory.ipynb index e68e8437cc5d3..bbfe51344a612 100644 --- a/docs/docs/modules/memory/adding_memory.ipynb +++ b/docs/docs/modules/memory/adding_memory.ipynb @@ -181,7 +181,7 @@ " HumanMessagePromptTemplate,\n", " MessagesPlaceholder,\n", ")\n", - "from langchain.schema import SystemMessage\n", + "from langchain_core.messages import SystemMessage\n", "from langchain_openai import ChatOpenAI" ] }, diff --git a/docs/docs/modules/model_io/chat/quick_start.ipynb b/docs/docs/modules/model_io/chat/quick_start.ipynb index 1be5c1f3acd9d..3c48cf50be88b 100644 --- a/docs/docs/modules/model_io/chat/quick_start.ipynb +++ b/docs/docs/modules/model_io/chat/quick_start.ipynb @@ -633,7 +633,7 @@ } ], "source": [ - "from langchain.schema import HumanMessage, SystemMessage\n", + "from langchain_core.messages import HumanMessage, SystemMessage\n", "\n", "chat(\n", " [\n", diff --git a/docs/docs/modules/model_io/prompts/composition.ipynb b/docs/docs/modules/model_io/prompts/composition.ipynb index 0069ca72a74fa..f94248efecf76 100644 --- a/docs/docs/modules/model_io/prompts/composition.ipynb +++ b/docs/docs/modules/model_io/prompts/composition.ipynb @@ -169,7 +169,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.schema import AIMessage, HumanMessage, SystemMessage" + "from langchain_core.messages import AIMessage, HumanMessage, SystemMessage" ] }, { diff --git a/docs/docs/modules/model_io/quick_start.mdx b/docs/docs/modules/model_io/quick_start.mdx index 3c6be1d519909..62a78413d2b16 100644 --- a/docs/docs/modules/model_io/quick_start.mdx +++ b/docs/docs/modules/model_io/quick_start.mdx @@ -77,7 +77,7 @@ For a deeper conceptual explanation of this difference please see [this document We can see the 
difference between an LLM and a ChatModel when we invoke it. ```python -from langchain.schema import HumanMessage +from langchain_core.messages import HumanMessage text = "What would be a good company name for a company that makes colorful socks?" messages = [HumanMessage(content=text)] diff --git a/docs/docs/use_cases/graph/neptune_sparql_qa.ipynb b/docs/docs/use_cases/graph/neptune_sparql_qa.ipynb index 4c464e840d522..3cba274628e3c 100644 --- a/docs/docs/use_cases/graph/neptune_sparql_qa.ipynb +++ b/docs/docs/use_cases/graph/neptune_sparql_qa.ipynb @@ -209,7 +209,7 @@ "source": [ "import boto3\n", "from langchain.chains.graph_qa.neptune_sparql import NeptuneSparqlQAChain\n", - "from langchain.chat_models import BedrockChat\n", + "from langchain_community.chat_models import BedrockChat\n", "from langchain_community.graphs import NeptuneRdfGraph\n", "\n", "host = \"\"\n", diff --git a/libs/core/langchain_core/runnables/base.py b/libs/core/langchain_core/runnables/base.py index 8f2f23d76e0ee..58ab8a0a8ff14 100644 --- a/libs/core/langchain_core/runnables/base.py +++ b/libs/core/langchain_core/runnables/base.py @@ -2934,7 +2934,7 @@ async def agen(input: AsyncIterator[Any]) -> AsyncIterator[str]: from langchain_core.prompts import ChatPromptTemplate from langchain_core.runnables import RunnableGenerator, RunnableLambda from langchain_openai import ChatOpenAI - from langchain.schema import StrOutputParser + from langchain_core.output_parsers import StrOutputParser model = ChatOpenAI() diff --git a/libs/experimental/langchain_experimental/autonomous_agents/autogpt/output_parser.py b/libs/experimental/langchain_experimental/autonomous_agents/autogpt/output_parser.py index d1d655029c3cf..774a123054867 100644 --- a/libs/experimental/langchain_experimental/autonomous_agents/autogpt/output_parser.py +++ b/libs/experimental/langchain_experimental/autonomous_agents/autogpt/output_parser.py @@ -3,7 +3,7 @@ from abc import abstractmethod from typing import Dict, NamedTuple -from 
langchain.schema import BaseOutputParser +from langchain_core.output_parsers import BaseOutputParser class AutoGPTAction(NamedTuple): diff --git a/libs/experimental/langchain_experimental/comprehend_moderation/base_moderation.py b/libs/experimental/langchain_experimental/comprehend_moderation/base_moderation.py index 3d19794ffeefb..165962d47354b 100644 --- a/libs/experimental/langchain_experimental/comprehend_moderation/base_moderation.py +++ b/libs/experimental/langchain_experimental/comprehend_moderation/base_moderation.py @@ -2,7 +2,7 @@ from typing import Any, Callable, Optional, cast from langchain.callbacks.manager import CallbackManagerForChainRun -from langchain.schema import AIMessage, HumanMessage +from langchain_core.messages import AIMessage, HumanMessage from langchain_core.prompt_values import ChatPromptValue, StringPromptValue from langchain_experimental.comprehend_moderation.pii import ComprehendPII diff --git a/libs/experimental/langchain_experimental/graph_transformers/diffbot.py b/libs/experimental/langchain_experimental/graph_transformers/diffbot.py index 302a4d1fb1ea2..58c84094edac9 100644 --- a/libs/experimental/langchain_experimental/graph_transformers/diffbot.py +++ b/libs/experimental/langchain_experimental/graph_transformers/diffbot.py @@ -1,9 +1,9 @@ from typing import Any, Dict, List, Optional, Sequence, Tuple, Union import requests -from langchain.schema import Document from langchain.utils import get_from_env from langchain_community.graphs.graph_document import GraphDocument, Node, Relationship +from langchain_core.documents import Document def format_property_key(s: str) -> str: diff --git a/libs/experimental/langchain_experimental/llm_bash/prompt.py b/libs/experimental/langchain_experimental/llm_bash/prompt.py index 72951d2fe9f01..3ba55e8e064d0 100644 --- a/libs/experimental/langchain_experimental/llm_bash/prompt.py +++ b/libs/experimental/langchain_experimental/llm_bash/prompt.py @@ -5,7 +5,8 @@ from typing import List from 
langchain.prompts.prompt import PromptTemplate -from langchain.schema import BaseOutputParser, OutputParserException +from langchain_core.output_parsers import BaseOutputParser +from langchain_core.exceptions import OutputParserException _PROMPT_TEMPLATE = """If someone asks you to perform a task, your job is to come up with a series of bash commands that will perform the task. There is no need to put "#!/bin/bash" in your answer. Make sure to reason step by step, using this format: diff --git a/libs/experimental/langchain_experimental/plan_and_execute/schema.py b/libs/experimental/langchain_experimental/plan_and_execute/schema.py index 41a323880a070..2fc2660def15f 100644 --- a/libs/experimental/langchain_experimental/plan_and_execute/schema.py +++ b/libs/experimental/langchain_experimental/plan_and_execute/schema.py @@ -1,7 +1,7 @@ from abc import abstractmethod from typing import List, Tuple -from langchain.schema import BaseOutputParser +from langchain_core.output_parsers import BaseOutputParser from langchain_experimental.pydantic_v1 import BaseModel, Field diff --git a/libs/experimental/langchain_experimental/retrievers/vector_sql_database.py b/libs/experimental/langchain_experimental/retrievers/vector_sql_database.py index 1ec088dbc515f..fdde2da32759c 100644 --- a/libs/experimental/langchain_experimental/retrievers/vector_sql_database.py +++ b/libs/experimental/langchain_experimental/retrievers/vector_sql_database.py @@ -1,11 +1,13 @@ """Vector SQL Database Chain Retriever""" + from typing import Any, Dict, List from langchain.callbacks.manager import ( AsyncCallbackManagerForRetrieverRun, CallbackManagerForRetrieverRun, ) -from langchain.schema import BaseRetriever, Document +from langchain_core.documents import Document +from langchain_core.retrievers import BaseRetriever from langchain_experimental.sql.vector_sql import VectorSQLDatabaseChain diff --git a/libs/experimental/langchain_experimental/sql/vector_sql.py 
b/libs/experimental/langchain_experimental/sql/vector_sql.py index a21a6bae8682e..07125e1f828ea 100644 --- a/libs/experimental/langchain_experimental/sql/vector_sql.py +++ b/libs/experimental/langchain_experimental/sql/vector_sql.py @@ -1,4 +1,5 @@ """Vector SQL Database Chain Retriever""" + from __future__ import annotations from typing import Any, Dict, List, Optional, Sequence, Union @@ -7,11 +8,12 @@ from langchain.chains.llm import LLMChain from langchain.chains.sql_database.prompt import PROMPT, SQL_PROMPTS from langchain.prompts.prompt import PromptTemplate -from langchain.schema import BaseOutputParser, BasePromptTemplate from langchain_community.tools.sql_database.prompt import QUERY_CHECKER from langchain_community.utilities.sql_database import SQLDatabase from langchain_core.embeddings import Embeddings from langchain_core.language_models import BaseLanguageModel +from langchain_core.output_parsers import BaseOutputParser +from langchain_core.prompts import BasePromptTemplate from langchain_experimental.sql.base import INTERMEDIATE_STEPS_KEY, SQLDatabaseChain diff --git a/libs/experimental/langchain_experimental/tot/prompts.py b/libs/experimental/langchain_experimental/tot/prompts.py index b11e9072a6013..a59a2be9e36bc 100644 --- a/libs/experimental/langchain_experimental/tot/prompts.py +++ b/libs/experimental/langchain_experimental/tot/prompts.py @@ -3,7 +3,7 @@ from typing import List from langchain.prompts import PromptTemplate -from langchain.schema import BaseOutputParser +from langchain_core.output_parsers import BaseOutputParser from langchain_experimental.tot.thought import ThoughtValidity diff --git a/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_llama2chat.py b/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_llama2chat.py index 3135b10745f72..abf1129f60d0b 100644 --- a/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_llama2chat.py +++ 
b/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_llama2chat.py @@ -5,8 +5,8 @@ AsyncCallbackManagerForLLMRun, CallbackManagerForLLMRun, ) -from langchain.schema import AIMessage, HumanMessage, SystemMessage from langchain_core.language_models import LLM +from langchain_core.messages import AIMessage, HumanMessage, SystemMessage from langchain_experimental.chat_models import Llama2Chat from langchain_experimental.chat_models.llm_wrapper import DEFAULT_SYSTEM_PROMPT diff --git a/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_orca.py b/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_orca.py index 96f4c2af91873..902d163c3798c 100644 --- a/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_orca.py +++ b/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_orca.py @@ -1,5 +1,5 @@ import pytest -from langchain.schema import AIMessage, HumanMessage, SystemMessage +from langchain_core.messages import AIMessage, HumanMessage, SystemMessage from langchain_experimental.chat_models import Orca from tests.unit_tests.chat_models.test_llm_wrapper_llama2chat import FakeLLM diff --git a/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_vicuna.py b/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_vicuna.py index 21506c3788e33..8722b3ec5fc27 100644 --- a/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_vicuna.py +++ b/libs/experimental/tests/unit_tests/chat_models/test_llm_wrapper_vicuna.py @@ -1,5 +1,5 @@ import pytest -from langchain.schema import AIMessage, HumanMessage, SystemMessage +from langchain_core.messages import AIMessage, HumanMessage, SystemMessage from langchain_experimental.chat_models import Vicuna from tests.unit_tests.chat_models.test_llm_wrapper_llama2chat import FakeLLM diff --git a/templates/cassandra-synonym-caching/cassandra_synonym_caching/__init__.py b/templates/cassandra-synonym-caching/cassandra_synonym_caching/__init__.py index 
85ee65b883396..04ca550915fc6 100644 --- a/templates/cassandra-synonym-caching/cassandra_synonym_caching/__init__.py +++ b/templates/cassandra-synonym-caching/cassandra_synonym_caching/__init__.py @@ -3,8 +3,8 @@ import cassio import langchain from langchain.cache import CassandraCache -from langchain.schema import BaseMessage from langchain_community.chat_models import ChatOpenAI +from langchain_core.messages import BaseMessage from langchain_core.prompts import ChatPromptTemplate from langchain_core.runnables import RunnableLambda diff --git a/templates/chain-of-note-wiki/chain_of_note_wiki/chain.py b/templates/chain-of-note-wiki/chain_of_note_wiki/chain.py index 0f0f8c45a8901..5252e6784b253 100644 --- a/templates/chain-of-note-wiki/chain_of_note_wiki/chain.py +++ b/templates/chain-of-note-wiki/chain_of_note_wiki/chain.py @@ -1,7 +1,7 @@ from langchain import hub -from langchain.schema import StrOutputParser from langchain_community.chat_models import ChatAnthropic from langchain_community.utilities import WikipediaAPIWrapper +from langchain_core.output_parsers import StrOutputParser from langchain_core.pydantic_v1 import BaseModel from langchain_core.runnables import RunnableLambda, RunnablePassthrough diff --git a/templates/chat-bot-feedback/README.md b/templates/chat-bot-feedback/README.md index f12aef309b170..dd8739b9dc1ef 100644 --- a/templates/chat-bot-feedback/README.md +++ b/templates/chat-bot-feedback/README.md @@ -69,7 +69,7 @@ from functools import partial from typing import Dict, Optional, Callable, List from langserve import RemoteRunnable from langchain.callbacks.manager import tracing_v2_enabled -from langchain.schema import BaseMessage, AIMessage, HumanMessage +from langchain_core.messages import BaseMessage, AIMessage, HumanMessage # Update with the URL provided by your LangServe server chain = RemoteRunnable("http://127.0.0.1:8031/chat-bot-feedback") diff --git a/templates/neo4j-generation/neo4j_generation/chain.py 
b/templates/neo4j-generation/neo4j_generation/chain.py index aa7ea35f62a93..a9dac72d29cf3 100644 --- a/templates/neo4j-generation/neo4j_generation/chain.py +++ b/templates/neo4j-generation/neo4j_generation/chain.py @@ -3,10 +3,10 @@ from langchain.chains.openai_functions import ( create_structured_output_chain, ) -from langchain.schema import Document from langchain_community.chat_models import ChatOpenAI from langchain_community.graphs import Neo4jGraph from langchain_community.graphs.graph_document import GraphDocument +from langchain_core.documents import Document from langchain_core.prompts import ChatPromptTemplate from neo4j_generation.utils import ( diff --git a/templates/neo4j-semantic-layer/neo4j_semantic_layer/agent.py b/templates/neo4j-semantic-layer/neo4j_semantic_layer/agent.py index 976f27b8ad629..79fbae96b7f38 100644 --- a/templates/neo4j-semantic-layer/neo4j_semantic_layer/agent.py +++ b/templates/neo4j-semantic-layer/neo4j_semantic_layer/agent.py @@ -5,9 +5,9 @@ from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder from langchain.pydantic_v1 import BaseModel, Field -from langchain.schema import AIMessage, HumanMessage from langchain.tools.render import format_tool_to_openai_function from langchain_community.chat_models import ChatOpenAI +from langchain_core.messages import AIMessage, HumanMessage from neo4j_semantic_layer.information_tool import InformationTool from neo4j_semantic_layer.memory_tool import MemoryTool @@ -45,9 +45,9 @@ def _format_chat_history(chat_history: List[Tuple[str, str]]): agent = ( { "input": lambda x: x["input"], - "chat_history": lambda x: _format_chat_history(x["chat_history"]) - if x.get("chat_history") - else [], + "chat_history": lambda x: ( + _format_chat_history(x["chat_history"]) if x.get("chat_history") else [] + ), "agent_scratchpad": lambda x: format_to_openai_function_messages( x["intermediate_steps"] ), diff --git 
a/templates/neo4j-semantic-ollama/neo4j_semantic_ollama/agent.py b/templates/neo4j-semantic-ollama/neo4j_semantic_ollama/agent.py index ad4010d6cfc92..08d8fdc2a17e0 100644 --- a/templates/neo4j-semantic-ollama/neo4j_semantic_ollama/agent.py +++ b/templates/neo4j-semantic-ollama/neo4j_semantic_ollama/agent.py @@ -8,9 +8,9 @@ ) from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder from langchain.pydantic_v1 import BaseModel, Field -from langchain.schema import AIMessage, HumanMessage from langchain.tools.render import render_text_description_and_args from langchain_community.chat_models import ChatOllama +from langchain_core.messages import AIMessage, HumanMessage from neo4j_semantic_ollama.information_tool import InformationTool from neo4j_semantic_ollama.memory_tool import MemoryTool @@ -87,9 +87,9 @@ def _format_chat_history(chat_history: List[Tuple[str, str]]): { "input": lambda x: x["input"], "agent_scratchpad": lambda x: format_log_to_messages(x["intermediate_steps"]), - "chat_history": lambda x: _format_chat_history(x["chat_history"]) - if x.get("chat_history") - else [], + "chat_history": lambda x: ( + _format_chat_history(x["chat_history"]) if x.get("chat_history") else [] + ), } | prompt | chat_model_with_stop diff --git a/templates/neo4j-vector-memory/neo4j_vector_memory/history.py b/templates/neo4j-vector-memory/neo4j_vector_memory/history.py index 9fccc109784f6..a88fafc2c7ade 100644 --- a/templates/neo4j-vector-memory/neo4j_vector_memory/history.py +++ b/templates/neo4j-vector-memory/neo4j_vector_memory/history.py @@ -1,8 +1,8 @@ from typing import Any, Dict, List, Union from langchain.memory import ChatMessageHistory -from langchain.schema import AIMessage, HumanMessage from langchain_community.graphs import Neo4jGraph +from langchain_core.messages import AIMessage, HumanMessage graph = Neo4jGraph() diff --git a/templates/openai-functions-tool-retrieval-agent/openai_functions_tool_retrieval_agent/agent.py 
b/templates/openai-functions-tool-retrieval-agent/openai_functions_tool_retrieval_agent/agent.py index ba0e34e1ee8af..c20f50f21837f 100644 --- a/templates/openai-functions-tool-retrieval-agent/openai_functions_tool_retrieval_agent/agent.py +++ b/templates/openai-functions-tool-retrieval-agent/openai_functions_tool_retrieval_agent/agent.py @@ -6,13 +6,13 @@ ) from langchain.agents.format_scratchpad import format_to_openai_functions from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser -from langchain.schema import Document from langchain_community.chat_models import ChatOpenAI from langchain_community.embeddings import OpenAIEmbeddings from langchain_community.tools.convert_to_openai import format_tool_to_openai_function from langchain_community.tools.tavily_search import TavilySearchResults from langchain_community.utilities.tavily_search import TavilySearchAPIWrapper from langchain_community.vectorstores import FAISS +from langchain_core.documents import Document from langchain_core.messages import AIMessage, HumanMessage from langchain_core.prompts import ( ChatPromptTemplate, diff --git a/templates/rag-conversation-zep/rag_conversation_zep/chain.py b/templates/rag-conversation-zep/rag_conversation_zep/chain.py index 32d8db63da53d..1031e2cd453e8 100644 --- a/templates/rag-conversation-zep/rag_conversation_zep/chain.py +++ b/templates/rag-conversation-zep/rag_conversation_zep/chain.py @@ -2,13 +2,16 @@ from operator import itemgetter from typing import List, Tuple -from langchain.schema import AIMessage, HumanMessage, format_document from langchain_community.chat_models import ChatOpenAI from langchain_community.vectorstores.zep import CollectionConfig, ZepVectorStore from langchain_core.documents import Document -from langchain_core.messages import BaseMessage +from langchain_core.messages import AIMessage, BaseMessage, HumanMessage from langchain_core.output_parsers import StrOutputParser -from langchain_core.prompts import 
ChatPromptTemplate, MessagesPlaceholder +from langchain_core.prompts import ( + ChatPromptTemplate, + MessagesPlaceholder, + format_document, +) from langchain_core.prompts.prompt import PromptTemplate from langchain_core.pydantic_v1 import BaseModel, Field from langchain_core.runnables import ( diff --git a/templates/rag-conversation/rag_conversation/chain.py b/templates/rag-conversation/rag_conversation/chain.py index 29199b2211efe..6607e028ec1ee 100644 --- a/templates/rag-conversation/rag_conversation/chain.py +++ b/templates/rag-conversation/rag_conversation/chain.py @@ -2,11 +2,15 @@ from operator import itemgetter from typing import List, Tuple -from langchain.schema import AIMessage, HumanMessage, format_document from langchain_community.chat_models import ChatOpenAI from langchain_community.embeddings import OpenAIEmbeddings +from langchain_core.messages import AIMessage, HumanMessage from langchain_core.output_parsers import StrOutputParser -from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder +from langchain_core.prompts import ( + ChatPromptTemplate, + MessagesPlaceholder, + format_document, +) from langchain_core.prompts.prompt import PromptTemplate from langchain_core.pydantic_v1 import BaseModel, Field from langchain_core.runnables import ( diff --git a/templates/rag-elasticsearch/rag_elasticsearch/chain.py b/templates/rag-elasticsearch/rag_elasticsearch/chain.py index a28060e0e6e7e..778003e78839c 100644 --- a/templates/rag-elasticsearch/rag_elasticsearch/chain.py +++ b/templates/rag-elasticsearch/rag_elasticsearch/chain.py @@ -1,11 +1,12 @@ from operator import itemgetter from typing import List, Optional, Tuple -from langchain.schema import BaseMessage, format_document from langchain_community.chat_models import ChatOpenAI from langchain_community.embeddings import HuggingFaceEmbeddings from langchain_community.vectorstores.elasticsearch import ElasticsearchStore +from langchain_core.messages import BaseMessage from 
langchain_core.output_parsers import StrOutputParser +from langchain_core.prompts import format_document from langchain_core.pydantic_v1 import BaseModel, Field from langchain_core.runnables import RunnableParallel, RunnablePassthrough diff --git a/templates/rag-gpt-crawler/rag_gpt_crawler/chain.py b/templates/rag-gpt-crawler/rag_gpt_crawler/chain.py index 6fce7456746a5..3e322446b6765 100644 --- a/templates/rag-gpt-crawler/rag_gpt_crawler/chain.py +++ b/templates/rag-gpt-crawler/rag_gpt_crawler/chain.py @@ -1,11 +1,11 @@ import json from pathlib import Path -from langchain.schema import Document from langchain.text_splitter import RecursiveCharacterTextSplitter from langchain_community.chat_models import ChatOpenAI from langchain_community.embeddings import OpenAIEmbeddings from langchain_community.vectorstores import Chroma +from langchain_core.documents import Document from langchain_core.output_parsers import StrOutputParser from langchain_core.prompts import ChatPromptTemplate from langchain_core.pydantic_v1 import BaseModel diff --git a/templates/rag-multi-index-fusion/rag_multi_index_fusion/chain.py b/templates/rag-multi-index-fusion/rag_multi_index_fusion/chain.py index 7c059ff9008ad..48a42d088ac96 100644 --- a/templates/rag-multi-index-fusion/rag_multi_index_fusion/chain.py +++ b/templates/rag-multi-index-fusion/rag_multi_index_fusion/chain.py @@ -7,10 +7,10 @@ PubMedRetriever, WikipediaRetriever, ) -from langchain.schema import StrOutputParser from langchain.utils.math import cosine_similarity from langchain_community.chat_models import ChatOpenAI from langchain_community.embeddings import OpenAIEmbeddings +from langchain_core.output_parsers import StrOutputParser from langchain_core.prompts import ChatPromptTemplate from langchain_core.pydantic_v1 import BaseModel from langchain_core.runnables import ( diff --git a/templates/rag-multi-index-router/rag_multi_index_router/chain.py b/templates/rag-multi-index-router/rag_multi_index_router/chain.py index 
142621311395d..fddc1abeb0333 100644 --- a/templates/rag-multi-index-router/rag_multi_index_router/chain.py +++ b/templates/rag-multi-index-router/rag_multi_index_router/chain.py @@ -8,9 +8,9 @@ PubMedRetriever, WikipediaRetriever, ) -from langchain.schema import StrOutputParser from langchain.utils.openai_functions import convert_pydantic_to_openai_function from langchain_community.chat_models import ChatOpenAI +from langchain_core.output_parsers import StrOutputParser from langchain_core.prompts import ChatPromptTemplate from langchain_core.pydantic_v1 import BaseModel, Field from langchain_core.runnables import ( diff --git a/templates/rag-self-query/rag_self_query/chain.py b/templates/rag-self-query/rag_self_query/chain.py index 340ec8888698a..b0526ef890082 100644 --- a/templates/rag-self-query/rag_self_query/chain.py +++ b/templates/rag-self-query/rag_self_query/chain.py @@ -3,11 +3,11 @@ from typing import List, Tuple from langchain.retrievers import SelfQueryRetriever -from langchain.schema import format_document from langchain_community.chat_models import ChatOpenAI from langchain_community.embeddings import OpenAIEmbeddings from langchain_community.vectorstores.elasticsearch import ElasticsearchStore from langchain_core.output_parsers import StrOutputParser +from langchain_core.prompts import format_document from langchain_core.pydantic_v1 import BaseModel, Field from langchain_core.runnables import RunnableParallel, RunnablePassthrough diff --git a/templates/rag-timescale-conversation/rag_timescale_conversation/chain.py b/templates/rag-timescale-conversation/rag_timescale_conversation/chain.py index 0efa9f851dcaf..1456e67302cd1 100644 --- a/templates/rag-timescale-conversation/rag_timescale_conversation/chain.py +++ b/templates/rag-timescale-conversation/rag_timescale_conversation/chain.py @@ -4,12 +4,16 @@ from typing import List, Optional, Tuple from dotenv import find_dotenv, load_dotenv -from langchain.schema import AIMessage, HumanMessage, 
format_document from langchain_community.chat_models import ChatOpenAI from langchain_community.embeddings import OpenAIEmbeddings from langchain_community.vectorstores.timescalevector import TimescaleVector +from langchain_core.messages import AIMessage, HumanMessage from langchain_core.output_parsers import StrOutputParser -from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder +from langchain_core.prompts import ( + ChatPromptTemplate, + MessagesPlaceholder, + format_document, +) from langchain_core.prompts.prompt import PromptTemplate from langchain_core.pydantic_v1 import BaseModel, Field from langchain_core.runnables import ( @@ -147,12 +151,16 @@ def get_retriever_with_metadata(x): ) _datetime_to_string = RunnablePassthrough.assign( - start_date=lambda x: x.get("start_date", None).isoformat() - if x.get("start_date", None) is not None - else None, - end_date=lambda x: x.get("end_date", None).isoformat() - if x.get("end_date", None) is not None - else None, + start_date=lambda x: ( + x.get("start_date", None).isoformat() + if x.get("start_date", None) is not None + else None + ), + end_date=lambda x: ( + x.get("end_date", None).isoformat() + if x.get("end_date", None) is not None + else None + ), ).with_types(input_type=ChatHistory) chain = ( diff --git a/templates/retrieval-agent-fireworks/retrieval_agent_fireworks/chain.py b/templates/retrieval-agent-fireworks/retrieval_agent_fireworks/chain.py index 7207cc99d3232..6fef1ba1a97a9 100644 --- a/templates/retrieval-agent-fireworks/retrieval_agent_fireworks/chain.py +++ b/templates/retrieval-agent-fireworks/retrieval_agent_fireworks/chain.py @@ -5,12 +5,13 @@ from langchain.agents.format_scratchpad import format_log_to_str from langchain.agents.output_parsers import ReActJsonSingleInputOutputParser from langchain.callbacks.manager import CallbackManagerForRetrieverRun -from langchain.schema import BaseRetriever, Document from langchain.tools.render import render_text_description from 
langchain.tools.retriever import create_retriever_tool from langchain_community.chat_models.fireworks import ChatFireworks from langchain_community.utilities.arxiv import ArxivAPIWrapper +from langchain_core.documents import Document from langchain_core.pydantic_v1 import BaseModel +from langchain_core.retrievers import BaseRetriever MODEL_ID = "accounts/fireworks/models/mixtral-8x7b-instruct" diff --git a/templates/retrieval-agent/retrieval_agent/chain.py b/templates/retrieval-agent/retrieval_agent/chain.py index 2f774f6bfe30a..f74e37d71b662 100644 --- a/templates/retrieval-agent/retrieval_agent/chain.py +++ b/templates/retrieval-agent/retrieval_agent/chain.py @@ -5,13 +5,14 @@ from langchain.agents.format_scratchpad import format_to_openai_function_messages from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser from langchain.callbacks.manager import CallbackManagerForRetrieverRun -from langchain.schema import BaseRetriever, Document from langchain.tools.retriever import create_retriever_tool from langchain_community.tools.convert_to_openai import format_tool_to_openai_function from langchain_community.utilities.arxiv import ArxivAPIWrapper +from langchain_core.documents import Document from langchain_core.messages import AIMessage, HumanMessage from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder from langchain_core.pydantic_v1 import BaseModel, Field +from langchain_core.retrievers import BaseRetriever from langchain_openai import AzureChatOpenAI diff --git a/templates/self-query-qdrant/README.md b/templates/self-query-qdrant/README.md index c32af05dac62a..bbb0f7fccd64a 100644 --- a/templates/self-query-qdrant/README.md +++ b/templates/self-query-qdrant/README.md @@ -63,7 +63,7 @@ You can find the documents in the `packages/self-query-qdrant/self_query_qdrant/ Here is one of the documents: ```python -from langchain.schema import Document +from langchain_core.documents import Document Document( page_content="Spaghetti 
with meatballs and tomato sauce", @@ -108,7 +108,7 @@ chain = create_chain( The same goes for the `initialize` function that creates a Qdrant collection and indexes the documents: ```python -from langchain.schema import Document +from langchain_core.documents import Document from langchain_community.embeddings import HuggingFaceEmbeddings from self_query_qdrant.chain import initialize diff --git a/templates/self-query-qdrant/self_query_qdrant/chain.py b/templates/self-query-qdrant/self_query_qdrant/chain.py index 799d8816b86e8..ccc30be636191 100644 --- a/templates/self-query-qdrant/self_query_qdrant/chain.py +++ b/templates/self-query-qdrant/self_query_qdrant/chain.py @@ -3,11 +3,12 @@ from langchain.chains.query_constructor.schema import AttributeInfo from langchain.retrievers import SelfQueryRetriever -from langchain.schema import Document, StrOutputParser from langchain_community.embeddings import OpenAIEmbeddings from langchain_community.llms import BaseLLM from langchain_community.llms.openai import OpenAI from langchain_community.vectorstores.qdrant import Qdrant +from langchain_core.documents import Document from langchain_core.embeddings import Embeddings +from langchain_core.output_parsers import StrOutputParser from langchain_core.pydantic_v1 import BaseModel from langchain_core.runnables import RunnableParallel, RunnablePassthrough diff --git a/templates/self-query-qdrant/self_query_qdrant/defaults.py b/templates/self-query-qdrant/self_query_qdrant/defaults.py index ec0f90b131896..84dfb98560fd2 100644 --- a/templates/self-query-qdrant/self_query_qdrant/defaults.py +++ b/templates/self-query-qdrant/self_query_qdrant/defaults.py @@ -1,5 +1,5 @@ from langchain.chains.query_constructor.schema import AttributeInfo -from langchain.schema import Document +from langchain_core.documents import Document # Qdrant collection name DEFAULT_COLLECTION_NAME = "restaurants" diff --git a/templates/self-query-qdrant/self_query_qdrant/helper.py b/templates/self-query-qdrant/self_query_qdrant/helper.py index
8c9074b8cf83e..ab3156efb770d 100644 --- a/templates/self-query-qdrant/self_query_qdrant/helper.py +++ b/templates/self-query-qdrant/self_query_qdrant/helper.py @@ -1,7 +1,7 @@ from string import Formatter from typing import List -from langchain.schema import Document +from langchain_core.documents import Document document_template = """ PASSAGE: {page_content} diff --git a/templates/solo-performance-prompting-agent/solo_performance_prompting_agent/parser.py b/templates/solo-performance-prompting-agent/solo_performance_prompting_agent/parser.py index f81c61063fa42..0fceca5094757 100644 --- a/templates/solo-performance-prompting-agent/solo_performance_prompting_agent/parser.py +++ b/templates/solo-performance-prompting-agent/solo_performance_prompting_agent/parser.py @@ -1,4 +1,4 @@ -from langchain.schema import AgentAction, AgentFinish +from langchain_core.agents import AgentAction, AgentFinish def parse_output(message: str): diff --git a/templates/xml-agent/xml_agent/agent.py b/templates/xml-agent/xml_agent/agent.py index 6759fa9198597..13345e32407b7 100644 --- a/templates/xml-agent/xml_agent/agent.py +++ b/templates/xml-agent/xml_agent/agent.py @@ -2,10 +2,10 @@ from langchain.agents import AgentExecutor from langchain.agents.format_scratchpad import format_xml -from langchain.schema import AIMessage, HumanMessage from langchain.tools import DuckDuckGoSearchRun from langchain.tools.render import render_text_description from langchain_community.chat_models import ChatAnthropic +from langchain_core.messages import AIMessage, HumanMessage from langchain_core.pydantic_v1 import BaseModel, Field from xml_agent.prompts import conversational_prompt, parse_output diff --git a/templates/xml-agent/xml_agent/prompts.py b/templates/xml-agent/xml_agent/prompts.py index 652f8cf6dd810..d39e090885835 100644 --- a/templates/xml-agent/xml_agent/prompts.py +++ b/templates/xml-agent/xml_agent/prompts.py @@ -1,4 +1,4 @@ -from langchain.schema import AgentAction, AgentFinish +from 
langchain_core.agents import AgentAction, AgentFinish from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder template = """You are a helpful assistant. Help the user answer any questions.