Commit

Merge branch 'master' into master
markcusack authored Apr 24, 2024
2 parents 4c03167 + 8c95ac3 commit 72e49cf
Showing 10 changed files with 111 additions and 85 deletions.
115 changes: 59 additions & 56 deletions docs/docs/modules/model_io/chat/structured_output.ipynb
@@ -15,7 +15,7 @@
"id": "6e3f0f72",
"metadata": {},
"source": [
"# [beta] Structured Output\n",
"# Structured Output\n",
"\n",
"It is often crucial to have LLMs return structured output. This is because oftentimes the outputs of the LLMs are used in downstream applications, where specific arguments are required. Having the LLM return structured output reliably is necessary for that.\n",
"\n",
@@ -39,21 +39,14 @@
},
{
"cell_type": "code",
"execution_count": 2,
"id": "08029f4e",
"metadata": {},
"outputs": [],
"source": [
"from langchain_core.pydantic_v1 import BaseModel, Field"
]
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 1,
"id": "070bf702",
"metadata": {},
"outputs": [],
"source": [
"from langchain_core.pydantic_v1 import BaseModel, Field\n",
"\n",
"\n",
"class Joke(BaseModel):\n",
" setup: str = Field(description=\"The setup of the joke\")\n",
" punchline: str = Field(description=\"The punchline to the joke\")"
@@ -93,7 +86,7 @@
},
{
"cell_type": "code",
"execution_count": 9,
"execution_count": 3,
"id": "6700994a",
"metadata": {},
"outputs": [],
@@ -104,17 +97,17 @@
},
{
"cell_type": "code",
"execution_count": 10,
"execution_count": 4,
"id": "c55a61b8",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"Joke(setup='Why was the cat sitting on the computer?', punchline='It wanted to keep an eye on the mouse!')"
"Joke(setup='Why was the cat sitting on the computer?', punchline='To keep an eye on the mouse!')"
]
},
"execution_count": 10,
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
@@ -135,7 +128,7 @@
},
{
"cell_type": "code",
"execution_count": 11,
"execution_count": 5,
"id": "df0370e3",
"metadata": {},
"outputs": [],
@@ -145,17 +138,17 @@
},
{
"cell_type": "code",
"execution_count": 14,
"execution_count": 6,
"id": "23844a26",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"Joke(setup=\"Why don't cats play poker in the jungle?\", punchline='Too many cheetahs!')"
"Joke(setup='Why was the cat sitting on the computer?', punchline='Because it wanted to keep an eye on the mouse!')"
]
},
"execution_count": 14,
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
@@ -180,7 +173,7 @@
},
{
"cell_type": "code",
"execution_count": 1,
"execution_count": 7,
"id": "ad45fdd8",
"metadata": {},
"outputs": [],
@@ -252,7 +245,7 @@
},
{
"cell_type": "code",
"execution_count": 12,
"execution_count": 11,
"id": "649f9632",
"metadata": {},
"outputs": [
@@ -262,7 +255,7 @@
"Joke(setup='Why did the dog sit in the shade?', punchline='To avoid getting burned.')"
]
},
"execution_count": 12,
"execution_count": 11,
"metadata": {},
"output_type": "execute_result"
}
@@ -287,7 +280,7 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 12,
"id": "bffd3fad",
"metadata": {},
"outputs": [],
@@ -297,7 +290,7 @@
},
{
"cell_type": "code",
"execution_count": 11,
"execution_count": 13,
"id": "c8bd7549",
"metadata": {},
"outputs": [],
@@ -308,10 +301,21 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 14,
"id": "17b15816",
"metadata": {},
"outputs": [],
"outputs": [
{
"data": {
"text/plain": [
"Joke(setup=\"Why don't cats play poker in the jungle?\", punchline='Too many cheetahs!')"
]
},
"execution_count": 14,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"structured_llm.invoke(\"Tell me a joke about cats\")"
]
@@ -328,7 +332,7 @@
},
{
"cell_type": "code",
"execution_count": 20,
"execution_count": 15,
"id": "9b9617e3",
"metadata": {},
"outputs": [],
@@ -340,7 +344,7 @@
},
{
"cell_type": "code",
"execution_count": 24,
"execution_count": 16,
"id": "90549664",
"metadata": {},
"outputs": [],
@@ -355,7 +359,7 @@
},
{
"cell_type": "code",
"execution_count": 25,
"execution_count": 17,
"id": "01da39be",
"metadata": {},
"outputs": [
@@ -365,7 +369,7 @@
"Joke(setup='Why did the cat sit on the computer?', punchline='To keep an eye on the mouse!')"
]
},
"execution_count": 25,
"execution_count": 17,
"metadata": {},
"output_type": "execute_result"
}
@@ -388,7 +392,7 @@
},
{
"cell_type": "code",
"execution_count": 11,
"execution_count": 18,
"id": "70511bc3",
"metadata": {},
"outputs": [],
@@ -408,27 +412,18 @@
},
{
"cell_type": "code",
"execution_count": 12,
"execution_count": 19,
"id": "be9fdf04",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/Users/reag/src/langchain/libs/core/langchain_core/_api/beta_decorator.py:87: LangChainBetaWarning: The function `with_structured_output` is in beta. It is actively being worked on, so the API may change.\n",
" warn_beta(\n"
]
}
],
"outputs": [],
"source": [
"model = ChatGroq()\n",
"structured_llm = model.with_structured_output(Joke)"
]
},
{
"cell_type": "code",
"execution_count": 7,
"execution_count": 20,
"id": "e13f4676",
"metadata": {},
"outputs": [
@@ -438,7 +433,7 @@
"Joke(setup=\"Why don't cats play poker in the jungle?\", punchline='Too many cheetahs!')"
]
},
"execution_count": 7,
"execution_count": 20,
"metadata": {},
"output_type": "execute_result"
}
@@ -459,7 +454,7 @@
},
{
"cell_type": "code",
"execution_count": 8,
"execution_count": 21,
"id": "86574fb8",
"metadata": {},
"outputs": [],
@@ -469,7 +464,7 @@
},
{
"cell_type": "code",
"execution_count": 9,
"execution_count": 22,
"id": "01dced9c",
"metadata": {},
"outputs": [
@@ -479,7 +474,7 @@
"Joke(setup=\"Why don't cats play poker in the jungle?\", punchline='Too many cheetahs!')"
]
},
"execution_count": 9,
"execution_count": 22,
"metadata": {},
"output_type": "execute_result"
}
@@ -504,7 +499,7 @@
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": 23,
"id": "12682237-6689-4408-88b1-3595feac447f",
"metadata": {},
"outputs": [
@@ -514,7 +509,7 @@
"Joke(setup='What do you call a cat that loves to bowl?', punchline='An alley cat!')"
]
},
"execution_count": 5,
"execution_count": 23,
"metadata": {},
"output_type": "execute_result"
}
@@ -541,17 +536,17 @@
},
{
"cell_type": "code",
"execution_count": 7,
"execution_count": 2,
"id": "24421189-02bf-4589-a91a-197584c4a696",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"Joke(setup='A cat-ch', punchline='What do you call a cat that loves to play fetch?')"
"Joke(setup='Why did the scarecrow win an award?', punchline='Why did the scarecrow win an award? Because he was outstanding in his field.')"
]
},
"execution_count": 7,
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
@@ -563,13 +558,21 @@
"structured_llm = llm.with_structured_output(Joke)\n",
"structured_llm.invoke(\"Tell me a joke about cats\")"
]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"id": "2630a2cb",
+"metadata": {},
+"outputs": [],
+"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "poetry-venv-2",
"display_name": ".venv",
"language": "python",
"name": "poetry-venv-2"
"name": "python3"
},
"language_info": {
"codemirror_mode": {
@@ -581,7 +584,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.1"
"version": "3.11.4"
}
},
"nbformat": 4,
3 changes: 1 addition & 2 deletions libs/core/langchain_core/language_models/base.py
@@ -19,7 +19,7 @@

from typing_extensions import TypeAlias

-from langchain_core._api import beta, deprecated
+from langchain_core._api import deprecated
from langchain_core.messages import (
AnyMessage,
BaseMessage,
@@ -201,7 +201,6 @@ async def agenerate_prompt(
prompt and additional model provider-specific output.
"""

-@beta()
def with_structured_output(
self, schema: Union[Dict, Type[BaseModel]], **kwargs: Any
) -> Runnable[LanguageModelInput, Union[Dict, BaseModel]]:
(Diffs for the remaining 8 changed files are not shown.)
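For reference, a minimal usage sketch of the `with_structured_output` API whose beta flag this commit removes, mirroring the notebook cells in the diff above. The `ChatOpenAI` class, the `gpt-3.5-turbo` model name, and the `OPENAI_API_KEY` requirement are illustrative assumptions and are not part of this commit:

# Sketch only: assumes the langchain-openai package is installed and OPENAI_API_KEY is set.
from langchain_core.pydantic_v1 import BaseModel, Field
from langchain_openai import ChatOpenAI


class Joke(BaseModel):
    setup: str = Field(description="The setup of the joke")
    punchline: str = Field(description="The punchline to the joke")


llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0)  # model choice is illustrative
# with_structured_output returns a Runnable that parses the model response into a Joke.
structured_llm = llm.with_structured_output(Joke)
joke = structured_llm.invoke("Tell me a joke about cats")
print(joke)

Run against a provider that implements `with_structured_output`, the invoke call returns a `Joke` instance rather than a raw message, matching the notebook outputs shown above.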
