From 6c0339ca4b4600f27355555443ce189ea7969510 Mon Sep 17 00:00:00 2001 From: Nuno Campos Date: Thu, 19 Sep 2024 08:46:44 -0700 Subject: [PATCH 01/17] ci: Enable mypy checks for checkpoint-sqlite lib --- libs/checkpoint-sqlite/Makefile | 3 +- .../langgraph/checkpoint/sqlite/__init__.py | 3 +- .../langgraph/checkpoint/sqlite/aio.py | 5 +- libs/checkpoint-sqlite/poetry.lock | 56 +++++++++---------- libs/checkpoint-sqlite/pyproject.toml | 10 ++++ .../checkpoint-sqlite/tests/test_aiosqlite.py | 14 +++-- libs/checkpoint-sqlite/tests/test_sqlite.py | 34 ++++++----- 7 files changed, 72 insertions(+), 53 deletions(-) diff --git a/libs/checkpoint-sqlite/Makefile b/libs/checkpoint-sqlite/Makefile index 94b1963d6b..ddf087ef51 100644 --- a/libs/checkpoint-sqlite/Makefile +++ b/libs/checkpoint-sqlite/Makefile @@ -27,7 +27,8 @@ lint lint_diff lint_package lint_tests: poetry run ruff check . [ "$(PYTHON_FILES)" = "" ] || poetry run ruff format $(PYTHON_FILES) --diff [ "$(PYTHON_FILES)" = "" ] || poetry run ruff check --select I $(PYTHON_FILES) - [ "$(PYTHON_FILES)" = "" ] || mkdir -p $(MYPY_CACHE) || poetry run mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE) + [ "$(PYTHON_FILES)" = "" ] || mkdir -p $(MYPY_CACHE) + [ "$(PYTHON_FILES)" = "" ] || poetry run mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE) format format_diff: poetry run ruff format $(PYTHON_FILES) diff --git a/libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/__init__.py b/libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/__init__.py index 25d823c86a..82aa2dc743 100644 --- a/libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/__init__.py +++ b/libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/__init__.py @@ -31,7 +31,7 @@ ) -class SqliteSaver(BaseCheckpointSaver): +class SqliteSaver(BaseCheckpointSaver[str]): """A checkpoint saver that stores checkpoints in a SQLite database. 
Note: @@ -487,6 +487,7 @@ async def aput( config: RunnableConfig, checkpoint: Checkpoint, metadata: CheckpointMetadata, + new_versions: ChannelVersions, ) -> RunnableConfig: """Save a checkpoint to the database asynchronously. diff --git a/libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/aio.py b/libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/aio.py index 73bc3ef026..fdf3e5e881 100644 --- a/libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/aio.py +++ b/libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/aio.py @@ -4,6 +4,7 @@ from typing import ( Any, AsyncIterator, + Callable, Dict, Iterator, List, @@ -30,10 +31,10 @@ from langgraph.checkpoint.serde.types import ChannelProtocol from langgraph.checkpoint.sqlite.utils import search_where -T = TypeVar("T", bound=callable) +T = TypeVar("T", bound=Callable) -class AsyncSqliteSaver(BaseCheckpointSaver): +class AsyncSqliteSaver(BaseCheckpointSaver[str]): """An asynchronous checkpoint saver that stores checkpoints in a SQLite database. This class provides an asynchronous interface for saving and retrieving checkpoints diff --git a/libs/checkpoint-sqlite/poetry.lock b/libs/checkpoint-sqlite/poetry.lock index 22956ac857..a786cbf823 100644 --- a/libs/checkpoint-sqlite/poetry.lock +++ b/libs/checkpoint-sqlite/poetry.lock @@ -442,38 +442,38 @@ files = [ [[package]] name = "mypy" -version = "1.11.0" +version = "1.11.2" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3824187c99b893f90c845bab405a585d1ced4ff55421fdf5c84cb7710995229"}, - {file = "mypy-1.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:96f8dbc2c85046c81bcddc246232d500ad729cb720da4e20fce3b542cab91287"}, - {file = "mypy-1.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a5d8d8dd8613a3e2be3eae829ee891b6b2de6302f24766ff06cb2875f5be9c6"}, - {file = 
"mypy-1.11.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:72596a79bbfb195fd41405cffa18210af3811beb91ff946dbcb7368240eed6be"}, - {file = "mypy-1.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:35ce88b8ed3a759634cb4eb646d002c4cef0a38f20565ee82b5023558eb90c00"}, - {file = "mypy-1.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:98790025861cb2c3db8c2f5ad10fc8c336ed2a55f4daf1b8b3f877826b6ff2eb"}, - {file = "mypy-1.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25bcfa75b9b5a5f8d67147a54ea97ed63a653995a82798221cca2a315c0238c1"}, - {file = "mypy-1.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bea2a0e71c2a375c9fa0ede3d98324214d67b3cbbfcbd55ac8f750f85a414e3"}, - {file = "mypy-1.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2b3d36baac48e40e3064d2901f2fbd2a2d6880ec6ce6358825c85031d7c0d4d"}, - {file = "mypy-1.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:d8e2e43977f0e09f149ea69fd0556623919f816764e26d74da0c8a7b48f3e18a"}, - {file = "mypy-1.11.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1d44c1e44a8be986b54b09f15f2c1a66368eb43861b4e82573026e04c48a9e20"}, - {file = "mypy-1.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cea3d0fb69637944dd321f41bc896e11d0fb0b0aa531d887a6da70f6e7473aba"}, - {file = "mypy-1.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a83ec98ae12d51c252be61521aa5731f5512231d0b738b4cb2498344f0b840cd"}, - {file = "mypy-1.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c7b73a856522417beb78e0fb6d33ef89474e7a622db2653bc1285af36e2e3e3d"}, - {file = "mypy-1.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:f2268d9fcd9686b61ab64f077be7ffbc6fbcdfb4103e5dd0cc5eaab53a8886c2"}, - {file = "mypy-1.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:940bfff7283c267ae6522ef926a7887305945f716a7704d3344d6d07f02df850"}, - {file = "mypy-1.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:14f9294528b5f5cf96c721f231c9f5b2733164e02c1c018ed1a0eff8a18005ac"}, - {file = "mypy-1.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7b54c27783991399046837df5c7c9d325d921394757d09dbcbf96aee4649fe9"}, - {file = "mypy-1.11.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:65f190a6349dec29c8d1a1cd4aa71284177aee5949e0502e6379b42873eddbe7"}, - {file = "mypy-1.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:dbe286303241fea8c2ea5466f6e0e6a046a135a7e7609167b07fd4e7baf151bf"}, - {file = "mypy-1.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:104e9c1620c2675420abd1f6c44bab7dd33cc85aea751c985006e83dcd001095"}, - {file = "mypy-1.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f006e955718ecd8d159cee9932b64fba8f86ee6f7728ca3ac66c3a54b0062abe"}, - {file = "mypy-1.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:becc9111ca572b04e7e77131bc708480cc88a911adf3d0239f974c034b78085c"}, - {file = "mypy-1.11.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6801319fe76c3f3a3833f2b5af7bd2c17bb93c00026a2a1b924e6762f5b19e13"}, - {file = "mypy-1.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:c1a184c64521dc549324ec6ef7cbaa6b351912be9cb5edb803c2808a0d7e85ac"}, - {file = "mypy-1.11.0-py3-none-any.whl", hash = "sha256:56913ec8c7638b0091ef4da6fcc9136896914a9d60d54670a75880c3e5b99ace"}, - {file = "mypy-1.11.0.tar.gz", hash = "sha256:93743608c7348772fdc717af4aeee1997293a1ad04bc0ea6efa15bf65385c538"}, + {file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"}, + {file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"}, + {file = "mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383"}, + 
{file = "mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8"}, + {file = "mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7"}, + {file = "mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385"}, + {file = "mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca"}, + {file = "mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104"}, + {file = "mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4"}, + {file = "mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"}, + {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"}, + {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"}, + {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"}, + {file = "mypy-1.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b"}, + {file = "mypy-1.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86"}, + {file = "mypy-1.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce"}, + {file = "mypy-1.11.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1"}, + {file = "mypy-1.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b"}, + {file = "mypy-1.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6"}, + {file = "mypy-1.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70"}, + {file = "mypy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d"}, + {file = "mypy-1.11.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d"}, + {file = "mypy-1.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24"}, + {file = "mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"}, + {file = "mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"}, ] [package.dependencies] diff --git a/libs/checkpoint-sqlite/pyproject.toml b/libs/checkpoint-sqlite/pyproject.toml index 1b9462085f..f37bfc72d4 100644 --- a/libs/checkpoint-sqlite/pyproject.toml +++ b/libs/checkpoint-sqlite/pyproject.toml @@ -53,3 +53,13 @@ now = true delay = 0.1 runner_args = ["--ff", "-v", "--tb", "short"] patterns = ["*.py"] + +[tool.mypy] +# https://mypy.readthedocs.io/en/stable/config_file.html +disallow_untyped_defs = "True" +explicit_package_bases = 
"True" +warn_no_return = "False" +warn_unused_ignores = "True" +warn_redundant_casts = "True" +allow_redefinition = "True" +disable_error_code = "typeddict-item, return-value" diff --git a/libs/checkpoint-sqlite/tests/test_aiosqlite.py b/libs/checkpoint-sqlite/tests/test_aiosqlite.py index 0380301725..89adb26a7b 100644 --- a/libs/checkpoint-sqlite/tests/test_aiosqlite.py +++ b/libs/checkpoint-sqlite/tests/test_aiosqlite.py @@ -1,3 +1,5 @@ +from typing import Any + import pytest from langchain_core.runnables import RunnableConfig @@ -12,7 +14,7 @@ class TestAsyncSqliteSaver: @pytest.fixture(autouse=True) - def setup(self): + def setup(self) -> None: # objects for test setup self.config_1: RunnableConfig = { "configurable": { @@ -55,20 +57,20 @@ def setup(self): } self.metadata_3: CheckpointMetadata = {} - async def test_asearch(self): + async def test_asearch(self) -> None: async with AsyncSqliteSaver.from_conn_string(":memory:") as saver: await saver.aput(self.config_1, self.chkpnt_1, self.metadata_1, {}) await saver.aput(self.config_2, self.chkpnt_2, self.metadata_2, {}) await saver.aput(self.config_3, self.chkpnt_3, self.metadata_3, {}) # call method / assertions - query_1: CheckpointMetadata = {"source": "input"} # search by 1 key - query_2: CheckpointMetadata = { + query_1 = {"source": "input"} # search by 1 key + query_2 = { "step": 1, "writes": {"foo": "bar"}, } # search by multiple keys - query_3: CheckpointMetadata = {} # search by no keys, return all checkpoints - query_4: CheckpointMetadata = {"source": "update", "step": 1} # no match + query_3: dict[str, Any] = {} # search by no keys, return all checkpoints + query_4 = {"source": "update", "step": 1} # no match search_results_1 = [c async for c in saver.alist(None, filter=query_1)] assert len(search_results_1) == 1 diff --git a/libs/checkpoint-sqlite/tests/test_sqlite.py b/libs/checkpoint-sqlite/tests/test_sqlite.py index 99b7a3728a..d88377f3a0 100644 --- a/libs/checkpoint-sqlite/tests/test_sqlite.py +++ 
b/libs/checkpoint-sqlite/tests/test_sqlite.py @@ -1,3 +1,5 @@ +from typing import Any, cast + import pytest from langchain_core.runnables import RunnableConfig @@ -13,7 +15,7 @@ class TestSqliteSaver: @pytest.fixture(autouse=True) - def setup(self): + def setup(self) -> None: # objects for test setup self.config_1: RunnableConfig = { "configurable": { @@ -56,7 +58,7 @@ def setup(self): } self.metadata_3: CheckpointMetadata = {} - def test_search(self): + def test_search(self) -> None: with SqliteSaver.from_conn_string(":memory:") as saver: # set up test # save checkpoints @@ -65,13 +67,13 @@ def test_search(self): saver.put(self.config_3, self.chkpnt_3, self.metadata_3, {}) # call method / assertions - query_1: CheckpointMetadata = {"source": "input"} # search by 1 key - query_2: CheckpointMetadata = { + query_1 = {"source": "input"} # search by 1 key + query_2 = { "step": 1, "writes": {"foo": "bar"}, } # search by multiple keys - query_3: CheckpointMetadata = {} # search by no keys, return all checkpoints - query_4: CheckpointMetadata = {"source": "update", "step": 1} # no match + query_3: dict[str, Any] = {} # search by no keys, return all checkpoints + query_4 = {"source": "update", "step": 1} # no match search_results_1 = list(saver.list(None, filter=query_1)) assert len(search_results_1) == 1 @@ -99,16 +101,18 @@ def test_search(self): # TODO: test before and limit params - def test_search_where(self): + def test_search_where(self) -> None: # call method / assertions expected_predicate_1 = "WHERE json_extract(CAST(metadata AS TEXT), '$.source') = ? AND json_extract(CAST(metadata AS TEXT), '$.step') = ? AND json_extract(CAST(metadata AS TEXT), '$.writes') = ? AND json_extract(CAST(metadata AS TEXT), '$.score') = ? AND checkpoint_id < ?" 
expected_param_values_1 = ["input", 2, "{}", 1, "1"] - assert search_where(None, self.metadata_1, self.config_1) == ( + assert search_where( + None, cast(dict[str, Any], self.metadata_1), self.config_1 + ) == ( expected_predicate_1, expected_param_values_1, ) - def test_metadata_predicate(self): + def test_metadata_predicate(self) -> None: # call method / assertions expected_predicate_1 = [ "json_extract(CAST(metadata AS TEXT), '$.source') = ?", @@ -122,26 +126,26 @@ def test_metadata_predicate(self): "json_extract(CAST(metadata AS TEXT), '$.writes') = ?", "json_extract(CAST(metadata AS TEXT), '$.score') IS ?", ] - expected_predicate_3 = [] + expected_predicate_3: list[str] = [] expected_param_values_1 = ["input", 2, "{}", 1] expected_param_values_2 = ["loop", 1, '{"foo":"bar"}', None] - expected_param_values_3 = [] + expected_param_values_3: list[Any] = [] - assert _metadata_predicate(self.metadata_1) == ( + assert _metadata_predicate(cast(dict[str, Any], self.metadata_1)) == ( expected_predicate_1, expected_param_values_1, ) - assert _metadata_predicate(self.metadata_2) == ( + assert _metadata_predicate(cast(dict[str, Any], self.metadata_2)) == ( expected_predicate_2, expected_param_values_2, ) - assert _metadata_predicate(self.metadata_3) == ( + assert _metadata_predicate(cast(dict[str, Any], self.metadata_3)) == ( expected_predicate_3, expected_param_values_3, ) - async def test_informative_async_errors(self): + async def test_informative_async_errors(self) -> None: with SqliteSaver.from_conn_string(":memory:") as saver: # call method / assertions with pytest.raises(NotImplementedError, match="AsyncSqliteSaver"): From 95304d658a964edb46c0197b96bfbbb5803588a8 Mon Sep 17 00:00:00 2001 From: Nuno Campos Date: Thu, 19 Sep 2024 08:51:15 -0700 Subject: [PATCH 02/17] Fix 3.9 --- libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/aio.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/aio.py 
b/libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/aio.py index fdf3e5e881..22a64913d2 100644 --- a/libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/aio.py +++ b/libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/aio.py @@ -184,7 +184,8 @@ def list( while True: try: yield asyncio.run_coroutine_threadsafe( - anext(aiter_), self.loop + anext(aiter_), # type: ignore[name-defined] + self.loop, ).result() except StopAsyncIteration: break From b529365f5b5c3795832a2acf4a89de26088bc31f Mon Sep 17 00:00:00 2001 From: Nuno Campos Date: Thu, 19 Sep 2024 09:19:26 -0700 Subject: [PATCH 03/17] Remove ignore for 3.9 --- libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/aio.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/aio.py b/libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/aio.py index 22a64913d2..39476ae8e6 100644 --- a/libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/aio.py +++ b/libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/aio.py @@ -184,7 +184,7 @@ def list( while True: try: yield asyncio.run_coroutine_threadsafe( - anext(aiter_), # type: ignore[name-defined] + anext(aiter_), self.loop, ).result() except StopAsyncIteration: From 55f8bb5227d377e3bf3d62e8ceb4a5cdb5961b5e Mon Sep 17 00:00:00 2001 From: Isaac Francisco <78627776+isahers1@users.noreply.github.com> Date: Thu, 19 Sep 2024 09:58:14 -0700 Subject: [PATCH 04/17] docs: replaying from subgraph node (#1759) * edits * extra comment --- .../docs/how-tos/subgraphs-manage-state.ipynb | 177 ++++++++++++++---- 1 file changed, 139 insertions(+), 38 deletions(-) diff --git a/docs/docs/how-tos/subgraphs-manage-state.ipynb b/docs/docs/how-tos/subgraphs-manage-state.ipynb index 464066d4a1..125fd53fbf 100644 --- a/docs/docs/how-tos/subgraphs-manage-state.ipynb +++ b/docs/docs/how-tos/subgraphs-manage-state.ipynb @@ -75,7 +75,7 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 1, "metadata": {}, "outputs": [], "source": 
[ @@ -126,7 +126,7 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -178,12 +178,12 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": 3, "metadata": {}, "outputs": [ { "data": { - "image/jpeg": "/9j/4AAQSkZJRgABAQAAAQABAAD/4gHYSUNDX1BST0ZJTEUAAQEAAAHIAAAAAAQwAABtbnRyUkdCIFhZWiAH4AABAAEAAAAAAABhY3NwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAA9tYAAQAAAADTLQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlkZXNjAAAA8AAAACRyWFlaAAABFAAAABRnWFlaAAABKAAAABRiWFlaAAABPAAAABR3dHB0AAABUAAAABRyVFJDAAABZAAAAChnVFJDAAABZAAAAChiVFJDAAABZAAAAChjcHJ0AAABjAAAADxtbHVjAAAAAAAAAAEAAAAMZW5VUwAAAAgAAAAcAHMAUgBHAEJYWVogAAAAAAAAb6IAADj1AAADkFhZWiAAAAAAAABimQAAt4UAABjaWFlaIAAAAAAAACSgAAAPhAAAts9YWVogAAAAAAAA9tYAAQAAAADTLXBhcmEAAAAAAAQAAAACZmYAAPKnAAANWQAAE9AAAApbAAAAAAAAAABtbHVjAAAAAAAAAAEAAAAMZW5VUwAAACAAAAAcAEcAbwBvAGcAbABlACAASQBuAGMALgAgADIAMAAxADb/2wBDAAMCAgMCAgMDAwMEAwMEBQgFBQQEBQoHBwYIDAoMDAsKCwsNDhIQDQ4RDgsLEBYQERMUFRUVDA8XGBYUGBIUFRT/2wBDAQMEBAUEBQkFBQkUDQsNFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBT/wAARCAHaAZADASIAAhEBAxEB/8QAHQABAAIDAQEBAQAAAAAAAAAAAAYHBAUIAwIBCf/EAFcQAAEEAQICAwgMCQoEBQQDAAEAAgMEBQYRBxITITEIFBUWIlWU0TU2QVFWdHWVsrTS0xcyU2Fxc5KT1CNCUlR2gZGhpLMJJDc4JTM0YrEmY3LBREaj/8QAGgEBAQEBAQEBAAAAAAAAAAAAAAECAwUEB//EADcRAQABAgEICAUDBQEBAAAAAAABAhEDBBIhQVFSkdETFDEzYXGhsTRTgsHCMoHwFSIjQuFDsv/aAAwDAQACEQMRAD8A/qmiIgIiICIiAiIgIiICIiAiIgIiICItNnMzPWnhx2NiZZytgFzBJv0UEY7ZZduvlHYGjrc7YDYczm6ppmubQdrbySMiYXvcGMHWXOOwC1ztUYZjiHZai0jtBss9a1segsbaeJ8yHagt7k9JkgJGN39xkW3I0fobv75J61sG6TwbGhrcNj2tHYBVZsP8l2tgx2zM/wA/mxdD98asL54oeks9aeNWF88UPSWetPFXC+Z6HozPUnirhfM9D0ZnqT/D4+i6Dxqwvnih6Sz1p41YXzxQ9JZ608VcL5noejM9SeKuF8z0PRmepP8AD4+hoPGrC+eKHpLPWnjVhfPFD0lnrTxVwvmeh6Mz1J4q4XzPQ9GZ6k/w+PoaHvVzeOvPDK1+rYef5sUzXH/IrNWktaI07eZyWMDjZm7bDnqRnb9HV1LBkxFzSbTZw77N+gwDpMRNJ0hDR2mB7vKDv/Y5xadthyb7pmYdWiidPjz/AJ5poSlFj0L8GTpw26sgmrzND2PHVuP0HrB/MesLIXCYmJtKCIigIiICIiAiI
gIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAoxofbItymbfs6a/bkjY7r3EEL3RRt/R5Ln7e/I731J1GOHbe9tPyUHbiWjcs1ngjbslc5p/vY5h/vX0U91VMdujhp+9l1JOiIvnRqdV6rxGhtO389nr8WMxFGPpbFqbflY3cAdm5JJIAABJJAHWVVWue6o0vprRWK1Jio72Zp3M9VwkjTjbkUkDpHs6RzozDz8zY3hzWFoLyQ1u5OymvGnE4jO8L8/QzuEyeocVNEwTY/Cxl9x/8o0tdCGkEvY4B42O/kdW/YefLkfEPU3CC3Yv4vUeoKWm9ZYvJYfwtjhXzeQxteeCWXnrgNLpGkSBpLWueG77bnrC8tT90LoTRuPxV3NZW3Qhydc267ZMTcMghH4z5IxCXxAb9ZkDdvdWXqfjpobSAwByWdbvn6z7mJFOtNbN6JgjLjEIWOLztLGQ0dZBJAIB2qLiVqXPa11fjJbGI4iV9CXcG91LH6fpWKVqbJdPIxzLpbyyQs6MRlgkcyM87i49Wy1PBLReepZHudTk9PZOo/T+mczQvOt05GClYDq0TWvcRs3nDJOQ7+W3ct3CC0NM90jhNScYctoaOjkoTXqUZ6tx2LuATPnbK9zZN4A2ANaxmzpHAOLnAdbSBb6o+GxkNDd09qO7b09mr2J1Zi8TVp5PG0X2a0E0ElhsjbD2g9CAJmO5nbDbfr6tleCAiIgjGntsXqrPYhmza5EWThYN9mdM6QSj++SN7z+eQqTqMY5vffELNWW79HVo1qZJGw6QullcN/d2a+L/ABUnX0Y/6onwj2hZERF86CIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgKOZOtNp/Ly5unA+zWsNa3I1YGl0p5RsyaNo/Gc0dTmjrc0Dl3LA18jRborzJ8FhGs9pjSXFXCV4sxi8VqnFNl6eKO5CyzE2QAt5gHAgOAc4e+NyFHB3NvChocBw30sA4bEDEwdY7f6P5gpVkdFYvIW33GMnx95/W+zj7D673nbbd/IQH9X9IHsHvBYviROOpuqc80e4OniP+Zj3XXNwp7Kreccv+Ghh6Z4LaA0Xl48rgNF4LC5KNrmst0MfFDK0OGzgHNaCNx1KaKL+JNj4VZ799D90niTY+FWe/fQ/dJ0eHv+klo2pQi597pXNah4TaKw2VwmqMq+1bz1DGyC26J7eimk5X7ARjytuwq2vEmx8Ks9++h+6To8Pf8ASS0bUgv0K2Vo2aVyCO1TsxuhmgmaHMkY4EOa4HqIIJBH51AI+5u4UxPa9nDjS7HtILXNxMAIPvjyVv8AxJsfCrPfvofuk8SbHwqz376H7pOjw9/0ktG1oIu5v4UwyMkj4caXZIwhzXNxMAII7CPJUtzWo20pvB9Bjb+akbvFTa/bkB7JJSN+jjHuuI3O2zQ5xDTg+Igl2FnUGdtR7bFhu9DuP0xBh/wK3OIwdDA1zBj6sdZjjzPLB5T3f0nOPW4/nJJS2FRpvne3P+dpoh56fwrcFjzCZTYsSyPnsWC3YyyvO7nbbnYb9QG52aGjsC2aIuNVU1TNU9qCIiyCIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIg537uP/AKXaZ/tdiP8AfXRC537uP/pdpn+12I/310QgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIg537uP/pdpn+12I/310Qud+7j/wCl2mf7XYj/AH10QgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIo5ntUWKl/wbiacd/INY2WYzzGKGBhJDeZwa4lx2OzQOwbkt3G+r8O6w/qGD9Lm+7X1U5PXVF9Eecwtk3RQjw7rD+oYP0
ub7tPDusP6hg/S5vu1rqte2OMFk3RQjw7rD+oYP0ub7tPDusP6hg/S5vu06rXtjjBZ/Jfu3+BR4HcccnDSrdDpvN75LFlrfIY1x/lIR1bDkfuAO0NLCe1dvf8M/g1Y4e8HLuq77XxXtXyxWI4XdXLUh52wu299xkkfv7rXMUu7o3gRe7pTT+HxudrYqjJi7zbUNyrZlMvIeqWHcx9TXtA6/cLWnr22Nq0L2qMXRrUqeJwFapWjbDDBFZmayNjQA1rR0fUAAAAnVa9scYLJ8ihHh3WH9Qwfpc33aeHdYf1DB+lzfdp1WvbHGCybooR4d1h/UMH6XN92nh3WH9Qwfpc33adVr2xxgsm6KEjUOrojzvxWHnaOsxxXpWOP6CYiN/wBO36R2qT4XM189jo7lbnaxxc10creV8b2ktcxw9wggg+51dRI61yxMGvDi89nhNyzPREXBBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQQOid9bas39yasP7u92esrcrS0Pbrq39fX+rsVZa5y2ss1x8xmjMFql+msPNpmfKWZYaMFiYSssxxtMZlY4A+WAeYFu3N1bkOHrYk2zfKn2hZXOi5nzvEHiFktOcTtc4jVEOLxuiL92nWwEuPhljyDKLQZnWJSOkDpSHhvRlgaOXqO6luluKOa1FnOK7223MoYrF467ioHRR71TNQdM7c8u7vK2Plb9mw2HUuOci6TIwSNYXND3AkNJ6yBtudv7x/ivpcn6LGpuIHFvhFnbGsb9HIZHhw3I231qlQiVxlpOmZs6EgNlc8E7bEco5S0bg/dzixxa1vktV5XRmPzclbE5a1jMbjq2PxkmPtGvIYz3zLNYZYBe5rtzGG8gcNg8jczOHVqKj9KZnXGuONmtcdLqaXA6f0+/EysxNenWlke6as2WaB8rmOPISHAlvlbu8lzQNjeC3E3BFy1guJXEGLRGm9e3dWC9Us6tODsYQ42uyF9R2Tkph3SNbz9K0AODg4N2ABaTu4++e4ncQbOjtfcTMdqODH4jS2VuVq2l30InxW69OXo5emmcOlbJJyvI5HAN8nqd1rOdA6Zt24KFWazZmjr1oWOklmlcGsjYBuXOJ6gAASSV80b1bKUq9ynYit07EbZobEDw+OVjhu1zXDqIIIII6iCucNban1hxZrcVfAupRpbTOmaL6Qptx8ViXIyupCeUzOkG7GcsjWNDNies7+4ovpbiXrnNUNOaR0jHnatPT2ksJNZsYChjrU009irzNEnfszA2MNYNgwFxPNu5uw3mcOvlh8OD/AMtnh7gy9jYfsn/5K0HCvJ6mzHD/AA9vWWMbiNTPjc27UaW7BzXuaHbNe9o52hr+UOO3Ntv1Lf8ADf8A9Pn/AJXn/wDhi6VacGr9mo7JS9EReYyIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiCBUPbrq39fX+rsWK/QePk4jQ60M1nwpFin4dsIc3oDC+ZkpcRy83PzMA35ttt+r3VsM3Wsad1DcyjKk96jkRGJBUZ0ksMrG8u/IOtzS0D8Xcgt7NjuI7hOMemdS3cnTxE17J28ZMa16CnjbEr6soJBjka1hLHAg9R2PUvYzZxYpqoi8Wj0iIamJnsR3U/c26c1TlsxPLlc9QxObsNtZfAULoioZGUBoLpWcheOYMaH8j2h23Xus3VfAXD6nzuVycOZzmAOXpR0MpVw1pkMN2KNrmx84MbnNLWvLQY3MO3UdwpZ451vNme+ZLf3SeOdbzZnvmS390p0Fe7JmzsQy13PeFdjtFwY/OZ/B3dKY0Yilk8bZjZZlq8kbTHNzRuY8HomOOzRsRuNl9zcAMVHqvI5rE6i1LpyLJ3BkMhisRkBDTt2Ormlc0sLmufyjm5HN5tuvdTDxzrebM98yW/uk8c63mzPfMlv7pOgr3ZM2djDx+h4NM6h1hqbF9LbzOfbBJLVtThk
HSQQ9FE1rmsLmBwA5ief3wPcWph1DxPdMwS6H00yMuAc5uqpnED3SB3gN/0bhSLxzrebM98yW/uk8c63mzPfMlv7pOhxN2UzZReHgRgIdBUNItuZI42lmRnI5TLH0xnF03OUnk25OkcRtsDy9W+/WtZnu5q01qHLZSWbJ52vhMtdGRyemq1xrMbesbtLnyM5C/yi1pc1r2tcRuQVO/HOt5sz3zJb+6TxzrebM98yW/uk6Cvdlc2diF6w7nnC6r1DmsvBntQ6bkzlYVctWwl1kMF9rWGNrpGujds8M8nmaWnYAHdeVvucMGH4SxiM/qLTOTxmJgwjsjhrkcU1yrC0CNs4dG5jiNiQ4NaRudiBsBOfHOt5sz3zJb+6TxzrebM98yW/ulOgr3ZM2djZ4fGtw2JpY9k9i0yrCyBs9uUyzSBrQOZ7z1ucdtyT1k9acN//T5/5Xn/APhiiuE4wae1RdyVHBG7mchjZBDcqVacgfXkO+zJS8NbGTsfxy3sKnmj8JNhMXKLXILlqxLbmbGd2sc924YDsN+VvK3fbr2391ZxYnDwpirRM2OyNLeoiLy2RERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEReFu7XoRNkszxV43PbGHyvDQXOIa1u590kgAe6Sg90UGr8W8XlOJGb0HjKt+xqHFURcnllpyx0WOcGGOI2C3YucJGnyQeoO91pCjceitecV+GEOO13mJdBZ2S8bErtC33MkFYb8sDpntJBIPlFu4JaCD17ALBn1xp6rqurpiXN0GajsxOnhxRsM75fG0bl4j35uXYHr226j7ygUGs9c8VeH2on6WwdvhzqCK6KmOsavqBwlia5nPOImOPaOkDd9wSGncg9VhN0lhRn488/F0pc+yu2oMtJXYbXRAuPJ0m3MG7ucdh1eUepbdBX0/BrE5/UujdVamlmzOrNN1BFDdhkkrQPnLdpJ+ga7lBJL9gSQA8jr2Cn0UMcIcI2NjDnFxDRtuSdyf0kr7RAREQEREBERAREQEREGi1lojC6+01lsBnKYt4vKw973ImSOidKz3i9hDh/cVD7HD3Vmk62gcToHUNTHabwj218pRzVd1ua9V3YCWz78zZGtD9urYl432DdjZqIIFjuKkj9XauxOZ0xltO4rAQC4NRX2sGPtwcu7nskDuot2cS0jqDdztuApVpzU2I1fh6+WwWTp5jGWATFcoztmifsdjs5pIOx6j7xWfZrRXK8texEyeCVhZJFI0Oa9pGxBB6iCPcUD1VwcoZfT2Fw2nsvk+H9PE3e/YItKvZTjeSXF8b2BvK6Nxe8luwG537QgsBFCYMjruvxOyMN3G4Z3DwURNVv155TkGTgN5o5IuXZwJLy3l7A3rJJDR+cNOMOnuKWlG5/Gm5jawtmhJXzNZ1OeKwOXeItftu7ymjySQSdt9wQAm6IiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiIC0WpdcYLR1nDV81k4cfNmLrMdQZLvvYsPBLY27DtOx7epb1Vtxcy1LGZ3h7Hb0W/VklrPxQwW2Rc/gd5a7a2Tyu5Q3s38nt7UH3X1TrPV2U11gqumbOj4qEJr4XU9+SKeK5YLXDpBXB36Np6Nw3PlAkHlI2WLHwKoar0npOjxMtN4gZ3T9l16LLzQmnz2C5xD+iify7NBADTuPJB23VoIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICi3Ejhjpni5peXTurMW3LYiR7ZTA6R8ZD278rg5hDgRuesFSlEEGn0fqmDiRhcpjNWMpaKq0TTt6Wdj2PEzwH9HMycnmYQXMBG2xDPfO61WJ40S4vTWqM7xE07Pw5xuEvd7d95CyyxDZhc8NimY6PfqdzMBG3UXbbnY7Wcq87oHKU8Lwf1FdyGj36+pxMi6TTscfSOuAzRgAN5Xb8pIf+KfxEFgse2VjXsIc1w3BHYQvpeNNwfUgcI+hBY0iM/wA3q7P7l7ICI
iAiIgIiICIiAiIgIiICIiAiIgIiICIiAoXxBq62s5XSLtI3KVWjFlo351tsAulo8p52R7tPl78vZt+lTRVdxnxWl8jqHhtJqLUNrB262o4ZsTBX32v2wx3LA/Zp8kjc+52dqC0UREBERAREQEREBERAREQEX45wYN3ENHvkrz75h/Ks/aCl4geqLy75h/Ks/aCd8w/lWftBLxtHqi8u+YfyrP2gnfMP5Vn7QS8bR6ovLvmH8qz9oJ3zD+VZ+0EvG0eqLy75h/Ks/aCd8w/lWftBLxtHqi8u+YfyrP2gnfMP5Vn7QS8bR6qH8XK2srnDvMQ8P7dOjq9zY+8LF8Awsd0jOfmBa4fic47D1kKV98w/lWftBVr3R9PSub4K6mpau1HNpnTsscIt5Wmd5YAJ4y0t2a49bg1vYepxS8Cy64kFeITEGXlHOR2F23WvRYlKesynA2Ow2SMRtDXlw3cNuor275h/Ks/aCXjaPVF5d8w/lWftBO+YfyrP2gl42j1ReXfMP5Vn7QTvmH8qz9oJeNo9UXl3zD+VZ+0E75h/Ks/aCXjaPVF5d8w/lWftBO+YfyrP2gl42j1ReXfMP5Vn7QTvmH8qz9oJeNo9UXl3zD+VZ+0F6dqXiR+oiKgiIgIiICIiAq24u5Wjjc7w9jt6Lfq2S1n4oYLTIucYeQtdtbJ5Xcob2b+T29qslQviDV1tZyukXaRuUqtGLLRvzrbYBdLR5Tzsj3afL35ezb9KCaIiICIiAiIgIiICIiAiIgi3EmCO1plkM0bJYZMhRa+N7Q5rgbUW4IPaFrPEvT3mLGehx+pbbiF7AQfKNH61EvtcsLBw8THrmumJ0U9sX11NXmIabxL095ixnocfqTxL095ixnocfqW5Rfd1XJ/lxwhLztabxL095ixnocfqTxL095ixnocfqW5ROq5P8uOEF52tN4l6e8xYz0OP1KK2s9w1o1dT2bUOHqwaambBlnz0AwVXuYyRu+7BzAtkYQW7g77A7ghWGudeKvA7I65431q3QF+gNT04bGqGDcCWWg53e7CfcMhni6vdbWPvLM5LgR2YdPCC87Vm5fJcOsBmLeLyVfDUrlPGHMWRPSa2OGoH8hlfJy8jRzAgAnc7HYEArT6Y1/wn1gcg3GNxjpaFV16xDbxL6sorjtmayWJrnxj+k0EdY6+sKk3cDNfZngvxEblo5cjq99yhjqUTZTA+/jcXJH0fLJuCx0/LPJvuPKkaer3JVisSzIYzUWpNE6Z4gV9d4rB2Y8TY1zYuvY2aVvXDEy1M4OdzRsJIHISG+UVnq2D8unhBedqdYDiXwn1XTzU2Hr0rs2Jpuv2ahwksVgwAH+UZE+EPkaSNgWNdudh2kKvn8d9M5PgNQ4gY/S2Jx1g28bFkIMpiZGV60dizHHIWSvjjEoaxziJGEt3AJ95YnDHB5CfjZgsxHjdezU7Wl7uMuZfV0M4/5x0kEvLyP/8AJZsx/Y1sZOwaXFYWLp5zIdzHprREukdQ1M/pvI4OvdgsYyURydFkIukfC8Atlja2MvLm7gNIJKnVsH5dPCC87V0aW1lwr1jQzVzGDD9BhWh+RNzHd6OqsLS4Pe2aNjgwtBIftykA7E7Kv+IXF/hDe4b6nt0NM0NZPx9Nl6XASYp9SW3X6ZjTLGJYQXxs5g4vaHNAHWRvutd3QPDHU2vdZcR6uCx1lxyGi8ZHBNs6GG3PBkZpn1xN1ND3RgN7dwJATsDuud+N3dD6AxWIz+Jx2D4kYziNBSmxlc6wvzWoqbLAY2w0sntSbB8PMA4M3/EI26iE4GDRN4opi3hBeXcPD3N8O+JdN8+nMXSuV4YopDMcO+KEh4PL0cj4mtk25XA8hPKRsditvquvofQ+At5vO0cPjcXVAMtiamzYbkNaAA0lziSAGgEkkAAkqne5G7p7S3FzFYvRGBw+bqXdP4Kv3zZt1omVP5NscRaxzJXHckktBaNw13Ztspx3TuHvZLhgy3iqty7mMRlaGVowU
6b7nNNDYY5vSQs8t8faXcgLgBuASNjqMmyeabxh08ILztYdXi5wctYPNZnahXxeFlr18hZt4OWAVpJ5OjjY9r4Q4HmLd+ryQ5pdsDuvfIcTeE2KwlbLW6cUNK1PJXgJ03ZMkrmNa5xbGIOdzAHtPOG8p36iqSyNeTiLpHiPYfXvXtdZnMads5PTrMFbqGtUhtwsjc2KdgkkaWRzOdJtt5J3DQ1XHx2tZ+PVmlK5ZqnxIkitHIu0bFK66+0Oj73jkdD/ACscRBlPMwgcwAcQFOrYFu7p4QXna2eT4gcJsVg9PZiUYubHagc5mLmp4p1nvpzWFxa1sUTnc2zXeSQDuOXt6lE9dcXtBYrSWA1Lp+pp+3iZ9S1sLlJbuNMb6cbi7pg6NzWPjlaACA9vujyTuFVmlZr/AAzxvBWPMac1DBZxerNQh+MNR9i66OSG5IxzACemHJM1xcwuB2dsSQt9a0rqLV2r59bM0tl8Xjs1r/AWa+PuU3NtMrVITHJbmiAJia5x7X7EBgJ23CnV8Cf/ADp4QXnavbQuW4d8Sqlyxp2njLraU3e9mKTGd7zQSbAhr4pY2vbuCCNx1jsUn8S9PeYsZ6HH6lBdCYW/S7oDipkJqNmDHXqWFFe1JC5sVh7I7Ik5HkbOLd2A7E7bt39xWmukZLgfLp4QXna03iXp7zFjPQ4/UniXp7zFjPQ4/UtyivVcn+XHCC87Wm8S9PeYsZ6HH6k8S9PeYsZ6HH6luUTquT/LjhBedqL6g0fgYcDkpI8JjmPbWlc1zakYIIYdiDsp9hfYah8Xj+iFGNSe13KfFZfoFSfC+w1D4vH9EL4K8LDwso/spiP7dUW1rMzMM1ERdmRERAREQEREBVdxnxWl8jqHhtJqLUNrB262o4ZsTBX32v2wx3LA/Zp8kjc+52dqtFVtxdytHG53h7Hb0W/VslrPxQwWmRc4w8ha7a2Tyu5Q3s38nt7UFkoiICIiAiIgIiICIiAiIgjPEL2Ag+UaP1qJfa+OIXsBB8o0frUS+0ybv8Typ/JrUIiL02RERARFHsxJNl89HhGWJalYVjasSV3lkrwXcrWNeDuwbhxJHX1AAjrW6Kc6VjSkKKOHQGJJJMmV3+WLn3q/Pwf4n8plfni596t2wt6eEc10JIijf4P8T+Uyvzxc+9T8H+J/KZX54ufeq2wt6eEczQki4D/4mfBHnZiuJ+MgG7eTG5cMb7nX0Ex/zjJP/wBsLtj8H+J/KZX54uferDy/CbTOoMbPj8pVuZKhOAJatzJ2pYpACCA5jpCD1gHrHuLFVGDVFs6eEczQ5+7jzuUdMYLgnjL2s9L43M53OEZKRuTqMmNeJzf5GMcw8nyPKI7QXuB7FeEnc+8N3Yu9j4dF4ejVvNYywMfWFV8ga8Pb5cXK4bOa1w2PUQCt6OHuIaAA/KADqAGYudX/APqn4P8AE/lMr88XPvUijBiLXnhHNNDH0Hwq0twzbdOncX3nNdLTZszWJbM83Lvyh8srnPIbudgTsNzt2qWKN/g/xP5TK/PFz71Pwf4n8plfni596tZuDH+08I5roZmX0jic9msHlr1Tp8hhJpLFCbpHt6F8kTonnYEB27HuGzgR17jr61uFG/wf4n8plfni596vpug8ZHuY58rG/wBx4y9skf4yEJm4W9PCOaaEiRabTF+xZjv07cvfFnHWjVfY2a0yjkZIxzg3qDuSRu+wA3BIDQQBuVzqpmmbSdgiIsoIiINdqT2u5T4rL9AqT4X2GofF4/ohRjUntdynxWX6BUnwvsNQ+Lx/RC8vG+I+n7tamaiIqyIiICIiAiIgKF8QautrOV0i7SNylVoxZaN+dbbALpaPKedke7T5e/L2bfpU0VXcZ8VpfI6h4bSai1DawdutqOGbEwV99r9sMdywP2afJI3PudnagtFERAREQEREBERAREQEREEZ4hewEHyjR+tRL7XxxC9gIPlGj9aiX2mTd/ieVP5NahERemyIiICjcf8A1HsfJ
Mf+89SRRuP/AKj2PkmP/eeu2F2VeTUa2dn9W4PSkbJM3msfh43sfI11+1HAHNZtzkF5HU3mG59zce+tfR4oaNydzH1KercFbt5FvPSggyUL32m7kbxtDt3jcEbt37Cq+4uY+rk+PXBSK5WhtRNny8gZMwPaHNpgtdsfdB6wfcVMZTTuKxfBPifep42pVuU+J7ZK88MDWPic3L1WNLSBuNmuc0bdgJC+Waphl1lltd6awGYqYnJ6hxWOytzbvajbuxRTz7nYcjHODndfV1BY2t+Iun9AUXS5nM4zHWZIpH1at67HXfac1u/JGHHdxPUOoHtXLuuLulsBV49YLWlFtvXGoLs7sHBLTdNZyNd9SNlFtVwaS7o5A4bNPkOBJ2X7FkMLozVWvavF1kL9S5LTmOr4izlKxsMsxNoBtiGu7lcObvnpS5o63FwOx9xnDozTnF/A3+GeltY6gyGO0nVztGvbZHkr8cbGOljD+jEj+QPI327Bvt2BeuQ4m163ETSGmateO/W1FQu348nDZBYxtfodg1oBDw/p+0OG3L7u/VyThY6+KfwoyurdTW9I6Xm4d46rjMuMfVtVmWgOaeFxsQTNie9hiIIDeYM23O2ysPSuFwPDTX3BOfG5XIZHS+QhzkNPJX6nQtFi5JXkihDGRMbE17mycjeVo6th7ikVTI6oUd/CPpLwxBifGjC+FZ5XwRUfCEPTySMeWPY1nNzFzXNc0gDcEEHrCkS41z+GoQ8A+MWVZSrsycXEO1YZdbE0TNkZlogxwftvuBuB19hI91aqmw6uy2u9NYDMVMTk9Q4rHZW5t3tRt3Yop59zsORjnBzuvq6gsbW/EXT+gKLpczmcZjrMkUj6tW9djrvtOa3fkjDju4nqHUD2rl3XF3S2Aq8esFrSi23rjUF2d2Dglpums5Gu+pGyi2q4NJd0cgcNmnyHAk7L9iyGF0ZqrXtXi6yF+pclpzHV8RZylY2GWYm0A2xDXdyuHN3z0pc0dbi4HY+5M4dGac4v4G/wz0trHUGQx2k6udo17bI8lfjjYx0sYf0YkfyB5G+3YN9uwKaU7lfIVYbVWeOzWmYHxzQvD2PaesFpHUQffC4awsdfFP4UZXVuprekdLzcO8dVxmXGPq2qzLQHNPC42IJmxPewxEEBvMGbbnbZdVcB9LYbSXDTHVdP5W7mMNYkmvVrN6BsDuWaR0hDYmxxiNnM5xa0MaAD1DbZKarje6U9m9YfKzPqVVSRRvSns3rD5WZ9SqqSL6sb9X7R7QsiIi4oIiINdqT2u5T4rL9AqT4X2GofF4/ohRjUntdynxWX6BUnwvsNQ+Lx/RC8vG+I+n7tamaiIqyIiICIiAiIgKtuLuVo43O8PY7ei36tktZ+KGC0yLnGHkLXbWyeV3KG9m/k9varJUL4g1dbWcrpF2kblKrRiy0b8622AXS0eU87I92ny9+Xs2/SgmiIiAiIgIiICIiAiIgIiIIzxC9gIPlGj9aiX2vjiF7AQfKNH61EvtMm7/E8qfya1CIi9NkREQFHGDbiLMT1c2Kj2/PtM/f/AOR/ipGtXmcK/ISQ2qlk0cjAC2KwG87S07bsezcczTsDtuCCAQQuuHMRMxOuFhtEUcNLV252zOF2/PiZv4lfneWr/POE+aZv4lXo6d+PXkW8UkRRvvLV/nnCfNM38SneWr/POE+aZv4lXo6d+PXkW8UkXlartt1poHukYyVhYXRPLHgEbbtc0gtPvEHcLQd5av8APOE+aZv4lQXjhxC1dwZ4WZ7WZnwuXGLZE/vIY+aHpeeVkf4/Tu2259+w9ik4dMac+PXkW8W7HA3BtIPhvWXV7+sMp/EKw1FqsOr7NWGbwvhG9IwP5fBMx23G/wDWV695av8APOE+aZv4lIwqY7K49eRbxSRFG+8tX+ecJ80zfxKd5av884T5pm/iVejp349eRbxSRFG+8tX+ecJ80zfxK+m0NVu3EmaxLWn+dDipA4fo3sEf4hTo6d+PXkW8X
5pRpGY1c7qIdlWkEHfsp1gf8wVI1hYjEw4akK8TpJSXGSSaY80kryd3Pceobk+8AB1AAAADNWcSqKqrx4ekWJERFzQREQa7UntdynxWX6BUnwvsNQ+Lx/RCjGpPa7lPisv0CpPhfYah8Xj+iF5eN8R9P3a1M1ERVkREQEREBERAVXcZ8VpfI6h4bSai1DawdutqOGbEwV99r9sMdywP2afJI3PudnarRVbcXcrRxud4ex29Fv1bJaz8UMFpkXOMPIWu2tk8ruUN7N/J7e1BZKIiAiIgIiICIiAiIgIiIIzxC9gIPlGj9aiX2vjiF7AQfKNH61EvtMm7/E8qfya1CIi9NkREQEREBERAREQFRXdx/wDavrz9TW+twq9VRXdx/wDavrz9TW+twrNX6ZF14r2Lp/qWfRCyli4r2Lp/qWfRCylQREVBERAREQEREBERBrtSe13KfFZfoFSfC+w1D4vH9EKMak9ruU+Ky/QKk+F9hqHxeP6IXl43xH0/drUzURFWRERAREQEREBQviDV1tZyukXaRuUqtGLLRvzrbYBdLR5Tzsj3afL35ezb9Kmiq7jPitL5HUPDaTUWobWDt1tRwzYmCvvtfthjuWB+zT5JG59zs7UFooiICIiAiIgIiICIiAiIgjPEL2Ag+UaP1qJfa+OIXsBB8o0frUS+0ybv8Typ/JrUIiL02RERAREQEREBERAVFd3H/wBq+vP1Nb63Cr1VFd3H/wBq+vP1Nb63Cs1fpkXXivYun+pZ9ELKWLivYun+pZ9ELKVBERUEREBERAREQEREGu1J7Xcp8Vl+gVJ8L7DUPi8f0QoxqT2u5T4rL9AqT4X2GofF4/oheXjfEfT92tTNREVZEREBERAREQFW3F3K0cbneHsdvRb9WyWs/FDBaZFzjDyFrtrZPK7lDezfye3tVkqF8QautrOV0i7SNylVoxZaN+dbbALpaPKedke7T5e/L2bfpQTRERAREQEREBERAREQEREEZ4hewEHyjR+tRL7XxxC9gIPlGj9aiX2mTd/ieVP5NahERemyIi01/U7K1yWpUoXMtah26aOk1m0RI5g1znua0OI2PLvvsWkgBwJ1TTNU2he1uUUb8a8j8EM3+8p/xCeNeR+CGb/eU/4hdehq8OMcyySIo3415H4IZv8AeU/4hPGvI/BDN/vKf8QnQ1eHGOZZJEUb8a8j8EM3+8p/xCeNeR+CGb/eU/4hOhq8OMcyySL+WHd6Q6/0BxXzOJtav1Fc0VqP/wARp0LGTnfU5S/mfD0RdyARyDyW7eS3o1/SvxryPwQzf7yn/EKj+644QZPui+HMOMx+lb9PUePsts463dlqNjaDs2WNzmzOcGub19QPlMYuWJk9dVOi3GOZZWf/AA4hxA1idQ6z1Rq7P5jAxxeCaFHKZGaxC+XmZJJK1r3EAsDWNDh+UePcK7gVX8KcKeEvDvA6Sxmj80auMrCIyc1IGWQ+VJIR3x2ueXOP6VLPGvI/BDN/vKf8QtU4FdMWvHGOZZJEUb8a8j8EM3+8p/xCeNeR+CGb/eU/4hb6Grw4xzLJIijfjXkfghm/3lP+ITxryPwQzf7yn/EJ0NXhxjmWSRFG/GvI/BDN/vKf8Qv0aryJPtQzQ/P0lP8AiFOhq8OMcyyRotZiM9Dl3SxdDPSuRAGSpbYGytB7HdRIc07EczSRuCN9wQNmuVVM0zaUERFBrtSe13KfFZfoFSfC+w1D4vH9EKMak9ruU+Ky/QKk+F9hqHxeP6IXl43xH0/drUzUReFm9Wp8vfFiKDm7OkeG7/4rURM9jL3RYPhzG+cKv75vrTw5jfOFX9831rWZVsWzORYPhzG+cKv75vrTw5jfOFX9831pmVbCzORYPhzG+cKv75vrTw5jfOFX9831pmVbCzOX8x+Lf/ELdqLVmnoM3wxv4bJ6OzvfrqkGpQ3nni5mOhlBqHdu++4G3WF/Srw5jfOFX9831r+bPd+9zfPmuOOnNQaQZDZj1nYjo
WhE8GOC8Nm9I8j8Rj49nE7dscjj2pmVbCzsfuVe6JyfdK6QyepbGjvFPGQWhUqPORNs23Bu8hA6GPla3dgB8rclw6uXru1QfhbpjTfCbh5gNI4i9UFHE1W12v6RjTK/tfI4A/jPeXPP53FSrw5jfOFX9831pmVbCzORYPhzG+cKv75vrTw5jfOFX9831pmVbCzORYPhzG+cKv75vrTw5jfOFX9831pmVbCzORYPhzG+cKv75vrX63NY97g1t+s5xOwAmaSf80zKtiM1ERYBERBGeIXsBB8o0frUS+18cQvYCD5Ro/Wol9pk3f4nlT+TWoREXpsijug3dLhLMhHlvyV/mPv7W5Wj/JoH9ykSjfD/ANr83ylkfrsy7R3VXnH3XUkiIoFxO17kdAZrRErYasmn8rmG4jJyytd0sBmY4V3scHBoHTBjHcwO/ONtlwnQieoudcT3UtvVuGsRYXG14dQW9U18Ph4bjHujs0Jnl8d4tDgSw14rD+ojrj/uWjPdYalyktjOYLBDJ6djvvrQ4iDT+VmvW4GTGJ8zLbIjWDjyueGdY2HKXh24Gc+B1Mi504q90bm+G+urcEVvTmXwdG7Tr28XTqXZchDHM6NhdLYYDXheC8uEb9uZoGx3cAttpe7re33UuvKbc3jnacp0MVI+hPVne9sTxa2EJ6YMjkLmkvdyEOHKNhy7lnRewvVFyTwI19rPQPDLh1Ys1cHY0Xls9JhGxR9N4QjdPdnYyYvJ6PbpOosDd+XY82+4GyyndW6mtXMxlNO4NmUwWOvzU4cVHgcrYuZFkMpjkfHbiiNaMkteWtPN2AOc07gTPiw6kRVBpnX+uNZcXNZYKlHgqGmdNX6UL57VaaS3ZjmqxTPjAEjWseOd3lkEdbRyHYk2+txNwRc5YPjxrybTmA1hkaWnfFi9qU6fnp1o5xbaw3n022A8vLBs8NJj5Xbjchw35R7ZzjzrZmF1hrjEYrByaE0vkrFKenZ6bwjeirSclmeOQOEcexD+Vrmu3DOsjcLOdA6HRcz8WeJWsOI2i+LkWkYMFDo7AYy5jrtvKCZ9m7N3n0kwg5HBsYYyVoBeHczvcA61jHj/AJjDR4LSOmYq0MuJ07jbN67dweSyjXyTQbxwsZSYej8lvMXvd/OAa12ztmdA6hRRThXrG7r/AIf4fPZLD2MBftxu6fHWo3sfE9r3Md1Pa13KS3mbzNBLXA7KVrXaI7l3GPW2ni3qL4LbHHftbtE7b/Fo/wAFIlG817dtN/qrf0WKSLvifpo8vvKzqERFxRrtSe13KfFZfoFSfC+w1D4vH9EKMak9ruU+Ky/QKk+F9hqHxeP6IXl43xH0/drU+stdONxdy2G8xrwvl5T7vK0nb/JQDCaZxuTxlXI5OjWyeTtwsmsW7cLZHvc5oJAJHU0b7Bo2AGwAU21V7WMx8Tm+gVHtNe1zFfFIvoBenk8zThTVTom52Q8vE7AeZMb6JH6k8TsB5kxvokfqURxXdDcPs3l6WNp6gEtm5bdQheadhkLrLXOaYDM6MRtl3admFwceogEEb+2Y49aCwOqn6dv6higycc8daYdBK6CCaTbkjlnawxRvO42a9wPWOrrXTpqt71S8pR4nYDzJjfRI/UnidgPMmN9Ej9Sj7+NGj261s6RZk5p9RVbEVWxRrULEpgfIxj2GRzIy1jC17fLcQzrI33BAj9buqOF9tlF8Opi+K+zmpzeDrQjsuA3McbzFyvlHZ0TSX79XLv1J01W96l5WB4nYDzJjfRI/UnidgPMmN9Ej9SjbOOWiZNG2NUtzLnYavaNGZ4pz9PFYBA6F0HR9K2TcjySzfrHV1qO6z4+Y48KJ9YaJuVMwIctSxkjbcMrOidLcghlZJEeR7HtZLuA7bYlpII7XTV708S8rG8TsB5kxvokfqTxOwHmTG+iR+pbdVte4k5OrxX1NphkFQ0MZpeDNwyFjuldO+awwtcebYs2hb1AA7k9fZs6WuP8AaS8pl4nYDzJjf
RI/UnidgPMmN9Ej9Sr/AEvx9w0XDLQGf1fdixuW1RjI7kVSlUnlE0vRMfIyGNge8kc42buXEdm+xWTiO6V4b521QgpakErrtkUo3upWWRssFxaIJXujDYZSRsI5C1x3Gw6wnTVb08S8pv4nYDzJjfRI/UnidgPMmN9Ej9S26rzU3dA6B0fmspictnTXv4p0bb8bKViUVA+NkjHyuZGWsjLXtPSOIbvuN9wQE4tcdtU8S8pZ4nYDzJjfRI/UnidgPMmN9Ej9Sj2tuNmiuHlutVzmbbBZsVzbZDWrzWniAHbpnCFjyyPf+e7ZvUevqX1qnjTovR2Pw13J52LoMyzpMcKUUluS2zlDueNkLXucwNIJcBsNxuetOmr3vUvLf+J2A8yY30SP1L8OjsAQQcHjSCCCDUj6x7o7FWfDzuisRleEmP1pqm/UoRZDJXaVRtCCaQ2RFamjiEULeeR7jHGHENB/nHYDssbRmusFxCw7snp/IsyNNkroJCGOjfFK3bmjkY8BzHDcbtcAesdXWkY1c/7TxLy2GiZTQzmawcbiaNWKvarRk79C2UytMbf/AGgwkge5zEDZoaBMVCtL/wDUPUfybj/9y2pqvkyqLYv7R7Qs9oiIvkZRniF7AQfKNH61EvtfHEL2Ag+UaP1qJfaZN3+J5U/k1qERF6bIo3w/9r83ylkfrsykijfD/wBr83ylkfrsy7R3VXnHtK6kkUR4s8OaXFrh3m9J3531YMlCGNsxt5nwSNcHxyNHVuWva13aOxS5Fw7UVwzgTputxH0jq6pD3rPprESYepWY3yOj2DYjvv1dGwztHV19Mesbdep0nwU1DoDLOq6a14/HaKdkn5EYCXFRTyRc8pllgjsF3kxOcXdRYXAOOzgetW6imbAobVfczZTPU9X4ihrl+I03qLKHNy0hiY5Z2XC6OTrmLxzRdJEx3Jyh2w5ecBTK3wty8HFbx0w2p2YsXqtWnmsdJjhOy6yB73MMbzIDC7aR7d/LGxHVuFY6JmwKgx3c/d4cNNGaS8PdJ4uZ+DOd+d57d8dHcfZ6Lk6Tyd+bl5tzttvsexfWC4Kah0Vn7vitruTEaTu5R2VmwcuKisPjfJJ0k0cM7nDkje7m8kscW8x5SD1q3UTNgQbBaKk0LqHiDqeKSfNS5+xDfbjK0LGSsMNSOERMc+QNe53RbguLAC7Ynq3XjV4nZqxahifww1fXZI8NM0r8ZyRgnbmdy3Sdh2nYE/mKn6K2FQQdz90HC3F6O8Pc3eOoW57v3vP8fbIuu9FydJ1fjcnNzHs5tvcWt1B3NlzLjUeEq6znx2g9R5B+SyeBZQY+dz5Hh88cVrnBjjkcCSORxHM7YjdXiimbApDVXc7Ze7Y1tX0vrc6ZwGsIZBk8TJiWW2tnfAIHywvMjSzmY1vM0h2+3UW9W2XJwGzGCzNPNaN1odNZZ2Iq4jKGfFtuV77K7eWKXojI0xyNBcAeYjY7EH3bkRM2Bh4epZoYmlWuXX5K3DCyOa7JG2N07w0B0ha0BrS47nYDYb9SzERaEbzXt203+qt/RYpIo3mvbtpv9Vb+ixSRdsT9NHl95WdQiIuKNdqT2u5T4rL9AqT4X2GofF4/ohRjUntdynxWX6BUnwvsNQ+Lx/RC8vG+I+n7tanhqr2sZj4nN9AqPaa9rmK+KRfQCkmooH2tP5OGJpfLJVlY1o7SSwgBRnS0rJtMYiRjuZj6cLmuHugsC9LB7mfP7Jqc2UdGZ6Puc9M484LItytfXbLzqvekgnjiGefJ0xZtzBvRnn5ttuU777da12pcHqHG8OuKHC5mjM1lM9qfOX7GPykNIvx80VuYSMsS2fxIzE07ODtnbxjYHcLrhFM1FR8HdNX8HxP4t2rtOxGy5kccIL08DmNuMjx0DC5jiNngPDwdiQDzDt3VaaH0ZnqnCbucqc+CyMNvFZ5k2QgkpyNkps72ujnlaRvGN3sG7thu4e+F1OiZo5b1HQ1pgL/ESbF0dQUcTktc1
JMhZwlN7rz8YcfC2WWoOUl28rGML4wXAc+3WOqM+JOfm0JxfixmmNVvZPnsLncdXzTZZbt6tC+s6UtfK4l8m1aQ9G53OByAgEgLspFM0Y2LvtymNqXWwz1m2YWTCG1EYpYw5oPK9h62uG+xB6wdwqfz2Fy0PdE5acYi7NjNQaPZi6+ThhL60FmGazI5kzh/5e7ZW8pPUT1DrUxzHA3h3qHKWcllND6fyGQsvMk9qzjYpJJHHtLnFu5P6VKMBp7F6UxFfFYbH1sVjK/MIadOJsUUe7i48rWgAbkk/pJWrTI5u4X4TOvd3OsNzTWYxkumamRxmUFyk9ra8jKLYg4uG7eR7h5D99ne4vHO6Mz0vBXidTiwWRfftcRO/wCrXZTkMs0Hhaq/po27buZyNc7nHVygnfYFdTopmgufsrpbLT3e6X/8IuyMzFCGOh/yzyLpGIEZbF1fynl7t2bv5XV2qwbHc+cMbU8k83D/AE1LNI4vfI/FwlznE7kk8vWSVOMfj6uJoVqNKvHUp1omwwV4WhrIo2gBrWgdQAAAAHvK2v2jmnRUuY4OaktZvL6P1Fn6+f0rhYK5xOOfalr2KsD2S1JmDyoi4va7dwDdy7cghaXhVpPUvADOaLyuo9L5fNVZdIjDv8A1HX5MTZ78lsGFzI9yGFkrGc7dxvCAerYrrhFM0cVYrQGfrac4balyum9XtxONyWomZHF4V9mplqbbd174J2xwvbI9uzQHBpPkvBAcF0jwTwOGxen8jkMPiNRYnwrddZsjVM08l6d7WNiErune+QAsjYAHEHZo6grERIpsNbpf/qHqP5Nx/wDuW1NVC9KsLteaklb1sFKjCT7zw6y4j/CRp/vU0XLKu9/an/5hqe0REXyMozxC9gIPlGj9aiX2vjiF7AQfKNH61EvtMm7/ABPKn8mtQiIvTZFG+H/tfm+Usj9dmUkUb4f+1+b5SyP12Zdo7qrzj2ldTIk0Pp2eaWWXB4+aWV7pJJJazHuc5xJJJI3O5JX54haa+D2L9Dj9S3qL5s2nYjReIWmvg9i/Q4/UniFpr4PYv0OP1LeombTsGi8QtNfB7F+hx+pPELTXwexfocfqW9RM2nYNF4haa+D2L9Dj9SeIWmvg9i/Q4/Ut6iZtOwaLxC018HsX6HH6k8QtNfB7F+hx+pb1Ezadg0XiFpr4PYv0OP1J4haa+D2L9Dj9S3M88VWCSeeRkMMbS98kjg1rWgbkknsAHur5pXa+SpwW6k8VqrYjbLDPC8PZIxw3a5rh1EEEEEdu6ZtOwajxC018HsX6HH6k8QtNfB7F+hx+pbt72xMc97gxjRu5zjsAPfKwsHqDF6nxzMhhslTy1B7nNZaozsmicWktcA5pIJBBB6+ohM2nYMHxC018HsX6HH6lmYzTuKwsj34/G1KL3jlc6tA2MuHvHYda2KK5tMahG817dtN/qrf0WKSKN5r27ab/AFVv6LFJF9GJ+mjy+8rOoREXFGu1J7Xcp8Vl+gVJ8L7DUPi8f0QoxqT2u5T4rL9AqT4X2GofF4/oheXjfEfT92tTNUVucPq8tmSWhlsnhWyuL3wUZIzEXHrJDJGPDdz1nl2BJJ23JJlSLrRiV4c3plImyHfg8sfC7Pf6T7hPweWPhdnv9J9wpii7daxdscI5LeUO/B5Y+F2e/wBJ9wn4PLHwuz3+k+4UxROtYu2OEci8od+Dyx8Ls9/pPuE/B5Y+F2e/0n3CmKJ1rF2xwjkXlDvweWPhdnv9J9wqo435HP8ADfUfDGhi9T5KWHU2pYcPdNpldzmQvY4kx7RDZ27R1ncfmXRC527q7278BP7dVv8AbkTrWLtjhHIvK2PweWPhdnv9J9wn4PLHwuz3+k+4UxROtYu2OEci8od+Dyx8Ls9/pPuE/B5Y+F2e/wBJ9wpiidaxdscI5F5Q78Hlj4XZ7/SfcJ+Dyx8Ls9/pPuFMUTrWLtjhHIvKHfg8sfC7Pf6T7hfreHswPlarzr29haTWG/8Ae
IAf8CpgidaxdvpHJLywMNhamBpd7VGFrS4yPe9xc+R57Xucesk++fzDsACz0RfNNU1Tee1BERQRniF7AQfKNH61EvtfHEL2Ag+UaP1qJfaZN3+J5U/k1qERF6bIo3w/9r83ylkfrsykijfD8bafm+Ush9dmXaO6q849pXUkiKI3tEZm3dsTxcQNR0opZHPZWgr40xxAncMaX03OIHYOZxOw6yT1r8paIzNW5BNLxA1Hdijka99aevjRHMAdyxxZTa7Y9h5XA7HqIPWuF0UbwtympMpV0VicTqafHSapsag1DksjFSqvsSVWWmxwP2MXRte4Swnm5Nj5Xkn3PjRucz/FvPcHvCOrr1K3Fisnn5rFKKrGLYbOyrVeWPhc3mfFO8kAADtaGnYjo/H6TweJ717xw2Pp961TRr971WM6GsSCYWbDyY92tPIOrqHV1LGdoDS73YVztN4hzsIA3Fk0YiaAGwAg8n+S25R+Lt2D3ljNkUfw84icRuJOq62Yrus4vT7cxainqzjH94Nx8D5InDcPdbdZLmtJ3EbGkncEbF3xR1lr25wX05qdurphqDWGRpVsbE/H1u9qcFm617X8gjDnubV5gd3bEDfYO8pXrU0LpqhmL+XraexVbK5Bjo7l6KlE2ey1xBc2R4bzPBIG4JPYssabxLamMqjF0hVxjmOoQCuzkqFjCxhibtswtaS0cu2wJA6lc2doqy3ry/oPiDmcfl9SzZHBae0k7NXZsjHWifLLLZkERLo42ABjK8jAGgA845uZ2xVaVeJmurGhNQ5bJ6/dhbmn9KYu69jMfTcbOVnryzuic10RJa7mrsDGbOJ25SDvv0hqHh9pbV1uO1ndNYjNWo4nQMnyFCKd7Y3b8zA57SQ07ncdh3Kjum+CenMNqnN6iv4zF5fNXskL9W9PjYxNQjbBDDHDE88xAa2AHdpb1k9QUmJEL0O3L6w4+ZTK5DO3sZPg8DjKs+EhFcwmxYY+exGQ6IvDRywHdrg7cEc3KOUTHiXxKzWicpVq4zTfhqKaHpHS9Hk3ch5iOX/lMfZb7m/lOaf/AG7bEy86SwZ1GNQHDY854RdAMr3qzvoR/wBDpdubl/NvstqtWkVbx31LPS7nXVOR5W0r17DGtG1xe0RT2WiFg/lGscNnyj8ZrT1dbQepVbqDidqng5FqnTmGy7dZwYmhh6lS3ZrV4m4y5asmsIHlhjYWiPkkDZHAjqD37O5l0vl8LjtQUjTylCrkqjnNea9yFssZc0hzTyuBG4IBB9whYFHQmmsZp6bA09PYqpg5g4S4yClEytJv280Qbynf3dwpMTMjnnXbte5LhBqnHZ7MZWvDnsjjMHi23hjhkHd8zsgtMk70a6FsZbLu0AmQBrt3dYW01xrjUOApcQ6+L1bLhINA0K0FOPvGpJNmLslcTRtlb0QAjcXxQtZC2Ml3Ps7qAFs3+EuCsnS9anXiw2D0/eOSgw+Mrx160lgA9G5wa3qDC57uVu27iCd9tju7uidO5LP1s7bwOMtZuqA2DJTU432YQOwMkLeZvaewqZsioRr3Vjn8UNWWcpYjwujYx0Gn6VeDls2IcdHZsxvldG55aZJQwcpaQWHrI6lmcC81xE1JloMnqO1ZfhbGJE08NsY4Ri298bozTFV8j+gDBMCZ5C5xLCAPKVw1MPQx7bTatKvWbbldPYEMTWCaR2wc9+w8px2G5PWdlhab0bgNGwTwYDB43Bwzv6WWPG1I67ZH/wBJwYBufzlW03GHmvbtpv8AVW/osUkUbzXt203+qt/RYpIvpxP00eX3lZ1CIi4o12pPa7lPisv0CpPhfYah8Xj+iFGNSe13KfFZfoFSfC+w1D4vH9ELy8b4j6fu1qZqIirIiIgIiICIiAudu6u9u/AT+3Vb/bkXRK527q7278BP7dVv9uRB0SiIgIiICIiAiIgIiICIiCM8QvYCD5Ro/Wol9r44hewEHyjR+tRL7TJu/wATyp/JrUIiL02RRsU8ppyay
3GU4cjRsTPsCF8/QyQyPcXSbHlIc1ziXdZBBc7tGwEkRbprzdFrwt0b8N6j+DTPnFn2U8N6j+DTPnFn2VJEXTpKdyPXmt/BG/Deo/g0z5xZ9lPDeo/g0z5xZ9lSRE6Sncj15l/BG/Deo/g0z5xZ9lPDeo/g0z5xZ9lSRE6Sncj15l/BG/Deo/g0z5xZ9lRriTxducKdE5PVWd029uJxzWOnNe6yR+zpGsGzeXr8p4Vkqie7k/7Vtefqav1uFZnFpiJnMj15l/BZ8GoNQ2IY5WaaZyPaHDfIM7CN/wCivvw3qP4NM+cWfZW6xXsXT/Us+iFlK9JTuR68y/gjfhvUfwaZ84s+ynhvUfwaZ84s+ypIivSU7kevMv4I34b1H8GmfOLPsp4b1H8GmfOLPsqSInSU7kevMv4I34b1H8GmfOLPsr9Gb1Fv7WmfOLPsqRop0lO5HrzS/g0WNxt67lY8tlY4a80MToa1SCQyCNry0vc55A5nHkaNgNgB7u63qIudVU1SCIiyjXak9ruU+Ky/QKk+F9hqHxeP6IUY1J7Xcp8Vl+gVJ8L7DUPi8f0QvLxviPp+7WpmoiKsiIiAiIgIiIC527q7278BP7dVv9uRdErnburvbvwE/t1W/wBuRB0SiIgIiICIiAiIgIiICIiCM8QvYCD5Ro/Wol9r917WsWdPtFavJaljuVJjFCN3lrLEb3bD9DSVqPDlnzBl/R2/aXHDxqMHHrz5teKfyateG2Ranw5Z8wZf0dv2k8OWfMGX9Hb9pfZ13A3vctLbItT4cs+YMv6O37SeHLPmDL+jt+0nXcDe9y0tsi1Phyz5gy/o7ftJ4cs+YMv6O37SddwN73LS2yLU+HLPmDL+jt+0nhyz5gy/o7ftJ13A3vctLbItT4cs+YMv6O37SeHLPmDL+jt+0nXcDe9y0tsqJ7uT/tW15+pq/W4VcHhyz5gy/o7ftKju7ays9juXdcxvxGSrNdFW3lnhDWN/5qHtPMVJyvAq0RV2+aWlfuK9i6f6ln0QspR/F5uyMZU/8By5/kWdYrt/oj/3LK8OWfMGX9Hb9pXrmBve62ltkWp8OWfMGX9Hb9pPDlnzBl/R2/aTruBve5aW2Ranw5Z8wZf0dv2k8OWfMGX9Hb9pOu4G97lpbZFqfDlnzBl/R2/aTw5Z8wZf0dv2k67gb3uWltkWp8OWfMGX9Hb9pPDlnzBl/R2/aTruBve5aW2Ranw5Z8wZf0dv2k8OWfMGX9Hb9pOu4G97lpeupPa7lPisv0CpPhfYah8Xj+iFB8zkrl7EXq0WAy3STQPjbvA0DctIH85TvFRPgxdOORpa9kLGuafcIaN18dWLRjY+dRN4t9ydEMpERdmRERAREQEREBc7d1d7d+An9uq3+3IuiVzt3V3t34Cf26rf7ciDolERAREQEREBERAREQEREBERAREQEREBERAREQEREBUD3en/AGl8QP1NX65Ar+VA93p/2l8QP1NX65AgvPE+xVL9Sz6IWWsTE+xVL9Sz6IWWgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiIC5u7sLJVMHqHghlMjahx+Mpa2rS2btqQRwwMEcm7nvcQGj85IXSKx79CrlKc1S7Wit1JmlksE7A9j2ntDmnqI/MUGPgtQ4rVOMhyWFydPL46YbxW6E7J4Xj/2vaSD/AHFbBUDn+460zUyk2b4cZjK8J9QSHmdNpybalM73BLUd/JvaP6LeULWniJx14OeTrTRtbilgI+3O6LHRZBrf6UlJ/wCO780RAHvoOkEVX8L+6W4dcXZu9MDqKBmZaeWTC5EGrejcO1phk2c4j3S3cfnVoICIiAiIgIiICIiAiIgIiICIiAiIgIiICLwu3a+NqS2rc8VWtC0vkmmeGMY0dpLj1AfnKozP92DpizlJsJw7xOV4r6hjPK6vpqHmpwn3DNcd/JMaf6QLkF9LmP8A4gGvNO4/udtWaYsZujHqTJx1mUsR07TanIswuPLEDzEcr
Sd9tlsfwf8AHLjB5WsdYVuFuAk7cHow9LkXN/oyXnjZjh78QIKn/DHub+HnCOY3MBp2A5l5Lpc1kHG1flcfxnGaTdw390N2H5kFhYppbjKYI2IhYCD/APiFlIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiCvOKPc/cP+MkO2q9M08hbAAjyLGmG5Ft2cs7Nnjbt2326usFVaeDfGbg7/KcNeITdZ4SPrbpnXu8r2tH82K4zZ46uprXbNHVvuulUQc54vuy8dpnIQYji3pLM8KstI7o2WchEbWMnd/8AbtxgtP8AeAB76vzBagxeqMXDksNkqmWx0w3it0Z2zRPH5nNJBXrlcRRz2PnoZOlXyNGdvJLVtxNlikHvOa4EEfpXPGsO4/0ppN+Q1Xw91ZkeDGQgjfZtW8Za/wDC+RgLnPsV5HdHyNA3I3a0AbkILwdxC07Hr5uiX5SJmqXY8ZVmNeHB76xkdH0jSRyu8pjgQCSNtyACCpEv4Q6u446s1Jxil4kDMyRapbNDJFlK8Daz3GGNkTHmNpLWlzI28zQS07uHWCv619yT3TmP7pbQD7pibS1NihHDmKTAeRr3B3JLGf6D+R5A33aWuB32DiF5oiwbmcx2OfyW8hVqv/ozTNYf8yrETVNogZyLUeN2C89Y70uP1p43YLz1jvS4/WunRYm7PBbS26LUeN2C89Y70uP1p43YLz1jvS4/WnRYm7PAtLbotR43YLz1jvS4/Wv0auwRPs1jvSo/Wp0WJuzwLS2yLyrWobkQkrzRzxnq543Bw/xC9ViYt2o8rduChVms2Zo69aFhklmlcGsY0DcucT1AAAkkrSaD17geJ2laWpdM5BuUwlwytr22xvjEhjkdE/YPAd1PY4b7de243BBX86e777s86yt3OGuhch/9PwOMWYylZ/8A66QHYwRuHbED+Mf556h5I3fE+4Kfq7ifYzPCnFa+yehsA1suobTsJVjFq11wV3sFku5oTt0RHKCD5W/uKD+kXE3j7w+4Oxjxu1VQxNhwBZS5jNaeD2FsEYdIR+fl2/Oq3/DXxV4s/wAnwx4du0/iZPxdUa9LqrC3+lFTZvK8EdbXEgdm461L+Fnct8N+EVgX8Np+O5nS7nfnMs427z3+6/pX78hPu8gaPzK2UHPlLuR4NX24slxd1jmOJ95jhI3HWH95YiF3uclSIgEjs3cTvsNwrywOncVpXFw43C42piMdCNo6lGBsMTB+ZrQAFsUQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQFjZLG1MxjrVC/VhvUbUT4LFWzGJIpo3Atcx7SCHNIJBB6iCslEFSY7uSuDWLAEPDbTzwHBwFim2cbj/wDPdTnT2i9J8OKNx+CwOH0xULA+ycdTiqMLWAkF/I0AhoLu3s3KkSrHjvl3Q4XGYhjuUZKzvMP6UUQ5yP739GD743B7V9OTYE5TjU4Ua/5PosIfrLiRkdXzPipWLGMwn8yOImKewP6UjgeZoPuMG3V+NvvythkeIoxDZtOAfn6Mbnr3O593rWWi/ScHBoyeiKMKLR/O1m8sfwbU/qsH7sepPBtT+qwfux6lkKGaj4o08DmLONr4fMZ6zTjbLd8EVWytqNcN285c5u7iBuGt5nbbHbrG/SrEiiL1Sl5SvwbU/qsH7sepPBtT+qwfux6lB7HGnEPvVamJxmX1FNaxkWYh8F12uD6z3PaHbve0Ags62nY9YA3O4Hrb4y4NmH0/ex9fIZubPRmWhj8dAH2ZGNAL3FrnNDQ3cAlxGx6utc+sYe8XlM/BtT+qwfux6k8G1P6rB+7HqUJ4N6vyGtcNnbuRM7XQ5y7VghswtilghY/Zkb2tH4zR1Hfc++Sp8umHidJTFcdkl5eVOsMXaFrHPfjLbRsJ6Tuid277HbqcPzOBHb1K5+GvEp+oJRiMuWNy7WF8U7G8rLTB29X82Qdpb2EeU3q5msp1eNq3PjWNyFQht
yi4WoHEkDnZ1gHb3D1g/mJHWviyzI6MsommqP7tU/zU1E30SvHP8B+G2qrNizl9AaZyNuw90stmxiYHTPe48znF/JzEk9ZO+5WJpLuc+GWg9VRak0/onEYnOQxvjiuVoOV0QeOV3IOxpLd27gA7OcN9nOBn9K3HfpwWoiTFPG2RhPvEbj/5XuvzeYtokERFAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBVNx7oO205kdiY4Z5ariB2dIwOBPvDeLb9JHvq2VrNSafq6pwdvF3QTBYaBzN6nMcCHNe387XAOH5wF9mR48ZNj0Ys9ke06JWHNii9jiroqpYlgn1hgIZ4nFkkcmTga5jgdiCC7cEH3FM85hrulsn4NyjAyx2xTMBEVlv8ASjJ/zb2t93cbE68065JJgjJPulgX6PFXSUxVhzFpYmLIx+F3Qo//ALpp751g+2qyzPD1s2uc7qODQ+I4k4fULYLdS26xXDqz2xNYWl0nU6Jwa1wczfbr6irz7xrD/wDjxfsBerWhjQ1oAA6gB7i5V4M4sRGJPZsjneBX2ndF2cLxQGRrYqDG4Numq+PjjrOYI4pm2JXuia0bHYB46+UA7qC6N0HrDh9BovNQYAZa7SxlvE5DFNuQxyxMks9MyWN7ncjvxQCOYdRH91+IpOTUTaYmYtzidngKp4d5mrw8x2aZrS9i9K5DKZq7koad7JwBxhkk3aQebrHuf/pSr8Lmhdt/HTT23yrB9tSmSvFMQZI2PI7C5oK+O8q/9Xi/YC3Th10RFNM6I2x/0a7A6xwGqnTNwucxuXdAAZRQtxzmMHfbm5Sdt9j2+8Vm5Vzm42zyNdJI6MtYxg3c5xGzQB75JAXryQVGPfyxwsA3c7YNG35yrA4XaCmzWQq53IwOixlZ/S1IZWlr7Eo25Zdj2Rt7W/0js4bNAL842UU5LhTiYs9nrOxYjTdb2Ex/gnDUKO4d3tXjh3Hu8rQP/wBLNRF+ZTM1TeVERFAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERBhZbDUM9SfTyNOG9VcQTFOwPbuOwjfsI9w9oUItcCdNzO/5eXJ0G+4yC65wH6Ok5lYiL6cLKcbA0YVcx+63lWf4AsJ51zXpLPsJ+ALCedc16Sz7CsxF9P9Ryv5kl5Vn+ALCedc16Sz7CfgCwnnXNeks+wrMRP6jlfzJLyrP8AWE865r0ln2EHAPB7+yuaP5u+WfYVmIp/Ucr+ZJeUMwvCHTGFsR2BRffsRkFkmQmdPykHcENceUEHr3A3/AD9QUzRF8mJjYmNOdiVTM+Je4iIuKCIiAiIgIiICIiAiIgIiICIiAiIgIiICIiD/2Q==", + "image/jpeg": 
"/9j/4AAQSkZJRgABAQAAAQABAAD/4gHYSUNDX1BST0ZJTEUAAQEAAAHIAAAAAAQwAABtbnRyUkdCIFhZWiAH4AABAAEAAAAAAABhY3NwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAA9tYAAQAAAADTLQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlkZXNjAAAA8AAAACRyWFlaAAABFAAAABRnWFlaAAABKAAAABRiWFlaAAABPAAAABR3dHB0AAABUAAAABRyVFJDAAABZAAAAChnVFJDAAABZAAAAChiVFJDAAABZAAAAChjcHJ0AAABjAAAADxtbHVjAAAAAAAAAAEAAAAMZW5VUwAAAAgAAAAcAHMAUgBHAEJYWVogAAAAAAAAb6IAADj1AAADkFhZWiAAAAAAAABimQAAt4UAABjaWFlaIAAAAAAAACSgAAAPhAAAts9YWVogAAAAAAAA9tYAAQAAAADTLXBhcmEAAAAAAAQAAAACZmYAAPKnAAANWQAAE9AAAApbAAAAAAAAAABtbHVjAAAAAAAAAAEAAAAMZW5VUwAAACAAAAAcAEcAbwBvAGcAbABlACAASQBuAGMALgAgADIAMAAxADb/2wBDAAMCAgMCAgMDAwMEAwMEBQgFBQQEBQoHBwYIDAoMDAsKCwsNDhIQDQ4RDgsLEBYQERMUFRUVDA8XGBYUGBIUFRT/2wBDAQMEBAUEBQkFBQkUDQsNFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBT/wAARCAIMAa4DASIAAhEBAxEB/8QAHQABAQEAAwEBAQEAAAAAAAAAAAYFBAcIAwIBCf/EAFsQAAEDAwEDAw4JBwoCBwkAAAEAAgMEBQYRBxIhEzFBFBUWFyJRVVZ1lJXR0tMIMjVUYZOys7QjNDY3QnHUJDNDUlNyc3SBkiWRRWNklqGxwQkYJkRXYoPC4f/EABsBAQACAwEBAAAAAAAAAAAAAAABAwIEBQYH/8QAPBEBAAEBAwYLBwQCAgMAAAAAAAECAxESBDFRUnGRBRMUFSE0QVOSsbIzQ2Gh0dLiI2LB8HLhgcIiMvH/2gAMAwEAAhEDEQA/AP8AVNERAREQEREBERAREQEREBERAREQEREBEWFdrtV1Nw60WjdFUGh9VWSDejpGHm4ftSO/ZbzAaudw3WvzppmuboS2Z6iKmjMk0jIoxzue4NA/1KzzlNlB0N3oPOWetcCDZ/ZS8TV9KL3WaaOqrqBUPPHXgCN1n7mNaPoXOGK2QDQWeg0/yrPUrbrGM8zJ0P72VWXwxQecs9adlVl8MUHnLPWnYrZfA9B5sz1J2K2XwPQebM9Sfo/H5J6Dsqsvhig85Z607KrL4YoPOWetOxWy+B6DzZnqTsVsvgeg82Z6k/R+PyOg7KrL4YoPOWetOyqy+GKDzlnrTsVsvgeg82Z6k7FbL4HoPNmepP0fj8jocmju1DcCRS1lPUkdEMrX/wDkVy1g1mB45Xj8vYre53RI2mY17fpa4AEH6QVxJWVmFjl2z1N0sYP5WKd3KT0bf67HfGkYOctcXOA1IJ03UwUV9FE9OifqXROZUovzHIyaNskbg9jgHNc06gg8xBX6WuxEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERB86idlLTyzSHSONpe494AalYGz+B3YxS18wHVl0HXCpcNeL5ACBx/qt3WD6GBbVzo+uFtq6XXTl4Xxa97eBH/qsrBKrqzDLK8gtkbSRxSNcNC2Rjd17SPoc0j/RbEexm7TH8p7G8iItdCdzraDj+zWxi75JcBbqF0zKaNwifLJLK86Mjjjja573HQ6NaCeB7y63zL4U2M4xXbP3U0Nfc7TlVRVRmsp7ZWPkp2QxylxELIHPc/lIwws
0DgN5xGjSVt/CFtNou2EUgu9qyW4CnuUFTSVOJU7prhbqhgcWVUbW6nueIOjXfH0LSCV1Ga7aDLj2x/N8tx69Xipx7Ia81sVHbP8Aib6GWnqaenqZaSPUteQ+MvY0aje10HEAO5cs+ELgGC3mC136+utlXLHFMTNQ1PJQskOkZmlEZZDr/wBY5q5eVbcMMwvKRjd0uszb+6ljrW26kt9TVTPge97GyNbFG7Vu9G/Uj4ugLtAQT5129Q5dtHO0S21dmz+qprjZIhiNrs8E1NQv5Wk1lNa5pa3lWzFwdFO74rQGtcTx7N2ZWW4zbdxkNTZ7hS0k2z20UrKuto5It2bqiofJAS4DdkaCwuYe6HDUIKHZb8IK1bTM2y/GoqGvoqyyXSWhhfJQVQinjjiic6R0roWxxu3pHARl28Q0OGocCu110fsnqLhhe1/aRj1zx69NZkGQOvVvvEVC+S3PgdRQNIdUAbrHh0Dm7rtCSW6a6rvBAREQTGDEUMN1sjdBFaKw00DW66NgdGyWJo16GtkDB9DFTqYxFvVF5ymvbryU9xEMZI01EUMcbv3922Qf6KnWxb+0mdl+27p+aZziIi10CIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiApeUOw241VUI3PsVbIZp+TaXOo5z8aQgf0TtNXEfEdq46tc5zKhFZRXhviemJSlco2eYZtPhoKnIMfs2UQwNc6klrqWOpaxr9N4sLgdA7dbzc+gWCPg27KA0t7W+LbpIJHWmDQno/Z+kqlqcCtb55J6N1XZ5pCS91sqXwNcSdSTGDuEk8dS3Xn48Svl2EVHRlN+H/AOaH3SswWU5qrtsfS86H4xDZRhez+snq8ZxSz2Cqnj5KWa20UcD3s113SWgajUA6KrUv2E1HjVfvrofdJ2E1HjVfvrofdJxdnr/KS6NKoRdV5hbrtY8nwWgpcpvBp7zdpqKr5WWHe5NtBVTjc/JjjvwR9/hvcOkVnYTUeNV++uh90nF2ev8AKS6NLXyDHbXldnqbTerdTXW2VIAmo6yJssUgBDgHNcCDoQD+8BRLPg3bKYySzZxi7SQRqLTAOBGhHxe8Vv8AYTUeNV++uh90nYTUeNV++uh90nF2ev8AKS6NLJtGwLZpYLpS3K24DjlBcKWRs0FVTWyFkkTwdQ5rg3UEHpC3rtf5KmpktNkfHPdNd2ab40VC3pfL/wDdoe5j53HTmbvObxzgUNRwrbzeq+IjQxSVzomu/eItzX9x4HpW9brZSWikZS0VNFSU7dSI4WBrdTznh0npPSn6dHTE4p2dH+zoh+LNaaexWqlt9KHCCnYGNLzvOd33OPS4nUk9JJK5qIqJmapvnOgREUAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIg6+2kFozrZTvEgnIKjd0HOetNw+kdGvf/AHdI7BXX+0jXs62U6Fv6QVGu8Br8k1/Nrx1/dx5+jVdgICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIg692lAHPNk+rmt0yGp0BHF3/CLhwHDn6ejmK7CXXu0rTs82Takg9kNTpo3Xj1ouH/ACXYSAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIimrzlFXHcZbdZ6OGsqoA01EtTMYoYS7Qhuoa4ueWne3QBoNNSNRrZRZ1Wk3UpuvUqKI6+5h8wsfnc3u06+5h8wsfnc3u1sclr0xvguW6KI6+5h8wsfnc3u06+5h8wsfnc3u05LXpjfBct0UR19zD5hY/O5vdp19zD5hY/O5vdpyWvTG+C55Q+Ez8Nys2Tba7Rj902dyzSY1cnXGmqGXUbtwhlo54I3NBgO4dKjU
6E6Fjm6nivZ+IXqpyTE7Ldqy3vtNXX0MFVNQSP33Uz3xtc6Iu0GpaSW66DXTmC6A2xfB/m21Z5hWVXu32ZtZjc/KGJlRI5tbEDvthk1j+K143uH9Zw6dR2/19zD5hY/O5vdpyWvTG+C5boojr7mHzCx+dze7Tr7mHzCx+dze7TktemN8Fy3RRHX3MPmFj87m92nX3MPmFj87m92nJa9Mb4LluiiOvuYfMLH53N7tfoZXkFtY6oudqoZaKMb0pt9TI+ZjelzWOjG/oNSQCDoOAcSAo5Ladl2+C5aovnT1EVXTxTwyNlhlaHsew6hzSNQQe9ovotTMgREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAUDYjrf8ALz09dRx7/wDJadXygbD8vZd5WH4WnW9k2avZ/MMozS20RFaxEREBFwYL5b6q71dqhrYJblSRRzVFIyQGSFkhcI3ObzgO3H6a8+6VzkBERAREQEWPl2XWnBMcrb9fKvqG1UTQ+eo5N8m4C4NHcsBceLgOAK2FAIQCCCNQehEUj5bLnF+zTE3HiTaaQn6lqqFLbK/1Y4j5IpPuWqpWplHtq9s+bKrPIiItdiIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAoGw/L2XeVh+Fp1fKBsPy9l3lYfhadb2S5q9n8wyjNLbXRVps9fmnwkNoNNXZLfoLNY6ey1FLaqG5S08Ble2ZznODHAlp5PQs13Xbx3gdG6d6rHt2I2m05JeL/AEtJyV2u7II62o5R55VsIcIhuk7rd0Pd8UDXXjrwWcxexeWbfkmSR7Hsf2xyZde5cnr79BHNYjWk258Utw6ldQspfiAsjJ7oDf3mE7y2cozO+0vwfNv9xZfbjDcbZkF0goattZI2akY10XJsifrqwDe4BpGmvDnXctPsDwGly8ZPHjsTbs2rdXtJnmMDKk887acv5Jsp1J3wze1Ouuq4+V/B02eZvW3epvOP9VOu2jq6JlbURQ1Dw0NEjomSNZygAGkm7vDQcVhhkQOK4hSv+FVtHuprby6po7VaKyOlhutQyKZzm1YLHxB+69g3RuscC1pJIAJOsPsnZtd2m2HGtoFvuIbU3GsZV1Ek+VzOojTiYiam629ScmzRgcwaSb4cA4vJ1XpG97KMWyHMLblNbbHG/wBvayOCtgqpoHFjH77WSCN7WytDtSGvDhxPDiVmW/YHgVpy7slorA2luvVLq0GKqnbTiocCHSinD+SDzqdXBmvHnU4ZEVsIs9fkmU5zkN3yW/V7rZmF0oqC3vuUoo4IGndDDCHbsgG+SA/UN0buhuh17dzOeWlw++zQyPhmjoJ3skjcWua4RuIII5iD0rgQ4Y3F7PeosPbR2i5XOtluUk1fHLVwuqZXAyvczlWu7oA9y17QDpoOg49PYNoddKKa+3/Fa2zTAxVlNSWKqgllicNHNZIa1waSCeO6dO8pjoi4dLYFXX7GqTYHkRyfIL5V5dRiK70lzuL54Kgvtj6hhZG7uY3NfG0BzQC4E7xcSSs2wZBkVJsx2dbV35leq/IchvtFBW2iSsc63TRVVSYn0sVL8RhjaTo5o3tYySTxXpKm2Y41SUeJUsVt3IMUDRZmcvKepd2B0A4l2r/ybnN7ve59efise27A8CtGWNySkx6KK6sqH1cRM8zoIZ3678scBeYmPOp1c1gPE8Vjhkea9p1LctpuwradnV2yi9x1VLd6m309hpa0xUFJBT1rYWxSwDuZHuDd9zncdXDTTRUlyftL2u5xtGdYqyeiOP3Z9otzIMrltbKPchjcyaSlZSytqA9zy/WRxBHcgN3dT3Hk3watm+YXa6XK6Y2Jam6ObJWiGtqYIqh400kfHHI1hfwHd7u99K5uXbA8DzrIJb3ebA2ouc0bYqiaGqnpxVMb8VszY3tbMAOAEgdw4cyYZFdjLbqzG7U2+OgfexSRCudS68k
ajcHKFmoB3d7e04DhotJAAAAOYIrR8dlf6scR8kUn3LVUqW2V/qxxHyRSfctVStTKPbV7Z82VWeRERa7EREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBQNh+Xsu8rD8LTq+XWbrtJLfeuOLUb8qs15dJLPU0M8YippodyFxEj3Brg7dA3WnUOjeeIJ3dzJqoiaqZnPH8wyhUosTrtfvEy6+dUXv067X7xMuvnVF79beD90eKPqXNtFiddr94mXXzqi9+nXa/eJl186ovfpg/dHij6lzbRYnXa/eJl186ovfp12v3iZdfOqL36YP3R4o+pc20WJ12v3iZdfOqL36ddr94mXXzqi9+mD90eKPqXNtFiddr94mXXzqi9+nXa/eJl186ovfpg/dHij6lzbRYnXa/eJl186ovfp12v3iZdfOqL36YP3R4o+pc20WJ12v3iZdfOqL364l0umTSQ08FPitfRuqp2Uz6uWWnlFI15A5YsjlLnBuvMP9dBqQwXZ6o3x9UXKDZX+rHEfJFJ9y1VKxsRqrTPYqemstZHW0Vv8A5BvRyB5jfF3DmP7z2luhB0Oq2VzbWqK7SquO2ZJm+bxERVIEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQERTEOTVGVUjH4yIpaCsoZJqXIJQJaUSa7sekQe18zTxfqC1paBuv7oEBtXm9W/HbXU3K611PbbdTM5Serq5WxRRN77nOIAH71mVt3u9c6sprNbhDNTVMUJq7qDHTyMIDpHxBur5N0HTiGNLjwdwK+9BjNPT1jq+qlmuFxkghhlmnkcYzyfEOZFruRku7oloBJ01J3W6bKDAbiEFTWNqrpV1F4lhrnV1G2pLWx0hLQ1rGMYGhwaASC/ecHOcdeYDdYxsbQ1oDWgaAAaABfpEBERAREQEREBERAREQEREBERBm3HHLfdK6irZ6f8AllE90lPURuLHsLm7juLSNQW6Ag6g6DhwGmbTQ5BYG00LpuyOgp6N/K1ExZHcJpmnVmga1kLt4dyf5sBwB5j3NIiDJtGUW68TspY52wXM0kVbJa6ghlXBFJqGuki13mjVrm682rXDXUFayz73YqLIbfPR1sbzFNGYi+CZ8ErQSD3EsZa9h1a0hzSCC0EHUBZ1Qb5Y5Kyoi0v9C51O2noGMbFUwN+LM4yufuy9Dw0hpGjxvO1aGhQouBar5QXs1goaqOpdRVL6SpY091DM3QljgeIOha4a87XNcNQ4E89AREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQFxLrcetVvlqupqisczQNp6SPflkcSA1oHMNSRxcQ0DUuIAJH7uFwprTQVNdWzx0tHTROmnnmcGsjY0Euc4nmAAJJ+hZFgt7q2oF9uENHJcZGSQ0s1MHnkqNz95jAX8znARufo1upa0EHk2lB+orDUXGuirLzMyZ9HWSz0MFI6SOJjC3cZyo3tJXgbztSN0F/AatDluoiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiIOBcbJTXSroKqXlmVFDKZYZIZnx8S0tc1waQHtIPFrtRqAdN5rSOLY6+4Rvhtt4YJbkyn5V9dSU7o6Sfuy07oLnFjtAxxY4nTfAa5+64jZXDu1no77ROpK+nbUQF7JA12oLXscHse0ji1zXNa5rgQQQCCCEHMRYuO3eWqfVW641VDLe6I61MVCXACJ7n8jIWO4t32t5tXAOa8Bzt3VbSAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiCbziuZDTWq39cobbPdbjBSRcvS9UCoAJmlgDeYF8MMw3ncG8/OAFSKcyO5ijyTFKTrw23uq6yZvUZpeVNeG00ruTD9PyW7oJN7p5Pd/aVGgIiICIiAiIgIiICIiAiIgIuDdr3b7FAya41tPQwvfuNfUSBgc7QnQE9OgJ/0WX2xMX8YbZ50z1qivK
LGznDXXET8ZhN0yokU72xMX8YbZ50z1p2xMX8YbZ50z1rDleT95TvhOGdCiRTvbExfxhtnnTPWnbExfxhtnnTPWnK8n7ynfBhnQokU72xMX8YbZ50z1p2xMX8YbZ50z1pyvJ+8p3wYZ0KJFO9sTF/GG2edM9adsTF/GG2edM9acryfvKd8GGdCiRTvbExfxhtnnTPWnbExfxhtnnTPWnK8n7ynfBhnQokU72xMX8YbZ50z1p2xMX8YbZ50z1pyvJ+8p3wYZ0PzeaxlrzTHnyXOGkjuTai3toX0u8+snDOXjLZQNWcnHDUndPBweekBUigMk2m45T3HG2xZpa7e2W4lkkJ3JurG9Tznkd7X8lxAfv/APV7v7a3e2Ji/jDbPOmetOVZP3kb4MM6FEine2Ji/jDbPOmetO2Ji/jDbPOmetOV5P3lO+DDOhRIp3tiYv4w2zzpnrTtiYv4w2zzpnrTleT95TvgwzoUSKd7YmL+MNs86Z607YmL+MNs86Z605Xk/eU74MM6FEine2Ji/jDbPOmetO2Ji/jDbPOmetOV5P3lO+DDOhRIp3tiYv4w2zzpnrTtiYv4w2zzpnrTleT95TvgwzoUSKd7YmL+MNs86Z607YmL+MNs86Z605Xk/eU74MM6FEixKLN8euNVFTUt7oKiolO6yKKoa5zj3gAeK21dRaUWsX0VROzpRMTGcREViBERAREQEREBERBPX+6dRZNi9L13FB1ZUTs6h6m5Tq7dge/c39Dye7u7+uo13dOnRUKncguTqTJ8Wphem28VdROw0BpuUNw3aeR24H6fk93TlNenc06VRICIiAiIgIiICIiAiIgIiIJvKvlrFPKMn4SoWysbKvlrFPKMn4SoWyvMZR1i02x6YX05oERFSkREQEREGZbsos14oLfXUF3oK2iuLiyiqaapZJHVOAc4iNwJDzox50brwa7vFfGszTHrdDdZqu+2ylitLmsuMk1ZGxtE5zWuaJiT+TJa5pAdpqHA9K8i12zDNXZXluP2aOamtWza4z5ZjLYnFrK6pqntqYqXTpYwCuhI/wC0DvceDkOPXJuzvZnm2RVNdZLJf8krMpyWrpaKKsNvfVwvFBJJFNFKwsiZyMZc5h3OBGhAI2uJp0/3OxvewotoOLT40/IosltEmPs+NdmV8RpG8dOMu9uDj9Km8628Ybg+zGszs3mivFhgIjjmtlZDK2okLg0Rxv391zuOpAOugJ6F5+rjjmI7N8iyfE8oZl1ryTIbbQ3e+3mzU77dahH8auZTw08UUhaHRDlNC3fDCT3BUc6hp7ls1+EjbLDXT5TSvFsu9HO23R03VjBGwTVEMMUcbHN1gkbvxt0dyeupJ1M02MZ5/uYvepr/APCBxm0XrCWU1ztNxxzJDX65FFdIuo6UU0W+4741Y4FwLD3Td0jp5lcR5jYJsb7IWXy2vsHJmXrq2rjNLuA6F3K67umvDXVdCZTfML2sbW9hdXZH23Icf6rvJZuwh8AlZRB47lw03mkg83A/SF1NlNnfRW++NbLU2jBLJtZq5Lo62UMVS23wuoYnRzGCSOSMxMnl3nAscAXBwGoBERZRVdGaf9yXvVl72mYtU02KXakzSxNs9VeW0bKxssdTBVyugm3adkzXFschOhDteO6Wc7wqzHsns2XW819iu1DeqESOiNTb6lk8W+3g5u8wkajpHQvEm0DFMIuOJU91sWcT5hQ5LnOPWy4ipttNDS1hjkcdyMQU8TC4sk1dIAdRFuE9C90UlFT2+BsFLBFTQt5o4WBjR+4BV2lEUxFyYlwcjyqy4fbjcL9eKCyUAcGGquNSyni3jzDeeQNTpzarLG1PDDb4a/stsYoZ3zMiqXXGERyOhBdMGu3tDuAEu05gNTwXS/wrcit+BZ3styy6x265UFDLcaZ1rutQ2nhe6WFgE7ZZAYw+Pc0DXcSJXbvMVA4DiFkmrPg8UJrLLkVtlvuQXJsNpmbU0FK98E9QyCM6aEQuc0cQO6b
roFlTZRNMVT/c/wBEXvUk+1jCKZ9rZNmWPxPukbZaBr7pA01jHfFdFq/8oD0Fuuq+2SbS8QwysFJkGVWSxVRjbMILlcYad5Y5xa1269wOhc1wB5iWkdC8s/CduVHfMj2h4tcp6LG+pccjhsdDTWGGqr8ie+GRwZHI+J7hHHJ3AbEA5hLnbzeC+9q2l4LYdrmOX3N7lQChrtltsEdXXwmdsr3VM5e0dy7VzuPc850I0Oiy4noiS937le26y4dnWO2W5VFBSWa72uruhv1VcGQ08LYXwNaNXDdcH8uCHbw5uY68L223KjvNBT11BVQV1FUMEkNTTSCSORp5nNcCQQe+F432H4fHNnGx2mvlkDaLrVk9farbdKcF9HSyV0DqYbjgd0iKQaDnAdpwXdXwVoI6DCcqt1MxsFBb8xvtJSU8Y0ZBC2ul3WMHQ0anQBY2lnTTHR/emSJdzoiLXZCIiAiIgwso/Ocf8px/YeqhS+UfnOP+U4/sPVQupwb7zbHlCuvsERF2VYiIgIiICIiAiIgncguPUmT4tTdeW2/qqonZ1CaXlDX7tPI7cD/6Ld05TXp3N3pVEpzIbkKTKMVpjd20Bq6mdgojScqa7dp5Hbgk0/Jbum/r07u70qjQEREBERAREQEREBERAREQTeVfLWKeUZPwlQtlY2VfLWKeUZPwlQtleYyjrFptj0wvpzQIiKlIiIgIToNTzIoe5UsGV5Rc6O5Rtq7fbRCyOjlG9E6Rzd8ve0jRxALQNdQNDoNSt3JMlnKrSaL7oiL58v5M3Ss+qof7aP8A3BOqof7aP/cFG9r7FvFqz+YReyna+xbxas/mEXsrtc0WXez4Y+5hihZdVQ/20f8AuCdVQ/20f+4KN7X2LeLVn8wi9lO19i3i1Z/MIvZTmiy72fDH3GKFl1VD/bR/7gnVUP8AbR/7go3tfYt4tWfzCL2U7X2LeLVn8wi9lOaLLvZ8MfcYoeGPh+bBshu+3bG73jE9RUMzOWG3PZHKQyGrY1sYLtODWmNrXan+o/vL2JjvwfcYx/FbFZYr1kVPHa6JlIDbckrqGOVw1c+V0cMzW773uc4nTU68/AKj7X2LeLVn8wi9lO19i3i1Z/MIvZV9XB9FVNNPGz0ftj7kXwyblsZoKi1U9Fbc5zOxugqHVAqqbIpqmZ5c1rS1xqjMHM0aCGkaAkkaElb2zfAbJsvsM1stVVUVRqauWvq6y4VPLVFVUSEF8sjuA3joOYAcBoFx+19i3i1Z/MIvZTtfYt4tWfzCL2VVPBdnMXTaz4Y+5OKFl1VD/bR/7gpqPCrTFtJqM2FZN11mtMdmdCZWcgIWTPmDgN3e396RwJ3tNAOHSuF2vsW8WrP5hF7Kdr7FvFqz+YReysY4Jso97Phj7jFCy6qh/to/9wX7ZKyTXce12nPunVRXa+xbxas/mEXsr8T4LZqeB0lrt9LZq9gLoKy3wthkif0HVo4jUDVp1a4DQggkKJ4Is+y1m/8Ax/JOKF0izMXu7sgxm0XRzWsdW0cNSWs10BewO0GvHTitNebromiqaKs8dDIREWIIiIMLKPznH/Kcf2HqoUvlH5zj/lOP7D1ULqcG+82x5Qrr7BERdlWIiICIiAiIgIiIJ3Ibn1Hk+LUvXrrf1XUTs6g6l5Xrhu08jtzlNPyW7pymvTubvSqJTuQXI0mTYvTC9C3irqJ2GgNNyhuG7TyO3A/+j3dOU16dzd6VRICIiAiIgIiICIiAiIgIiIJvKvlrFPKMn4SoWysbKvlrFPKMn4SoWyvMZR1i02x6YX05oERFSkREQFFWv9NMr/xab7hqtVFWv9NMr/xab7hq7vBHtLT/AB/7UonNLdRF1pmm3a3YdlF0x9mOZFf7lbbdFdqllnpY5Gspnukbv7z5GDVpiOrfjHUbodo7d9FM3KHZaLq/DPhC2HNr3YKCC03y3QZDTSVVluNxpGxU9xbGwPeI9Hl4IZq4B7W6gEjULjWf4SuM3q7W6KK23yKxXOv
Nst+TTUbW2ysqd5zWsZJv7+jnNLWucwNcRoCVGKB2yi877aPhLPt+MZBHhdBfJKi23OltsmTwW+OS2wz9VxMmiL3klxDXPYXBhaHEDeB0V3le320YvdrvRxWLIb/T2XTrvcbNQiemt53BIWyOL2uc5rHNe5sbXkAjUDmTFA7NRdXbONoNZmG1nPaGO5Mr8bo6CzVlqEcbA0NqYpnveHABzg/dYe6J004aaldj3a5RWe1VlfM17oaWF872xgFxa1pcQNSOOgUxN45SLqzC/hD2PNLpjNLHZL/aafJqd1RZrhdKRkVPW7sXKuY0iRzmuDN5w3mtDg0lpcNCfjZ/hK4zertboorbfIrFc682y35NNRtbbKyp3nNaxkm/v6Oc0ta5zA1xGgJUYoHbKLzvto+Es+34xkEeF0F8kqLbc6W2yZPBb45LbDP1XEyaIveSXENc9hcGFocQN4HRXeV7fbRi92u9HFYshv8AT2XTrvcbNQiemt53BIWyOL2uc5rHNe5sbXkAjUDmTFA7NX5k+I79xXHtdzpb1bKS40M7Kqiq4WVEE8Z1bJG5oc1w+gggrkSfEd+4rIcTZj+rbE/JNJ9yxUqmtmP6tsT8k0n3LFSrxOV9YtP8p82zOcREWqgREQYWUfnOP+U4/sPVQpfKPznH/Kcf2HqoXU4N95tjyhXX2CIi7KsREQEREBERAREQT2QV7qXJsXpxdoqEVNROw0b4N91bpTyO3Gv/AGC3TfJ6Q0jpVCp3IK00+T4tD1ypqQVFRO00s0O/JV6U8jt2N37BbpvE9IaR0qiQEREBERAREQEREBERAREQTeVfLWKeUZPwlQtlY2VfLWKeUZPwlQtleYyjrFptj0wvpzQIiKlIiIgKKtf6aZX/AItN9w1WqirX+mmV/wCLTfcNXd4I9paf4/8AalE5pbq6nqsHvcm2DO722i1tdzxWkttJPyrPylQySqLmbu9vDQSs4kAd1z8Dp2wi9FMXqHn7GdlGS0lq+DzTVlsMXYtRTQXsCoiJpS61yU4God3f5Rwbqze59ebip+y7LtoMuD4Tssrccgo7Ljl2pKioyttfE6KqpKWflo+ShB5Vsr91jTvNAad46leoUWOGB5Ou+zfaVatkl52WUOFtu9A28isocihutPGyamdcm1h34nuDxK0FzSNNDpqHHmPOuuw2px/aFmlVU7IbHtQo8guTrrQ3asnpYpaN0jGiSnn5Yb3Jtc0uaYw/g48NV6jRMEDpe02U7HtrOV32tprZZdntztFsgjuklbDS09sfSh8LKdzHlujXCVoaW8Bpp0hUd12rYPmdouNjsWa43drxcKSanpKGjvFNJLNI6NwDWtD9SV2HJGyVhY9rXtPO1w1C+bKKnjcHMp4muHMWsAIU3XZh0RbdleRtxT4PlBUW0slxeOKO9NFRFrSgWuWndxDu7/KODe43ufXm4qbsuy7aDLg+E7LK3HIKOy45dqSoqMrbXxOiqqSln5aPkoQeVbK/dY07zQGneOpXqFFGGB5Ou+zfaVatkl52WUOFtu9A28isocihutPGyamdcm1h34nuDxK0FzSNNDpqHHmPOuuw2px/aFmlVU7IbHtQo8guTrrQ3asnpYpaN0jGiSnn5Yb3Jtc0uaYw/g48NV6jRMEDjWy3UtnttJQUNNFR0VLEyCCmgaGxxRtAa1jQOAAAAAHQF95PiO/cV+l+ZPiO/cVmOJsx/Vtifkmk+5YqVTWzH9W2J+SaT7lipV4nK+sWn+U+bZnOIiLVQIiIMLKPznH/ACnH9h6qFL5R+c4/5Tj+w9VC6nBvvNseUK6+wREXZViIiAiIgIiICIiCcyGtFPlGKwG401IaipnaKWWn35KrSnkduxv/AGC3TeJ6Q0jpVGp3IK402T4tALrDRCpqJ2mjkg331ulPI7cY/wDYLdN8npDSOlUSAiIgIiICIiAiIgIiICIiCbyr5axTyjJ+EqFsrGyr5axTyjJ+EqFsrzGUdYtNsemF9OaBERU
pEREBRdG0U2cZHHId2SoFPURtPO5gj3CR39HNIP7wrRZ16x6gyCOJtbC57onb0csUr4pYz07r2EObr06HiujkOU05LaTVXHRMXTdtif4J6YufBFm9rW0fOr36drfep2tbR86vfp2t96u9zlkmmrwx9yvB8Wkize1raPnV79O1vvU7Wto+dXv07W+9TnLJNNXhj7jB8Wkize1raPnV79O1vvU7Wto+dXv07W+9TnLJNNXhj7jB8Wki6+zvEYbRe8Ggobje4YLjfDSVjevNY7lIeoqqTd1Mh3e7jjOo0+Lprx0Nf2tbR86vfp2t96pnhLJI7avDH3GD4tJFm9rW0fOr36drfep2tbR86vfp2t96o5yyTTV4Y+4wfFpIs3ta2j51e/Ttb71O1raPnV79O1vvU5yyTTV4Y+4wfFpLjXKthttvqaupkbFTwRukke46BrQNSVxu1raPnV79O1vvVyKLALPR1EMxbWVj4XiSMV9wqKprXAghwbI9w1BAIOmoI1GhUTwnkkdMYp/4j7k4H2wSgmtWD49RVDHRz01up4ZGPGha5sTQQR0HULcRF5W0rm0rqrntm9mIiKsEREGFlH5zj/lOP7D1UKXyj85x/wApx/YeqhdTg33m2PKFdfYIiLsqxERAREQEREBERBO5BWimybF4DdIaI1FRO0UklPyj6zSnkdusf/Rlum+T0hpHSqJTmQ14pcoxWnNxgpDU1M7BSy05kfVaU8jt1j/6Mt03iekNI6VRoCIiAiIgIiICIiAiIgIiIJvKvlrFPKMn4SoWysbKvlrFPKMn4SoWyvMZR1i02x6YX05oERFSkREQEREBERAREQEREEBtQJGSbMuDT/8AEx59Nfk6u5tf/Tjz9Gqv11/tRIGS7MAddTk5A0dp/wBG13P3/wB3qXYCynNAIiLEEREBERAREQEREBERBhZR+c4/5Tj+w9VCl8o/Ocf8px/YeqhdTg33m2PKFdfYIiLsqxERAREQEREBERBOZDcBS5RitP1ygpOqamdnUstOZH1elPI7dY/+jLdN8npDSOlUancgr+psnxaDrnBR9U1E7epJKflH1elPI7dY/wDoy3TfJ6Q0jpVEgIiICIiAiIgIiICIiAiIgm8q+WsU8oyfhKhbKxsq+WsU8oyfhKhbK8xlHWLTbHphfTmgREVKRERAREQEREBERAREQdf7UXluS7MAHubvZMQQ3md/w2uOh+jhr/oF2AoHaeSMj2Z6EjXJSDo8N1/4dXc4/a/d/r0K+WU5oBERYgiIgIiICIiAiIgIiIMLKPznH/Kcf2HqoUvlH5zj/lOP7D1ULqcG+82x5Qrr7BERdlWIiICIiAiIgIiIJzIa3qfJ8Wg6upKbqionb1PPDvy1OlPI7did+wRpvE9LQR0qjU7kFa6nybF4G3Ono21FRO11JLDvyVmlPI7djd+wW6b5PSGkdKokBERAREQEREBERAREQEREE3lXy1inlGT8JULZWNlXy1inlGT8JULZXmMo6xabY9ML6c0CIipSIijI8hvmRRdWWaS3UNtfr1NLWwPqHzt14SbrZGBrXcSBqTpuk6Elo28myW0yqZwZozzJmzrNFGb2Z+F7F6Im/ik3sz8L2L0RN/FLf5pttan5/RF8aVmijN7M/C9i9ETfxSb2Z+F7F6Im/ik5pttan5/QvjSs0UZvZn4XsXoib+KTezPwvYvRE38UnNNtrU/P6F8aVmijN7M/C9i9ETfxSb2Z+F7F6Im/ik5pttan5/QvjS8u/C7+F9ctjO1/G7BU4C+vpbRWR3uirxduS64sfSTwOZucg7c3ZJnjXV2vJDgN7h65we9XLJMQs91u9pFhuVbTMqJrYJzOaYuGu4XljNSARr3I0Oo6NV1HtS2DS7Yb7id2yOts09VjdZ1ZS8naZA2XmJjlBqDvM1a06cOI5+J17F3sz8L2L0RN/FK+vguuaKYpmm/t6Z+iImL86zRRm9mfhexeiJv4pN7M/C9i9ETfxSo5ptt
an5/RN8aVmijN7M/C9i9ETfxSb2Z+F7F6Im/ik5pttan5/QvjSs0UZvZn4XsXoib+KTezPwvYvRE38UnNNtrU/P6F8aVmijN7M/C9i9ETfxS/okzFp1N0scgH7ItczNf9eqTp/wAinNNtrU/P6F8aVkiycdvpvVPUNmhFLXUkvIVUDX74Y/da4brtBvNLXNcCQDoeIB1A1lybSzqsq5ori6YSIiKsYWUfnOP+U4/sPVQpfKPznH/Kcf2HqoXU4N95tjyhXX2CIi7KsREQEREBERAREQTeRVop8pxOA3OGjNRUztFJJTco+r0p5HbrH6fky3TfJ4ahpHSqRf53/C6wPa9Q/Caxu24bnOW0llzOcdRR016qmQ0Ew0FQ1oa/RjWtPKcAAGu06F/oDYbWbHYrdbTV1NeaOmjpzV1khkmm3Ghu/I88XOdpqSeJJJQc9ERAREQEREBERAREQEREE3lXy1inlGT8JULZWNlXy1inlGT8JULZXmMo6xabY9ML6c0CIipS/E38y/8AulRWzxxfgGMucS5xtlMSTzn8k1Ws38zJ/dKidnX6vsY8l0v3TV6bgj2Vrtp8qmFWZQoiLtKhERAREQEU9ctoGO2rHL7fp7tA602Plhcamn1mFM6EaytcGAneb0tAJB4aarcpamOtpoaiF2/DKwSMdoRq0jUHQ/QoH1REUgiIgIix8ny604bSUlTeKvqOCrrILfC7k3v355pBHEzRoJG85wGp4DXiQFA2ERFIIiIMrESTlOVjXgJqfT6lqrVI4h+lWWf41N9w1Vy8nwn1qrZT6YbHYIiLljCyj85x/wApx/YeqhS+UfnOP+U4/sPVQupwb7zbHlCuvsca51otttq6st3hBE+Ut7+6CdP/AAXXVvxK15JbqS5X2hpr1caqFk0s1bGJQ0uaDusDhoxo5gAB9OpJJusq/Ri8f5Ob7BU9jX6OWr/KRfYC9Xk8zRZzVTN03sc0M3ta4l4sWfzGL2U7WuJeLFn8xi9lUiLY4+1153yi+dKb7WuJeLFn8xi9lO1riXixZ/MYvZVIicfa6875L50pvta4l4sWfzGL2U7WuJeLFn8xi9lUiJx9rrzvkvnSm+1riXixZ/MYvZTta4l4sWfzGL2VSInH2uvO+S+dKb7WuJeLFn8xi9lO1riXixZ/MYvZVIicfa6875L50pvta4l4sWfzGL2U7WuJeLFn8xi9lUiJx9rrzvkvnSm+1riXixZ/MYvZTta4l4sWfzGL2VSInH2uvO+S+dKb7WuJeLFn8xi9lO1riXixZ/MYvZVIicfa6875L50pvta4l4sWfzGL2V+mbOsWiJMWPW2nf/aQUrI3jjrwc0Ajm6FRIo4+1153yXzpfPA7hUVFJc6GonfVPtdaaNs8p3pHs5OORm+elwbKBrznTUkkkqnUds++UMx8sN/BUqsVz8piItZu+HziJJziIi1kJvKvlrFPKMn4SoWysbKvlrFPKMn4SoWyvMZR1i02x6YX05oERFSl+Jv5mT+6VE7Ov1fYx5Lpfumq2m/mZP7pUTs6/V9jHkul+6avT8EextdtPlUwqzKFERdlU8+7fM/vuyDP6G8UM1ZXUuS2aex2+2co50DL014dRuDD3LTIJJGuIHERjXmXX8O1fN6Wlq7XPWVdTkOynHbpV35zXyCK51jWOit7pRqDIx8QfUkO5zoecar1hecbtmQyW2S5UcdY63Vba6kMg15GdrXNbIPpAe7T96/FPi1ppLrdrlFQQtrrsyKOum3dTUNjaWsDweB0a4j9xVc0zfnHnnZVYdp1NW4nls93MthqYBWXmorssmucVbTPgLt+KlNJGyBwcWPHJOAABbo7XVYWzzKclg2obNb5RVOSNw3NKisia3I8g6ukrIepZZ4pRS8nuU3GNpG4/wCKdCBqu+cO2DYJgF566WGxCgqgySONvVc8kMDXnV7YonvMcQPSGNAXFs/wctneP3O33Cgx0U9XbqkVdDIK2od1G/jq2EGQiKM
7x1jYAx3MWnQKMMjz7bcLpLJ8H74RNfDcLxUTR1uSUHI1l1qKiHca4kPMb3lplOg1kI3zqdSdSqm43K+bDL5Zprbf73k0N1wy7XOa3XqrNTGauihglidE3QCLe5RzCxmjdNOGo1XcVx2C4LdLlkVdPZHCfIYJae6NhraiKKqbIwMkLo2SBge5oALwA76VRT4NY6q92W7y0IfcLNTTUdDKZX6RRTBgkbu67rtREzi4EjThpqdZwyPPex2wbVrrNg2XtuxqKC4iGsu09ZlktdBXU0sW84RURpGRwOBc1zRG8Bu6Wne1JVb8Faz194waiy+9ZLfr3c6mouNO2GtuUr6aKJtbKxrREXbrnAR8HuBcA4tBDQALbEtguCYLkDLzYrCLdWxmQwtZVTuggMmu/wAlA55ji11Ou40c60oMKmw7DobHgTrfYhDM+WIXOCauhaJJHyS9yJmPJL3uIO/oNdNNNAEUzGcfDbbcKq07Gc9rqGpmo62msFfNBU08hZJFI2nkLXtcOLXAgEEcQQulBd79syveAXOkyC+ZFLkeMXKsuFFd659RFNUwUkVRE+OM9zCS4uaRGGgh3NrxXbzcSzTIYau05hd8Zu+M3Cmmo6+it1nqqSeaKSNzC1sprH7nxuJDddNdCDoRQdgFhNwxyu6g/lWOwyU9rk5aT+TxyRtjeNN7R+rWtGrtTw4cVMxM9I87WC433Ese2L5w3M75kN1zKvoqe7W6srTLR1DKunfK/kYPiw8i4AtLA3g0h2uql5qO453so2dbTbxlV6r7te8wtM8trbWuFspmm4tY2nZTfFbyYaBvfHLmkk8SF6SxjYJgWG5JHfbPj0VJcYTIacmeaSKlMn84YIXPMcO9qQeTa3gSOlcJ/wAGrZu+/G8DGxHW9cI7sGxVtTHA2rZIJGzNhbII2u3gCSGjXiDqCQscMjpCmO1fa/cs3vOPV8lDXWy/VtptzuyqWjp6DqeTdjbNQNpHxzagB7uUeS4P4Fg009a0JqDRU5rBGKvk28sIiSwP07rd146a66KGvewPAshyuTJK2wNdd5pI5Z5YaqeGOofHpuOliY8RyuGg0L2k8AuwFlTExnBERZjJxD9Kss/xqb7hqrlI4h+lWWf41N9w1Vy8nwn1qrZT6YbHYIiLljCyj85x/wApx/YeqhS+UfnOP+U4/sPVQupwb7zbHlCuvsZeVfoxeP8AJzfYKnsa/Ry1f5SL7AVDlX6MXj/JzfYKnsa/Ry1f5SL7AXqrH2M7f4YdjSXXWLfCG2fZrcrVQ2bIBVzXUHqGR1HURQ1Dg0udGyV8YYZAAdY97fGhBAIXYq8sYphl+pdg+wCgksVxhuNryWhqK2mfSSNlpIx1SHvlbpqxoDxqXaDuh31EzMId0O29YEzL+xk5DF126rFvIEEppxVHmpzUbnJCXo5Pf3teGmq/rNuuEzZDcLHDdpqi526eWmrYYLfUyCmfHGZH8o9sZawboJDiQHFpDSSCF5+mxzJGbG59jLMQvbsmkv7ni+miPW0wm5dViuNV8XUR6dxrv7w03V2js7xm42+xbbxPaqqmqLnkdympRJTua6qidSQNY+PUavaSHAEagkHTpURVMjZh+FHsyqX0rYsjklNZDy9HuWyrPVreGop/yX5Zw3hqyPecOOoGh02Z9umEU+F0uVm9GWyVNSaOKWCjnlldON7ei5FrDKHjcfq0tBG6dQurcOxO9UsPwX+Ws1fEbPaZY7lv0r29QvNp5Pdm1H5Ml/c6O07rhzqfloMxx6nvUTKHJ7bjNftDulRd5ceo5TcX0bog6B8Aa0yck+UDekiGumuhHEqMUjtfKduVHLi2H37DquivFvvWTUNkllmjkHJslm5OUbhLXMlbx4PHA87Su1144s+JZDatn95nixTKHtte0ykydlDXxPqLhU27WB5kYXOcZpNA8ubvFwIIdoV7DgmFRBHKGvYHtDg2Rpa4ajXQg8x+hZUzM5x1Pe9rt4tt52yUkVNQujw2x01
zt5fG8mWWSmqJXCXu+Ld6FoAbunQnjzEabdueO2LHcSnyav6ivN+tLLlDRUdFUVD5tGRmURMja9zt0yg7o1du6niGuIhsnxa+TbRttNtistdNHmGKQRWu4Rxa0hmip6mJ0MknNG8umYQHc41PQvjs8ortdc42M3ObHrxa4LdiNwt9aLhRPiNLUNdSR7jyRo3eMTyzj3TRqNQovn+7R2Bj3wjNneVXC10dryNlVJc5OQpJepJ2QSTaF3I8q6MMEugP5MuD/oXZC8sWzDL9FsdwujdYriytptpjK+WnNHIJIqbrzK8zubpqI+TcHb54bp110XqdZUzM5x11dfhDbPrJd622VuQCGqoKsUNaeo6h0VJMd3dE0gjLIgd9ujnuDTx0J0OnKzbblg+zu79a7/fW0dc2EVEsUdNNP1PEToJJjGxwhYSD3UhaOB4rp3JcNvdVsj+EnRMsdwlq7teK2W3U7aR5krGmhpWsdC3TWQFzXAFuvFpHOF96uW8bNMo2odWYZf8AJzmFLSTW2e2UDqpkrm0Tad1LUPHCHde0nV+jd15OuuoWOKR3BmW2nC8Bmt0N6vkcU1xiNRSw0sMtU+SEaay7sLXkR8fjkBv0qSwH4RNqqtjmH5dmVZTW24X+J7o6S20s8zpnNc4HkoWcpI4BoBJGumvEjULrrZbYsg+D1lVGcjxq95NHcMTs9rhuFionV3UU9KyRs1K/d4sY5z2uDzo06cT3o3Ednd8xuxbJ8gyDHs0daKbHKqz1tFjclXS3O21D6rlmPkigcyV0b2t3SBroQwkcAoxSPYOI5jZc8scN4sFwiuVulc5jZotRo5p0c1zSAWuBBBa4Ag84WyoLYtjVmx/EJZ7Nab5Zo7tWzXCop8jnllrnzOIYZJDK97gXNja7QnXQjUAkhXqsjMODs++UMx8sN/BUqsVHbPvlDMfLDfwVKrFa+Ve1nZHlCZERFqoTeVfLWKeUZPwlQtlY2VfLWKeUZPwlQtleYyjrFptj0wvpzQIiKlL8TfzMn90qJ2dfq+xjyXS/dNVtN/Myf3SonZ1+r7GPJdL901en4I9ja7afKphVmb00ohhfIWucGNLt1o1J07w76yW5JvtDutdxGo10MIB+0tlF2Jv7FTH7Ij4LuH1Q9pOyI+C7h9UPaWwii6dIx+yI+C7h9UPaTsiPgu4fVD2lsIl06Rj9kR8F3D6oe0nZEfBdw+qHtLYRLp0jH7Ij4LuH1Q9pOyI+C7h9UPaWwiXTpGP2RHwXcPqh7SdkR8F3D6oe0thEunSMfsiPgu4fVD2k7Ij4LuH1Q9pbCJdOkY/ZEfBdw+qHtJ2RHwXcPqh7S2ES6dI41BW9XRF/U81PodN2doBP08CVyURSMnEP0qyz/GpvuGquUjiH6VZZ/jU33DVXLynCfWqtlPphsdgiIuWMLKPznH/Kcf2HqoUvlH5zj/lOP7D1ULqcG+82x5Qrr7GXlX6MXj/JzfYKnsa/Ry1f5SL7AVNf6aSssNyp4hvSy00kbR3yWkBS2KTsqcYtMkbg5ppYx+4hoBB+kEEH6QvVWPsZ2/ww7GqiIskCIiAiIgKDuGwXZtdq+prq3A8dq62pldNPUTWyF75ZHElznOLdSSSSSe+rxFF144dns1Bj1rprbbKOC32+mYI4KWmjEccTRzBrRwA/cuYiKR/CA4EEag84XXn/ALuuy3/6d4x6Kg9ldiIouiR86eCOlgjhhjbFDG0MZGwaNa0DQADoAC+iIpBERAREQcHZ98oZj5Yb+CpVYqQ2eM3pcnqm91DU3dxjf0O3IIYX6Hp0fE9v72kdCr1rZT7Wf+PKEznERFqoTeVfLWKeUZPwlQtlY2VfLWKeUZPwlQtleYyjrFptj0wvpzQIiKlL8TfzMn90qJ2dfq+xjyXS/dNVtN/Myf3SonZ1+r7GPJdL901en4I9ja7afKphVmUKIi7KoRRW22+SYzsfzS7RVUlFPRWiqnimiYxzg9sTi0APa5p1Og0LSOK6QyK
/Zdg+I5hQ2fJ5bDRYBh9tEVPS0FNL1Rc+RmcWOMkbtGOApwWt0PdDdLeO9jM3D1Ii86VN9uls2lbVM3qLzXVD8Lx+ni7HafqfqeeQUb6uaPjEZA0ufC4Oa4O3gQS5oDW5FNtE2pW3Z7keW3CtqGwtxqSaGOsjtpjF0lLBSGjbTOkcYAXOBNQ9zndxoPjKMQ9RIul7w3NbdmWC4hHnVZLWXOOuuV2r20FHvMgghhj5OBvI7rGmadjgXh7ucEuHBT+Q7Vcit2LbRHUuQ6XGLJqLFMedNDAZRO5lJHI/d3A17zJNNJoWkAN4ANGiYrh6IRebhtIy6ea33yky41IuOcy2OgxsUdMY6mgjrXQTOLgzld5kccsu+HAANG8Drqqz4P8AR1V1u+eZXNklfc4rnkFdTx0E4p+Sijppepo3DdiEjSBTuABdu7p1LS4lxYrx3KuLcrpRWal6pr6uChpt9kfLVMrY2b73hjG6kgauc5rQOkuAHErrq+bVr9a87NigxXqqgFRFD1w5K7HuXburtY7a+DhvH+n3eHdObx04HwiQ+7x4BjUFw621F4yikdy4DHFjKVslYXAPBaSHU7NA4Eakag8ym8dvLh3e82/H7bPcbpXU1tt9O3emq6yZsUUY101c9xAA49JXmR22zNnw0OO22tqr+6tya4W2myaggoW1VXQ0sMb3OhbM6KmdLyr3xb2m7pC9wYTwWnerXl+W1+yHHsnv9ZS11Vd7hd5DTMopJXUtNG99K6bSJ0LpWOkp9TGOT1cSASGOEYtA9H0lXBX0sNVSzR1NNMxskU0Lw5kjCNQ5pHAgggghfVedaDahlN5lxfIqS/iNl7yySyU+Ix00DoxQxTyxTSPdumblWxwumJDwxvBpb0ngy7UM4tmxmDPG3qa61+R3Q0NptxpaSOloqeori2nlJcIy+RsOgbvytY4uaHcdXligemEUBsdkyua03STJ56+drqzSg67dRdWNhEbA4S9R/kR+UEmgBLgNN4681+so6Rk4h+lWWf41N9w1VykcQ/SrLP8AGpvuGquXlOE+tVbKfTDY7BERcsYWUfnOP+U4/sPVQpfKPznH/Kcf2HqoXU4N95tjyhXX2Cmrjs/tVwrJaprq6hmmcXy9QV00DHuPO4sY4N3j0nTU9JVKi7tFpXZzfRNzC+5I9rO3eEb56WqPaTtZ27wjfPS1R7SrkVvKbbXkvlI9rO3eEb56WqPaTtZ27wjfPS1R7SrkTlNtryXyke1nbvCN89LVHtJ2s7d4Rvnpao9pVyJym215L5SPazt3hG+elqj2k7Wdu8I3z0tUe0q5E5Tba8l8uoMvxZlpzjBLbTXa9MpLrWVUNWw3SYl7WUksjQCXajumNPDvKx7Wdu8I3z0tUe0svPnAbTNmAJ4m4Vuncg//ACE/T0LsJOU22vJfKR7Wdu8I3z0tUe0nazt3hG+elqj2lXInKbbXkvlI9rO3eEb56WqPaTtZ27wjfPS1R7SrkTlNtryXyke1nbvCN89LVHtJ2s7d4Rvnpao9pVyJym215L5SPazt3hG+elqj2l+mbNbYD3dbeZmdLH3ao0P79Hj/APvNzKsROU22vJfL40lJBQUsNNTQsp6eFgjjiiaGtY0DQAAcwAX2RFrzN/TKBERQJvKvlrFPKMn4SoWysbKvlrFPKMn4SoWyvMZR1i02x6YX05oERFSl+Jv5mT+6VE7Ov1fYx5Lpfumq2m/mZP7pUTs6/V9jHkul+6avT8EextdtPlUwqzN+RpfG5rXujJBAe3TVv0jUEf8ANRrcCvjTx2kZO7gRoaa1/wAErRF2FSQpdn88hkhvmT3TKrZKwsltd4pLe6ml5iC4R0rHHQgEd1p9BW3VYpZK5lxZU2egqGXF7JK1stKxwqnMDQx0uo7stDGAF2uga3TmC1ES4ZHYfYRkE1+6yW7r5ND1NLc+pI+qXxcPybpdN4t4DuSdOC4lt2c4nZ7ZU26gxezUNvqZm1E9JTW+KOKWRrg5r3M
DQHODmtIJGoIB6FRIlw4rrVRPucdydRwG4xwup2VZibyzYnOa5zA/TUNLmtJGuhLQegLHn2cYnU3117lxeyy3p0jJjcpLfE6oL2EFjjIW72rS1uh11Gg05lRIggdlexmwbMLNQNht9tqsiip+SrMgjt8cFVWPJLnuc4au0LiTulx04DUqps+JWPHq+5V1qs1vtlbcpBLXVNHSsikqnjUh0rmgF57p3F2vxj31qol0QCx8jw2wZjCyG/2O23yFgc1sdypI6hrQSC4APB01LW6/3R3lsIgw7pgmNXyx09luWPWq4Wen3eRt9VRRS08e6NG7sbmlo0B4aDguDBs8oIs6pMmMshkoLa61W6gYxjKejie5jpCxoGu87kohz6AMAAGp1qkS4YtDhGOWu+Vl5orBa6S8VoIqrhBRxsqJ9effkDd52v0kr6yYpZJcdbj77PQPsLYW04tbqVhpRE3TdZyWm7ujQaDTQaBaqIODZLFbcatcFttFvpbVbqcEQ0dFA2GGMEkndY0ADUkngOclc5EUjJxD9Kss/wAam+4aq5SOIfpVln+NTfcNVcvJ8J9aq2U+mGx2CIi5Ywso/Ocf8px/YeqhS+UfnOP+U4/sPVQupwb7zbHlCuvsERF2VYiIgIiICIiAiIg69z92m0zZgN5zdbhWjQcx/kE/Ouwl15tAJG03ZcOHG4V2uo/7BOuw0BERAREQEREBERAREQEREE3lXy1inlGT8JULZWNlXy1inlGT8JULZXmMo6xabY9ML6c0CIipS/jmhzSDzEaFdfWq4Nwu00lmulPWMdQxNp4qiCjlminjYA1jw6NhAJGmrToQQ7QEAOPYSLo5Hlk5Lii6+Ju+Gb/6iYvzobs9tHfr/RlT7tOz20d+v9GVPu1coulzvR3c+L8UYYQ3Z7aO/X+jKn3adnto79f6Mqfdq5ROd6O7nxfiYYQ3Z7aO/X+jKn3adnto79f6Mqfdq5ROd6O7nxfiYYQ3Z7aO/X+jKn3adnto79f6Mqfdq5ROd6O7nxfiYYdf1G0qwUklOyeoqoX1EnJQtkt9Q0yv3S7daDHxO61x0HQ0noX37PbR36/0ZU+7Xw2otacl2YknQjJiR+/rbXfT6/8A1HYCynhaiIj9OfF+KMMIbs9tHfr/AEZU+7Ts9tHfr/RlT7tXKLHneju58X4pwwhuz20d+v8ARlT7tOz20d+v9GVPu1conO9Hdz4vxMMIbs9tHfr/AEZU+7Ts9tHfr/RlT7tXKJzvR3c+L8TDCG7PbR36/wBGVPu1/W53ankBjbi9x5mstdUSf9OTVwic7Ud3Pi/EwwnMQttRC+53OrhdSy3KZsjad5BfFG2NrGh2nDeOhcRx03tNeCo0RcO3tqre0m0qzyyERFQMLKPznH/Kcf2HqoUvlH5zj/lOP7D1ULqcG+82x5Qrr7BERdlWIiICIiAiIgIiIOvM/aTtN2XkNJAuFdqe9/IJ12Guu9oGnbN2Xakg9ca7TQf9gnXYiAiIgIiICIiAiIgIiICIiCbyr5axTyjJ+EqFsrAzatp7fcsWnqp4qaBtxfvSzPDGjWkqANSeHOvv2Y2Dw5bfO4/WvK5VXRTlFpFU3dMemF9MdENhFj9mNg8OW3zuP1p2Y2Dw5bfO4/WtbjbPWjeyulsIsfsxsHhy2+dx+tOzGweHLb53H6042z1o3l0thFj9mNg8OW3zuP1p2Y2Dw5bfO4/WnG2etG8ulsIsfsxsHhy2+dx+tOzGweHLb53H6042z1o3l0thFj9mNg8OW3zuP1p2Y2Dw5bfO4/WnG2etG8ulsIsfsxsHhy2+dx+tOzGweHLb53H6042z1o3l0pjagNck2ZcAdMmPOQP+ja7v8/8Apx/01V+uq9pmWWSTItmxZeKB4Zkhc8tnjcGjrdWjUnXuRqQNe+QOlXnZjYPDlt87j9azqtbO6P8Ayjei6Wwix+zGweHLb53H607MbB4ctvncfrWHG2etG9N0thFj9mNg8OW3zuP1p2Y
2Dw5bfO4/WnG2etG8ulsIsfsxsHhy2+dx+tOzGweHLb53H6042z1o3l0thFj9mNg8OW3zuP1p2Y2Dw5bfO4/WnG2etG8ulsIsfsxsHhy2+dx+tOzGweHLb53H6042z1o3l0thFj9mNg8OW3zuP1p2Y2Dw5bfO4/WnG2etG8ul8so/Ocf8px/YeqhQ97yG1XO4Y/DR3Ojq5uuUbuTgqGPdpuP46Aq4XY4MqirjJpm/pjyhXX2CIi7aoREQEREBERAREQde7RHcjtD2VvL3NEl5q4AG8zibZVv0PHvRk/6BdhLr7bHrQUeKXzUiOzZFRzyu3w0NjmLqN7iT0NbVOcfoaV2CgIiICIiAiIgIiICIiAiIg+csEc7Q2WNsjQddHtBC+XW6k+aw/VhclFjNMTngcbrdSfNYfqwnW6k+aw/VhclFGCnQm9xut1J81h+rCdbqT5rD9WFyUTBToL3G63UnzWH6sJ1upPmsP1YXJRMFOgvcbrdSfNYfqwnW6k+aw/VhclEwU6C9xut1J81h+rCdbqT5rD9WFyUTBToL3G63UnzWH6sJ1upPmsP1YXJRMFOgvdabUaGl7LNlUQpodZMnfw3BzNtVwcfs9K7C63UnzWH6sKEywdeNs2B25mjm2unuF7mP9Q8m2liB+lwqptP8Ny7ETBToQ43W6k+aw/VhOt1J81h+rC5KJgp0JvcbrdSfNYfqwnW6k+aw/VhclEwU6C9xut1J81h+rCdbqT5rD9WFyUTBToL3G63UnzWH6sJ1upPmsP1YXJRMFOgvcbrdSfNYfqwnW6k+aw/VhclEwU6C9xut1J81h+rCdbqT5rD9WFyUTBToL3wZQ00Tw5lPExw5nNYAQvuiLKIiMyBERSCIiAiIgIiICIiDOyOwUWV4/crNcYuWoLhTyUs7OYlj2lp0PQdDwPQp/Z9kVZNC/Hb/AC72VWqJrap5aGCti4tZVxgcN1+nED4j95p5gTYrByzDqTLIIC+oqbbcaUl9Hdbe9rKqkedNSwua5pB0G8x7XMdoA5rhwQbyLr45xfMIk5DMrW+qt+9ozJLJA+Wn3eg1NONZYD33NEkQALnPj13RaWa927I7ZT3K019LdLdUN34auimbNDK3vte0kEfuKDmoiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAuDer1RY7aqm5XGobS0VOzfklcCdBzAADiSSQAACSSAASVK3baxbhcJ7TjdLPmN9heYpaO0FroqR/DUVNQSIoCNQdxzuUI1LGP00X6suFXK53OmveYVsVfcac79LaqIuFuoHdDmhwDppRzcrIBpp3DIt528H62f2atmr7xld4p30t1vZjZFSStAkoqKLe5CB+hPd6vlleNTo+ZzQSGgm0REBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAUVdtktkq7rLd7W+rxa9yycrLcLFKKd1Q/hq6ePQxTnQAaysedBw0VqiDrxtbtExLdbWUNBndADoai2uFvuDW9GsMjjDKefVwli6NGceGhj+13GMgucdpNc+z36TXds16hdRVj9OfcjlAMoGo7qPebxHHirNZuQY3acstkluvdso7vb5Dq+lroGzRuPQd1wI1+lBpIuvWbLa/GnMfhuVXGzQtOvWq5k3OgcO8Gyu5aMdAEUrGjX4p4afztg5Li43ctxCpfTtHdXbGC64wc/O6ANFS0kcdGRyAcdX82odhoun9oXwsNmeza0WW53HIoKukudybbB1vkZLLTO0cXyTRbwe1ke7o/Rpc0uaN3UrtqjrKe40kFVSzx1NLOxssU8Lw9kjHDVrmuHAggggjnQfZERAREQEREBERAREQEXWeQ/CIwvGtsFk2aVVe6TJ7nTzVTmxbhhoo44Xza1Dy4bhcxji0AE6AEgAgnlzbZbddJDBiFruOcVGpby1ojaKFpB0O9WSFsJ0IOrWPe8f1TqNQ7BWNlGZ2LCqJtXfrvR2iB7tyN1XM1hld/VYDxe7vN
aCT3lLHH89y1gN5v9NiFI4kuocaaKioLeGjXVc7NO/ruQtcNeD+lbGL7LsYw+tdcKC1sku727kl3rpH1ddI3vOqJS6Qjie53tBrwAQYo2iZFlBDcRw+qdTOPC75KXW2m077IS11Q89Ojoo2nho/pH7bsvr8kax+bZNV30BxcbZbQ6227o4GNjzJKOHxZZXtP9ULsFEHEtNooLBbae32yip7dQU7dyGlpImxRRN7zWNAAH0ALloiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiIPLfwsvgYVfwmswsd1ZldNYKG3Ubqc0/WtskrpHP1dIZQ5rngtDGhjuDdwkfHcux/g57FLj8HvBZscuGbVGV2qB3KUZrKVtOKFnEva077iWknXQnQaHTnK7dXVm3LIJYqe3WCBxa2u356sjphZoAw/Q5zh+8McOlbWS2FWU21NlT2/2UszLNsdwuE8lNjZjo6NpLeuUrBJJL9MTD3Ib3nO3tf6oGhMTPfL7VPL5skvDnHiSyrdEP8AkzdA/wCS4qL6FY5HYWFOGiiNs9M72OKex9euV48Yr36Sm9pOuV48Yr36Sm9pfJZVbltjt12htdXebfS3OfTkqKaqYyaTXm3WE6nX6Ar5os4zxG6DFOltdcrx4xXv0lN7Sdcrx4xXv0lN7SwbjmuPWir6lrr9bKKp5UQ8jUVkcb+ULQ4M3S4HeLXNOnPo4HpX3vWTWfG2wuu92obW2d25Ea2pZCJHd5u8RqfoCjDZdPRHRsMU6Wv1yvHjFe/SU3tL+i6Xlp1GR3vX6bjKf/MqW2c5j2wMJtWQ9SdQ9XxmTqfleU3NHFum9oNebvBUaU02VdMVRTF0/AxTpUVk2k5RYZmE3Dr1Sgjepri0B2nTuytAcD9Lg8fR3u37Vkkee4rWS2Suda6+SF8IkkibJLQzlpDS6MktcWkhwGu64acSCvPy2MLyCXFsut1WxxFNVSsoqxg5nMe7dY4/3HuB16AX98lcjL+DbK2s5rsqbqo0dvwTE39DqBv/ALNG6U20agzF+0uO/wB0hukV0ndfLOZm1MjZRIeVaJ9ZA4ji3ebqCRqOde62MbGxrGNDWtGgaBoAO8v0i8ICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgLozbVC+PPqGVw/Jy2zdjPfLJXF/wB4z/wXeaiNq2GTZVZIaihYH3W3OdNAzUAzNI0fFqeALgARroN5rdSBqupwZb02GU01V5p6N6YdIIvnq2pie3V7dd6Nw4sewjUOB5i1wOoI4EEd9SHaptXhXJv+8lf75fQapqj/ANYvYLNeWDZbNW3fMrBmmUV9ku9yvM+lC2200r6yCR46nkhkdTvkIDS0Atf3G7+zou9O1TavCuTf95K/3ysmt3Gho1IA04nUrVtbGq3uxdF22foOgrjj9DLUbeRVU8dbNFboIhUVEbXSENtjSCXac+o3uHTxXEsN5sdnzSkr89ET4LhjFtjs1TXwGaE9w41MbeBHKOcWEjnI0XolFjyXpiYntmc3xmdPx/kdb/Bx07SWJ7o0b1M7Qaaf0j12Qpy+YHQ3+vdWVFfe6eRzQ3cobzVU0fDvMjka0H6dOK4HaptR/wClcm/7yV/vldRTaWdEURETdF2f/Qsl8KyJ9Q2CCL+emqIYo/77pWhv/iQuFj2O0+NUslPTVFfUse/fLrhXTVbwdANA6VziBw5gdOfvrsXZRiUmSX2C8ys/4TbpC6J5HCoqBqBu99sZ11I/bAGurXALe3jJ7GbW06LvPshNOe93qiIvmCRERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREBERAREQEREEXmGyq1ZZUvrmPltd0cBvVdLp+V0Gg5RhG6/QaDXg7QAa6cFETbDsgjdpDe7bM3+tJSSRn/kHuXdaLp2PCWVWFOCivo+N0+
ab3R/aRyXwrafqZfWnaRyXwrafqZfWu8EWxzxlmtG6C90f2kcl8K2n6mX1p2kcl8K2n6mX1rvBE54yzWjdBe6P7SOS+FbT9TL61/RsRyUnjdrUB3xBKf/ANl3eic8ZZrRugdV2TYVBHM2S+XWS5MBB6lpYzTRH6HHeL3D6N4DvghdoU1NFR08VPTxMggiYGRxRtDWsaBoAAOAAHQvoi51vlVtlM32tV/90F4iItVAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiIP//Z", "text/plain": [ "" ] @@ -208,7 +208,7 @@ }, { "cell_type": "code", - "execution_count": 33, + "execution_count": 4, "metadata": {}, "outputs": [ { @@ -216,7 +216,7 @@ "output_type": "stream", "text": [ "{'router_node': {'route': 'other'}}\n", - "{'normal_llm_node': {'messages': [AIMessage(content='Hello! How can I assist you today?', additional_kwargs={'refusal': None}, response_metadata={'token_usage': {'completion_tokens': 9, 'prompt_tokens': 9, 'total_tokens': 18}, 'model_name': 'gpt-3.5-turbo-0125', 'system_fingerprint': None, 'finish_reason': 'stop', 'logprobs': None}, id='run-9730e690-8cbd-4ba0-a962-3f8a4e848ef9-0', usage_metadata={'input_tokens': 9, 'output_tokens': 9, 'total_tokens': 18})]}}\n" + "{'normal_llm_node': {'messages': [AIMessage(content='Hello! 
How can I assist you today?', additional_kwargs={'refusal': None}, response_metadata={'token_usage': {'completion_tokens': 9, 'prompt_tokens': 9, 'total_tokens': 18, 'completion_tokens_details': {'reasoning_tokens': 0}}, 'model_name': 'gpt-3.5-turbo-0125', 'system_fingerprint': None, 'finish_reason': 'stop', 'logprobs': None}, id='run-6405070a-cf4c-4a6a-a1d6-7b444edbb64f-0', usage_metadata={'input_tokens': 9, 'output_tokens': 9, 'total_tokens': 18})]}}\n" ] } ], @@ -240,7 +240,7 @@ }, { "cell_type": "code", - "execution_count": 34, + "execution_count": 5, "metadata": {}, "outputs": [ { @@ -267,17 +267,17 @@ }, { "cell_type": "code", - "execution_count": 35, + "execution_count": 6, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "((), {'messages': [HumanMessage(content=\"what's the weather in sf\", id='ad42a2dc-57c5-4aae-b616-6a86ca6ee7bd')]})\n", - "((), {'messages': [HumanMessage(content=\"what's the weather in sf\", id='ad42a2dc-57c5-4aae-b616-6a86ca6ee7bd')], 'route': 'weather'})\n", - "(('weather_graph:99f49d5c-9d1a-5e00-b2fc-1f1ade30dec3',), {'messages': [HumanMessage(content=\"what's the weather in sf\", id='ad42a2dc-57c5-4aae-b616-6a86ca6ee7bd')]})\n", - "(('weather_graph:99f49d5c-9d1a-5e00-b2fc-1f1ade30dec3',), {'messages': [HumanMessage(content=\"what's the weather in sf\", id='ad42a2dc-57c5-4aae-b616-6a86ca6ee7bd')], 'city': 'San Francisco'})\n" + "((), {'messages': [HumanMessage(content=\"what's the weather in sf\", additional_kwargs={}, response_metadata={}, id='108eb27a-2cbf-48d2-a6e7-6e07e82eafbc')]})\n", + "((), {'messages': [HumanMessage(content=\"what's the weather in sf\", additional_kwargs={}, response_metadata={}, id='108eb27a-2cbf-48d2-a6e7-6e07e82eafbc')], 'route': 'weather'})\n", + "(('weather_graph:0c47aeb3-6f4d-5e68-ccf4-42bd48e8ef20',), {'messages': [HumanMessage(content=\"what's the weather in sf\", additional_kwargs={}, response_metadata={}, id='108eb27a-2cbf-48d2-a6e7-6e07e82eafbc')]})\n", + 
"(('weather_graph:0c47aeb3-6f4d-5e68-ccf4-42bd48e8ef20',), {'messages': [HumanMessage(content=\"what's the weather in sf\", additional_kwargs={}, response_metadata={}, id='108eb27a-2cbf-48d2-a6e7-6e07e82eafbc')], 'city': 'San Francisco'})\n" ] } ], @@ -297,7 +297,7 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": 7, "metadata": {}, "outputs": [ { @@ -306,7 +306,7 @@ "('weather_graph',)" ] }, - "execution_count": 36, + "execution_count": 7, "metadata": {}, "output_type": "execute_result" } @@ -325,16 +325,16 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": 8, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(PregelTask(id='99f49d5c-9d1a-5e00-b2fc-1f1ade30dec3', name='weather_graph', error=None, interrupts=(), state={'configurable': {'thread_id': '3', 'checkpoint_ns': 'weather_graph:99f49d5c-9d1a-5e00-b2fc-1f1ade30dec3'}}),)" + "(PregelTask(id='0c47aeb3-6f4d-5e68-ccf4-42bd48e8ef20', name='weather_graph', path=('__pregel_pull', 'weather_graph'), error=None, interrupts=(), state={'configurable': {'thread_id': '3', 'checkpoint_ns': 'weather_graph:0c47aeb3-6f4d-5e68-ccf4-42bd48e8ef20'}}),)" ] }, - "execution_count": 37, + "execution_count": 8, "metadata": {}, "output_type": "execute_result" } @@ -352,16 +352,16 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": 9, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "PregelTask(id='99f49d5c-9d1a-5e00-b2fc-1f1ade30dec3', name='weather_graph', error=None, interrupts=(), state=StateSnapshot(values={'messages': [HumanMessage(content=\"what's the weather in sf\", id='ad42a2dc-57c5-4aae-b616-6a86ca6ee7bd')], 'city': 'San Francisco'}, next=('weather_node',), config={'configurable': {'thread_id': '3', 'checkpoint_ns': 'weather_graph:99f49d5c-9d1a-5e00-b2fc-1f1ade30dec3', 'checkpoint_id': '1ef6a48a-018f-638c-8001-a7af39dcd6ee', 'checkpoint_map': {'': '1ef6a489-fddc-6208-8001-5e02ff54dfba', 
'weather_graph:99f49d5c-9d1a-5e00-b2fc-1f1ade30dec3': '1ef6a48a-018f-638c-8001-a7af39dcd6ee'}}}, metadata={'source': 'loop', 'writes': {'model_node': {'city': 'San Francisco'}}, 'step': 1, 'parents': {'': '1ef6a489-fddc-6208-8001-5e02ff54dfba'}}, created_at='2024-09-03T23:02:42.795391+00:00', parent_config={'configurable': {'thread_id': '3', 'checkpoint_ns': 'weather_graph:99f49d5c-9d1a-5e00-b2fc-1f1ade30dec3', 'checkpoint_id': '1ef6a489-fded-6936-8000-c96152586915'}}, tasks=(PregelTask(id='c153ac13-b9a5-543a-8044-3b3c852fd0bc', name='weather_node', error=None, interrupts=(), state=None),)))" + "PregelTask(id='0c47aeb3-6f4d-5e68-ccf4-42bd48e8ef20', name='weather_graph', path=('__pregel_pull', 'weather_graph'), error=None, interrupts=(), state=StateSnapshot(values={'messages': [HumanMessage(content=\"what's the weather in sf\", additional_kwargs={}, response_metadata={}, id='108eb27a-2cbf-48d2-a6e7-6e07e82eafbc')], 'city': 'San Francisco'}, next=('weather_node',), config={'configurable': {'thread_id': '3', 'checkpoint_ns': 'weather_graph:0c47aeb3-6f4d-5e68-ccf4-42bd48e8ef20', 'checkpoint_id': '1ef75ee0-d9c3-6242-8001-440e7a3fb19f', 'checkpoint_map': {'': '1ef75ee0-d4e8-6ede-8001-2542067239ef', 'weather_graph:0c47aeb3-6f4d-5e68-ccf4-42bd48e8ef20': '1ef75ee0-d9c3-6242-8001-440e7a3fb19f'}}}, metadata={'source': 'loop', 'writes': {'model_node': {'city': 'San Francisco'}}, 'step': 1, 'parents': {'': '1ef75ee0-d4e8-6ede-8001-2542067239ef'}}, created_at='2024-09-18T18:44:36.278105+00:00', parent_config={'configurable': {'thread_id': '3', 'checkpoint_ns': 'weather_graph:0c47aeb3-6f4d-5e68-ccf4-42bd48e8ef20', 'checkpoint_id': '1ef75ee0-d4ef-6dec-8000-5d5724f3ef73'}}, tasks=(PregelTask(id='26f4384a-41d7-5ca9-cb94-4001de62e8aa', name='weather_node', path=('__pregel_pull', 'weather_node'), error=None, interrupts=(), state=None),)))" ] }, - "execution_count": 39, + "execution_count": 9, "metadata": {}, "output_type": "execute_result" } @@ -382,22 +382,120 @@ }, { "cell_type": 
"code", - "execution_count": 40, + "execution_count": 10, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "{'weather_graph': {'messages': [HumanMessage(content=\"what's the weather in sf\", id='ad42a2dc-57c5-4aae-b616-6a86ca6ee7bd'), AIMessage(content=\"It's sunny in San Francisco!\", id='07b513fa-30af-4ee4-83e4-2af8f6d133bd')]}}\n" + "((), {'messages': [HumanMessage(content=\"what's the weather in sf\", additional_kwargs={}, response_metadata={}, id='108eb27a-2cbf-48d2-a6e7-6e07e82eafbc')], 'route': 'weather'})\n", + "(('weather_graph:0c47aeb3-6f4d-5e68-ccf4-42bd48e8ef20',), {'messages': [HumanMessage(content=\"what's the weather in sf\", additional_kwargs={}, response_metadata={}, id='108eb27a-2cbf-48d2-a6e7-6e07e82eafbc')], 'city': 'San Francisco'})\n", + "(('weather_graph:0c47aeb3-6f4d-5e68-ccf4-42bd48e8ef20',), {'messages': [HumanMessage(content=\"what's the weather in sf\", additional_kwargs={}, response_metadata={}, id='108eb27a-2cbf-48d2-a6e7-6e07e82eafbc'), AIMessage(content=\"It's sunny in San Francisco!\", additional_kwargs={}, response_metadata={}, id='c996ce37-438c-44f4-9e60-5aed8bcdae8a')], 'city': 'San Francisco'})\n", + "((), {'messages': [HumanMessage(content=\"what's the weather in sf\", additional_kwargs={}, response_metadata={}, id='108eb27a-2cbf-48d2-a6e7-6e07e82eafbc'), AIMessage(content=\"It's sunny in San Francisco!\", additional_kwargs={}, response_metadata={}, id='c996ce37-438c-44f4-9e60-5aed8bcdae8a')], 'route': 'weather'})\n" ] } ], "source": [ - "for update in graph.stream(None, config=config, stream_mode=\"updates\"):\n", + "for update in graph.stream(None, config=config, stream_mode=\"values\", subgraphs=True):\n", " print(update)" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Resuming from specific subgraph node\n", + "\n", + "In the example above, we were replaying from the outer graph - which automatically replayed the subgraph from whatever state it was in 
previously (paused before the `weather_node` in our case), but it is also possible to replay from inside a subgraph. In order to do so, we need to get the configuration from the exact subgraph state that we want to replay from.\n",
+    "\n",
+    "We can do this by exploring the state history of the subgraph, and selecting the state before `model_node` - which we can do by filtering on the `.next` parameter.\n",
+    "\n",
+    "To get the state history of the subgraph, we need to first find the parent graph state right before the subgraph executed (again by filtering on `.next`), and then pass that state's task into `get_state_history`:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 58,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "parent_graph_state_before_subgraph = next(h for h in graph.get_state_history(config) if h.next == ('weather_graph',))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 63,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "subgraph_state_before_model_node = next(h for h in graph.get_state_history(parent_graph_state_before_subgraph.tasks[0].state) if h.next == ('model_node',))\n",
+    "\n",
+    "# This pattern can be extended no matter how many levels deep - imagine model node was another subgraph in this case\n",
+    "# subsubgraph_stat_history = next(h for h in graph.get_state_history(subgraph_state_before_model_node.tasks[0].state) if h.next == ('my_subsubgraph_node',))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "We can confirm that we have gotten the correct state by comparing the `.next` parameter of the `subgraph_state_before_model_node`."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 64,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "('model_node',)"
+      ]
+     },
+     "execution_count": 64,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "subgraph_state_before_model_node.next"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Perfect! 
We have gotten the correct state snapshot, and we can now resume from the `model_node` inside of our subgraph:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 65,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "((), {'messages': [HumanMessage(content=\"what's the weather in sf\", additional_kwargs={}, response_metadata={}, id='108eb27a-2cbf-48d2-a6e7-6e07e82eafbc')], 'route': 'weather'})\n",
+      "(('weather_graph:0c47aeb3-6f4d-5e68-ccf4-42bd48e8ef20',), {'messages': [HumanMessage(content=\"what's the weather in sf\", additional_kwargs={}, response_metadata={}, id='108eb27a-2cbf-48d2-a6e7-6e07e82eafbc')]})\n",
+      "(('weather_graph:0c47aeb3-6f4d-5e68-ccf4-42bd48e8ef20',), {'messages': [HumanMessage(content=\"what's the weather in sf\", additional_kwargs={}, response_metadata={}, id='108eb27a-2cbf-48d2-a6e7-6e07e82eafbc')], 'city': 'San Francisco'})\n"
+     ]
+    }
+   ],
+   "source": [
+    "for value in graph.stream(None, config=subgraph_state_before_model_node.config, stream_mode=\"values\", subgraphs=True):\n",
+    "    print(value)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Great, this subsection has shown how you can replay from any node, no matter how deeply nested it is inside your graph - a powerful tool for testing how deterministic your agent is."
+ ] + }, { "cell_type": "markdown", "metadata": {}, @@ -411,7 +509,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 11, "metadata": {}, "outputs": [ { @@ -431,16 +529,16 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 12, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "[HumanMessage(content=\"what's the weather in sf\", id='35e331c6-eb47-483c-a63c-585877b12f5d')]" + "[HumanMessage(content=\"what's the weather in sf\", additional_kwargs={}, response_metadata={}, id='05ee2159-3b25-4d6c-97d6-82beda3cabd4')]" ] }, - "execution_count": 18, + "execution_count": 12, "metadata": {}, "output_type": "execute_result" } @@ -459,20 +557,20 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 13, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "{'configurable': {'thread_id': '4',\n", - " 'checkpoint_ns': 'weather_graph:9e512e8e-bac5-5412-babe-fe5c12a47cc2',\n", - " 'checkpoint_id': '1ef6a424-2bb2-6ee0-8002-6a6ca5dbc91f',\n", - " 'checkpoint_map': {'': '1ef6a40d-0fca-671c-8001-3064b486db01',\n", - " 'weather_graph:9e512e8e-bac5-5412-babe-fe5c12a47cc2': '1ef6a424-2bb2-6ee0-8002-6a6ca5dbc91f'}}}" + " 'checkpoint_ns': 'weather_graph:67f32ef7-aee0-8a20-0eb0-eeea0fd6de6e',\n", + " 'checkpoint_id': '1ef75e5a-0b00-6bc0-8002-5726e210fef4',\n", + " 'checkpoint_map': {'': '1ef75e59-1b13-6ffe-8001-0844ae748fd5',\n", + " 'weather_graph:67f32ef7-aee0-8a20-0eb0-eeea0fd6de6e': '1ef75e5a-0b00-6bc0-8002-5726e210fef4'}}}" ] }, - "execution_count": 19, + "execution_count": 13, "metadata": {}, "output_type": "execute_result" } @@ -520,7 +618,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 56, "metadata": {}, "outputs": [ { @@ -528,14 +626,17 @@ "output_type": "stream", "text": [ "((), {'router_node': {'route': 'weather'}})\n", - "(('weather_graph:bdb185a9-ff74-58dd-ae72-34e8665a33d7',), {'model_node': {'city': 'San Francisco'}})\n", + "HERE\n", + 
"(('weather_graph:ec34ba77-edd8-bbf1-f3f1-01498cf0c575',), {'model_node': {'city': 'San Francisco'}})\n", "interrupted!\n", - "((), {'weather_graph': {'messages': [HumanMessage(content=\"what's the weather in sf\", id='5d721f30-278e-460f-a83c-fdb101731f3e'), AIMessage(content='rainy', id='43b30e0d-6ea0-4e9c-92de-3e411e6fa21d')]}})\n", - "[HumanMessage(content=\"what's the weather in sf\", id='5d721f30-278e-460f-a83c-fdb101731f3e'), AIMessage(content='rainy', id='43b30e0d-6ea0-4e9c-92de-3e411e6fa21d')]\n" + "(('weather_graph:ec34ba77-edd8-bbf1-f3f1-01498cf0c575',), {'weather_node': {'messages': [{'role': 'assistant', 'content': \"It's sunny in San Francisco!\"}]}})\n", + "((), {'weather_graph': {'messages': [HumanMessage(content=\"what's the weather in sf\", additional_kwargs={}, response_metadata={}, id='c94dd545-308d-4032-832a-57aab1abb6b1'), AIMessage(content=\"It's sunny in San Francisco!\", additional_kwargs={}, response_metadata={}, id='9d004035-5ffe-4b44-addc-4f0d61255f16')]}})\n", + "[HumanMessage(content=\"what's the weather in sf\", additional_kwargs={}, response_metadata={}, id='c94dd545-308d-4032-832a-57aab1abb6b1'), AIMessage(content=\"It's sunny in San Francisco!\", additional_kwargs={}, response_metadata={}, id='9d004035-5ffe-4b44-addc-4f0d61255f16')]\n" ] } ], "source": [ + "graph = \n", "config = {\"configurable\": {\"thread_id\": \"14\"}}\n", "inputs = {\"messages\": [{\"role\": \"user\", \"content\": \"what's the weather in sf\"}]}\n", "for update in graph.stream(inputs, config=config, stream_mode=\"updates\", subgraphs=True):\n", @@ -544,7 +645,7 @@ "print(\"interrupted!\")\n", "state = graph.get_state(config, subgraphs=True)\n", "# We update the state by passing in the message we want returned from the weather node, and make sure to use as_node\n", - "graph.update_state(state.tasks[0].state.config, {\"messages\": [{\"role\": \"assistant\", \"content\": \"rainy\"}]}, as_node=\"weather_node\")\n", + 
"#graph.update_state(state.tasks[0].state.config, {\"messages\": [{\"role\": \"assistant\", \"content\": \"rainy\"}]}, as_node=\"weather_node\")\n", "for update in graph.stream(None, config=config, stream_mode=\"updates\", subgraphs=True):\n", " print(update)\n", "print(graph.get_state(config).values['messages'])" From c44f10c05e91ac313754a2fc45b200de43095fa1 Mon Sep 17 00:00:00 2001 From: Isaac Francisco <78627776+isahers1@users.noreply.github.com> Date: Thu, 19 Sep 2024 10:40:35 -0700 Subject: [PATCH 05/17] docs: return state before recursion limit is hit (#1736) * recursion limit return state * links * spelling * remove bad link --- docs/docs/how-tos/index.md | 1 + .../return-when-recursion-limit-hits.ipynb | 251 ++++++++++++++++++ docs/mkdocs.yml | 1 + 3 files changed, 253 insertions(+) create mode 100644 docs/docs/how-tos/return-when-recursion-limit-hits.ipynb diff --git a/docs/docs/how-tos/index.md b/docs/docs/how-tos/index.md index c7ca5927bb..547427ad38 100644 --- a/docs/docs/how-tos/index.md +++ b/docs/docs/how-tos/index.md @@ -88,6 +88,7 @@ These guides show how to use different streaming modes. 
- [How to add node retries](node-retries.ipynb) - [How to force function calling agent to structure output](react-agent-structured-output.ipynb) - [How to pass custom LangSmith run ID for graph runs](run-id-langsmith.ipynb) +- [How to return state before hitting recursion limit](return-when-recursion-limit-hits.ipynb) ## Prebuilt ReAct Agent diff --git a/docs/docs/how-tos/return-when-recursion-limit-hits.ipynb b/docs/docs/how-tos/return-when-recursion-limit-hits.ipynb new file mode 100644 index 0000000000..eb193c10ae --- /dev/null +++ b/docs/docs/how-tos/return-when-recursion-limit-hits.ipynb @@ -0,0 +1,251 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# How to return state before hitting recursion limit\n", + "\n", + "[Setting the graph recursion limit](https://langchain-ai.github.io/langgraph/how-tos/recursion-limit/) can help you control how long your graph will stay running, but if the recursion limit is hit your graph returns an error - which may not be ideal for all use cases. Instead you may wish to return the value of the state *just before* the recursion limit is hit. This how-to will show you how to do this." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup\n", + "\n", + "First, let's installed the required packages:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%%capture --no-stderr\n", + "%pip install -U langgraph" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "
\n", + "

Set up LangSmith for LangGraph development

\n", + "

\n", + " Sign up for LangSmith to quickly spot issues and improve the performance of your LangGraph projects. LangSmith lets you use trace data to debug, test, and monitor your LLM apps built with LangGraph — read more about how to get started here. \n", + "

\n", + "
" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Without returning state\n", + "\n", + "We are going to define a dummy graph in this example that will always hit the recursion limit. First, we will implement it without returning the state and show that it hits the recursion limit. This graph is based on the ReACT architecture, but instead of actually making decisions and taking actions it just loops forever." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "from typing_extensions import TypedDict\n", + "from langgraph.graph import StateGraph\n", + "from langgraph.graph import START, END\n", + "\n", + "class State(TypedDict):\n", + " value: str\n", + " action_result: str\n", + "\n", + "def router(state: State):\n", + " if state['value'] == \"end\":\n", + " return \"__end__\"\n", + " else:\n", + " return \"action\"\n", + "\n", + "def decision_node(state):\n", + " return {'value':'keep going!'}\n", + "\n", + "def action_node(state: State):\n", + " # Do your action here ...\n", + " return {'action_result':'what a great result!'}\n", + "\n", + "workflow = StateGraph(State)\n", + "workflow.add_node('decision',decision_node)\n", + "workflow.add_node('action',action_node)\n", + "workflow.add_edge(START,'decision')\n", + "workflow.add_conditional_edges('decision',router)\n", + "workflow.add_edge('action','decision')\n", + "app = workflow.compile()" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "image/jpeg": 
"/9j/4AAQSkZJRgABAQAAAQABAAD/4gHYSUNDX1BST0ZJTEUAAQEAAAHIAAAAAAQwAABtbnRyUkdCIFhZWiAH4AABAAEAAAAAAABhY3NwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAA9tYAAQAAAADTLQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlkZXNjAAAA8AAAACRyWFlaAAABFAAAABRnWFlaAAABKAAAABRiWFlaAAABPAAAABR3dHB0AAABUAAAABRyVFJDAAABZAAAAChnVFJDAAABZAAAAChiVFJDAAABZAAAAChjcHJ0AAABjAAAADxtbHVjAAAAAAAAAAEAAAAMZW5VUwAAAAgAAAAcAHMAUgBHAEJYWVogAAAAAAAAb6IAADj1AAADkFhZWiAAAAAAAABimQAAt4UAABjaWFlaIAAAAAAAACSgAAAPhAAAts9YWVogAAAAAAAA9tYAAQAAAADTLXBhcmEAAAAAAAQAAAACZmYAAPKnAAANWQAAE9AAAApbAAAAAAAAAABtbHVjAAAAAAAAAAEAAAAMZW5VUwAAACAAAAAcAEcAbwBvAGcAbABlACAASQBuAGMALgAgADIAMAAxADb/2wBDAAMCAgMCAgMDAwMEAwMEBQgFBQQEBQoHBwYIDAoMDAsKCwsNDhIQDQ4RDgsLEBYQERMUFRUVDA8XGBYUGBIUFRT/2wBDAQMEBAUEBQkFBQkUDQsNFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBT/wAARCAD5AOADASIAAhEBAxEB/8QAHQABAAICAwEBAAAAAAAAAAAAAAUGBAcCAwgBCf/EAFEQAAEEAQIDAgYMCgYIBwAAAAEAAgMEBQYRBxIhEzEUFRYiQZQIFzJRVVZhdJPR0tM1NlRxdYGRlbKzGCM3QnK0JSg0Q2KhscEzUldkg5bw/8QAGwEBAQADAQEBAAAAAAAAAAAAAAECAwQFBgf/xAA1EQEAAQIBCAYKAwEBAAAAAAAAAQIRAwQSITFRUnGRFDNBYaHRBRMVIzJigZKxwSLw8ULh/9oADAMBAAIRAxEAPwD9U0REBERAREQERVme1d1VYmrY61LjcXC8xzZCJre0neOjmQlwIAB3Dn7b77huxHMNlFGd22hbJ+3frUGh1mxFXae4yvDQf2rC8qsL8MUPWWfWsOpw/wBOU5DKMPVsWSd3WrbPCJ3f4pZOZ5/WfSszyVwvwPQ9WZ9S2WwY7ZnlH7k0HlVhfhih6yz608qsL8MUPWWfWnkrhfgeh6sz6k8lcL8D0PVmfUnue/wXQeVWF+GKHrLPrTyqwvwxQ9ZZ9aeSuF+B6HqzPqTyVwvwPQ9WZ9Se57/A0HlVhfhih6yz608qsL8MUPWWfWnkrhfgeh6sz6k8lcL8D0PVmfUnue/wNDNq3a95hfWsRWGD+9E8OH/Jd6r9rQGn7LxIzFV6dkbltqi3weZpPeQ+PZ3ven0LhUu3dO3a9DKTuvUrDhFUyTmgPD9ukU+2w3O3mvAAcfNIDuUyTMpq6udOyf1/YS2xY0RFoQREQEREBERAREQEREBERAREQQOuMlPi9MW5KjxFcmdHUryH+5LNI2Jjv1OkB/UpTGY2vh8dWo1IxFWrRtijYOuzQNh+dQfEZhGlZbQBLaFmrfeGt5jyQ2I5X9P8LHKygggEHcH0ronqaeM/iF7H1ERc6KZxB4xaQ4XT0INS5Y0rN4SPr14as1mV7Gbc7+SFj3BjeYbuIDRv1Kq0vsjMLDxxi4eup3nCbF1r0WQhx9uVj5Z5eVkZ5YS1sYbyuMzncgLi0kFjgq97Jys6ncw+bwWM1jHrqjStjD5rSuNN2JrjyHwW2zYtMUjgw+e3YchPM09+NDkNTaX456Z1XqTS2VsePNFU8RckwNJ9yKlkW2XSyxycm5jj/rT
s93m+aeqDYlPj9oK/rjyQiz3LnjZkpMhmpzxRSTx788TJnRiJ7xyu81rieh6LHueyJ0LWyuZxUOTt38riJJ4LtSjirlh0EkURlc15jhcG7tB5T3PIIbzEELzjnMfrPUWc05f1Dhtf5PVeI13Xv5COOCYYSljo7rmxuqxtIjnHYujPMwPk6yFxA3W9eBumL2NscX/DMdPj5MprG7PXlswOj8IhdWrtZI0keczcOAcNxuHbelBN8BeNNHjloDH6hrUrWOtSwRyWqk9WeOOJ7wTyxyyRsbMBt7uPcfm3WyFpX2J1+9Q4R4LR+X09m8DmdM0YqFzxnRfDBLI0ubvBKfNmb5m/MwkbOb763UgLBzmIhz2It4+xuI7EZZzN6OYfQ4H0EHYgjuICzl1zzx1oJJpXBkUbS97j3AAbkrKmZiYmnWIrRuXmzulsZes8vhUkIE/J7ntW+bJt8nMHbKZVb4dV5INFYt0rHRyWGOtFjhs5vaudJsR6COfYqyLZjREYtUU6rys6xERaUEREBERAREQEREBERAREQcZI2TRujkaHscC1zXDcEHvBCq2MvN0UIMRk5WxY1pEWPvyu8wt6BsMjj3PHcCT542287cK1rrsV4rcEkM8bJoZGlr45GhzXA94IPeFtoriImmrTErEqbqTglw+1jmJ8tndE4DMZScNEty9jopZX8rQ1u7nNJOwAA+QBRzvY38KXtYHcONLuDBytBxMB5RuTsPN98k/rVgHD6jVd/o29k8RHvv2NO68Qj/DG/ma0fI0AL55E2PjVnvpofulnmYc6q+ceVy0bWbpLROn9BY1+O03hKGBoSSmd9bHV2QRukIALy1oA3Ia0b/IFNqr+RNj41Z76aH7pPImx8as99ND90nq8Pf8ACS0bVoRargx+Vk4rXtPO1TmPF0OFr32ESw9p2r55mO3PZ+55Y27dO/fqrZ5E2PjVnvpofuk9Xh7/AISWjaytX6C01xApwVNTYHHZ+rBJ2sUOSqsnYx+xHMA4HY7Ejf5VVP6NXCb/ANNtLfuiD7KsPkTY+NWe+mh+6TyJsfGrPfTQ/dJ6vD3/AAktG10aT4RaH0DkpMjpvSOFwF58Rgfax1GOCR0ZIcWlzQDtu1p2+Qe8uy/Yj13zY2mWzYLm5b9sb9nZb6YIj3PB7nuG4A3YN3E8nYOH2PsH/SVnIZpm5/qchbe+E794dENmOHyOaf8AmVZY42Qxtjja1kbAGta0bAAdwASKqMPTRN55W/v0XRGpyREXOxEREBERAREQEREBERAREQEREBERAREQEREGvahH9IHKDc83kxU6fJ4XZ+X/ALLYS17U3/pAZTu28mKnoG/+12f1/wD7862EgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiINeVB/rB5U8w38l6fm7df9rsrYa15U2/pB5X3/Jen6P8A3dn0rYaAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIuueeOtBJNM9sUUbS973nYNaBuST7yp3lZn8mxtnF4qjFRkAdC7I2ZGTSNPc4sbGeTcbEAknY9Q0ghbsPCqxPhWy6oqR491h+QYP1ub7tPHusPyDB+tzfdrd0WvbHOCy7oqR491h+QYP1ub7tPHusPyDB+tzfdp0WvbHOCzxtiPZ75e/wCyJfiY+FM7dS22Q6aOLfmWgxzx2JSXOf4P3AyHfp0DSV+gC800OAE2O9kPd4uxY/DeOrNPsPBPCJREycjkfYBEfunRgNI+Vx7z02/491h+QYP1ub7tOi17Y5wWXdFSPHusPyDB+tzfdp491h+QYP1ub7tOi17Y5wWXdFSPHusPyDB+tzfdrkNYZzEg2M1jKPi1nWafH2ZJJIW+l5jdGOZo7zsdwO4FOi4nZafrBZdUXFj2yNDmkOaRuCDuCFyXGgiIgIiICIiAiIgIiICIiAiIgrXE1xZw21Y5p2IxNsg//C9IgBGwAbAAdAuPFD+zTVv6It/yXrlH7hv5gvRwupj
jP4hl2OSIofPauxOmbuGqZK34NYzFzwCizs3v7afs3ycm7QQ3zY3nd2w6d+5CrFMIiKgiicPqrF57J5jH0bJnuYidta7GYnt7KR0bZA3dwAd5r2ndpI67d+6Z7VWL0xJio8lZNZ+Uusx9QCJ7+0nc1zms80Hl3DHdXbDp39yglkRFQUfqIB2n8mCAQasu4P8AgKkFH6h/AGT+ay/wFZ0fHHFY1p3STi/SuGcTuTShJJ/wBSyiNIfinhfmUH8tql15uJ8dXGSdYiItaCIiAiIgIiICIiAiIgIiIKxxQ/s01b+iLf8AJeuUfuG/mC48UP7NNW/oi3/JeuUfuG/mC9HC6mOM/iGXYhdd56fSuiNQ5qrXFqzjcdYuRQH/AHj44nPDf1loH615jq6Xtib2P2sMjrDOalymezUNy2Ll4vpB82PsS7wwbckQb1a0M26E77nu9buaHtLXAOaRsQRuCFrTBexs4caZzePyuL074Hbx9p12mGXrPY1pXNc1xjiMnZsBD3bta0N7unQbSYmWLRHDc8X+K+Ax2vsTe8GyFzIum3sarmbShiZZLH1X40VDGNmNczfn5+bzuffoti8MMRkOMGa1RqjMav1FQnxep7eMqYfFZF1WrVgqy8jY5YW9JHSAczi/fo8cvL0V9i4BaCr6wdqeHANgy7rYvudDanZA6z39sYA8RGTfrz8m+/XfdfczwD0HntVyakuYEHLyyRzTSwWp4I7EkZBY+WJj2xyOGw2L2k9ApFMjTrsPrjN0+LuC0rqDMWrGN1dVdHBYzEjLMlI1q809WvZeXGAu538pGwbvt0C+u1NDexnCG9gM7q+vJBrZ+AydLN5OU2ATHZfNWtgO5Zix0UYa5xds0d53O+7MzwZ0fqCvm4b2KfI3NXo8ledHcnifJZjjbGyRrmPBjIaxo8wtHTfvJXLHcHNHYnEYPGU8KyClhcj43pMbPLzMubPBme7m5pHESv35y7ffr3DZmyPOkupNSDg1Pxlfq7Nt1OzPOY3BC6fFohbkvBPATV9ySYx7vbtOY78y+6pyOoK2guMGuotX6ijyuldVWo8XVZkpBTjhjkgcYXQ+5kY4SObs/m2G3Ly7Lfr+AegpNX+U7tOxHLeGeMNzPL4P4V+UeD8/Zdr6e05Obfrvv1Ujc4SaTv6c1JgZ8V2mJ1FbkvZSv4TKPCJpOXndzB/M3fkb0aQOnQd6mbIt6j9Q/gDJ/NZf4CpBR+ofwBk/msv8BXRR8ccVjWnNIfinhfmUH8tql1EaQ/FPC/MoP5bVLrzcX46uMk6xERa0EREBERAREQEREBERAREQVjih/Zpq39EW/wCS9co/cN/MFN5THQ5jGW6FjmNe1C+CTlOx5XNLTsfzFUxp1Hh4m1ZsFNmXRAMbco2IWCUDucWyyNLTttuOo3J2JC9DAmKsPMvETE30zbXbbwZa4smkVao6py+SMvg+jsvIyMtHa9tUEcnMxrwWOM2z27OHnNJG+433BAyvG2e+JmV9apffrfmfNH3R5lk2ihPG2e+JmV9apffp42z3xMyvrVL79Mz5o+6PMsm0WvYOMlazxDs6Fiwl5+q61NuQlxosVOdsJIAdzdty79QeXffYg7bdVZ/G2e+JmV9apffpmfNH3R5lk2ihPG2e+JmV9apffp42z3xMyvrVL79Mz5o+6PMsm1H6h/AGT+ay/wABWJ42z3xMyvrVL79YLL2V1c6fC+KLGnzO2WKWbJSxtl7NvK2R8MbHOMm3aNAfuGAuB3PuTlTTFMxVNUWjvjzIhddIfinhfmUH8tql11Vq8dSvFBE3kiiYGMb7wA2AXavJrnOqmdrEREWAIiICIiAiIgIiICIiAiLCyeXrYlsInkb29h7oqtbna2SzII3ydnGHEcz+SN7tvea4nYAkB2ZDJVcTWNi7Ziqwc7I+0meGgve4MY0b97nOc1oHeS4AdSoV+OsauiIylZ9LEuFqrNiLAjlF6J39W10vfytLedwjB3Ie3m2ILBkYzF2
LkzMlltxZlhgcMaXNlgoytaS4xu5QXOLnuBedtw1uwb13m0HGKJkMbI42NjjYA1rGjYNA7gAuSIgIiIPzv0/7Gbjhj/Zey62k1DpSbUUbmZuzGL1oQyU5ZZITXaTXJHmRubttsBtsfe/RBa/oATcfc45nKTBpmg2TYncc9q4Wgju/3blsBAREQFgZTCUsx2brMDTYhEgr2mgCauXsLHOif3scWuI3HXqs9EFakzFvScUhzT/CcNXhrsbmOr7EkhcI3meKOINYNy15kZswBzy5sbY93WVFANwM2EtdthSyKtPblt36cxe8TOezqYSX7Qu5w1xAHI4ukJHM8vAT6LDxGUjzONr3I4pq4lYHGCyzs5YiRuWPafcuG/ULMQEREBERAREQEREBERBjZG63G4+1bfFNO2vE6UxVozJK8NBOzGDq5x22AHUnosDC0p5XuyV8ym1P/WQ1rMUIfj2OYzmgDo99+rAXHnfu7fY8oaBiaqpOymS09Tkxc1+j4cLM88dnsmVnQsdJE97R1kHaNYA3u32J7tlY0BERAREQF03LkGPqT2rU8darAx0ss0zwxkbGjdznOPQAAEkldy17ZceLGUbWhdzaKoT72ZWkgZaxG7pC0+muxw3eR0kc3k9w2QPDv4WVbGSjzOrrsMlexqOw2zXrygtfBRYwMrMc0+5c5oMrmnq107mnuV7REBERAREQEREEPkcBz33ZTG+DUsy9kUEtuSAydtXZIXdk8BzS7YPl5Dv5jpHEAgua7KwuXizlBtqOGxW898b4LULopI3scWuBaflB2cN2uGzmlzSCc5QNmtPj9W1rlapbtRZGMVbkjbQEFURtkfHJ2Lj3uLiwuZ5x3j5gQ0FgTyIiAiIgIiICIoXMa209p+0K2TzmOx9kjm7GzaYx+3v8pO+yzpoqrm1MXlbXTSKre2lo7404j12P609tLR3xpxHrsf1rb0fG3J5SubOx914a1Cvis5ZpNtNw15tp077oqspROY+Gaw9ziGuZHFLI5zXdNhuPOa1SGl9Y4DW+Pff07nMbn6LJTC61i7cdmJsgAJYXMJAcAQdu/qPfXj32evCPSPHTS0OqtOahxUutcHXMbIGXoyb1UEvMIHN7tpc5zdu/mcOu422X7D2XS3Cb2POk8JfzuKo5aSF129DJaYx7ZpXF5a4E7hzWlrSD181Oj425PKTNnY9Hoqt7aWjvjTiPXY/rT20tHfGnEeux/WnR8bcnlJmzsWlFVvbS0d8acR67H9ao9jiBg+KORsULGfx+L0fDIYZYZLjI7OZcDylrgTvFV36eh8x6ebF/47o+NuTylM2dixTXpuKs7quNnfBo2Nz47eQicWPyh2IMVd4IIi3PnStPncvKzoS4XmpUgx9SGrVhjrVoGNjihhYGMjYBs1rWjoAAAAAuVeGKvBHFAxkUMbQ1jIwA1rQNgAB3DZdi50EREBERAREQEREBV7WmO8YVcU5uHdmZK2UqTtjba8HMG0oDp99/OEbS5xZ/eA5fSrCtU8X+LXD/AE7JTw+d1Lp9mUrZXHTy4y5qCvRsVwLEbxO5rnhxawbScpHnhu3pQbWRR2ntSYnV2Hr5bBZSlmsVY5uxvY+wyeCXlcWu5XsJadnNcDsehBHoUigIiICIiDCzVx2Pw960wAvggklaD77Wkj/oqjpKpHWwFKQDmnsxMnnmd1fNI5oLnuJ6kkn9Xd3BWfVX4sZj5nN/AVXtNfi5ivmkX8AXoYGjCnivYkkRFmgiIgIiIC+PY2Rpa5oc09CCNwV9RBicPH9g3PYuPpUxuQ7CtHt0ijdBDLyN/wCEGVwA7gNmgAAK3KnaB/DGsv0rH/kqquK5sp62eEfiFnWIiLlQREQFxllZDG+SR7Y42Auc9x2DQO8krEzOXqYDF2cjelENWuwvkefe94D0knYAekkBeedW6nu66tmXI80dFrg6DGc3NFHt3F4HR7/TudwP7u3Un1MhyCvLaptNqY1z5Lxbls8W9H1Xlrs/UlI
7zXJmH7WAhdHtzaN+Gm+ry/YWjgA0AAbAdAAi+jj0Hk/bVV4eSXhvH25tG/DTfV5fsLxr7PrhlheONvS2o9HXIrGoIZm4y80xPYDWe7dsziWjpG4u37zs7/hW2UT2Hk29VzjyLw2Xw01Rw64WaBwWk8Rl2Nx+Jqtrsd4NKDIR1fIRy+6c4ucflcVZvbm0b8NN9Xl+wtHInsPJt6rnHkXhvH25tG/DTfoJfsKawetsBqWQxYzL07k4HMYY5R2gHv8AIfO2+Xb0LzouuatHOWl7fOYQ5jwdnMIO4LXDqDuO8LCv0HgTH8K5ie+0/qC8PVSLVXC/iPZsXIsDmpjYmkB8CvP25pNhuYpPfeACQ7+8AQfOG79qr5XKcmxMlxJw8T/RF6q/FjMfM5v4Cq9pr8XMV80i/gCsOqvxYzHzOb+Aqvaa/FzFfNIv4At+D1M8f0vYklpPTvsjZbfFjH6Gz2Bx+Iu5J88VU0tQV8hPHJFG6TlswMAdDzMY4g7uG4233W5rkBtVJ4WyvgdIxzBLGdnMJG24+ULzhob2PettL2+GsUvkjFQ0XefI6al24s5RkkMkMk8jizZku0nOWeeHOJ89oHWTfRZF90Txk1DxGuxZDT2h/CNETWpK0Ofs5aOGWZrHuY6dlbkJMXM0gEvDiOvKqdwu4ncRtQcMteZXN4Olakx9zKxVpYM32UhMNh7DACKoDBGxpDZfOLuQEtBJ2sPC3h7xE4UQY/SVCzpq/oWhaea1yybDckyo6Rz+xMbW9m57eblEnOBsAS3ddmjOGOsNKVtc6ddNhLOlsvYyd7H2hLM27HLbeZBHKzkLORrnvHM1xJHL5o6qae0ROkuNeUm01oPA6T01b1dqC5pWnnrYy+abGa1eRjQwzWjETLM93MOjBzcpceULLo+yRt6on0jT0vpCTJZLUFK/O6vfyDagoTU5o4Zopncj+gc545mBx3a3zdnFzcDT/BjXPDlukMrpWzp63nKekqWmMvTyss7Ksrqzd454ZGRl24c6QcrmjmaR7khZvDbgBlNBap0Tk5cpVyPiuhlxlZ9nRyWLl6xFO50bNiAwFjx1cDty9DudpGcNhcK+IbOJmkxljj5MTdht2cfdx8kglNazBK6KVnOAA4czSQ4AbgjoO5W9UThBoPIcP8TqGrkZq00mQ1DkstEarnODYrFh8rGu5mjzg1wBA3G/cT3q9rZGrSMHQP4Y1l+lY/8AJVVcVTtA/hjWX6Vj/wAlVVxXPlXWfSPxCyIiLlQREQao485N4hwGJBIjszyWpAO57YWjZp+TnkY787AtYLaHHrGPMGBywBMdWeSrKR3MbMBs4/Jzxsb+d4Wr1+geic3odOb33438rFXYIqfNxXwEEz4nxZvmY4tPLp/IOG494iDY/nC+P4tafY4tMWc3B26aeyBH7ewXpeuw96ObBVdaeyEo6Y1HlMTUgxlt2KDRcN/NwUZC8tD+SGN+5lIaRvvyjc7b7g7ZTOM17O5HwbS+mfHTHYarm2TWL7aoMU3abMI5HEP8wbDqDudy3Yb48WkNTY/P5nN6QdhrOK1G6O8+DUEM8MtSfs2sLmtDOZwcGtJY/kII23CtGN0ddqcSMvqB8lbwO5iatBkcZcHtkjfK5xI22DdpBt1J6HouOn19VWmrRfZGrTa08lQo4zuzdbTUelsHJnMpm8f40FSay2syrW6AulkLXbHnPKAAdyD3ALu4E5jJZ3R+QtZV85ueOshG6KxP2zoA2w8CIO3O4YPNG3TYdOirWmeEmrNC1dI3sLaw9jM43D+JcjWuvlFaeLtO0a+ORrOYOa7fvbsQ70Kb0XOeEuDlx2pXS2MjdyFzIl2Fxty3CGyzOeBuyJ3KfO7j+096mHVi58VY2jR3W7P/AEbPRU3229P7b9lnP/ruQ+4Uxp3WGO1S6dtBl9pgDS/w3G2anfvty9tG3m7j3b7envC74xaKptFUT9USWQmlqVH2oHdnZq7WYX7b8sjDzNP7QF6moXG5CjW
tMGzJ42ytHyOAP/deWchDLbquq129pZtEVoWb7cz3nlaP2lep6FRmPo16sfVkEbYm/mA2H/RfM+ns22Ht0/pnGphaq/FjMfM5v4Cq9pr8XMV80i/gCtOZpuyOIvVGEB88EkQJ9Bc0j/uqhpK5HYwNOEHks1oWQWIHdHwyNaA5jgeoIP7RsR0IXg4GnCmO9exMIiLNBERAREQERcZJGxML3uDGDqXOOwCDD0D+GNZfpWP/ACVVXFVHh4zwhmcykfWpk7/b1pN+ksbYIohI3/hcYnFp7nDZwJDgrcubKetnhHhELOsREXKgiIgw8xiauexlnH3YhNVsMMcjD6QfSD6CO8H0EArzzq7S97Qtox5Dmlx5IEOU5do379wft0Y/0bHYO/u+kD0kuMkbZY3Me0PY4FrmuG4IPeCF6eRZfXkVU2i9M64Xi8tNcHtDmkEHqCPSvq3za4TaPtvc92nqUTndT4PH2IPp7mbLo9pvRvwHF9LJ9pfRx6cyftpq8PNLQ0ai3l7TejfgOL6WT7Se03o34Di+lk+0r7cybdq5R5loaNRby9pvRvwHF9LJ9pPab0b8BxfSyfaT25k27VyjzLQ0auuWzHC5jXO3kedmRtBc957gGtHUn5AFvb2m9G/AcX0sn2lNYPRmB008vxeIp0ZSNjLDC0SEe8Xd5H61hX6cwYj+FEzPfaPMtCi8L+G9mpcjz2bh7CdgPgVF3V0W4IMsnvPIJAaPcgnfdztmbURF8rlOU4mVYk4mJ/gKFzGitP6hsCxlMHjcjOByiW1UjkeB727gTsppFz011UTembSalW9qvRnxTwn7vi+yntV6M+KeE/d8X2VaUW7pGNvzzlbztVb2q9GfFPCfu+L7Ke1Xoz4p4T93xfZVpROkY2/POS87VW9qvRnxTwn7vi+yntV6M+KeE/d8X2VaUTpGNvzzkvO1Vvar0Z8U8J+74vsrsg4Z6QrSCSLS+GjeOoc2hED37/8Al98BWVE6RjT/ANzzkvIiIudBERAREQEREBERAREQEREBERAREQf/2Q==", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "from IPython.display import Image, display\n", + "\n", + "display(Image(app.get_graph().draw_mermaid_png()))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's verify that our graph will always hit the recursion limit:" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Recursion Error\n" + ] + } + ], + "source": [ + "from langgraph.errors import GraphRecursionError\n", + "\n", + "try:\n", + " app.invoke({\"value\":\"hi!\"})\n", + "except GraphRecursionError:\n", + " print(\"Recursion Error\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## With returning state\n", + "\n", + "If we wanted to actually return the state, what we are going to do is introduce a new key to 
our state called `is_last_step` which keeps track of whether we are on the last step of our recursion limit. If so, we will bypass all other graph decisions and simply terminate the graph, returning the state to the user without causing an error.\n", + "\n", + "We are going to use a `ManagedValue` channel to do this. A `ManagedValue` channel is a state channel that will exist for the duration of our graph run and no longer. Since our `action` node is going to always induce at least 2 extra steps to our graph (since the `action` node ALWAYS calls the `decision` node afterwards), we will use this channel to check if we are within 2 steps of the limit. See the implementation of `IsLastOrSecondToLastStepManager` below.\n", + "\n", + "This implementation very closely mirrors the implementation of `isLastStep` (which you can use by calling `from langgraph.managed import IsLastStep` and then decorating state keys with the `isLastStep` type), but in this case we check if we are on the last OR second-to-last step, instead of just the last step.\n", + "\n", + "Now, when we run our graph we should receive no errors and instead get the last value of the state before the recursion limit was hit."
+ ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [], + "source": [ + "from typing_extensions import TypedDict\n", + "from langgraph.graph import StateGraph\n", + "from typing import Annotated\n", + "\n", + "from langgraph.managed.base import ManagedValue\n", + "\n", + "\n", + "class IsLastOrSecondToLastStepManager(ManagedValue[bool]):\n", + " def __call__(self, step: int) -> bool:\n", + " limit = self.config.get(\"recursion_limit\", 0)\n", + " return step >= limit - 2\n", + " \n", + "class State(TypedDict):\n", + " value: str\n", + " action_result: str\n", + " is_last_step: Annotated[bool, IsLastOrSecondToLastStepManager]\n", + "\n", + "def router(state: State):\n", + " # Force the agent to end if it is on the last step\n", + " if state['is_last_step']:\n", + " return \"__end__\"\n", + " if state['value'] == \"end\":\n", + " return \"__end__\"\n", + " else:\n", + " return \"action\"\n", + "\n", + "def decision_node(state):\n", + " return {'value':'keep going!'}\n", + "\n", + "def action_node(state: State):\n", + " # Do your action here ...\n", + " return {'action_result':'what a great result!'}\n", + "\n", + "workflow = StateGraph(State)\n", + "workflow.add_node('decision',decision_node)\n", + "workflow.add_node('action',action_node)\n", + "workflow.add_edge(START,'decision')\n", + "workflow.add_conditional_edges('decision',router)\n", + "workflow.add_edge('action','decision')\n", + "app = workflow.compile()" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'value': 'keep going!', 'action_result': 'what a great result!'}" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "app.invoke({\"value\":\"hi!\"})" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Perfect! Our code ran with no error, just as we expected!" 
+ ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 84cf273269..05f9a7adae 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -182,6 +182,7 @@ nav: - Add node retries: how-tos/node-retries.ipynb - Return structured output from a ReAct agent: how-tos/react-agent-structured-output.ipynb - Pass custom LangSmith run ID for graph runs: how-tos/run-id-langsmith.ipynb + - Return state before hitting recursion limit: how-tos/return-when-recursion-limit-hits.ipynb - Prebuilt ReAct Agent: - Create a ReAct agent: how-tos/create-react-agent.ipynb - Add memory to a ReAct agent: how-tos/create-react-agent-memory.ipynb From 96f9d29a43e8c786e193f5bc9debeff7b37a4ddd Mon Sep 17 00:00:00 2001 From: Isaac Francisco <78627776+isahers1@users.noreply.github.com> Date: Thu, 19 Sep 2024 11:00:31 -0700 Subject: [PATCH 06/17] stop before recursion limit callout (#1769) --- docs/docs/how-tos/recursion-limit.ipynb | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/docs/how-tos/recursion-limit.ipynb b/docs/docs/how-tos/recursion-limit.ipynb index 19c9956b04..7c24e79fed 100644 --- a/docs/docs/how-tos/recursion-limit.ipynb +++ b/docs/docs/how-tos/recursion-limit.ipynb @@ -8,6 +8,9 @@ "\n", "You can set the graph recursion limit when invoking or streaming the graph. The recursion limit sets the number of supersteps that the graph is allowed to execute before it raises an error. Read more about the concept of recursion limits [here](https://langchain-ai.github.io/langgraph/concepts/low_level/#recursion-limit). 
Let's see an example of this in a simple graph with parallel branches to better understand exactly how the recursion limit works.\n", "\n", + "If you want to see an example of how you can return the last value of your state instead of receiving a recursion limit error from your graph, read [this how-to](https://langchain-ai.github.io/langgraph/how-tos/return-when-recursion-limit-hits/).\n", + "\n", + "\n", "## Setup\n", "\n", "First, let's install the required packages" From 9a8cc75ea29e170c1a077f423d2a2b9fbe5c935c Mon Sep 17 00:00:00 2001 From: Nuno Campos Date: Thu, 19 Sep 2024 11:38:41 -0700 Subject: [PATCH 07/17] Fix some typing issues in langgraph lib --- .../langgraph/checkpoint/sqlite/aio.py | 3 +- .../langgraph/checkpoint/base/__init__.py | 5 +- .../langgraph/checkpoint/serde/types.py | 11 +-- libs/langgraph/Makefile | 3 +- libs/langgraph/bench/react_agent.py | 2 +- libs/langgraph/langgraph/_api/deprecation.py | 8 +- libs/langgraph/langgraph/channels/binop.py | 2 +- .../langgraph/channels/named_barrier_value.py | 5 +- libs/langgraph/langgraph/graph/graph.py | 47 ++++++----- libs/langgraph/langgraph/graph/message.py | 15 +++- libs/langgraph/langgraph/graph/state.py | 22 ++++-- libs/langgraph/langgraph/managed/base.py | 6 +- libs/langgraph/langgraph/managed/context.py | 39 ++++++--- .../langgraph/managed/shared_value.py | 11 +-- .../langgraph/prebuilt/chat_agent_executor.py | 34 ++++---- .../langgraph/prebuilt/tool_executor.py | 7 +- .../langgraph/langgraph/prebuilt/tool_node.py | 7 +- .../langgraph/prebuilt/tool_validator.py | 2 +- libs/langgraph/langgraph/pregel/__init__.py | 79 ++++++++++--------- libs/langgraph/langgraph/pregel/algo.py | 48 +++++++---- libs/langgraph/langgraph/pregel/debug.py | 16 +++- libs/langgraph/langgraph/pregel/executor.py | 10 ++- libs/langgraph/langgraph/pregel/io.py | 10 +-- libs/langgraph/langgraph/pregel/loop.py | 53 +++++++------ libs/langgraph/langgraph/pregel/manager.py | 10 +-- libs/langgraph/langgraph/pregel/messages.py | 
15 ++-- libs/langgraph/langgraph/pregel/read.py | 2 +- libs/langgraph/langgraph/pregel/retry.py | 4 +- libs/langgraph/langgraph/pregel/runner.py | 48 +++++------ libs/langgraph/langgraph/pregel/types.py | 4 +- libs/langgraph/langgraph/pregel/utils.py | 4 +- libs/langgraph/langgraph/pregel/write.py | 4 +- libs/langgraph/langgraph/utils/config.py | 33 +++++--- libs/langgraph/langgraph/utils/fields.py | 2 +- libs/langgraph/langgraph/utils/pydantic.py | 2 +- libs/langgraph/langgraph/utils/queue.py | 10 ++- libs/langgraph/langgraph/utils/runnable.py | 62 ++++++++++----- libs/langgraph/poetry.lock | 74 ++++++++++------- libs/langgraph/pyproject.toml | 9 ++- libs/langgraph/tests/test_pregel_async.py | 2 +- 40 files changed, 432 insertions(+), 298 deletions(-) diff --git a/libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/aio.py b/libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/aio.py index 39476ae8e6..d2347006e1 100644 --- a/libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/aio.py +++ b/libs/checkpoint-sqlite/langgraph/checkpoint/sqlite/aio.py @@ -7,7 +7,6 @@ Callable, Dict, Iterator, - List, Optional, Sequence, Tuple, @@ -216,7 +215,7 @@ def put( ).result() def put_writes( - self, config: RunnableConfig, writes: List[Tuple[str, Any]], task_id: str + self, config: RunnableConfig, writes: Sequence[Tuple[str, Any]], task_id: str ) -> None: return asyncio.run_coroutine_threadsafe( self.aput_writes(config, writes, task_id), self.loop diff --git a/libs/checkpoint/langgraph/checkpoint/base/__init__.py b/libs/checkpoint/langgraph/checkpoint/base/__init__.py index 80441dad1d..822389cce9 100644 --- a/libs/checkpoint/langgraph/checkpoint/base/__init__.py +++ b/libs/checkpoint/langgraph/checkpoint/base/__init__.py @@ -10,6 +10,7 @@ Mapping, NamedTuple, Optional, + Sequence, Tuple, TypedDict, Union, @@ -301,7 +302,7 @@ def put( def put_writes( self, config: RunnableConfig, - writes: List[Tuple[str, Any]], + writes: Sequence[Tuple[str, Any]], task_id: str, ) -> None: """Store 
intermediate writes linked to a checkpoint. @@ -393,7 +394,7 @@ async def aput( async def aput_writes( self, config: RunnableConfig, - writes: List[Tuple[str, Any]], + writes: Sequence[Tuple[str, Any]], task_id: str, ) -> None: """Asynchronously store intermediate writes linked to a checkpoint. diff --git a/libs/checkpoint/langgraph/checkpoint/serde/types.py b/libs/checkpoint/langgraph/checkpoint/serde/types.py index f86c2e558f..43a5bf8789 100644 --- a/libs/checkpoint/langgraph/checkpoint/serde/types.py +++ b/libs/checkpoint/langgraph/checkpoint/serde/types.py @@ -1,7 +1,5 @@ from typing import ( Any, - AsyncGenerator, - Generator, Optional, Protocol, Sequence, @@ -9,7 +7,6 @@ runtime_checkable, ) -from langchain_core.runnables import RunnableConfig from typing_extensions import Self ERROR = "__error__" @@ -31,13 +28,7 @@ def UpdateType(self) -> Any: ... def checkpoint(self) -> Optional[C]: ... - def from_checkpoint( - self, checkpoint: Optional[C], config: RunnableConfig - ) -> Generator[Self, None, None]: ... - - async def afrom_checkpoint( - self, checkpoint: Optional[C], config: RunnableConfig - ) -> AsyncGenerator[Self, None]: ... + def from_checkpoint(self, checkpoint: Optional[C]) -> Self: ... def update(self, values: Sequence[Update]) -> bool: ... diff --git a/libs/langgraph/Makefile b/libs/langgraph/Makefile index 3d8a175e73..62ef303bba 100644 --- a/libs/langgraph/Makefile +++ b/libs/langgraph/Makefile @@ -74,7 +74,8 @@ lint lint_diff lint_package lint_tests: poetry run ruff check . 
[ "$(PYTHON_FILES)" = "" ] || poetry run ruff format $(PYTHON_FILES) --diff [ "$(PYTHON_FILES)" = "" ] || poetry run ruff check --select I $(PYTHON_FILES) - [ "$(PYTHON_FILES)" = "" ] || mkdir -p $(MYPY_CACHE) || poetry run mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE) + [ "$(PYTHON_FILES)" = "" ] || mkdir -p $(MYPY_CACHE) + [ "$(PYTHON_FILES)" = "" ] || poetry run mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE) format format_diff: poetry run ruff format $(PYTHON_FILES) diff --git a/libs/langgraph/bench/react_agent.py b/libs/langgraph/bench/react_agent.py index 4ad671f898..a6f84f2dc5 100644 --- a/libs/langgraph/bench/react_agent.py +++ b/libs/langgraph/bench/react_agent.py @@ -14,7 +14,7 @@ from langgraph.pregel import Pregel -def react_agent(n_tools: int, checkpointer: BaseCheckpointSaver) -> Pregel: +def react_agent(n_tools: int, checkpointer: Optional[BaseCheckpointSaver]) -> Pregel: class FakeFuntionChatModel(FakeMessagesListChatModel): def bind_tools(self, functions: list): return self diff --git a/libs/langgraph/langgraph/_api/deprecation.py b/libs/langgraph/langgraph/_api/deprecation.py index 6fa419e832..c93e09de86 100644 --- a/libs/langgraph/langgraph/_api/deprecation.py +++ b/libs/langgraph/langgraph/_api/deprecation.py @@ -21,14 +21,14 @@ def decorator(obj: Union[F, C]) -> Union[F, C]: f" removed in {removal_str}. Use {alternative} instead.{example}" ) if isinstance(obj, type): - original_init = obj.__init__ + original_init = obj.__init__ # type: ignore[misc] @functools.wraps(original_init) - def new_init(self, *args: Any, **kwargs: Any) -> None: + def new_init(self, *args: Any, **kwargs: Any) -> None: # type: ignore[no-untyped-def] warnings.warn(message, LangGraphDeprecationWarning, stacklevel=2) original_init(self, *args, **kwargs) - obj.__init__ = new_init + obj.__init__ = new_init # type: ignore[misc] docstring = ( f"**Deprecated**: This class is deprecated as of version {since}. 
" @@ -68,7 +68,7 @@ def deprecated_parameter( ) -> Callable[[F], F]: def decorator(func: F) -> F: @functools.wraps(func) - def wrapper(*args, **kwargs): + def wrapper(*args, **kwargs): # type: ignore[no-untyped-def] if arg_name in kwargs: warnings.warn( f"Parameter '{arg_name}' in function '{func.__name__}' is " diff --git a/libs/langgraph/langgraph/channels/binop.py b/libs/langgraph/langgraph/channels/binop.py index d3fe4fce2f..a2360142b7 100644 --- a/libs/langgraph/langgraph/channels/binop.py +++ b/libs/langgraph/langgraph/channels/binop.py @@ -14,7 +14,7 @@ # Adapted from typing_extensions -def _strip_extras(t): +def _strip_extras(t): # type: ignore[no-untyped-def] """Strips Annotated, Required and NotRequired from a given type.""" if hasattr(t, "__origin__"): return _strip_extras(t.__origin__) diff --git a/libs/langgraph/langgraph/channels/named_barrier_value.py b/libs/langgraph/langgraph/channels/named_barrier_value.py index a804a30522..4a1d990ca8 100644 --- a/libs/langgraph/langgraph/channels/named_barrier_value.py +++ b/libs/langgraph/langgraph/channels/named_barrier_value.py @@ -11,10 +11,13 @@ class NamedBarrierValue(Generic[Value], BaseChannel[Value, Value, set[Value]]): __slots__ = ("names", "seen") + names: set[Value] + seen: set[Value] + def __init__(self, typ: Type[Value], names: set[Value]) -> None: super().__init__(typ) self.names = names - self.seen = set() + self.seen: set[str] = set() def __eq__(self, value: object) -> bool: return isinstance(value, NamedBarrierValue) and value.names == self.names diff --git a/libs/langgraph/langgraph/graph/graph.py b/libs/langgraph/langgraph/graph/graph.py index b04594bdd3..706e5253ca 100644 --- a/libs/langgraph/langgraph/graph/graph.py +++ b/libs/langgraph/langgraph/graph/graph.py @@ -56,9 +56,11 @@ class Branch(NamedTuple): def run( self, - writer: Callable[[list[str], RunnableConfig], None], + writer: Callable[ + [list[Union[str, Send]], RunnableConfig], Optional[ChannelWrite] + ], reader: 
Optional[Callable[[RunnableConfig], Any]] = None, - ) -> None: + ) -> RunnableCallable: return ChannelWrite.register_writer( RunnableCallable( func=self._route, @@ -75,8 +77,10 @@ def _route( input: Any, config: RunnableConfig, *, - reader: Optional[Callable[[], Any]], - writer: Callable[[list[str], RunnableConfig], None], + reader: Optional[Callable[[RunnableConfig], Any]], + writer: Callable[ + [list[Union[str, Send]], RunnableConfig], Optional[ChannelWrite] + ], ) -> Runnable: if reader: value = reader(config) @@ -94,8 +98,10 @@ async def _aroute( input: Any, config: RunnableConfig, *, - reader: Optional[Callable[[], Any]], - writer: Callable[[list[str], RunnableConfig], Optional[Runnable]], + reader: Optional[Callable[[RunnableConfig], Any]], + writer: Callable[ + [list[Union[str, Send]], RunnableConfig], Optional[ChannelWrite] + ], ) -> Runnable: if reader: value = await asyncio.to_thread(reader, config) @@ -110,7 +116,9 @@ async def _aroute( def _finish( self, - writer: Callable[[list[str], RunnableConfig], None], + writer: Callable[ + [list[Union[str, Send]], RunnableConfig], Optional[ChannelWrite] + ], input: Any, result: Any, config: RunnableConfig, @@ -378,8 +386,8 @@ def validate(self, interrupt: Optional[Sequence[str]] = None) -> None: def compile( self, checkpointer: Optional[BaseCheckpointSaver] = None, - interrupt_before: Optional[Union[All, Sequence[str]]] = None, - interrupt_after: Optional[Union[All, Sequence[str]]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, debug: bool = False, ) -> "CompiledGraph": # assign default values @@ -451,7 +459,7 @@ def attach_edge(self, start: str, end: str) -> None: else: # subscribe to start channel self.nodes[end].triggers.append(start) - self.nodes[end].channels.append(start) + cast(list[str], self.nodes[end].channels).append(start) def attach_branch(self, start: str, name: str, branch: Branch) -> None: def branch_writer( @@ -530,17 
+538,18 @@ def add_edge( subgraph.trim_first_node() subgraph.trim_last_node() if len(subgraph.nodes) > 1: - end_nodes[key], start_nodes[key] = graph.extend( - subgraph, prefix=key - ) + e, s = graph.extend(subgraph, prefix=key) + if s is None or e is None: + raise ValueError(f"Could not extend subgraph {key}") + end_nodes[key], start_nodes[key] = e, s else: - n = graph.add_node(node, key, metadata=metadata or None) - start_nodes[key] = n - end_nodes[key] = n + nn = graph.add_node(node, key, metadata=metadata or None) + start_nodes[key] = nn + end_nodes[key] = nn else: - n = graph.add_node(node, key, metadata=metadata or None) - start_nodes[key] = n - end_nodes[key] = n + nn = graph.add_node(node, key, metadata=metadata or None) + start_nodes[key] = nn + end_nodes[key] = nn for start, end in sorted(self.builder._all_edges): add_edge(start, end) for start, branches in self.builder.branches.items(): diff --git a/libs/langgraph/langgraph/graph/message.py b/libs/langgraph/langgraph/graph/message.py index 402812f841..34bc9c0907 100644 --- a/libs/langgraph/langgraph/graph/message.py +++ b/libs/langgraph/langgraph/graph/message.py @@ -1,8 +1,9 @@ import uuid -from typing import Annotated, TypedDict, Union +from typing import Annotated, TypedDict, Union, cast from langchain_core.messages import ( AnyMessage, + BaseMessageChunk, MessageLikeRepresentation, RemoveMessage, convert_to_messages, @@ -66,8 +67,14 @@ def add_messages(left: Messages, right: Messages) -> Messages: if not isinstance(right, list): right = [right] # coerce to message - left = [message_chunk_to_message(m) for m in convert_to_messages(left)] - right = [message_chunk_to_message(m) for m in convert_to_messages(right)] + left = [ + message_chunk_to_message(cast(BaseMessageChunk, m)) + for m in convert_to_messages(left) + ] + right = [ + message_chunk_to_message(cast(BaseMessageChunk, m)) + for m in convert_to_messages(right) + ] # assign missing ids for m in left: if m.id is None: @@ -144,7 +151,7 @@ class 
MessageGraph(StateGraph): """ def __init__(self) -> None: - super().__init__(Annotated[list[AnyMessage], add_messages]) + super().__init__(Annotated[list[AnyMessage], add_messages]) # type: ignore[arg-type] class MessagesState(TypedDict): diff --git a/libs/langgraph/langgraph/graph/state.py b/libs/langgraph/langgraph/graph/state.py index 33601dd5e3..e66cff53da 100644 --- a/libs/langgraph/langgraph/graph/state.py +++ b/libs/langgraph/langgraph/graph/state.py @@ -66,7 +66,7 @@ def _warn_invalid_state_schema(schema: Union[Type[Any], Any]) -> None: class StateNodeSpec(NamedTuple): runnable: Runnable - metadata: dict[str, Any] + metadata: Optional[dict[str, Any]] input: Type[Any] retry_policy: Optional[RetryPolicy] @@ -318,7 +318,11 @@ def add_node( ) if not isinstance(node, str): action = node - node = getattr(action, "name", action.__name__) + node = getattr(action, "name", getattr(action, "__name__")) + if node is None: + raise ValueError( + "Node name must be provided if action is not a function" + ) if node in self.nodes: raise ValueError(f"Node `{node}` already present.") if node == END or node == START: @@ -392,8 +396,8 @@ def compile( checkpointer: Optional[BaseCheckpointSaver] = None, *, store: Optional[BaseStore] = None, - interrupt_before: Optional[Union[All, Sequence[str]]] = None, - interrupt_after: Optional[Union[All, Sequence[str]]] = None, + interrupt_before: Optional[Union[All, list[str]]] = None, + interrupt_after: Optional[Union[All, list[str]]] = None, debug: bool = False, ) -> "CompiledStateGraph": """Compiles the state graph into a `CompiledGraph` object. 
@@ -554,7 +558,7 @@ def _get_state_key(input: Union[None, dict, Any], *, key: str) -> Any: ), ], ) - else: + elif node is not None: input_schema = node.input if node else self.builder.schema input_values = {k: k for k in self.builder.schemas[input_schema]} is_single_input = len(input_values) == 1 and "__root__" in input_values @@ -582,6 +586,8 @@ def _get_state_key(input: Union[None, dict, Any], *, key: str) -> Any: retry_policy=node.retry_policy, bound=node.runnable, ) + else: + raise RuntimeError def attach_edge(self, starts: Union[str, Sequence[str]], end: str) -> None: if isinstance(starts, str): @@ -613,7 +619,7 @@ def attach_edge(self, starts: Union[str, Sequence[str]], end: str) -> None: def attach_branch(self, start: str, name: str, branch: Branch) -> None: def branch_writer( packets: list[Union[str, Send]], config: RunnableConfig - ) -> Optional[ChannelWrite]: + ) -> None: if filtered := [p for p in packets if p != END]: writes = [ ( @@ -782,12 +788,12 @@ def _get_schema( else: keys = list(schemas[typ].keys()) if len(keys) == 1 and keys[0] == "__root__": - return create_model( # type: ignore[call-overload] + return create_model( name, root=(channels[keys[0]].UpdateType, None), ) else: - return create_model( # type: ignore[call-overload] + return create_model( name, field_definitions={ k: ( diff --git a/libs/langgraph/langgraph/managed/base.py b/libs/langgraph/langgraph/managed/base.py index b863889309..3d4eb69f3d 100644 --- a/libs/langgraph/langgraph/managed/base.py +++ b/libs/langgraph/langgraph/managed/base.py @@ -106,7 +106,9 @@ def is_writable_managed_value(value: Any) -> TypeGuard[Type[WritableManagedValue class ManagedValueMapping(dict[str, ManagedValue]): - def replace_runtime_values(self, step: int, values: Union[dict[str, Any], Any]): + def replace_runtime_values( + self, step: int, values: Union[dict[str, Any], Any] + ) -> None: if not self or not values: return if all(not mv.runtime for mv in self.values()): @@ -128,7 +130,7 @@ def 
replace_runtime_values(self, step: int, values: Union[dict[str, Any], Any]): def replace_runtime_placeholders( self, step: int, values: Union[dict[str, Any], Any] - ): + ) -> None: if not self or not values: return if all(not mv.runtime for mv in self.values()): diff --git a/libs/langgraph/langgraph/managed/context.py b/libs/langgraph/langgraph/managed/context.py index 43cff5e673..df64419ec6 100644 --- a/libs/langgraph/langgraph/managed/context.py +++ b/libs/langgraph/langgraph/managed/context.py @@ -4,7 +4,9 @@ Any, AsyncContextManager, AsyncIterator, + Callable, ContextManager, + Generic, Iterator, Optional, Type, @@ -17,15 +19,26 @@ from langgraph.managed.base import ConfiguredManagedValue, ManagedValue, V -class Context(ManagedValue): +class Context(ManagedValue[V], Generic[V]): runtime = True value: V @staticmethod def of( - ctx: Union[None, Type[ContextManager[V]], Type[AsyncContextManager[V]]] = None, - actx: Optional[Type[AsyncContextManager[V]]] = None, + ctx: Union[ + None, + Callable[..., ContextManager[V]], + Type[ContextManager[V]], + Callable[..., AsyncContextManager[V]], + Type[AsyncContextManager[V]], + ] = None, + actx: Optional[ + Union[ + Callable[..., AsyncContextManager[V]], + Type[AsyncContextManager[V]], + ] + ] = None, ) -> ConfiguredManagedValue: if ctx is None and actx is None: raise ValueError("Must provide either sync or async context manager.") @@ -40,11 +53,11 @@ def enter(cls, config: RunnableConfig, **kwargs: Any) -> Iterator[Self]: "Synchronous context manager not found. Please initialize Context value with a sync context manager, or invoke your graph asynchronously." 
) ctx = ( - self.ctx(config) + self.ctx(config) # type: ignore[call-arg] if signature(self.ctx).parameters.get("config") else self.ctx() ) - with ctx as v: + with ctx as v: # type: ignore[union-attr] self.value = v yield self @@ -54,24 +67,32 @@ async def aenter(cls, config: RunnableConfig, **kwargs: Any) -> AsyncIterator[Se async with super().aenter(config, **kwargs) as self: if self.actx is not None: ctx = ( - self.actx(config) + self.actx(config) # type: ignore[call-arg] if signature(self.actx).parameters.get("config") else self.actx() ) - else: + elif self.ctx is not None: ctx = ( - self.ctx(config) + self.ctx(config) # type: ignore if signature(self.ctx).parameters.get("config") else self.ctx() ) + else: + raise ValueError( + "Asynchronous context manager not found. Please initialize Context value with an async context manager, or invoke your graph synchronously." + ) if hasattr(ctx, "__aenter__"): async with ctx as v: self.value = v yield self - else: + elif hasattr(ctx, "__enter__") and hasattr(ctx, "__exit__"): with ctx as v: self.value = v yield self + else: + raise ValueError( + "Context manager must have either __enter__ or __aenter__ method." 
+ ) def __init__( self, diff --git a/libs/langgraph/langgraph/managed/shared_value.py b/libs/langgraph/langgraph/managed/shared_value.py index f5e0561bda..9a624c685b 100644 --- a/libs/langgraph/langgraph/managed/shared_value.py +++ b/libs/langgraph/langgraph/managed/shared_value.py @@ -7,6 +7,7 @@ Optional, Sequence, Type, + cast, ) from langchain_core.runnables import RunnableConfig @@ -30,7 +31,7 @@ # Adapted from typing_extensions -def _strip_extras(t): +def _strip_extras(t): # type: ignore[no-untyped-def] """Strips Annotated, Required and NotRequired from a given type.""" if hasattr(t, "__origin__"): return _strip_extras(t.__origin__) @@ -82,9 +83,9 @@ def __init__( raise ValueError("SharedValue must be a dict") self.scope = scope self.value: Value = {} - self.store: BaseStore = config["configurable"].get(CONFIG_KEY_STORE) + self.store = cast(BaseStore, config["configurable"].get(CONFIG_KEY_STORE)) if self.store is None: - self.ns: Optional[str] = None + pass elif scope_value := config["configurable"].get(self.scope): self.ns = f"scoped:{scope}:{key}:{scope_value}" else: @@ -98,12 +99,12 @@ def __call__(self, step: int) -> Value: def _process_update( self, values: Sequence[Update] ) -> list[tuple[str, str, Optional[dict[str, Any]]]]: - writes = [] + writes: list[tuple[str, str, Optional[dict[str, Any]]]] = [] for vv in values: for k, v in vv.items(): if v is None: if k in self.value: - self.value[k] = None + del self.value[k] writes.append((self.ns, k, None)) elif not isinstance(v, dict): raise InvalidUpdateError("Received a non-dict value") diff --git a/libs/langgraph/langgraph/prebuilt/chat_agent_executor.py b/libs/langgraph/langgraph/prebuilt/chat_agent_executor.py index d5a1dc88e7..d4a6157cb6 100644 --- a/libs/langgraph/langgraph/prebuilt/chat_agent_executor.py +++ b/libs/langgraph/langgraph/prebuilt/chat_agent_executor.py @@ -1,6 +1,7 @@ from typing import ( Annotated, Callable, + Literal, Optional, Sequence, Type, @@ -9,7 +10,7 @@ Union, ) -from 
langchain_core.language_models import LanguageModelLike +from langchain_core.language_models import BaseChatModel from langchain_core.messages import ( AIMessage, BaseMessage, @@ -129,15 +130,15 @@ def _get_model_preprocessing_runnable( @deprecated_parameter("messages_modifier", "0.1.9", "state_modifier", removal="0.3.0") def create_react_agent( - model: LanguageModelLike, + model: BaseChatModel, tools: Union[ToolExecutor, Sequence[BaseTool], ToolNode], *, state_schema: Optional[StateSchemaType] = None, messages_modifier: Optional[MessagesModifier] = None, state_modifier: Optional[StateModifier] = None, checkpointer: Optional[BaseCheckpointSaver] = None, - interrupt_before: Optional[Sequence[str]] = None, - interrupt_after: Optional[Sequence[str]] = None, + interrupt_before: Optional[list[str]] = None, + interrupt_after: Optional[list[str]] = None, debug: bool = False, ) -> CompiledGraph: """Creates a graph that works with a chat model that utilizes tool calling. @@ -421,7 +422,7 @@ class Agent,Tools otherClass tool_classes = tools.tools tool_node = ToolNode(tool_classes) elif isinstance(tools, ToolNode): - tool_classes = tools.tools_by_name.values() + tool_classes = list(tools.tools_by_name.values()) tool_node = tools else: tool_classes = tools @@ -429,11 +430,11 @@ class Agent,Tools otherClass model = model.bind_tools(tool_classes) # Define the function that determines whether to continue or not - def should_continue(state: AgentState): + def should_continue(state: AgentState) -> Literal["continue", "end"]: messages = state["messages"] last_message = messages[-1] # If there is no function call, then we finish - if not last_message.tool_calls: + if not isinstance(last_message, AIMessage) or not last_message.tool_calls: return "end" # Otherwise if there is, we continue else: @@ -443,12 +444,13 @@ def should_continue(state: AgentState): model_runnable = preprocessor | model # Define the function that calls the model - def call_model( - state: AgentState, - config: 
RunnableConfig, - ): + def call_model(state: AgentState, config: RunnableConfig) -> AgentState: response = model_runnable.invoke(state, config) - if state["is_last_step"] and response.tool_calls: + if ( + state["is_last_step"] + and isinstance(response, AIMessage) + and response.tool_calls + ): return { "messages": [ AIMessage( @@ -460,9 +462,13 @@ def call_model( # We return a list, because this will get added to the existing list return {"messages": [response]} - async def acall_model(state: AgentState, config: RunnableConfig): + async def acall_model(state: AgentState, config: RunnableConfig) -> AgentState: response = await model_runnable.ainvoke(state, config) - if state["is_last_step"] and response.tool_calls: + if ( + state["is_last_step"] + and isinstance(response, AIMessage) + and response.tool_calls + ): return { "messages": [ AIMessage( diff --git a/libs/langgraph/langgraph/prebuilt/tool_executor.py b/libs/langgraph/langgraph/prebuilt/tool_executor.py index 341f4874d1..d939f23d9b 100644 --- a/libs/langgraph/langgraph/prebuilt/tool_executor.py +++ b/libs/langgraph/langgraph/prebuilt/tool_executor.py @@ -1,4 +1,4 @@ -from typing import Any, Callable, Sequence, Union +from typing import Any, Callable, Sequence, Union, cast from langchain_core.load.serializable import Serializable from langchain_core.runnables import RunnableConfig @@ -101,10 +101,11 @@ def __init__( ) -> None: super().__init__(self._execute, afunc=self._aexecute, trace=False) tools_ = [ - tool if isinstance(tool, BaseTool) else create_tool(tool) for tool in tools + tool if isinstance(tool, BaseTool) else cast(BaseTool, create_tool(tool)) + for tool in tools ] self.tools = tools_ - self.tool_map = {t.name: t for t in tools} + self.tool_map = {t.name: t for t in tools_} self.invalid_tool_msg_template = invalid_tool_msg_template def _execute( diff --git a/libs/langgraph/langgraph/prebuilt/tool_node.py b/libs/langgraph/langgraph/prebuilt/tool_node.py index a596e829fc..c0f7c83f6b 100644 --- 
a/libs/langgraph/langgraph/prebuilt/tool_node.py +++ b/libs/langgraph/langgraph/prebuilt/tool_node.py @@ -94,7 +94,7 @@ def __init__( self.handle_tool_errors = handle_tool_errors for tool_ in tools: if not isinstance(tool_, BaseTool): - tool_ = create_tool(tool_) + tool_ = cast(BaseTool, create_tool(tool_)) self.tools_by_name[tool_.name] = tool_ def _func( @@ -188,10 +188,7 @@ def _parse_input( if not isinstance(message, AIMessage): raise ValueError("Last message is not an AIMessage") - tool_calls = [ - self._inject_state(call, input) - for call in cast(AIMessage, message).tool_calls - ] + tool_calls = [self._inject_state(call, input) for call in message.tool_calls] return tool_calls, output_type def _validate_tool_call(self, call: ToolCall) -> Optional[ToolMessage]: diff --git a/libs/langgraph/langgraph/prebuilt/tool_validator.py b/libs/langgraph/langgraph/prebuilt/tool_validator.py index 2222e7f2f3..401a35db71 100644 --- a/libs/langgraph/langgraph/prebuilt/tool_validator.py +++ b/libs/langgraph/langgraph/prebuilt/tool_validator.py @@ -211,7 +211,7 @@ def _func( """Validate and run tool calls synchronously.""" output_type, message = self._get_message(input) - def run_one(call: ToolCall): + def run_one(call: ToolCall) -> ToolMessage: schema = self.schemas_by_name[call["name"]] try: if issubclass(schema, BaseModel): diff --git a/libs/langgraph/langgraph/pregel/__init__.py b/libs/langgraph/langgraph/pregel/__init__.py index 8026906880..5389b7ddf1 100644 --- a/libs/langgraph/langgraph/pregel/__init__.py +++ b/libs/langgraph/langgraph/pregel/__init__.py @@ -95,7 +95,7 @@ patch_configurable, ) from langgraph.utils.pydantic import create_model -from langgraph.utils.queue import AsyncQueue, SyncQueue +from langgraph.utils.queue import AsyncQueue, SyncQueue # type: ignore[attr-defined] from langgraph.utils.runnable import RunnableCallable WriteValue = Union[Callable[[Input], Output], Any] @@ -172,9 +172,9 @@ def write_to( class Pregel(Runnable[Union[dict[str, Any], Any], 
Union[dict[str, Any], Any]]): - nodes: Mapping[str, PregelNode] + nodes: dict[str, PregelNode] - channels: Mapping[str, Union[BaseChannel, ManagedValueSpec]] + channels: dict[str, Union[BaseChannel, ManagedValueSpec]] stream_mode: StreamMode = "values" """Mode to stream output, defaults to 'values'.""" @@ -214,8 +214,8 @@ class Pregel(Runnable[Union[dict[str, Any], Any], Union[dict[str, Any], Any]]): def __init__( self, *, - nodes: Mapping[str, PregelNode], - channels: Mapping[str, Union[BaseChannel, ManagedValueSpec]] = None, + nodes: dict[str, PregelNode], + channels: Optional[dict[str, Union[BaseChannel, ManagedValueSpec]]], auto_validate: bool = True, stream_mode: StreamMode = "values", output_channels: Union[str, Sequence[str]], @@ -256,12 +256,14 @@ def copy(self, update: dict[str, Any]) -> Self: return self.__class__(**attrs) def with_config(self, config: RunnableConfig | None = None, **kwargs: Any) -> Self: - return self.copy({"config": merge_configs(self.config, config, kwargs)}) + return self.copy( + {"config": merge_configs(self.config, config, cast(RunnableConfig, kwargs))} + ) def validate(self) -> Self: validate_graph( self.nodes, - self.channels, + {k: v for k, v in self.channels.items() if isinstance(v, BaseChannel)}, self.input_channels, self.output_channels, self.stream_channels, @@ -312,11 +314,12 @@ def get_input_schema( if isinstance(self.input_channels, str): return super().get_input_schema(config) else: - return create_model( # type: ignore[call-overload] + return create_model( self.get_name("Input"), field_definitions={ k: (self.channels[k].UpdateType, None) for k in self.input_channels or self.channels.keys() + if isinstance(self.channels[k], BaseChannel) }, ) @@ -341,10 +344,12 @@ def get_output_schema( if isinstance(self.output_channels, str): return super().get_output_schema(config) else: - return create_model( # type: ignore[call-overload] + return create_model( self.get_name("Output"), field_definitions={ - k: 
(self.channels[k].ValueType, None) for k in self.output_channels + k: (self.channels[k].ValueType, None) + for k in self.output_channels + if isinstance(self.channels[k], BaseChannel) }, ) @@ -413,7 +418,7 @@ def _prepare_state_snapshot( self, config: RunnableConfig, saved: Optional[CheckpointTuple], - recurse: Optional[BaseCheckpointSaver] = False, + recurse: Optional[BaseCheckpointSaver] = None, ) -> StateSnapshot: if not saved: return StateSnapshot( @@ -486,7 +491,7 @@ async def _aprepare_state_snapshot( self, config: RunnableConfig, saved: Optional[CheckpointTuple], - recurse: Optional[BaseCheckpointSaver] = False, + recurse: Optional[BaseCheckpointSaver] = None, ) -> StateSnapshot: if not saved: return StateSnapshot( @@ -545,7 +550,7 @@ async def _aprepare_state_snapshot( } } task_states[task.id] = await subgraphs[task.name].aget_state( - config, subgraphs=recurse + config, subgraphs=True ) # assemble the state snapshot return StateSnapshot( @@ -828,7 +833,7 @@ def update_state( writers = self.nodes[as_node].flat_writers if not writers: raise InvalidUpdateError(f"Node {as_node} has no writers") - writes = deque() + writes: deque[tuple[str, Any]] = deque() task = PregelTaskWrites(as_node, writes, [INTERRUPT]) task_id = str(uuid5(UUID(checkpoint["id"]), INTERRUPT)) run = RunnableSequence(*writers) if len(writers) > 1 else writers[0] @@ -925,21 +930,12 @@ async def aupdate_state( ) step = saved.metadata.get("step", -1) if saved else -1 # merge configurable fields with previous checkpoint config - checkpoint_config = { - **config, - "configurable": { - **config["configurable"], - # TODO: add proper support for updating nested subgraph state - "checkpoint_ns": "", - }, - } + checkpoint_config = patch_configurable( + config, + {"checkpoint_ns": config["configurable"].get("checkpoint_ns", "")}, + ) if saved: - checkpoint_config = { - "configurable": { - **config.get("configurable", {}), - **saved.config["configurable"], - } - } + checkpoint_config = 
patch_configurable(config, saved.config["configurable"]) # find last node that updated the state, if not provided if values is None and as_node is None: next_config = await checkpointer.aput( @@ -986,7 +982,7 @@ async def aupdate_state( writers = self.nodes[as_node].flat_writers if not writers: raise InvalidUpdateError(f"Node {as_node} has no writers") - writes = deque() + writes: deque[tuple[str, Any]] = deque() task = PregelTaskWrites(as_node, writes, [INTERRUPT]) task_id = str(uuid5(UUID(checkpoint["id"]), INTERRUPT)) run = RunnableSequence(*writers) if len(writers) > 1 else writers[0] @@ -1052,7 +1048,7 @@ def _defaults( debug: Optional[bool], ) -> tuple[ bool, - Sequence[StreamMode], + set[StreamMode], Union[str, Sequence[str]], Optional[Sequence[str]], Optional[Sequence[str]], @@ -1079,7 +1075,7 @@ def _defaults( checkpointer = self.checkpointer return ( debug, - stream_mode, + set(stream_mode), output_keys, interrupt_before, interrupt_after, @@ -1249,7 +1245,7 @@ def output() -> Iterator: # a pending waiter to return immediately loop.stack.callback(stream._count.release) - def get_waiter() -> asyncio.Task[None]: + def get_waiter() -> concurrent.futures.Future[None]: nonlocal waiter if waiter is None or waiter.done(): waiter = loop.submit(stream.wait) @@ -1390,13 +1386,6 @@ def output() -> Iterator: else: yield payload - if subgraphs: - - def get_waiter() -> asyncio.Task[None]: - return aioloop.create_task(stream.wait()) - else: - get_waiter = None - config = ensure_config(self.config, config) callback_manager = get_async_callback_manager_for_config(config) run_manager = await callback_manager.on_chain_start( @@ -1437,6 +1426,11 @@ def get_waiter() -> asyncio.Task[None]: interrupt_after=interrupt_after, debug=debug, ) + # set up messages stream mode + if "messages" in stream_modes: + run_manager.inheritable_handlers.append( + StreamMessagesHandler(stream.put) + ) async with AsyncPregelLoop( input, stream=StreamProtocol(stream.put_nowait, stream_modes), @@ 
-1457,6 +1451,13 @@ def get_waiter() -> asyncio.Task[None]: # enable subgraph streaming if subgraphs: loop.config["configurable"][CONFIG_KEY_STREAM] = loop.stream + # enable concurrent streaming + if subgraphs or "messages" in stream_modes: + + def get_waiter() -> asyncio.Task[None]: + return aioloop.create_task(stream.wait()) + else: + get_waiter = None # Similarly to Bulk Synchronous Parallel / Pregel model # computation proceeds in steps, while there are channel updates # channel updates from step N are only visible in step N+1 diff --git a/libs/langgraph/langgraph/pregel/algo.py b/libs/langgraph/langgraph/pregel/algo.py index c2c5866e4a..bf4baeb0f5 100644 --- a/libs/langgraph/langgraph/pregel/algo.py +++ b/libs/langgraph/langgraph/pregel/algo.py @@ -4,6 +4,7 @@ from typing import ( Any, Callable, + Iterable, Iterator, Literal, Mapping, @@ -20,7 +21,12 @@ from langchain_core.runnables.config import RunnableConfig from langgraph.channels.base import BaseChannel -from langgraph.checkpoint.base import BaseCheckpointSaver, Checkpoint, copy_checkpoint +from langgraph.checkpoint.base import ( + BaseCheckpointSaver, + Checkpoint, + V, + copy_checkpoint, +) from langgraph.constants import ( CONFIG_KEY_CHECKPOINT_MAP, CONFIG_KEY_CHECKPOINTER, @@ -46,13 +52,18 @@ from langgraph.pregel.types import All, PregelExecutableTask, PregelTask from langgraph.utils.config import merge_configs, patch_config -EMPTY_SEQ = tuple() +EMPTY_SEQ: tuple[str, ...] = tuple() class WritesProtocol(Protocol): - name: str - writes: Sequence[tuple[str, Any]] - triggers: Sequence[str] + @property + def name(self) -> str: ... + + @property + def writes(self) -> Sequence[tuple[str, Any]]: ... + + @property + def triggers(self) -> Sequence[str]: ... 
class PregelTaskWrites(NamedTuple): @@ -64,14 +75,14 @@ class PregelTaskWrites(NamedTuple): def should_interrupt( checkpoint: Checkpoint, interrupt_nodes: Union[All, Sequence[str]], - tasks: list[PregelExecutableTask], + tasks: Iterable[PregelExecutableTask], ) -> list[PregelExecutableTask]: version_type = type(next(iter(checkpoint["channel_versions"].values()), None)) - null_version = version_type() + null_version = version_type() # type: ignore[misc] seen = checkpoint["versions_seen"].get(INTERRUPT, {}) # interrupt if any channel has been updated since last interrupt any_updates_since_prev_interrupt = any( - version > seen.get(chan, null_version) + version > seen.get(chan, null_version) # type: ignore[operator] for chan, version in checkpoint["channel_versions"].items() ) # and any triggered node is in interrupt_nodes list @@ -161,8 +172,8 @@ def increment(current: Optional[int], channel: BaseChannel) -> int: def apply_writes( checkpoint: Checkpoint, channels: Mapping[str, BaseChannel], - tasks: Sequence[WritesProtocol], - get_next_version: Optional[Callable[[int, BaseChannel], int]], + tasks: Iterable[WritesProtocol], + get_next_version: Optional[Callable[[Optional[V], BaseChannel], V]], ) -> dict[str, list[Any]]: # update seen versions for task in tasks: @@ -189,7 +200,8 @@ def apply_writes( }: if channels[chan].consume() and get_next_version is not None: checkpoint["channel_versions"][chan] = get_next_version( - max_version, channels[chan] + max_version, # type: ignore[arg-type] + channels[chan], ) # clear pending sends @@ -222,7 +234,8 @@ def apply_writes( if chan in channels: if channels[chan].update(vals) and get_next_version is not None: checkpoint["channel_versions"][chan] = get_next_version( - max_version, channels[chan] + max_version, # type: ignore[arg-type] + channels[chan], ) updated_channels.add(chan) @@ -231,7 +244,8 @@ def apply_writes( if chan not in updated_channels: if channels[chan].update([]) and get_next_version is not None: 
checkpoint["channel_versions"][chan] = get_next_version( - max_version, channels[chan] + max_version, # type: ignore[arg-type] + channels[chan], ) # Return managed values writes to be applied externally @@ -280,7 +294,7 @@ def prepare_next_tasks( checkpointer: Optional[BaseCheckpointSaver] = None, manager: Union[None, ParentRunManager, AsyncParentRunManager] = None, ) -> Union[dict[str, PregelTask], dict[str, PregelExecutableTask]]: - tasks: Union[dict[str, PregelTask], dict[str, PregelExecutableTask]] = {} + tasks: dict[str, Union[PregelTask, PregelExecutableTask]] = {} # Consume pending packets for idx, _ in enumerate(checkpoint["pending_sends"]): if task := prepare_single_task( @@ -377,7 +391,7 @@ def prepare_single_task( managed.replace_runtime_placeholders(step, packet.arg) if proc.metadata: metadata.update(proc.metadata) - writes = deque() + writes: deque[tuple[str, Any]] = deque() return PregelExecutableTask( packet.node, packet.arg, @@ -438,7 +452,7 @@ def prepare_single_task( return proc = processes[name] version_type = type(next(iter(checkpoint["channel_versions"].values()), None)) - null_version = version_type() + null_version = version_type() # type: ignore[misc] if null_version is None: return seen = checkpoint["versions_seen"].get(name, {}) @@ -449,7 +463,7 @@ def prepare_single_task( if not isinstance( read_channel(channels, chan, return_exception=True), EmptyChannelError ) - and checkpoint["channel_versions"].get(chan, null_version) + and checkpoint["channel_versions"].get(chan, null_version) # type: ignore[operator] > seen.get(chan, null_version) ): try: diff --git a/libs/langgraph/langgraph/pregel/debug.py b/libs/langgraph/langgraph/pregel/debug.py index a83c957094..782d5a13c8 100644 --- a/libs/langgraph/langgraph/pregel/debug.py +++ b/libs/langgraph/langgraph/pregel/debug.py @@ -2,7 +2,17 @@ from dataclasses import asdict from datetime import datetime, timezone from pprint import pformat -from typing import Any, Iterator, Literal, Mapping, 
Optional, Sequence, TypedDict, Union +from typing import ( + Any, + Iterable, + Iterator, + Literal, + Mapping, + Optional, + Sequence, + TypedDict, + Union, +) from uuid import UUID from langchain_core.runnables.config import RunnableConfig @@ -48,8 +58,6 @@ class CheckpointPayload(TypedDict): class DebugOutputBase(TypedDict): timestamp: str step: int - type: str - payload: dict[str, Any] class DebugOutputTask(DebugOutputBase): @@ -201,7 +209,7 @@ def print_step_checkpoint( def tasks_w_writes( - tasks: list[PregelExecutableTask], + tasks: Iterable[Union[PregelTask, PregelExecutableTask]], pending_writes: Optional[list[PendingWrite]], states: Optional[dict[str, Union[RunnableConfig, StateSnapshot]]], ) -> tuple[PregelTask, ...]: diff --git a/libs/langgraph/langgraph/pregel/executor.py b/libs/langgraph/langgraph/pregel/executor.py index f606d4c28d..46f1c3f641 100644 --- a/libs/langgraph/langgraph/pregel/executor.py +++ b/libs/langgraph/langgraph/pregel/executor.py @@ -9,9 +9,11 @@ Awaitable, Callable, ContextManager, + Coroutine, Optional, Protocol, TypeVar, + cast, ) from langchain_core.runnables import RunnableConfig @@ -42,7 +44,7 @@ def __init__(self, config: RunnableConfig) -> None: self.executor = self.stack.enter_context(get_executor_for_config(config)) self.tasks: dict[concurrent.futures.Future, tuple[bool, bool]] = {} - def submit( + def submit( # type: ignore[valid-type] self, fn: Callable[P, T], *args: P.args, @@ -68,7 +70,7 @@ def done(self, task: concurrent.futures.Future) -> None: else: self.tasks.pop(task) - def __enter__(self) -> "submit": + def __enter__(self) -> Submit: return self.submit def __exit__( @@ -105,7 +107,7 @@ def __init__(self) -> None: self.sentinel = object() self.loop = asyncio.get_running_loop() - def submit( + def submit( # type: ignore[valid-type] self, fn: Callable[P, Awaitable[T]], *args: P.args, @@ -114,7 +116,7 @@ def submit( __reraise_on_exit__: bool = True, **kwargs: P.kwargs, ) -> asyncio.Task[T]: - coro = fn(*args, 
**kwargs) + coro = cast(Coroutine[None, None, T], fn(*args, **kwargs)) if self.context_not_supported: task = self.loop.create_task(coro, name=__name__) else: diff --git a/libs/langgraph/langgraph/pregel/io.py b/libs/langgraph/langgraph/pregel/io.py index a02afbdecd..ad2252c9d0 100644 --- a/libs/langgraph/langgraph/pregel/io.py +++ b/libs/langgraph/langgraph/pregel/io.py @@ -28,7 +28,7 @@ def read_channel( def read_channels( channels: Mapping[str, BaseChannel], - select: Union[list[str], str], + select: Union[Sequence[str], str], *, skip_empty: bool = True, ) -> Union[dict[str, Any], Any]: @@ -97,7 +97,7 @@ def __radd__(self, other: dict[str, Any]) -> "AddableUpdatesDict": raise TypeError("AddableUpdatesDict does not support right-side addition") -EMPTY_SEQ = tuple() +EMPTY_SEQ: tuple[str, ...] = tuple() def map_output_updates( @@ -131,16 +131,16 @@ def map_output_updates( for task, writes in output_tasks if any(chan in output_channels for chan, _ in writes) ) - grouped = {t.name: [] for t, _ in output_tasks} + grouped: dict[str, list[Any]] = {t.name: [] for t, _ in output_tasks} for node, value in updated: grouped[node].append(value) for node, value in grouped.items(): if len(value) == 0: - grouped[node] = None + grouped[node] = None # type: ignore[assignment] if len(value) == 1: grouped[node] = value[0] if cached: - grouped["__metadata__"] = {"cached": cached} + grouped["__metadata__"] = {"cached": cached} # type: ignore[assignment] yield AddableUpdatesDict(grouped) diff --git a/libs/langgraph/langgraph/pregel/loop.py b/libs/langgraph/langgraph/pregel/loop.py index 7560905edd..ba98b16053 100644 --- a/libs/langgraph/langgraph/pregel/loop.py +++ b/libs/langgraph/langgraph/pregel/loop.py @@ -14,7 +14,6 @@ Mapping, Optional, Sequence, - Tuple, Type, TypeVar, Union, @@ -28,6 +27,7 @@ from langgraph.channels.base import BaseChannel from langgraph.checkpoint.base import ( BaseCheckpointSaver, + ChannelVersions, Checkpoint, CheckpointMetadata, CheckpointTuple, @@ -92,7 
+92,7 @@ ) from langgraph.pregel.manager import AsyncChannelsManager, ChannelsManager from langgraph.pregel.read import PregelNode -from langgraph.pregel.types import PregelExecutableTask +from langgraph.pregel.types import PregelExecutableTask, StreamMode from langgraph.pregel.utils import get_new_channel_versions from langgraph.store.base import BaseStore from langgraph.store.batch import AsyncBatchedStore @@ -105,31 +105,32 @@ EMPTY_SEQ = () SPECIAL_CHANNELS = (ERROR, INTERRUPT, SCHEDULED) +StreamChunk = tuple[tuple[str, ...], str, Any] + class StreamProtocol: __slots__ = ("modes", "__call__") - modes: Sequence[Literal["values", "updates", "debug"]] + modes: set[StreamMode] - __call__: Callable[[Tuple[str, str, Any]], None] + __call__: Callable[[StreamChunk], None] def __init__( self, - __call__: Callable[[Tuple[str, str, Any]], None], - modes: Sequence[Literal["values", "updates", "debug"]], + __call__: Callable[[StreamChunk], None], + modes: set[StreamMode], ) -> None: self.__call__ = __call__ self.modes = modes -class DuplexStream(StreamProtocol): - def __init__(self, *streams: StreamProtocol) -> None: - def __call__(value: Tuple[str, str, Any]) -> None: - for stream in streams: - if value[1] in stream.modes: - stream(value) +def DuplexStream(*streams: StreamProtocol) -> StreamProtocol: + def __call__(value: StreamChunk) -> None: + for stream in streams: + if value[1] in stream.modes: + stream(value) # type: ignore - super().__init__(__call__, {mode for s in streams for mode in s.modes}) + return StreamProtocol(__call__, {mode for s in streams for mode in s.modes}) class PregelLoop: @@ -156,6 +157,7 @@ class PregelLoop: RunnableConfig, Sequence[tuple[str, Any]], str, + ChannelVersions, ], Any, ] @@ -209,7 +211,7 @@ def __init__( or CONFIG_KEY_DEDUPE_TASKS in config["configurable"] ) self.debug = debug - if CONFIG_KEY_STREAM in config["configurable"]: + if self.stream is not None and CONFIG_KEY_STREAM in config["configurable"]: self.stream = DuplexStream( 
self.stream, config["configurable"][CONFIG_KEY_STREAM] ) @@ -233,7 +235,7 @@ def __init__( else: self.checkpoint_config = config self.checkpoint_ns = ( - tuple(self.config["configurable"].get("checkpoint_ns").split(NS_SEP)) + tuple(cast(str, self.config["configurable"]["checkpoint_ns"]).split(NS_SEP)) if self.config["configurable"].get("checkpoint_ns") else () ) @@ -435,7 +437,7 @@ def tick( # debug flag if self.debug: - print_step_tasks(self.step, self.tasks.values()) + print_step_tasks(self.step, list(self.tasks.values())) return True @@ -482,6 +484,7 @@ def _first(self, *, input_keys: Union[str, Sequence[str]]) -> None: self.config, self.step, for_execution=True, + checkpointer=None, manager=None, ) # apply input writes @@ -589,7 +592,7 @@ def _emit( if mode not in self.stream.modes: return for v in values(*args, **kwargs): - self.stream((self.checkpoint_ns, mode, v)) + self.stream((self.checkpoint_ns, mode, v)) # type: ignore def _output_writes( self, task_id: str, writes: Sequence[tuple[str, Any]], *, cached: bool = False @@ -650,7 +653,7 @@ def __init__( self.checkpointer_put_writes = checkpointer.put_writes else: self.checkpointer_get_next_version = increment - self._checkpointer_put_after_previous = None + self._checkpointer_put_after_previous = None # type: ignore[assignment] self.checkpointer_put_writes = None def _checkpointer_put_after_previous( @@ -659,13 +662,15 @@ def _checkpointer_put_after_previous( config: RunnableConfig, checkpoint: Checkpoint, metadata: CheckpointMetadata, - new_versions: Optional[dict[str, Union[str, float, int]]], + new_versions: ChannelVersions, ) -> RunnableConfig: try: if prev is not None: prev.result() finally: - self.checkpointer.put(config, checkpoint, metadata, new_versions) + cast(BaseCheckpointSaver, self.checkpointer).put( + config, checkpoint, metadata, new_versions + ) def _update_mv(self, key: str, values: Sequence[Any]) -> None: return self.submit(cast(WritableManagedValue, self.managed[key]).update, values) @@ 
-766,7 +771,7 @@ def __init__( self.checkpointer_put_writes = checkpointer.aput_writes else: self.checkpointer_get_next_version = increment - self._checkpointer_put_after_previous = None + self._checkpointer_put_after_previous = None # type: ignore[method-assign] self.checkpointer_put_writes = None async def _checkpointer_put_after_previous( @@ -775,13 +780,15 @@ async def _checkpointer_put_after_previous( config: RunnableConfig, checkpoint: Checkpoint, metadata: CheckpointMetadata, - new_versions: Optional[dict[str, Union[str, float, int]]], + new_versions: ChannelVersions, ) -> RunnableConfig: try: if prev is not None: await prev finally: - await self.checkpointer.aput(config, checkpoint, metadata, new_versions) + await cast(BaseCheckpointSaver, self.checkpointer).aput( + config, checkpoint, metadata, new_versions + ) def _update_mv(self, key: str, values: Sequence[Any]) -> None: return self.submit( diff --git a/libs/langgraph/langgraph/pregel/manager.py b/libs/langgraph/langgraph/pregel/manager.py index a7e0b72833..c6d6c07aa1 100644 --- a/libs/langgraph/langgraph/pregel/manager.py +++ b/libs/langgraph/langgraph/pregel/manager.py @@ -28,8 +28,8 @@ def ChannelsManager( ) -> Iterator[tuple[Mapping[str, BaseChannel], ManagedValueMapping]]: """Manage channels for the lifetime of a Pregel invocation (multiple steps).""" config_for_managed = patch_configurable(config, {CONFIG_KEY_STORE: store}) - channel_specs: Mapping[str, BaseChannel] = {} - managed_specs: Mapping[str, ManagedValueSpec] = {} + channel_specs: dict[str, BaseChannel] = {} + managed_specs: dict[str, ManagedValueSpec] = {} for k, v in specs.items(): if isinstance(v, BaseChannel): channel_specs[k] = v @@ -66,11 +66,11 @@ async def AsyncChannelsManager( store: Optional[BaseStore] = None, *, skip_context: bool = False, -) -> AsyncIterator[Mapping[str, BaseChannel]]: +) -> AsyncIterator[tuple[Mapping[str, BaseChannel], ManagedValueMapping]]: """Manage channels for the lifetime of a Pregel invocation (multiple 
steps).""" config_for_managed = patch_configurable(config, {CONFIG_KEY_STORE: store}) - channel_specs: Mapping[str, BaseChannel] = {} - managed_specs: Mapping[str, ManagedValueSpec] = {} + channel_specs: dict[str, BaseChannel] = {} + managed_specs: dict[str, ManagedValueSpec] = {} for k, v in specs.items(): if isinstance(v, BaseChannel): channel_specs[k] = v diff --git a/libs/langgraph/langgraph/pregel/messages.py b/libs/langgraph/langgraph/pregel/messages.py index 3ade1dd5f8..0a96f0fee4 100644 --- a/libs/langgraph/langgraph/pregel/messages.py +++ b/libs/langgraph/langgraph/pregel/messages.py @@ -8,6 +8,8 @@ Optional, Sequence, Tuple, + Union, + cast, ) from uuid import UUID, uuid4 @@ -17,13 +19,14 @@ from langchain_core.tracers._streaming import T, _StreamingCallbackHandler from langgraph.constants import NS_SEP +from langgraph.pregel.loop import StreamChunk class StreamMessagesHandler(BaseCallbackHandler, _StreamingCallbackHandler): - def __init__(self, stream: Callable[[Tuple[str, str, Any]], None]): + def __init__(self, stream: Callable[[StreamChunk], None]): self.stream = stream - self.metadata: dict[str, tuple[str, dict[str, Any]]] = {} - self.seen = set() + self.metadata: dict[UUID, tuple[tuple[str, ...], dict[str, Any]]] = {} + self.seen: set[Union[int, str]] = set() def _emit( self, @@ -31,7 +34,7 @@ def _emit( message: BaseMessage, *, dedupe: bool = False, - ): + ) -> None: ident = id(message) if dedupe and message.id in self.seen: return @@ -65,7 +68,7 @@ def on_chat_model_start( ) -> Any: if metadata: self.metadata[run_id] = ( - tuple(metadata["langgraph_checkpoint_ns"].split(NS_SEP)), + tuple(cast(str, metadata["langgraph_checkpoint_ns"]).split(NS_SEP)), metadata, ) @@ -116,7 +119,7 @@ def on_chain_start( ) -> Any: if metadata and kwargs.get("name") == metadata.get("langgraph_node"): self.metadata[run_id] = ( - tuple(metadata["langgraph_checkpoint_ns"].split(NS_SEP)), + tuple(cast(str, metadata["langgraph_checkpoint_ns"]).split(NS_SEP)), metadata, ) 
diff --git a/libs/langgraph/langgraph/pregel/read.py b/libs/langgraph/langgraph/pregel/read.py index e0e483ffd5..79643a0902 100644 --- a/libs/langgraph/langgraph/pregel/read.py +++ b/libs/langgraph/langgraph/pregel/read.py @@ -27,7 +27,7 @@ from langgraph.utils.config import merge_configs from langgraph.utils.runnable import RunnableCallable, RunnableSeq -READ_TYPE = Callable[[str, bool], Union[Any, dict[str, Any]]] +READ_TYPE = Callable[[Union[str, Sequence[str]], bool], Union[Any, dict[str, Any]]] class ChannelRead(RunnableCallable): diff --git a/libs/langgraph/langgraph/pregel/retry.py b/libs/langgraph/langgraph/pregel/retry.py index 553b254688..90ccaa7d07 100644 --- a/libs/langgraph/langgraph/pregel/retry.py +++ b/libs/langgraph/langgraph/pregel/retry.py @@ -47,7 +47,7 @@ def run_with_retry( if not isinstance(exc, retry_policy.retry_on): raise elif callable(retry_policy.retry_on): - if not retry_policy.retry_on(exc): + if not retry_policy.retry_on(exc): # type: ignore[call-arg] raise else: raise TypeError( @@ -113,7 +113,7 @@ async def arun_with_retry( if not isinstance(exc, retry_policy.retry_on): raise elif callable(retry_policy.retry_on): - if not retry_policy.retry_on(exc): + if not retry_policy.retry_on(exc): # type: ignore[call-arg] raise else: raise TypeError( diff --git a/libs/langgraph/langgraph/pregel/runner.py b/libs/langgraph/langgraph/pregel/runner.py index 1f282c5842..7e11224857 100644 --- a/libs/langgraph/langgraph/pregel/runner.py +++ b/libs/langgraph/langgraph/pregel/runner.py @@ -45,12 +45,12 @@ def tick( yield # fast path if single task with no timeout if len(tasks) == 1 and timeout is None: - task = tasks[0] + t = tasks[0] try: - run_with_retry(task, retry_policy) - self.commit(task, None) + run_with_retry(t, retry_policy) + self.commit(t, None) except Exception as exc: - self.commit(task, exc) + self.commit(t, exc) if reraise: raise return @@ -64,16 +64,16 @@ def tick( # execute tasks, and wait for one to fail or all to finish. 
# each task is independent from all other concurrent tasks # yield updates/debug output as each task finishes - for task in tasks: - if not task.writes: + for t in tasks: + if not t.writes: futures[ self.submit( run_with_retry, - task, + t, retry_policy, __reraise_on_exit__=reraise, ) - ] = task + ] = t all_futures = futures.copy() end_time = timeout + time.monotonic() if timeout else None while len(futures) > (1 if get_waiter is not None else 0): @@ -88,7 +88,7 @@ def tick( task = futures.pop(fut) if task is None: # waiter task finished, schedule another - if inflight: + if inflight and get_waiter is not None: futures[get_waiter()] = None else: # task finished, commit writes @@ -119,12 +119,12 @@ async def atick( yield # fast path if single task with no waiter and no timeout if len(tasks) == 1 and get_waiter is None and timeout is None: - task = tasks[0] + t = tasks[0] try: - await arun_with_retry(task, retry_policy, stream=self.use_astream) - self.commit(task, None) + await arun_with_retry(t, retry_policy, stream=self.use_astream) + self.commit(t, None) except Exception as exc: - self.commit(task, exc) + self.commit(t, exc) if reraise: raise return @@ -138,19 +138,19 @@ async def atick( # execute tasks, and wait for one to fail or all to finish. 
# each task is independent from all other concurrent tasks # yield updates/debug output as each task finishes - for task in tasks: - if not task.writes: + for t in tasks: + if not t.writes: futures[ self.submit( arun_with_retry, - task, + t, retry_policy, stream=self.use_astream, - __name__=task.name, + __name__=t.name, __cancel_on_exit__=True, __reraise_on_exit__=reraise, ) - ] = task + ] = t all_futures = futures.copy() end_time = timeout + loop.time() if timeout else None while len(futures) > (1 if get_waiter is not None else 0): @@ -165,7 +165,7 @@ async def atick( task = futures.pop(fut) if task is None: # waiter task finished, schedule another - if inflight: + if inflight and get_waiter is not None: futures[get_waiter()] = None else: # task finished, commit writes @@ -208,7 +208,7 @@ def commit( def _should_stop_others( - done: Union[set[concurrent.futures.Future[Any]], set[asyncio.Task[Any]]], + done: Union[set[concurrent.futures.Future[Any]], set[asyncio.Future[Any]]], ) -> bool: for fut in done: if fut.cancelled(): @@ -220,10 +220,10 @@ def _should_stop_others( def _exception( - fut: Union[concurrent.futures.Future[Any], asyncio.Task[Any]], + fut: Union[concurrent.futures.Future[Any], asyncio.Future[Any]], ) -> Optional[BaseException]: if fut.cancelled(): - if isinstance(fut, asyncio.Task): + if isinstance(fut, asyncio.Future): return asyncio.CancelledError() else: return concurrent.futures.CancelledError() @@ -240,8 +240,8 @@ def _panic_or_proceed( timeout_exc_cls: Type[Exception] = TimeoutError, panic: bool = True, ) -> None: - done: set[Union[concurrent.futures.Future[Any], asyncio.Task[Any]]] = set() - inflight: set[Union[concurrent.futures.Future[Any], asyncio.Task[Any]]] = set() + done: set[Union[concurrent.futures.Future[Any], asyncio.Future[Any]]] = set() + inflight: set[Union[concurrent.futures.Future[Any], asyncio.Future[Any]]] = set() for fut, val in futs.items(): if val is None: continue diff --git a/libs/langgraph/langgraph/pregel/types.py 
b/libs/langgraph/langgraph/pregel/types.py index f9ad466086..2184560858 100644 --- a/libs/langgraph/langgraph/pregel/types.py +++ b/libs/langgraph/langgraph/pregel/types.py @@ -66,7 +66,7 @@ class CachePolicy(NamedTuple): class PregelTask(NamedTuple): id: str name: str - path: tuple[str, ...] + path: tuple[Union[str, int], ...] error: Optional[Exception] = None interrupts: tuple[Interrupt, ...] = () state: Union[None, RunnableConfig, "StateSnapshot"] = None @@ -82,7 +82,7 @@ class PregelExecutableTask(NamedTuple): retry_policy: Optional[RetryPolicy] cache_policy: Optional[CachePolicy] id: str - path: tuple[str, ...] + path: tuple[Union[str, int], ...] scheduled: bool = False diff --git a/libs/langgraph/langgraph/pregel/utils.py b/libs/langgraph/langgraph/pregel/utils.py index d3d0d989f8..c6dc064d3f 100644 --- a/libs/langgraph/langgraph/pregel/utils.py +++ b/libs/langgraph/langgraph/pregel/utils.py @@ -7,11 +7,11 @@ def get_new_channel_versions( """Get new channel versions.""" if previous_versions: version_type = type(next(iter(current_versions.values()), None)) - null_version = version_type() + null_version = version_type() # type: ignore[misc] new_versions = { k: v for k, v in current_versions.items() - if v > previous_versions.get(k, null_version) + if v > previous_versions.get(k, null_version) # type: ignore[operator] } else: new_versions = current_versions diff --git a/libs/langgraph/langgraph/pregel/write.py b/libs/langgraph/langgraph/pregel/write.py index fd732966aa..9c3b7782df 100644 --- a/libs/langgraph/langgraph/pregel/write.py +++ b/libs/langgraph/langgraph/pregel/write.py @@ -50,7 +50,7 @@ def __init__( self, writes: Sequence[Union[ChannelWriteEntry, Send]], *, - tags: Optional[list[str]] = None, + tags: Optional[Sequence[str]] = None, require_at_least_one_of: Optional[Sequence[str]] = None, ): super().__init__(func=self._write, afunc=self._awrite, name=None, tags=tags) @@ -158,6 +158,6 @@ def register_writer(runnable: R) -> R: def _mk_future(val: Any) 
-> asyncio.Future: - fut = asyncio.Future() + fut: asyncio.Future[Any] = asyncio.Future() fut.set_result(val) return fut diff --git a/libs/langgraph/langgraph/utils/config.py b/libs/langgraph/langgraph/utils/config.py index 4a69050a15..cc352cf074 100644 --- a/libs/langgraph/langgraph/utils/config.py +++ b/libs/langgraph/langgraph/utils/config.py @@ -1,7 +1,12 @@ from collections import ChainMap from typing import Any, Optional, Sequence -from langchain_core.callbacks import AsyncCallbackManager, CallbackManager, Callbacks +from langchain_core.callbacks import ( + AsyncCallbackManager, + BaseCallbackManager, + CallbackManager, + Callbacks, +) from langchain_core.runnables import RunnableConfig from langchain_core.runnables.config import ( CONFIG_KEYS, @@ -63,20 +68,20 @@ def merge_configs(*configs: Optional[RunnableConfig]) -> RunnableConfig: if not value: continue if key == "metadata": - if base_value := base.get(key): # type: ignore + if base_value := base.get(key): base[key] = {**base_value, **value} # type: ignore else: - base[key] = value + base[key] = value # type: ignore[literal-required] elif key == "tags": - if base_value := base.get(key): # type: ignore + if base_value := base.get(key): base[key] = [*base_value, *value] # type: ignore else: - base[key] = value + base[key] = value # type: ignore[literal-required] elif key == "configurable": - if base_value := base.get(key): # type: ignore + if base_value := base.get(key): base[key] = {**base_value, **value} # type: ignore else: - base[key] = value + base[key] = value # type: ignore[literal-required] elif key == "callbacks": base_callbacks = base.get("callbacks") # callbacks can be either None, list[handler] or manager @@ -92,7 +97,7 @@ def merge_configs(*configs: Optional[RunnableConfig]) -> RunnableConfig: for callback in value: mngr.add_handler(callback, inherit=True) base["callbacks"] = mngr - else: + elif isinstance(value, BaseCallbackManager): # value is a manager if base_callbacks is None: 
base["callbacks"] = value.copy() @@ -104,11 +109,13 @@ def merge_configs(*configs: Optional[RunnableConfig]) -> RunnableConfig: else: # base_callbacks is also a manager base["callbacks"] = base_callbacks.merge(value) + else: + raise NotImplementedError elif key == "recursion_limit": if config["recursion_limit"] != DEFAULT_RECURSION_LIMIT: base["recursion_limit"] = config["recursion_limit"] else: - base[key] = config[key] + base[key] = config[key] # type: ignore[literal-required] return base @@ -138,7 +145,7 @@ def patch_config( Returns: RunnableConfig: The patched config. """ - config = config.copy() or {} + config = config.copy() if config is not None else {} if callbacks is not None: # If we're replacing callbacks, we need to unset run_name # As that should apply only to the same run as the original callbacks @@ -176,7 +183,7 @@ def get_callback_manager_for_config( if all_tags is not None and tags is not None: all_tags = [*all_tags, *tags] elif tags is not None: - all_tags = tags + all_tags = list(tags) # use existing callbacks if they exist if (callbacks := config.get("callbacks")) and isinstance( callbacks, CallbackManager @@ -214,7 +221,7 @@ def get_async_callback_manager_for_config( if all_tags is not None and tags is not None: all_tags = [*all_tags, *tags] elif tags is not None: - all_tags = tags + all_tags = list(tags) # use existing callbacks if they exist if (callbacks := config.get("callbacks")) and isinstance( callbacks, AsyncCallbackManager @@ -263,7 +270,7 @@ def ensure_config(*configs: Optional[RunnableConfig]) -> RunnableConfig: continue for k, v in config.items(): if v is not None and k in CONFIG_KEYS: - empty[k] = v + empty[k] = v # type: ignore[literal-required] for k, v in config.items(): if v is not None and k not in CONFIG_KEYS: empty["configurable"][k] = v diff --git a/libs/langgraph/langgraph/utils/fields.py b/libs/langgraph/langgraph/utils/fields.py index a4c29a9ec1..f4786cb340 100644 --- a/libs/langgraph/langgraph/utils/fields.py +++ 
b/libs/langgraph/langgraph/utils/fields.py @@ -59,7 +59,7 @@ def _is_readonly_type(type_: Any) -> bool: return False -_DEFAULT_KEYS = frozenset() +_DEFAULT_KEYS: frozenset[str] = frozenset() def get_field_default(name: str, type_: Any, schema: Type[Any]) -> Any: diff --git a/libs/langgraph/langgraph/utils/pydantic.py b/libs/langgraph/langgraph/utils/pydantic.py index 9accc66e15..cd0984202f 100644 --- a/libs/langgraph/langgraph/utils/pydantic.py +++ b/libs/langgraph/langgraph/utils/pydantic.py @@ -19,7 +19,7 @@ def create_model( """ try: # for langchain-core >= 0.3.0 - from langchain_core.runnables.pydantic import create_model_v2 + from langchain_core.utils.pydantic import create_model_v2 return create_model_v2( model_name, diff --git a/libs/langgraph/langgraph/utils/queue.py b/libs/langgraph/langgraph/utils/queue.py index 99d94f5c44..b68e15322e 100644 --- a/libs/langgraph/langgraph/utils/queue.py +++ b/libs/langgraph/langgraph/utils/queue.py @@ -1,3 +1,5 @@ +# type: ignore + import asyncio import queue import sys @@ -5,6 +7,7 @@ import types from collections import deque from time import monotonic +from typing import Optional PY_310 = sys.version_info >= (3, 10) @@ -14,7 +17,7 @@ class AsyncQueue(asyncio.Queue): Subclassed from asyncio.Queue, adding a wait() method.""" - async def wait(self): + async def wait(self) -> None: """If queue is empty, wait until an item is available. 
Copied from Queue.get(), removing the call to .get_nowait(), @@ -47,7 +50,7 @@ async def wait(self): class Semaphore(threading.Semaphore): """Semaphore subclass with a wait() method.""" - def wait(self, blocking: bool = True, timeout: float = None): + def wait(self, blocking: bool = True, timeout: Optional[float] = None): """Block until the semaphore can be acquired, but don't acquire it.""" if not blocking and timeout is not None: raise ValueError("can't specify timeout for non-blocking acquire") @@ -125,3 +128,6 @@ def qsize(self): return len(self._queue) __class_getitem__ = classmethod(types.GenericAlias) + + +__all__ = ["AsyncQueue", "SyncQueue"] diff --git a/libs/langgraph/langgraph/utils/runnable.py b/libs/langgraph/langgraph/utils/runnable.py index 7230d67bb8..f0b16442d9 100644 --- a/libs/langgraph/langgraph/utils/runnable.py +++ b/libs/langgraph/langgraph/utils/runnable.py @@ -5,7 +5,18 @@ from contextlib import AsyncExitStack from contextvars import copy_context from functools import partial, wraps -from typing import Any, AsyncIterator, Awaitable, Callable, Iterator, Optional, Sequence +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Coroutine, + Iterator, + Optional, + Sequence, + Union, + cast, +) from langchain_core.runnables.base import ( Runnable, @@ -19,7 +30,7 @@ run_in_executor, var_child_runnable_config, ) -from langchain_core.runnables.utils import Input, Output, accepts_config +from langchain_core.runnables.utils import Input, accepts_config from langchain_core.tracers._streaming import _StreamingCallbackHandler from typing_extensions import TypeGuard @@ -52,8 +63,8 @@ class RunnableCallable(Runnable): def __init__( self, - func: Callable[..., Optional[Runnable]], - afunc: Optional[Callable[..., Awaitable[Optional[Runnable]]]] = None, + func: Optional[Callable[..., Union[Any, Runnable]]], + afunc: Optional[Callable[..., Awaitable[Union[Any, Runnable]]]] = None, *, name: Optional[str] = None, tags: Optional[Sequence[str]] 
= None, @@ -155,7 +166,7 @@ async def ainvoke( try: child_config = patch_config(config, callbacks=run_manager.get_child()) context.run(_set_config_context, child_config) - coro = self.afunc(input, **kwargs) + coro = cast(Coroutine[None, None, Any], self.afunc(input, **kwargs)) if ASYNCIO_ACCEPTS_CONTEXT: ret = await asyncio.create_task(coro, context=context) else: @@ -168,9 +179,8 @@ async def ainvoke( else: context.run(_set_config_context, config) if ASYNCIO_ACCEPTS_CONTEXT: - ret = await asyncio.create_task( - self.afunc(input, **kwargs), context=context - ) + coro = cast(Coroutine[None, None, Any], self.afunc(input, **kwargs)) + ret = await asyncio.create_task(coro, context=context) else: ret = await self.afunc(input, **kwargs) if isinstance(ret, Runnable) and self.recurse: @@ -200,7 +210,9 @@ def is_async_generator( ) -def coerce_to_runnable(thing: RunnableLike, *, name: str, trace: bool) -> Runnable: +def coerce_to_runnable( + thing: RunnableLike, *, name: Optional[str], trace: bool +) -> Runnable: """Coerce a runnable-like object into a Runnable. 
Args: @@ -219,7 +231,7 @@ def coerce_to_runnable(thing: RunnableLike, *, name: str, trace: bool) -> Runnab else: return RunnableCallable( thing, - wraps(thing)(partial(run_in_executor, None, thing)), + wraps(thing)(partial(run_in_executor, None, thing)), # type: ignore[arg-type] name=name, trace=trace, ) @@ -257,7 +269,7 @@ def __init__( if isinstance(step, RunnableSequence): steps_flat.extend(step.steps) elif isinstance(step, RunnableSeq): - steps_flat.extend(step.steps) + steps_flat.extend(step.steps) # type: ignore[has-type] else: steps_flat.append(coerce_to_runnable(step, name=None, trace=True)) if len(steps_flat) < 2: @@ -288,7 +300,7 @@ def __or__( else: return RunnableSeq( *self.steps, - coerce_to_runnable(other), + coerce_to_runnable(other, name=None, trace=True), name=self.name, ) @@ -312,14 +324,16 @@ def __ror__( ) else: return RunnableSequence( - coerce_to_runnable(other), + coerce_to_runnable(other, name=None, trace=True), *self.steps, name=self.name, ) def invoke( self, input: Input, config: Optional[RunnableConfig] = None, **kwargs: Any - ) -> Output: + ) -> Any: + if config is None: + config = ensure_config() # setup callbacks and context callback_manager = get_callback_manager_for_config(config) # start the root run @@ -356,7 +370,9 @@ async def ainvoke( input: Input, config: Optional[RunnableConfig] = None, **kwargs: Optional[Any], - ) -> Output: + ) -> Any: + if config is None: + config = ensure_config() # setup callbacks callback_manager = get_async_callback_manager_for_config(config) # start the root run @@ -397,7 +413,9 @@ def stream( input: Input, config: Optional[RunnableConfig] = None, **kwargs: Optional[Any], - ) -> Iterator[Output]: + ) -> Iterator[Any]: + if config is None: + config = ensure_config() # setup callbacks callback_manager = get_callback_manager_for_config(config) # start the root run @@ -424,7 +442,7 @@ def stream( iterator = step.transform(iterator, config) if stream_handler := next( ( - h + cast(_StreamingCallbackHandler, 
h) for h in run_manager.handlers if isinstance(h, _StreamingCallbackHandler) ), @@ -432,7 +450,7 @@ def stream( ): # populates streamed_output in astream_log() output if needed iterator = stream_handler.tap_output_iter(run_manager.run_id, iterator) - output: Output = None + output: Any = None add_supported = False for chunk in iterator: yield chunk @@ -458,7 +476,9 @@ async def astream( input: Input, config: Optional[RunnableConfig] = None, **kwargs: Optional[Any], - ) -> AsyncIterator[Output]: + ) -> AsyncIterator[Any]: + if config is None: + config = ensure_config() # setup callbacks callback_manager = get_async_callback_manager_for_config(config) # start the root run @@ -488,7 +508,7 @@ async def astream( stack.push_async_callback(aiterator.aclose) if stream_handler := next( ( - h + cast(_StreamingCallbackHandler, h) for h in run_manager.handlers if isinstance(h, _StreamingCallbackHandler) ), @@ -498,7 +518,7 @@ async def astream( aiterator = stream_handler.tap_output_aiter( run_manager.run_id, aiterator ) - output: Output = None + output: Any = None add_supported = False async for chunk in aiterator: yield chunk diff --git a/libs/langgraph/poetry.lock b/libs/langgraph/poetry.lock index ba50565a8a..24de562c4f 100644 --- a/libs/langgraph/poetry.lock +++ b/libs/langgraph/poetry.lock @@ -1478,44 +1478,44 @@ files = [ [[package]] name = "mypy" -version = "1.10.0" +version = "1.11.2" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da1cbf08fb3b851ab3b9523a884c232774008267b1f83371ace57f412fe308c2"}, - {file = "mypy-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:12b6bfc1b1a66095ab413160a6e520e1dc076a28f3e22f7fb25ba3b000b4ef99"}, - {file = "mypy-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e36fb078cce9904c7989b9693e41cb9711e0600139ce3970c6ef814b6ebc2b2"}, - {file = 
"mypy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b0695d605ddcd3eb2f736cd8b4e388288c21e7de85001e9f85df9187f2b50f9"}, - {file = "mypy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:cd777b780312ddb135bceb9bc8722a73ec95e042f911cc279e2ec3c667076051"}, - {file = "mypy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3be66771aa5c97602f382230165b856c231d1277c511c9a8dd058be4784472e1"}, - {file = "mypy-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8b2cbaca148d0754a54d44121b5825ae71868c7592a53b7292eeb0f3fdae95ee"}, - {file = "mypy-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ec404a7cbe9fc0e92cb0e67f55ce0c025014e26d33e54d9e506a0f2d07fe5de"}, - {file = "mypy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e22e1527dc3d4aa94311d246b59e47f6455b8729f4968765ac1eacf9a4760bc7"}, - {file = "mypy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:a87dbfa85971e8d59c9cc1fcf534efe664d8949e4c0b6b44e8ca548e746a8d53"}, - {file = "mypy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a781f6ad4bab20eef8b65174a57e5203f4be627b46291f4589879bf4e257b97b"}, - {file = "mypy-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b808e12113505b97d9023b0b5e0c0705a90571c6feefc6f215c1df9381256e30"}, - {file = "mypy-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f55583b12156c399dce2df7d16f8a5095291354f1e839c252ec6c0611e86e2e"}, - {file = "mypy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cf18f9d0efa1b16478c4c129eabec36148032575391095f73cae2e722fcf9d5"}, - {file = "mypy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc6ac273b23c6b82da3bb25f4136c4fd42665f17f2cd850771cb600bdd2ebeda"}, - {file = "mypy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9fd50226364cd2737351c79807775136b0abe084433b55b2e29181a4c3c878c0"}, - {file = "mypy-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:f90cff89eea89273727d8783fef5d4a934be2fdca11b47def50cf5d311aff727"}, - {file = "mypy-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcfc70599efde5c67862a07a1aaf50e55bce629ace26bb19dc17cece5dd31ca4"}, - {file = "mypy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:075cbf81f3e134eadaf247de187bd604748171d6b79736fa9b6c9685b4083061"}, - {file = "mypy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:3f298531bca95ff615b6e9f2fc0333aae27fa48052903a0ac90215021cdcfa4f"}, - {file = "mypy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa7ef5244615a2523b56c034becde4e9e3f9b034854c93639adb667ec9ec2976"}, - {file = "mypy-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3236a4c8f535a0631f85f5fcdffba71c7feeef76a6002fcba7c1a8e57c8be1ec"}, - {file = "mypy-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2b5cdbb5dd35aa08ea9114436e0d79aceb2f38e32c21684dcf8e24e1e92821"}, - {file = "mypy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92f93b21c0fe73dc00abf91022234c79d793318b8a96faac147cd579c1671746"}, - {file = "mypy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:28d0e038361b45f099cc086d9dd99c15ff14d0188f44ac883010e172ce86c38a"}, - {file = "mypy-1.10.0-py3-none-any.whl", hash = "sha256:f8c083976eb530019175aabadb60921e73b4f45736760826aa1689dda8208aee"}, - {file = "mypy-1.10.0.tar.gz", hash = "sha256:3d087fcbec056c4ee34974da493a826ce316947485cef3901f511848e687c131"}, + {file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"}, + {file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"}, + {file = "mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383"}, + {file = 
"mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8"}, + {file = "mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7"}, + {file = "mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385"}, + {file = "mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca"}, + {file = "mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104"}, + {file = "mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4"}, + {file = "mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"}, + {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"}, + {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"}, + {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"}, + {file = "mypy-1.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b"}, + {file = "mypy-1.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86"}, + {file = "mypy-1.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce"}, + {file = "mypy-1.11.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1"}, + {file = "mypy-1.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b"}, + {file = "mypy-1.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6"}, + {file = "mypy-1.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70"}, + {file = "mypy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d"}, + {file = "mypy-1.11.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d"}, + {file = "mypy-1.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24"}, + {file = "mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"}, + {file = "mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" +typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] @@ -2979,6 +2979,20 @@ files = [ {file = "types_python_dateutil-2.9.0.20240316-py3-none-any.whl", hash = "sha256:6b8cb66d960771ce5ff974e9dd45e38facb81718cc1e208b10b1baccbfdbee3b"}, ] +[[package]] +name = "types-requests" +version = "2.32.0.20240914" 
+description = "Typing stubs for requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-requests-2.32.0.20240914.tar.gz", hash = "sha256:2850e178db3919d9bf809e434eef65ba49d0e7e33ac92d588f4a5e295fffd405"}, + {file = "types_requests-2.32.0.20240914-py3-none-any.whl", hash = "sha256:59c2f673eb55f32a99b2894faf6020e1a9f4a402ad0f192bfee0b64469054310"}, +] + +[package.dependencies] +urllib3 = ">=2" + [[package]] name = "typing-extensions" version = "4.12.2" @@ -3202,4 +3216,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = ">=3.9.0,<4.0" -content-hash = "73c2dec0a0e833ad8742ebfca86d8e3d602a8a63671a782d21d8e0079a02d448" +content-hash = "2c74c10f4650f14f2757e1a688761a9680ecd251da088ea1e8c5ceda51aec067" diff --git a/libs/langgraph/pyproject.toml b/libs/langgraph/pyproject.toml index a583db910a..954ca2973a 100644 --- a/libs/langgraph/pyproject.toml +++ b/libs/langgraph/pyproject.toml @@ -32,6 +32,7 @@ psycopg = {extras = ["binary"], version = ">=3.0.0", python = ">=3.10"} uvloop = "^0.20.0" pyperf = "^2.7.0" py-spy = "^0.3.14" +types-requests = "^2.32.0.20240914" [tool.ruff] lint.select = [ "E", "F", "I" ] @@ -49,8 +50,14 @@ docstring-code-format = false docstring-code-line-length = "dynamic" [tool.mypy] -ignore_missing_imports = "True" +# https://mypy.readthedocs.io/en/stable/config_file.html disallow_untyped_defs = "True" +explicit_package_bases = "True" +warn_no_return = "False" +warn_unused_ignores = "True" +warn_redundant_casts = "True" +allow_redefinition = "True" +disable_error_code = "typeddict-item, return-value, override" [tool.coverage.run] omit = ["tests/*"] diff --git a/libs/langgraph/tests/test_pregel_async.py b/libs/langgraph/tests/test_pregel_async.py index 6bbbd0506f..fec2aeafae 100644 --- a/libs/langgraph/tests/test_pregel_async.py +++ b/libs/langgraph/tests/test_pregel_async.py @@ -3962,7 +3962,7 @@ def search_api(query: str) -> str: assert [ c - 
for c in app.stream( + async for c in app.astream( {"messages": [HumanMessage(content="what is weather in sf")]}, stream_mode="messages", ) From cc318b1156cf18305a48a27065479a815d758240 Mon Sep 17 00:00:00 2001 From: Nuno Campos Date: Thu, 19 Sep 2024 11:43:30 -0700 Subject: [PATCH 08/17] Fix --- .../langgraph/checkpoint/postgres/__init__.py | 4 ++-- .../langgraph/checkpoint/postgres/aio.py | 6 +++--- .../langgraph/checkpoint/postgres/base.py | 4 ++-- libs/checkpoint/langgraph/checkpoint/memory/__init__.py | 6 +++--- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/libs/checkpoint-postgres/langgraph/checkpoint/postgres/__init__.py b/libs/checkpoint-postgres/langgraph/checkpoint/postgres/__init__.py index 821b74af67..a2274cd601 100644 --- a/libs/checkpoint-postgres/langgraph/checkpoint/postgres/__init__.py +++ b/libs/checkpoint-postgres/langgraph/checkpoint/postgres/__init__.py @@ -1,6 +1,6 @@ import threading from contextlib import contextmanager -from typing import Any, Iterator, List, Optional, Union +from typing import Any, Iterator, Optional, Sequence, Union from langchain_core.runnables import RunnableConfig from psycopg import Connection, Cursor, Pipeline @@ -332,7 +332,7 @@ def put( def put_writes( self, config: RunnableConfig, - writes: List[tuple[str, Any]], + writes: Sequence[tuple[str, Any]], task_id: str, ) -> None: """Store intermediate writes linked to a checkpoint. 
diff --git a/libs/checkpoint-postgres/langgraph/checkpoint/postgres/aio.py b/libs/checkpoint-postgres/langgraph/checkpoint/postgres/aio.py index 43a8cfe47d..5b6269fe09 100644 --- a/libs/checkpoint-postgres/langgraph/checkpoint/postgres/aio.py +++ b/libs/checkpoint-postgres/langgraph/checkpoint/postgres/aio.py @@ -1,6 +1,6 @@ import asyncio from contextlib import asynccontextmanager -from typing import Any, AsyncIterator, Iterator, List, Optional, Union +from typing import Any, AsyncIterator, Iterator, Optional, Sequence, Union from langchain_core.runnables import RunnableConfig from psycopg import AsyncConnection, AsyncCursor, AsyncPipeline @@ -291,7 +291,7 @@ async def aput( async def aput_writes( self, config: RunnableConfig, - writes: list[tuple[str, Any]], + writes: Sequence[tuple[str, Any]], task_id: str, ) -> None: """Store intermediate writes linked to a checkpoint asynchronously. @@ -424,7 +424,7 @@ def put( def put_writes( self, config: RunnableConfig, - writes: List[tuple[str, Any]], + writes: Sequence[tuple[str, Any]], task_id: str, ) -> None: """Store intermediate writes linked to a checkpoint. 
diff --git a/libs/checkpoint-postgres/langgraph/checkpoint/postgres/base.py b/libs/checkpoint-postgres/langgraph/checkpoint/postgres/base.py index f2fa21eba3..76232e3376 100644 --- a/libs/checkpoint-postgres/langgraph/checkpoint/postgres/base.py +++ b/libs/checkpoint-postgres/langgraph/checkpoint/postgres/base.py @@ -1,5 +1,5 @@ import random -from typing import Any, List, Optional, Tuple, cast +from typing import Any, List, Optional, Sequence, Tuple, cast from langchain_core.runnables import RunnableConfig from psycopg.types.json import Jsonb @@ -209,7 +209,7 @@ def _dump_writes( checkpoint_ns: str, checkpoint_id: str, task_id: str, - writes: list[tuple[str, Any]], + writes: Sequence[tuple[str, Any]], ) -> list[tuple[str, str, str, str, int, str, str, bytes]]: return [ ( diff --git a/libs/checkpoint/langgraph/checkpoint/memory/__init__.py b/libs/checkpoint/langgraph/checkpoint/memory/__init__.py index 20d53ea066..176aec24c5 100644 --- a/libs/checkpoint/langgraph/checkpoint/memory/__init__.py +++ b/libs/checkpoint/langgraph/checkpoint/memory/__init__.py @@ -4,7 +4,7 @@ from contextlib import AbstractAsyncContextManager, AbstractContextManager from functools import partial from types import TracebackType -from typing import Any, AsyncIterator, Dict, Iterator, List, Optional, Tuple +from typing import Any, AsyncIterator, Dict, Iterator, Optional, Sequence, Tuple from langchain_core.runnables import RunnableConfig @@ -344,7 +344,7 @@ def put( def put_writes( self, config: RunnableConfig, - writes: List[Tuple[str, Any]], + writes: Sequence[Tuple[str, Any]], task_id: str, ) -> None: """Save a list of writes to the in-memory storage. @@ -447,7 +447,7 @@ async def aput( async def aput_writes( self, config: RunnableConfig, - writes: List[Tuple[str, Any]], + writes: Sequence[Tuple[str, Any]], task_id: str, ) -> None: """Asynchronous version of put_writes. 
From 4f767cd2ca8047034eb90c7c17dbc4be0c12dca4 Mon Sep 17 00:00:00 2001 From: Nuno Campos Date: Thu, 19 Sep 2024 11:46:21 -0700 Subject: [PATCH 09/17] Fix --- libs/langgraph/langgraph/pregel/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/langgraph/langgraph/pregel/__init__.py b/libs/langgraph/langgraph/pregel/__init__.py index 5389b7ddf1..3cf791ef82 100644 --- a/libs/langgraph/langgraph/pregel/__init__.py +++ b/libs/langgraph/langgraph/pregel/__init__.py @@ -1429,7 +1429,7 @@ def output() -> Iterator: # set up messages stream mode if "messages" in stream_modes: run_manager.inheritable_handlers.append( - StreamMessagesHandler(stream.put) + StreamMessagesHandler(stream.put_nowait) ) async with AsyncPregelLoop( input, From 5de9b354162bfee89b859ad72a404fb59488c35f Mon Sep 17 00:00:00 2001 From: Nuno Campos Date: Thu, 19 Sep 2024 13:01:54 -0700 Subject: [PATCH 10/17] Finish --- .../langgraph/checkpoint/base/__init__.py | 4 +- libs/langgraph/Makefile | 2 +- .../channels/dynamic_barrier_value.py | 2 +- .../langgraph/channels/ephemeral_value.py | 4 +- libs/langgraph/langgraph/graph/graph.py | 79 ++++++++++++------- libs/langgraph/langgraph/graph/message.py | 4 +- libs/langgraph/langgraph/graph/state.py | 38 ++++++--- .../langgraph/prebuilt/chat_agent_executor.py | 2 +- .../langgraph/langgraph/prebuilt/tool_node.py | 2 +- libs/langgraph/langgraph/pregel/__init__.py | 44 ++++++----- libs/langgraph/langgraph/pregel/algo.py | 10 ++- libs/langgraph/langgraph/pregel/debug.py | 4 +- libs/langgraph/langgraph/pregel/loop.py | 22 ++++-- libs/langgraph/langgraph/pregel/messages.py | 13 +-- libs/langgraph/langgraph/pregel/read.py | 12 +-- libs/langgraph/langgraph/pregel/runner.py | 25 +++--- libs/langgraph/langgraph/pregel/validate.py | 16 ++-- libs/langgraph/langgraph/pregel/write.py | 5 +- libs/langgraph/langgraph/utils/runnable.py | 2 +- libs/langgraph/pyproject.toml | 2 +- 20 files changed, 171 insertions(+), 121 deletions(-) diff --git 
a/libs/checkpoint/langgraph/checkpoint/base/__init__.py b/libs/checkpoint/langgraph/checkpoint/base/__init__.py index 822389cce9..ae98e5df5d 100644 --- a/libs/checkpoint/langgraph/checkpoint/base/__init__.py +++ b/libs/checkpoint/langgraph/checkpoint/base/__init__.py @@ -13,11 +13,11 @@ Sequence, Tuple, TypedDict, + TypeVar, Union, ) from langchain_core.runnables import ConfigurableFieldSpec, RunnableConfig -from typing_extensions import TypeVar from langgraph.checkpoint.base.id import uuid6 from langgraph.checkpoint.serde.base import SerializerProtocol, maybe_add_typed_methods @@ -29,7 +29,7 @@ SendProtocol, ) -V = TypeVar("V", int, float, str, default=int) +V = TypeVar("V", int, float, str) PendingWrite = Tuple[str, str, Any] diff --git a/libs/langgraph/Makefile b/libs/langgraph/Makefile index 62ef303bba..7ec12f80a8 100644 --- a/libs/langgraph/Makefile +++ b/libs/langgraph/Makefile @@ -75,7 +75,7 @@ lint lint_diff lint_package lint_tests: [ "$(PYTHON_FILES)" = "" ] || poetry run ruff format $(PYTHON_FILES) --diff [ "$(PYTHON_FILES)" = "" ] || poetry run ruff check --select I $(PYTHON_FILES) [ "$(PYTHON_FILES)" = "" ] || mkdir -p $(MYPY_CACHE) - [ "$(PYTHON_FILES)" = "" ] || poetry run mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE) + [ "$(PYTHON_FILES)" != "langgraph" ] || poetry run mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE) format format_diff: poetry run ruff format $(PYTHON_FILES) diff --git a/libs/langgraph/langgraph/channels/dynamic_barrier_value.py b/libs/langgraph/langgraph/channels/dynamic_barrier_value.py index bbecd3d8bc..dfa77f3501 100644 --- a/libs/langgraph/langgraph/channels/dynamic_barrier_value.py +++ b/libs/langgraph/langgraph/channels/dynamic_barrier_value.py @@ -6,7 +6,7 @@ from langgraph.errors import EmptyChannelError, InvalidUpdateError -class WaitForNames(NamedTuple): +class WaitForNames(NamedTuple, Generic[Value]): names: set[Value] diff --git a/libs/langgraph/langgraph/channels/ephemeral_value.py 
b/libs/langgraph/langgraph/channels/ephemeral_value.py index 5beba22eb4..537a8763ca 100644 --- a/libs/langgraph/langgraph/channels/ephemeral_value.py +++ b/libs/langgraph/langgraph/channels/ephemeral_value.py @@ -1,4 +1,4 @@ -from typing import Generic, Optional, Sequence, Type +from typing import Any, Generic, Optional, Sequence, Type from typing_extensions import Self @@ -11,7 +11,7 @@ class EphemeralValue(Generic[Value], BaseChannel[Value, Value, Value]): __slots__ = ("value", "guard") - def __init__(self, typ: Type[Value], guard: bool = True) -> None: + def __init__(self, typ: Any, guard: bool = True) -> None: super().__init__(typ) self.guard = guard diff --git a/libs/langgraph/langgraph/graph/graph.py b/libs/langgraph/langgraph/graph/graph.py index 706e5253ca..93fe28b3d1 100644 --- a/libs/langgraph/langgraph/graph/graph.py +++ b/libs/langgraph/langgraph/graph/graph.py @@ -57,7 +57,7 @@ class Branch(NamedTuple): def run( self, writer: Callable[ - [list[Union[str, Send]], RunnableConfig], Optional[ChannelWrite] + [Sequence[Union[str, Send]], RunnableConfig], Optional[ChannelWrite] ], reader: Optional[Callable[[RunnableConfig], Any]] = None, ) -> RunnableCallable: @@ -79,7 +79,7 @@ def _route( *, reader: Optional[Callable[[RunnableConfig], Any]], writer: Callable[ - [list[Union[str, Send]], RunnableConfig], Optional[ChannelWrite] + [Sequence[Union[str, Send]], RunnableConfig], Optional[ChannelWrite] ], ) -> Runnable: if reader: @@ -100,7 +100,7 @@ async def _aroute( *, reader: Optional[Callable[[RunnableConfig], Any]], writer: Callable[ - [list[Union[str, Send]], RunnableConfig], Optional[ChannelWrite] + [Sequence[Union[str, Send]], RunnableConfig], Optional[ChannelWrite] ], ) -> Runnable: if reader: @@ -117,18 +117,20 @@ async def _aroute( def _finish( self, writer: Callable[ - [list[Union[str, Send]], RunnableConfig], Optional[ChannelWrite] + [Sequence[Union[str, Send]], RunnableConfig], Optional[ChannelWrite] ], input: Any, result: Any, config: RunnableConfig, 
- ): + ) -> Union[Runnable, Any]: if not isinstance(result, list): result = [result] if self.ends: - destinations = [r if isinstance(r, Send) else self.ends[r] for r in result] + destinations: Sequence[Union[Send, str]] = [ + r if isinstance(r, Send) else self.ends[r] for r in result + ] else: - destinations = result + destinations = cast(Sequence[Union[Send, str]], result) if any(dest is None or dest == START for dest in destinations): raise ValueError("Branch did not return a valid destination") if any(p.node == END for p in destinations if isinstance(p, Send)): @@ -186,14 +188,20 @@ def add_node( ) if not isinstance(node, str): action = node - node = getattr(action, "name", action.__name__) + node = getattr(action, "name", getattr(action, "__name__")) + if node is None: + raise ValueError( + "Node name must be provided if action is not a function" + ) + if action is None: + raise RuntimeError if node in self.nodes: raise ValueError(f"Node `{node}` already present.") if node == END or node == START: raise ValueError(f"Node `{node}` is reserved.") - self.nodes[node] = NodeSpec( - coerce_to_runnable(action, name=node, trace=False), metadata + self.nodes[cast(str, node)] = NodeSpec( + coerce_to_runnable(action, name=cast(str, node), trace=False), metadata ) def add_edge(self, start_key: str, end_key: str) -> None: @@ -257,16 +265,20 @@ def add_conditional_edges( # coerce path_map to a dictionary try: if isinstance(path_map, dict): - path_map = path_map.copy() + path_map_ = path_map.copy() elif isinstance(path_map, list): - path_map = {name: name for name in path_map} - elif rtn_type := get_type_hints(path.__call__).get( - "return" - ) or get_type_hints(path).get("return"): + path_map_ = {name: name for name in path_map} + elif callable(path) and ( + rtn_type := get_type_hints(path.__call__).get("return") + if hasattr(path, "__call__") + else get_type_hints(path).get("return") + ): if get_origin(rtn_type) is Literal: - path_map = {name: name for name in 
get_args(rtn_type)} + path_map_ = {name: name for name in get_args(rtn_type)} + else: + path_map_ = None except Exception: - pass + path_map_ = None # find a name for the condition path = coerce_to_runnable(path, name=None, trace=True) name = path.name or "condition" @@ -276,7 +288,7 @@ def add_conditional_edges( f"Branch with name `{path.name}` already exists for node " f"`{source}`" ) # save it - self.branches[source][name] = Branch(path, path_map, then) + self.branches[source][name] = Branch(path, path_map_, then) def set_entry_point(self, key: str) -> None: """Specifies the first node to be called in the graph. @@ -405,7 +417,6 @@ def compile( # create empty compiled graph compiled = CompiledGraph( - builder=self, nodes={}, channels={START: EphemeralValue(Any), END: EphemeralValue(Any)}, input_channels=START, @@ -418,6 +429,7 @@ def compile( auto_validate=False, debug=debug, ) + compiled.builder = self # attach nodes, edges, and branches for key, node in self.nodes.items(): @@ -437,10 +449,6 @@ def compile( class CompiledGraph(Pregel): builder: Graph - def __init__(self, *, builder: Graph, **kwargs): - super().__init__(**kwargs) - self.builder = builder - def attach_node(self, key: str, node: NodeSpec) -> None: self.channels[key] = EphemeralValue(Any) self.nodes[key] = ( @@ -463,7 +471,7 @@ def attach_edge(self, start: str, end: str) -> None: def attach_branch(self, start: str, name: str, branch: Branch) -> None: def branch_writer( - packets: list[Union[str, Send]], config: RunnableConfig + packets: Sequence[Union[str, Send]], config: RunnableConfig ) -> Optional[ChannelWrite]: writes = [ ( @@ -473,7 +481,10 @@ def branch_writer( ) for p in packets ] - return ChannelWrite(writes, tags=[TAG_HIDDEN]) + return ChannelWrite( + cast(Sequence[Union[ChannelWriteEntry, Send]], writes), + tags=[TAG_HIDDEN], + ) # add hidden start node if start == START and start not in self.nodes: @@ -489,7 +500,7 @@ def branch_writer( channel_name = f"branch:{start}:{name}:{end}" 
self.channels[channel_name] = EphemeralValue(Any) self.nodes[end].triggers.append(channel_name) - self.nodes[end].channels.append(channel_name) + cast(list[str], self.nodes[end].channels).append(channel_name) def get_graph( self, @@ -504,17 +515,25 @@ def get_graph( } end_nodes: dict[str, DrawableNode] = {} if xray: - subgraphs = dict(self.get_subgraphs()) + subgraphs = { + k: v for k, v in self.get_subgraphs() if isinstance(v, CompiledGraph) + } else: subgraphs = {} def add_edge( - start: str, end: str, label: Optional[str] = None, conditional: bool = False + start: str, + end: str, + label: Optional[Hashable] = None, + conditional: bool = False, ) -> None: if end == END and END not in end_nodes: end_nodes[END] = graph.add_node(self.get_output_schema(config), END) return graph.add_edge( - start_nodes[start], end_nodes[end], label, conditional + start_nodes[start], + end_nodes[end], + str(label) if label is not None else None, + conditional, ) for key, n in self.builder.nodes.items(): @@ -563,7 +582,7 @@ def add_edge( elif branch.then is not None: ends = {k: k for k in default_ends if k not in (END, branch.then)} else: - ends = default_ends + ends = cast(dict[Hashable, str], default_ends) for label, end in ends.items(): add_edge( start, diff --git a/libs/langgraph/langgraph/graph/message.py b/libs/langgraph/langgraph/graph/message.py index 34bc9c0907..6575bd10cd 100644 --- a/libs/langgraph/langgraph/graph/message.py +++ b/libs/langgraph/langgraph/graph/message.py @@ -63,9 +63,9 @@ def add_messages(left: Messages, right: Messages) -> Messages: """ # coerce to list if not isinstance(left, list): - left = [left] + left = [left] # type: ignore[assignment] if not isinstance(right, list): - right = [right] + right = [right] # type: ignore[assignment] # coerce to message left = [ message_chunk_to_message(cast(BaseMessageChunk, m)) diff --git a/libs/langgraph/langgraph/graph/state.py b/libs/langgraph/langgraph/graph/state.py index e66cff53da..8034343162 100644 --- 
a/libs/langgraph/langgraph/graph/state.py +++ b/libs/langgraph/langgraph/graph/state.py @@ -7,11 +7,13 @@ from typing import ( Any, Callable, + Literal, NamedTuple, Optional, Sequence, Type, Union, + cast, get_origin, get_type_hints, overload, @@ -122,7 +124,7 @@ class StateGraph(Graph): >>> print(step1) {'x': [0.5, 0.75]}""" - nodes: dict[str, StateNodeSpec] + nodes: dict[str, StateNodeSpec] # type: ignore[assignment] channels: dict[str, BaseChannel] managed: dict[str, ManagedValueSpec] schemas: dict[Type[Any], dict[str, Union[BaseChannel, ManagedValueSpec]]] @@ -302,7 +304,7 @@ def add_node( if not isinstance(node, str): action = node if isinstance(action, Runnable): - node = action.name + node = action.get_name() else: node = getattr(action, "__name__", action.__class__.__name__) if node is None: @@ -323,13 +325,15 @@ def add_node( raise ValueError( "Node name must be provided if action is not a function" ) + if action is None: + raise RuntimeError if node in self.nodes: raise ValueError(f"Node `{node}` already present.") if node == END or node == START: raise ValueError(f"Node `{node}` is reserved.") for character in (NS_SEP, NS_END): - if character in node: + if character in cast(str, node): raise ValueError( f"'{character}' is a reserved character and is not allowed in the node names." 
) @@ -349,8 +353,8 @@ def add_node( pass if input is not None: self._add_schema(input) - self.nodes[node] = StateNodeSpec( - coerce_to_runnable(action, name=node, trace=False), + self.nodes[cast(str, node)] = StateNodeSpec( + coerce_to_runnable(action, name=cast(str, node), trace=False), metadata, input=input or self.schema, retry_policy=retry, @@ -449,7 +453,6 @@ def compile( ) compiled = CompiledStateGraph( - builder=self, config_type=self.config_schema, nodes={}, channels={ @@ -468,6 +471,7 @@ def compile( debug=debug, store=store, ) + compiled.builder = self compiled.attach_node(START, None) for key, node in self.nodes.items(): @@ -618,7 +622,7 @@ def attach_edge(self, starts: Union[str, Sequence[str]], end: str) -> None: def attach_branch(self, start: str, name: str, branch: Branch) -> None: def branch_writer( - packets: list[Union[str, Send]], config: RunnableConfig + packets: Sequence[Union[str, Send]], config: RunnableConfig ) -> None: if filtered := [p for p in packets if p != END]: writes = [ @@ -638,7 +642,9 @@ def branch_writer( ), ) ) - ChannelWrite.do_write(config, writes) + ChannelWrite.do_write( + config, cast(Sequence[Union[Send, ChannelWriteEntry]], writes) + ) # attach branch publisher schema = ( @@ -708,11 +714,23 @@ def _get_channels( if name != "__slots__" } return ( - {k: v for k, v in all_keys.items() if not is_managed_value(v)}, + {k: v for k, v in all_keys.items() if isinstance(v, BaseChannel)}, {k: v for k, v in all_keys.items() if is_managed_value(v)}, ) +@overload +def _get_channel( + name: str, annotation: Any, *, allow_managed: Literal[False] +) -> BaseChannel: ... + + +@overload +def _get_channel( + name: str, annotation: Any, *, allow_managed: Literal[True] = True +) -> Union[BaseChannel, ManagedValueSpec]: ... 
+ + def _get_channel( name: str, annotation: Any, *, allow_managed: bool = True ) -> Union[BaseChannel, ManagedValueSpec]: @@ -728,7 +746,7 @@ def _get_channel( channel.key = name return channel - fallback = LastValue(annotation) + fallback: LastValue = LastValue(annotation) fallback.key = name return fallback diff --git a/libs/langgraph/langgraph/prebuilt/chat_agent_executor.py b/libs/langgraph/langgraph/prebuilt/chat_agent_executor.py index d4a6157cb6..dd2cddb5e7 100644 --- a/libs/langgraph/langgraph/prebuilt/chat_agent_executor.py +++ b/libs/langgraph/langgraph/prebuilt/chat_agent_executor.py @@ -419,7 +419,7 @@ class Agent,Tools otherClass raise ValueError(f"Missing required key(s) {missing_keys} in state_schema") if isinstance(tools, ToolExecutor): - tool_classes = tools.tools + tool_classes: Sequence[BaseTool] = tools.tools tool_node = ToolNode(tool_classes) elif isinstance(tools, ToolNode): tool_classes = list(tools.tools_by_name.values()) diff --git a/libs/langgraph/langgraph/prebuilt/tool_node.py b/libs/langgraph/langgraph/prebuilt/tool_node.py index c0f7c83f6b..52b80f75ac 100644 --- a/libs/langgraph/langgraph/prebuilt/tool_node.py +++ b/libs/langgraph/langgraph/prebuilt/tool_node.py @@ -382,7 +382,7 @@ def _get_state_args(tool: BaseTool) -> Dict[str, Optional[str]]: full_schema = tool.get_input_schema() tool_args_to_state_fields: Dict = {} - def _is_injection(type_arg: Any): + def _is_injection(type_arg: Any) -> bool: if isinstance(type_arg, InjectedState) or ( isinstance(type_arg, type) and issubclass(type_arg, InjectedState) ): diff --git a/libs/langgraph/langgraph/pregel/__init__.py b/libs/langgraph/langgraph/pregel/__init__.py index 3cf791ef82..9f16ac6988 100644 --- a/libs/langgraph/langgraph/pregel/__init__.py +++ b/libs/langgraph/langgraph/pregel/__init__.py @@ -138,7 +138,7 @@ def subscribe_to( ) return PregelNode( channels=cast( - Union[Mapping[None, str], Mapping[str, str]], + Union[list[str], Mapping[str, str]], ( {key: channels} if 
isinstance(channels, str) and key is not None @@ -305,7 +305,9 @@ def config_specs(self) -> list[ConfigurableFieldSpec]: @property def InputType(self) -> Any: if isinstance(self.input_channels, str): - return self.channels[self.input_channels].UpdateType + channel = self.channels[self.input_channels] + if isinstance(channel, BaseChannel): + return channel.UpdateType def get_input_schema( self, config: Optional[RunnableConfig] = None @@ -317,9 +319,9 @@ def get_input_schema( return create_model( self.get_name("Input"), field_definitions={ - k: (self.channels[k].UpdateType, None) + k: (c.UpdateType, None) for k in self.input_channels or self.channels.keys() - if isinstance(self.channels[k], BaseChannel) + if (c := self.channels[k]) and isinstance(c, BaseChannel) }, ) @@ -335,7 +337,9 @@ def get_input_jsonschema( @property def OutputType(self) -> Any: if isinstance(self.output_channels, str): - return self.channels[self.output_channels].ValueType + channel = self.channels[self.output_channels] + if isinstance(channel, BaseChannel): + return channel.ValueType def get_output_schema( self, config: Optional[RunnableConfig] = None @@ -347,9 +351,9 @@ def get_output_schema( return create_model( self.get_name("Output"), field_definitions={ - k: (self.channels[k].ValueType, None) + k: (c.ValueType, None) for k in self.output_channels - if isinstance(self.channels[k], BaseChannel) + if (c := self.channels[k]) and isinstance(c, BaseChannel) }, ) @@ -1050,8 +1054,8 @@ def _defaults( bool, set[StreamMode], Union[str, Sequence[str]], - Optional[Sequence[str]], - Optional[Sequence[str]], + Union[All, Sequence[str]], + Union[All, Sequence[str]], Optional[BaseCheckpointSaver], ]: debug = debug if debug is not None else self.debug @@ -1199,8 +1203,8 @@ def output() -> Iterator: debug, stream_modes, output_keys, - interrupt_before, - interrupt_after, + interrupt_before_, + interrupt_after_, checkpointer, ) = self._defaults( config, @@ -1253,7 +1257,7 @@ def get_waiter() -> 
concurrent.futures.Future[None]: else: return waiter else: - get_waiter = None + get_waiter = None # type: ignore[assignment] # Similarly to Bulk Synchronous Parallel / Pregel model # computation proceeds in steps, while there are channel updates # channel updates from step N are only visible in step N+1 @@ -1261,8 +1265,8 @@ def get_waiter() -> concurrent.futures.Future[None]: # with channel updates applied only at the transition between steps while loop.tick( input_keys=self.input_channels, - interrupt_before=interrupt_before, - interrupt_after=interrupt_after, + interrupt_before=interrupt_before_, + interrupt_after=interrupt_after_, manager=run_manager, ): for _ in runner.tick( @@ -1397,7 +1401,7 @@ def output() -> Iterator: # if running from astream_log() run each proc with streaming do_stream = next( ( - h + cast(_StreamingCallbackHandler, h) for h in run_manager.handlers if isinstance(h, _StreamingCallbackHandler) ), @@ -1415,8 +1419,8 @@ def output() -> Iterator: debug, stream_modes, output_keys, - interrupt_before, - interrupt_after, + interrupt_before_, + interrupt_after_, checkpointer, ) = self._defaults( config, @@ -1457,7 +1461,7 @@ def output() -> Iterator: def get_waiter() -> asyncio.Task[None]: return aioloop.create_task(stream.wait()) else: - get_waiter = None + get_waiter = None # type: ignore[assignment] # Similarly to Bulk Synchronous Parallel / Pregel model # computation proceeds in steps, while there are channel updates # channel updates from step N are only visible in step N+1 @@ -1465,8 +1469,8 @@ def get_waiter() -> asyncio.Task[None]: # with channel updates applied only at the transition between steps while loop.tick( input_keys=self.input_channels, - interrupt_before=interrupt_before, - interrupt_after=interrupt_after, + interrupt_before=interrupt_before_, + interrupt_after=interrupt_after_, manager=run_manager, ): async for _ in runner.atick( diff --git a/libs/langgraph/langgraph/pregel/algo.py b/libs/langgraph/langgraph/pregel/algo.py 
index bf4baeb0f5..40a10a40f7 100644 --- a/libs/langgraph/langgraph/pregel/algo.py +++ b/libs/langgraph/langgraph/pregel/algo.py @@ -52,6 +52,8 @@ from langgraph.pregel.types import All, PregelExecutableTask, PregelTask from langgraph.utils.config import merge_configs, patch_config +GetNextVersion = Callable[[Optional[V], BaseChannel], V] + EMPTY_SEQ: tuple[str, ...] = tuple() @@ -173,7 +175,7 @@ def apply_writes( checkpoint: Checkpoint, channels: Mapping[str, BaseChannel], tasks: Iterable[WritesProtocol], - get_next_version: Optional[Callable[[Optional[V], BaseChannel], V]], + get_next_version: Optional[GetNextVersion], ) -> dict[str, list[Any]]: # update seen versions for task in tasks: @@ -200,7 +202,7 @@ def apply_writes( }: if channels[chan].consume() and get_next_version is not None: checkpoint["channel_versions"][chan] = get_next_version( - max_version, # type: ignore[arg-type] + max_version, channels[chan], ) @@ -234,7 +236,7 @@ def apply_writes( if chan in channels: if channels[chan].update(vals) and get_next_version is not None: checkpoint["channel_versions"][chan] = get_next_version( - max_version, # type: ignore[arg-type] + max_version, channels[chan], ) updated_channels.add(chan) @@ -244,7 +246,7 @@ def apply_writes( if chan not in updated_channels: if channels[chan].update([]) and get_next_version is not None: checkpoint["channel_versions"][chan] = get_next_version( - max_version, # type: ignore[arg-type] + max_version, channels[chan], ) diff --git a/libs/langgraph/langgraph/pregel/debug.py b/libs/langgraph/langgraph/pregel/debug.py index 782d5a13c8..56f1eb9c60 100644 --- a/libs/langgraph/langgraph/pregel/debug.py +++ b/libs/langgraph/langgraph/pregel/debug.py @@ -82,7 +82,7 @@ class DebugOutputCheckpoint(DebugOutputBase): def map_debug_tasks( - step: int, tasks: list[PregelExecutableTask] + step: int, tasks: Iterable[PregelExecutableTask] ) -> Iterator[DebugOutputTask]: ts = datetime.now(timezone.utc).isoformat() for task in tasks: @@ -132,7 +132,7 @@ 
def map_debug_checkpoint( stream_channels: Union[str, Sequence[str]], metadata: CheckpointMetadata, checkpoint: Checkpoint, - tasks: list[PregelExecutableTask], + tasks: Iterable[PregelExecutableTask], pending_writes: list[PendingWrite], ) -> Iterator[DebugOutputCheckpoint]: yield { diff --git a/libs/langgraph/langgraph/pregel/loop.py b/libs/langgraph/langgraph/pregel/loop.py index ba98b16053..49baa1846a 100644 --- a/libs/langgraph/langgraph/pregel/loop.py +++ b/libs/langgraph/langgraph/pregel/loop.py @@ -64,6 +64,7 @@ WritableManagedValue, ) from langgraph.pregel.algo import ( + GetNextVersion, PregelTaskWrites, apply_writes, increment, @@ -92,7 +93,7 @@ ) from langgraph.pregel.manager import AsyncChannelsManager, ChannelsManager from langgraph.pregel.read import PregelNode -from langgraph.pregel.types import PregelExecutableTask, StreamMode +from langgraph.pregel.types import All, PregelExecutableTask, StreamMode from langgraph.pregel.utils import get_new_channel_versions from langgraph.store.base import BaseStore from langgraph.store.batch import AsyncBatchedStore @@ -146,7 +147,7 @@ class PregelLoop: skip_done_tasks: bool is_nested: bool - checkpointer_get_next_version: Callable[[Optional[V]], V] + checkpointer_get_next_version: GetNextVersion checkpointer_put_writes: Optional[ Callable[[RunnableConfig, Sequence[tuple[str, Any]], str], Any] ] @@ -281,8 +282,8 @@ def tick( self, *, input_keys: Union[str, Sequence[str]], - interrupt_after: Sequence[str] = EMPTY_SEQ, - interrupt_before: Sequence[str] = EMPTY_SEQ, + interrupt_after: Union[All, Sequence[str]] = EMPTY_SEQ, + interrupt_before: Union[All, Sequence[str]] = EMPTY_SEQ, manager: Union[None, AsyncParentRunManager, ParentRunManager] = None, ) -> bool: """Execute a single iteration of the Pregel loop. 
@@ -681,6 +682,10 @@ def __enter__(self) -> Self: if self.config.get("configurable", {}).get( CONFIG_KEY_ENSURE_LATEST ) and self.checkpoint_config["configurable"].get("checkpoint_id"): + if self.checkpointer is None: + raise RuntimeError( + "Cannot ensure latest checkpoint without checkpointer" + ) saved = self.checkpointer.get_tuple( patch_configurable(self.checkpoint_config, {"checkpoint_id": None}) ) @@ -771,7 +776,7 @@ def __init__( self.checkpointer_put_writes = checkpointer.aput_writes else: self.checkpointer_get_next_version = increment - self._checkpointer_put_after_previous = None # type: ignore[method-assign] + self._checkpointer_put_after_previous = None # type: ignore[assignment] self.checkpointer_put_writes = None async def _checkpointer_put_after_previous( @@ -801,6 +806,10 @@ async def __aenter__(self) -> Self: if self.config.get("configurable", {}).get( CONFIG_KEY_ENSURE_LATEST ) and self.checkpoint_config["configurable"].get("checkpoint_id"): + if self.checkpointer is None: + raise RuntimeError( + "Cannot ensure latest checkpoint without checkpointer" + ) saved = await self.checkpointer.aget_tuple( patch_configurable(self.checkpoint_config, {"checkpoint_id": None}) ) @@ -858,6 +867,3 @@ async def __aexit__( return await asyncio.shield( self.stack.__aexit__(exc_type, exc_value, traceback) ) - - -EMPTY_SEQ = tuple() diff --git a/libs/langgraph/langgraph/pregel/messages.py b/libs/langgraph/langgraph/pregel/messages.py index 0a96f0fee4..7c3f90b107 100644 --- a/libs/langgraph/langgraph/pregel/messages.py +++ b/libs/langgraph/langgraph/pregel/messages.py @@ -7,7 +7,6 @@ List, Optional, Sequence, - Tuple, Union, cast, ) @@ -21,20 +20,16 @@ from langgraph.constants import NS_SEP from langgraph.pregel.loop import StreamChunk +Meta = tuple[tuple[str, ...], dict[str, Any]] + class StreamMessagesHandler(BaseCallbackHandler, _StreamingCallbackHandler): def __init__(self, stream: Callable[[StreamChunk], None]): self.stream = stream - self.metadata: dict[UUID, 
tuple[tuple[str, ...], dict[str, Any]]] = {} + self.metadata: dict[UUID, Meta] = {} self.seen: set[Union[int, str]] = set() - def _emit( - self, - meta: Tuple[str, dict[str, Any]], - message: BaseMessage, - *, - dedupe: bool = False, - ) -> None: + def _emit(self, meta: Meta, message: BaseMessage, *, dedupe: bool = False) -> None: ident = id(message) if dedupe and message.id in self.seen: return diff --git a/libs/langgraph/langgraph/pregel/read.py b/libs/langgraph/langgraph/pregel/read.py index 79643a0902..3ad988b898 100644 --- a/libs/langgraph/langgraph/pregel/read.py +++ b/libs/langgraph/langgraph/pregel/read.py @@ -18,7 +18,7 @@ RunnablePassthrough, RunnableSerializable, ) -from langchain_core.runnables.base import Input, Other, Output, coerce_to_runnable +from langchain_core.runnables.base import Input, Other, coerce_to_runnable from langchain_core.runnables.utils import ConfigurableFieldSpec from langgraph.constants import CONFIG_KEY_READ @@ -206,7 +206,7 @@ def __or__( Mapping[str, Runnable[Any, Other] | Callable[[Any], Other]], ], ) -> PregelNode: - if ChannelWrite.is_writer(other): + if isinstance(other, Runnable) and ChannelWrite.is_writer(other): return self.copy(update=dict(writers=[*self.writers, other])) elif self.bound is DEFAULT_BOUND: return self.copy(update=dict(bound=coerce_to_runnable(other))) @@ -237,7 +237,7 @@ def invoke( input: Input, config: Optional[RunnableConfig] = None, **kwargs: Optional[Any], - ) -> Output: + ) -> Any: return self.bound.invoke( input, merge_configs({"metadata": self.metadata, "tags": self.tags}, config), @@ -249,7 +249,7 @@ async def ainvoke( input: Input, config: Optional[RunnableConfig] = None, **kwargs: Optional[Any], - ) -> Output: + ) -> Any: return await self.bound.ainvoke( input, merge_configs({"metadata": self.metadata, "tags": self.tags}, config), @@ -261,7 +261,7 @@ def stream( input: Input, config: Optional[RunnableConfig] = None, **kwargs: Optional[Any], - ) -> Iterator[Output]: + ) -> Iterator[Any]: yield 
from self.bound.stream( input, merge_configs({"metadata": self.metadata, "tags": self.tags}, config), @@ -273,7 +273,7 @@ async def astream( input: Input, config: Optional[RunnableConfig] = None, **kwargs: Optional[Any], - ) -> AsyncIterator[Output]: + ) -> AsyncIterator[Any]: async for item in self.bound.astream( input, merge_configs({"metadata": self.metadata, "tags": self.tags}, config), diff --git a/libs/langgraph/langgraph/pregel/runner.py b/libs/langgraph/langgraph/pregel/runner.py index 7e11224857..14e84352ff 100644 --- a/libs/langgraph/langgraph/pregel/runner.py +++ b/libs/langgraph/langgraph/pregel/runner.py @@ -5,11 +5,13 @@ Any, AsyncIterator, Callable, + Iterable, Iterator, Optional, Sequence, Type, Union, + cast, ) from langgraph.constants import ERROR, INTERRUPT, NO_WRITES @@ -33,7 +35,7 @@ def __init__( def tick( self, - tasks: Sequence[PregelExecutableTask], + tasks: Iterable[PregelExecutableTask], *, reraise: bool = True, timeout: Optional[float] = None, @@ -106,7 +108,7 @@ def tick( async def atick( self, - tasks: Sequence[PregelExecutableTask], + tasks: Iterable[PregelExecutableTask], *, reraise: bool = True, timeout: Optional[float] = None, @@ -141,14 +143,17 @@ async def atick( for t in tasks: if not t.writes: futures[ - self.submit( - arun_with_retry, - t, - retry_policy, - stream=self.use_astream, - __name__=t.name, - __cancel_on_exit__=True, - __reraise_on_exit__=reraise, + cast( + asyncio.Future, + self.submit( + arun_with_retry, + t, + retry_policy, + stream=self.use_astream, + __name__=t.name, + __cancel_on_exit__=True, + __reraise_on_exit__=reraise, + ), ) ] = t all_futures = futures.copy() diff --git a/libs/langgraph/langgraph/pregel/validate.py b/libs/langgraph/langgraph/pregel/validate.py index 8627642e9a..2320142403 100644 --- a/libs/langgraph/langgraph/pregel/validate.py +++ b/libs/langgraph/langgraph/pregel/validate.py @@ -1,4 +1,4 @@ -from typing import Mapping, Optional, Sequence, Union +from typing import Any, Mapping, Optional, 
Sequence, Union from langgraph.channels.base import BaseChannel from langgraph.constants import RESERVED @@ -65,18 +65,18 @@ def validate_graph( raise ValueError(f"Output channel '{chan}' not in 'channels'") if interrupt_after_nodes != "*": - for node in interrupt_after_nodes: - if node not in nodes: - raise ValueError(f"Node {node} not in nodes") + for n in interrupt_after_nodes: + if n not in nodes: + raise ValueError(f"Node {n} not in nodes") if interrupt_before_nodes != "*": - for node in interrupt_before_nodes: - if node not in nodes: - raise ValueError(f"Node {node} not in nodes") + for n in interrupt_before_nodes: + if n not in nodes: + raise ValueError(f"Node {n} not in nodes") def validate_keys( keys: Optional[Union[str, Sequence[str]]], - channels: Mapping[str, BaseChannel], + channels: Mapping[str, Any], ) -> None: if isinstance(keys, str): if keys not in channels: diff --git a/libs/langgraph/langgraph/pregel/write.py b/libs/langgraph/langgraph/pregel/write.py index 9c3b7782df..2adcab7579 100644 --- a/libs/langgraph/langgraph/pregel/write.py +++ b/libs/langgraph/langgraph/pregel/write.py @@ -9,6 +9,7 @@ Sequence, TypeVar, Union, + cast, ) from langchain_core.runnables import Runnable, RunnableConfig @@ -34,7 +35,7 @@ class ChannelWriteEntry(NamedTuple): class ChannelWrite(RunnableCallable): - writes: Sequence[Union[ChannelWriteEntry, Send]] + writes: list[Union[ChannelWriteEntry, Send]] """ Sequence of write entries, each of which is a tuple of: - channel name @@ -54,7 +55,7 @@ def __init__( require_at_least_one_of: Optional[Sequence[str]] = None, ): super().__init__(func=self._write, afunc=self._awrite, name=None, tags=tags) - self.writes = writes + self.writes = cast(list[Union[ChannelWriteEntry, Send]], writes) self.require_at_least_one_of = require_at_least_one_of def get_name( diff --git a/libs/langgraph/langgraph/utils/runnable.py b/libs/langgraph/langgraph/utils/runnable.py index f0b16442d9..56d5d5df40 100644 --- 
a/libs/langgraph/langgraph/utils/runnable.py +++ b/libs/langgraph/langgraph/utils/runnable.py @@ -269,7 +269,7 @@ def __init__( if isinstance(step, RunnableSequence): steps_flat.extend(step.steps) elif isinstance(step, RunnableSeq): - steps_flat.extend(step.steps) # type: ignore[has-type] + steps_flat.extend(step.steps) else: steps_flat.append(coerce_to_runnable(step, name=None, trace=True)) if len(steps_flat) < 2: diff --git a/libs/langgraph/pyproject.toml b/libs/langgraph/pyproject.toml index 954ca2973a..4975cabb00 100644 --- a/libs/langgraph/pyproject.toml +++ b/libs/langgraph/pyproject.toml @@ -57,7 +57,7 @@ warn_no_return = "False" warn_unused_ignores = "True" warn_redundant_casts = "True" allow_redefinition = "True" -disable_error_code = "typeddict-item, return-value, override" +disable_error_code = "typeddict-item, return-value, override, has-type" [tool.coverage.run] omit = ["tests/*"] From 0cf1a13dabdd43a0007002ce876c559809accf31 Mon Sep 17 00:00:00 2001 From: Lance Martin <122662504+rlancemartin@users.noreply.github.com> Date: Thu, 19 Sep 2024 13:20:20 -0700 Subject: [PATCH 11/17] Update conceptual docs for multiple schema (#1770) * Update conceptual docs for multiple schema * Link to conceptual docs from How-To --- docs/docs/concepts/low_level.md | 58 ++++++++++++++++----- docs/docs/how-tos/input_output_schema.ipynb | 4 +- docs/docs/how-tos/pass_private_state.ipynb | 2 +- 3 files changed, 50 insertions(+), 14 deletions(-) diff --git a/docs/docs/concepts/low_level.md b/docs/docs/concepts/low_level.md index c8a01879fd..47b70fc27a 100644 --- a/docs/docs/concepts/low_level.md +++ b/docs/docs/concepts/low_level.md @@ -50,28 +50,62 @@ By default, the graph will have the same input and output schemas. If you want t #### Multiple schemas -Typically, all graph nodes communicate with a single schema. This means that they will read and write to the same state channels. 
But, there are cases where we may want a bit more control over this: +Typically, all graph nodes communicate with a single schema. This means that they will read and write to the same state channels. But, there are cases where we want more control over this: -* Internal nodes may pass information that is not required in the graph's input / output. +* Internal nodes can pass information that is not required in the graph's input / output. * We may also want to use different input / output schemas for the graph. The output might, for example, only contain a single relevant output key. -It is possible to have nodes write to private state channels inside the graph for internal node communication. We can simply define a private schema and use a type hint -- e.g., `state: PrivateState` as shown below -- to specify it as the node input schema. See [this notebook](../how-tos/pass_private_state.ipynb) for more detail. +It is possible to have nodes write to private state channels inside the graph for internal node communication. We can simply define a private schema, `PrivateState`. See [this notebook](../how-tos/pass_private_state.ipynb) for more detail. + +It is also possible to define explicit input and output schemas for a graph. In these cases, we define an "internal" schema that contains *all* keys relevant to graph operations. But, we also define `input` and `output` schemas that are sub-sets of the "internal" schema to constrain the input and output of the graph. See [this notebook](../how-tos/input_output_schema.ipynb) for more detail. + +Let's look at an example: ```python -class OverallState(TypedDict): - foo: int +class InputState(TypedDict): + user_input: str -class PrivateState(TypedDict): - baz: int +class OutputState(TypedDict): + graph_output: str -def node_1(state: OverallState) -> PrivateState: - ... +class OverallState(TypedDict): + foo: str + user_input: str + graph_output: str -def node_2(state: PrivateState) -> OverallState: - ... 
+class PrivateState(TypedDict): + bar: str + +def node_1(state: InputState) -> OverallState: + # Write to OverallState + return {"foo": state["user_input"] + " name"} + +def node_2(state: OverallState) -> PrivateState: + # Read from OverallState, write to PrivateState + return {"bar": state["foo"] + " is"} + +def node_3(state: PrivateState) -> OutputState: + # Read from PrivateState, write to OutputState + return {"graph_output": state["bar"] + " Lance"} + +builder = StateGraph(OverallState,input=InputState,output=OutputState) +builder.add_node("node_1", node_1) +builder.add_node("node_2", node_2) +builder.add_node("node_3", node_3) +builder.add_edge(START, "node_1") +builder.add_edge("node_1", "node_2") +builder.add_edge("node_2", "node_3") +builder.add_edge("node_3", END) + +graph = builder.compile() +graph.invoke({"user_input":"My"}) +{'graph_output': 'My name is Lance'} ``` +There are two subtle and important points to note here: -It is also possible to define explicit input and output schemas for a graph. In these cases, we define an "internal" schema that contains *all* keys relevant to graph operations. But, we also define `input` and `output` schemas that are sub-sets of the "internal" schema to constrain the input and output of the graph. See [this notebook](../how-tos/input_output_schema.ipynb) for more detail. +1. We pass `state: InputState` as the input schema to `node_1`. But, we write out to `foo`, a channel in `OverallState`. How can we write out to a state channel that is not included in the input schema? This is because a node *can write to any state channel in the graph state.* The graph state is the union of the state channels defined at initialization, which includes `OverallState` and the filters `InputState` and `OutputState`. + +2. We initialize the graph with `StateGraph(OverallState,input=InputState,output=OutputState)`. So, how can we write to `PrivateState` in `node_2`? 
How does the graph gain access to this schema if it was not passed in the `StateGraph` initialization? We can do this because *nodes can also declare additional state channels* as long as the state schema definition exists. In this case, the `PrivateState` schema is defined, so we can add `bar` as a new state channel in the graph and write to it. ### Reducers diff --git a/docs/docs/how-tos/input_output_schema.ipynb b/docs/docs/how-tos/input_output_schema.ipynb index 955c182b00..97f69c48d6 100644 --- a/docs/docs/how-tos/input_output_schema.ipynb +++ b/docs/docs/how-tos/input_output_schema.ipynb @@ -9,7 +9,9 @@ "\n", "By default, `StateGraph` takes in a single schema and all nodes are expected to communicate with that schema. However, it is also possible to define explicit input and output schemas for a graph. Often, in these cases, we define an \"internal\" schema that contains all keys relevant to graph operations. But, we use specific input and output schemas to filter what's permitted when invoking and what's returned. We use type hints below to, for example, show that the output of `answer_node` will be filtered to `OutputState`. In addition, we define each node's input schema (e.g., as state: `OverallState` for `answer_node`).\n", "\n", - "In this notebook we'll walk through an example of this. At a high level, in order to do this you simply have to pass in `input=..., output=...` when defining the graph. Let's see an example below!\n", + "In this notebook we'll walk through an example of this. At a high level, in order to do this you simply have to pass in `input=..., output=...` when defining the graph. 
See the conceptual docs [here](https://langchain-ai.github.io/langgraph/concepts/low_level/#multiple-schemas) for more details.\n", + "\n", + "Let's look at an example!\n", "\n", "## Setup\n", "\n", diff --git a/docs/docs/how-tos/pass_private_state.ipynb b/docs/docs/how-tos/pass_private_state.ipynb index dbbb9ae741..5c731d916a 100644 --- a/docs/docs/how-tos/pass_private_state.ipynb +++ b/docs/docs/how-tos/pass_private_state.ipynb @@ -15,7 +15,7 @@ "3. Retrieves documents for that generated query\n", "4. Generates a final answer based on those documents\n", "\n", - "We will have a separate node for each step. We will only have the `question` and `answer` on the overall state. However, we will need separate states for the `search_query` and the `documents` - we will pass these as private state keys.\n", + "We will have a separate node for each step. We will only have the `question` and `answer` on the overall state. However, we will need separate states for the `search_query` and the `documents` - we will pass these as private state keys. 
See the conceptual docs [here](https://langchain-ai.github.io/langgraph/concepts/low_level/#multiple-schemas) for more details.\n", "\n", "Let's look at an example!\n", "\n", From 95c1ca3adcd1b14f304aecd01140a9dd636f1e8f Mon Sep 17 00:00:00 2001 From: Nuno Campos Date: Thu, 19 Sep 2024 13:20:45 -0700 Subject: [PATCH 12/17] Fix --- libs/langgraph/Makefile | 2 +- .../channels/dynamic_barrier_value.py | 6 +++--- libs/langgraph/langgraph/graph/graph.py | 18 ++++++++++++------ libs/langgraph/langgraph/graph/state.py | 2 +- 4 files changed, 17 insertions(+), 11 deletions(-) diff --git a/libs/langgraph/Makefile b/libs/langgraph/Makefile index 7ec12f80a8..1e249a0cd6 100644 --- a/libs/langgraph/Makefile +++ b/libs/langgraph/Makefile @@ -75,7 +75,7 @@ lint lint_diff lint_package lint_tests: [ "$(PYTHON_FILES)" = "" ] || poetry run ruff format $(PYTHON_FILES) --diff [ "$(PYTHON_FILES)" = "" ] || poetry run ruff check --select I $(PYTHON_FILES) [ "$(PYTHON_FILES)" = "" ] || mkdir -p $(MYPY_CACHE) - [ "$(PYTHON_FILES)" != "langgraph" ] || poetry run mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE) + [ "$(PYTHON_FILES)" = "" ] || poetry run mypy langgraph --cache-dir $(MYPY_CACHE) format format_diff: poetry run ruff format $(PYTHON_FILES) diff --git a/libs/langgraph/langgraph/channels/dynamic_barrier_value.py b/libs/langgraph/langgraph/channels/dynamic_barrier_value.py index dfa77f3501..f64191e863 100644 --- a/libs/langgraph/langgraph/channels/dynamic_barrier_value.py +++ b/libs/langgraph/langgraph/channels/dynamic_barrier_value.py @@ -1,4 +1,4 @@ -from typing import Generic, NamedTuple, Optional, Sequence, Type, Union +from typing import Any, Generic, NamedTuple, Optional, Sequence, Type, Union from typing_extensions import Self @@ -6,8 +6,8 @@ from langgraph.errors import EmptyChannelError, InvalidUpdateError -class WaitForNames(NamedTuple, Generic[Value]): - names: set[Value] +class WaitForNames(NamedTuple): + names: set[Any] class DynamicBarrierValue( diff --git 
a/libs/langgraph/langgraph/graph/graph.py b/libs/langgraph/langgraph/graph/graph.py index 93fe28b3d1..12f9cf9432 100644 --- a/libs/langgraph/langgraph/graph/graph.py +++ b/libs/langgraph/langgraph/graph/graph.py @@ -268,13 +268,15 @@ def add_conditional_edges( path_map_ = path_map.copy() elif isinstance(path_map, list): path_map_ = {name: name for name in path_map} - elif callable(path) and ( - rtn_type := get_type_hints(path.__call__).get("return") - if hasattr(path, "__call__") - else get_type_hints(path).get("return") - ): + elif isinstance(path, Runnable): + path_map_ = None + elif rtn_type := get_type_hints(path.__call__).get( # type: ignore[operator] + "return" + ) or get_type_hints(path).get("return"): if get_origin(rtn_type) is Literal: path_map_ = {name: name for name in get_args(rtn_type)} + else: + path_map_ = None else: path_map_ = None except Exception: @@ -417,6 +419,7 @@ def compile( # create empty compiled graph compiled = CompiledGraph( + builder=self, nodes={}, channels={START: EphemeralValue(Any), END: EphemeralValue(Any)}, input_channels=START, @@ -429,7 +432,6 @@ def compile( auto_validate=False, debug=debug, ) - compiled.builder = self # attach nodes, edges, and branches for key, node in self.nodes.items(): @@ -449,6 +451,10 @@ def compile( class CompiledGraph(Pregel): builder: Graph + def __init__(self, *, builder: Graph, **kwargs: Any) -> None: + super().__init__(**kwargs) + self.builder = builder + def attach_node(self, key: str, node: NodeSpec) -> None: self.channels[key] = EphemeralValue(Any) self.nodes[key] = ( diff --git a/libs/langgraph/langgraph/graph/state.py b/libs/langgraph/langgraph/graph/state.py index 8034343162..ace4d25532 100644 --- a/libs/langgraph/langgraph/graph/state.py +++ b/libs/langgraph/langgraph/graph/state.py @@ -453,6 +453,7 @@ def compile( ) compiled = CompiledStateGraph( + builder=self, config_type=self.config_schema, nodes={}, channels={ @@ -471,7 +472,6 @@ def compile( debug=debug, store=store, ) - 
compiled.builder = self compiled.attach_node(START, None) for key, node in self.nodes.items(): From a47dc2b6340ce83663b5ece438a97ca0a34bc03a Mon Sep 17 00:00:00 2001 From: Nuno Campos Date: Thu, 19 Sep 2024 14:37:32 -0700 Subject: [PATCH 13/17] Apply suggestions from code review Co-authored-by: William FH <13333726+hinthornw@users.noreply.github.com> --- libs/langgraph/langgraph/graph/graph.py | 2 +- libs/langgraph/langgraph/graph/state.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/libs/langgraph/langgraph/graph/graph.py b/libs/langgraph/langgraph/graph/graph.py index 12f9cf9432..2a1e8c8adf 100644 --- a/libs/langgraph/langgraph/graph/graph.py +++ b/libs/langgraph/langgraph/graph/graph.py @@ -194,7 +194,7 @@ def add_node( "Node name must be provided if action is not a function" ) if action is None: - raise RuntimeError + raise RuntimeError(f"Expected a function or Runnable action in add_node. Received None.") if node in self.nodes: raise ValueError(f"Node `{node}` already present.") if node == END or node == START: diff --git a/libs/langgraph/langgraph/graph/state.py b/libs/langgraph/langgraph/graph/state.py index ace4d25532..b759ff4ca7 100644 --- a/libs/langgraph/langgraph/graph/state.py +++ b/libs/langgraph/langgraph/graph/state.py @@ -320,7 +320,7 @@ def add_node( ) if not isinstance(node, str): action = node - node = getattr(action, "name", getattr(action, "__name__")) + node = getattr(action, "name", getattr(action, "__name__", None)) if node is None: raise ValueError( "Node name must be provided if action is not a function" From e96934533e48888a648d60f88e585cd54f2f8fce Mon Sep 17 00:00:00 2001 From: Nuno Campos Date: Thu, 19 Sep 2024 15:06:39 -0700 Subject: [PATCH 14/17] Update graph.py --- libs/langgraph/langgraph/graph/graph.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/langgraph/langgraph/graph/graph.py b/libs/langgraph/langgraph/graph/graph.py index 2a1e8c8adf..418bb3a7e4 100644 --- 
a/libs/langgraph/langgraph/graph/graph.py +++ b/libs/langgraph/langgraph/graph/graph.py @@ -194,7 +194,7 @@ def add_node( "Node name must be provided if action is not a function" ) if action is None: - raise RuntimeError(f"Expected a function or Runnable action in add_node. Received None.") + raise RuntimeError("Expected a function or Runnable action in add_node. Received None.") if node in self.nodes: raise ValueError(f"Node `{node}` already present.") if node == END or node == START: From ed27e54761f840224ffd04f6d06173239ffe4feb Mon Sep 17 00:00:00 2001 From: Nuno Campos Date: Thu, 19 Sep 2024 16:41:46 -0700 Subject: [PATCH 15/17] Update graph.py --- libs/langgraph/langgraph/graph/graph.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/libs/langgraph/langgraph/graph/graph.py b/libs/langgraph/langgraph/graph/graph.py index 418bb3a7e4..7e20d1c26a 100644 --- a/libs/langgraph/langgraph/graph/graph.py +++ b/libs/langgraph/langgraph/graph/graph.py @@ -194,7 +194,9 @@ def add_node( "Node name must be provided if action is not a function" ) if action is None: - raise RuntimeError("Expected a function or Runnable action in add_node. Received None.") + raise RuntimeError( + "Expected a function or Runnable action in add_node. Received None." 
+ ) if node in self.nodes: raise ValueError(f"Node `{node}` already present.") if node == END or node == START: From 7643c1171f613acdacd9f855523c5f2e919862f7 Mon Sep 17 00:00:00 2001 From: Nuno Campos Date: Thu, 19 Sep 2024 16:49:05 -0700 Subject: [PATCH 16/17] Lint --- libs/langgraph/langgraph/graph/state.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/langgraph/langgraph/graph/state.py b/libs/langgraph/langgraph/graph/state.py index b759ff4ca7..e5f20a51dd 100644 --- a/libs/langgraph/langgraph/graph/state.py +++ b/libs/langgraph/langgraph/graph/state.py @@ -320,7 +320,7 @@ def add_node( ) if not isinstance(node, str): action = node - node = getattr(action, "name", getattr(action, "__name__", None)) + node = cast(str, getattr(action, "name", getattr(action, "__name__", None))) if node is None: raise ValueError( "Node name must be provided if action is not a function" From 94e64f87b16459ca393ead0895c265f16bb9cee6 Mon Sep 17 00:00:00 2001 From: Nuno Campos Date: Thu, 19 Sep 2024 15:07:45 -0700 Subject: [PATCH 17/17] Allow chaining add_node/add_edge/etc calls --- libs/langgraph/langgraph/graph/graph.py | 23 ++++++++++++++--------- libs/langgraph/langgraph/graph/state.py | 11 +++++++---- 2 files changed, 21 insertions(+), 13 deletions(-) diff --git a/libs/langgraph/langgraph/graph/graph.py b/libs/langgraph/langgraph/graph/graph.py index 7e20d1c26a..c5a043ee7e 100644 --- a/libs/langgraph/langgraph/graph/graph.py +++ b/libs/langgraph/langgraph/graph/graph.py @@ -23,6 +23,7 @@ from langchain_core.runnables.config import RunnableConfig from langchain_core.runnables.graph import Graph as DrawableGraph from langchain_core.runnables.graph import Node as DrawableNode +from typing_extensions import Self from langgraph.channels.ephemeral_value import EphemeralValue from langgraph.checkpoint.base import BaseCheckpointSaver @@ -156,7 +157,7 @@ def add_node( node: RunnableLike, *, metadata: Optional[dict[str, Any]] = None, - ) -> None: ... 
@overload def add_node( @@ -165,7 +166,7 @@ def add_node( action: RunnableLike, *, metadata: Optional[dict[str, Any]] = None, - ) -> None: ... + ) -> Self: ... def add_node( self, @@ -173,7 +174,7 @@ def add_node( action: Optional[RunnableLike] = None, *, metadata: Optional[dict[str, Any]] = None, - ) -> None: + ) -> Self: if isinstance(node, str): for character in (NS_SEP, NS_END): if character in node: @@ -205,8 +206,9 @@ def add_node( self.nodes[cast(str, node)] = NodeSpec( coerce_to_runnable(action, name=cast(str, node), trace=False), metadata ) + return self - def add_edge(self, start_key: str, end_key: str) -> None: + def add_edge(self, start_key: str, end_key: str) -> Self: if self.compiled: logger.warning( "Adding an edge to a graph that has already been compiled. This will " @@ -227,6 +229,7 @@ def add_edge(self, start_key: str, end_key: str) -> None: ) self.edges.add((start_key, end_key)) + return self def add_conditional_edges( self, @@ -238,7 +241,7 @@ def add_conditional_edges( ], path_map: Optional[Union[dict[Hashable, str], list[str]]] = None, then: Optional[str] = None, - ) -> None: + ) -> Self: """Add a conditional edge from the starting node to any number of destination nodes. Args: @@ -293,8 +296,9 @@ def add_conditional_edges( ) # save it self.branches[source][name] = Branch(path, path_map_, then) + return self - def set_entry_point(self, key: str) -> None: + def set_entry_point(self, key: str) -> Self: """Specifies the first node to be called in the graph. Equivalent to calling `add_edge(START, key)`. @@ -316,7 +320,7 @@ def set_conditional_entry_point( ], path_map: Optional[Union[dict[Hashable, str], list[str]]] = None, then: Optional[str] = None, - ) -> None: + ) -> Self: """Sets a conditional entry point in the graph. 
Args: @@ -333,7 +337,7 @@ def set_conditional_entry_point( """ return self.add_conditional_edges(START, path, path_map, then) - def set_finish_point(self, key: str) -> None: + def set_finish_point(self, key: str) -> Self: """Marks a node as a finish point of the graph. If the graph reaches this node, it will cease execution. @@ -346,7 +350,7 @@ def set_finish_point(self, key: str) -> None: """ return self.add_edge(key, END) - def validate(self, interrupt: Optional[Sequence[str]] = None) -> None: + def validate(self, interrupt: Optional[Sequence[str]] = None) -> Self: # assemble sources all_sources = {src for src, _ in self._all_edges} for start, branches in self.branches.items(): @@ -398,6 +402,7 @@ def validate(self, interrupt: Optional[Sequence[str]] = None) -> None: raise ValueError(f"Interrupt node `{node}` not found") self.compiled = True + return self def compile( self, diff --git a/libs/langgraph/langgraph/graph/state.py b/libs/langgraph/langgraph/graph/state.py index e5f20a51dd..cee0fb849d 100644 --- a/libs/langgraph/langgraph/graph/state.py +++ b/libs/langgraph/langgraph/graph/state.py @@ -23,6 +23,7 @@ from langchain_core.runnables.base import RunnableLike from pydantic import BaseModel from pydantic.v1 import BaseModel as BaseModelV1 +from typing_extensions import Self from langgraph._api.deprecation import LangGraphDeprecationWarning from langgraph.channels.base import BaseChannel @@ -211,7 +212,7 @@ def add_node( metadata: Optional[dict[str, Any]] = None, input: Optional[Type[Any]] = None, retry: Optional[RetryPolicy] = None, - ) -> None: + ) -> Self: """Adds a new node to the state graph. Will take the name of the function/runnable as the node name. @@ -235,7 +236,7 @@ def add_node( metadata: Optional[dict[str, Any]] = None, input: Optional[Type[Any]] = None, retry: Optional[RetryPolicy] = None, - ) -> None: + ) -> Self: """Adds a new node to the state graph. 
Args: @@ -258,7 +259,7 @@ def add_node( metadata: Optional[dict[str, Any]] = None, input: Optional[Type[Any]] = None, retry: Optional[RetryPolicy] = None, - ) -> None: + ) -> Self: """Adds a new node to the state graph. Will take the name of the function/runnable as the node name. @@ -359,8 +360,9 @@ def add_node( input=input or self.schema, retry_policy=retry, ) + return self - def add_edge(self, start_key: Union[str, list[str]], end_key: str) -> None: + def add_edge(self, start_key: Union[str, list[str]], end_key: str) -> Self: """Adds a directed edge from the start node to the end node. If the graph transitions to the start_key node, it will always transition to the end_key node next. @@ -394,6 +396,7 @@ def add_edge(self, start_key: Union[str, list[str]], end_key: str) -> None: raise ValueError(f"Need to add_node `{end_key}` first") self.waiting_edges.add((tuple(start_key), end_key)) + return self def compile( self,