From 38959c0e2a15f307cececc93ddf1db19ac72ea50 Mon Sep 17 00:00:00 2001
From: Matthew Farrellee
Date: Sat, 23 Mar 2024 09:51:10 -0400
Subject: [PATCH 1/2] add a serialization test of ChatNVIDIA using dumps/loads

---
 .../tests/unit_tests/test_serialization.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)
 create mode 100644 libs/ai-endpoints/tests/unit_tests/test_serialization.py

diff --git a/libs/ai-endpoints/tests/unit_tests/test_serialization.py b/libs/ai-endpoints/tests/unit_tests/test_serialization.py
new file mode 100644
index 00000000..d0f77f7b
--- /dev/null
+++ b/libs/ai-endpoints/tests/unit_tests/test_serialization.py
@@ -0,0 +1,16 @@
+from langchain_core.load.dump import dumps
+from langchain_core.load.load import loads
+
+from langchain_nvidia_ai_endpoints import ChatNVIDIA
+
+
+def test_serialize_chatnvidia() -> None:
+    secret = "a-bogus-key"
+    x = ChatNVIDIA(nvidia_api_key=secret)
+    y = loads(
+        dumps(x),
+        secrets_map={"NVIDIA_API_KEY": secret},
+        valid_namespaces=["langchain_nvidia_ai_endpoints"],
+    )
+    assert x == y
+    assert isinstance(y, ChatNVIDIA)

From 8c8b173c0867b9276bd99320e062f2ea4293a30b Mon Sep 17 00:00:00 2001
From: Matthew Farrellee
Date: Sat, 23 Mar 2024 12:34:47 -0400
Subject: [PATCH 2/2] add a serialization integration test of ChatNVIDIA using dumps/loads

---
 .../tests/integration_tests/test_chat_models.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/libs/ai-endpoints/tests/integration_tests/test_chat_models.py b/libs/ai-endpoints/tests/integration_tests/test_chat_models.py
index 86984fff..a7f9ba32 100644
--- a/libs/ai-endpoints/tests/integration_tests/test_chat_models.py
+++ b/libs/ai-endpoints/tests/integration_tests/test_chat_models.py
@@ -2,6 +2,8 @@
 import warnings
 
 import pytest
+from langchain_core.load.dump import dumps
+from langchain_core.load.load import loads
 from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage
 
 from langchain_nvidia_ai_endpoints.chat_models import ChatNVIDIA
@@ -273,3 +275,11 @@ def test_ai_endpoints_invoke_top_p_positive(chat_model: str, mode: dict) -> None
     result1 = llm1.invoke("What's in a top_p?")
     assert isinstance(result1.content, str)
     assert result0.content != result1.content
+
+
+def test_serialize_chatnvidia() -> None:
+    model = loads(
+        dumps(ChatNVIDIA()), valid_namespaces=["langchain_nvidia_ai_endpoints"]
+    )
+    result = model.invoke("What is there if there is nothing?")
+    assert isinstance(result.content, str)