diff --git a/README.md b/README.md
index a65dfe9..385aca7 100644
--- a/README.md
+++ b/README.md
@@ -3,3 +3,25 @@
 This repository contains 1 package with Together integrations with LangChain:
 
 - [langchain-together](https://pypi.org/project/langchain-together/)
+
+## Setup for Testing
+
+```bash
+cd libs/together
+poetry install --with lint,typing,test,test_integration
+```
+
+## Running the Unit Tests
+
+```bash
+cd libs/together
+make tests
+```
+
+## Running the Integration Tests
+
+```bash
+cd libs/together
+export TOGETHER_API_KEY=
+make integration_tests
+```
\ No newline at end of file
diff --git a/libs/together/README.md b/libs/together/README.md
index 2195895..4c1ac48 100644
--- a/libs/together/README.md
+++ b/libs/together/README.md
@@ -25,4 +25,4 @@ NEED to add image endpoint + completions endpoint as well
 
 See a [usage example](https://python.langchain.com/docs/integrations/text_embedding/together/)
 
-Use `togethercomputer/m2-bert-80M-8k-retrieval` as the default model for embeddings.
+Use `togethercomputer/m2-bert-80M-8k-retrieval` as the default model for embeddings.
\ No newline at end of file
diff --git a/libs/together/tests/integration_tests/test_llms.py b/libs/together/tests/integration_tests/test_llms.py
index bfc4168..1ed900d 100644
--- a/libs/together/tests/integration_tests/test_llms.py
+++ b/libs/together/tests/integration_tests/test_llms.py
@@ -13,7 +13,7 @@
 def test_together_call() -> None:
     """Test simple call to together."""
     llm = Together(  # type: ignore[call-arg]
-        model="togethercomputer/RedPajama-INCITE-7B-Base",
+        model="meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
         temperature=0.2,
         max_tokens=250,
     )
@@ -27,7 +27,7 @@ def test_together_call() -> None:
 async def test_together_acall() -> None:
     """Test simple call to together."""
     llm = Together(  # type: ignore[call-arg]
-        model="togethercomputer/RedPajama-INCITE-7B-Base",
+        model="meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
         temperature=0.2,
         max_tokens=250,
     )
diff --git a/libs/together/tests/unit_tests/test_llms.py b/libs/together/tests/unit_tests/test_llms.py
index ebed1d7..60c869e 100644
--- a/libs/together/tests/unit_tests/test_llms.py
+++ b/libs/together/tests/unit_tests/test_llms.py
@@ -10,7 +10,7 @@ def test_together_api_key_is_secret_string() -> None:
     """Test that the API key is stored as a SecretStr."""
     llm = Together(
         together_api_key="secret-api-key",  # type: ignore[call-arg]
-        model="togethercomputer/RedPajama-INCITE-7B-Base",
+        model="meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
         temperature=0.2,
         max_tokens=250,
     )
@@ -23,7 +23,7 @@ def test_together_api_key_masked_when_passed_from_env(
     """Test that the API key is masked when passed from an environment variable."""
     monkeypatch.setenv("TOGETHER_API_KEY", "secret-api-key")
     llm = Together(  # type: ignore[call-arg]
-        model="togethercomputer/RedPajama-INCITE-7B-Base",
+        model="meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
         temperature=0.2,
         max_tokens=250,
     )
@@ -39,7 +39,7 @@ def test_together_api_key_masked_when_passed_via_constructor(
     """Test that the API key is masked when passed via the constructor."""
     llm = Together(
         together_api_key="secret-api-key",  # type: ignore[call-arg]
-        model="togethercomputer/RedPajama-INCITE-7B-Base",
+        model="meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
         temperature=0.2,
         max_tokens=250,
     )
@@ -53,7 +53,7 @@ def test_together_uses_actual_secret_value_from_secretstr() -> None:
     """Test that the actual secret value is correctly retrieved."""
     llm = Together(
         together_api_key="secret-api-key",  # type: ignore[call-arg]
-        model="togethercomputer/RedPajama-INCITE-7B-Base",
+        model="meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
         temperature=0.2,
         max_tokens=250,
     )
@@ -64,7 +64,7 @@ def test_together_uses_actual_secret_value_from_secretstr_api_key() -> None:
     """Test that the actual secret value is correctly retrieved."""
     llm = Together(
         api_key="secret-api-key",  # type: ignore[arg-type]
-        model="togethercomputer/RedPajama-INCITE-7B-Base",
+        model="meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
         temperature=0.2,
         max_tokens=250,
     )