community:everlyai[patch]: standardize init args #22098

Closed
9 changes: 7 additions & 2 deletions libs/community/langchain_community/chat_models/everlyai.py
@@ -6,7 +6,7 @@
 from typing import TYPE_CHECKING, Dict, Optional, Set

 from langchain_core.messages import BaseMessage
-from langchain_core.pydantic_v1 import Field, root_validator
+from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
 from langchain_core.utils import get_from_dict_or_env

 from langchain_community.adapters.openai import convert_message_to_dict
@@ -55,7 +55,7 @@ def lc_secrets(self) -> Dict[str, str]:
     def is_lc_serializable(cls) -> bool:
         return False

-    everlyai_api_key: Optional[str] = None
+    everlyai_api_key: Optional[SecretStr] = Field(None, alias="api_key")
     """EverlyAI Endpoints API keys."""
     model_name: str = Field(default=DEFAULT_MODEL, alias="model")
     """Model name to use."""
@@ -64,6 +64,11 @@ def is_lc_serializable(cls) -> bool:
     available_models: Optional[Set[str]] = None
     """Available models from EverlyAI API."""

+    class Config:
+        """Configuration for this pydantic object."""
+
+        allow_population_by_field_name = True
+
     @staticmethod
     def get_available_models() -> Set[str]:
         """Get available models from EverlyAI API."""
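For reference, a minimal sketch (not part of this diff) of the pydantic v1 pattern this change relies on: a Field alias lets callers pass api_key, while allow_population_by_field_name = True keeps the original everlyai_api_key name working. The Example class below is hypothetical and only mirrors the pattern.

from typing import Optional

from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr


class Example(BaseModel):
    """Hypothetical model mirroring the pattern in this diff."""

    everlyai_api_key: Optional[SecretStr] = Field(None, alias="api_key")

    class Config:
        allow_population_by_field_name = True


# The alias works by default; the original field name needs the Config flag.
by_alias = Example(api_key="test")
by_name = Example(everlyai_api_key="test")

# SecretStr masks the value in str()/repr() but exposes it on request.
assert by_alias.everlyai_api_key is not None
assert by_alias.everlyai_api_key.get_secret_value() == "test"
assert str(by_name.everlyai_api_key) == "**********"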
58 changes: 58 additions & 0 deletions libs/community/tests/unit_tests/chat_models/test_everlyai.py
@@ -0,0 +1,58 @@
"""Test Everly AI Chat API wrapper."""
import os

import pytest
from langchain_core.pydantic_v1 import SecretStr, ValidationError

from langchain_community.chat_models import ChatEverlyAI

os.environ["EVERLYAI_API_KEY"] = "foo"
DEFAULT_MODEL = "meta-llama/Llama-2-7b-chat-hf"


@pytest.mark.requires("openai")
def test_everlyai_chat_missing_api_key(monkeypatch: pytest.MonkeyPatch) -> None:
"""Verify validation error if no api key found"""
monkeypatch.delenv("EVERLYAI_API_KEY", raising=False)
with pytest.raises(ValidationError) as e:
ChatEverlyAI() # type: ignore[call-arg]
assert "Did not find everlyai_api_key" in str(e)


@pytest.mark.requires("openai")
def test_everlyai_chat_default_params() -> None:
"""Check default parameters with environment API key"""
chat = ChatEverlyAI() # type: ignore[call-arg]
assert chat.everlyai_api_key is None
assert chat.model_name == DEFAULT_MODEL
assert chat.everlyai_api_base == "https://everlyai.xyz/hosted"
assert chat.available_models == {
"meta-llama/Llama-2-13b-chat-hf-quantized",
"meta-llama/Llama-2-7b-chat-hf",
}


@pytest.mark.requires("openai")
def test_everlyai_chat_param_api_key(monkeypatch: pytest.MonkeyPatch) -> None:
"""Check use of parameter API key instead of environment API key"""
monkeypatch.delenv("EVERLYAI_API_KEY", raising=False)
chat = ChatEverlyAI(everlyai_api_key="test") # type: ignore[call-arg]
assert isinstance(chat.everlyai_api_key, SecretStr)


@pytest.mark.requires("openai")
def test_everlyai_chat_initialization() -> None:
"""Ensure parameter names can be referenced by alias"""
for model in [
ChatEverlyAI( # type: ignore[call-arg]
everlyai_api_key="test",
model_name=DEFAULT_MODEL,
),
ChatEverlyAI( # type: ignore[call-arg]
api_key="test",
model=DEFAULT_MODEL,
),
]:
if model.everlyai_api_key is not None:
assert model.everlyai_api_key.get_secret_value() == "test"
assert model.model_name == DEFAULT_MODEL
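As a usage sketch mirroring the tests above (the key value is a placeholder, and the openai package is needed, per the test markers), either spelling of the init args now works and the stored key stays masked:

from langchain_community.chat_models import ChatEverlyAI

# Standardized names via the new aliases.
chat = ChatEverlyAI(api_key="test", model="meta-llama/Llama-2-7b-chat-hf")

# Provider-specific names remain valid thanks to allow_population_by_field_name.
chat = ChatEverlyAI(
    everlyai_api_key="test",
    model_name="meta-llama/Llama-2-7b-chat-hf",
)

# The key is stored as a SecretStr, so it does not leak via repr()/str().
assert chat.everlyai_api_key is not None
assert chat.everlyai_api_key.get_secret_value() == "test"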