Skip to content

Commit

Permalink
[chore]: Skip client construction when a client has already been passed
Browse files Browse the repository at this point in the history
  • Loading branch information
keenborder786 committed Dec 20, 2024
1 parent 22379a5 commit d301422
Showing 1 changed file with 17 additions and 16 deletions.
33 changes: 17 additions & 16 deletions libs/community/langchain_community/embeddings/llamacpp.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ class LlamaCppEmbeddings(BaseModel, Embeddings):
"""

client: Any = None #: :meta private:
model_path: str
model_path: str = Field(default="")

n_ctx: int = Field(512, alias="n_ctx")
"""Token context window."""
Expand Down Expand Up @@ -88,21 +88,22 @@ def validate_environment(self) -> Self:
if self.n_gpu_layers is not None:
model_params["n_gpu_layers"] = self.n_gpu_layers

try:
from llama_cpp import Llama

self.client = Llama(model_path, embedding=True, **model_params)
except ImportError:
raise ImportError(
"Could not import llama-cpp-python library. "
"Please install the llama-cpp-python library to "
"use this embedding model: pip install llama-cpp-python"
)
except Exception as e:
raise ValueError(
f"Could not load Llama model from path: {model_path}. "
f"Received error {e}"
)
# Build the llama-cpp client lazily: only when the caller did not inject one
# via the `client` field (the point of this commit — reuse a pre-built client).
# NOTE(review): this is a truthiness test — prefer `self.client is None` so a
# caller-supplied client object that happens to evaluate falsy is not silently
# replaced; confirm the intended semantics.
# NOTE(review): `model_path` / `model_params` are presumably bound earlier in
# validate_environment (not visible in this diff hunk) — verify. With the new
# `model_path: str = Field(default="")` default, reaching this branch with no
# injected client and no path means `Llama("")` will fail and surface as the
# ValueError below — confirm that is the desired failure mode.
if not self.client:
try:
# Imported here so the dependency is only required when we must build
# the client ourselves; missing package raises the ImportError below.
from llama_cpp import Llama

# embedding=True puts llama-cpp in embedding mode for this model.
self.client = Llama(model_path, embedding=True, **model_params)
except ImportError:
raise ImportError(
"Could not import llama-cpp-python library. "
"Please install the llama-cpp-python library to "
"use this embedding model: pip install llama-cpp-python"
)
except Exception as e:
# Any non-import failure (bad path, corrupt model, ...) is re-raised
# as ValueError with the offending path for easier diagnosis.
raise ValueError(
f"Could not load Llama model from path: {model_path}. "
f"Received error {e}"
)

return self

Expand Down

0 comments on commit d301422

Please sign in to comment.