Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
  • Loading branch information
keenborder786 committed Jun 13, 2024
2 parents 6421887 + 7b57fec commit e21ff2c
Show file tree
Hide file tree
Showing 5 changed files with 122 additions and 52 deletions.
17 changes: 17 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,23 @@ with ClaudeChrome(driver_config=driver_config,
```



## Integration with Langchain

```python
from free_llms.langchain_model import FreeLLMs
from langchain.prompts import PromptTemplate
# model name can be any of the following: GPTChrome,PreplexityChrome,MistralChrome,ClaudeChrome
model = FreeLLMs(model_name = 'PreplexityChrome', llm_kwargs = {
'driver_config':[],
'email':'email',
'password':''})

prompt = PromptTemplate.from_template('Write me a joke about {topic}')
chain = prompt | model | str
print(chain.invoke({'topic':'coding'}))

```
## Note:

- Free_LLMs only uses a `Patched Chrome Driver` as its main driver. The driver can be found [here](https://github.com/ultrafunkamsterdam/undetected-chromedriver/tree/master)
11 changes: 11 additions & 0 deletions examples/langchain_model.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
from free_llms.langchain_model import FreeLLMs
from langchain.prompts import PromptTemplate

# model_name can be any of the following: GPTChrome, PreplexityChrome, MistralChrome, ClaudeChrome
model = FreeLLMs(
    model_name='PreplexityChrome',
    llm_kwargs={
        'driver_config': [],   # extra options forwarded to the patched Chrome driver
        'email': 'email',      # account email for the chosen provider
        'password': '',        # account password for the chosen provider
    },
)

# Compose a LangChain pipeline: prompt -> FreeLLMs model -> plain string.
prompt = PromptTemplate.from_template('Write me a joke about {topic}')
chain = prompt | model | str
print(chain.invoke({'topic': 'coding'}))
30 changes: 6 additions & 24 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

50 changes: 50 additions & 0 deletions src/free_llms/langchain_model.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
from typing import Any, Dict, List, Optional, Type, Union

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import root_validator

from free_llms.models import ClaudeChrome, GPTChrome, MistralChrome, PreplexityChrome


class FreeLLMs(LLM):
    """LangChain-compatible wrapper around the free_llms browser-driven chat models.

    Selects one of the ``free_llms`` Chrome clients by name at construction
    time and drives it through the standard LangChain ``LLM`` interface.
    """

    client: Any  # private: the instantiated *Chrome client chosen by ``model_name``

    model_name: Optional[str] = None
    """One of the following model names to choose from: GPTChrome,PreplexityChrome,MistralChrome,ClaudeChrome"""
    llm_kwargs: Dict[str, Any]
    """Keyword arguments to be passed to free_llms.models.LLMChrome"""

    @root_validator()
    def start_model(cls, values: Dict) -> Dict:
        """Validate ``model_name`` and instantiate the matching Chrome client.

        Raises:
            ValueError: if ``model_name`` is not one of the supported models.
        """
        models: Dict[str, Type[Union[ClaudeChrome, GPTChrome, MistralChrome, PreplexityChrome]]] = {
            "GPTChrome": GPTChrome,
            "PreplexityChrome": PreplexityChrome,
            "MistralChrome": MistralChrome,
            "ClaudeChrome": ClaudeChrome,
        }
        if values["model_name"] not in models:
            raise ValueError(f'The given model {values["model_name"]} is not correct. Please pass one of the following {list(models.keys())}')
        values["client"] = models[values["model_name"]](**values["llm_kwargs"])
        return values

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "free_llms"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Log in, send ``prompt`` to the browser client, and return the answer text.

        The browser driver is always shut down afterwards — including when
        login fails or ``send_prompt`` raises — so no Chrome process leaks.

        Raises:
            ValueError: if the client cannot log in with the given credentials.
        """
        try:
            if not self.client.login():
                raise ValueError("Cannot Login given the credentials")
            answer = self.client.send_prompt(prompt).content
        finally:
            # Quit the driver even on failure; previously a raised exception
            # here left the Chrome process running.
            self.client.driver.quit()
        return answer
Loading

0 comments on commit e21ff2c

Please sign in to comment.