ollama docstrings update (#171)
dfokina authored Jan 4, 2024
1 parent b80138e commit deb2f17
Showing 1 changed file with 13 additions and 11 deletions.
integrations/ollama/src/ollama_haystack/generator.py: 13 additions & 11 deletions
@@ -28,12 +28,12 @@ def __init__(
         :param url: The URL of the generation endpoint of a running Ollama instance.
             Default is "http://localhost:11434/api/generate".
         :param generation_kwargs: Optional arguments to pass to the Ollama generation endpoint, such as temperature,
-            top_p, etc. See the
+            top_p, and others. See the available arguments in
             [Ollama docs](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
         :param system_prompt: Optional system message (overrides what is defined in the Ollama Modelfile).
         :param template: The full prompt template (overrides what is defined in the Ollama Modelfile).
         :param raw: If True, no formatting will be applied to the prompt. You may choose to use the raw parameter
-            if you are specifying a full templated prompt in your request to the API.
+            if you are specifying a full templated prompt in your API request.
         :param timeout: The number of seconds before throwing a timeout error from the Ollama API.
             Default is 30 seconds.
         """
@@ -47,10 +47,12 @@ def __init__(
 
     def _create_json_payload(self, prompt: str, generation_kwargs=None) -> Dict[str, Any]:
         """
-        Returns A dictionary of JSON arguments for a POST request to an Ollama service
-        :param prompt: the prompt to generate a response for
-        :param generation_kwargs:
-        :return: A dictionary of arguments for a POST request to an Ollama service
+        Returns a dictionary of JSON arguments for a POST request to an Ollama service.
+        :param prompt: The prompt to generate a response for.
+        :param generation_kwargs: Optional arguments to pass to the Ollama generation endpoint, such as temperature,
+            top_p, and others. See the available arguments in
+            [Ollama docs](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
+        :return: A dictionary of arguments for a POST request to an Ollama service.
         """
         generation_kwargs = generation_kwargs or {}
         return {
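The remaining keys of the returned payload are collapsed in this view. As an illustration only, a standalone sketch of such a payload builder might look like the following; the key names ("model", "prompt", "options", "stream") are Ollama's documented /api/generate fields, not lines from this diff:

    from typing import Any, Dict, Optional

    def build_payload_sketch(
        model: str, prompt: str, generation_kwargs: Optional[Dict[str, Any]] = None
    ) -> Dict[str, Any]:
        # "options" carries sampling parameters such as temperature and top_p;
        # "stream": False requests a single, complete JSON response.
        return {
            "model": model,
            "prompt": prompt,
            "options": generation_kwargs or {},
            "stream": False,
        }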
@@ -65,9 +67,9 @@ def _create_json_payload(self, prompt: str, generation_kwargs=None) -> Dict[str,
 
     def _convert_to_haystack_response(self, ollama_response: Response) -> Dict[str, List[Any]]:
         """
-        Convert a response from the Ollama API to the required Haystack format
-        :param ollama_response: A response (requests library) from the Ollama API
-        :return: A dictionary of the returned responses and metadata
+        Convert a response from the Ollama API to the required Haystack format.
+        :param ollama_response: A response (requests library) from the Ollama API.
+        :return: A dictionary of the returned responses and metadata.
         """
         resp_dict = ollama_response.json()
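Haystack 2.x generators conventionally return a dictionary with "replies" and "meta" lists. A sketch of what this conversion plausibly does with the parsed JSON (the exact fields kept in the metadata are not visible here):

    from typing import Any, Dict, List

    def convert_sketch(resp_dict: Dict[str, Any]) -> Dict[str, List[Any]]:
        # In Ollama's non-streaming reply, "response" holds the generated text;
        # treat every other field as metadata.
        replies = [resp_dict.get("response", "")]
        meta = [{k: v for k, v in resp_dict.items() if k != "response"}]
        return {"replies": replies, "meta": meta}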

@@ -83,10 +85,10 @@ def run(
         generation_kwargs: Optional[Dict[str, Any]] = None,
     ):
         """
-        Run an Ollama Model on the a given prompt.
+        Run an Ollama Model on the given prompt.
         :param prompt: The prompt to generate a response for.
         :param generation_kwargs: Optional arguments to pass to the Ollama generation endpoint, such as temperature,
-            top_p, etc. See the
+            top_p, and others. See the available arguments in
             [Ollama docs](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
         :return: A dictionary of the response and returned metadata
         """
