From 189344e67994cb9a4911bb9a3ceb51c9463110f6 Mon Sep 17 00:00:00 2001
From: Sina
Date: Wed, 22 May 2024 02:25:35 +0000
Subject: [PATCH] Fix typos

---
 chainlite/llm_config.py   | 4 ++--
 chainlite/llm_generate.py | 6 +++---
 setup.py                  | 2 +-
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/chainlite/llm_config.py b/chainlite/llm_config.py
index 77517d5..71fd7f8 100644
--- a/chainlite/llm_config.py
+++ b/chainlite/llm_config.py
@@ -34,11 +34,11 @@ def load_config_from_file(config_file: str):
         config = yaml.unsafe_load(config_file)
 
     prompt_dirs = config.get("prompt_dirs", ["./"])
-    prompt_log_file = config.get("promp_logging", {}).get(
+    prompt_log_file = config.get("prompt_logging", {}).get(
         "log_file", "./prompt_logs.jsonl"
     )
     prompts_to_skip_for_debugging = set(
-        config.get("promp_logging", {}).get("prompts_to_skip", [])
+        config.get("prompt_logging", {}).get("prompts_to_skip", [])
     )
 
     litellm.set_verbose = config.get("litellm_set_verbose", False)
diff --git a/chainlite/llm_generate.py b/chainlite/llm_generate.py
index 27343db..945e5d9 100644
--- a/chainlite/llm_generate.py
+++ b/chainlite/llm_generate.py
@@ -62,9 +62,9 @@ def write_prompt_logs_to_file(log_file: Optional[str] = None):
                             "input",
                             "output",
                         ]  # specifies the sort order of keys in the output, for a better viewing experience
-                    }
-                ),
-                ensure_ascii=False,
+                    },
+                    ensure_ascii=False,
+                )
             )
             f.write("\n")
 
diff --git a/setup.py b/setup.py
index df3a88f..35b3923 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@
 
 setup(
     name="chainlite",
-    version="0.1.1",
+    version="0.1.3",
     author="Sina Semnani",
     author_email="sinaj@cs.stanford.edu",
     description="A Python package that uses LangChain and LiteLLM to call large language model APIs easily",