Skip to content

Commit

Permalink
Merge pull request #42 from royerlab/release_v2024.3.26.1
Browse files Browse the repository at this point in the history
Release v2024.3.26.3
  • Loading branch information
royerloic authored Mar 27, 2024
2 parents bc0d752 + 206cb93 commit 1764486
Show file tree
Hide file tree
Showing 3 changed files with 19 additions and 11 deletions.
5 changes: 3 additions & 2 deletions setup.cfg
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
[metadata]
name = napari-chatgpt
version = v2024.3.26
version = v2024.3.26.3
description = A napari plugin to process and analyse images with chatGPT.
long_description = file: README.md
long_description_content_type = text/markdown
url = https://github.com/royerlab/napari-chatgpt
author = Loic A. Royer
author = Loic A. Royer and contributors
author_email = [email protected]
license = BSD-3-Clause
license_files = LICENSE
Expand Down Expand Up @@ -38,6 +38,7 @@ install_requires =
QtAwesome
langchain==0.1.11
langchain-openai==0.0.8
langchain-anthropic==0.1.4
openai==1.13.3
anthropic
fastapi
Expand Down
10 changes: 5 additions & 5 deletions src/napari_chatgpt/_widget.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,8 +126,8 @@ def _model_selection(self):
model_list.append('claude-2.1')
model_list.append('claude-2.0')
model_list.append('claude-instant-1.2')
#model_list.append('claude-3-sonnet-20240229')
#model_list.append('claude-3-opus-20240229')
model_list.append('claude-3-sonnet-20240229')
model_list.append('claude-3-opus-20240229')


if is_ollama_running():
Expand Down Expand Up @@ -450,14 +450,14 @@ def _start_omega(self):
main_llm_model_name = self.model_combo_box.currentText()

# Warn users with a modal window that the selected model might be sub-optimal:
if 'gpt-4' not in main_llm_model_name:
if 'gpt-4' not in main_llm_model_name and 'claude-3-opus' not in main_llm_model_name:
aprint("Warning: you did not select a gpt-4 level model. Omega's cognitive and coding abilities will be degraded.")
show_warning_dialog(f"You have selected this model: '{main_llm_model_name}'. "
f"This is not a GPT4-level model. "
f"This is not a GPT4 or Claude-3-opus level model. "
f"Omega's cognitive and coding abilities will be degraded. "
f"It might even completely fail or be too slow. "
f"Please visit <a href='https://github.com/royerlab/napari-chatgpt/wiki/OpenAIKey'>our wiki</a> "
f"for information on how to gain access to GPT4.")
f"for information on how to gain access to GPT4 (or Claude-3).")

# Set tool LLM model name via configuration file.
tool_llm_model_name = self.config.get('tool_llm_model_name', 'same')
Expand Down
15 changes: 11 additions & 4 deletions src/napari_chatgpt/llm/llms.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,17 +63,24 @@ def _instantiate_single_llm(llm_model_name: str,
elif 'claude' in llm_model_name:

# Import Claude LLM:
from langchain.chat_models import ChatAnthropic
from langchain_anthropic import ChatAnthropic

max_token_limit = 8000
llm_model_name_lc = llm_model_name.lower()

if 'opus' in llm_model_name_lc or 'sonnet' in llm_model_name_lc or 'haiku' in llm_model_name_lc or '2.1' in llm_model_name_lc:
max_tokens_to_sample = 4096
max_token_limit = 200000
else:
max_tokens_to_sample = 4096
max_token_limit = 8000

# Instantiates Main LLM:
llm = ChatAnthropic(
model=llm_model_name,
verbose=verbose,
streaming=streaming,
temperature=temperature,
max_tokens_to_sample=max_token_limit,
max_tokens_to_sample=max_tokens_to_sample,
callbacks=[callback_handler])

return llm, max_token_limit
Expand Down Expand Up @@ -103,7 +110,7 @@ def _instantiate_single_llm(llm_model_name: str,
# Wait a bit:
sleep(3)

# Make ure that Ollama is running
# Make sure that Ollama is running
if not is_ollama_running(ollama_host, ollama_port):
aprint(f"Ollama server is not running on '{ollama_host}'. Please start the Ollama server on this machine and make sure the port '{ollama_port}' is open. ")
raise Exception("Ollama server is not running!")
Expand Down

0 comments on commit 1764486

Please sign in to comment.