From 7206062cd36c7acc4f18a75dce13d438b2bdd6f5 Mon Sep 17 00:00:00 2001
From: pancake
Date: Fri, 3 May 2024 11:14:55 +0200
Subject: [PATCH] Use the llama3 model by default

---
 r2ai/models.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/r2ai/models.py b/r2ai/models.py
index 71bada73..0a841810 100644
--- a/r2ai/models.py
+++ b/r2ai/models.py
@@ -12,9 +12,10 @@ import sys
 import traceback
 
-#DEFAULT_MODEL = "TheBloke/CodeLlama-34B-Instruct-GGUF"
+# DEFAULT_MODEL = "TheBloke/CodeLlama-34B-Instruct-GGUF"
 # DEFAULT_MODEL = "TheBloke/llama2-7b-chat-codeCherryPop-qLoRA-GGUF"
-DEFAULT_MODEL = "-m TheBloke/dolphin-2_6-phi-2-GGUF"
+# DEFAULT_MODEL = "-m TheBloke/dolphin-2_6-phi-2-GGUF"
+DEFAULT_MODEL = "-m FaradayDotDev/llama-3-8b-Instruct-GGUF"
 r2ai_model_json = "r2ai.model.json" # windows path
 if "HOME" in os.environ:
     r2ai_model_json = os.environ["HOME"] + "/.r2ai.model"
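
Note: the new DEFAULT_MODEL keeps the "-m " prefix used by r2ai's model selection syntax, so any consumer has to strip that prefix before resolving the repository on Hugging Face. Below is a minimal sketch of how the new default could be resolved with huggingface_hub; the resolve_default_model helper and the quantization filename are illustrative assumptions, not r2ai's actual model-manager API.

from huggingface_hub import hf_hub_download

# DEFAULT_MODEL as set by this patch; the "-m " prefix marks a model id.
DEFAULT_MODEL = "-m FaradayDotDev/llama-3-8b-Instruct-GGUF"

def resolve_default_model(default: str = DEFAULT_MODEL) -> str:
    """Strip the '-m ' prefix and fetch one GGUF file for the default model.

    The helper name and the quantization filename below are assumptions for
    illustration; r2ai resolves and caches models through its own manager.
    """
    repo_id = default.removeprefix("-m ").strip()
    # Pick one quantization; the exact filename depends on the repo contents.
    return hf_hub_download(repo_id=repo_id,
                           filename="llama-3-8b-Instruct.Q4_K_M.gguf")

if __name__ == "__main__":
    print(resolve_default_model())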