Skip to content

Commit

Permalink
r2ai accepts `-` flags written with `_`, and r2ai-server now honors that
Browse files Browse the repository at this point in the history
  • Loading branch information
radare committed Aug 30, 2024
1 parent 62b1d18 commit 47c8e76
Show file tree
Hide file tree
Showing 3 changed files with 6 additions and 4 deletions.
2 changes: 1 addition & 1 deletion r2ai-server/r2ai-server
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ main() {
koboldcpp -c 0 --port ${PORT} -m "${MODELDIR}/${MODEL}.gguf"
;;
r2ai)
r2ai -c "-e http.port=${PORT}" -c "-m ${MODEL}" -c "-w"
r2ai -c "_e http.port=${PORT}" -c "_m ${MODEL}" -c "_w"
;;
*)
echo "Invalid llama server selected."
Expand Down
3 changes: 1 addition & 2 deletions r2ai/interpreter.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
from rich.rule import Rule
from signal import signal, SIGINT


from .large import Large
from .utils import merge_deltas
from .message_block import MessageBlock
Expand Down Expand Up @@ -767,7 +766,7 @@ def chat(self, message=None):
debug_mode = False # maybe true when debuglevel=2 ?
self.llama_instance = new_get_hf_llm(self, self.model, debug_mode, ctxwindow)
if self.llama_instance is None:
builtins.print("Cannot find the model")
builtins.print("Cannot find model " + self.model)
return
except Exception:
traceback.print_exc()
Expand Down
5 changes: 4 additions & 1 deletion r2ai/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,10 @@ def main(args, commands):

if commands is not None:
for c in commands:
runline(ai, c)
if c.startswith("_"):
runline(ai, "-" + c[1:])
else:
runline(ai, c)
r2ai_repl(ai)
# elif HAVE_RLANG and HAVE_R2PIPE:
# r2ai_repl(ai)
Expand Down

0 comments on commit 47c8e76

Please sign in to comment.