Skip to content

Commit

Permalink
Add llm.layers to make it configurable
Browse files Browse the repository at this point in the history
  • Loading branch information
radare authored and trufae committed Sep 17, 2024
1 parent 599a9e7 commit 275f2a5
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 1 deletion.
1 change: 1 addition & 0 deletions r2ai/interpreter.py
Original file line number Diff line number Diff line change
Expand Up @@ -612,6 +612,7 @@ def __init__(self):
self.system_message = ""
self.env["llm.model"] = self.model ## TODO: dup. must get rid of self.model
self.env["llm.gpu"] = "true"
self.env["llm.layers"] = "-1"
self.env["llm.window"] = "32768" # "4096" # context_window
self.env["llm.maxtokens"] = "4096" # "1750"
self.env["llm.maxmsglen"] = "8096" # "1750"
Expand Down
2 changes: 1 addition & 1 deletion r2ai/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,7 @@ def models():
def gpulayers(ai):
    """Return the number of model layers to offload to the GPU.

    Reads the interpreter's environment settings:
      - "llm.gpu":    "true" enables GPU offloading.
      - "llm.layers": number of layers to offload; "-1" means all layers.

    :param ai: interpreter instance exposing an ``env`` dict of string settings
    :return: int layer count for the llama loader (-1 = all, 0 = CPU only)
    """
    if "llm.gpu" in ai.env:
        if ai.env["llm.gpu"] == "true":
            # Fall back to "-1" (offload everything) if llm.layers is unset,
            # preserving the pre-"llm.layers" behavior of returning -1.
            return int(ai.env.get("llm.layers", "-1"))
    # GPU disabled or not configured: keep every layer on the CPU.
    return 0

def get_hf_llm(ai, repo_id, debug_mode, context_window):
Expand Down

0 comments on commit 275f2a5

Please sign in to comment.