Skip to content

Commit

Permalink
Use the requests library for openai/openapi only when necessary
Browse files Browse the repository at this point in the history
  • Loading branch information
nitanmarcel committed Sep 9, 2024
1 parent cf62e4b commit 3de0ece
Show file tree
Hide file tree
Showing 2 changed files with 47 additions and 32 deletions.
5 changes: 3 additions & 2 deletions r2ai/backend/openapi.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,15 +12,16 @@ def chat(messages, uri='http://localhost:5001', model='gpt-3.5-turbo', openapiKe
if uri.endswith("/"):
uri = uri[0:len(uri)-1]
# url = f'{uri}/v1/completions'
url = f'{uri}/v1/chat/completions'
url = f'{uri}/chat/completions'
data = {
"model": model,
"messages": messages
}
headers = {
"HTTP-Referer": "https://rada.re", # openrouter specific: Optional, for including your app on openrouter.ai rankings.
"X-Title": "radare2", # openrouter specific: Optional. Shows in rankings on openrouter.ai.
"Authorization": f"Bearer {openapiKey}"
"Authorization": f"Bearer {openapiKey}",
"Content-Type": "application/json"
}

r = requests.post(url=url, data=json.dumps(data), timeout=600, headers=headers)
Expand Down
74 changes: 44 additions & 30 deletions r2ai/interpreter.py
Original file line number Diff line number Diff line change
Expand Up @@ -604,7 +604,7 @@ def __init__(self):
self.google_client = None
self.google_chat = None
self.bedrock_client = None
self.api_base = None # Will set it to whatever OpenAI wants
self.api_base = "https://api.openai.com/v1" # Default openai base url
self.system_message = ""
self.env["debug"] = "false"
self.env["llm.model"] = self.model ## TODO: dup. must get rid of self.model
Expand Down Expand Up @@ -868,29 +868,24 @@ def respond(self):
response = auto.chat(self)
return

elif self.model.startswith("openapi"):
m = messages
if self.system_message != "":
m.insert(0, {"role": "system", "content": self.system_message})
response = ""
if ":" in self.model:
uri = self.model.split(":")[1:]
model = 'gpt-3.5-turbo'
openapiKey = syscmdstr('cat ~/.r2ai.openai-key').strip()
if not openapiKey:
openapiKey = ''
if len(uri) > 2:
model = uri[-1]
uri = uri[:-1]
response = openapi.chat(m, ":".join(uri), model, openapiKey)
else:
response = openapi.chat(m)
if "content" in self.messages[-1]:
last_message = self.messages[-1]["content"]
if self.env["chat.reply"] == "true":
self.messages.append({"role": "assistant", "content": response})
print(response)
return
# elif self.model.startswith("openapi"):
# m = messages
# if self.system_message != "":
# m.insert(0, {"role": "system", "content": self.system_message})
# response = ""
# if ":" in self.model:
# uri = self.model.split(":")[1:]
# model = 'gpt-3.5-turbo'
# openapiKey = syscmdstr('cat ~/.r2ai.openai-key').strip()
# if not openapiKey:
# openapiKey = ''
# if len(uri) > 2:
# model = uri[-1]
# uri = uri[:-1]
# response = openapi.chat(m, ":".join(uri), model, openapiKey)
# else:
# response = openapi.chat(m)
# return

elif self.model.startswith("kobaldcpp"):
if self.system_message != "":
Expand Down Expand Up @@ -918,36 +913,55 @@ def respond(self):
print(response)
return

elif self.model.startswith("openai:"):
elif self.model.startswith("openai:") or self.model.startswith("openapi:"):
# [
# {"role": "system", "content": "You are a poetic assistant, be creative."},
# {"role": "user", "content": "Compose a poem that explains the concept of recursion in programming."}
# ]
openai_model = self.model[7:]
if self.model.startswith("openapi:"):
uri = self.model.split(":", 3)[1:]
if len(uri) > 2:
self.model = uri[-1]
self.api_base = ":".join(uri[:-1])
openai_model = self.model
else:
openai_model = self.model.rsplit(":")[-1]
self.api_key = syscmdstr('cat ~/.r2ai.openai-key').strip();
if have_openai:
# https://platform.openai.com/docs/assistants/overview
if self.openai_client is None:
self.openai_client = OpenAI()
self.openai_client = OpenAI(base_url=self.api_base)
if self.system_message != "":
self.messages.append({"role": "system", "content": self.system_message})
completion = self.openai_client.chat.completions.create(
# TODO: instructions=self.system_message # instead of passing it in the query
model=openai_model,
max_tokens=maxtokens,
temperature=float(self.env["llm.temperature"]),
messages=self.messages
messages=self.messages,
extra_headers={
"HTTP-Referer": "https://rada.re", # openrouter specific: Optional, for including your app on openrouter.ai rankings.
"X-Title": "radare2", # openrouter specific: Optional. Shows in rankings on openrouter.ai.
}
)
response = completion.choices[0].message.content
if "content" in self.messages[-1]:
last_message = self.messages[-1]["content"]
if self.env["chat.reply"] == "true":
self.messages.append({"role": "assistant", "content": response})
print(response)
return
else:
print("OpenAi python not found. Falling back to requests library", file=sys.stderr)
response = openapi.chat(self.messages, self.api_base, openai_model, self.api_key)
if "content" in self.messages[-1]:
last_message = self.messages[-1]["content"]
if self.env["chat.reply"] == "true":
self.messages.append({"role": "assistant", "content": response})
print(response)
print("For a better experience install openai python", file=sys.stderr)
print("pip install -U openai", file=sys.stderr)
print("export OPENAI_API_KEY=...", file=sys.stderr)
return
return

elif self.model.startswith('anthropic:'):
anthropic_model = self.model[10:]
Expand Down

0 comments on commit 3de0ece

Please sign in to comment.