-
Notifications
You must be signed in to change notification settings - Fork 6
/
Copy pathchat_utils.py
118 lines (95 loc) · 3.21 KB
/
chat_utils.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
import os
import promptlayer
import openai as openai_orig
import openai_mock
# Pick which OpenAI-compatible client the rest of this module uses.
# The module-level name `openai` is rebound to one of three implementations:
#   - APP_DEV == "dev"            -> openai_mock.MockOpenAI (no network calls)
#   - PROMPTLAYER_API_KEY set     -> promptlayer.openai (logs requests via PromptLayer)
#   - otherwise                   -> the plain openai SDK
env = os.environ.get("APP_DEV")
if env == "dev":
    openai = openai_mock.MockOpenAI
else:
    promptlayer.api_key = os.environ.get("PROMPTLAYER_API_KEY")
    if promptlayer.api_key is None:
        openai = openai_orig
    else:
        openai = promptlayer.openai
    # NOTE(review): the mock branch never sets api_key — presumably the mock
    # ignores it; confirm against openai_mock.
    openai.api_key = os.environ.get("OPENAI_API_KEY")
def extract_messages(st):
    """Return the active conversation's message list from Streamlit state.

    Falls back to a fresh single-element list containing the system prompt
    when no conversation (or no messages key) exists yet, so callers may
    append to the result without sharing state across calls.
    """
    fallback = [
        {
            "role": "system",
            "content": "You are a helpful assistant. Please use concise language to save bandwidth and token usage. Avoid 'AI language model' disclaimer whenever possible.",
        }
    ]
    return st.session_state.get("conversation", {}).get("messages", fallback)
# see sample-stream.json to know how to parse it
def generate_stream(st, holder, user_input):
    """Stream a chat completion for *user_input*, rendering partial text into
    the Streamlit placeholder *holder* as chunks arrive.

    Appends the user message and the finished assistant reply to the current
    message list, stores that list in st.session_state["messages"], and
    returns it.

    Chunk shape (see sample-stream.json): the first chunk's delta carries only
    {"role": "assistant"}, the final chunk has an empty delta with
    finish_reason "stop", and every chunk in between carries a "content"
    fragment in its delta.
    """
    model = st.session_state["model"]
    messages = extract_messages(st)
    messages.append({"role": "user", "content": user_input})
    print("openai.ChatCompletion.create with", openai, model, messages)
    stream = openai.ChatCompletion.create(
        model=model, messages=messages, stream=True
    )
    with holder.container():
        reply = ""
        for part in stream:
            fragment = part["choices"][0].get("delta", {})
            if "content" in fragment:
                reply += fragment["content"]
                # Re-render the accumulated text on every fragment.
                holder.info(reply, icon="🤖")
    messages.append({"role": "assistant", "content": reply})
    # NOTE(review): this writes session_state["messages"], while
    # extract_messages reads session_state["conversation"]["messages"] —
    # looks asymmetric; verify against the caller that syncs the two.
    st.session_state["messages"] = messages
    # No usage info in stream mode yet
    # https://community.openai.com/t/usage-info-in-api-responses/18862
    return messages
def generate_conversation_title(messages):
    """Ask the completions API for a short (≤5 word) title summarizing the
    user's side of *messages*, returning it with whitespace and any double
    quotes stripped."""
    joined = " ".join(m["content"] for m in messages if m["role"] == "user")
    # Build the titling prompt around the concatenated user messages.
    prompt = f"""
Based on the following user chat messages ---:
---
{joined}
---
A title in 5 words or less, without quotes, for this conversation is: """
    # Use the OpenAI API to generate a response
    response = openai.Completion.create(
        engine="text-davinci-002", prompt=prompt, temperature=0.3, max_tokens=60
    )
    raw_title = response["choices"][0]["text"].strip()
    # Drop every double-quote character the model may have added (the model
    # is asked not to quote, but often does anyway).
    return raw_title.replace('"', "")