
Commit

Added initial dialogue insertion
Mgrsc committed Jan 3, 2025
Parent: 659400c · Commit: b634b55
Showing 2 changed files with 16 additions and 8 deletions.
5 changes: 4 additions & 1 deletion plugins/llm_chat/config.py
@@ -1,4 +1,4 @@
-from typing import List, Optional
+from typing import List, Optional, Tuple
 from pydantic import BaseModel, Field
 from pathlib import Path
 import tomli
@@ -15,6 +15,7 @@ class LLMConfig(BaseModel):
     max_tokens: int = 1000
     system_prompt: Optional[str] = None
     max_context_messages: int = 10
+    qa_pairs: List[Tuple[str, str]] = []
 
 class ChunkConfig(BaseModel):
     """Chunked-send configuration"""
@@ -69,10 +70,12 @@ def load_config(cls) -> "Config":
             base_url=toml_config["llm"]["base_url"],
             temperature=toml_config["llm"].get("temperature", 0.7),
             max_tokens=toml_config["llm"].get("max_tokens", 2000),
+            max_context_messages=toml_config["llm"].get("max_context_messages", 10),
             system_prompt=toml_config["llm"]["system_prompt"],
             google_api_key=toml_config["llm"].get("google_api_key", ""),
             top_p=toml_config["llm"].get("top_p", 1.0),
             groq_api_key=toml_config["llm"].get("groq_api_key", ""),
+            qa_pairs=toml_config["llm"].get("qa_pairs", []),
         )
 
         plugin_config = PluginConfig(
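The new qa_pairs option is read from the same [llm] table as the other settings. Below is a minimal sketch of what the corresponding TOML might look like and how tomli hands it to the loader; the table and key names follow the code above, but the sample values and the use of tomli.loads on an inline string are illustrative assumptions, not the plugin's actual config file.

import tomli

sample = """
[llm]
system_prompt = "You are a helpful assistant."
# Each entry is a [user, assistant] pair; tomli parses the nested TOML
# arrays as plain lists, which pydantic then coerces into List[Tuple[str, str]].
qa_pairs = [
    ["Who are you?", "I am the llm_chat bot."],
    ["Who made you?", "My operator configured me."],
]
"""

toml_config = tomli.loads(sample)
print(toml_config["llm"].get("qa_pairs", []))
# [['Who are you?', 'I am the llm_chat bot.'], ['Who made you?', 'My operator configured me.']]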
19 changes: 12 additions & 7 deletions plugins/llm_chat/graph.py
@@ -91,16 +91,21 @@ async def build_graph(config: Config, llm):
 
     async def chatbot(state: State):
         messages = state["messages"]
-        if config.llm.system_prompt:
-            messages = [SystemMessage(content=config.llm.system_prompt)] + messages
+        # fixed messages: system prompt plus preset Q&A pairs
+        fixed_messages = []
+        if hasattr(config.llm, "system_prompt") and config.llm.system_prompt:
+            fixed_messages.append(SystemMessage(content=config.llm.system_prompt))
+        if hasattr(config.llm, "qa_pairs") and config.llm.qa_pairs:
+            for user_content, assistant_content in config.llm.qa_pairs:
+                fixed_messages.append(HumanMessage(content=user_content))
+                fixed_messages.append(AIMessage(content=assistant_content))
+        # trim the history
         trimmed_messages = trimmer.invoke(messages)
         if not trimmed_messages:
             return {"messages": []}
-        print("-" * 50)
-        truncated_messages = trimmed_messages[-2:]
-        print(format_messages_for_print(truncated_messages))
-        response = await llm_with_tools.ainvoke(trimmed_messages)
-        print(f"chatbot: {response}")
+        # merge fixed messages with the trimmed history
+        messages = fixed_messages + trimmed_messages
+        response = await llm_with_tools.ainvoke(messages)
         return {"messages": [response]}
 
     graph_builder = StateGraph(State)
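Taken together, the chatbot node now assembles the prompt in a fixed order: system prompt first, then the preset Q&A pairs, then the trimmed live history. A standalone sketch of that ordering with stand-in values (not the plugin's runtime; the trimmer output is faked as a single message):

from langchain_core.messages import AIMessage, HumanMessage, SystemMessage

system_prompt = "You are a helpful assistant."            # stands in for config.llm.system_prompt
qa_pairs = [("Who are you?", "I am the llm_chat bot.")]   # stands in for config.llm.qa_pairs

fixed_messages = [SystemMessage(content=system_prompt)]
for user_content, assistant_content in qa_pairs:
    fixed_messages.append(HumanMessage(content=user_content))
    fixed_messages.append(AIMessage(content=assistant_content))

trimmed_messages = [HumanMessage(content="What can you do?")]  # stand-in for trimmer.invoke(...)
messages = fixed_messages + trimmed_messages
for m in messages:
    print(type(m).__name__, "->", m.content)
# SystemMessage -> You are a helpful assistant.
# HumanMessage -> Who are you?
# AIMessage -> I am the llm_chat bot.
# HumanMessage -> What can you do?

Because fixed_messages is rebuilt on every call and only the model response is returned to state, the preset pairs never enter the persisted conversation history; they act as few-shot priming that the trimmer cannot drop.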

