Commit

Merge branch 'feature/java-chat-mvp-intelligence-service' into feature/interface-chat-mvp-intelligence-service
milesha committed Dec 1, 2024
2 parents ee21f79 + eda63de commit 74a72fc
Showing 1 changed file with 10 additions and 12 deletions.
22 changes: 10 additions & 12 deletions server/intelligence-service/app/main.py
@@ -1,9 +1,8 @@
from fastapi import FastAPI
from pydantic import BaseModel
import os
from enum import Enum
from typing import List
-from .config import settings
+from config import settings
from typing_extensions import Annotated, TypedDict
from langchain.chat_models.base import BaseChatModel
from langchain_openai import ChatOpenAI, AzureChatOpenAI
@@ -43,7 +42,7 @@ def invoke(self, message: str):
    model = MockChatModel()

elif settings.is_openai_available:
-    model = ChatOpenAI(temperature=2)
+    model = ChatOpenAI()
elif settings.is_azure_openai_available:
    model = AzureChatOpenAI()
else:
@@ -54,17 +53,19 @@ class State(TypedDict):
    messages: Annotated[list, add_messages]


-propmt = "You are an AI mentor helping a students working on the software engineering projects embracing structured self-reflection practices. You need to guide the student through the set questions regarding their work on the project during the last week (sprint). Your value is the fact, that you help students to reflect on their past progress. Throughout the conversation you need to perform all of the following tasks in the given order: Task 1: Greet the student and say you are happy to start the session. Task 2: Ask the student about the overall progress on the project. Task 3: Ask the student about the challenges faced during the sprint referring to what he said about progress. Task 4: Ask about the plan for the next sprint. You need to understand at which stage in the conversation you are and what is the next task. Be polite, friendly and do not let the student drive the conversation to any other topic except for the current project. Do not make a questionaire out of the conversation, but rather make it a natural conversation."
+prompt = "You are an AI mentor helping a students working on the software engineering projects embracing structured self-reflection practices. You need to guide the student through the set questions regarding their work on the project during the last week (sprint). Your value is the fact, that you help students to reflect on their past progress. Throughout the conversation you need to perform all of the following tasks in the given order: Task 1: Greet the student and say you are happy to start the session. Task 2: Ask the student about the overall progress on the project. Task 3: Ask the student about the challenges faced during the sprint referring to what he said about progress. Task 4: Ask about the plan for the next sprint. You need to understand at which stage in the conversation you are from the message history and what is the next task. Be polite, friendly and do not let the student drive the conversation to any other topic except for the current project. Do not make a questionaire out of the conversation, but rather make it a natural conversation."


def ai_mentor(state: State):
+    print(SystemMessage(content=prompt) + state["messages"])
    return {
-        "messages": [model.invoke([SystemMessage(content=propmt)] + state["messages"])]
+        "messages": [model.invoke(state["messages"] + [SystemMessage(content=prompt)])]
    }


graph_builder = StateGraph(State)
graph_builder.add_node("ai_mentor", ai_mentor)

graph_builder.add_edge(START, "ai_mentor")
graph_builder.add_edge("ai_mentor", END)
graph = graph_builder.compile()
@@ -75,15 +76,12 @@ def ai_mentor(state: State):
    response_model=ChatResponse,
    summary="Start and continue a chat session with an LLM.",
)
-async def chat(request: ChatRequest):
+def chat(request: ChatRequest):
    messages = []
    for message in request.message_history:
-        if message.author == "USER":
+        if message.sender == "USER":
            messages.append(HumanMessage(content=message.content))
        else:
            messages.append(AIMessage(content=message.content))

-    for event in graph.stream({"messages": messages}, stream_mode="values"):
-        responce_message = event["messages"][-1].content
-
-    return ChatResponse(responce=responce_message)
+    response_message = graph.invoke({"messages": messages})["messages"][-1].content
+    return ChatResponse(responce=response_message)
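
Note: the Pydantic request/response models used by this handler are defined outside the hunk shown above, so they are not part of this diff. A minimal sketch consistent with how the handler uses them might look like the following; the field names message_history, sender, and content, plus the "responce" spelling, are taken from the code above, while everything else (the Message class name, field types) is assumed.

from typing import List

from pydantic import BaseModel


class Message(BaseModel):
    # "USER" messages are mapped to HumanMessage, everything else to AIMessage
    sender: str
    content: str


class ChatRequest(BaseModel):
    # Full conversation so far; assumed to be ordered oldest-first
    message_history: List[Message]


class ChatResponse(BaseModel):
    # Field name mirrors the spelling used in the handler ("responce")
    responce: str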

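The changed handler now drives the compiled LangGraph graph with a single blocking graph.invoke call instead of iterating graph.stream. A self-contained sketch of that flow, using a stand-in model in place of ChatOpenAI/AzureChatOpenAI (EchoModel and its canned reply are purely illustrative, not part of this commit):

from typing_extensions import Annotated, TypedDict

from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
from langgraph.graph import StateGraph, START, END
from langgraph.graph.message import add_messages


class State(TypedDict):
    messages: Annotated[list, add_messages]


class EchoModel:
    """Stand-in for ChatOpenAI/AzureChatOpenAI, used only for this sketch."""

    def invoke(self, messages):
        # A real chat model would generate the mentor's next question here.
        return AIMessage(content="Great to hear! What challenges did you face this sprint?")


model = EchoModel()
prompt = "You are an AI mentor ..."  # abbreviated; the full prompt is in the diff above


def ai_mentor(state: State):
    # Same node shape as in main.py: append the system prompt and call the model.
    return {"messages": [model.invoke(state["messages"] + [SystemMessage(content=prompt)])]}


builder = StateGraph(State)
builder.add_node("ai_mentor", ai_mentor)
builder.add_edge(START, "ai_mentor")
builder.add_edge("ai_mentor", END)
graph = builder.compile()

# Mirror of the updated chat handler: build the message history, invoke the
# graph once, and read the last message of the final state as the reply.
history = [HumanMessage(content="Last sprint I finished the REST client.")]
reply = graph.invoke({"messages": history})["messages"][-1].content
print(reply)  # -> "Great to hear! What challenges did you face this sprint?"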