From 63e09282ee0aa5bf974594f59d15c09637f31610 Mon Sep 17 00:00:00 2001
From: Milena Serbinova <45200178+milesha@users.noreply.github.com>
Date: Sun, 22 Dec 2024 13:12:59 +0100
Subject: [PATCH] Automatic Greeting Feature (#216)

---
 .../mentor/message/MessageService.java        | 40 +++++++++----
 .../mentor/session/SessionService.java        |  8 ++-
 .../app/mentor/prompt_loader.py               | 19 +++++++
 .../app/mentor/prompts/mentor_persona.txt     | 17 ++++++
 server/intelligence-service/app/mentor/run.py | 33 ++++++++++-
 .../app/routers/mentor.py                     |  9 +--
 webapp/src/app/mentor/mentor.component.html   |  4 +-
 webapp/src/app/mentor/mentor.component.ts     |  1 +
 .../mentor/messages/messages.component.html   | 57 +++++++++++++------
 .../app/mentor/messages/messages.component.ts |  4 +-
 .../app/mentor/messages/messages.stories.ts   | 16 ++++--
 11 files changed, 163 insertions(+), 45 deletions(-)
 create mode 100644 server/intelligence-service/app/mentor/prompt_loader.py
 create mode 100644 server/intelligence-service/app/mentor/prompts/mentor_persona.txt

diff --git a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/message/MessageService.java b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/message/MessageService.java
index 94b6f394..d68976f1 100644
--- a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/message/MessageService.java
+++ b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/message/MessageService.java
@@ -52,29 +52,34 @@ public MessageDTO sendMessage(String content, Long sessionId) {
         currentSession.getMessages().add(savedUserMessage);
         sessionRepository.save(currentSession);

-        String systemResponse = generateResponse(sessionId, content);
+        String systemResponse = generateResponse(sessionId);

         // prevent saving empty system messages if the intelligence service is down
         if (systemResponse == null) {
+            logger.error("Failed to generate response for message: {}", content);
             return MessageDTO.fromMessage(savedUserMessage);
         }

-        Message systemMessage = new Message();
-        systemMessage.setSender(MessageSender.MENTOR);
-        systemMessage.setContent(systemResponse);
-        systemMessage.setSession(currentSession);
+        Message savedSystemMessage = createSystemMessage(currentSession, systemResponse);
+        return MessageDTO.fromMessage(savedSystemMessage);
+    }

-        Message savedSystemMessage = messageRepository.save(systemMessage);
-        currentSession.getMessages().add(savedSystemMessage);
-        sessionRepository.save(currentSession);
+    public void generateFirstSystemMessage(Session session) {
+        String systemResponse = generateResponse(session.getId());

-        return MessageDTO.fromMessage(savedSystemMessage);
+        // prevent saving empty system messages if the intelligence service is down
+        if (systemResponse == null) {
+            logger.error("Failed to generate response for the conversation start");
+            return;
+        }
+
+        createSystemMessage(session, systemResponse);
     }

-    private String generateResponse(Long sessionId, String messageContent) {
+    private String generateResponse(Long sessionId) {
         List<Message> messages = messageRepository.findBySessionId(sessionId);
-
         ISMessageHistory messageHistory = new ISMessageHistory();
+
         messageHistory.setMessages(
             messages
                 .stream()
@@ -91,4 +96,17 @@ private String generateResponse(Long sessionId, String messageContent) {
             return null;
         }
     }
+
+    private Message createSystemMessage(Session currentSession, String systemResponse) {
+        Message systemMessage = new Message();
+        systemMessage.setSender(MessageSender.MENTOR);
+        systemMessage.setContent(systemResponse);
+        systemMessage.setSession(currentSession);
+
+        Message savedSystemMessage = messageRepository.save(systemMessage);
+        currentSession.getMessages().add(savedSystemMessage);
+        sessionRepository.save(currentSession);
+
+        return savedSystemMessage;
+    }
 }
diff --git a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/session/SessionService.java b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/session/SessionService.java
index d07dd5a6..c8a6d72d 100644
--- a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/session/SessionService.java
+++ b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/session/SessionService.java
@@ -2,6 +2,7 @@
 import de.tum.in.www1.hephaestus.core.exception.AccessForbiddenException;
 import de.tum.in.www1.hephaestus.gitprovider.user.User;
+import de.tum.in.www1.hephaestus.mentor.message.MessageService;
 import java.util.List;
 import java.util.Optional;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -13,6 +14,9 @@ public class SessionService {
     @Autowired
     private SessionRepository sessionRepository;

+    @Autowired
+    private MessageService messageService;
+
     public void checkAccessElseThrow(User user, Session session) {
         if (!session.getUser().getId().equals(user.getId())) {
             throw new AccessForbiddenException("Session", session.getId());
@@ -32,6 +36,8 @@ public SessionDTO createSession(User user) {
         Session session = new Session();
         session.setUser(user);

-        return SessionDTO.fromSession(sessionRepository.save(session));
+        Session savedSession = sessionRepository.save(session);
+        messageService.generateFirstSystemMessage(session);
+        return SessionDTO.fromSession(savedSession);
     }
 }
diff --git a/server/intelligence-service/app/mentor/prompt_loader.py b/server/intelligence-service/app/mentor/prompt_loader.py
new file mode 100644
index 00000000..6402ded4
--- /dev/null
+++ b/server/intelligence-service/app/mentor/prompt_loader.py
@@ -0,0 +1,19 @@
+from pathlib import Path
+from typing import Dict
+
+
+class PromptLoader:
+    def __init__(self, prompt_dir: str = "prompts"):
+        self.prompt_dir = Path(__file__).parent / prompt_dir
+
+    def load_prompts(self) -> Dict[str, str]:
+        prompts = {}
+        for txt_file in self.prompt_dir.glob("*.txt"):
+            key = txt_file.stem  # use the filename without extension as the key
+            with open(txt_file, "r", encoding="utf-8") as f:
+                prompts[key] = f.read().strip()
+        return prompts
+
+    def get_prompt(self, name: str) -> str:
+        prompts = self.load_prompts()
+        return prompts.get(name, "")
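For context on how this loader is consumed, here is a minimal usage sketch (not part of the patch; it assumes the service package is importable as `app.mentor`, mirroring the relative imports used in `run.py`). `get_prompt` re-reads every `*.txt` file in the sibling `prompts/` directory on each call and returns an empty string for unknown names:

```python
# Sketch only: exercising PromptLoader the same way run.py does.
from app.mentor.prompt_loader import PromptLoader

loader = PromptLoader()                        # defaults to the "prompts" dir next to prompt_loader.py
persona = loader.get_prompt("mentor_persona")  # stripped contents of prompts/mentor_persona.txt
missing = loader.get_prompt("no_such_prompt")  # unknown names fall back to "" instead of raising

assert "AI mentor" in persona
assert missing == ""
```

Because `get_prompt` calls `load_prompts` on every lookup, edited prompt files take effect without a service restart, at the cost of re-reading the directory each time.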
diff --git a/server/intelligence-service/app/mentor/prompts/mentor_persona.txt b/server/intelligence-service/app/mentor/prompts/mentor_persona.txt
new file mode 100644
index 00000000..d067746f
--- /dev/null
+++ b/server/intelligence-service/app/mentor/prompts/mentor_persona.txt
@@ -0,0 +1,17 @@
+You are a friendly, approachable AI mentor focused on helping a student with their software engineering project. Your goal is to provide constructive guidance, support, and encouragement. Follow these rules and guidelines:
+
+- **Stay On-Topic**: Only answer questions related to the student’s software engineering project. Avoid unrelated topics and general advice that does not support their specific work.
+
+- **Focus on Guidance**: Offer actionable suggestions. If the student’s explanation is unclear, ask clarifying questions. Help them break down complex issues into manageable steps, and encourage them to think critically about their problem-solving approach.
+
+- **Tone**: Maintain a friendly, supportive, and empathetic demeanor. Keep the conversation casual and encouraging, rather than formal or distant. Show understanding and reassure them when they face challenges.
+
+- **Personality**: Be positive and motivating. Praise the student’s progress and offer constructive feedback when needed. Support them in reflecting on their decisions and thought processes to improve their project outcomes.
+
+- **Empathy and Accountability**: Acknowledge any difficulties and provide practical strategies to overcome obstacles. Encourage the student to take responsibility for their learning and project development, while remaining patient and understanding.
+
+- **Context for the Conversation**:
+  - If the student is stuck, ask questions to pinpoint their confusion and then suggest targeted steps to move forward.
+  - If the student is making good progress, recognize their achievements and continue to motivate them.
+
+**Remember**: Your primary objective is to help the student succeed in their software engineering project. Do not deviate from this focus.
diff --git a/server/intelligence-service/app/mentor/run.py b/server/intelligence-service/app/mentor/run.py
index 82681da6..dfe1dd71 100644
--- a/server/intelligence-service/app/mentor/run.py
+++ b/server/intelligence-service/app/mentor/run.py
@@ -1,11 +1,14 @@
 from typing_extensions import Annotated, TypedDict
-
+from .prompt_loader import PromptLoader
 from langgraph.graph import START, StateGraph, END
 from langgraph.graph.message import add_messages
 from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
 from ..model import model

+prompt_loader = PromptLoader()
+persona_prompt = prompt_loader.get_prompt("mentor_persona")
+

 class State(TypedDict):
     messages: Annotated[list, add_messages]

@@ -14,9 +17,10 @@ class State(TypedDict):
 def mentor(state: State):
     prompt = ChatPromptTemplate(
         [
+            ("system", persona_prompt),
             (
                 "system",
-                "You are an AI mentor helping a students working on the software engineering projects embracing structured self-reflection practices. You need to guide the student through the set questions regarding their work on the project during the last week (sprint). Your value is the fact, that you help students to reflect on their past progress. Throughout the conversation you need to perform all of the following tasks in the given order: Task 1: Greet the student and say you are happy to start the session. Task 2: Ask the student about the overall progress on the project. Task 3: Ask the student about the challenges faced during the sprint referring to what he said about progress. Task 4: Ask about the plan for the next sprint. You need to understand at which task in the conversation you are from the message history and what is the next task. Please, don't repeat yourself throughout the conversation. Don't perform more then one task at a time. If the user already shared something to a task you can go to the next. Be polite, friendly and do not let the student drive the conversation to any other topic except for the current project. Do not make a questionaire out of the conversation, but rather make it a natural conversation. Don't repeat the answer of the student to your latest question but try to react on it. If the student asks questions be helpful and try to find solutions.",
+                "You need to guide the student through a set of questions about their work on the project during the last week (sprint). Your value lies in helping the student reflect on their past progress. Throughout the conversation, perform all of the following tasks in the given order: Task 1: Ask the student about the overall progress on the project. Task 2: Ask the student about the challenges faced during the sprint, referring to what they said about progress. Task 3: Ask about the plan for the next sprint. Work out from the message history which task you are on and what the next task is. Please don't repeat yourself throughout the conversation. Don't perform more than one task at a time. If the student has already shared something for a task, move on to the next one.",
             ),
             MessagesPlaceholder("messages"),
         ]
@@ -25,9 +29,32 @@ def mentor(state: State):
     return {"messages": [chain.invoke({"messages": state["messages"]})]}


+def greeting(state: State):
+    prompt = ChatPromptTemplate(
+        [
+            ("system", persona_prompt),
+            (
+                "system",
+                "Greet the user warmly and express excitement about starting today’s session. Keep the greeting friendly and encouraging. Mention that you are here to support them and look forward to making progress together.",
+            ),
+        ]
+    )
+    chain = prompt | model
+    return {"messages": [chain.invoke({"messages": state["messages"]})]}
+
+
+def isFirstInteraction(state: State):
+    if len(state["messages"]) == 0:
+        return "greeting"
+    return "mentor"
+
+
 graph_builder = StateGraph(State)
 graph_builder.add_node("mentor", mentor)
-graph_builder.add_edge(START, "mentor")
+graph_builder.add_node("greeting", greeting)
+
+graph_builder.add_conditional_edges(START, isFirstInteraction)
 graph_builder.add_edge("mentor", END)
+graph_builder.add_edge("greeting", END)

 graph = graph_builder.compile()
diff --git a/server/intelligence-service/app/routers/mentor.py b/server/intelligence-service/app/routers/mentor.py
index 12da4034..8006d52e 100644
--- a/server/intelligence-service/app/routers/mentor.py
+++ b/server/intelligence-service/app/routers/mentor.py
@@ -31,9 +31,10 @@ class MentorMessage(BaseModel):
 def generate(request: MessageHistory):
     messages = []
     for message in request.messages:
-        if message.sender == "USER":
-            messages.append(HumanMessage(content=message.content))
-        else:
-            messages.append(AIMessage(content=message.content))
+        if message.content:
+            if message.sender == "USER":
+                messages.append(HumanMessage(content=message.content))
+            else:
+                messages.append(AIMessage(content=message.content))
     response_message = graph.invoke({"messages": messages})["messages"][-1].content
     return MentorMessage(content=response_message)
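The conditional edge added to the graph is what produces the automatic greeting: an empty message history routes to the `greeting` node, anything else continues with the `mentor` node's reflection tasks. A small sketch of that routing (not part of the patch; it assumes the module is importable as `app.mentor.run` and that a chat model is configured behind `app.model`):

```python
# Sketch only: how the compiled graph from run.py picks its entry node.
from langchain_core.messages import HumanMessage

from app.mentor.run import graph, isFirstInteraction

# The routing function is pure, so the decision itself is cheap to check:
assert isFirstInteraction({"messages": []}) == "greeting"
assert isFirstInteraction({"messages": [HumanMessage(content="Hi!")]}) == "mentor"

# Invoking the graph then runs the chosen node (these calls hit the LLM):
opener = graph.invoke({"messages": []})["messages"][-1].content
reply = graph.invoke(
    {"messages": [HumanMessage(content="I finished the REST endpoints this sprint.")]}
)["messages"][-1].content
```

The `generate` handler above performs exactly this second kind of call once it has mapped the incoming history to `HumanMessage`/`AIMessage` objects.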
diff --git a/webapp/src/app/mentor/mentor.component.html b/webapp/src/app/mentor/mentor.component.html
index a45888c4..19906855 100644
--- a/webapp/src/app/mentor/mentor.component.html
+++ b/webapp/src/app/mentor/mentor.component.html
@@ -17,10 +17,10 @@
 } @else {
   @if (selectedSessionId() !== undefined) {
-    @if ((selectedSessionMessages.data()?.length ?? 0) > 0) {
+    @if ((selectedSessionMessages.data()?.length ?? 0) > 0 || selectedSessionMessages.isPending()) {

-
+
     } @else {
diff --git a/webapp/src/app/mentor/mentor.component.ts b/webapp/src/app/mentor/mentor.component.ts
index 1e8893b9..87d90a76 100644
--- a/webapp/src/app/mentor/mentor.component.ts
+++ b/webapp/src/app/mentor/mentor.component.ts
@@ -64,6 +64,7 @@ export class MentorComponent {
       },
       onSuccess: (session) => {
         this.selectedSessionId.set(session.id);
+        this.queryClient.invalidateQueries({ queryKey: ['sessions', this.selectedSessionId()] });
       }
     }));
diff --git a/webapp/src/app/mentor/messages/messages.component.html b/webapp/src/app/mentor/messages/messages.component.html
index 168c8529..f7d6dfdd 100644
--- a/webapp/src/app/mentor/messages/messages.component.html
+++ b/webapp/src/app/mentor/messages/messages.component.html
@@ -1,27 +1,48 @@
-  @for (message of messages(); track message.id) {
-
-    @if (message.sender === Message.SenderEnum.Mentor) {
-
-
+  @if (isLoading()) {
+    @for (i of [1, 2, 3, 4]; track i) {
+      @if (i % 2 === 0) {
+
+
+
+      } @else {
+
+
+
+
       }
-
-
-          {{ message.content }}
-
+
+    }
+  } @else {
+    @for (message of messages(); track message.id) {
+
+      @if (message.sender === Message.SenderEnum.Mentor) {
+
+
+
+
+      }
+
+
+          {{ message.content }}
+
+
+          {{ message.sender === Message.SenderEnum.User ? 'You' : 'AI Mentor' }} · {{ message.sentAt | date: 'shortTime' }}
-          {{ message.sender === Message.SenderEnum.User ? 'You' : 'AI Mentor' }} · {{ message.sentAt | date: 'shortTime' }}
-
+    }
 }
diff --git a/webapp/src/app/mentor/messages/messages.component.ts b/webapp/src/app/mentor/messages/messages.component.ts
index c3e92ca9..bbba89ff 100644
--- a/webapp/src/app/mentor/messages/messages.component.ts
+++ b/webapp/src/app/mentor/messages/messages.component.ts
@@ -4,12 +4,13 @@ import { LucideAngularModule, BotMessageSquare } from 'lucide-angular';
 import { HlmAvatarModule } from '@spartan-ng/ui-avatar-helm';
 import { SecurityStore } from '@app/core/security/security-store.service';
 import { Message } from '@app/core/modules/openapi';
+import { HlmSkeletonComponent } from '@spartan-ng/ui-skeleton-helm';

 @Component({
   selector: 'app-messages',
   templateUrl: './messages.component.html',
   standalone: true,
-  imports: [CommonModule, LucideAngularModule, HlmAvatarModule]
+  imports: [CommonModule, LucideAngularModule, HlmAvatarModule, HlmSkeletonComponent]
 })
 export class MessagesComponent {
   protected BotMessageSquare = BotMessageSquare;
@@ -17,4 +18,5 @@ export class MessagesComponent {
   securityStore = inject(SecurityStore);

   messages = input([]);
+  isLoading = input(false);
 }
diff --git a/webapp/src/app/mentor/messages/messages.stories.ts b/webapp/src/app/mentor/messages/messages.stories.ts
index a0afd9bc..a6189779 100644
--- a/webapp/src/app/mentor/messages/messages.stories.ts
+++ b/webapp/src/app/mentor/messages/messages.stories.ts
@@ -11,35 +11,35 @@ const meta: Meta = {
         id: 1,
         sentAt: '2024-12-05T10:15:00Z',
         sender: Message.SenderEnum.Mentor,
-        content: 'Hello! How can I assist you today?',
+        content: 'Hello! I’m excited to help you with your software engineering project today. What are you currently working on?',
         sessionId: 101
       },
       {
         id: 2,
         sentAt: '2024-12-05T10:16:30Z',
         sender: Message.SenderEnum.User,
-        content: 'I need help with understanding my recent order.',
+        content: 'Hi! I’m struggling with designing the database schema for my project.',
         sessionId: 101
       },
       {
         id: 3,
         sentAt: '2024-12-05T10:17:00Z',
         sender: Message.SenderEnum.Mentor,
-        content: 'Sure! Could you provide your order ID?',
+        content: 'Got it! Can you tell me a bit more about the project?',
         sessionId: 101
       },
       {
         id: 4,
         sentAt: '2024-12-05T10:17:45Z',
         sender: Message.SenderEnum.User,
-        content: 'The order ID is #12345. I’m looking for the details.',
+        content: 'It’s an e-commerce app where users can browse products, add them to a cart, and place orders.',
         sessionId: 101
       },
       {
         id: 5,
         sentAt: '2024-12-05T10:18:10Z',
         sender: Message.SenderEnum.Mentor,
-        content: "Got it! Please hold on while I fetch your details. Thank you for your patience. :) I'll be back in a moment...",
+        content: 'A good first step is identifying the main entities: users, products, orders, and the cart. Let’s start with that — do you have any initial thoughts?',
         sessionId: 101
       }
     ]
   }
 };

 export default meta;
 type Story = StoryObj;

 export const Default: Story = {};
+
+export const isLoading: Story = {
+  args: {
+    isLoading: true
+  }
+};
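Taken together, the server-side changes give the application server one entry point for both the automatic greeting and the ongoing dialogue. A sketch of the two call shapes (not part of the patch; it assumes `MessageHistory` is importable from the router module and that its items need only `sender` and `content`, passed here as plain dicts for pydantic to coerce):

```python
# Sketch only: the two kinds of requests the application server now issues.
from app.routers.mentor import MessageHistory, generate

# 1) Right after SessionService.createSession: no prior messages, so the graph
#    enters at the "greeting" node and returns the automatic opener.
opener = generate(MessageHistory(messages=[]))

# 2) On each MessageService.sendMessage call: the full history is sent; blank
#    entries are dropped by the new `if message.content:` guard before the graph
#    resumes the mentor tasks (progress, challenges, plan for the next sprint).
reply = generate(
    MessageHistory(
        messages=[
            {"sender": "MENTOR", "content": opener.content},
            {"sender": "USER", "content": "Progress was good, the REST endpoints are done."},
            {"sender": "USER", "content": ""},  # filtered out, never reaches the model
        ]
    )
)
print(reply.content)
```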