Support multiple users with @MemoryId #1
arey committed Nov 2, 2024
1 parent 2217b86 commit 7b080c0
Showing 6 changed files with 33 additions and 14 deletions.
5 changes: 3 additions & 2 deletions readme.md
@@ -11,8 +11,9 @@ This sample demonstrates how to **easily integrate AI/LLM capabilities into a Ja
 This can be achieved thanks to:
 * A unified **abstraction layer** designed to decouple your code from specific implementations like LLM or embedding providers, enabling easy component swapping.
   Only the [application.properties](src/main/resources/application.properties) file references LLM providers such as OpenAI or Azure OpenAI.
-* **Memory** offers context to the LLM for both your current and previous conversations.
-  Refer to the use of the `MessageWindowChatMemory` class in [AssistantConfiguration](src/main/java/org/springframework/samples/petclinic/chat/AssistantConfiguration.java).
+* **Memory** offers context to the LLM for both your current and previous conversations, with support for multiple users.
+  Refer to the use of the `MessageWindowChatMemory` class in [AssistantConfiguration](src/main/java/org/springframework/samples/petclinic/chat/AssistantConfiguration.java)
+  and the `@MemoryId` annotation in the [Assistant](src/main/java/org/springframework/samples/petclinic/chat/Assistant.java) interface.
 * **AI Services** enables declarative definitions of complex AI behaviors through a straightforward Java API.
   See the use of the `@AiService` annotation in the [Assistant](src/main/java/org/springframework/samples/petclinic/chat/Assistant.java) interface.
 * **System prompts** play a vital role in LLMs as they shape how models interpret and respond to user queries.
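For illustration, and not part of this commit, here is a minimal self-contained LangChain4j sketch of the per-user memory pattern described above: an AI service method keyed by a `@MemoryId` parameter, plus a `ChatMemoryProvider` that hands out one `MessageWindowChatMemory` per id. The `DemoAssistant` interface, the OpenAI model setup, and the environment variable are illustrative assumptions; the sketch also returns a plain `String` instead of the streaming `TokenStream` used by the application.

```java
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.MemoryId;
import dev.langchain4j.service.UserMessage;

import java.util.UUID;

public class MultiUserMemorySketch {

    // Hypothetical assistant interface: same @MemoryId pattern as the commit, but returning String.
    interface DemoAssistant {
        String chat(@MemoryId UUID memoryId, @UserMessage String userMessage);
    }

    public static void main(String[] args) {
        // Placeholder model and model name; any ChatLanguageModel implementation would do.
        ChatLanguageModel model = OpenAiChatModel.builder()
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .modelName("gpt-4o-mini")
                .build();

        // One MessageWindowChatMemory per memory id, so each user keeps an isolated history.
        ChatMemoryProvider memoryProvider = memoryId -> MessageWindowChatMemory.withMaxMessages(10);

        DemoAssistant assistant = AiServices.builder(DemoAssistant.class)
                .chatLanguageModel(model)
                .chatMemoryProvider(memoryProvider)
                .build();

        UUID alice = UUID.randomUUID();
        UUID bob = UUID.randomUUID();

        System.out.println(assistant.chat(alice, "My dog is named Rex."));
        System.out.println(assistant.chat(bob, "What is my dog's name?"));   // Bob's memory knows nothing about Rex
        System.out.println(assistant.chat(alice, "What is my dog's name?")); // Alice's memory remembers Rex
    }
}
```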
src/main/java/org/springframework/samples/petclinic/chat/Assistant.java
@@ -1,13 +1,17 @@
 package org.springframework.samples.petclinic.chat;
 
+import dev.langchain4j.service.MemoryId;
 import dev.langchain4j.service.SystemMessage;
 import dev.langchain4j.service.TokenStream;
+import dev.langchain4j.service.UserMessage;
 import dev.langchain4j.service.spring.AiService;
 
+import java.util.UUID;
+
 @AiService
 interface Assistant {
 
     @SystemMessage(fromResource = "/prompts/system.st")
-    TokenStream chat(String userMessage);
+    TokenStream chat(@MemoryId UUID memoryId, @UserMessage String userMessage);
 
 }
src/main/java/org/springframework/samples/petclinic/chat/AssistantConfiguration.java
@@ -1,7 +1,7 @@
 package org.springframework.samples.petclinic.chat;
 
 import dev.langchain4j.data.segment.TextSegment;
-import dev.langchain4j.memory.ChatMemory;
+import dev.langchain4j.memory.chat.ChatMemoryProvider;
 import dev.langchain4j.memory.chat.MessageWindowChatMemory;
 import dev.langchain4j.model.chat.ChatLanguageModel;
 import dev.langchain4j.model.embedding.EmbeddingModel;
@@ -21,8 +21,8 @@ class AssistantConfiguration {
      * This chat memory will be used by an {@link Assistant}
      */
     @Bean
-    ChatMemory chatMemory() {
-        return MessageWindowChatMemory.withMaxMessages(10);
+    ChatMemoryProvider chatMemoryProvider() {
+        return memoryId -> MessageWindowChatMemory.withMaxMessages(10);
     }
 
     @Bean
src/main/java/org/springframework/samples/petclinic/chat/AssistantController.java
@@ -2,12 +2,11 @@
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.springframework.web.bind.annotation.PostMapping;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RestController;
+import org.springframework.web.bind.annotation.*;
 import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;
 
 import java.io.IOException;
+import java.util.UUID;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;

@@ -25,10 +24,10 @@ class AssistantController {
     }
 
     // Using the POST method due to chat memory capabilities
-    @PostMapping(value = "/chat")
-    public SseEmitter chat(@RequestBody String query) {
+    @PostMapping(value = "/chat/{user}")
+    public SseEmitter chat(@PathVariable UUID user, @RequestBody String query) {
         SseEmitter emitter = new SseEmitter();
-        nonBlockingService.execute(() -> assistant.chat(query).onNext(message -> {
+        nonBlockingService.execute(() -> assistant.chat(user, query).onNext(message -> {
             try {
                 sendMessage(emitter, message);
             }
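To exercise the modified endpoint by hand, here is a hedged sketch of a plain-Java client that POSTs a question to `/chat/{user}` and prints the Server-Sent Events as they stream back. The local URL, port, and sample question are assumptions for illustration; only the path shape, headers, and plain-text request body follow the controller change above.

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.UUID;

public class ChatClientSketch {

    public static void main(String[] args) throws Exception {
        // The UUID plays the same role as the userId that chat.js keeps in sessionStorage:
        // reuse it across requests to stay within the same per-user chat memory.
        UUID user = UUID.randomUUID();

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/chat/" + user)) // assumed local Spring Boot port
                .header("Accept", "text/event-stream")
                .POST(HttpRequest.BodyPublishers.ofString("Which vets specialize in surgery?"))
                .build();

        // BodyHandlers.ofLines() exposes the response body lazily, so SSE lines print as they arrive.
        HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofLines())
                .body()
                .forEach(System.out::println);
    }
}
```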
@@ -88,5 +88,4 @@ record VetResponse(List<String> vet) {
 }
 
 record VetRequest(Vet vet) {
-
 }
18 changes: 17 additions & 1 deletion src/main/resources/static/resources/js/chat.js
@@ -46,9 +46,16 @@ async function sendMessage() {
     const userElements = prepareMessage("user");
     displayMessage(query, userElements);
 
+    // Retrieve or create a UserID as a UUID v4
+    let userId = sessionStorage.getItem('userId');
+    if (!userId) {
+        userId = uuidv4();
+        sessionStorage.setItem('userId', userId);
+    }
+
     // We'll start by using fetch to initiate a POST request to our SSE endpoint.
     // This endpoint is configured to send multiple messages, with the response header Content-Type: text/event-stream.
-    let response = await fetch('/chat', {
+    let response = await fetch('/chat/' + userId, {
         method: 'POST',
         headers: {
             'Accept': 'text/event-stream',
@@ -118,3 +125,12 @@ function loadChatMessages() {
         document.getElementById('chatbox-messages').scrollTop = document.getElementById('chatbox-messages').scrollHeight;
     }
 }
+
+function uuidv4() {
+    return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'
+        .replace(/[xy]/g, function (c) {
+            const r = Math.random() * 16 | 0;
+            const v = c === 'x' ? r : (r & 0x3 | 0x8);
+            return v.toString(16);
+        });
+}
