diff --git a/compose.yaml b/compose.yaml index 33a4baef..5ee41647 100644 --- a/compose.yaml +++ b/compose.yaml @@ -5,7 +5,7 @@ services: - '8080' environment: - SPRING_PROFILES_ACTIVE=prod - - DATABASE_URL=jdbc:postgresql://postgres:5432/hephaestus + - DATABASE_URL=postgresql://postgres:5432/hephaestus - DATABASE_USERNAME=root - DATABASE_PASSWORD=root - SECURITY_USER_NAME=${SECURITY_USER_NAME:-admin} @@ -25,6 +25,9 @@ services: intelligence-service: build: server/intelligence-service environment: + - DATABASE_URL=postgresql://postgres:5432/hephaestus + - DATABASE_USERNAME=root + - DATABASE_PASSWORD=root # Either OPENAI_API_KEY or AZURE_OPENAI_API_KEY must be set - OPENAI_API_KEY - AZURE_OPENAI_API_KEY diff --git a/package.json b/package.json index 5ac745bc..969d5f2a 100644 --- a/package.json +++ b/package.json @@ -10,7 +10,7 @@ "generate:api:application-server": "npm run generate:api:application-server:specs && npm run generate:api:application-server:clean && npm run generate:api:application-server:client", "generate:api:intelligence-service:clean": "shx rm -rf server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice", "generate:api:intelligence-service:specs": "python -m server.intelligence-service.app.generate_openapi_yaml", - "generate:api:intelligence-service:client": "npx openapi-generator-cli generate -i server/intelligence-service/openapi.yaml -g java --library resttemplate --api-package de.tum.in.www1.hephaestus.intelligenceservice.api --model-package de.tum.in.www1.hephaestus.intelligenceservice.model --invoker-package de.tum.in.www1.hephaestus.intelligenceservice --model-name-prefix IS --additional-properties useJakartaEe=true,performBeanValidation=true,hideGenerationTimestamp=true --package-name de.tum.in.www1.hephaestus.intelligenceservice -o tmp/java-client && shx cp -r tmp/java-client/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice server/application-server/src/main/java/de/tum/in/www1/hephaestus && shx rm -rf 
tmp", + "generate:api:intelligence-service:client": "npx openapi-generator-cli generate -i server/intelligence-service/openapi.yaml -g java --library resttemplate --api-package de.tum.in.www1.hephaestus.intelligenceservice.api --model-package de.tum.in.www1.hephaestus.intelligenceservice.model --invoker-package de.tum.in.www1.hephaestus.intelligenceservice --additional-properties useJakartaEe=true,performBeanValidation=true,hideGenerationTimestamp=true --package-name de.tum.in.www1.hephaestus.intelligenceservice -o tmp/java-client && shx cp -r tmp/java-client/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice server/application-server/src/main/java/de/tum/in/www1/hephaestus && shx rm -rf tmp", "generate:api:intelligence-service": "npm run generate:api:intelligence-service:clean && npm run generate:api:intelligence-service:specs && npm run generate:api:intelligence-service:client", "generate:api": "npm run generate:api:intelligence-service && npm run generate:api:application-server", "format:java:check": "prettier --check server/application-server/src/**/*.java --config-precedence prefer-file --config server/application-server/.prettierrc.yaml --ignore-path server/application-server/.prettierignore", diff --git a/server/application-server/openapi.yaml b/server/application-server/openapi.yaml index da7f933b..f907c8d3 100644 --- a/server/application-server/openapi.yaml +++ b/server/application-server/openapi.yaml @@ -800,6 +800,7 @@ components: required: - createdAt - id + - isClosed type: object properties: id: @@ -808,3 +809,5 @@ components: createdAt: type: string format: date-time + isClosed: + type: boolean diff --git a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/api/HealthcheckApi.java b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/api/HealthcheckApi.java index 26d221ae..c8c74c19 100644 --- 
a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/api/HealthcheckApi.java +++ b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/api/HealthcheckApi.java @@ -3,7 +3,7 @@ import de.tum.in.www1.hephaestus.intelligenceservice.ApiClient; import de.tum.in.www1.hephaestus.intelligenceservice.BaseApi; -import de.tum.in.www1.hephaestus.intelligenceservice.model.ISHealthCheck; +import de.tum.in.www1.hephaestus.intelligenceservice.model.HealthCheck; import java.util.Collections; import java.util.HashMap; @@ -41,10 +41,10 @@ public HealthcheckApi(ApiClient apiClient) { * Perform a Health Check * ## Perform a Health Check Endpoint to perform a healthcheck on. This endpoint can primarily be used Docker to ensure a robust container orchestration and management is in place. Other services which rely on proper functioning of the API service will not deploy if this endpoint returns any other HTTP status code except 200 (OK). Returns: HealthCheck: Returns a JSON response with the health status *

200 - Return HTTP Status Code 200 (OK) - * @return ISHealthCheck + * @return HealthCheck * @throws RestClientException if an error occurs while attempting to invoke the API */ - public ISHealthCheck getHealthHealthGet() throws RestClientException { + public HealthCheck getHealthHealthGet() throws RestClientException { return getHealthHealthGetWithHttpInfo().getBody(); } @@ -52,10 +52,10 @@ public ISHealthCheck getHealthHealthGet() throws RestClientException { * Perform a Health Check * ## Perform a Health Check Endpoint to perform a healthcheck on. This endpoint can primarily be used Docker to ensure a robust container orchestration and management is in place. Other services which rely on proper functioning of the API service will not deploy if this endpoint returns any other HTTP status code except 200 (OK). Returns: HealthCheck: Returns a JSON response with the health status *

200 - Return HTTP Status Code 200 (OK) - * @return ResponseEntity<ISHealthCheck> + * @return ResponseEntity<HealthCheck> * @throws RestClientException if an error occurs while attempting to invoke the API */ - public ResponseEntity getHealthHealthGetWithHttpInfo() throws RestClientException { + public ResponseEntity getHealthHealthGetWithHttpInfo() throws RestClientException { Object localVarPostBody = null; @@ -73,7 +73,7 @@ public ResponseEntity getHealthHealthGetWithHttpInfo() throws Res String[] localVarAuthNames = new String[] { }; - ParameterizedTypeReference localReturnType = new ParameterizedTypeReference() {}; + ParameterizedTypeReference localReturnType = new ParameterizedTypeReference() {}; return apiClient.invokeAPI("/health", HttpMethod.GET, Collections.emptyMap(), localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarCookieParams, localVarFormParams, localVarAccept, localVarContentType, localVarAuthNames, localReturnType); } diff --git a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/api/MentorApi.java b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/api/MentorApi.java index bf921f78..f0d8c52f 100644 --- a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/api/MentorApi.java +++ b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/api/MentorApi.java @@ -3,9 +3,10 @@ import de.tum.in.www1.hephaestus.intelligenceservice.ApiClient; import de.tum.in.www1.hephaestus.intelligenceservice.BaseApi; -import de.tum.in.www1.hephaestus.intelligenceservice.model.ISHTTPValidationError; -import de.tum.in.www1.hephaestus.intelligenceservice.model.ISMentorMessage; -import de.tum.in.www1.hephaestus.intelligenceservice.model.ISMessageHistory; +import de.tum.in.www1.hephaestus.intelligenceservice.model.HTTPValidationError; +import de.tum.in.www1.hephaestus.intelligenceservice.model.MentorRequest; +import 
de.tum.in.www1.hephaestus.intelligenceservice.model.MentorResponse; +import de.tum.in.www1.hephaestus.intelligenceservice.model.MentorStartRequest; import java.util.Collections; import java.util.HashMap; @@ -40,33 +41,33 @@ public MentorApi(ApiClient apiClient) { } /** - * Start and continue a chat session with an LLM. + * Continue a chat session with an LLM. * *

200 - Successful Response *

422 - Validation Error - * @param isMessageHistory (required) - * @return ISMentorMessage + * @param mentorRequest (required) + * @return MentorResponse * @throws RestClientException if an error occurs while attempting to invoke the API */ - public ISMentorMessage generateMentorPost(ISMessageHistory isMessageHistory) throws RestClientException { - return generateMentorPostWithHttpInfo(isMessageHistory).getBody(); + public MentorResponse generateMentorPost(MentorRequest mentorRequest) throws RestClientException { + return generateMentorPostWithHttpInfo(mentorRequest).getBody(); } /** - * Start and continue a chat session with an LLM. + * Continue a chat session with an LLM. * *

200 - Successful Response *

422 - Validation Error - * @param isMessageHistory (required) - * @return ResponseEntity<ISMentorMessage> + * @param mentorRequest (required) + * @return ResponseEntity<MentorResponse> * @throws RestClientException if an error occurs while attempting to invoke the API */ - public ResponseEntity generateMentorPostWithHttpInfo(ISMessageHistory isMessageHistory) throws RestClientException { - Object localVarPostBody = isMessageHistory; + public ResponseEntity generateMentorPostWithHttpInfo(MentorRequest mentorRequest) throws RestClientException { + Object localVarPostBody = mentorRequest; - // verify the required parameter 'isMessageHistory' is set - if (isMessageHistory == null) { - throw new HttpClientErrorException(HttpStatus.BAD_REQUEST, "Missing the required parameter 'isMessageHistory' when calling generateMentorPost"); + // verify the required parameter 'mentorRequest' is set + if (mentorRequest == null) { + throw new HttpClientErrorException(HttpStatus.BAD_REQUEST, "Missing the required parameter 'mentorRequest' when calling generateMentorPost"); } @@ -86,9 +87,59 @@ public ResponseEntity generateMentorPostWithHttpInfo(ISMessageH String[] localVarAuthNames = new String[] { }; - ParameterizedTypeReference localReturnType = new ParameterizedTypeReference() {}; + ParameterizedTypeReference localReturnType = new ParameterizedTypeReference() {}; return apiClient.invokeAPI("/mentor/", HttpMethod.POST, Collections.emptyMap(), localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarCookieParams, localVarFormParams, localVarAccept, localVarContentType, localVarAuthNames, localReturnType); } + /** + * Start a chat session with an LLM. + * + *

200 - Successful Response + *

422 - Validation Error + * @param mentorStartRequest (required) + * @return MentorResponse + * @throws RestClientException if an error occurs while attempting to invoke the API + */ + public MentorResponse startMentorStartPost(MentorStartRequest mentorStartRequest) throws RestClientException { + return startMentorStartPostWithHttpInfo(mentorStartRequest).getBody(); + } + + /** + * Start a chat session with an LLM. + * + *

200 - Successful Response + *

422 - Validation Error + * @param mentorStartRequest (required) + * @return ResponseEntity<MentorResponse> + * @throws RestClientException if an error occurs while attempting to invoke the API + */ + public ResponseEntity startMentorStartPostWithHttpInfo(MentorStartRequest mentorStartRequest) throws RestClientException { + Object localVarPostBody = mentorStartRequest; + + // verify the required parameter 'mentorStartRequest' is set + if (mentorStartRequest == null) { + throw new HttpClientErrorException(HttpStatus.BAD_REQUEST, "Missing the required parameter 'mentorStartRequest' when calling startMentorStartPost"); + } + + + final MultiValueMap localVarQueryParams = new LinkedMultiValueMap(); + final HttpHeaders localVarHeaderParams = new HttpHeaders(); + final MultiValueMap localVarCookieParams = new LinkedMultiValueMap(); + final MultiValueMap localVarFormParams = new LinkedMultiValueMap(); + + final String[] localVarAccepts = { + "application/json" + }; + final List localVarAccept = apiClient.selectHeaderAccept(localVarAccepts); + final String[] localVarContentTypes = { + "application/json" + }; + final MediaType localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes); + + String[] localVarAuthNames = new String[] { }; + + ParameterizedTypeReference localReturnType = new ParameterizedTypeReference() {}; + return apiClient.invokeAPI("/mentor/start", HttpMethod.POST, Collections.emptyMap(), localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarCookieParams, localVarFormParams, localVarAccept, localVarContentType, localVarAuthNames, localReturnType); + } @Override public ResponseEntity invokeAPI(String url, HttpMethod method, Object request, ParameterizedTypeReference returnType) throws RestClientException { diff --git a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISHTTPValidationError.java 
b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/HTTPValidationError.java similarity index 78% rename from server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISHTTPValidationError.java rename to server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/HTTPValidationError.java index dac878c7..c66ebc68 100644 --- a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISHTTPValidationError.java +++ b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/HTTPValidationError.java @@ -20,7 +20,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonTypeName; import com.fasterxml.jackson.annotation.JsonValue; -import de.tum.in.www1.hephaestus.intelligenceservice.model.ISValidationError; +import de.tum.in.www1.hephaestus.intelligenceservice.model.ValidationError; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -29,27 +29,26 @@ import org.hibernate.validator.constraints.*; /** - * ISHTTPValidationError + * HTTPValidationError */ @JsonPropertyOrder({ - ISHTTPValidationError.JSON_PROPERTY_DETAIL + HTTPValidationError.JSON_PROPERTY_DETAIL }) -@JsonTypeName("HTTPValidationError") @jakarta.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", comments = "Generator version: 7.7.0") -public class ISHTTPValidationError { +public class HTTPValidationError { public static final String JSON_PROPERTY_DETAIL = "detail"; - private List detail = new ArrayList<>(); + private List detail = new ArrayList<>(); - public ISHTTPValidationError() { + public HTTPValidationError() { } - public ISHTTPValidationError detail(List detail) { + public HTTPValidationError detail(List detail) { this.detail = detail; return this; } - public ISHTTPValidationError addDetailItem(ISValidationError detailItem) { 
+ public HTTPValidationError addDetailItem(ValidationError detailItem) { if (this.detail == null) { this.detail = new ArrayList<>(); } @@ -65,14 +64,14 @@ public ISHTTPValidationError addDetailItem(ISValidationError detailItem) { @JsonProperty(JSON_PROPERTY_DETAIL) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) - public List getDetail() { + public List getDetail() { return detail; } @JsonProperty(JSON_PROPERTY_DETAIL) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) - public void setDetail(List detail) { + public void setDetail(List detail) { this.detail = detail; } @@ -84,7 +83,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } - ISHTTPValidationError htTPValidationError = (ISHTTPValidationError) o; + HTTPValidationError htTPValidationError = (HTTPValidationError) o; return Objects.equals(this.detail, htTPValidationError.detail); } @@ -96,7 +95,7 @@ public int hashCode() { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append("class ISHTTPValidationError {\n"); + sb.append("class HTTPValidationError {\n"); sb.append(" detail: ").append(toIndentedString(detail)).append("\n"); sb.append("}"); return sb.toString(); diff --git a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISHealthCheck.java b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/HealthCheck.java similarity index 90% rename from server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISHealthCheck.java rename to server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/HealthCheck.java index bc580df5..77b190a4 100644 --- a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISHealthCheck.java +++ b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/HealthCheck.java @@ 
-28,18 +28,17 @@ * Response model to validate and return when performing a health check. */ @JsonPropertyOrder({ - ISHealthCheck.JSON_PROPERTY_STATUS + HealthCheck.JSON_PROPERTY_STATUS }) -@JsonTypeName("HealthCheck") @jakarta.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", comments = "Generator version: 7.7.0") -public class ISHealthCheck { +public class HealthCheck { public static final String JSON_PROPERTY_STATUS = "status"; private String status = "OK"; - public ISHealthCheck() { + public HealthCheck() { } - public ISHealthCheck status(String status) { + public HealthCheck status(String status) { this.status = status; return this; @@ -72,7 +71,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } - ISHealthCheck healthCheck = (ISHealthCheck) o; + HealthCheck healthCheck = (HealthCheck) o; return Objects.equals(this.status, healthCheck.status); } @@ -84,7 +83,7 @@ public int hashCode() { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append("class ISHealthCheck {\n"); + sb.append("class HealthCheck {\n"); sb.append(" status: ").append(toIndentedString(status)).append("\n"); sb.append("}"); return sb.toString(); diff --git a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISMessageHistory.java b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISMessageHistory.java deleted file mode 100644 index 3b1257f4..00000000 --- a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISMessageHistory.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Hephaestus Intelligence Service API - * API documentation for the Hephaestus Intelligence Service. - * - * The version of the OpenAPI document: 0.0.1 - * Contact: felixtj.dietrich@tum.de - * - * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). 
- * https://openapi-generator.tech - * Do not edit the class manually. - */ - - -package de.tum.in.www1.hephaestus.intelligenceservice.model; - -import java.util.Objects; -import java.util.Arrays; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonTypeName; -import com.fasterxml.jackson.annotation.JsonValue; -import de.tum.in.www1.hephaestus.intelligenceservice.model.ISMessage; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import com.fasterxml.jackson.annotation.JsonPropertyOrder; -import com.fasterxml.jackson.annotation.JsonTypeName; -import org.hibernate.validator.constraints.*; - -/** - * ISMessageHistory - */ -@JsonPropertyOrder({ - ISMessageHistory.JSON_PROPERTY_MESSAGES -}) -@JsonTypeName("MessageHistory") -@jakarta.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", comments = "Generator version: 7.7.0") -public class ISMessageHistory { - public static final String JSON_PROPERTY_MESSAGES = "messages"; - private List messages = new ArrayList<>(); - - public ISMessageHistory() { - } - - public ISMessageHistory messages(List messages) { - - this.messages = messages; - return this; - } - - public ISMessageHistory addMessagesItem(ISMessage messagesItem) { - if (this.messages == null) { - this.messages = new ArrayList<>(); - } - this.messages.add(messagesItem); - return this; - } - - /** - * Get messages - * @return messages - */ - @jakarta.annotation.Nonnull - @JsonProperty(JSON_PROPERTY_MESSAGES) - @JsonInclude(value = JsonInclude.Include.ALWAYS) - - public List getMessages() { - return messages; - } - - - @JsonProperty(JSON_PROPERTY_MESSAGES) - @JsonInclude(value = JsonInclude.Include.ALWAYS) - public void setMessages(List messages) { - this.messages = messages; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - 
return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - ISMessageHistory messageHistory = (ISMessageHistory) o; - return Objects.equals(this.messages, messageHistory.messages); - } - - @Override - public int hashCode() { - return Objects.hash(messages); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("class ISMessageHistory {\n"); - sb.append(" messages: ").append(toIndentedString(messages)).append("\n"); - sb.append("}"); - return sb.toString(); - } - - /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). - */ - private String toIndentedString(Object o) { - if (o == null) { - return "null"; - } - return o.toString().replace("\n", "\n "); - } - -} - diff --git a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISMessage.java b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/MentorRequest.java similarity index 71% rename from server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISMessage.java rename to server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/MentorRequest.java index 9312b1d3..7afc5851 100644 --- a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISMessage.java +++ b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/MentorRequest.java @@ -25,25 +25,24 @@ import org.hibernate.validator.constraints.*; /** - * ISMessage + * MentorRequest */ @JsonPropertyOrder({ - ISMessage.JSON_PROPERTY_CONTENT, - ISMessage.JSON_PROPERTY_SENDER + MentorRequest.JSON_PROPERTY_CONTENT, + MentorRequest.JSON_PROPERTY_SESSION_ID }) -@JsonTypeName("Message") @jakarta.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", comments = "Generator version: 7.7.0") -public 
class ISMessage { +public class MentorRequest { public static final String JSON_PROPERTY_CONTENT = "content"; private String content; - public static final String JSON_PROPERTY_SENDER = "sender"; - private String sender; + public static final String JSON_PROPERTY_SESSION_ID = "session_id"; + private String sessionId; - public ISMessage() { + public MentorRequest() { } - public ISMessage content(String content) { + public MentorRequest content(String content) { this.content = content; return this; @@ -68,29 +67,29 @@ public void setContent(String content) { this.content = content; } - public ISMessage sender(String sender) { + public MentorRequest sessionId(String sessionId) { - this.sender = sender; + this.sessionId = sessionId; return this; } /** - * Get sender - * @return sender + * Get sessionId + * @return sessionId */ @jakarta.annotation.Nonnull - @JsonProperty(JSON_PROPERTY_SENDER) + @JsonProperty(JSON_PROPERTY_SESSION_ID) @JsonInclude(value = JsonInclude.Include.ALWAYS) - public String getSender() { - return sender; + public String getSessionId() { + return sessionId; } - @JsonProperty(JSON_PROPERTY_SENDER) + @JsonProperty(JSON_PROPERTY_SESSION_ID) @JsonInclude(value = JsonInclude.Include.ALWAYS) - public void setSender(String sender) { - this.sender = sender; + public void setSessionId(String sessionId) { + this.sessionId = sessionId; } @Override @@ -101,22 +100,22 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } - ISMessage message = (ISMessage) o; - return Objects.equals(this.content, message.content) && - Objects.equals(this.sender, message.sender); + MentorRequest mentorRequest = (MentorRequest) o; + return Objects.equals(this.content, mentorRequest.content) && + Objects.equals(this.sessionId, mentorRequest.sessionId); } @Override public int hashCode() { - return Objects.hash(content, sender); + return Objects.hash(content, sessionId); } @Override public String toString() { StringBuilder sb = new 
StringBuilder(); - sb.append("class ISMessage {\n"); + sb.append("class MentorRequest {\n"); sb.append(" content: ").append(toIndentedString(content)).append("\n"); - sb.append(" sender: ").append(toIndentedString(sender)).append("\n"); + sb.append(" sessionId: ").append(toIndentedString(sessionId)).append("\n"); sb.append("}"); return sb.toString(); } diff --git a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISMentorMessage.java b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/MentorResponse.java similarity index 86% rename from server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISMentorMessage.java rename to server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/MentorResponse.java index b476d527..a33bc477 100644 --- a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISMentorMessage.java +++ b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/MentorResponse.java @@ -25,21 +25,20 @@ import org.hibernate.validator.constraints.*; /** - * ISMentorMessage + * MentorResponse */ @JsonPropertyOrder({ - ISMentorMessage.JSON_PROPERTY_CONTENT + MentorResponse.JSON_PROPERTY_CONTENT }) -@JsonTypeName("MentorMessage") @jakarta.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", comments = "Generator version: 7.7.0") -public class ISMentorMessage { +public class MentorResponse { public static final String JSON_PROPERTY_CONTENT = "content"; private String content; - public ISMentorMessage() { + public MentorResponse() { } - public ISMentorMessage content(String content) { + public MentorResponse content(String content) { this.content = content; return this; @@ -72,8 +71,8 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } - ISMentorMessage 
mentorMessage = (ISMentorMessage) o; - return Objects.equals(this.content, mentorMessage.content); + MentorResponse mentorResponse = (MentorResponse) o; + return Objects.equals(this.content, mentorResponse.content); } @Override @@ -84,7 +83,7 @@ public int hashCode() { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append("class ISMentorMessage {\n"); + sb.append("class MentorResponse {\n"); sb.append(" content: ").append(toIndentedString(content)).append("\n"); sb.append("}"); return sb.toString(); diff --git a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/MentorStartRequest.java b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/MentorStartRequest.java new file mode 100644 index 00000000..230287c5 --- /dev/null +++ b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/MentorStartRequest.java @@ -0,0 +1,135 @@ +/* + * Hephaestus Intelligence Service API + * API documentation for the Hephaestus Intelligence Service. + * + * The version of the OpenAPI document: 0.0.1 + * Contact: felixtj.dietrich@tum.de + * + * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + * https://openapi-generator.tech + * Do not edit the class manually. 
+ */ + + +package de.tum.in.www1.hephaestus.intelligenceservice.model; + +import java.util.Objects; +import java.util.Arrays; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonTypeName; +import com.fasterxml.jackson.annotation.JsonValue; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import com.fasterxml.jackson.annotation.JsonTypeName; +import org.hibernate.validator.constraints.*; + +/** + * MentorStartRequest + */ +@JsonPropertyOrder({ + MentorStartRequest.JSON_PROPERTY_PREVIOUS_SESSION_ID, + MentorStartRequest.JSON_PROPERTY_SESSION_ID +}) +@jakarta.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", comments = "Generator version: 7.7.0") +public class MentorStartRequest { + public static final String JSON_PROPERTY_PREVIOUS_SESSION_ID = "previous_session_id"; + private String previousSessionId; + + public static final String JSON_PROPERTY_SESSION_ID = "session_id"; + private String sessionId; + + public MentorStartRequest() { + } + + public MentorStartRequest previousSessionId(String previousSessionId) { + + this.previousSessionId = previousSessionId; + return this; + } + + /** + * Get previousSessionId + * @return previousSessionId + */ + @jakarta.annotation.Nonnull + @JsonProperty(JSON_PROPERTY_PREVIOUS_SESSION_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + + public String getPreviousSessionId() { + return previousSessionId; + } + + + @JsonProperty(JSON_PROPERTY_PREVIOUS_SESSION_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public void setPreviousSessionId(String previousSessionId) { + this.previousSessionId = previousSessionId; + } + + public MentorStartRequest sessionId(String sessionId) { + + this.sessionId = sessionId; + return this; + } + + /** + * Get sessionId + * @return sessionId + */ + @jakarta.annotation.Nonnull + 
@JsonProperty(JSON_PROPERTY_SESSION_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + + public String getSessionId() { + return sessionId; + } + + + @JsonProperty(JSON_PROPERTY_SESSION_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public void setSessionId(String sessionId) { + this.sessionId = sessionId; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + MentorStartRequest mentorStartRequest = (MentorStartRequest) o; + return Objects.equals(this.previousSessionId, mentorStartRequest.previousSessionId) && + Objects.equals(this.sessionId, mentorStartRequest.sessionId); + } + + @Override + public int hashCode() { + return Objects.hash(previousSessionId, sessionId); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class MentorStartRequest {\n"); + sb.append(" previousSessionId: ").append(toIndentedString(previousSessionId)).append("\n"); + sb.append(" sessionId: ").append(toIndentedString(sessionId)).append("\n"); + sb.append("}"); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces + * (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } + +} + diff --git a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISValidationError.java b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ValidationError.java similarity index 81% rename from server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISValidationError.java rename to server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ValidationError.java index be6ca6d7..458a22e0 100644 --- a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISValidationError.java +++ b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ValidationError.java @@ -20,7 +20,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonTypeName; import com.fasterxml.jackson.annotation.JsonValue; -import de.tum.in.www1.hephaestus.intelligenceservice.model.ISValidationErrorLocInner; +import de.tum.in.www1.hephaestus.intelligenceservice.model.ValidationErrorLocInner; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -29,18 +29,17 @@ import org.hibernate.validator.constraints.*; /** - * ISValidationError + * ValidationError */ @JsonPropertyOrder({ - ISValidationError.JSON_PROPERTY_LOC, - ISValidationError.JSON_PROPERTY_MSG, - ISValidationError.JSON_PROPERTY_TYPE + ValidationError.JSON_PROPERTY_LOC, + ValidationError.JSON_PROPERTY_MSG, + ValidationError.JSON_PROPERTY_TYPE }) -@JsonTypeName("ValidationError") @jakarta.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", comments = "Generator version: 7.7.0") -public class ISValidationError { +public class ValidationError { public static final String 
JSON_PROPERTY_LOC = "loc"; - private List loc = new ArrayList<>(); + private List loc = new ArrayList<>(); public static final String JSON_PROPERTY_MSG = "msg"; private String msg; @@ -48,16 +47,16 @@ public class ISValidationError { public static final String JSON_PROPERTY_TYPE = "type"; private String type; - public ISValidationError() { + public ValidationError() { } - public ISValidationError loc(List loc) { + public ValidationError loc(List loc) { this.loc = loc; return this; } - public ISValidationError addLocItem(ISValidationErrorLocInner locItem) { + public ValidationError addLocItem(ValidationErrorLocInner locItem) { if (this.loc == null) { this.loc = new ArrayList<>(); } @@ -73,18 +72,18 @@ public ISValidationError addLocItem(ISValidationErrorLocInner locItem) { @JsonProperty(JSON_PROPERTY_LOC) @JsonInclude(value = JsonInclude.Include.ALWAYS) - public List getLoc() { + public List getLoc() { return loc; } @JsonProperty(JSON_PROPERTY_LOC) @JsonInclude(value = JsonInclude.Include.ALWAYS) - public void setLoc(List loc) { + public void setLoc(List loc) { this.loc = loc; } - public ISValidationError msg(String msg) { + public ValidationError msg(String msg) { this.msg = msg; return this; @@ -109,7 +108,7 @@ public void setMsg(String msg) { this.msg = msg; } - public ISValidationError type(String type) { + public ValidationError type(String type) { this.type = type; return this; @@ -142,7 +141,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } - ISValidationError validationError = (ISValidationError) o; + ValidationError validationError = (ValidationError) o; return Objects.equals(this.loc, validationError.loc) && Objects.equals(this.msg, validationError.msg) && Objects.equals(this.type, validationError.type); @@ -156,7 +155,7 @@ public int hashCode() { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append("class ISValidationError {\n"); + sb.append("class ValidationError 
{\n"); sb.append(" loc: ").append(toIndentedString(loc)).append("\n"); sb.append(" msg: ").append(toIndentedString(msg)).append("\n"); sb.append(" type: ").append(toIndentedString(type)).append("\n"); diff --git a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISValidationErrorLocInner.java b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ValidationErrorLocInner.java similarity index 90% rename from server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISValidationErrorLocInner.java rename to server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ValidationErrorLocInner.java index b515b8fe..3f3658e3 100644 --- a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ISValidationErrorLocInner.java +++ b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/intelligenceservice/model/ValidationErrorLocInner.java @@ -21,14 +21,14 @@ import org.hibernate.validator.constraints.*; /** - * ISValidationErrorLocInner + * ValidationErrorLocInner */ @JsonPropertyOrder({ }) @JsonTypeName("ValidationError_loc_inner") @jakarta.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", comments = "Generator version: 7.7.0") -public class ISValidationErrorLocInner { - public ISValidationErrorLocInner() { +public class ValidationErrorLocInner { + public ValidationErrorLocInner() { } @Override @@ -50,7 +50,7 @@ public int hashCode() { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append("class ISValidationErrorLocInner {\n"); + sb.append("class ValidationErrorLocInner {\n"); sb.append("}"); return sb.toString(); } diff --git a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/message/MessageService.java 
b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/message/MessageService.java index d68976f1..ef6a6d8f 100644 --- a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/message/MessageService.java +++ b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/message/MessageService.java @@ -1,9 +1,9 @@ package de.tum.in.www1.hephaestus.mentor.message; import de.tum.in.www1.hephaestus.config.IntelligenceServiceConfig.IntelligenceServiceApi; -import de.tum.in.www1.hephaestus.intelligenceservice.model.ISMentorMessage; -import de.tum.in.www1.hephaestus.intelligenceservice.model.ISMessage; -import de.tum.in.www1.hephaestus.intelligenceservice.model.ISMessageHistory; +import de.tum.in.www1.hephaestus.intelligenceservice.model.MentorRequest; +import de.tum.in.www1.hephaestus.intelligenceservice.model.MentorResponse; +import de.tum.in.www1.hephaestus.intelligenceservice.model.MentorStartRequest; import de.tum.in.www1.hephaestus.mentor.message.Message.MessageSender; import de.tum.in.www1.hephaestus.mentor.session.Session; import de.tum.in.www1.hephaestus.mentor.session.SessionRepository; @@ -43,6 +43,14 @@ public MessageDTO sendMessage(String content, Long sessionId) { } Session currentSession = session.get(); + // prevent sending messages to closed sessions + Session previousSession = sessionRepository + .findFirstByUserOrderByCreatedAtDesc(currentSession.getUser()) + .orElse(null); + if (previousSession != null && previousSession.isClosed()) { + return null; + } + Message userMessage = new Message(); userMessage.setSender(MessageSender.USER); userMessage.setContent(content); @@ -52,52 +60,36 @@ public MessageDTO sendMessage(String content, Long sessionId) { currentSession.getMessages().add(savedUserMessage); sessionRepository.save(currentSession); - String systemResponse = generateResponse(sessionId); - - // prevent saving empty system messages if the intelligence service is down - if (systemResponse == null) { 
+ try { + MentorRequest mentorRequest = new MentorRequest(); + mentorRequest.setContent(content); + mentorRequest.setSessionId(String.valueOf(sessionId)); + MentorResponse mentorMessage = intelligenceServiceApi.generateMentorPost(mentorRequest); + String mentorResponse = mentorMessage.getContent(); + Message savedMentorMessage = createMentorMessage(currentSession, mentorResponse); + + return MessageDTO.fromMessage(savedMentorMessage); + } catch (Exception e) { + // prevent saving empty system messages if the intelligence service is down logger.error("Failed to generate response for message: {}", content); - return MessageDTO.fromMessage(savedUserMessage); - } - - Message savedSystemMessage = createSystemMessage(currentSession, systemResponse); - return MessageDTO.fromMessage(savedSystemMessage); - } - - public void generateFirstSystemMessage(Session session) { - String systemResponse = generateResponse(session.getId()); - - // prevent saving empty system messages if the intelligence service is down - if (systemResponse == null) { - logger.error("Failed to generate response for the conversation start"); - return; + return null; } - - createSystemMessage(session, systemResponse); } - private String generateResponse(Long sessionId) { - List messages = messageRepository.findBySessionId(sessionId); - ISMessageHistory messageHistory = new ISMessageHistory(); - - messageHistory.setMessages( - messages - .stream() - .map(message -> - new ISMessage().content(message.getContent()).sender(message.getSender().toString()) - ) - .toList() - ); + public void sendFirstMessage(Session session, String previousSessionId) { try { - ISMentorMessage mentorMessage = intelligenceServiceApi.generateMentorPost(messageHistory); - return mentorMessage.getContent(); + MentorStartRequest mentorStartRequest = new MentorStartRequest(); + mentorStartRequest.setPreviousSessionId(previousSessionId); + mentorStartRequest.setSessionId(String.valueOf(session.getId())); + MentorResponse mentorMessage = 
intelligenceServiceApi.startMentorStartPost(mentorStartRequest); + createMentorMessage(session, mentorMessage.getContent()); } catch (Exception e) { - logger.error("Failed to generate response for message: {}", e.getMessage()); - return null; + // prevent saving empty system messages if the intelligence service is down + logger.error("Failed to generate response during session start"); } } - private Message createSystemMessage(Session currentSession, String systemResponse) { + private Message createMentorMessage(Session currentSession, String systemResponse) { Message systemMessage = new Message(); systemMessage.setSender(MessageSender.MENTOR); systemMessage.setContent(systemResponse); diff --git a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/session/Session.java b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/session/Session.java index 16046f98..cfa8efeb 100644 --- a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/session/Session.java +++ b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/session/Session.java @@ -31,6 +31,9 @@ public class Session { @NonNull private OffsetDateTime createdAt = OffsetDateTime.now(); + @NonNull + private boolean isClosed = false; + @ManyToOne @JoinColumn(name = "user_id") @ToString.Exclude diff --git a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/session/SessionDTO.java b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/session/SessionDTO.java index e14b6ca0..0dea5874 100644 --- a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/session/SessionDTO.java +++ b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/session/SessionDTO.java @@ -5,8 +5,8 @@ import org.springframework.lang.NonNull; @JsonInclude(JsonInclude.Include.NON_EMPTY) -public record SessionDTO(@NonNull Long id, @NonNull OffsetDateTime createdAt) { +public record 
SessionDTO(@NonNull Long id, @NonNull OffsetDateTime createdAt, @NonNull boolean isClosed) { public static SessionDTO fromSession(Session session) { - return new SessionDTO(session.getId(), session.getCreatedAt()); + return new SessionDTO(session.getId(), session.getCreatedAt(), session.isClosed()); } } diff --git a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/session/SessionRepository.java b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/session/SessionRepository.java index 245f37d5..18b21a1d 100644 --- a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/session/SessionRepository.java +++ b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/session/SessionRepository.java @@ -2,10 +2,13 @@ import de.tum.in.www1.hephaestus.gitprovider.user.User; import java.util.List; +import java.util.Optional; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.stereotype.Repository; @Repository public interface SessionRepository extends JpaRepository { List findByUser(User user); + + Optional findFirstByUserOrderByCreatedAtDesc(User user); } diff --git a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/session/SessionService.java b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/session/SessionService.java index c8a6d72d..9d3a884c 100644 --- a/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/session/SessionService.java +++ b/server/application-server/src/main/java/de/tum/in/www1/hephaestus/mentor/session/SessionService.java @@ -33,11 +33,23 @@ public Optional findSessionById(Long sessionId) { } public SessionDTO createSession(User user) { + String previousSessionId = sessionRepository + .findFirstByUserOrderByCreatedAtDesc(user) + .map(Session::getId) + .map(String::valueOf) + .orElse(""); + // close the previous session if it exists to prevent multiple open sessions + if 
(previousSessionId != "") { + Session previousSession = sessionRepository.findFirstByUserOrderByCreatedAtDesc(user).get(); + previousSession.setClosed(true); + sessionRepository.save(previousSession); + } + + // create a new session Session session = new Session(); session.setUser(user); - Session savedSession = sessionRepository.save(session); - messageService.generateFirstSystemMessage(session); + messageService.sendFirstMessage(session, previousSessionId); return SessionDTO.fromSession(savedSession); } } diff --git a/server/application-server/src/main/resources/application-prod.yml b/server/application-server/src/main/resources/application-prod.yml index 37fdd15c..0af2b84f 100644 --- a/server/application-server/src/main/resources/application-prod.yml +++ b/server/application-server/src/main/resources/application-prod.yml @@ -1,6 +1,6 @@ spring: datasource: - url: ${DATABASE_URL} + url: jdbc:${DATABASE_URL} username: ${DATABASE_USERNAME} password: ${DATABASE_PASSWORD} jpa: diff --git a/server/application-server/src/main/resources/db/changelog/1737749442154_changelog.xml b/server/application-server/src/main/resources/db/changelog/1737749442154_changelog.xml new file mode 100644 index 00000000..2dc37de6 --- /dev/null +++ b/server/application-server/src/main/resources/db/changelog/1737749442154_changelog.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/server/application-server/src/main/resources/db/master.xml b/server/application-server/src/main/resources/db/master.xml index 8cb2056a..2354c730 100644 --- a/server/application-server/src/main/resources/db/master.xml +++ b/server/application-server/src/main/resources/db/master.xml @@ -10,4 +10,5 @@ + diff --git a/server/intelligence-service/app/mentor/conditions.py b/server/intelligence-service/app/mentor/conditions.py new file mode 100644 index 00000000..4f248b75 --- /dev/null +++ b/server/intelligence-service/app/mentor/conditions.py @@ -0,0 +1,22 @@ +from .state import State + + +def start_router(state: 
State): + if len(state["messages"]) == 0: + return "greeting" + return "check_state" + + +def main_router(state: State): + if state["status"]: + return "status_node" + elif state["impediments"]: + return "impediments_node" + elif state["promises"]: + return "promises_node" + elif state["summary"]: + return "summary_node" + elif state["finish"]: + return "finish_node" + else: + return "mentor_node" diff --git a/server/intelligence-service/app/mentor/nodes.py b/server/intelligence-service/app/mentor/nodes.py new file mode 100644 index 00000000..8f5d6caa --- /dev/null +++ b/server/intelligence-service/app/mentor/nodes.py @@ -0,0 +1,198 @@ +from .state import State +from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder +from ..model import model +from uuid import uuid4 +from langchain_core.runnables.config import RunnableConfig +from langgraph.store.base import BaseStore +from .prompt_loader import PromptLoader + +prompt_loader = PromptLoader() +persona_prompt = prompt_loader.get_prompt(type="mentor", name="persona") + + +def greet(state: State): + prompt = ChatPromptTemplate( + [ + ("system", persona_prompt), + ("system", prompt_loader.get_prompt(type="mentor", name="greeting")), + MessagesPlaceholder("messages"), + ] + ) + chain = prompt | model + + return { + "messages": [chain.invoke({"messages": state["messages"]})], + "status": True, # directly update the state to the next step + } + + +def ask_status(state: State, store: BaseStore): + previous_session_id = state["last_thread"] + if state["last_thread"] == "": + previous_promises = "" + else: + namespace = (previous_session_id, "summary") + previous_promises = store.search(namespace) + if not previous_promises: + previous_promises = "" + else: + for item in previous_promises: + if "promises" in item.value: + previous_promises = item.value["promises"] + break + + prompt = ChatPromptTemplate( + [ + ("system", persona_prompt), + ( + "system", + ( + prompt_loader.get_prompt(type="mentor", 
name="status").format_map( + {"previous_promises": previous_promises} + ) + ), + ), + MessagesPlaceholder("messages"), + ] + ) + + chain = prompt | model + return {"messages": [chain.invoke({"messages": state["messages"]})]} + + +def ask_impediments(state: State, store: BaseStore): + previous_session_id = state["last_thread"] + previous_impediments = "" + if state["last_thread"] != "": + namespace = (previous_session_id, "summary") + previous_impediments = store.search(namespace) + if previous_impediments: + for item in previous_impediments: + if "impediments" in item.value: + previous_impediments = item.value["impediments"] + break + + prompt = ChatPromptTemplate( + [ + ("system", persona_prompt), + ( + "system", + ( + prompt_loader.get_prompt( + type="mentor", name="impediments" + ).format_map({"previous_impediments": previous_impediments}) + ), + ), + MessagesPlaceholder("messages"), + ] + ) + chain = prompt | model + return {"messages": [chain.invoke({"messages": state["messages"]})]} + + +def ask_promises(state: State): + prompt = ChatPromptTemplate( + [ + ("system", persona_prompt), + ("system", prompt_loader.get_prompt(type="mentor", name="promises")), + MessagesPlaceholder("messages"), + ] + ) + chain = prompt | model + return {"messages": [chain.invoke({"messages": state["messages"]})]} + + +def ask_summary(state: State): + prompt = ChatPromptTemplate( + [ + ("system", persona_prompt), + ("system", prompt_loader.get_prompt(type="mentor", name="summary")), + MessagesPlaceholder("messages"), + ] + ) + chain = prompt | model + response = chain.invoke({"messages": state["messages"]}) + return {"messages": [response]} + + +def finish(state: State): + prompt = ChatPromptTemplate( + [ + ("system", persona_prompt), + ("system", prompt_loader.get_prompt(type="mentor", name="finish")), + MessagesPlaceholder("messages"), + ] + ) + chain = prompt | model + return { + "messages": [chain.invoke({"messages": state["messages"]})], + "finish": False, + "closed": True, + } 
+ + +# node responsible for checking the state of the conversation and updating it accordingly +def check_state(state: State): + step_order = ["status", "impediments", "promises", "summary", "finish"] + step = next((key for key in step_order if state.get(key)), None) + if not step: + return # exit early if no step is active without state update + + prompt = ChatPromptTemplate( + [ + ( + "system", + prompt_loader.get_prompt( + type="analyzer", name="check_state" + ).format_map({"step": step}), + ), + MessagesPlaceholder("messages"), + ] + ) + + chain = prompt | model + + if chain.invoke({"messages": state["messages"]}).content == "YES": + step_index = step_order.index(step) + if step_index < len(step_order) - 1: + next_step = step_order[step_index + 1] + return {step: False, next_step: True} + return + + +# node responsible for updating the long-term session memory, that can be used across multiple sessions +def update_memory(state: State, config: RunnableConfig, *, store: BaseStore): + session_id = config["configurable"]["thread_id"] + namespace = (session_id, "summary") + steps = ["impediments", "promises"] # steps to process + + for step in steps: + prompt = ChatPromptTemplate( + [ + ( + "system", + prompt_loader.get_prompt( + type="analyzer", name="update_memory" + ).format_map({"step": step}), + ), + MessagesPlaceholder("messages"), + ] + ) + + chain = prompt | model + response = chain.invoke({"messages": state["messages"]}).content + store.put(namespace, key=str(uuid4()), value={step: response}) + + return + + +# node responsible for generating responses after the user has finished the project update +def talk_to_mentor(state: State): + prompt = ChatPromptTemplate( + [ + ("system", persona_prompt), + MessagesPlaceholder("messages"), + ] + ) + chain = prompt | model + return {"messages": [chain.invoke({"messages": state["messages"]})]} diff --git a/server/intelligence-service/app/mentor/prompt_loader.py b/server/intelligence-service/app/mentor/prompt_loader.py 
index 6402ded4..ba2dbe31 100644 --- a/server/intelligence-service/app/mentor/prompt_loader.py +++ b/server/intelligence-service/app/mentor/prompt_loader.py @@ -6,14 +6,15 @@ class PromptLoader: def __init__(self, prompt_dir: str = "prompts"): self.prompt_dir = Path(__file__).parent / prompt_dir - def load_prompts(self) -> Dict[str, str]: + def load_prompts(self, type: str) -> Dict[str, str]: prompts = {} - for txt_file in self.prompt_dir.glob("*.txt"): + type_dir = self.prompt_dir / type + for txt_file in type_dir.glob("*.txt"): key = txt_file.stem # use the filename without extension as the key with open(txt_file, "r", encoding="utf-8") as f: prompts[key] = f.read().strip() return prompts - def get_prompt(self, name: str) -> str: - prompts = self.load_prompts() + def get_prompt(self, type: str, name: str) -> str: + prompts = self.load_prompts(type) return prompts.get(name, "") diff --git a/server/intelligence-service/app/mentor/prompts/analyzer/check_state.txt b/server/intelligence-service/app/mentor/prompts/analyzer/check_state.txt new file mode 100644 index 00000000..016b28c7 --- /dev/null +++ b/server/intelligence-service/app/mentor/prompts/analyzer/check_state.txt @@ -0,0 +1,16 @@ +You are an analyzer tasked with determining if a conversation about a {step} update is complete. Review the provided conversation and output only "YES" or "NO" based on these precise criteria: + +Output "YES" if and only if ALL of these conditions are met: +1. The LLM explicitly asked if the user has any (or any additional) information about the {step} update +2. The user provided a clear negative response (e.g., "no", "nope", "nothing else", "that's all") + +Output "NO" in all other scenarios, including: +- If the LLM didn't explicitly ask about additional information +- If the user's response was ambiguous or implicit +- If the user didn't directly answer the LLM's question +- If there was no clear conclusion to the {step} discussion + +Response Requirements: +1. 
Respond ONLY with "YES" or "NO" +2. No additional text, explanation, or formatting +3. Case-sensitive response (all capitals) \ No newline at end of file diff --git a/server/intelligence-service/app/mentor/prompts/analyzer/update_memory.txt b/server/intelligence-service/app/mentor/prompts/analyzer/update_memory.txt new file mode 100644 index 00000000..bd489948 --- /dev/null +++ b/server/intelligence-service/app/mentor/prompts/analyzer/update_memory.txt @@ -0,0 +1,35 @@ +Your task is to analyze the given conversation and create a summary. + +The summary type is: {step} + +If the summary type is "impediments": + Title: IMPEDIMENTS + Content rules: + - List only concrete challenges and problems mentioned by the user + - Each point must be specific and actionable + - Exclude resolved issues or general complaints + - If no impediments found, return exactly "IMPEDIMENTS\nNone reported" + +If the summary type is "promises": + Title: PROMISES + Content rules: + - List only specific commitments for the next sprint + - Each point must be concrete and measurable + - Exclude vague intentions or general discussions + - If no promises found, return exactly "PROMISES\nNone made" + +YOU MUST: +1. Always start with the exact title matching your summary type in CAPS +2. List each item on a new line with a bullet point +3. Keep descriptions brief but clear +4. Return ONLY the title and bullet points, no additional text + +Example format for impediments: +IMPEDIMENTS +- [specific impediment] +- [specific impediment] + +Example format for promises: +PROMISES +- [specific promise] +- [specific promise] \ No newline at end of file diff --git a/server/intelligence-service/app/mentor/prompts/mentor/finish.txt b/server/intelligence-service/app/mentor/prompts/mentor/finish.txt new file mode 100644 index 00000000..90c48a8c --- /dev/null +++ b/server/intelligence-service/app/mentor/prompts/mentor/finish.txt @@ -0,0 +1,2 @@ +Warmly thank the user for sharing their update. 
Express genuine appreciation for their efforts and encourage them with a positive wish for success in the upcoming sprint. +Conclude with a friendly farewell, such as 'See you next week!' or another suitable closing that aligns with the tone of the conversation. \ No newline at end of file diff --git a/server/intelligence-service/app/mentor/prompts/mentor/greeting.txt b/server/intelligence-service/app/mentor/prompts/mentor/greeting.txt new file mode 100644 index 00000000..3d7014e2 --- /dev/null +++ b/server/intelligence-service/app/mentor/prompts/mentor/greeting.txt @@ -0,0 +1,3 @@ + Greet the user warmly and express excitement about starting today’s session. Keep the greeting friendly and encouraging. + Mention that you are here to support them and look forward to making progress together. + Conclude by asking how their week has been so far to foster engagement. diff --git a/server/intelligence-service/app/mentor/prompts/mentor/impediments.txt b/server/intelligence-service/app/mentor/prompts/mentor/impediments.txt new file mode 100644 index 00000000..e906a567 --- /dev/null +++ b/server/intelligence-service/app/mentor/prompts/mentor/impediments.txt @@ -0,0 +1,18 @@ +You are an attentive listener analyzing a conversation between a student and an AI assistant. Each conversation segment consists of two parts: first the AI's message, then the student's response. + +Your task is to ensure the student's impediments and challenges are thoroughly discussed. Follow these steps: + +1. Review the conversation history carefully. + +2. IF YOU HAVEN'T YET ASKED about impediments/challenges in this conversation: + - If there are impediments among the following: "{previous_impediments}", reference them specifically, say that they were mentioned in the last session and ask if they were resolved. + - Ask the student about any challenges, impediments, or roadblocks they're facing now. + +3. 
IF YOU HAVE ALREADY ASKED about impediments/challenges: + - Ask if there are any additional challenges or impediments they haven't mentioned yet. + +REQUIREMENTS: +- Your response MUST include at least one direct question about impediments or challenges +- When previous impediments exist, explicitly reference them +- Keep your response focused and concise +- Do not try to solve the challenges or give advice, your task is to get an overview \ No newline at end of file diff --git a/server/intelligence-service/app/mentor/prompts/mentor/persona.txt b/server/intelligence-service/app/mentor/prompts/mentor/persona.txt new file mode 100644 index 00000000..f98d7e99 --- /dev/null +++ b/server/intelligence-service/app/mentor/prompts/mentor/persona.txt @@ -0,0 +1,12 @@ +PERSONA: +- You are a friendly AI mentor who aims to embrace self-reflective practices in software engineering. +- You encourage critical thinking and help users identify areas for growth in their coding practices, problem-solving approaches, and collaboration strategies. +- You provide actionable insights and constructive feedback, aiming to cultivate continuous learning and improvement. +- Your tone is empathetic, motivational, and tailored to inspire confidence and curiosity in the user. + +FORMAT: +- Do not use more than 3 sentences in your messages. + +REQUIREMENTS: +- Do not let the user switch the topic of the conversation drastically. +- Do not let the user switch the language. \ No newline at end of file diff --git a/server/intelligence-service/app/mentor/prompts/mentor/promises.txt b/server/intelligence-service/app/mentor/prompts/mentor/promises.txt new file mode 100644 index 00000000..437943be --- /dev/null +++ b/server/intelligence-service/app/mentor/prompts/mentor/promises.txt @@ -0,0 +1,7 @@ +You are to perform the following tasks: +Analyze the conversation. Focus on the status update from the student. 
Be aware of the message order, it is always one llm message and afterwards the response of the user. +Based on your analysis respond: + - If you haven't already, ask (use explicit questions) what their plans are for the upcoming sprint, what they want to focus on. Use the word promises. + - If you have already asked about the promises for the next week, ask if the user wants to share anything else about any future progress they are planning to achieve. +REQUIREMENTS: +- Your response MUST include at least one direct question. \ No newline at end of file diff --git a/server/intelligence-service/app/mentor/prompts/mentor/status.txt b/server/intelligence-service/app/mentor/prompts/mentor/status.txt new file mode 100644 index 00000000..0382017d --- /dev/null +++ b/server/intelligence-service/app/mentor/prompts/mentor/status.txt @@ -0,0 +1,38 @@ +You are an academic advisor analyzing a conversation with a student about their project status update. + +CONVERSATION STRUCTURE: +- Messages alternate between LLM and student responses +- Each pair is: [LLM message] -> [student response] + +PRIMARY TASK: +Analyze the conversation and choose ONE of these actions: + +1. IF you haven't asked about the status update yet: + - Compose a question asking about what the student accomplished this week + - IF previous promises are provided: ({previous_promises}), reference these specific promises in your question + - Focus on concrete accomplishments and completed work + +2. IF you have already asked about the status update: + - Ask if there are any other completed tasks or progress to share + - IF previous promises are provided: ({previous_promises}), and you HAVE NOT referenced some of those yet, ask about these specific promises in your question + - Continue this until the student indicates they have nothing more to share + +STRICT REQUIREMENTS: +1. MUST include exactly one clear question in your response +2. MUST focus ONLY on completed work and current progress +3. 
DO NOT ask about: + - Future plans + - Upcoming tasks + - Challenges or problems + - Next steps + - Goals or intentions + +FORMAT: +- Keep your response brief and focused +- Ask your question directly +- If referencing previous promises, be specific about which ones + +Example responses (NOTE: you do not need to ask these exact questions, but they may help you to understand the concept): +- What have you accomplished on your project this week? +- During our previous session you mentioned plans to (use one of the [{previous_promises}]) - how did that progress? +- Could you share any other progress you've made on the project that you haven't mentioned yet? \ No newline at end of file diff --git a/server/intelligence-service/app/mentor/prompts/mentor/summary.txt b/server/intelligence-service/app/mentor/prompts/mentor/summary.txt new file mode 100644 index 00000000..5cfbe84f --- /dev/null +++ b/server/intelligence-service/app/mentor/prompts/mentor/summary.txt @@ -0,0 +1,25 @@ +You are to perform the following tasks: +- Analyze the message history. Be aware of the message order, it is always one llm message and afterwards the response of the user. +- If there is an LLM generated summary already in the chat and the user has asked to add any additional things, do that in the right formatting and ask if there is anything else which needs to be added. +- Otherwise: + - Summarize the past messages and generate a short (not more than one sentence) summary divided into three groups. You need to create a short bullet point for each of the groups (groups can also be empty, if the student did not provide any information): + 1. STATUS: what has been accomplished during the week, don't add any challenges into it, focus only on the things that were started or done during the sprint, it is a progress indicator. + 2. IMPEDIMENTS: challenges and roadblocks encountered by the students + 3. 
PROMISES: what should be done during the next sprint + - Formatting: Start with word "SUMMARY" and first generate three groups like "STATUS" and the corresponding points, "IMPEDIMENTS" and the corresponding points, "PROMISES" and the corresponding points. Then return "TEXT" and afterwards ask the user if there is anything left they want to add. + - You are not allowed to start the message with any other word than "SUMMARY" + - Do not use any special separators for the bullet points, divide them with a new line. + - DO NOT USE "**" in your response for formatting! + +If you can, use the following emojis for the single bullet points, use them in the beginning of a bullet point if they relate to the sense of it: +✅ Done +🛠️ Work in progress +❌ Not done +⚠️ Generic impediment +🏁 Promise to finish Work in progress +➕ Promise to finish new stuff (except for Thesis/Proposal points) +🧪 Experiment/Research +✏️ Thesis/Proposal + + + \ No newline at end of file diff --git a/server/intelligence-service/app/mentor/prompts/mentor_persona.txt b/server/intelligence-service/app/mentor/prompts/mentor_persona.txt deleted file mode 100644 index d067746f..00000000 --- a/server/intelligence-service/app/mentor/prompts/mentor_persona.txt +++ /dev/null @@ -1,17 +0,0 @@ -You are a friendly, approachable AI mentor focused on helping a student with their software engineering project. Your goal is to provide constructive guidance, support, and encouragement. Follow these rules and guidelines: - -- **Stay On-Topic**: Only answer questions related to the student’s software engineering project. Avoid unrelated topics and general advice that does not support their specific work. - -- **Focus on Guidance**: Offer actionable suggestions. If the student’s explanation is unclear, ask clarifying questions. Help them break down complex issues into manageable steps, and encourage them to think critically about their problem-solving approach. 
- -- **Tone**: Maintain a friendly, supportive, and empathetic demeanor. Keep the conversation casual and encouraging, rather than formal or distant. Show understanding and reassure them when they face challenges. - -- **Personality**: Be positive and motivating. Praise the student’s progress and offer constructive feedback when needed. Support them in reflecting on their decisions and thought processes to improve their project outcomes. - -- **Empathy and Accountability**: Acknowledge any difficulties and provide practical strategies to overcome obstacles. Encourage the student to take responsibility for their learning and project development, while remaining patient and understanding. - -- **Context for the Conversation**: - - If the student is stuck, ask questions to pinpoint their confusion and then suggest targeted steps to move forward. - - If the student is making good progress, recognize their achievements and continue to motivate them. - -**Remember**: Your primary objective is to help the student succeed in their software engineering project. Do not deviate from this focus. 
diff --git a/server/intelligence-service/app/mentor/run.py b/server/intelligence-service/app/mentor/run.py index dfe1dd71..4ae36384 100644 --- a/server/intelligence-service/app/mentor/run.py +++ b/server/intelligence-service/app/mentor/run.py @@ -1,60 +1,104 @@ -from typing_extensions import Annotated, TypedDict -from .prompt_loader import PromptLoader +from psycopg_pool import ConnectionPool from langgraph.graph import START, StateGraph, END -from langgraph.graph.message import add_messages -from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder +from langgraph.checkpoint.postgres import PostgresSaver +from langgraph.store.postgres import PostgresStore +from langchain_core.messages import HumanMessage -from ..model import model +from .state import State +from ..settings import settings -prompt_loader = PromptLoader() -persona_prompt = prompt_loader.get_prompt("mentor_persona") +from .nodes import ( + greet, + ask_impediments, + ask_status, + ask_promises, + ask_summary, + check_state, + finish, + update_memory, + talk_to_mentor, +) +from .conditions import start_router, main_router +connection_kwargs = { + "autocommit": True, + "prepare_threshold": 0, +} -class State(TypedDict): - messages: Annotated[list, add_messages] +graph_builder = StateGraph(State) +graph_builder.add_node("greeting", greet) +graph_builder.add_node("status_node", ask_status) +graph_builder.add_node("impediments_node", ask_impediments) +graph_builder.add_node("promises_node", ask_promises) +graph_builder.add_node("summary_node", ask_summary) +graph_builder.add_node("check_state", check_state) +graph_builder.add_node("finish_node", finish) +graph_builder.add_node("update_memory", update_memory) +graph_builder.add_node("mentor_node", talk_to_mentor) +graph_builder.add_conditional_edges(START, start_router) +graph_builder.add_conditional_edges("check_state", main_router) -def mentor(state: State): - prompt = ChatPromptTemplate( - [ - ("system", persona_prompt), - ( - "system", 
- "You need to guide the student through the set questions regarding their work on the project during the last week (sprint). Your value is the fact, that you help students to reflect on their past progress. Throughout the conversation you need to perform all of the following tasks in the given order: Task 1: Ask the student about the overall progress on the project. Task 2: Ask the student about the challenges faced during the sprint referring to what he said about progress. Task 3: Ask about the plan for the next sprint. You need to understand at which task in the conversation you are from the message history and what is the next task. Please, don't repeat yourself throughout the conversation. Don't perform more then one task at a time. If the user already shared something to a task you can go to the next.", - ), - MessagesPlaceholder("messages"), - ] - ) - chain = prompt | model - return {"messages": [chain.invoke({"messages": state["messages"]})]} +graph_builder.add_edge("greeting", END) +graph_builder.add_edge("status_node", END) +graph_builder.add_edge("impediments_node", END) +graph_builder.add_edge("promises_node", END) +graph_builder.add_edge("summary_node", END) +graph_builder.add_edge("finish_node", "update_memory") +graph_builder.add_edge("update_memory", END) +graph_builder.add_edge("mentor_node", END) -def greeting(state: State): - prompt = ChatPromptTemplate( - [ - ("system", persona_prompt), - ( - "system", - "Greet the user warmly and express excitement about starting today’s session. Keep the greeting friendly and encouraging. 
Mention that you are here to support them and look forward to making progress together.", - ), - ] - ) - chain = prompt | model - return {"messages": [chain.invoke({"messages": state["messages"]})]} +def start_session(last_thread: str, config): + with ConnectionPool( + conninfo=settings.DATABASE_CONNECTION_STRING, + max_size=20, + kwargs=connection_kwargs, + ) as pool: + checkpointer = PostgresSaver(pool) + checkpointer.setup() + with PostgresStore.from_conn_string( + settings.DATABASE_CONNECTION_STRING + ) as store: + store.setup() + graph = graph_builder.compile(checkpointer=checkpointer, store=store) -def isFirstInteraction(state: State): - if len(state["messages"]) == 0: - return "greeting" - return "mentor" + # set the initial state of the graph + result = graph.invoke( + { + "last_thread": last_thread, + "messages": [], + "impediments": False, + "status": False, + "promises": False, + "summary": False, + "finish": False, + "closed": False, + }, + config, + ) + pool.close() + return result -graph_builder = StateGraph(State) -graph_builder.add_node("mentor", mentor) -graph_builder.add_node("greeting", greeting) -graph_builder.add_conditional_edges(START, isFirstInteraction) -graph_builder.add_edge("mentor", END) -graph_builder.add_edge("greeting", END) +def run(message: str, config): + with ConnectionPool( + conninfo=settings.DATABASE_CONNECTION_STRING, + max_size=20, + kwargs=connection_kwargs, + ) as pool: + checkpointer = PostgresSaver(pool) + checkpointer.setup() + + with PostgresStore.from_conn_string( + settings.DATABASE_CONNECTION_STRING + ) as store: + store.setup() + graph = graph_builder.compile(checkpointer=checkpointer, store=store) + # update the state with the new message (the rest of the state is preserved by the checkpointer) + result = graph.invoke({"messages": [HumanMessage(content=message)]}, config) -graph = graph_builder.compile() + pool.close() + return result diff --git a/server/intelligence-service/app/mentor/state.py 
b/server/intelligence-service/app/mentor/state.py new file mode 100644 index 00000000..4ee68827 --- /dev/null +++ b/server/intelligence-service/app/mentor/state.py @@ -0,0 +1,13 @@ +from langgraph.graph.message import add_messages +from typing_extensions import Annotated, TypedDict + + +class State(TypedDict): + last_thread: str # id of the last conversation to integrate long-term memory + messages: Annotated[list, add_messages] + status: bool + impediments: bool + promises: bool + summary: bool + finish: bool # thank the user and say goodbye + closed: bool diff --git a/server/intelligence-service/app/model.py b/server/intelligence-service/app/model.py index 56850677..a1642bd8 100644 --- a/server/intelligence-service/app/model.py +++ b/server/intelligence-service/app/model.py @@ -5,9 +5,6 @@ from .settings import settings -temperature = 0.7 -max_tokens = 4096 - model: BaseChatModel if os.getenv("GITHUB_ACTIONS") == "true": @@ -21,4 +18,8 @@ else: raise EnvironmentError("No LLM available") - model = Model(temperature=temperature, max_tokens=max_tokens) + model = Model( + temperature=settings.MODEL_TEMPERATURE, + max_tokens=settings.MODEL_MAX_TOKENS, + model=settings.MODEL_NAME, + ) diff --git a/server/intelligence-service/app/routers/mentor.py b/server/intelligence-service/app/routers/mentor.py index 8006d52e..a1a00303 100644 --- a/server/intelligence-service/app/routers/mentor.py +++ b/server/intelligence-service/app/routers/mentor.py @@ -1,40 +1,46 @@ from typing import List from fastapi import APIRouter from pydantic import BaseModel +from langchain_core.runnables.config import RunnableConfig +from ..mentor.run import run, start_session -from langchain_core.messages import HumanMessage, AIMessage -from ..mentor.run import graph +router = APIRouter(prefix="/mentor", tags=["mentor"]) -router = APIRouter(prefix="/mentor", tags=["mentor"]) +class MentorStartRequest(BaseModel): + session_id: str + previous_session_id: str -class Message(BaseModel): - sender: str 
+class MentorRequest(BaseModel): + session_id: str content: str -class MessageHistory(BaseModel): - messages: List[Message] +class MentorResponse(BaseModel): + content: str -class MentorMessage(BaseModel): - content: str +@router.post( + "/start", + response_model=MentorResponse, + summary="Start a chat session with an LLM.", +) +def start(request: MentorStartRequest): + config = RunnableConfig({"configurable": {"thread_id": request.session_id}}) + response = start_session(request.previous_session_id, config) + response_message = response["messages"][-1].content + return MentorResponse(content=response_message) @router.post( "/", - response_model=MentorMessage, - summary="Start and continue a chat session with an LLM.", + response_model=MentorResponse, + summary="Continue a chat session with an LLM.", ) -def generate(request: MessageHistory): - messages = [] - for message in request.messages: - if message.content: - if message.sender == "USER": - messages.append(HumanMessage(content=message.content)) - else: - messages.append(AIMessage(content=message.content)) - response_message = graph.invoke({"messages": messages})["messages"][-1].content - return MentorMessage(content=response_message) +def generate(request: MentorRequest): + config = RunnableConfig({"configurable": {"thread_id": request.session_id}}) + response = run(request.content, config) + response_message = response["messages"][-1].content + return MentorResponse(content=response_message) diff --git a/server/intelligence-service/app/settings.py b/server/intelligence-service/app/settings.py index 006ea3a5..d5976093 100644 --- a/server/intelligence-service/app/settings.py +++ b/server/intelligence-service/app/settings.py @@ -7,12 +7,20 @@ class Settings(BaseSettings): model_config = SettingsConfigDict(env_file=".env") + DATABASE_URL: str = "postgresql://localhost:5432/hephaestus" + DATABASE_USERNAME: str = "root" + DATABASE_PASSWORD: str = "root" + OPENAI_API_KEY: str = "" AZURE_OPENAI_API_KEY: str = "" 
AZURE_OPENAI_ENDPOINT: str = "" AZURE_OPENAI_API_VERSION: str = "" + MODEL_NAME: str = "gpt-4o" + MODEL_TEMPERATURE: float = 0.7 + MODEL_MAX_TOKENS: int = 4096 + @property def is_openai_available(self): return bool(self.OPENAI_API_KEY) @@ -25,5 +33,14 @@ def is_azure_openai_available(self): and bool(self.AZURE_OPENAI_API_VERSION) ) + @property + def DATABASE_CONNECTION_STRING(self): + result = ( + f"postgresql://{self.DATABASE_USERNAME}:{self.DATABASE_PASSWORD}@{self.DATABASE_URL.replace('postgresql://', '')}" + + "?sslmode=disable" + ) + print(result) + return result + settings = Settings() diff --git a/server/intelligence-service/openapi.yaml b/server/intelligence-service/openapi.yaml index 85f209d1..65b96424 100644 --- a/server/intelligence-service/openapi.yaml +++ b/server/intelligence-service/openapi.yaml @@ -19,38 +19,40 @@ components: type: string title: HealthCheck type: object - MentorMessage: + MentorRequest: properties: content: title: Content type: string + session_id: + title: Session Id + type: string required: + - session_id - content - title: MentorMessage + title: MentorRequest type: object - Message: + MentorResponse: properties: content: title: Content type: string - sender: - title: Sender - type: string required: - - sender - content - title: Message + title: MentorResponse type: object - MessageHistory: + MentorStartRequest: properties: - messages: - items: - $ref: '#/components/schemas/Message' - title: Messages - type: array + previous_session_id: + title: Previous Session Id + type: string + session_id: + title: Session Id + type: string required: - - messages - title: MessageHistory + - session_id + - previous_session_id + title: MentorStartRequest type: object ValidationError: properties: @@ -108,14 +110,39 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/MessageHistory' + $ref: '#/components/schemas/MentorRequest' + required: true + responses: + '200': + content: + application/json: + schema: + $ref: 
'#/components/schemas/MentorResponse' + description: Successful Response + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + description: Validation Error + summary: Continue a chat session with an LLM. + tags: + - mentor + /mentor/start: + post: + operationId: start_mentor_start_post + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/MentorStartRequest' required: true responses: '200': content: application/json: schema: - $ref: '#/components/schemas/MentorMessage' + $ref: '#/components/schemas/MentorResponse' description: Successful Response '422': content: @@ -123,6 +150,6 @@ paths: schema: $ref: '#/components/schemas/HTTPValidationError' description: Validation Error - summary: Start and continue a chat session with an LLM. + summary: Start a chat session with an LLM. tags: - mentor diff --git a/server/intelligence-service/poetry.lock b/server/intelligence-service/poetry.lock index 2b10e3f3..9688ab9a 100644 --- a/server/intelligence-service/poetry.lock +++ b/server/intelligence-service/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. 
[[package]] name = "aiohappyeyeballs" @@ -154,6 +154,21 @@ doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] trio = ["trio (>=0.26.1)"] +[[package]] +name = "asttokens" +version = "3.0.0" +description = "Annotate AST trees with source code positions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"}, + {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"}, +] + +[package.extras] +astroid = ["astroid (>=2,<4)"] +test = ["astroid (>=2,<4)", "pytest", "pytest-cov", "pytest-xdist"] + [[package]] name = "attrs" version = "24.3.0" @@ -369,6 +384,17 @@ files = [ marshmallow = ">=3.18.0,<4.0.0" typing-inspect = ">=0.4.0,<1" +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + [[package]] name = "distro" version = "1.9.0" @@ -415,6 +441,20 @@ files = [ dnspython = ">=2.0.0" idna = ">=2.0.0" +[[package]] +name = "executing" +version = "2.1.0" +description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = ">=3.8" +files = [ + {file = "executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf"}, + {file = "executing-2.1.0.tar.gz", hash = 
"sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] + [[package]] name = "fastapi" version = "0.115.6" @@ -783,6 +823,61 @@ files = [ [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] +[[package]] +name = "ipython" +version = "8.31.0" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.10" +files = [ + {file = "ipython-8.31.0-py3-none-any.whl", hash = "sha256:46ec58f8d3d076a61d128fe517a51eb730e3aaf0c184ea8c17d16e366660c6a6"}, + {file = "ipython-8.31.0.tar.gz", hash = "sha256:b6a2274606bec6166405ff05e54932ed6e5cfecaca1fc05f2cacde7bb074d70b"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} +prompt_toolkit = ">=3.0.41,<3.1.0" +pygments = ">=2.4.0" +stack_data = "*" +traitlets = ">=5.13.0" + +[package.extras] +all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] +black = ["black"] +doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing_extensions"] +kernel = ["ipykernel"] +matplotlib = ["matplotlib"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] + +[[package]] +name = "jedi" +version = 
"0.19.2" +description = "An autocompletion tool for Python that can be used for text editors." +optional = false +python-versions = ">=3.6" +files = [ + {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, + {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, +] + +[package.dependencies] +parso = ">=0.8.4,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<9.0.0)"] + [[package]] name = "jinja2" version = "3.1.5" @@ -912,20 +1007,20 @@ files = [ [[package]] name = "langchain" -version = "0.3.14" +version = "0.3.10" description = "Building applications with LLMs through composability" optional = false python-versions = "<4.0,>=3.9" files = [ - {file = "langchain-0.3.14-py3-none-any.whl", hash = "sha256:5df9031702f7fe6c956e84256b4639a46d5d03a75be1ca4c1bc9479b358061a2"}, - {file = "langchain-0.3.14.tar.gz", hash = "sha256:4a5ae817b5832fa0e1fcadc5353fbf74bebd2f8e550294d4dc039f651ddcd3d1"}, + {file = "langchain-0.3.10-py3-none-any.whl", hash = "sha256:4ae38d4c9f9ec5cd1a16a505b451a57c0aab2807d6c9f8cb5b346d06301b2232"}, + {file = "langchain-0.3.10.tar.gz", hash = 
"sha256:aef0f9bdaf4a4d3d50aec348438135987bda1d83070b49f77032f561d3a761d8"}, ] [package.dependencies] aiohttp = ">=3.8.3,<4.0.0" -langchain-core = ">=0.3.29,<0.4.0" -langchain-text-splitters = ">=0.3.3,<0.4.0" -langsmith = ">=0.1.17,<0.3" +langchain-core = ">=0.3.22,<0.4.0" +langchain-text-splitters = ">=0.3.0,<0.4.0" +langsmith = ">=0.1.17,<0.2.0" numpy = {version = ">=1.26.2,<3", markers = "python_version >= \"3.12\""} pydantic = ">=2.7.4,<3.0.0" PyYAML = ">=5.3" @@ -935,22 +1030,22 @@ tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<10" [[package]] name = "langchain-community" -version = "0.3.14" +version = "0.3.10" description = "Community contributed LangChain integrations." optional = false python-versions = "<4.0,>=3.9" files = [ - {file = "langchain_community-0.3.14-py3-none-any.whl", hash = "sha256:cc02a0abad0551edef3e565dff643386a5b2ee45b933b6d883d4a935b9649f3c"}, - {file = "langchain_community-0.3.14.tar.gz", hash = "sha256:d8ba0fe2dbb5795bff707684b712baa5ee379227194610af415ccdfdefda0479"}, + {file = "langchain_community-0.3.10-py3-none-any.whl", hash = "sha256:f718de973f60c6d0f10c71321e461cf41251cc74543f064b7b2ee7ae06b9a43f"}, + {file = "langchain_community-0.3.10.tar.gz", hash = "sha256:f503e90cbb44ddb14afb141552a93fd9fbd0b216407315a6608f901861a938f9"}, ] [package.dependencies] aiohttp = ">=3.8.3,<4.0.0" dataclasses-json = ">=0.5.7,<0.7" httpx-sse = ">=0.4.0,<0.5.0" -langchain = ">=0.3.14,<0.4.0" -langchain-core = ">=0.3.29,<0.4.0" -langsmith = ">=0.1.125,<0.3" +langchain = ">=0.3.10,<0.4.0" +langchain-core = ">=0.3.22,<0.4.0" +langsmith = ">=0.1.125,<0.2.0" numpy = {version = ">=1.26.2,<3", markers = "python_version >= \"3.12\""} pydantic-settings = ">=2.4.0,<3.0.0" PyYAML = ">=5.3" @@ -983,20 +1078,39 @@ typing-extensions = ">=4.7" [[package]] name = "langchain-openai" -version = "0.3.1" +version = "0.2.11" description = "An integration package connecting OpenAI and LangChain" optional = false python-versions = "<4.0,>=3.9" files = [ - {file = 
"langchain_openai-0.3.1-py3-none-any.whl", hash = "sha256:5cf2a1e115b12570158d89c22832fa381803c3e1e11d1eb781195c8d9e454bd5"}, - {file = "langchain_openai-0.3.1.tar.gz", hash = "sha256:cce314f1437b2cad73e0ed2b55e74dc399bc1bbc43594c4448912fb51c5e4447"}, + {file = "langchain_openai-0.2.11-py3-none-any.whl", hash = "sha256:c019ae915a5782943bee9503388e65c8622d400e0451ef885f3e4989cf35727f"}, + {file = "langchain_openai-0.2.11.tar.gz", hash = "sha256:563bd843092d260c7ffd88b8e0e6b830f36347e058e62a6d5e9cc4c461a8da98"}, ] [package.dependencies] -langchain-core = ">=0.3.30,<0.4.0" -openai = ">=1.58.1,<2.0.0" +langchain-core = ">=0.3.21,<0.4.0" +openai = ">=1.54.0,<2.0.0" tiktoken = ">=0.7,<1" +[[package]] +name = "langchain-postgres" +version = "0.0.12" +description = "An integration package connecting Postgres and LangChain" +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "langchain_postgres-0.0.12-py3-none-any.whl", hash = "sha256:b3a8e8fa318ecea1874675ae2bed08fc320e7d4c4f65e016f95808d1164dad51"}, + {file = "langchain_postgres-0.0.12.tar.gz", hash = "sha256:fe44c8073345463720355c86b676c56fc867d5c0995066353f60f2a5d01d0d0d"}, +] + +[package.dependencies] +langchain-core = ">=0.2.13,<0.4.0" +numpy = ">=1,<2" +pgvector = ">=0.2.5,<0.3.0" +psycopg = ">=3,<4" +psycopg-pool = ">=3.2.1,<4.0.0" +sqlalchemy = ">=2,<3" + [[package]] name = "langchain-text-splitters" version = "0.3.5" @@ -1013,18 +1127,18 @@ langchain-core = ">=0.3.29,<0.4.0" [[package]] name = "langgraph" -version = "0.2.64" +version = "0.2.56" description = "Building stateful, multi-actor applications with LLMs" optional = false python-versions = "<4.0,>=3.9.0" files = [ - {file = "langgraph-0.2.64-py3-none-any.whl", hash = "sha256:951f1b7a29708daa551cea72b7caadec4cc49ee5cd70add1a45015d1d074b0ae"}, - {file = "langgraph-0.2.64.tar.gz", hash = "sha256:b5b030fedd41b02462bf6af88bad8ca7d6ae8ff35940bcbd9f7ed2c7e6c34950"}, + {file = "langgraph-0.2.56-py3-none-any.whl", hash = 
"sha256:ad8a4b772e34dc0137e890bb6ced596a39a1e684af66250c1e7c8150dbe90e9c"}, + {file = "langgraph-0.2.56.tar.gz", hash = "sha256:af10b1ffd10d52fd4072a73f154b8c2513c0b22e5bd5d20f4567dfeecab98d1e"}, ] [package.dependencies] -langchain-core = ">=0.2.43,<0.3.0 || >0.3.0,<0.3.1 || >0.3.1,<0.3.2 || >0.3.2,<0.3.3 || >0.3.3,<0.3.4 || >0.3.4,<0.3.5 || >0.3.5,<0.3.6 || >0.3.6,<0.3.7 || >0.3.7,<0.3.8 || >0.3.8,<0.3.9 || >0.3.9,<0.3.10 || >0.3.10,<0.3.11 || >0.3.11,<0.3.12 || >0.3.12,<0.3.13 || >0.3.13,<0.3.14 || >0.3.14,<0.3.15 || >0.3.15,<0.3.16 || >0.3.16,<0.3.17 || >0.3.17,<0.3.18 || >0.3.18,<0.3.19 || >0.3.19,<0.3.20 || >0.3.20,<0.3.21 || >0.3.21,<0.3.22 || >0.3.22,<0.4.0" -langgraph-checkpoint = ">=2.0.10,<3.0.0" +langchain-core = ">=0.2.43,<0.3.0 || >0.3.0,<0.3.1 || >0.3.1,<0.3.2 || >0.3.2,<0.3.3 || >0.3.3,<0.3.4 || >0.3.4,<0.3.5 || >0.3.5,<0.3.6 || >0.3.6,<0.3.7 || >0.3.7,<0.3.8 || >0.3.8,<0.3.9 || >0.3.9,<0.3.10 || >0.3.10,<0.3.11 || >0.3.11,<0.3.12 || >0.3.12,<0.3.13 || >0.3.13,<0.3.14 || >0.3.14,<0.4.0" +langgraph-checkpoint = ">=2.0.4,<3.0.0" langgraph-sdk = ">=0.1.42,<0.2.0" [[package]] @@ -1042,6 +1156,23 @@ files = [ langchain-core = ">=0.2.38,<0.4" msgpack = ">=1.1.0,<2.0.0" +[[package]] +name = "langgraph-checkpoint-postgres" +version = "2.0.9" +description = "Library with a Postgres implementation of LangGraph checkpoint saver." 
+optional = false +python-versions = "<4.0.0,>=3.9.0" +files = [ + {file = "langgraph_checkpoint_postgres-2.0.9-py3-none-any.whl", hash = "sha256:1ee203b03abe168b91fe97753bf759580bd63f5664017cb0893d4e8da0c4e971"}, + {file = "langgraph_checkpoint_postgres-2.0.9.tar.gz", hash = "sha256:eb6a3ff76f4b0700c7522d90a5f3e0cd8bc878dea06768af1b181071cce075ce"}, +] + +[package.dependencies] +langgraph-checkpoint = ">=2.0.7,<3.0.0" +orjson = ">=3.10.1" +psycopg = ">=3.2.0,<4.0.0" +psycopg-pool = ">=3.2.0,<4.0.0" + [[package]] name = "langgraph-sdk" version = "0.1.51" @@ -1059,13 +1190,13 @@ orjson = ">=3.10.1" [[package]] name = "langsmith" -version = "0.2.11" +version = "0.1.147" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false -python-versions = "<4.0,>=3.9" +python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.2.11-py3-none-any.whl", hash = "sha256:084cf66a7f093c25e6b30fb4005008ec5fa9843110e2f0b265ce133c6a0225e6"}, - {file = "langsmith-0.2.11.tar.gz", hash = "sha256:edf070349dbfc63dc4fc30e22533a11d77768e99ef269399b221c48fee25c737"}, + {file = "langsmith-0.1.147-py3-none-any.whl", hash = "sha256:7166fc23b965ccf839d64945a78e9f1157757add228b086141eb03a60d699a15"}, + {file = "langsmith-0.1.147.tar.gz", hash = "sha256:2e933220318a4e73034657103b3b1a3a6109cc5db3566a7e8e03be8d6d7def7a"}, ] [package.dependencies] @@ -1079,7 +1210,6 @@ requests = ">=2,<3" requests-toolbelt = ">=1.0.0,<2.0.0" [package.extras] -compression = ["zstandard (>=0.23.0,<0.24.0)"] langsmith-pyo3 = ["langsmith-pyo3 (>=0.1.0rc2,<0.2.0)"] [[package]] @@ -1195,6 +1325,20 @@ dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"] docs = ["autodocsumm (==0.2.14)", "furo (==2024.8.6)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "sphinxext-opengraph (==0.9.1)"] tests = ["pytest", "simplejson"] +[[package]] +name = "matplotlib-inline" +version = "0.1.7" +description = "Inline Matplotlib backend for 
Jupyter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, + {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, +] + +[package.dependencies] +traitlets = "*" + [[package]] name = "mdurl" version = "0.1.2" @@ -1393,66 +1537,47 @@ files = [ [[package]] name = "numpy" -version = "2.2.2" +version = "1.26.4" description = "Fundamental package for array computing in Python" optional = false -python-versions = ">=3.10" +python-versions = ">=3.9" files = [ - {file = "numpy-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7079129b64cb78bdc8d611d1fd7e8002c0a2565da6a47c4df8062349fee90e3e"}, - {file = "numpy-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ec6c689c61df613b783aeb21f945c4cbe6c51c28cb70aae8430577ab39f163e"}, - {file = "numpy-2.2.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:40c7ff5da22cd391944a28c6a9c638a5eef77fcf71d6e3a79e1d9d9e82752715"}, - {file = "numpy-2.2.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:995f9e8181723852ca458e22de5d9b7d3ba4da3f11cc1cb113f093b271d7965a"}, - {file = "numpy-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b78ea78450fd96a498f50ee096f69c75379af5138f7881a51355ab0e11286c97"}, - {file = "numpy-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fbe72d347fbc59f94124125e73fc4976a06927ebc503ec5afbfb35f193cd957"}, - {file = "numpy-2.2.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8e6da5cffbbe571f93588f562ed130ea63ee206d12851b60819512dd3e1ba50d"}, - {file = "numpy-2.2.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:09d6a2032faf25e8d0cadde7fd6145118ac55d2740132c1d845f98721b5ebcfd"}, - {file = "numpy-2.2.2-cp310-cp310-win32.whl", hash = "sha256:159ff6ee4c4a36a23fe01b7c3d07bd8c14cc433d9720f977fcd52c13c0098160"}, - 
{file = "numpy-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:64bd6e1762cd7f0986a740fee4dff927b9ec2c5e4d9a28d056eb17d332158014"}, - {file = "numpy-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:642199e98af1bd2b6aeb8ecf726972d238c9877b0f6e8221ee5ab945ec8a2189"}, - {file = "numpy-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6d9fc9d812c81e6168b6d405bf00b8d6739a7f72ef22a9214c4241e0dc70b323"}, - {file = "numpy-2.2.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:c7d1fd447e33ee20c1f33f2c8e6634211124a9aabde3c617687d8b739aa69eac"}, - {file = "numpy-2.2.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:451e854cfae0febe723077bd0cf0a4302a5d84ff25f0bfece8f29206c7bed02e"}, - {file = "numpy-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd249bc894af67cbd8bad2c22e7cbcd46cf87ddfca1f1289d1e7e54868cc785c"}, - {file = "numpy-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02935e2c3c0c6cbe9c7955a8efa8908dd4221d7755644c59d1bba28b94fd334f"}, - {file = "numpy-2.2.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a972cec723e0563aa0823ee2ab1df0cb196ed0778f173b381c871a03719d4826"}, - {file = "numpy-2.2.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d6d6a0910c3b4368d89dde073e630882cdb266755565155bc33520283b2d9df8"}, - {file = "numpy-2.2.2-cp311-cp311-win32.whl", hash = "sha256:860fd59990c37c3ef913c3ae390b3929d005243acca1a86facb0773e2d8d9e50"}, - {file = "numpy-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:da1eeb460ecce8d5b8608826595c777728cdf28ce7b5a5a8c8ac8d949beadcf2"}, - {file = "numpy-2.2.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ac9bea18d6d58a995fac1b2cb4488e17eceeac413af014b1dd26170b766d8467"}, - {file = "numpy-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23ae9f0c2d889b7b2d88a3791f6c09e2ef827c2446f1c4a3e3e76328ee4afd9a"}, - {file = "numpy-2.2.2-cp312-cp312-macosx_14_0_arm64.whl", hash = 
"sha256:3074634ea4d6df66be04f6728ee1d173cfded75d002c75fac79503a880bf3825"}, - {file = "numpy-2.2.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:8ec0636d3f7d68520afc6ac2dc4b8341ddb725039de042faf0e311599f54eb37"}, - {file = "numpy-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ffbb1acd69fdf8e89dd60ef6182ca90a743620957afb7066385a7bbe88dc748"}, - {file = "numpy-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0349b025e15ea9d05c3d63f9657707a4e1d471128a3b1d876c095f328f8ff7f0"}, - {file = "numpy-2.2.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:463247edcee4a5537841d5350bc87fe8e92d7dd0e8c71c995d2c6eecb8208278"}, - {file = "numpy-2.2.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9dd47ff0cb2a656ad69c38da850df3454da88ee9a6fde0ba79acceee0e79daba"}, - {file = "numpy-2.2.2-cp312-cp312-win32.whl", hash = "sha256:4525b88c11906d5ab1b0ec1f290996c0020dd318af8b49acaa46f198b1ffc283"}, - {file = "numpy-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:5acea83b801e98541619af398cc0109ff48016955cc0818f478ee9ef1c5c3dcb"}, - {file = "numpy-2.2.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b208cfd4f5fe34e1535c08983a1a6803fdbc7a1e86cf13dd0c61de0b51a0aadc"}, - {file = "numpy-2.2.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d0bbe7dd86dca64854f4b6ce2ea5c60b51e36dfd597300057cf473d3615f2369"}, - {file = "numpy-2.2.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:22ea3bb552ade325530e72a0c557cdf2dea8914d3a5e1fecf58fa5dbcc6f43cd"}, - {file = "numpy-2.2.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:128c41c085cab8a85dc29e66ed88c05613dccf6bc28b3866cd16050a2f5448be"}, - {file = "numpy-2.2.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:250c16b277e3b809ac20d1f590716597481061b514223c7badb7a0f9993c7f84"}, - {file = "numpy-2.2.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e0c8854b09bc4de7b041148d8550d3bd712b5c21ff6a8ed308085f190235d7ff"}, - {file = "numpy-2.2.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b6fb9c32a91ec32a689ec6410def76443e3c750e7cfc3fb2206b985ffb2b85f0"}, - {file = "numpy-2.2.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:57b4012e04cc12b78590a334907e01b3a85efb2107df2b8733ff1ed05fce71de"}, - {file = "numpy-2.2.2-cp313-cp313-win32.whl", hash = "sha256:4dbd80e453bd34bd003b16bd802fac70ad76bd463f81f0c518d1245b1c55e3d9"}, - {file = "numpy-2.2.2-cp313-cp313-win_amd64.whl", hash = "sha256:5a8c863ceacae696aff37d1fd636121f1a512117652e5dfb86031c8d84836369"}, - {file = "numpy-2.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:b3482cb7b3325faa5f6bc179649406058253d91ceda359c104dac0ad320e1391"}, - {file = "numpy-2.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9491100aba630910489c1d0158034e1c9a6546f0b1340f716d522dc103788e39"}, - {file = "numpy-2.2.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:41184c416143defa34cc8eb9d070b0a5ba4f13a0fa96a709e20584638254b317"}, - {file = "numpy-2.2.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:7dca87ca328f5ea7dafc907c5ec100d187911f94825f8700caac0b3f4c384b49"}, - {file = "numpy-2.2.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bc61b307655d1a7f9f4b043628b9f2b721e80839914ede634e3d485913e1fb2"}, - {file = "numpy-2.2.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fad446ad0bc886855ddf5909cbf8cb5d0faa637aaa6277fb4b19ade134ab3c7"}, - {file = "numpy-2.2.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:149d1113ac15005652e8d0d3f6fd599360e1a708a4f98e43c9c77834a28238cb"}, - {file = "numpy-2.2.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:106397dbbb1896f99e044efc90360d098b3335060375c26aa89c0d8a97c5f648"}, - {file = "numpy-2.2.2-cp313-cp313t-win32.whl", hash = "sha256:0eec19f8af947a61e968d5429f0bd92fec46d92b0008d0a6685b40d6adf8a4f4"}, - {file = 
"numpy-2.2.2-cp313-cp313t-win_amd64.whl", hash = "sha256:97b974d3ba0fb4612b77ed35d7627490e8e3dff56ab41454d9e8b23448940576"}, - {file = "numpy-2.2.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b0531f0b0e07643eb089df4c509d30d72c9ef40defa53e41363eca8a8cc61495"}, - {file = "numpy-2.2.2-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:e9e82dcb3f2ebbc8cb5ce1102d5f1c5ed236bf8a11730fb45ba82e2841ec21df"}, - {file = "numpy-2.2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0d4142eb40ca6f94539e4db929410f2a46052a0fe7a2c1c59f6179c39938d2a"}, - {file = "numpy-2.2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:356ca982c188acbfa6af0d694284d8cf20e95b1c3d0aefa8929376fea9146f60"}, - {file = "numpy-2.2.2.tar.gz", hash = "sha256:ed6906f61834d687738d25988ae117683705636936cc605be0bb208b23df4d8f"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = 
"sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = 
"numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] [[package]] @@ -1579,6 +1704,21 @@ files = [ {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] +[[package]] +name = "parso" +version = "0.8.4" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +files = [ + {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, + {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, +] + +[package.extras] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["docopt", "pytest"] + [[package]] name = "pathspec" version = "0.12.1" @@ -1590,6 +1730,33 @@ files = [ {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] +[[package]] +name = "pexpect" +version = "4.9.0" +description = "Pexpect allows easy control of interactive console applications." 
+optional = false +python-versions = "*" +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pgvector" +version = "0.2.5" +description = "pgvector support for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pgvector-0.2.5-py2.py3-none-any.whl", hash = "sha256:5e5e93ec4d3c45ab1fa388729d56c602f6966296e19deee8878928c6d567e41b"}, +] + +[package.dependencies] +numpy = "*" + [[package]] name = "platformdirs" version = "4.3.6" @@ -1606,6 +1773,20 @@ docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-a test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] type = ["mypy (>=1.11.2)"] +[[package]] +name = "prompt-toolkit" +version = "3.0.48" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e"}, + {file = "prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90"}, +] + +[package.dependencies] +wcwidth = "*" + [[package]] name = "propcache" version = "0.2.1" @@ -1697,6 +1878,154 @@ files = [ {file = "propcache-0.2.1.tar.gz", hash = "sha256:3f77ce728b19cb537714499928fe800c3dda29e8d9428778fc7c186da4c09a64"}, ] +[[package]] +name = "psycopg" +version = "3.2.4" +description = "PostgreSQL database adapter for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "psycopg-3.2.4-py3-none-any.whl", hash = "sha256:43665368ccd48180744cab26b74332f46b63b7e06e8ce0775547a3533883d381"}, + {file = 
"psycopg-3.2.4.tar.gz", hash = "sha256:f26f1346d6bf1ef5f5ef1714dd405c67fb365cfd1c6cea07de1792747b167b92"}, +] + +[package.dependencies] +psycopg-binary = {version = "3.2.4", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} +psycopg-c = {version = "3.2.4", optional = true, markers = "implementation_name != \"pypy\" and extra == \"c\""} +typing-extensions = {version = ">=4.6", markers = "python_version < \"3.13\""} +tzdata = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +binary = ["psycopg-binary (==3.2.4)"] +c = ["psycopg-c (==3.2.4)"] +dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.14)", "pre-commit (>=4.0.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] +docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] +pool = ["psycopg-pool"] +test = ["anyio (>=4.0)", "mypy (>=1.14)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] + +[[package]] +name = "psycopg-binary" +version = "3.2.4" +description = "PostgreSQL database adapter for Python -- C optimisation distribution" +optional = false +python-versions = ">=3.8" +files = [ + {file = "psycopg_binary-3.2.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c716f75b5c0388fc5283b5124046292c727511dd8c6aa59ca2dc644b9a2ed0cd"}, + {file = "psycopg_binary-3.2.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e2e8050347018f596a63f5dccbb92fb68bca52b13912cb8fc40184b24c0e534f"}, + {file = "psycopg_binary-3.2.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04171f9af9ab567c0fd339bac06f2c75836db839cebac5bd07824778dafa7f0e"}, + {file = "psycopg_binary-3.2.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7ba7b2ff25a6405826f627fb7d0f1e06e5c08ae25ffabc74a5e9ec7b0a63b85"}, + {file = 
"psycopg_binary-3.2.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e58eeba520d405b2ad72dffaafd04d0b592bef870e718bf37c261e89a75450a"}, + {file = "psycopg_binary-3.2.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb18cfbb1cfc8172786ceefd314f0faa05c40ea93b3db7194d0f6bbbbfedb42a"}, + {file = "psycopg_binary-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:769804b4f753ddec9403183a6d4577d5b696fc49c2451421013fb06d6fa2f288"}, + {file = "psycopg_binary-3.2.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:7d4f0c9b01eb933ce35bb32a54205f48d7bc36bf455565afe269cabcb7973955"}, + {file = "psycopg_binary-3.2.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:26aed7ff8691ba810de95718d3bc81a43fd48a4036c3641ef711eb5f71fc7106"}, + {file = "psycopg_binary-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8a4b65eaf44dfed0b47e6ebd392e88cd3cff62ea11652d92db6fefeb2608ed25"}, + {file = "psycopg_binary-3.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9fa48a2dc54c4e906d7dd781031d227d1b13966deff7e5ece5b037588643190"}, + {file = "psycopg_binary-3.2.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d092b0aa80b8c3ee0701a7252cbfb0bdb742e1f74aaf0c1a13ef22c05c9266ab"}, + {file = "psycopg_binary-3.2.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3955381dacc6d15f3838d5f25445ee99f80882876a163f8de0c01ffc54aeef4a"}, + {file = "psycopg_binary-3.2.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04144d1963aa3309247980f1a742b98e15f60d68ea9745143c433f99aaeb70d7"}, + {file = "psycopg_binary-3.2.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eac61931bc90c1c6fdc648452894d3a434a005ffefaf12819b4709548c894bf2"}, + {file = "psycopg_binary-3.2.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c09b765960480c4586758a3c16f0ee0db6f7e2f31c88cccb5e7d7024215468cd"}, + {file = 
"psycopg_binary-3.2.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:220de8efcc276e42ba7cc7ed613145b1274b6b5de321a1396fb6b6ce1758d34c"}, + {file = "psycopg_binary-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b558d3de315d18819ce477908e27518cbdd3275717c6193b58dde36f0443e167"}, + {file = "psycopg_binary-3.2.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3b4c9b9a112d43533f7dbdedbb1188107d4ddcd262e2a2af41b4de0caf7d053"}, + {file = "psycopg_binary-3.2.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:870df866f789bb641a350897c1751c293b9420f46be4eb366d190ff5f2f2ffd8"}, + {file = "psycopg_binary-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:89506e268fb95428fb0f8f7abe48032e66cf47390469e11a4fe989f7407a5d88"}, + {file = "psycopg_binary-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:7ddf1494cc3bf60761c01265c44dfc7a7fd63f21308c403c14f5dd91702df84d"}, + {file = "psycopg_binary-3.2.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3ac24b3d421127ebe8662eba2c1e149a12f0f5b6795e66c1811a3f59111456bb"}, + {file = "psycopg_binary-3.2.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f702f36204127984dd212eb57bb328676abdfe8a56f179e408a806d5e520aa11"}, + {file = "psycopg_binary-3.2.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:610cd2013ee0849154fcff34b0cca17f720c91c7430ca094a61f1e5ff1d38e15"}, + {file = "psycopg_binary-3.2.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95da59edd95f6b6488799c9710fafc2d5750e3ec6328ec991f7a9be04efe6886"}, + {file = "psycopg_binary-3.2.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b71e98e3186f08473962e1ea4bfbc4387ecc398644b794cb112ad0a4276e3789"}, + {file = "psycopg_binary-3.2.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ccf4f71c3a0d46bc74207bf7997f010a6586414161dd10f3dd026ec059942ef"}, + {file = 
"psycopg_binary-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:244e1dd33b694792b7bc7a3d412a535ba39116218b07d8936b4591567f4121e9"}, + {file = "psycopg_binary-3.2.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f8dc8f4de5130c6278dd5e34b18ad8324a74658a7adb72d4e67ca97f9aeaaf3c"}, + {file = "psycopg_binary-3.2.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c336e58a48061a9189d3ba8c19f00fe5d9570219e6f7f954b923ad5c33e5bc71"}, + {file = "psycopg_binary-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9633c5dc6796d11766d2475e62335b67e5f99f119f40ba1675c1d23208d7709d"}, + {file = "psycopg_binary-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:295c25e56b430d786a475c5c2cef266b0b27c0a6fcaadf9d83a4cdcfb76f971f"}, + {file = "psycopg_binary-3.2.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:81ab801c0d35830c876bf0d1edc8e7dd2f73aa2b04fe24eb812159c0b054d149"}, + {file = "psycopg_binary-3.2.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c09e02ce1124eb6638b3381df050a8cf88aedfad4522f939945cda49050a990c"}, + {file = "psycopg_binary-3.2.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a249cdc6a5c2b5088a8677acba66b291e5237524739ab3d27498e1ef189312f5"}, + {file = "psycopg_binary-3.2.4-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2960ba8a5c0ad75e184f6d8bf76bdf023708999efe75fe4e13445136c1cd206"}, + {file = "psycopg_binary-3.2.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dae2e50b0d3425c167eebbedc3553f7c811dbc0dbfc737b6877f68a03be7daf"}, + {file = "psycopg_binary-3.2.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03bf7ee7e0002c2cce43ecb923ec510358056eb2e44a96afaeb0424518f35206"}, + {file = "psycopg_binary-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5f5c85eeb63b1a8a6b026eef57f5da36ff215ce9a6a3bb8e20a409670d6cfbda"}, + {file = "psycopg_binary-3.2.4-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:8c7b95899d4d6d23c5cc46cb3419e8e6ca68d867509432ee1487042564a1ea55"}, + {file = "psycopg_binary-3.2.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fa4acea9ca20a567c3872a5afab2084751530bb57b8fb6b52820d5c54e7c8c3b"}, + {file = "psycopg_binary-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5c487f35a1905bb15da927c1fc05f70f3d29f0e21fb4ba21d360a0da9c755f20"}, + {file = "psycopg_binary-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:80297c3a9f7b5a6afdb0d8f220661ccd796e5c9128c44b32c41267f7daefd37f"}, + {file = "psycopg_binary-3.2.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:22cf23d037310ae08feceea5e24f727b1ef816867188dbec2edde2e7237b0004"}, + {file = "psycopg_binary-3.2.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3409151b91df85ef99a72d137aba289e1d7b5d4ac7750b37183674421903e04"}, + {file = "psycopg_binary-3.2.4-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1145c3c038e6dbe7127309cc9bbe209bce5743f9f02a2a65c4f9478bd794598e"}, + {file = "psycopg_binary-3.2.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21d369bac7606157ef2699a0ff65c8d43d274f0178fd03241babb5f86b7586f7"}, + {file = "psycopg_binary-3.2.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:158fa0dbda433e0069bd2b6ffdf357c9fcdb84ec5e3b353fb8206636873b54f9"}, + {file = "psycopg_binary-3.2.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4a56b55072a3e0629e6421a7f6fdd4eecc0eba4e9cedaaf2e7578ac62c336680"}, + {file = "psycopg_binary-3.2.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:d993ecfa7f2ac30108d57e7418732d70aa399ccb4a8ca1cf415638679fb32e8b"}, + {file = "psycopg_binary-3.2.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:a5b68ba52bdf3ed86a8a1f1ac809ecd775ffd7bb611438d3ab9e1ee572742f95"}, + {file = "psycopg_binary-3.2.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:90423ff7a0c1f4001b8d54e6c7866f5bbb778f3f4272a70a7926878fe7d8763c"}, + {file = 
"psycopg_binary-3.2.4-cp38-cp38-win_amd64.whl", hash = "sha256:5a462bdd427330418fa2a011b6494103edd94cacd4f5b00e598bcbd1c8d20fb9"}, + {file = "psycopg_binary-3.2.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2ddec5deed4c93a1bd73f210bed6dadbabc470ac1f9ebf55fa260e48396fd61f"}, + {file = "psycopg_binary-3.2.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8bd54787d894261ff48d5c4b7f23e281c05c9a5ac67355eff7d29cfbcde640cd"}, + {file = "psycopg_binary-3.2.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ae8cf8694d01788be5f418f6cada813e2b86cef67efba9c60cb9371cee9eb9"}, + {file = "psycopg_binary-3.2.4-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0958dd3bfffbdef86594a6fa45255d4389ade94d17572bdf5207a900166a3cba"}, + {file = "psycopg_binary-3.2.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b9558f9d101907e412ea12c355e8989c811d382d893ba6a541c091e6d916164"}, + {file = "psycopg_binary-3.2.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:279faafe9a4cdaeeee7844c19cccb865328bd55a2bf4012fef8d7040223a5245"}, + {file = "psycopg_binary-3.2.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:196d8426a9220d29c118eec6074034648267c176d220cb42c49b3c9c396f0dbc"}, + {file = "psycopg_binary-3.2.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:166e68b1e42862b18570d636a7b615630552daeab8b129083aa094f848be64b0"}, + {file = "psycopg_binary-3.2.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b84c3f51969d33266640c218ad5bb5f8487e6a991db7a95b2c3c46fbda37a77c"}, + {file = "psycopg_binary-3.2.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:501113e4d84887c03f83c7d8886c0744fe088fd6b633b919ebf7af4f0f7186be"}, + {file = "psycopg_binary-3.2.4-cp39-cp39-win_amd64.whl", hash = "sha256:e889fe21c578c6c533c8550e1b3ba5d2cc5d151890458fa5fbfc2ca3b2324cfa"}, +] + +[[package]] +name = "psycopg-c" +version = "3.2.4" +description = "PostgreSQL database adapter for Python -- C 
optimisation distribution" +optional = false +python-versions = ">=3.8" +files = [ + {file = "psycopg_c-3.2.4.tar.gz", hash = "sha256:22097a04263efb2efd2cc8b00a51fa90e23f9cd4a2e09903fe4d9c6923dac17a"}, +] + +[[package]] +name = "psycopg-pool" +version = "3.2.4" +description = "Connection Pool for Psycopg" +optional = false +python-versions = ">=3.8" +files = [ + {file = "psycopg_pool-3.2.4-py3-none-any.whl", hash = "sha256:f6a22cff0f21f06d72fb2f5cb48c618946777c49385358e0c88d062c59cbd224"}, + {file = "psycopg_pool-3.2.4.tar.gz", hash = "sha256:61774b5bbf23e8d22bedc7504707135aaf744679f8ef9b3fe29942920746a6ed"}, +] + +[package.dependencies] +typing-extensions = ">=4.6" + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.3" +description = "Safely evaluate AST nodes without side effects" +optional = false +python-versions = "*" +files = [ + {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, + {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, +] + +[package.extras] +tests = ["pytest"] + [[package]] name = "pydantic" version = "2.10.5" @@ -1831,13 +2160,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pydantic-settings" -version = "2.7.1" +version = "2.6.1" description = "Settings management using Pydantic" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_settings-2.7.1-py3-none-any.whl", hash = "sha256:590be9e6e24d06db33a4262829edef682500ef008565a969c73d39d5f8bfb3fd"}, - 
{file = "pydantic_settings-2.7.1.tar.gz", hash = "sha256:10c9caad35e64bfb3c2fbf70a078c0e25cc92499782e5200747f942a065dec93"}, + {file = "pydantic_settings-2.6.1-py3-none-any.whl", hash = "sha256:7fb0637c786a558d3103436278a7c4f1cfd29ba8973238a50c5bb9a55387da87"}, + {file = "pydantic_settings-2.6.1.tar.gz", hash = "sha256:e0f92546d8a9923cb8941689abf85d6601a8c19a23e97a34b2964a2e3f813ca0"}, ] [package.dependencies] @@ -2239,6 +2568,25 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] sqlcipher = ["sqlcipher3_binary"] +[[package]] +name = "stack-data" +version = "0.6.3" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + [[package]] name = "starlette" version = "0.41.3" @@ -2339,6 +2687,21 @@ notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] +[[package]] +name = "traitlets" +version = "5.14.3" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.8" +files = [ + {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, + {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] + [[package]] name = "typer" version = 
"0.15.1" @@ -2382,6 +2745,17 @@ files = [ mypy-extensions = ">=0.3.0" typing-extensions = ">=3.7.4" +[[package]] +name = "tzdata" +version = "2024.2" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, +] + [[package]] name = "urllib3" version = "2.3.0" @@ -2558,6 +2932,17 @@ files = [ [package.dependencies] anyio = ">=3.0.0" +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + [[package]] name = "websockets" version = "14.2" @@ -2735,4 +3120,4 @@ propcache = ">=0.2.0" [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "a109d8116f90cf98284f467152a28842d099e183e9d543035c42d46c055c1746" +content-hash = "86bde150d2bab540793886a3e997c7b32e0396f54457241446ed6a2f03781f9d" diff --git a/server/intelligence-service/pyproject.toml b/server/intelligence-service/pyproject.toml index c99c5f77..492d7a89 100644 --- a/server/intelligence-service/pyproject.toml +++ b/server/intelligence-service/pyproject.toml @@ -10,12 +10,16 @@ package-mode = false [tool.poetry.dependencies] python = "^3.12" fastapi = {extras = ["standard"], version = "0.115.6"} -pydantic = "2.10.5" -pydantic-settings = "2.7.1" -langchain = "0.3.14" -langchain-openai = "0.3.1" -langchain-community = "0.3.14" -langgraph = "0.2.64" +pydantic-settings = "2.6.1" +langchain = "0.3.10" +langchain-openai = "0.2.11" +langgraph = 
"0.2.56" +langchain-community = "0.3.10" +langgraph-checkpoint-postgres = "^2.0.8" +langchain-postgres = "^0.0.12" +psycopg-pool = "^3.2.4" +ipython = "^8.31.0" +psycopg = {extras = ["binary", "c"], version = "^3.2.4"} [tool.poetry.group.dev.dependencies] black = "24.10.0" diff --git a/webapp/src/app/core/modules/openapi/model/session.ts b/webapp/src/app/core/modules/openapi/model/session.ts index 1da18674..985781eb 100644 --- a/webapp/src/app/core/modules/openapi/model/session.ts +++ b/webapp/src/app/core/modules/openapi/model/session.ts @@ -14,5 +14,6 @@ export interface Session { id: number; createdAt: string; + isClosed: boolean; } diff --git a/webapp/src/app/mentor/chat-input/chat-input.component.html b/webapp/src/app/mentor/chat-input/chat-input.component.html index 9bc2b8a0..d07e470f 100644 --- a/webapp/src/app/mentor/chat-input/chat-input.component.html +++ b/webapp/src/app/mentor/chat-input/chat-input.component.html @@ -1,15 +1,24 @@

- -