Skip to content

Commit

Permalink
Merge branch 'main' into feature/python-finally-has-switch-statements
Browse files Browse the repository at this point in the history
# Conflicts:
#	app/llm/langchain/iris_langchain_chat_model.py
  • Loading branch information
Hialus committed Feb 21, 2024
2 parents 2ebdb96 + db3c066 commit ffa721d
Show file tree
Hide file tree
Showing 25 changed files with 289 additions and 186 deletions.
4 changes: 4 additions & 0 deletions app/domain/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1,5 @@
from domain.message import IrisMessage, IrisMessageRole
from domain.course import Course
from domain.exercise import ProgrammingExercise
from domain.submission import ProgrammingSubmission
from domain.codehint import CodeHint
28 changes: 28 additions & 0 deletions app/domain/codehint.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
from pydantic import BaseModel


class ProgrammingExerciseSolutionEntry(BaseModel):
    """A single solution change: one line in one file, before and after."""

    file_path: str
    previous_line: int
    line: int
    previous_code: str
    code: str

    def __str__(self):
        fields = (
            f'file_path="{self.file_path}"',
            f"previous_line={self.previous_line}",
            f"line={self.line}",
            f'previous_code="{self.previous_code}"',
            f'code="{self.code}"',
        )
        return f"ProgrammingExerciseSolutionEntry({', '.join(fields)})"


class CodeHint(BaseModel):
    """A code hint with its title, description, content and solution entries."""

    title: str
    description: str
    content: str
    # Fix: `[X]` is a list literal, not a type annotation — pydantic needs
    # `list[X]` to validate each element as a ProgrammingExerciseSolutionEntry.
    solution_entries: list[ProgrammingExerciseSolutionEntry]

    def __str__(self):
        return (
            f'CodeHint(title="{self.title}", description="{self.description}", content="{self.content}", '
            f"solution_entries={self.solution_entries})"
        )
9 changes: 9 additions & 0 deletions app/domain/course.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
from pydantic import BaseModel


class Course(BaseModel):
    """A course, described by its title and description."""

    title: str
    description: str

    def __str__(self):
        return 'Course(title="{0}", description="{1}")'.format(
            self.title, self.description
        )
60 changes: 60 additions & 0 deletions app/domain/dtos.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
from pydantic import BaseModel

from domain import (
Course,
ProgrammingExercise,
IrisMessage,
ProgrammingSubmission,
CodeHint,
)


class ProgrammingExerciseTutorChatDTO(BaseModel):
    """DTO bundling a course, an exercise, a submission and the chat history."""

    course: Course
    exercise: ProgrammingExercise
    submission: ProgrammingSubmission
    # Fix: `[X]` is a list literal, not a type annotation — pydantic needs
    # `list[X]` to validate each element as an IrisMessage.
    chat_history: list[IrisMessage]

    def __str__(self):
        return (
            f"ProgrammingExerciseTutorChatDTO(course={self.course}, exercise={self.exercise}, "
            f"submission={self.submission}, chat_history={self.chat_history})"
        )


class CodeEditorChatDTO(BaseModel):
    """DTO for a code-editor chat: problem statement, the three repositories
    (each a mapping of file path to file content) and the chat history."""

    problem_statement: str
    solution_repository: dict[str, str]
    template_repository: dict[str, str]
    test_repository: dict[str, str]
    # Fix: `[X]` is a list literal, not a type annotation — pydantic needs
    # `list[X]` to validate each element as an IrisMessage.
    chat_history: list[IrisMessage]

    def __str__(self):
        return (
            f'CodeEditorChatDTO(problem_statement="{self.problem_statement}", '
            f"solution_repository={self.solution_repository}, template_repository={self.template_repository}, "
            f"test_repository={self.test_repository}, chat_history={self.chat_history})"
        )


class CodeEditorAdaptDTO(BaseModel):
    """DTO for a code-editor adapt request: problem statement, the three
    repositories (file path -> file content) and free-text instructions."""

    problem_statement: str
    solution_repository: dict[str, str]
    template_repository: dict[str, str]
    test_repository: dict[str, str]
    instructions: str

    def __str__(self):
        head = f'CodeEditorAdaptDTO(problem_statement="{self.problem_statement}", '
        repos = (
            f"solution_repository={self.solution_repository}, "
            f"template_repository={self.template_repository}, "
        )
        tail = f'test_repository={self.test_repository}, instructions="{self.instructions}")'
        return head + repos + tail


class HestiaDTO(BaseModel):
    """DTO pairing a code hint with its programming exercise."""

    code_hint: CodeHint
    exercise: ProgrammingExercise

    def __str__(self):
        return "HestiaDTO(code_hint={0}, exercise={1})".format(
            self.code_hint, self.exercise
        )
9 changes: 9 additions & 0 deletions app/domain/exercise.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
from pydantic import BaseModel


class ProgrammingExercise(BaseModel):
    """A programming exercise, described by its title and problem statement."""

    title: str
    problem_statement: str

    def __str__(self):
        return 'ProgrammingExercise(title="{0}", problem_statement="{1}")'.format(
            self.title, self.problem_statement
        )
7 changes: 4 additions & 3 deletions app/domain/message.py
Original file line number Diff line number Diff line change
@@ -1,19 +1,20 @@
from enum import Enum

from pydantic import BaseModel


class IrisMessageRole(Enum):
    """Role of the author of a chat message.

    The string values are the wire-format role names exchanged with the
    underlying chat APIs.
    """

    USER = "user"
    ASSISTANT = "assistant"
    SYSTEM = "system"


class IrisMessage:
class IrisMessage(BaseModel):
role: IrisMessageRole
text: str

def __init__(self, role: IrisMessageRole, text: str):
self.role = role
self.text = text
super().__init__(role=role, text=text)

def __str__(self):
return f"IrisMessage(role={self.role.value}, text='{self.text}')"
21 changes: 21 additions & 0 deletions app/domain/submission.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
from pydantic import BaseModel


class BuildLogEntry(BaseModel):
    """One timestamped line of a build log."""

    time: str
    message: str

    def __str__(self):
        return 'BuildLogEntry(time="{0}", message="{1}")'.format(
            self.time, self.message
        )


class ProgrammingSubmission(BaseModel):
    """A student submission: the commit, build outcome and its build log."""

    commit_hash: str
    build_failed: bool
    # Fix: `[X]` is a list literal, not a type annotation — pydantic needs
    # `list[X]` to validate each element as a BuildLogEntry.
    build_log_entries: list[BuildLogEntry]

    def __str__(self):
        return (
            f'ProgrammingSubmission(commit_hash="{self.commit_hash}", build_failed={self.build_failed}, '
            f"build_log_entries={self.build_log_entries})"
        )
6 changes: 3 additions & 3 deletions app/llm/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
from llm.request_handler_interface import RequestHandlerInterface
from llm.generation_arguments import *
from llm.basic_request_handler import BasicRequestHandler, BasicRequestHandlerModel
from llm.request_handler_interface import RequestHandler
from llm.completion_arguments import *
from llm.basic_request_handler import BasicRequestHandler, DefaultModelId
50 changes: 14 additions & 36 deletions app/llm/basic_request_handler.py
Original file line number Diff line number Diff line change
@@ -1,48 +1,26 @@
from domain import IrisMessage
from llm import RequestHandlerInterface, CompletionArguments
from llm import RequestHandler, CompletionArguments
from llm.llm_manager import LlmManager
from llm.wrapper.abstract_llm_wrapper import (
AbstractLlmCompletionWrapper,
AbstractLlmChatCompletionWrapper,
AbstractLlmEmbeddingWrapper,
)

type BasicRequestHandlerModel = str


class BasicRequestHandler(RequestHandlerInterface):
model: BasicRequestHandlerModel
class BasicRequestHandler(RequestHandler):
model_id: str
llm_manager: LlmManager

def __init__(self, model: BasicRequestHandlerModel):
self.model = model
def __init__(self, model_id: str):
self.model_id = model_id
self.llm_manager = LlmManager()

def completion(self, prompt: str, arguments: CompletionArguments) -> str:
llm = self.llm_manager.get_llm_by_id(self.model)
if isinstance(llm, AbstractLlmCompletionWrapper):
return llm.completion(prompt, arguments)
else:
raise NotImplementedError(
f"The LLM {llm.__str__()} does not support completion"
)
def complete(self, prompt: str, arguments: CompletionArguments) -> str:
llm = self.llm_manager.get_by_id(self.model_id)
return llm.complete(prompt, arguments)

def chat_completion(
def chat(
self, messages: list[IrisMessage], arguments: CompletionArguments
) -> IrisMessage:
llm = self.llm_manager.get_llm_by_id(self.model)
if isinstance(llm, AbstractLlmChatCompletionWrapper):
return llm.chat_completion(messages, arguments)
else:
raise NotImplementedError(
f"The LLM {llm.__str__()} does not support chat completion"
)
llm = self.llm_manager.get_by_id(self.model_id)
return llm.chat(messages, arguments)

def create_embedding(self, text: str) -> list[float]:
llm = self.llm_manager.get_llm_by_id(self.model)
if isinstance(llm, AbstractLlmEmbeddingWrapper):
return llm.create_embedding(text)
else:
raise NotImplementedError(
f"The LLM {llm.__str__()} does not support embedding"
)
def embed(self, text: str) -> list[float]:
llm = self.llm_manager.get_by_id(self.model_id)
return llm.embed(text)
File renamed without changes.
21 changes: 21 additions & 0 deletions app/llm/external/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
from llm.external.model import LanguageModel
from llm.external.openai_completion import (
DirectOpenAICompletionModel,
AzureOpenAICompletionModel,
)
from llm.external.openai_chat import DirectOpenAIChatModel, AzureOpenAIChatModel
from llm.external.openai_embeddings import (
DirectOpenAIEmbeddingModel,
AzureOpenAIEmbeddingModel,
)
from llm.external.ollama import OllamaModel

type AnyLLM = (
DirectOpenAICompletionModel
| AzureOpenAICompletionModel
| DirectOpenAIChatModel
| AzureOpenAIChatModel
| DirectOpenAIEmbeddingModel
| AzureOpenAIEmbeddingModel
| OllamaModel
)
60 changes: 60 additions & 0 deletions app/llm/external/model.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
from abc import ABCMeta, abstractmethod
from pydantic import BaseModel

from domain import IrisMessage
from llm import CompletionArguments


class LanguageModel(BaseModel, metaclass=ABCMeta):
    """Abstract class for the llm wrappers"""

    # Identifier used to look the model up (e.g. by an LLM manager's get_by_id).
    id: str
    # Human-readable model name.
    name: str
    # Human-readable model description.
    description: str


class CompletionModel(LanguageModel, metaclass=ABCMeta):
    """Abstract class for the llm completion wrappers"""

    @classmethod
    def __subclasshook__(cls, subclass) -> bool:
        # Structural check: any class exposing a callable `complete` is
        # treated as a CompletionModel by isinstance()/issubclass().
        return hasattr(subclass, "complete") and callable(subclass.complete)

    @abstractmethod
    def complete(self, prompt: str, arguments: CompletionArguments) -> str:
        """Create a completion from the prompt"""
        # Idiom fix: `{self}` formats via str(self), replacing the explicit
        # `self.__str__()` call; the message text is unchanged.
        raise NotImplementedError(f"The LLM {self} does not support completion")


class ChatModel(LanguageModel, metaclass=ABCMeta):
    """Abstract class for the llm chat completion wrappers"""

    @classmethod
    def __subclasshook__(cls, subclass) -> bool:
        # Structural check: any class exposing a callable `chat` is treated
        # as a ChatModel by isinstance()/issubclass().
        return hasattr(subclass, "chat") and callable(subclass.chat)

    @abstractmethod
    def chat(
        self, messages: list[IrisMessage], arguments: CompletionArguments
    ) -> IrisMessage:
        """Create a completion from the chat messages"""
        # Idiom fix: `{self}` formats via str(self), replacing the explicit
        # `self.__str__()` call; the message text is unchanged.
        raise NotImplementedError(f"The LLM {self} does not support chat completion")


class EmbeddingModel(LanguageModel, metaclass=ABCMeta):
    """Abstract class for the llm embedding wrappers"""

    @classmethod
    def __subclasshook__(cls, subclass) -> bool:
        # Structural check: any class exposing a callable `embed` is treated
        # as an EmbeddingModel by isinstance()/issubclass().
        return hasattr(subclass, "embed") and callable(subclass.embed)

    @abstractmethod
    def embed(self, text: str) -> list[float]:
        """Create an embedding from the text"""
        # Idiom fix: `{self}` formats via str(self), replacing the explicit
        # `self.__str__()` call; the message text is unchanged.
        raise NotImplementedError(f"The LLM {self} does not support embeddings")
24 changes: 10 additions & 14 deletions app/llm/wrapper/ollama_wrapper.py → app/llm/external/ollama.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,7 @@

from domain import IrisMessage, IrisMessageRole
from llm import CompletionArguments
from llm.wrapper.abstract_llm_wrapper import (
AbstractLlmChatCompletionWrapper,
AbstractLlmCompletionWrapper,
AbstractLlmEmbeddingWrapper,
)
from llm.external.model import ChatModel, CompletionModel, EmbeddingModel


def convert_to_ollama_messages(messages: list[IrisMessage]) -> list[Message]:
Expand All @@ -21,10 +17,10 @@ def convert_to_iris_message(message: Message) -> IrisMessage:
return IrisMessage(role=IrisMessageRole(message["role"]), text=message["content"])


class OllamaWrapper(
AbstractLlmCompletionWrapper,
AbstractLlmChatCompletionWrapper,
AbstractLlmEmbeddingWrapper,
class OllamaModel(
CompletionModel,
ChatModel,
EmbeddingModel,
):
type: Literal["ollama"]
model: str
Expand All @@ -34,19 +30,19 @@ class OllamaWrapper(
def model_post_init(self, __context: Any) -> None:
self._client = Client(host=self.host) # TODO: Add authentication (httpx auth?)

def completion(self, prompt: str, arguments: CompletionArguments) -> str:
def complete(self, prompt: str, arguments: CompletionArguments) -> str:
response = self._client.generate(model=self.model, prompt=prompt)
return response["response"]

def chat_completion(
self, messages: list[any], arguments: CompletionArguments
) -> any:
def chat(
self, messages: list[IrisMessage], arguments: CompletionArguments
) -> IrisMessage:
response = self._client.chat(
model=self.model, messages=convert_to_ollama_messages(messages)
)
return convert_to_iris_message(response["message"])

def create_embedding(self, text: str) -> list[float]:
def embed(self, text: str) -> list[float]:
response = self._client.embeddings(model=self.model, prompt=text)
return list(response)

Expand Down
Loading

0 comments on commit ffa721d

Please sign in to comment.