
Commit 898d1fc: Fix style issues
Hialus committed Feb 10, 2024
1 parent 429a4a0
Showing 5 changed files with 36 additions and 14 deletions.
10 changes: 10 additions & 0 deletions .flake8
@@ -0,0 +1,10 @@
+[flake8]
+max-line-length = 120
+exclude =
+    .git,
+    __pycache__,
+    .idea
+per-file-ignores =
+    # imported but unused
+    __init__.py: F401, F403
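
The per-file-ignores entry exists because package __init__.py files often only re-export names for convenience, which flake8 would otherwise report as unused imports (F401) or star imports (F403). A hedged sketch of that pattern, with made-up module names, since this commit does not show the actual __init__.py contents:

# Hypothetical app/llm/__init__.py; the re-exported names below are
# illustrative guesses, not the real file contents.
from llm.completion_arguments import CompletionArguments  # would be flagged as F401
from llm.request_handler_interface import RequestHandlerInterface  # would be flagged as F401
from llm.wrapper import *  # would be flagged as F403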

5 changes: 3 additions & 2 deletions .pre-commit-config.yaml
@@ -5,8 +5,9 @@
     rev: stable
     hooks:
       - id: black
-        language_version: python3.11
+        language_version: python3.12
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v2.0.0
     hooks:
-      - id: flake8
+      - id: flake8
+        language_version: python3.12
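
A plausible reason for pinning both hooks to python3.12: the Python files touched in this commit use the PEP 695 `type` statement (the `type ... =` alias lines in the diffs below), which is a syntax error on Python 3.11, so flake8 running under an older interpreter could not even parse these files. A minimal illustration:

from typing import TypeAlias

# The `type` statement requires Python 3.12+ (PEP 695); under 3.11 the line
# below fails to parse, which is presumably why the hooks pin python3.12.
type BasicRequestHandlerModel = str

# Pre-3.12 spelling of a comparable alias, shown for comparison only.
BasicRequestHandlerModelLegacy: TypeAlias = str
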
10 changes: 8 additions & 2 deletions app/llm/basic_request_handler.py
@@ -1,7 +1,11 @@
 from domain import IrisMessage
 from llm import LlmManager
 from llm import RequestHandlerInterface, CompletionArguments
-from llm.wrapper import LlmCompletionWrapperInterface, LlmChatCompletionWrapperInterface, LlmEmbeddingWrapperInterface
+from llm.wrapper import (
+    LlmCompletionWrapperInterface,
+    LlmChatCompletionWrapperInterface,
+    LlmEmbeddingWrapperInterface,
+)

 type BasicRequestHandlerModel = str

@@ -21,7 +25,9 @@ def completion(self, prompt: str, arguments: CompletionArguments) -> str:
         else:
             raise NotImplementedError

-    def chat_completion(self, messages: list[IrisMessage], arguments: CompletionArguments) -> IrisMessage:
+    def chat_completion(
+        self, messages: list[IrisMessage], arguments: CompletionArguments
+    ) -> IrisMessage:
         llm = self.llm_manager.get_llm_by_id(self.model).llm
         if isinstance(llm, LlmChatCompletionWrapperInterface):
             return llm.chat_completion(messages, arguments)
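
For orientation, a hypothetical call site for this handler. The constructor keyword, the model id, and the IrisMessage/CompletionArguments fields are assumptions made for this sketch, not details taken from the commit; only the method name and the isinstance-based dispatch appear in the diff above.

# Hypothetical usage sketch; model=, role=, text= and temperature= are guesses.
handler = BasicRequestHandler(model="some-configured-llm-id")
reply = handler.chat_completion(
    [IrisMessage(role="user", text="Summarize this exercise statement.")],
    CompletionArguments(temperature=0.2),
)
# A wrapper that does not implement LlmChatCompletionWrapperInterface would
# presumably fall through to raise NotImplementedError, mirroring the
# completion() branch visible at the top of this hunk.
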
23 changes: 15 additions & 8 deletions app/llm/wrapper/llm_wrapper_interface.py
@@ -2,16 +2,19 @@

 from llm import CompletionArguments

-type LlmWrapperInterface = LlmCompletionWrapperInterface | LlmChatCompletionWrapperInterface | LlmEmbeddingWrapperInterface
+type LlmWrapperInterface = (
+    LlmCompletionWrapperInterface
+    | LlmChatCompletionWrapperInterface
+    | LlmEmbeddingWrapperInterface
+)


 class LlmCompletionWrapperInterface(metaclass=ABCMeta):
     """Interface for the llm completion wrappers"""

     @classmethod
     def __subclasshook__(cls, subclass):
-        return (hasattr(subclass, 'completion') and
-                callable(subclass.completion))
+        return hasattr(subclass, "completion") and callable(subclass.completion)

     @abstractmethod
     def completion(self, prompt: str, arguments: CompletionArguments) -> str:
@@ -24,11 +27,14 @@ class LlmChatCompletionWrapperInterface(metaclass=ABCMeta):

     @classmethod
     def __subclasshook__(cls, subclass):
-        return (hasattr(subclass, 'chat_completion') and
-                callable(subclass.chat_completion))
+        return hasattr(subclass, "chat_completion") and callable(
+            subclass.chat_completion
+        )

     @abstractmethod
-    def chat_completion(self, messages: list[any], arguments: CompletionArguments) -> any:
+    def chat_completion(
+        self, messages: list[any], arguments: CompletionArguments
+    ) -> any:
         """Create a completion from the chat messages"""
         raise NotImplementedError

@@ -38,8 +44,9 @@ class LlmEmbeddingWrapperInterface(metaclass=ABCMeta):

     @classmethod
     def __subclasshook__(cls, subclass):
-        return (hasattr(subclass, 'create_embedding') and
-                callable(subclass.create_embedding))
+        return hasattr(subclass, "create_embedding") and callable(
+            subclass.create_embedding
+        )

     @abstractmethod
     def create_embedding(self, text: str) -> list[float]:
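
These interfaces rely on ABCMeta together with __subclasshook__ to get structural checks: any class that exposes the required callable attribute passes issubclass/isinstance against the interface without inheriting from it or registering itself. A minimal sketch, assuming the imports resolve the same way they do elsewhere in this commit:

from llm import CompletionArguments
from llm.wrapper import LlmCompletionWrapperInterface


class MyCompletionWrapper:
    # No inheritance from LlmCompletionWrapperInterface; a callable
    # `completion` attribute is all the __subclasshook__ above checks for.
    def completion(self, prompt: str, arguments: CompletionArguments) -> str:
        return "stubbed completion for: " + prompt


# ABCMeta routes both checks through __subclasshook__, so they succeed.
assert issubclass(MyCompletionWrapper, LlmCompletionWrapperInterface)
assert isinstance(MyCompletionWrapper(), LlmCompletionWrapperInterface)

This structural check is also what lets BasicRequestHandler.chat_completion dispatch on isinstance(llm, LlmChatCompletionWrapperInterface) without caring about the wrapper's concrete base class.
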
2 changes: 0 additions & 2 deletions app/llm/wrapper/ollama_wrapper.py
@@ -1,6 +1,4 @@
 from ollama import Client, Message
-from openai import OpenAI
-from openai.types.chat import ChatCompletionMessageParam

 from domain import IrisMessage
 from llm import CompletionArguments
