From 5cae3ebc3398aae555bb2ed7762d3f6821bd8143 Mon Sep 17 00:00:00 2001 From: Yiannis Charalambous Date: Fri, 19 Jan 2024 11:24:55 +0000 Subject: [PATCH] AIModels: Changed some type annotations for list parameters to Iterables. --- esbmc_ai_lib/ai_models.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/esbmc_ai_lib/ai_models.py b/esbmc_ai_lib/ai_models.py index bc779b1..1b66a4f 100644 --- a/esbmc_ai_lib/ai_models.py +++ b/esbmc_ai_lib/ai_models.py @@ -1,7 +1,7 @@ # Author: Yiannis Charalambous from abc import abstractmethod -from typing import Any, Union +from typing import Any, Iterable, Union from enum import Enum from typing_extensions import override @@ -49,7 +49,7 @@ def create_llm( @classmethod def convert_messages_to_tuples( - cls, messages: list[BaseMessage] + cls, messages: Iterable[BaseMessage] ) -> list[tuple[str, str]]: """Converts messages into a format understood by the ChatPromptTemplate - since it won't format BaseMessage derived classes for some reason, but will for tuples, because they get converted into @@ -58,7 +58,7 @@ def convert_messages_to_tuples( def apply_chat_template( self, - messages: list[BaseMessage], + messages: Iterable[BaseMessage], **format_values: Any, ) -> PromptValue: # Default one, identity function essentially. @@ -157,7 +157,7 @@ def create_llm( @override def apply_chat_template( self, - messages: list[BaseMessage], + messages: Iterable[BaseMessage], **format_values: Any, ) -> PromptValue: """Text generation LLMs take single string of text as input. So the conversation