From 71d1a18ce8fd2aeaa6213dc75ecb635ce6594d09 Mon Sep 17 00:00:00 2001
From: = Enea_Gore
Date: Mon, 2 Dec 2024 13:07:47 +0100
Subject: [PATCH 1/3] fix linting in approach controller

---
 modules/text/module_text_llm/module_text_llm/__init__.py | 3 +--
 .../module_text_llm/approach_controller.py               | 9 ++++-----
 .../basic_approach/generate_suggestions.py               | 3 +--
 3 files changed, 6 insertions(+), 9 deletions(-)

diff --git a/modules/text/module_text_llm/module_text_llm/__init__.py b/modules/text/module_text_llm/module_text_llm/__init__.py
index 2f04ff661..31c8c578b 100644
--- a/modules/text/module_text_llm/module_text_llm/__init__.py
+++ b/modules/text/module_text_llm/module_text_llm/__init__.py
@@ -1,4 +1,3 @@
 import dotenv
 
-# Load environment variables from .env file (for local development)
-dotenv.load_dotenv(override=True)
\ No newline at end of file
+dotenv.load_dotenv(override=True)
diff --git a/modules/text/module_text_llm/module_text_llm/approach_controller.py b/modules/text/module_text_llm/module_text_llm/approach_controller.py
index 25329b8ec..eda900c32 100644
--- a/modules/text/module_text_llm/module_text_llm/approach_controller.py
+++ b/modules/text/module_text_llm/module_text_llm/approach_controller.py
@@ -1,16 +1,15 @@
-
 from typing import List
 
 from athena.text import Exercise, Submission, Feedback
 from module_text_llm.basic_approach import BasicApproachConfig
-from module_text_llm.chain_of_thought_approach import ChainOfThoughtConfig 
+from module_text_llm.chain_of_thought_approach import ChainOfThoughtConfig
 from module_text_llm.approach_config import ApproachConfig
 from module_text_llm.basic_approach.generate_suggestions import generate_suggestions as generate_suggestions_basic
 from module_text_llm.chain_of_thought_approach.generate_suggestions import generate_suggestions as generate_cot_suggestions
 
 async def generate_suggestions(exercise: Exercise, submission: Submission, config: ApproachConfig, debug: bool) -> List[Feedback]:
-    if(isinstance(config, BasicApproachConfig)):
+    if isinstance(config, BasicApproachConfig):
         return await generate_suggestions_basic(exercise, submission, config, debug)
-    elif(isinstance(config, ChainOfThoughtConfig)):
+    elif isinstance(config, ChainOfThoughtConfig):
         return await generate_cot_suggestions(exercise, submission, config, debug)
-    
+    raise ValueError("Unsupported config type provided.")
\ No newline at end of file
diff --git a/modules/text/module_text_llm/module_text_llm/basic_approach/generate_suggestions.py b/modules/text/module_text_llm/module_text_llm/basic_approach/generate_suggestions.py
index b9e8694cb..6cdf5b369 100644
--- a/modules/text/module_text_llm/module_text_llm/basic_approach/generate_suggestions.py
+++ b/modules/text/module_text_llm/module_text_llm/basic_approach/generate_suggestions.py
@@ -8,7 +8,6 @@
     check_prompt_length_and_omit_features_if_necessary,
     num_tokens_from_prompt,
 )
-from athena.text import Exercise, Submission, Feedback
 from llm_core.utils.predict_and_parse import predict_and_parse
 
 from module_text_llm.config import BasicApproachConfig
@@ -94,4 +93,4 @@ async def generate_suggestions(exercise: Exercise, submission: Submission, confi
             meta={}
         ))
 
-    return feedbacks
\ No newline at end of file
+    return feedbacks

From 30810f9f2516b9d3944257ff70572312a1242d9d Mon Sep 17 00:00:00 2001
From: = Enea_Gore
Date: Mon, 2 Dec 2024 13:15:52 +0100
Subject: [PATCH 2/3] trailing new lines and ifel

---
 .../module_text_llm/module_text_llm/approach_controller.py | 4 ++--
 .../module_text_llm/basic_approach/__init__.py             | 2 +-
 .../basic_approach/prompt_generate_suggestions.py          | 3 +--
 3 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/modules/text/module_text_llm/module_text_llm/approach_controller.py b/modules/text/module_text_llm/module_text_llm/approach_controller.py
index eda900c32..fd7bed8e1 100644
--- a/modules/text/module_text_llm/module_text_llm/approach_controller.py
+++ b/modules/text/module_text_llm/module_text_llm/approach_controller.py
@@ -10,6 +10,6 @@
 async def generate_suggestions(exercise: Exercise, submission: Submission, config: ApproachConfig, debug: bool) -> List[Feedback]:
     if isinstance(config, BasicApproachConfig):
         return await generate_suggestions_basic(exercise, submission, config, debug)
-    elif isinstance(config, ChainOfThoughtConfig):
+    if isinstance(config, ChainOfThoughtConfig): # Remove "elif"
         return await generate_cot_suggestions(exercise, submission, config, debug)
-    raise ValueError("Unsupported config type provided.")
\ No newline at end of file
+    raise ValueError("Unsupported config type provided.")
diff --git a/modules/text/module_text_llm/module_text_llm/basic_approach/__init__.py b/modules/text/module_text_llm/module_text_llm/basic_approach/__init__.py
index 37a674e7c..cc8c8e56e 100644
--- a/modules/text/module_text_llm/module_text_llm/basic_approach/__init__.py
+++ b/modules/text/module_text_llm/module_text_llm/basic_approach/__init__.py
@@ -8,4 +8,4 @@ class BasicApproachConfig(ApproachConfig):
     type: Literal['basic'] = 'basic'
 
     generate_suggestions_prompt: GenerateSuggestionsPrompt = Field(default=GenerateSuggestionsPrompt())
-    
+    
\ No newline at end of file
diff --git a/modules/text/module_text_llm/module_text_llm/basic_approach/prompt_generate_suggestions.py b/modules/text/module_text_llm/module_text_llm/basic_approach/prompt_generate_suggestions.py
index 4b23137d5..d3d195b49 100644
--- a/modules/text/module_text_llm/module_text_llm/basic_approach/prompt_generate_suggestions.py
+++ b/modules/text/module_text_llm/module_text_llm/basic_approach/prompt_generate_suggestions.py
@@ -1,6 +1,5 @@
 from pydantic import Field, BaseModel
 from typing import List, Optional
-from pydantic import BaseModel, Field
 
 system_message = """\
 You are an AI tutor for text assessment at a prestigious university.
@@ -62,4 +61,4 @@ class AssessmentModel(BaseModel):
     """Collection of feedbacks making up an assessment"""
 
     feedbacks: List[FeedbackModel] = Field(description="Assessment feedbacks")
-    
+    
\ No newline at end of file

From 6c10afe4af46672e65eab0430b2edc0ff2cc6a69 Mon Sep 17 00:00:00 2001
From: Enea Gore <73840596+EneaGore@users.noreply.github.com>
Date: Mon, 2 Dec 2024 13:18:54 +0100
Subject: [PATCH 3/3] Update approach_controller.py

---
 .../text/module_text_llm/module_text_llm/approach_controller.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/text/module_text_llm/module_text_llm/approach_controller.py b/modules/text/module_text_llm/module_text_llm/approach_controller.py
index fd7bed8e1..69a5cc69c 100644
--- a/modules/text/module_text_llm/module_text_llm/approach_controller.py
+++ b/modules/text/module_text_llm/module_text_llm/approach_controller.py
@@ -10,6 +10,6 @@
 async def generate_suggestions(exercise: Exercise, submission: Submission, config: ApproachConfig, debug: bool) -> List[Feedback]:
     if isinstance(config, BasicApproachConfig):
         return await generate_suggestions_basic(exercise, submission, config, debug)
-    if isinstance(config, ChainOfThoughtConfig): # Remove "elif"
+    if isinstance(config, ChainOfThoughtConfig):
         return await generate_cot_suggestions(exercise, submission, config, debug)
     raise ValueError("Unsupported config type provided.")