Skip to content

Commit

Permalink
fix: check for llm_output_type in __init__
Browse files Browse the repository at this point in the history
  • Loading branch information
provos committed Sep 3, 2024
1 parent 18b9bbe commit 5a27ae4
Showing 1 changed file with 6 additions and 4 deletions.
10 changes: 6 additions & 4 deletions src/planai/llm_task.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
import hashlib
import logging
from textwrap import dedent
from typing import Optional
from typing import Optional, Type

from langchain_core.output_parsers import PydanticOutputParser
from pydantic import ConfigDict, Field
Expand All @@ -26,7 +26,7 @@

class LLMTaskWorker(TaskWorker):
model_config = ConfigDict(arbitrary_types_allowed=True)
llm_output_type: Optional[Task] = Field(
llm_output_type: Optional[Type[Task]] = Field(
None,
description="The output type of the LLM if it differs from the task output type",
)
Expand All @@ -40,8 +40,10 @@ class LLMTaskWorker(TaskWorker):

def __init__(self, **data):
    """Initialize the worker and validate its output-type configuration.

    The worker needs an unambiguous type for what the LLM produces:
    either ``llm_output_type`` is set explicitly, or ``output_types``
    must contain exactly one entry to fall back on.

    Raises:
        ValueError: if ``llm_output_type`` is unset and ``output_types``
            does not contain exactly one type.
    """
    super().__init__(**data)
    # Only enforce the single-output-type rule when no explicit LLM
    # output type was provided; an explicit llm_output_type makes
    # multiple (or zero visible) output_types acceptable.
    if self.llm_output_type is None and len(self.output_types) != 1:
        raise ValueError(
            "LLMTask must either have llm_output_type or exactly one output_type"
        )

def consume_work(self, task: Task):
    """Handle an incoming task by delegating it to the LLM invocation helper."""
    result = self._invoke_llm(task)
    return result
Expand Down

0 comments on commit 5a27ae4

Please sign in to comment.