diff --git a/ldai/client.py b/ldai/client.py
index 0312096..90218e2 100644
--- a/ldai/client.py
+++ b/ldai/client.py
@@ -2,12 +2,14 @@
 from typing import Any, Dict, List, Literal, Optional
 
 import chevron
+from dataclasses_json import dataclass_json
 from ldclient import Context
 from ldclient.client import LDClient
 
 from ldai.tracker import LDAIConfigTracker
 
 
+@dataclass_json
 @dataclass
 class LDMessage:
     role: Literal['system', 'user', 'assistant']
@@ -59,7 +61,7 @@ def model_config(
         if isinstance(variation['prompt'], list) and all(
             isinstance(entry, dict) for entry in variation['prompt']
         ):
-            variation['prompt'] = [
+            prompt = [
                 LDMessage(
                     role=entry['role'],
                     content=self.__interpolate_template(
@@ -71,7 +73,7 @@ def model_config(
         enabled = variation.get('_ldMeta', {}).get('enabled', False)
 
         return AIConfig(
-            config=AIConfigData(model=variation['model'], prompt=variation['prompt']),
+            config=AIConfigData(model=variation['model'], prompt=prompt),
             tracker=LDAIConfigTracker(
                 self.client,
                 variation.get('_ldMeta', {}).get('versionKey', ''),
diff --git a/pyproject.toml b/pyproject.toml
index c03027d..0b86b18 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -27,11 +27,12 @@ exclude = [
 ]
 
 [tool.poetry.dependencies]
-python = ">=3.8"
+python = ">=3.8,<4"
 launchdarkly-server-sdk = ">=9.4.0"
 chevron = "=0.14.0"
 pycodestyle = "^2.12.1"
 isort = "^5.13.2"
+dataclasses-json = "^0.6.7"
 
 [tool.poetry.group.dev.dependencies]
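
For context (not part of the patch): a minimal sketch of what stacking @dataclass_json on LDMessage provides, assuming the dataclasses-json ^0.6.7 dependency pinned above. The decorator generates to_dict/to_json (and from_dict/from_json) helpers on the dataclass; the sketch below only exercises the serializing pair, and the example message content is invented for illustration.

from dataclasses import dataclass
from typing import Literal

from dataclasses_json import dataclass_json


@dataclass_json
@dataclass
class LDMessage:
    role: Literal['system', 'user', 'assistant']
    content: str


# to_dict/to_json are generated by dataclasses-json; no custom encoder needed.
msg = LDMessage(role='user', content='Hello, {{name}}!')
print(msg.to_dict())  # {'role': 'user', 'content': 'Hello, {{name}}!'}
print(msg.to_json())  # '{"role": "user", "content": "Hello, {{name}}!"}'

Keeping the original @dataclass underneath leaves the field definitions and type hints untouched; dataclasses-json only layers the (de)serialization methods on top of the existing dataclass.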