Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: SDK Changes in preparation for release #7

Merged
merged 22 commits into the base branch from the feature branch
Nov 8, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .release-please-manifest.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
{
".": "1.0.0"
".": "0.1.0"
}
4 changes: 1 addition & 3 deletions CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ LaunchDarkly has published an [SDK contributor's guide](https://docs.launchdarkl

## Submitting bug reports and feature requests

The LaunchDarkly SDK team monitors the [issue tracker](https://github.com/launchdarkly/python-server-sdk-AI/issues) in the SDK repository. Bug reports and feature requests specific to this library should be filed in this issue tracker. The SDK team will respond to all newly filed issues within two business days.
The LaunchDarkly SDK team monitors the [issue tracker](https://github.com/launchdarkly/python-server-sdk-ai/issues) in the SDK repository. Bug reports and feature requests specific to this library should be filed in this issue tracker. The SDK team will respond to all newly filed issues within two business days.

## Submitting pull requests

Expand Down Expand Up @@ -55,8 +55,6 @@ make lint

The library's module structure is as follows:

<!-- TODO: Add structure description -->

### Type hints

Python does not require the use of type hints, but they can be extremely helpful for spotting mistakes and for improving the IDE experience, so we should always use them in the library. Every method in the public API is expected to have type hints for all non-`self` parameters, and for its return value if any.
Expand Down
13 changes: 1 addition & 12 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,18 +12,7 @@ This version of the library has a minimum Python version of 3.8.

## Getting started

Install the package

$ pip install launchdarkly-server-sdk-ai

The provided `TracingHook` can be setup as shown below:

<!-- TODO: Install instructions -->

```python
import ldclient

```
Refer to the [SDK reference guide](https://docs.launchdarkly.com/sdk/ai/python) for instructions on getting started with using the SDK.

## Learn more

Expand Down
74 changes: 44 additions & 30 deletions ldai/client.py
Original file line number Diff line number Diff line change
@@ -1,53 +1,67 @@
from typing import Any, Dict, Optional
from typing import Any, Dict, List, Literal, Optional
from ldclient import Context
from ldclient.client import LDClient
import chevron

from ldai.tracker import LDAIConfigTracker
from ldai.types import AIConfig
from dataclasses import dataclass

@dataclass
class LDMessage:
    """A single chat message in an AI config prompt.

    The ``content`` field may contain Mustache-style ``{{placeholder}}``
    templates; the client interpolates them before returning the config.
    """
    # Who authored the message within the conversation.
    role: Literal['system', 'user', 'assistant']
    # Message text (possibly a Mustache template prior to interpolation).
    content: str

@dataclass
class AIConfigData:
    """The payload of an evaluated AI config: model settings plus prompt."""
    # Provider-specific model parameters, e.g. {'modelId': 'fakeModel'}.
    model: Optional[dict]
    # Ordered chat messages making up the prompt; None when absent.
    prompt: Optional[List[LDMessage]]
class AIConfig:
    """An evaluated AI config together with its usage tracker.

    :param config: The model/prompt data for this config.
    :param tracker: Tracker used to record metrics for this evaluation.
    :param enabled: Whether the config is enabled (taken from ``_ldMeta``).
    """

    def __init__(self, config: AIConfigData, tracker: LDAIConfigTracker, enabled: bool):
        self.config = config
        self.tracker = tracker
        self.enabled = enabled

class LDAIClient:
    """The LaunchDarkly AI SDK client object."""

    def __init__(self, client: LDClient):
        # Underlying LaunchDarkly SDK client used for flag evaluation.
        self.client = client

def model_config(self, key: str, context: Context, default_value: str, variables: Optional[Dict[str, Any]] = None) -> AIConfig:
"""Get the value of a model configuration asynchronously.

Args:
key: The key of the model configuration.
context: The context to evaluate the model configuration in.
default_value: The default value of the model configuration.
variables: Additional variables for the model configuration.
def model_config(self, key: str, context: Context, default_value: AIConfig, variables: Optional[Dict[str, Any]] = None) -> AIConfig:
"""
Get the value of a model configuration asynchronously.

Returns:
The value of the model configuration.
:param key: The key of the model configuration.
:param context: The context to evaluate the model configuration in.
:param default_value: The default value of the model configuration.
:param variables: Additional variables for the model configuration.
:return: The value of the model configuration.
"""
variation = self.client.variation(key, context, default_value)

all_variables = {'ldctx': context}
all_variables = {}
if variables:
all_variables.update(variables)
all_variables['ldctx'] = context

if isinstance(variation['prompt'], list) and all(isinstance(entry, dict) for entry in variation['prompt']):
variation['prompt'] = [
LDMessage(
role=entry['role'],
content=self.__interpolate_template(entry['content'], all_variables)
)
for entry in variation['prompt']
]

variation['prompt'] = [
{
**entry,
'content': self.interpolate_template(entry['content'], all_variables)
}
for entry in variation['prompt']
]
enabled = variation.get('_ldMeta',{}).get('enabled', False)
return AIConfig(config=AIConfigData(model=variation['model'], prompt=variation['prompt']), tracker=LDAIConfigTracker(self.client, variation.get('_ldMeta', {}).get('versionKey', ''), key, context), enabled=bool(enabled))

return AIConfig(config=variation, tracker=LDAIConfigTracker(self.client, variation['_ldMeta']['variationId'], key, context))

def interpolate_template(self, template: str, variables: Dict[str, Any]) -> str:
"""Interpolate the template with the given variables.

Args:
template: The template string.
variables: The variables to interpolate into the template.
def __interpolate_template(self, template: str, variables: Dict[str, Any]) -> str:
"""
Interpolate the template with the given variables.

Returns:
The interpolated string.
:template: The template string.
:variables: The variables to interpolate into the template.
:return: The interpolated string.
"""
return chevron.render(template, variables)
105 changes: 105 additions & 0 deletions ldai/testing/test_model_config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,105 @@
import pytest
from ldclient import LDClient, Context, Config
from ldclient.integrations.test_data import TestData
from ldai.client import AIConfig, AIConfigData, LDAIClient, LDMessage
from ldai.tracker import LDAIConfigTracker
from ldclient.testing.builders import *


@pytest.fixture
def td() -> TestData:
    """Provide a TestData source seeded with the AI config flags under test."""
    data = TestData.data_source()

    data.update(
        data.flag('model-config')
        .variations(
            {
                'model': {'modelId': 'fakeModel'},
                'prompt': [{'role': 'system', 'content': 'Hello, {{name}}!'}],
                '_ldMeta': {'enabled': True, 'versionKey': 'abcd'},
            },
            "green",
        )
        .variation_for_all(0)
    )

    data.update(
        data.flag('multiple-prompt')
        .variations(
            {
                'model': {'modelId': 'fakeModel'},
                'prompt': [
                    {'role': 'system', 'content': 'Hello, {{name}}!'},
                    {'role': 'user', 'content': 'The day is, {{day}}!'},
                ],
                '_ldMeta': {'enabled': True, 'versionKey': 'abcd'},
            },
            "green",
        )
        .variation_for_all(0)
    )

    data.update(
        data.flag('ctx-interpolation')
        .variations(
            {
                'model': {'modelId': 'fakeModel'},
                'prompt': [{'role': 'system', 'content': 'Hello, {{ldctx.name}}!'}],
                '_ldMeta': {'enabled': True, 'versionKey': 'abcd'},
            }
        )
        .variation_for_all(0)
    )

    data.update(
        data.flag('off-config')
        .variations(
            {
                'model': {'modelId': 'fakeModel'},
                'prompt': [{'role': 'system', 'content': 'Hello, {{name}}!'}],
                '_ldMeta': {'enabled': False, 'versionKey': 'abcd'},
            }
        )
        .variation_for_all(0)
    )

    return data

@pytest.fixture
def client(td: TestData) -> LDClient:
    """An LDClient wired to the test data source, with events disabled."""
    return LDClient(config=Config('sdk-key', update_processor_class=td, send_events=False))

@pytest.fixture
def tracker(client: LDClient) -> LDAIConfigTracker:
    """A config tracker bound to the 'model-config' flag for a test user."""
    user = Context.create('user-key')
    return LDAIConfigTracker(client, 'abcd', 'model-config', user)

@pytest.fixture
def ldai_client(client: LDClient) -> LDAIClient:
    """The LaunchDarkly AI client under test."""
    ai_client = LDAIClient(client)
    return ai_client

def test_model_config_interpolation(ldai_client: LDAIClient, tracker):
    """Prompt templates are interpolated with the supplied variables."""
    ctx = Context.create('user-key')
    fallback = AIConfig(
        config=AIConfigData(
            model={'modelId': 'fakeModel'},
            prompt=[LDMessage(role='system', content='Hello, {{name}}!')],
        ),
        tracker=tracker,
        enabled=True,
    )

    result = ldai_client.model_config('model-config', ctx, fallback, {'name': 'World'})

    prompt = result.config.prompt
    assert prompt is not None
    assert len(prompt) > 0
    assert prompt[0].content == 'Hello, World!'
    assert result.enabled is True

def test_model_config_no_variables(ldai_client: LDAIClient, tracker):
    """Without variables, template placeholders render as empty strings."""
    ctx = Context.create('user-key')
    fallback = AIConfig(config=AIConfigData(model={}, prompt=[]), tracker=tracker, enabled=True)

    result = ldai_client.model_config('model-config', ctx, fallback, {})

    prompt = result.config.prompt
    assert prompt is not None
    assert len(prompt) > 0
    assert prompt[0].content == 'Hello, !'
    assert result.enabled is True

def test_context_interpolation(ldai_client: LDAIClient, tracker):
    """``{{ldctx.*}}`` placeholders resolve from the evaluation context."""
    ctx = Context.builder('user-key').name("Sandy").build()
    fallback = AIConfig(config=AIConfigData(model={}, prompt=[]), tracker=tracker, enabled=True)

    # 'name' is also passed as a plain variable; the context value must win
    # for the ldctx-scoped placeholder.
    result = ldai_client.model_config('ctx-interpolation', ctx, fallback, {'name': 'World'})

    prompt = result.config.prompt
    assert prompt is not None
    assert len(prompt) > 0
    assert prompt[0].content == 'Hello, Sandy!'
    assert result.enabled is True

def test_model_config_multiple(ldai_client: LDAIClient, tracker):
    """Every message in a multi-message prompt is interpolated."""
    ctx = Context.create('user-key')
    fallback = AIConfig(config=AIConfigData(model={}, prompt=[]), tracker=tracker, enabled=True)

    result = ldai_client.model_config(
        'multiple-prompt', ctx, fallback, {'name': 'World', 'day': 'Monday'}
    )

    prompt = result.config.prompt
    assert prompt is not None
    assert len(prompt) > 0
    assert prompt[0].content == 'Hello, World!'
    assert prompt[1].content == 'The day is, Monday!'
    assert result.enabled is True

def test_model_config_disabled(ldai_client: LDAIClient, tracker):
    """A flag whose _ldMeta marks it disabled yields enabled=False."""
    ctx = Context.create('user-key')
    fallback = AIConfig(config=AIConfigData(model={}, prompt=[]), tracker=tracker, enabled=False)

    result = ldai_client.model_config('off-config', ctx, fallback, {})

    assert result.enabled is False
Loading