From 96f888f50503cc2e9e2c30bf1c21f80a2773c8b5 Mon Sep 17 00:00:00 2001
From: "Matthew M. Keeler"
Date: Mon, 2 Dec 2024 12:03:36 -0500
Subject: [PATCH 1/4] fix: Return AIConfig and LDAITracker separately (#23)

Consumers of this SDK are responsible for providing a default value to
the `config` method. Because that value is an `AIConfig`, customers are
forced to supply a tracker instance that will never be used. To address
this, we remove the tracker from the `AIConfig` type entirely and
instead return the `AIConfig` and the `LDAIConfigTracker` separately.
---
 ldai/client.py                    | 78 +++++++++++++++++++++++--------
 ldai/testing/test_model_config.py | 75 +++++++++++++++++------------
 pyproject.toml                    |  1 -
 release-please-config.json        |  2 +-
 4 files changed, 105 insertions(+), 51 deletions(-)

diff --git a/ldai/client.py b/ldai/client.py
index e2d53a2..1eceeac 100644
--- a/ldai/client.py
+++ b/ldai/client.py
@@ -1,20 +1,27 @@
 from dataclasses import dataclass
-from typing import Any, Dict, List, Literal, Optional
+from typing import Any, Dict, List, Literal, Optional, Tuple
 
 import chevron
-from dataclasses_json import dataclass_json
 from ldclient import Context
 from ldclient.client import LDClient
 
 from ldai.tracker import LDAIConfigTracker
 
 
-@dataclass_json
 @dataclass
 class LDMessage:
     role: Literal['system', 'user', 'assistant']
     content: str
 
+    def to_dict(self) -> dict:
+        """
+        Render the given message as a dictionary object.
+        """
+        return {
+            'role': self.role,
+            'content': self.content,
+        }
+
 
 class ModelConfig:
     """
@@ -62,6 +69,16 @@ def get_custom(self, key: str) -> Any:
 
         return self._custom.get(key)
 
+    def to_dict(self) -> dict:
+        """
+        Render the given model config as a dictionary object.
+        """
+        return {
+            'id': self._id,
+            'parameters': self._parameters,
+            'custom': self._custom,
+        }
+
 
 class ProviderConfig:
     """
@@ -78,14 +95,34 @@ def id(self) -> str:
         """
         return self._id
 
+    def to_dict(self) -> dict:
+        """
+        Render the given provider config as a dictionary object.
+        """
+        return {
+            'id': self._id,
+        }
+
 
+@dataclass(frozen=True)
 class AIConfig:
-    def __init__(self, tracker: LDAIConfigTracker, enabled: bool, model: Optional[ModelConfig], messages: Optional[List[LDMessage]], provider: Optional[ProviderConfig] = None):
-        self.tracker = tracker
-        self.enabled = enabled
-        self.model = model
-        self.messages = messages
-        self.provider = provider
+    enabled: Optional[bool] = None
+    model: Optional[ModelConfig] = None
+    messages: Optional[List[LDMessage]] = None
+    provider: Optional[ProviderConfig] = None
+
+    def to_dict(self) -> dict:
+        """
+        Render the given default values as an AIConfig-compatible dictionary object.
+        """
+        return {
+            '_ldMeta': {
+                'enabled': self.enabled or False,
+            },
+            'model': self.model.to_dict() if self.model else None,
+            'messages': [message.to_dict() for message in self.messages] if self.messages else None,
+            'provider': self.provider.to_dict() if self.provider else None,
+        }
 
 
 class LDAIClient:
@@ -100,7 +137,7 @@ def config(
         context: Context,
         default_value: AIConfig,
         variables: Optional[Dict[str, Any]] = None,
-    ) -> AIConfig:
+    ) -> Tuple[AIConfig, LDAIConfigTracker]:
        """
         Get the value of a model configuration.
 
@@ -108,9 +145,9 @@ def config(
         :param context: The context to evaluate the model configuration in.
         :param default_value: The default value of the model configuration.
         :param variables: Additional variables for the model configuration.
-        :return: The value of the model configuration.
+        :return: The value of the model configuration along with a tracker used for gathering metrics.
         """
-        variation = self.client.variation(key, context, default_value)
+        variation = self.client.variation(key, context, default_value.to_dict())
 
         all_variables = {}
         if variables:
@@ -146,20 +183,23 @@ def config(
                 custom=custom
             )
 
+        tracker = LDAIConfigTracker(
+            self.client,
+            variation.get('_ldMeta', {}).get('versionKey', ''),
+            key,
+            context,
+        )
+
         enabled = variation.get('_ldMeta', {}).get('enabled', False)
-        return AIConfig(
-            tracker=LDAIConfigTracker(
-                self.client,
-                variation.get('_ldMeta', {}).get('versionKey', ''),
-                key,
-                context,
-            ),
+        config = AIConfig(
             enabled=bool(enabled),
             model=model,
             messages=messages,
             provider=provider_config,
         )
 
+        return config, tracker
+
     def __interpolate_template(self, template: str, variables: Dict[str, Any]) -> str:
         """
         Interpolate the template with the given variables.

diff --git a/ldai/testing/test_model_config.py b/ldai/testing/test_model_config.py
index 6f97a4d..593a037 100644
--- a/ldai/testing/test_model_config.py
+++ b/ldai/testing/test_model_config.py
@@ -3,7 +3,6 @@
 from ldclient.integrations.test_data import TestData
 
 from ldai.client import AIConfig, LDAIClient, LDMessage, ModelConfig
-from ldai.tracker import LDAIConfigTracker
 
 
 @pytest.fixture
@@ -98,11 +97,6 @@ def client(td: TestData) -> LDClient:
     return LDClient(config=config)
 
 
-@pytest.fixture
-def tracker(client: LDClient) -> LDAIConfigTracker:
-    return LDAIConfigTracker(client, 'abcd', 'model-config', Context.create('user-key'))
-
-
 @pytest.fixture
 def ldai_client(client: LDClient) -> LDAIClient:
     return LDAIClient(client)
@@ -125,17 +119,38 @@ def test_model_config_handles_custom():
     assert model.get_custom('id') is None
 
 
-def test_model_config_interpolation(ldai_client: LDAIClient, tracker):
+def test_uses_default_on_invalid_flag(ldai_client: LDAIClient):
+    context = Context.create('user-key')
+    default_value = AIConfig(
+        enabled=True,
+        model=ModelConfig('fakeModel', parameters={'temperature': 0.5, 'maxTokens': 4096}),
+        messages=[LDMessage(role='system', content='Hello, {{name}}!')],
+    )
+    variables = {'name': 'World'}
+
+    config, _ = ldai_client.config('missing-flag', context, default_value, variables)
+
+    assert config.messages is not None
+    assert len(config.messages) > 0
+    assert config.messages[0].content == 'Hello, World!'
+    assert config.enabled is True
+
+    assert config.model is not None
+    assert config.model.id == 'fakeModel'
+    assert config.model.get_parameter('temperature') == 0.5
+    assert config.model.get_parameter('maxTokens') == 4096
+
+
+def test_model_config_interpolation(ldai_client: LDAIClient):
     context = Context.create('user-key')
     default_value = AIConfig(
-        tracker=tracker,
         enabled=True,
         model=ModelConfig('fakeModel'),
         messages=[LDMessage(role='system', content='Hello, {{name}}!')],
     )
     variables = {'name': 'World'}
 
-    config = ldai_client.config('model-config', context, default_value, variables)
+    config, _ = ldai_client.config('model-config', context, default_value, variables)
 
     assert config.messages is not None
     assert len(config.messages) > 0
@@ -148,11 +163,11 @@ def test_model_config_interpolation(ldai_client: LDAIClient, tracker):
     assert config.model.get_parameter('maxTokens') == 4096
 
 
-def test_model_config_no_variables(ldai_client: LDAIClient, tracker):
+def test_model_config_no_variables(ldai_client: LDAIClient):
     context = Context.create('user-key')
-    default_value = AIConfig(tracker=tracker, enabled=True, model=ModelConfig('fake-model'), messages=[])
+    default_value = AIConfig(enabled=True, model=ModelConfig('fake-model'), messages=[])
 
-    config = ldai_client.config('model-config', context, default_value, {})
+    config, _ = ldai_client.config('model-config', context, default_value, {})
 
     assert config.messages is not None
     assert len(config.messages) > 0
@@ -165,23 +180,23 @@ def test_model_config_no_variables(ldai_client: LDAIClient, tracker):
     assert config.model.get_parameter('maxTokens') == 4096
 
 
-def test_provider_config_handling(ldai_client: LDAIClient, tracker):
+def test_provider_config_handling(ldai_client: LDAIClient):
     context = Context.builder('user-key').name("Sandy").build()
-    default_value = AIConfig(tracker=tracker, enabled=True, model=ModelConfig('fake-model'), messages=[])
+    default_value = AIConfig(enabled=True, model=ModelConfig('fake-model'), messages=[])
     variables = {'name': 'World'}
 
-    config = ldai_client.config('model-config', context, default_value, variables)
+    config, _ = ldai_client.config('model-config', context, default_value, variables)
 
     assert config.provider is not None
     assert config.provider.id == 'fakeProvider'
 
 
-def test_context_interpolation(ldai_client: LDAIClient, tracker):
+def test_context_interpolation(ldai_client: LDAIClient):
     context = Context.builder('user-key').name("Sandy").build()
-    default_value = AIConfig(tracker=tracker, enabled=True, model=ModelConfig('fake-model'), messages=[])
+    default_value = AIConfig(enabled=True, model=ModelConfig('fake-model'), messages=[])
     variables = {'name': 'World'}
 
-    config = ldai_client.config(
+    config, _ = ldai_client.config(
         'ctx-interpolation', context, default_value, variables
     )
 
@@ -197,12 +212,12 @@ def test_context_interpolation(ldai_client: LDAIClient, tracker):
     assert config.model.get_parameter('extra-attribute') == 'I can be anything I set my mind/type to'
 
 
-def test_model_config_multiple(ldai_client: LDAIClient, tracker):
+def test_model_config_multiple(ldai_client: LDAIClient):
     context = Context.create('user-key')
-    default_value = AIConfig(tracker=tracker, enabled=True, model=ModelConfig('fake-model'), messages=[])
+    default_value = AIConfig(enabled=True, model=ModelConfig('fake-model'), messages=[])
     variables = {'name': 'World', 'day': 'Monday'}
 
-    config = ldai_client.config(
+    config, _ = ldai_client.config(
         'multiple-messages', context, default_value, variables
     )
 
@@ -218,11 +233,11 @@ def test_model_config_multiple(ldai_client: LDAIClient, tracker):
     assert config.model.get_parameter('maxTokens') == 8192
 
 
-def test_model_config_disabled(ldai_client: LDAIClient, tracker):
+def test_model_config_disabled(ldai_client: LDAIClient):
     context = Context.create('user-key')
-    default_value = AIConfig(tracker=tracker, enabled=False, model=ModelConfig('fake-model'), messages=[])
+    default_value = AIConfig(enabled=False, model=ModelConfig('fake-model'), messages=[])
 
-    config = ldai_client.config('off-config', context, default_value, {})
+    config, _ = ldai_client.config('off-config', context, default_value, {})
 
     assert config.model is not None
     assert config.enabled is False
@@ -231,11 +246,11 @@ def test_model_config_disabled(ldai_client: LDAIClient, tracker):
     assert config.model.get_parameter('maxTokens') is None
 
 
-def test_model_initial_config_disabled(ldai_client: LDAIClient, tracker):
+def test_model_initial_config_disabled(ldai_client: LDAIClient):
     context = Context.create('user-key')
-    default_value = AIConfig(tracker=tracker, enabled=False, model=ModelConfig('fake-model'), messages=[])
+    default_value = AIConfig(enabled=False, model=ModelConfig('fake-model'), messages=[])
 
-    config = ldai_client.config('initial-config-disabled', context, default_value, {})
+    config, _ = ldai_client.config('initial-config-disabled', context, default_value, {})
 
     assert config.enabled is False
     assert config.model is None
@@ -243,11 +258,11 @@ def test_model_initial_config_disabled(ldai_client: LDAIClient, tracker):
     assert config.provider is None
 
 
-def test_model_initial_config_enabled(ldai_client: LDAIClient, tracker):
+def test_model_initial_config_enabled(ldai_client: LDAIClient):
     context = Context.create('user-key')
-    default_value = AIConfig(tracker=tracker, enabled=False, model=ModelConfig('fake-model'), messages=[])
+    default_value = AIConfig(enabled=False, model=ModelConfig('fake-model'), messages=[])
 
-    config = ldai_client.config('initial-config-enabled', context, default_value, {})
+    config, _ = ldai_client.config('initial-config-enabled', context, default_value, {})
 
     assert config.enabled is True
     assert config.model is None

diff --git a/pyproject.toml b/pyproject.toml
index f301084..943049e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -30,7 +30,6 @@ exclude = [
 python = ">=3.8,<4"
 launchdarkly-server-sdk = ">=9.4.0"
 chevron = "=0.14.0"
-dataclasses-json = "^0.6.7"
 
 
 [tool.poetry.group.dev.dependencies]

diff --git a/release-please-config.json b/release-please-config.json
index a7ac352..78df6d7 100644
--- a/release-please-config.json
+++ b/release-please-config.json
@@ -3,7 +3,7 @@
   ".": {
     "release-type": "python",
     "versioning": "default",
-    "release-as": "0.3.0",
+    "bump-minor-pre-major": true,
     "include-v-in-tag": false,
     "extra-files": ["ldai/__init__.py", "PROVENANCE.md"],
     "include-component-in-tag": false

From 7e73fa76301d0104e7a8f81214b98fa12963e545 Mon Sep 17 00:00:00 2001
From: "Matthew M. Keeler"
Date: Mon, 2 Dec 2024 12:04:10 -0500
Subject: [PATCH 2/4] chore: Update provenance example data (#25)

---
 PROVENANCE.md | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/PROVENANCE.md b/PROVENANCE.md
index 472b9d2..698b210 100644
--- a/PROVENANCE.md
+++ b/PROVENANCE.md
@@ -33,11 +33,11 @@ launchdarkly_server_sdk_ai-${VERSION}-py3-none-any.whl
 
 Below is a sample of expected output.
 ```
-Verified signature against tlog entry index 89939519 at URL: https://rekor.sigstore.dev/api/v1/log/entries/24296fb24b8ad77abb8d2f681b007c76a4fe9f89cd9574918683ac8bc87cd6834c5baa479ae5cb98
-Verified build using builder "https://github.com/slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@refs/tags/v1.10.0" at commit 984fc268df29918b03f51f2507146f66d8668d03
-Verifying artifact launchdarkly_server_sdk_ai-1.0.0-py3-none-any.whl: PASSED
+Verified signature against tlog entry index 150910243 at URL: https://rekor.sigstore.dev/api/v1/log/entries/108e9186e8c5677ab3f14fc82cd3deb769e07ef812cadda623c08c77d4e51fc03124ee7542c470a1
+Verified build using builder "https://github.com/slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@refs/tags/v2.0.0" at commit 8e2d4094b4833d075e70dfce43bbc7176008c4a1
+Verifying artifact launchdarkly_server_sdk_ai-0.3.0-py3-none-any.whl: PASSED
 
-PASSED: Verified SLSA provenance
+PASSED: SLSA verification passed
 ```
 
 Alternatively, to verify the provenance manually, the SLSA framework specifies [recommendations for verifying build artifacts](https://slsa.dev/spec/v1.0/verifying-artifacts) in their documentation.

From 1159aeeda7c46cf2dab93f209929dbad5d35dc80 Mon Sep 17 00:00:00 2001
From: "Matthew M. Keeler"
Date: Mon, 2 Dec 2024 12:09:17 -0500
Subject: [PATCH 3/4] fix: Fix context usage for message interpolation (#24)

---
 ldai/client.py                    |  2 +-
 ldai/testing/test_model_config.py | 41 ++++++++++++++++++++++++++++---
 2 files changed, 39 insertions(+), 4 deletions(-)

diff --git a/ldai/client.py b/ldai/client.py
index 1eceeac..6a802dd 100644
--- a/ldai/client.py
+++ b/ldai/client.py
@@ -152,7 +152,7 @@ def config(
         all_variables = {}
         if variables:
             all_variables.update(variables)
-        all_variables['ldctx'] = context
+        all_variables['ldctx'] = context.to_dict()
 
         messages = None
         if 'messages' in variation and isinstance(variation['messages'], list) and all(

diff --git a/ldai/testing/test_model_config.py b/ldai/testing/test_model_config.py
index 593a037..afab1e4 100644
--- a/ldai/testing/test_model_config.py
+++ b/ldai/testing/test_model_config.py
@@ -43,7 +43,19 @@ def td() -> TestData:
         .variations(
             {
                 'model': {'id': 'fakeModel', 'parameters': {'extra-attribute': 'I can be anything I set my mind/type to'}},
-                'messages': [{'role': 'system', 'content': 'Hello, {{ldctx.name}}!'}],
+                'messages': [{'role': 'system', 'content': 'Hello, {{ldctx.name}}! Is your last name {{ldctx.last}}?'}],
                 '_ldMeta': {'enabled': True, 'versionKey': 'abcd'},
             }
         )
         .variation_for_all(0)
     )
 
+    td.update(
+        td.flag('multi-ctx-interpolation')
+        .variations(
+            {
+                'model': {'id': 'fakeModel', 'parameters': {'extra-attribute': 'I can be anything I set my mind/type to'}},
+                'messages': [{'role': 'system', 'content': 'Hello, {{ldctx.user.name}}! Do you work for {{ldctx.org.shortname}}?'}],
+                '_ldMeta': {'enabled': True, 'versionKey': 'abcd'},
+            }
+        )
+        .variation_for_all(0)
+    )
+
@@ -192,7 +204,7 @@ def test_provider_config_handling(ldai_client: LDAIClient):
 
 
 def test_context_interpolation(ldai_client: LDAIClient):
-    context = Context.builder('user-key').name("Sandy").build()
+    context = Context.builder('user-key').name("Sandy").set('last', 'Beaches').build()
     default_value = AIConfig(enabled=True, model=ModelConfig('fake-model'), messages=[])
     variables = {'name': 'World'}
 
@@ -202,7 +214,30 @@ def test_context_interpolation(ldai_client: LDAIClient):
     assert config.messages is not None
     assert len(config.messages) > 0
-    assert config.messages[0].content == 'Hello, Sandy!'
+    assert config.messages[0].content == 'Hello, Sandy! Is your last name Beaches?'
     assert config.enabled is True
 
     assert config.model is not None
     assert config.model.id == 'fakeModel'
     assert config.model.get_parameter('temperature') is None
     assert config.model.get_parameter('maxTokens') is None
     assert config.model.get_parameter('extra-attribute') == 'I can be anything I set my mind/type to'
+
+
+def test_multi_context_interpolation(ldai_client: LDAIClient):
+    user_context = Context.builder('user-key').name("Sandy").build()
+    org_context = Context.builder('org-key').kind('org').name("LaunchDarkly").set('shortname', 'LD').build()
+    context = Context.multi_builder().add(user_context).add(org_context).build()
+    default_value = AIConfig(enabled=True, model=ModelConfig('fake-model'), messages=[])
+    variables = {'name': 'World'}
+
+    config, _ = ldai_client.config(
+        'multi-ctx-interpolation', context, default_value, variables
+    )
+
+    assert config.messages is not None
+    assert len(config.messages) > 0
+    assert config.messages[0].content == 'Hello, Sandy! Do you work for LD?'
+    assert config.enabled is True
+
+    assert config.model is not None

From 03329e2ae54fdb444c817c907615c95d87b14d23 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com>
Date: Tue, 3 Dec 2024 16:26:09 -0500
Subject: [PATCH 4/4] chore(main): release 0.4.0 (#26)

:robot: I have created a release *beep* *boop*
---


## [0.4.0](https://github.com/launchdarkly/python-server-sdk-ai/compare/0.3.0...0.4.0) (2024-12-02)


### Features

* Return AIConfig and LDAITracker separately [#23](https://github.com/launchdarkly/python-server-sdk-ai/issues/23) ([96f888f](https://github.com/launchdarkly/python-server-sdk-ai/commit/96f888f50503cc2e9e2c30bf1c21f80a2773c8b5))


### Bug Fixes

* Fix context usage for message interpolation ([#24](https://github.com/launchdarkly/python-server-sdk-ai/issues/24)) ([1159aee](https://github.com/launchdarkly/python-server-sdk-ai/commit/1159aeeda7c46cf2dab93f209929dbad5d35dc80))

---


This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please).
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
---
 .release-please-manifest.json |  2 +-
 CHANGELOG.md                  | 12 ++++++++++++
 PROVENANCE.md                 |  2 +-
 ldai/__init__.py              |  2 +-
 pyproject.toml                |  2 +-
 5 files changed, 16 insertions(+), 4 deletions(-)

diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 0ee8c01..2537c1f 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "0.3.0"
+  ".": "0.4.0"
 }

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3a3e7d5..ee4532c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,18 @@
 
 All notable changes to the LaunchDarkly Python AI package will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org).
 
+## [0.4.0](https://github.com/launchdarkly/python-server-sdk-ai/compare/0.3.0...0.4.0) (2024-12-02)
+
+
+### Features
+
+* Return AIConfig and LDAITracker separately [#23](https://github.com/launchdarkly/python-server-sdk-ai/issues/23) ([96f888f](https://github.com/launchdarkly/python-server-sdk-ai/commit/96f888f50503cc2e9e2c30bf1c21f80a2773c8b5))
+
+
+### Bug Fixes
+
+* Fix context usage for message interpolation ([#24](https://github.com/launchdarkly/python-server-sdk-ai/issues/24)) ([1159aee](https://github.com/launchdarkly/python-server-sdk-ai/commit/1159aeeda7c46cf2dab93f209929dbad5d35dc80))
+
 ## [0.3.0](https://github.com/launchdarkly/python-server-sdk-ai/compare/0.2.0...0.3.0) (2024-11-22)

diff --git a/PROVENANCE.md b/PROVENANCE.md
index 698b210..f231637 100644
--- a/PROVENANCE.md
+++ b/PROVENANCE.md
@@ -10,7 +10,7 @@ To verify SLSA provenance attestations, we recommend using [slsa-verifier](https://github.com/slsa-framework/slsa-verifier).
 
 ```
 # Set the version of the library to verify
-VERSION=0.3.0
+VERSION=0.4.0
 ```

diff --git a/ldai/__init__.py b/ldai/__init__.py
index 7e1d86a..9ce25b8 100644
--- a/ldai/__init__.py
+++ b/ldai/__init__.py
@@ -1 +1 @@
-__version__ = "0.3.0" # x-release-please-version
+__version__ = "0.4.0" # x-release-please-version

diff --git a/pyproject.toml b/pyproject.toml
index 943049e..4606e1f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "launchdarkly-server-sdk-ai"
-version = "0.3.0"
+version = "0.4.0"
 description = "LaunchDarkly SDK for AI"
 authors = ["LaunchDarkly "]
 license = "Apache-2.0"
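
A brief usage sketch of the API after this patch series, covering the tuple return from PATCH 1/4 and the `ldctx` interpolation fix from PATCH 3/4. This is illustrative only: the SDK key and the 'model-config' flag key are placeholders, and it assumes an `LDClient` initialized in the usual way.

```python
from ldclient import Config, Context, LDClient
from ldai.client import AIConfig, LDAIClient, LDMessage, ModelConfig

# Placeholder SDK key; in real use this comes from your environment.
ldai_client = LDAIClient(LDClient(Config('sdk-key')))
context = Context.builder('user-key').name('Sandy').build()

# Defaults no longer carry a tracker (PATCH 1/4); AIConfig is a frozen dataclass.
default_value = AIConfig(
    enabled=True,
    model=ModelConfig('fake-model', parameters={'temperature': 0.5}),
    messages=[LDMessage(role='system', content='Hello, {{ldctx.name}}!')],
)

# config() now returns the evaluated AIConfig and its tracker as a tuple.
config, tracker = ldai_client.config('model-config', context, default_value)

if config.enabled and config.messages:
    # After PATCH 3/4, ldctx is populated from context.to_dict(), so this
    # would print 'Hello, Sandy!' when the flag serves that message.
    print(config.messages[0].content)
```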