From 4b8a667abc8b69f027e027489d1d4d5ace82d159 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Tue, 10 Mar 2026 11:26:41 -0500 Subject: [PATCH 1/2] chore: Update comments for setting default --- .../server-ai-langchain/README.md | 15 ++++++++++++--- .../ai-providers/server-ai-openai/README.md | 19 ++++++++++++------- 2 files changed, 24 insertions(+), 10 deletions(-) diff --git a/packages/ai-providers/server-ai-langchain/README.md b/packages/ai-providers/server-ai-langchain/README.md index b027628..a58dc00 100644 --- a/packages/ai-providers/server-ai-langchain/README.md +++ b/packages/ai-providers/server-ai-langchain/README.md @@ -41,9 +41,18 @@ from ldai_langchain import LangChainProvider ld_client = LDClient(Config("your-sdk-key")) ai_client = init(ld_client) -# Get AI configuration +# Get AI configuration. Pass a default for improved resiliency when the flag is unavailable or +# LaunchDarkly is unreachable; omit for a disabled default. Example: +# from ldai.models import AICompletionConfigDefault, LDMessage, ModelConfig, ProviderConfig +# default = AICompletionConfigDefault( +# enabled=True, +# model=ModelConfig("gpt-4"), +# provider=ProviderConfig("openai"), +# messages=[LDMessage(role="system", content="You are a helpful assistant.")] +# ) +# config = ai_client.config("ai-config-key", context, default) context = Context.builder("user-123").build() -config = ai_client.config("ai-config-key", context, {}) +config = ai_client.config("ai-config-key", context) async def main(): # Create a LangChain provider from the AI configuration @@ -120,7 +129,7 @@ Use the provider with LaunchDarkly's tracking capabilities: ```python # Get the AI config with tracker -config = ai_client.config("ai-config-key", context, {}) +config = ai_client.config("ai-config-key", context) # Create provider provider = await LangChainProvider.create(config) diff --git a/packages/ai-providers/server-ai-openai/README.md b/packages/ai-providers/server-ai-openai/README.md index d8bb2f2..d7ddf40 
100644 --- a/packages/ai-providers/server-ai-openai/README.md +++ b/packages/ai-providers/server-ai-openai/README.md @@ -26,13 +26,18 @@ from ldai_openai import OpenAIProvider async def main(): # Initialize the AI client ai_client = AIClient(ld_client) - - # Get AI config - ai_config = ai_client.config( - "my-ai-config-key", - context, - default - ) + + # Get AI config. Pass a default for improved resiliency when the flag is unavailable or + # LaunchDarkly is unreachable; omit for a disabled default. Example: + # from ldai.models import AICompletionConfigDefault, LDMessage, ModelConfig, ProviderConfig + # default = AICompletionConfigDefault( + # enabled=True, + # model=ModelConfig("gpt-4"), + # provider=ProviderConfig("openai"), + # messages=[LDMessage(role="system", content="You are a helpful assistant.")] + # ) + # ai_config = ai_client.config("my-ai-config-key", context, default) + ai_config = ai_client.config("my-ai-config-key", context) # Create an OpenAI provider from the config provider = await OpenAIProvider.create(ai_config) From 3b254bbc1d869ecf43ddff8324baa5056a47659a Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Tue, 10 Mar 2026 11:29:16 -0500 Subject: [PATCH 2/2] chore: update to latest version --- packages/ai-providers/server-ai-langchain/pyproject.toml | 2 +- packages/ai-providers/server-ai-openai/pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/ai-providers/server-ai-langchain/pyproject.toml b/packages/ai-providers/server-ai-langchain/pyproject.toml index b145732..8c6bc0e 100644 --- a/packages/ai-providers/server-ai-langchain/pyproject.toml +++ b/packages/ai-providers/server-ai-langchain/pyproject.toml @@ -24,7 +24,7 @@ packages = [{ include = "ldai_langchain", from = "src" }] [tool.poetry.dependencies] python = ">=3.9,<4" -launchdarkly-server-sdk-ai = ">=0.12.0" +launchdarkly-server-sdk-ai = ">=0.16.0" langchain-core = ">=0.2.0" langchain = ">=0.2.0" diff --git 
a/packages/ai-providers/server-ai-openai/pyproject.toml b/packages/ai-providers/server-ai-openai/pyproject.toml index fd48885..f80bee7 100644 --- a/packages/ai-providers/server-ai-openai/pyproject.toml +++ b/packages/ai-providers/server-ai-openai/pyproject.toml @@ -24,7 +24,7 @@ packages = [{ include = "ldai_openai", from = "src" }] [tool.poetry.dependencies] python = ">=3.9,<4" -launchdarkly-server-sdk-ai = ">=0.12.0" +launchdarkly-server-sdk-ai = ">=0.16.0" openai = ">=1.0.0" [tool.poetry.group.dev.dependencies]