Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
"packages/sdk/server-ai",
"packages/sdk/server-ai/examples/bedrock",
"packages/sdk/server-ai/examples/openai",
"packages/sdk/server-ai/examples/tracked-chat",
"packages/sdk/server-ai/examples/vercel-ai",
"packages/telemetry/browser-telemetry",
"contract-tests",
Expand Down
49 changes: 49 additions & 0 deletions packages/sdk/server-ai/examples/tracked-chat/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
# Tracked Chat Example

This example demonstrates how to use the LaunchDarkly AI SDK chat functionality for tracked chat interactions, using OpenAI as the AI provider.

## Prerequisites

1. A LaunchDarkly account and SDK key
1. An OpenAI API key (for the AI provider)
1. Node.js 16 or later

## Setup

1. Install dependencies:
```bash
yarn install
```

1. Set up environment variables:
```bash
cp .env.example .env
```

Edit `.env` and add your keys:
```
LAUNCHDARKLY_SDK_KEY=your-sdk-key-here
OPENAI_API_KEY=your-openai-api-key-here
LAUNCHDARKLY_AI_CONFIG_KEY=sample-ai-config
```

1. Create an AI Config in LaunchDarkly:
- Navigate to the AI Configs section in your LaunchDarkly dashboard
- Create a new AI Config with the key `sample-ai-config`
- Add a variation with the following settings:
- **Model Selection**: Select "OpenAI" as the provider and "gpt-3.5-turbo" as the model
- **Messages**: Add a system message with the content: "You are a helpful assistant for {{companyName}}. You should be friendly and informative."
- Save the variation
- Update the default target rule to use the newly created variation

## Running the Example

```bash
yarn start
```

This will:
1. Initialize the LaunchDarkly client
1. Create a chat configuration using the AI Config
1. Send a message to the AI and display the response
1. Automatically track interaction metrics (duration, tokens, success/error)
27 changes: 27 additions & 0 deletions packages/sdk/server-ai/examples/tracked-chat/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
{
"name": "tracked-chat-example",
"version": "1.0.0",
"description": "Example demonstrating LaunchDarkly AI SDK chat functionality with multiple providers",
"type": "module",
"scripts": {
"build": "tsc",
"start": "yarn build && node ./dist/index.js"
},
"dependencies": {
"@ai-sdk/google": "^2.0.20",
"@langchain/core": "^0.3.78",
"@langchain/google-genai": "^0.2.18",
"@launchdarkly/node-server-sdk": "^9.0.0",
"@launchdarkly/server-sdk-ai": "0.12.1",
"@launchdarkly/server-sdk-ai-langchain": "^0.1.0",
"@launchdarkly/server-sdk-ai-openai": "^0.1.0",
"@launchdarkly/server-sdk-ai-vercel": "^0.1.0",
"dotenv": "^16.0.0",
"langchain": "^0.1.0"
},
"devDependencies": {
"@types/node": "^20.0.0",
"tsx": "^4.0.0",
"typescript": "^5.0.0"
}
}
74 changes: 74 additions & 0 deletions packages/sdk/server-ai/examples/tracked-chat/src/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
/* eslint-disable no-console */
import { init, type LDContext } from '@launchdarkly/node-server-sdk';
import { initAi } from '@launchdarkly/server-sdk-ai';

// Read configuration from the environment.
const sdkKey = process.env.LAUNCHDARKLY_SDK_KEY;
const aiConfigKey = process.env.LAUNCHDARKLY_AI_CONFIG_KEY || 'sample-ai-config';

// The SDK key is mandatory — bail out early with a clear message if it is missing.
if (!sdkKey) {
  console.error('*** Please set the LAUNCHDARKLY_SDK_KEY env first');
  process.exit(1);
}

// Start the LaunchDarkly client with the provided SDK key.
const ldClient = init(sdkKey);

// Evaluation context for the demo. It should appear on your LaunchDarkly
// contexts dashboard shortly after the demo runs.
const context: LDContext = {
  kind: 'user',
  key: 'example-user-key',
  name: 'Sandy',
};

/**
 * Runs the tracked-chat demo: waits for the LaunchDarkly SDK to initialize,
 * builds a chat session from the AI Config, sends one user message, and
 * prints the AI response. Interaction metrics (duration, tokens,
 * success/error) are tracked automatically by the chat wrapper.
 */
async function main(): Promise<void> {
  try {
    await ldClient.waitForInitialization({ timeout: 10 });
    console.log('*** SDK successfully initialized');
  } catch (error: unknown) {
    // Report the failure on stderr (not stdout) and exit non-zero.
    console.error(`*** SDK failed to initialize: ${error}`);
    process.exit(1);
  }

  const aiClient = initAi(ldClient);

  // Fallback configuration used if the AI Config cannot be fetched.
  const defaultValue = {
    enabled: true,
    model: { name: 'gpt-3.5-turbo' },
    messages: [{ role: 'system' as const, content: 'You are a helpful assistant.' }],
    provider: { name: 'openai' },
  };

  // Alternatively, you can provide a disabled default value:
  // const defaultValue = {
  //   enabled: false,
  // };

  // Get the AI chat configuration from LaunchDarkly. The final argument
  // supplies variables (e.g. {{companyName}}) interpolated into the messages.
  const chat = await aiClient.initChat(aiConfigKey, context, defaultValue, {
    companyName: 'LaunchDarkly',
  });

  if (!chat) {
    console.log('*** AI chat configuration is not enabled');
    process.exit(0);
  }

  // Example of using the chat functionality.
  console.log('\n*** Starting chat conversation:');
  try {
    const userInput = 'Hello! Can you help me understand how your company can help me?';
    console.log('User Input:', userInput);

    const response = await chat.invoke(userInput);

    console.log('AI Response:', response.message.content);

    console.log('Success.');
  } catch (err: unknown) {
    // Report chat failures on stderr; metrics for the failed interaction
    // are tracked automatically by the SDK.
    console.error('Error:', err);
  }
}

// main() handles its own expected failures, but anything it does not wrap in
// try/catch (e.g. initChat rejecting) would otherwise surface as an unhandled
// promise rejection — catch it explicitly and exit non-zero.
main().catch((err: unknown) => {
  console.error('Unhandled error:', err);
  process.exit(1);
});
18 changes: 18 additions & 0 deletions packages/sdk/server-ai/examples/tracked-chat/tsconfig.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "ESNext",
"moduleResolution": "node",
"esModuleInterop": true,
"allowSyntheticDefaultImports": true,
"strict": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"outDir": "./dist",
"rootDir": "./src",
"declaration": true,
"sourceMap": true
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
}
10 changes: 10 additions & 0 deletions release-please-config.json
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,16 @@
"type": "json",
"path": "examples/openai/package.json",
"jsonpath": "$.dependencies['@launchdarkly/server-sdk-ai']"
},
{
"type": "json",
"path": "examples/tracked-chat/package.json",
"jsonpath": "$.dependencies['@launchdarkly/server-sdk-ai']"
},
{
"type": "json",
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this one was previously missing so I added it in while adding the trackedchat.

"path": "examples/vercel-ai/package.json",
"jsonpath": "$.dependencies['@launchdarkly/server-sdk-ai']"
}
]
},
Expand Down