Skip to content
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 16 additions & 13 deletions .release-please-manifest.json
Original file line number Diff line number Diff line change
@@ -1,22 +1,25 @@
{
  "packages/ai-providers/server-ai-langchain": "0.1.0-alpha.0",
  "packages/sdk/akamai-base": "3.0.10",
  "packages/sdk/akamai-edgekv": "1.4.12",
  "packages/sdk/browser": "0.8.1",
  "packages/sdk/cloudflare": "2.7.10",
  "packages/sdk/combined-browser": "0.0.0",
  "packages/sdk/fastly": "0.2.1",
  "packages/sdk/react-native": "10.11.0",
  "packages/sdk/react-universal": "0.0.1",
  "packages/sdk/server-ai": "0.11.4",
  "packages/sdk/server-node": "9.10.2",
  "packages/sdk/svelte": "0.1.0",
  "packages/sdk/vercel": "1.3.34",
  "packages/shared/akamai-edgeworker-sdk": "2.0.10",
  "packages/shared/common": "2.19.0",
  "packages/shared/sdk-client": "1.15.1",
  "packages/shared/sdk-server": "2.16.2",
  "packages/shared/sdk-server-edge": "2.6.9",
  "packages/store/node-server-sdk-dynamodb": "6.2.14",
  "packages/store/node-server-sdk-redis": "4.2.14",
  "packages/telemetry/browser-telemetry": "1.0.11",
  "packages/telemetry/node-server-sdk-otel": "1.3.2",
  "packages/tooling/jest": "0.1.11"
}
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
{
"name": "@launchdarkly/js-core",
"workspaces": [
"packages/ai-providers/server-ai-langchain",
"packages/shared/common",
"packages/shared/sdk-client",
"packages/shared/sdk-server",
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
import { AIMessage, HumanMessage, SystemMessage } from 'langchain/schema';

import { LangChainProvider } from '../src/LangChainProvider';

describe('LangChainProvider', () => {
describe('convertMessagesToLangChain', () => {
it('converts system messages to SystemMessage', () => {
const messages = [{ role: 'system' as const, content: 'You are a helpful assistant.' }];
const result = LangChainProvider.convertMessagesToLangChain(messages);

expect(result).toHaveLength(1);
expect(result[0]).toBeInstanceOf(SystemMessage);
expect(result[0].content).toBe('You are a helpful assistant.');
});

it('converts user messages to HumanMessage', () => {
const messages = [{ role: 'user' as const, content: 'Hello, how are you?' }];
const result = LangChainProvider.convertMessagesToLangChain(messages);

expect(result).toHaveLength(1);
expect(result[0]).toBeInstanceOf(HumanMessage);
expect(result[0].content).toBe('Hello, how are you?');
});

it('converts assistant messages to AIMessage', () => {
const messages = [{ role: 'assistant' as const, content: 'I am doing well, thank you!' }];
const result = LangChainProvider.convertMessagesToLangChain(messages);

expect(result).toHaveLength(1);
expect(result[0]).toBeInstanceOf(AIMessage);
expect(result[0].content).toBe('I am doing well, thank you!');
});

it('converts multiple messages in order', () => {
const messages = [
{ role: 'system' as const, content: 'You are a helpful assistant.' },
{ role: 'user' as const, content: 'What is the weather like?' },
{ role: 'assistant' as const, content: 'I cannot check the weather.' },
];
const result = LangChainProvider.convertMessagesToLangChain(messages);

expect(result).toHaveLength(3);
expect(result[0]).toBeInstanceOf(SystemMessage);
expect(result[1]).toBeInstanceOf(HumanMessage);
expect(result[2]).toBeInstanceOf(AIMessage);
});

it('throws error for unsupported message role', () => {
const messages = [{ role: 'unknown' as any, content: 'Test message' }];

expect(() => LangChainProvider.convertMessagesToLangChain(messages)).toThrow(
'Unsupported message role: unknown'
);
});

it('handles empty message array', () => {
const result = LangChainProvider.convertMessagesToLangChain([]);

expect(result).toHaveLength(0);
});
});
});
9 changes: 9 additions & 0 deletions packages/ai-providers/server-ai-langchain/jest.config.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
module.exports = {
preset: 'ts-jest',
testEnvironment: 'node',
roots: ['<rootDir>/src'],
testMatch: ['**/__tests__/**/*.test.ts'],
collectCoverageFrom: ['src/**/*.ts', '!src/**/*.d.ts'],
coverageDirectory: 'coverage',
coverageReporters: ['text', 'lcov', 'html'],
};
55 changes: 55 additions & 0 deletions packages/ai-providers/server-ai-langchain/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
{
"name": "@launchdarkly/server-sdk-ai-langchain",
"version": "0.1.0-alpha.0",
"description": "LaunchDarkly AI SDK LangChain Provider for Server-Side JavaScript",
"homepage": "https://github.com/launchdarkly/js-core/tree/main/packages/ai-providers/server-ai-langchain",
"repository": {
"type": "git",
"url": "https://github.com/launchdarkly/js-core.git"
},
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "commonjs",
"scripts": {
"build": "npx tsc",
"lint": "npx eslint . --ext .ts",
"prettier": "prettier --write '**/*.@(js|ts|tsx|json|css)' --ignore-path ../../../.prettierignore",
"lint:fix": "yarn run lint --fix",
"check": "yarn prettier && yarn lint && yarn build && yarn test",
"test": "jest"
},
"keywords": [
"launchdarkly",
"ai",
"llm",
"langchain"
],
"author": "LaunchDarkly",
"license": "Apache-2.0",
"dependencies": {
"@langchain/core": ">=0.2.21 <0.3.0",
"@launchdarkly/server-sdk-ai": "0.11.4",
"langchain": "^0.2.11"
},
"devDependencies": {
"@launchdarkly/js-server-sdk-common": "2.16.2",
"@trivago/prettier-plugin-sort-imports": "^4.1.1",
"@types/jest": "^29.5.3",
"@typescript-eslint/eslint-plugin": "^6.20.0",
"@typescript-eslint/parser": "^6.20.0",
"eslint": "^8.45.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-config-airbnb-typescript": "^17.1.0",
"eslint-config-prettier": "^8.8.0",
"eslint-plugin-import": "^2.27.5",
"eslint-plugin-jest": "^27.6.3",
"eslint-plugin-prettier": "^5.0.0",
"jest": "^29.6.1",
"prettier": "^3.0.0",
"ts-jest": "^29.1.1",
"typescript": "5.1.6"
},
"peerDependencies": {
"@launchdarkly/js-server-sdk-common": "2.x"
}
}
123 changes: 123 additions & 0 deletions packages/ai-providers/server-ai-langchain/src/LangChainProvider.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,123 @@
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { AIMessage, HumanMessage, SystemMessage } from '@langchain/core/messages';
import { initChatModel } from 'langchain/chat_models/universal';

import {
LDAIConfig,
LDAIConfigTracker,
LDMessage,
LDTokenUsage,
} from '@launchdarkly/server-sdk-ai';

/**
* LangChain provider utilities and helper functions.
*/
export class LangChainProvider {
/**
* Map LaunchDarkly provider names to LangChain provider names.
* This method enables seamless integration between LaunchDarkly's standardized
* provider naming and LangChain's naming conventions.
*/
static mapProvider(ldProviderName: string): string {
const lowercasedName = ldProviderName.toLowerCase();

const mapping: Record<string, string> = {
gemini: 'google-genai',
};

return mapping[lowercasedName] || lowercasedName;
}

/**
* Create token usage information from a LangChain provider response.
* This method extracts token usage information from LangChain responses
* and returns a LaunchDarkly TokenUsage object.
*/
static createTokenUsage(langChainResponse: AIMessage): LDTokenUsage | undefined {
if (!langChainResponse?.response_metadata?.tokenUsage) {
return undefined;
}

const { tokenUsage } = langChainResponse.response_metadata;

return {
total: tokenUsage.totalTokens || 0,
input: tokenUsage.promptTokens || 0,
output: tokenUsage.completionTokens || 0,
};
}

/**
* Convert LaunchDarkly messages to LangChain messages.
* This helper method enables developers to work directly with LangChain message types
* while maintaining compatibility with LaunchDarkly's standardized message format.
*/
static convertMessagesToLangChain(
messages: LDMessage[],
): (HumanMessage | SystemMessage | AIMessage)[] {
return messages.map((msg) => {
switch (msg.role) {
case 'system':
return new SystemMessage(msg.content);
case 'user':
return new HumanMessage(msg.content);
case 'assistant':
return new AIMessage(msg.content);
default:
throw new Error(`Unsupported message role: ${msg.role}`);
}
});
}

/**
* Track metrics for a LangChain callable execution.
* This helper method enables developers to work directly with LangChain callables
* while ensuring consistent tracking behavior.
*/
static async trackMetricsOf(
tracker: LDAIConfigTracker,
callable: () => Promise<AIMessage>,
): Promise<AIMessage> {
return tracker.trackDurationOf(async () => {
try {
const result = await callable();

// Extract and track token usage if available
const tokenUsage = this.createTokenUsage(result);
if (tokenUsage) {
tracker.trackTokens({
total: tokenUsage.total,
input: tokenUsage.input,
output: tokenUsage.output,
});
}

tracker.trackSuccess();
return result;
} catch (error) {
tracker.trackError();
throw error;
}
});
}

/**
* Create a LangChain model from an AI configuration.
* This public helper method enables developers to initialize their own LangChain models
* using LaunchDarkly AI configurations.
*
* @param aiConfig The LaunchDarkly AI configuration
* @returns A Promise that resolves to a configured LangChain BaseChatModel
*/
static async createLangChainModel(aiConfig: LDAIConfig): Promise<BaseChatModel> {
const modelName = aiConfig.model?.name || '';
const provider = aiConfig.provider?.name || '';
const parameters = aiConfig.model?.parameters || {};

// Use LangChain's universal initChatModel to support multiple providers
return initChatModel(modelName, {
modelProvider: this.mapProvider(provider),
...parameters,
});
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { AIMessage, HumanMessage, SystemMessage } from '@langchain/core/messages';

import {
BaseTrackedChat,
ChatResponse,
LDAIConfig,
LDAIConfigTracker,
LDMessage,
} from '@launchdarkly/server-sdk-ai';

import { LangChainProvider } from './LangChainProvider';

/**
* LangChain-specific implementation of TrackedChat.
* This implementation integrates LangChain models with LaunchDarkly's tracking capabilities.
*/
/**
 * TrackedChat backed by a LangChain chat model.
 *
 * Bridges LaunchDarkly's message format and tracking hooks to a LangChain
 * BaseChatModel, and additionally exposes LangChain-native entry points for
 * callers who prefer to work with LangChain message types directly.
 */
export class LangChainTrackedChat extends BaseTrackedChat {
  private _llm: BaseChatModel;

  constructor(aiConfig: LDAIConfig, tracker: LDAIConfigTracker, llm: BaseChatModel) {
    super(aiConfig, tracker);
    this._llm = llm;
  }

  /**
   * Flatten a LangChain message `content` value to plain text.
   *
   * String content passes through; array content (e.g. multimodal parts)
   * keeps string items and the `text` of `{ type: 'text' }` parts, dropping
   * everything else; any other value is coerced with String().
   */
  private static _contentToString(raw: unknown): string {
    if (typeof raw === 'string') {
      return raw;
    }
    if (Array.isArray(raw)) {
      const pieces = raw.map((part: any) => {
        if (typeof part === 'string') return part;
        if (part.type === 'text') return part.text;
        return '';
      });
      return pieces.join('');
    }
    return String(raw);
  }

  /**
   * Provider-specific implementation that converts LDMessage[] to LangChain format,
   * invokes the model, and returns a ChatResponse.
   */
  protected async invokeModel(messages: LDMessage[]): Promise<ChatResponse> {
    const langChainMessages = LangChainProvider.convertMessagesToLangChain(messages);
    const response = await this._llm.invoke(langChainMessages);

    // Usage metadata is optional; undefined when the provider reports none.
    const usage = LangChainProvider.createTokenUsage(response);

    const assistantMessage: LDMessage = {
      role: 'assistant',
      content: LangChainTrackedChat._contentToString(response.content),
    };

    return { message: assistantMessage, usage };
  }

  /**
   * Invoke the underlying model with LangChain-native messages while
   * recording duration, token usage, and success/error on the tracker.
   */
  async trackLangChainInvoke(
    messages: (HumanMessage | SystemMessage | AIMessage)[],
  ): Promise<AIMessage> {
    const run = () => this._llm.invoke(messages);
    return LangChainProvider.trackMetricsOf(this.tracker, run);
  }

  /**
   * Get the underlying LangChain model instance.
   */
  async getChatModel(): Promise<BaseChatModel> {
    return this._llm;
  }
}
11 changes: 11 additions & 0 deletions packages/ai-providers/server-ai-langchain/src/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
/**
* This is the API reference for the LaunchDarkly AI SDK LangChain Provider for Server-Side JavaScript.
*
* This package provides LangChain integration for the LaunchDarkly AI SDK, allowing you to use
* LangChain models and chains with LaunchDarkly's tracking and configuration capabilities.
*
* @packageDocumentation
*/

export * from './LangChainTrackedChat';
export * from './LangChainProvider';
Loading