Skip to content

Commit 0a41fc8

Browse files
committed
feat(cloudflare,vercel-edge): Add support for LangChain instrumentation
1 parent 6cda4e0 commit 0a41fc8

File tree

6 files changed

+348
-0
lines changed

6 files changed

+348
-0
lines changed
Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
import * as Sentry from '@sentry/cloudflare';
2+
import { MockChain, MockChatModel, MockTool } from './mocks';
3+
4+
// Environment bindings provided to the Worker (populated from wrangler configuration).
interface Env {
  // Sentry DSN for this test worker; read by the SDK init callback below.
  SENTRY_DSN: string;
}
8+
export default Sentry.withSentry(
9+
(env: Env) => ({
10+
dsn: env.SENTRY_DSN,
11+
tracesSampleRate: 1.0,
12+
}),
13+
{
14+
async fetch(_request, _env, _ctx) {
15+
// Create LangChain callback handler
16+
const callbackHandler = Sentry.createLangChainCallbackHandler({
17+
recordInputs: false,
18+
recordOutputs: false,
19+
});
20+
21+
// Test 1: Chat model invocation
22+
const chatModel = new MockChatModel({
23+
model: 'claude-3-5-sonnet-20241022',
24+
temperature: 0.7,
25+
maxTokens: 100,
26+
});
27+
28+
await chatModel.invoke('Tell me a joke', {
29+
callbacks: [callbackHandler],
30+
});
31+
32+
// Test 2: Chain invocation
33+
const chain = new MockChain('my_test_chain');
34+
await chain.invoke(
35+
{ input: 'test input' },
36+
{
37+
callbacks: [callbackHandler],
38+
},
39+
);
40+
41+
// Test 3: Tool invocation
42+
const tool = new MockTool('search_tool');
43+
await tool.call('search query', {
44+
callbacks: [callbackHandler],
45+
});
46+
47+
return new Response(JSON.stringify({ success: true }));
48+
},
49+
},
50+
);
Lines changed: 226 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,226 @@
1+
// Mock LangChain types and classes for testing the callback handler

// Minimal callback handler interface to match LangChain's callback handler signature.
// Only the hooks exercised by these mocks are declared; every hook is optional,
// mirroring LangChain's BaseCallbackHandler where any subset may be implemented.
export interface CallbackHandler {
  handleChatModelStart?: (
    llm: unknown,
    messages: unknown,
    runId: string,
    parentRunId?: string,
    extraParams?: Record<string, unknown>,
    tags?: string[],
    metadata?: Record<string, unknown>,
    runName?: string,
  ) => unknown;
  handleLLMEnd?: (output: unknown, runId: string) => unknown;
  handleChainStart?: (chain: { name?: string }, inputs: Record<string, unknown>, runId: string) => unknown;
  handleChainEnd?: (outputs: unknown, runId: string) => unknown;
  handleToolStart?: (tool: { name?: string }, input: string, runId: string) => unknown;
  handleToolEnd?: (output: unknown, runId: string) => unknown;
}

// A single chat message in the simplified role/content form accepted by MockChatModel.
export interface LangChainMessage {
  role: string;
  content: string;
}

// Shape of an LLM result: a matrix of generations plus provider-specific
// llmOutput (token usage, model name, response id) as produced by LangChain.
export interface LangChainLLMResult {
  generations: Array<
    Array<{
      text: string;
      generationInfo?: {
        [key: string]: unknown;
        finish_reason?: string;
      };
    }>
  >;
  llmOutput?: {
    [key: string]: unknown;
    tokenUsage?: {
      promptTokens?: number;
      completionTokens?: number;
      totalTokens?: number;
    };
    model?: string;
    id?: string;
  };
}

// Constructor parameters for MockChatModel. The index signature admits extra
// keys, matching LangChain's loosely-typed invocation params.
export interface InvocationParams {
  [key: string]: unknown;
  model: string;
  temperature?: number;
  maxTokens?: number;
}
56+
// Mock LangChain Chat Model
57+
export class MockChatModel {
58+
private _model: string;
59+
private _temperature?: number;
60+
private _maxTokens?: number;
61+
62+
public constructor(params: InvocationParams) {
63+
this._model = params.model;
64+
this._temperature = params.temperature;
65+
this._maxTokens = params.maxTokens;
66+
}
67+
68+
public async invoke(
69+
messages: LangChainMessage[] | string,
70+
options?: { callbacks?: CallbackHandler[] },
71+
): Promise<LangChainLLMResult> {
72+
const callbacks = options?.callbacks || [];
73+
const runId = crypto.randomUUID();
74+
75+
// Get invocation params to match LangChain's signature
76+
const invocationParams = {
77+
model: this._model,
78+
temperature: this._temperature,
79+
max_tokens: this._maxTokens,
80+
};
81+
82+
// Create serialized representation similar to LangChain
83+
const serialized = {
84+
lc: 1,
85+
type: 'constructor',
86+
id: ['langchain', 'anthropic', 'anthropic'], // Third element is used as system provider
87+
kwargs: invocationParams,
88+
};
89+
90+
// Prepare messages in LangChain format
91+
const messageArray = typeof messages === 'string' ? [{ role: 'user', content: messages }] : messages;
92+
93+
const formattedMessages = [
94+
messageArray.map(msg => ({
95+
lc: 1,
96+
type: 'human',
97+
id: ['langchain', 'schema', 'HumanMessage'],
98+
kwargs: { content: msg.content },
99+
})),
100+
];
101+
102+
// Call handleChatModelStart
103+
// Pass tags as a record with invocation_params for proper extraction
104+
for (const callback of callbacks) {
105+
if (callback.handleChatModelStart) {
106+
await callback.handleChatModelStart(
107+
serialized,
108+
formattedMessages,
109+
runId,
110+
undefined,
111+
undefined,
112+
{ invocation_params: invocationParams } as unknown as string[], // LangChain can pass tags as either string[] or record
113+
{ ls_model_name: this._model, ls_provider: 'anthropic' },
114+
);
115+
}
116+
}
117+
118+
// Simulate API call
119+
await new Promise(resolve => setTimeout(resolve, 10));
120+
121+
// Create mock result
122+
const result: LangChainLLMResult = {
123+
generations: [
124+
[
125+
{
126+
text: 'Mock response from LangChain!',
127+
generationInfo: {
128+
finish_reason: 'stop',
129+
},
130+
},
131+
],
132+
],
133+
llmOutput: {
134+
tokenUsage: {
135+
promptTokens: 10,
136+
completionTokens: 15,
137+
totalTokens: 25,
138+
},
139+
model: this._model,
140+
id: 'chatcmpl_mock123',
141+
},
142+
};
143+
144+
// Call handleLLMEnd
145+
for (const callback of callbacks) {
146+
if (callback.handleLLMEnd) {
147+
await callback.handleLLMEnd(result, runId);
148+
}
149+
}
150+
151+
return result;
152+
}
153+
}
154+
155+
// Mock LangChain Chain
156+
export class MockChain {
157+
private _name: string;
158+
159+
public constructor(name: string) {
160+
this._name = name;
161+
}
162+
163+
public async invoke(
164+
inputs: Record<string, unknown>,
165+
options?: { callbacks?: CallbackHandler[] },
166+
): Promise<Record<string, unknown>> {
167+
const callbacks = options?.callbacks || [];
168+
const runId = crypto.randomUUID();
169+
170+
// Call handleChainStart
171+
for (const callback of callbacks) {
172+
if (callback.handleChainStart) {
173+
await callback.handleChainStart({ name: this._name }, inputs, runId);
174+
}
175+
}
176+
177+
// Simulate processing
178+
await new Promise(resolve => setTimeout(resolve, 10));
179+
180+
const outputs = { result: 'Chain execution completed!' };
181+
182+
// Call handleChainEnd
183+
for (const callback of callbacks) {
184+
if (callback.handleChainEnd) {
185+
await callback.handleChainEnd(outputs, runId);
186+
}
187+
}
188+
189+
return outputs;
190+
}
191+
}
192+
193+
// Mock LangChain Tool
194+
export class MockTool {
195+
private _name: string;
196+
197+
public constructor(name: string) {
198+
this._name = name;
199+
}
200+
201+
public async call(input: string, options?: { callbacks?: CallbackHandler[] }): Promise<string> {
202+
const callbacks = options?.callbacks || [];
203+
const runId = crypto.randomUUID();
204+
205+
// Call handleToolStart
206+
for (const callback of callbacks) {
207+
if (callback.handleToolStart) {
208+
await callback.handleToolStart({ name: this._name }, input, runId);
209+
}
210+
}
211+
212+
// Simulate tool execution
213+
await new Promise(resolve => setTimeout(resolve, 10));
214+
215+
const output = `Tool ${this._name} executed with input: ${input}`;
216+
217+
// Call handleToolEnd
218+
for (const callback of callbacks) {
219+
if (callback.handleToolEnd) {
220+
await callback.handleToolEnd(output, runId);
221+
}
222+
}
223+
224+
return output;
225+
}
226+
}
Lines changed: 64 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,64 @@
1+
import { expect, it } from 'vitest';
2+
import { createRunner } from '../../../runner';
3+
4+
// These tests are not exhaustive because the instrumentation is
5+
// already tested in the node integration tests and we merely
6+
// want to test that the instrumentation does not break in our
7+
// cloudflare SDK.
8+
9+
it('traces langchain chat model, chain, and tool invocations', async ({ signal }) => {
10+
const runner = createRunner(__dirname)
11+
.ignore('event')
12+
.expect(envelope => {
13+
const transactionEvent = envelope[1]?.[0]?.[1] as any;
14+
15+
expect(transactionEvent.transaction).toBe('GET /');
16+
expect(transactionEvent.spans).toEqual(
17+
expect.arrayContaining([
18+
// Chat model span
19+
expect.objectContaining({
20+
data: expect.objectContaining({
21+
'gen_ai.operation.name': 'chat',
22+
'sentry.op': 'gen_ai.chat',
23+
'sentry.origin': 'auto.ai.langchain',
24+
'gen_ai.system': 'anthropic',
25+
'gen_ai.request.model': 'claude-3-5-sonnet-20241022',
26+
'gen_ai.request.temperature': 0.7,
27+
'gen_ai.request.max_tokens': 100,
28+
'gen_ai.usage.input_tokens': 10,
29+
'gen_ai.usage.output_tokens': 15,
30+
'gen_ai.usage.total_tokens': 25,
31+
}),
32+
description: 'chat claude-3-5-sonnet-20241022',
33+
op: 'gen_ai.chat',
34+
origin: 'auto.ai.langchain',
35+
}),
36+
// Chain span
37+
expect.objectContaining({
38+
data: expect.objectContaining({
39+
'sentry.origin': 'auto.ai.langchain',
40+
'sentry.op': 'gen_ai.invoke_agent',
41+
'langchain.chain.name': 'my_test_chain',
42+
}),
43+
description: 'chain my_test_chain',
44+
op: 'gen_ai.invoke_agent',
45+
origin: 'auto.ai.langchain',
46+
}),
47+
// Tool span
48+
expect.objectContaining({
49+
data: expect.objectContaining({
50+
'sentry.origin': 'auto.ai.langchain',
51+
'sentry.op': 'gen_ai.execute_tool',
52+
'gen_ai.tool.name': 'search_tool',
53+
}),
54+
description: 'execute_tool search_tool',
55+
op: 'gen_ai.execute_tool',
56+
origin: 'auto.ai.langchain',
57+
}),
58+
]),
59+
);
60+
})
61+
.start(signal);
62+
await runner.makeRequest('get', '/');
63+
await runner.completed();
64+
});
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
{
2+
"name": "worker-name",
3+
"compatibility_date": "2025-06-17",
4+
"main": "index.ts",
5+
"compatibility_flags": ["nodejs_compat"],
6+
}

packages/cloudflare/src/index.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -96,6 +96,7 @@ export {
9696
wrapMcpServerWithSentry,
9797
consoleLoggingIntegration,
9898
createConsolaReporter,
99+
createLangChainCallbackHandler,
99100
featureFlagsIntegration,
100101
growthbookIntegration,
101102
logger,

packages/vercel-edge/src/index.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -95,6 +95,7 @@ export {
9595
wrapMcpServerWithSentry,
9696
consoleLoggingIntegration,
9797
createConsolaReporter,
98+
createLangChainCallbackHandler,
9899
featureFlagsIntegration,
99100
logger,
100101
} from '@sentry/core';

0 commit comments

Comments (0)