Skip to content

Commit 628a513

Browse files
committed
feat(cloudflare,vercel-edge): Add support for LangChain instrumentation
1 parent e05acdd commit 628a513

File tree

6 files changed

+318
-0
lines changed

6 files changed

+318
-0
lines changed
Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
import * as Sentry from '@sentry/cloudflare';
2+
import { MockChain, MockChatModel, MockTool } from './mocks';
3+
4+
// Bindings made available to the Worker at runtime (configured via wrangler).
interface Env {
  // DSN telling the Sentry SDK where to report events for this Worker.
  SENTRY_DSN: string;
}
8+
export default Sentry.withSentry(
9+
(env: Env) => ({
10+
dsn: env.SENTRY_DSN,
11+
tracesSampleRate: 1.0,
12+
}),
13+
{
14+
async fetch(_request, _env, _ctx) {
15+
// Create LangChain callback handler
16+
const callbackHandler = Sentry.createLangChainCallbackHandler({
17+
recordInputs: false,
18+
recordOutputs: false,
19+
});
20+
21+
// Test 1: Chat model invocation
22+
const chatModel = new MockChatModel({
23+
model: 'claude-3-5-sonnet-20241022',
24+
temperature: 0.7,
25+
maxTokens: 100,
26+
});
27+
28+
await chatModel.invoke('Tell me a joke', {
29+
callbacks: [callbackHandler],
30+
});
31+
32+
// Test 2: Chain invocation
33+
const chain = new MockChain('my_test_chain');
34+
await chain.invoke(
35+
{ input: 'test input' },
36+
{
37+
callbacks: [callbackHandler],
38+
},
39+
);
40+
41+
// Test 3: Tool invocation
42+
const tool = new MockTool('search_tool');
43+
await tool.call('search query', {
44+
callbacks: [callbackHandler],
45+
});
46+
47+
return new Response(JSON.stringify({ success: true }));
48+
},
49+
},
50+
);
Lines changed: 196 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,196 @@
1+
// Mock LangChain types and classes for testing the callback handler
2+
3+
// Minimal callback handler interface to match LangChain's callback handler signature
4+
export interface CallbackHandler {
5+
handleChatModelStart?: (
6+
llm: unknown,
7+
messages: unknown,
8+
runId: string,
9+
parentRunId?: string,
10+
extraParams?: Record<string, unknown>,
11+
tags?: string[],
12+
metadata?: Record<string, unknown>,
13+
runName?: string,
14+
) => unknown;
15+
handleLLMEnd?: (output: unknown, runId: string) => unknown;
16+
handleChainStart?: (chain: { name?: string }, inputs: Record<string, unknown>, runId: string) => unknown;
17+
handleChainEnd?: (outputs: unknown, runId: string) => unknown;
18+
handleToolStart?: (tool: { name?: string }, input: string, runId: string) => unknown;
19+
handleToolEnd?: (output: unknown, runId: string) => unknown;
20+
}
21+
22+
export interface LangChainMessage {
23+
role: string;
24+
content: string;
25+
}
26+
27+
export interface LangChainLLMResult {
28+
generations: Array<
29+
Array<{
30+
text: string;
31+
generationInfo?: Record<string, unknown>;
32+
}>
33+
>;
34+
llmOutput?: {
35+
tokenUsage?: {
36+
promptTokens?: number;
37+
completionTokens?: number;
38+
totalTokens?: number;
39+
};
40+
};
41+
}
42+
43+
export interface InvocationParams {
44+
model: string;
45+
temperature?: number;
46+
maxTokens?: number;
47+
}
48+
49+
// Mock LangChain Chat Model
50+
export class MockChatModel {
51+
private _model: string;
52+
private _temperature?: number;
53+
private _maxTokens?: number;
54+
55+
public constructor(params: InvocationParams) {
56+
this._model = params.model;
57+
this._temperature = params.temperature;
58+
this._maxTokens = params.maxTokens;
59+
}
60+
61+
public async invoke(
62+
messages: LangChainMessage[] | string,
63+
options?: { callbacks?: CallbackHandler[] },
64+
): Promise<LangChainLLMResult> {
65+
const callbacks = options?.callbacks || [];
66+
const runId = crypto.randomUUID();
67+
68+
// Get invocation params to match LangChain's signature
69+
const invocationParams = {
70+
model: this._model,
71+
temperature: this._temperature,
72+
max_tokens: this._maxTokens,
73+
};
74+
75+
// Create serialized representation similar to LangChain
76+
const serialized = {
77+
lc: 1,
78+
type: 'constructor',
79+
id: ['langchain', 'anthropic', 'anthropic'], // Third element is used as system provider
80+
kwargs: invocationParams,
81+
};
82+
83+
// Call handleChatModelStart
84+
// Pass tags as a record with invocation_params for proper extraction
85+
for (const callback of callbacks) {
86+
if (callback.handleChatModelStart) {
87+
await callback.handleChatModelStart(
88+
serialized,
89+
messages,
90+
runId,
91+
undefined,
92+
undefined,
93+
{ invocation_params: invocationParams } as unknown as string[], // LangChain can pass tags as either string[] or record
94+
{ ls_model_name: this._model, ls_provider: 'anthropic' },
95+
);
96+
}
97+
}
98+
99+
// Create mock result
100+
const result: LangChainLLMResult = {
101+
generations: [
102+
[
103+
{
104+
text: 'Mock response from LangChain!',
105+
generationInfo: {
106+
finish_reason: 'stop',
107+
},
108+
},
109+
],
110+
],
111+
llmOutput: {
112+
tokenUsage: {
113+
promptTokens: 10,
114+
completionTokens: 15,
115+
totalTokens: 25,
116+
},
117+
},
118+
};
119+
120+
// Call handleLLMEnd
121+
for (const callback of callbacks) {
122+
if (callback.handleLLMEnd) {
123+
await callback.handleLLMEnd(result, runId);
124+
}
125+
}
126+
127+
return result;
128+
}
129+
}
130+
131+
// Mock LangChain Chain
132+
export class MockChain {
133+
private _name: string;
134+
135+
public constructor(name: string) {
136+
this._name = name;
137+
}
138+
139+
public async invoke(
140+
inputs: Record<string, unknown>,
141+
options?: { callbacks?: CallbackHandler[] },
142+
): Promise<Record<string, unknown>> {
143+
const callbacks = options?.callbacks || [];
144+
const runId = crypto.randomUUID();
145+
146+
// Call handleChainStart
147+
for (const callback of callbacks) {
148+
if (callback.handleChainStart) {
149+
await callback.handleChainStart({ name: this._name }, inputs, runId);
150+
}
151+
}
152+
153+
const outputs = { result: 'Chain execution completed!' };
154+
155+
// Call handleChainEnd
156+
for (const callback of callbacks) {
157+
if (callback.handleChainEnd) {
158+
await callback.handleChainEnd(outputs, runId);
159+
}
160+
}
161+
162+
return outputs;
163+
}
164+
}
165+
166+
// Mock LangChain Tool
167+
export class MockTool {
168+
private _name: string;
169+
170+
public constructor(name: string) {
171+
this._name = name;
172+
}
173+
174+
public async call(input: string, options?: { callbacks?: CallbackHandler[] }): Promise<string> {
175+
const callbacks = options?.callbacks || [];
176+
const runId = crypto.randomUUID();
177+
178+
// Call handleToolStart
179+
for (const callback of callbacks) {
180+
if (callback.handleToolStart) {
181+
await callback.handleToolStart({ name: this._name }, input, runId);
182+
}
183+
}
184+
185+
const output = `Tool ${this._name} executed with input: ${input}`;
186+
187+
// Call handleToolEnd
188+
for (const callback of callbacks) {
189+
if (callback.handleToolEnd) {
190+
await callback.handleToolEnd(output, runId);
191+
}
192+
}
193+
194+
return output;
195+
}
196+
}
Lines changed: 64 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,64 @@
1+
import { expect, it } from 'vitest';
2+
import { createRunner } from '../../../runner';
3+
4+
// These tests are not exhaustive because the instrumentation is
5+
// already tested in the node integration tests and we merely
6+
// want to test that the instrumentation does not break in our
7+
// cloudflare SDK.
8+
9+
// Smoke test: one request through the worker should yield a single transaction
// whose spans cover all three LangChain operation types (chat, chain, tool).
it('traces langchain chat model, chain, and tool invocations', async ({ signal }) => {
  const runner = createRunner(__dirname)
    // Only the transaction envelope is asserted on; plain events are dropped.
    .ignore('event')
    .expect(envelope => {
      // envelope[1] holds the envelope items; [0]?.[1] is the first item's
      // payload — the transaction event emitted by the worker.
      const transactionEvent = envelope[1]?.[0]?.[1] as any;

      expect(transactionEvent.transaction).toBe('GET /');
      // arrayContaining: spans may include others; these three must be present.
      expect(transactionEvent.spans).toEqual(
        expect.arrayContaining([
          // Chat model span — request params and token usage are the fixed
          // values baked into MockChatModel.
          expect.objectContaining({
            data: expect.objectContaining({
              'gen_ai.operation.name': 'chat',
              'sentry.op': 'gen_ai.chat',
              'sentry.origin': 'auto.ai.langchain',
              'gen_ai.system': 'anthropic',
              'gen_ai.request.model': 'claude-3-5-sonnet-20241022',
              'gen_ai.request.temperature': 0.7,
              'gen_ai.request.max_tokens': 100,
              'gen_ai.usage.input_tokens': 10,
              'gen_ai.usage.output_tokens': 15,
              'gen_ai.usage.total_tokens': 25,
            }),
            description: 'chat claude-3-5-sonnet-20241022',
            op: 'gen_ai.chat',
            origin: 'auto.ai.langchain',
          }),
          // Chain span
          expect.objectContaining({
            data: expect.objectContaining({
              'sentry.origin': 'auto.ai.langchain',
              'sentry.op': 'gen_ai.invoke_agent',
              'langchain.chain.name': 'my_test_chain',
            }),
            description: 'chain my_test_chain',
            op: 'gen_ai.invoke_agent',
            origin: 'auto.ai.langchain',
          }),
          // Tool span
          expect.objectContaining({
            data: expect.objectContaining({
              'sentry.origin': 'auto.ai.langchain',
              'sentry.op': 'gen_ai.execute_tool',
              'gen_ai.tool.name': 'search_tool',
            }),
            description: 'execute_tool search_tool',
            op: 'gen_ai.execute_tool',
            origin: 'auto.ai.langchain',
          }),
        ]),
      );
    })
    .start(signal);
  // Drive the worker, then wait for every expected envelope to arrive.
  await runner.makeRequest('get', '/');
  await runner.completed();
});
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
{
2+
"name": "worker-name",
3+
"compatibility_date": "2025-06-17",
4+
"main": "index.ts",
5+
"compatibility_flags": ["nodejs_compat"],
6+
}

packages/cloudflare/src/index.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -96,6 +96,7 @@ export {
9696
wrapMcpServerWithSentry,
9797
consoleLoggingIntegration,
9898
createConsolaReporter,
99+
createLangChainCallbackHandler,
99100
featureFlagsIntegration,
100101
growthbookIntegration,
101102
logger,

packages/vercel-edge/src/index.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -95,6 +95,7 @@ export {
9595
wrapMcpServerWithSentry,
9696
consoleLoggingIntegration,
9797
createConsolaReporter,
98+
createLangChainCallbackHandler,
9899
featureFlagsIntegration,
99100
logger,
100101
} from '@sentry/core';

0 commit comments

Comments
 (0)