Skip to content

Commit 379de2c

Browse files
committed
add integration option
1 parent 5314fa7 commit 379de2c

File tree

6 files changed

+439
-236
lines changed

6 files changed

+439
-236
lines changed

dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts

Lines changed: 112 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ describe('Vercel AI integration', () => {
1212
spans: expect.arrayContaining([
1313
// First span - no telemetry config, should enable telemetry but not record inputs/outputs when sendDefaultPii: false
1414
expect.objectContaining({
15-
data: expect.objectContaining({
15+
data: {
1616
'ai.model.id': 'mock-model-id',
1717
'ai.model.provider': 'mock-provider',
1818
'ai.operationId': 'ai.generateText',
@@ -28,15 +28,15 @@ describe('Vercel AI integration', () => {
2828
'operation.name': 'ai.generateText',
2929
'sentry.op': 'ai.pipeline.generateText',
3030
'sentry.origin': 'auto.vercelai.otel',
31-
}),
31+
},
3232
description: 'generateText',
3333
op: 'ai.pipeline.generateText',
3434
origin: 'auto.vercelai.otel',
3535
status: 'ok',
3636
}),
3737
// Second span - explicitly enabled telemetry but recordInputs/recordOutputs not set, should not record when sendDefaultPii: false
3838
expect.objectContaining({
39-
data: expect.objectContaining({
39+
data: {
4040
'sentry.origin': 'auto.vercelai.otel',
4141
'sentry.op': 'ai.run.doGenerate',
4242
'operation.name': 'ai.generateText.doGenerate',
@@ -58,12 +58,72 @@ describe('Vercel AI integration', () => {
5858
'gen_ai.response.id': expect.any(String),
5959
'gen_ai.response.model': 'mock-model-id',
6060
'gen_ai.usage.total_tokens': 30,
61-
}),
61+
},
62+
description: 'generateText.doGenerate',
63+
op: 'ai.run.doGenerate',
64+
origin: 'auto.vercelai.otel',
65+
status: 'ok',
66+
}),
67+
// Third span - explicit telemetry enabled, should record inputs/outputs regardless of sendDefaultPii
68+
expect.objectContaining({
69+
data: {
70+
'ai.model.id': 'mock-model-id',
71+
'ai.model.provider': 'mock-provider',
72+
'ai.operationId': 'ai.generateText',
73+
'ai.pipeline.name': 'generateText',
74+
'ai.prompt': '{"prompt":"Where is the second span?"}',
75+
'ai.response.finishReason': 'stop',
76+
'ai.response.text': expect.any(String),
77+
'ai.settings.maxRetries': 2,
78+
'ai.settings.maxSteps': 1,
79+
'ai.streaming': false,
80+
'gen_ai.prompt': '{"prompt":"Where is the second span?"}',
81+
'gen_ai.response.model': 'mock-model-id',
82+
'gen_ai.usage.input_tokens': 10,
83+
'gen_ai.usage.output_tokens': 20,
84+
'gen_ai.usage.total_tokens': 30,
85+
'operation.name': 'ai.generateText',
86+
'sentry.op': 'ai.pipeline.generateText',
87+
'sentry.origin': 'auto.vercelai.otel',
88+
},
6289
description: 'generateText',
6390
op: 'ai.pipeline.generateText',
6491
origin: 'auto.vercelai.otel',
6592
status: 'ok',
6693
}),
94+
// Fourth span - doGenerate for explicit telemetry enabled call
95+
expect.objectContaining({
96+
data: {
97+
'sentry.origin': 'auto.vercelai.otel',
98+
'sentry.op': 'ai.run.doGenerate',
99+
'operation.name': 'ai.generateText.doGenerate',
100+
'ai.operationId': 'ai.generateText.doGenerate',
101+
'ai.model.provider': 'mock-provider',
102+
'ai.model.id': 'mock-model-id',
103+
'ai.settings.maxRetries': 2,
104+
'gen_ai.system': 'mock-provider',
105+
'gen_ai.request.model': 'mock-model-id',
106+
'ai.pipeline.name': 'generateText.doGenerate',
107+
'ai.streaming': false,
108+
'ai.response.finishReason': 'stop',
109+
'ai.response.model': 'mock-model-id',
110+
'ai.response.id': expect.any(String),
111+
'ai.response.text': expect.any(String),
112+
'ai.response.timestamp': expect.any(String),
113+
'ai.prompt.format': expect.any(String),
114+
'ai.prompt.messages': expect.any(String),
115+
'gen_ai.response.finish_reasons': ['stop'],
116+
'gen_ai.usage.input_tokens': 10,
117+
'gen_ai.usage.output_tokens': 20,
118+
'gen_ai.response.id': expect.any(String),
119+
'gen_ai.response.model': 'mock-model-id',
120+
'gen_ai.usage.total_tokens': 30,
121+
},
122+
description: 'generateText.doGenerate',
123+
op: 'ai.run.doGenerate',
124+
origin: 'auto.vercelai.otel',
125+
status: 'ok',
126+
}),
67127
]),
68128
};
69129

@@ -72,35 +132,67 @@ describe('Vercel AI integration', () => {
72132
spans: expect.arrayContaining([
73133
// First span - no telemetry config, should enable telemetry AND record inputs/outputs when sendDefaultPii: true
74134
expect.objectContaining({
75-
data: expect.objectContaining({
76-
'ai.completion_tokens.used': 20,
135+
data: {
77136
'ai.model.id': 'mock-model-id',
78137
'ai.model.provider': 'mock-provider',
79-
'ai.model_id': 'mock-model-id',
80-
'ai.prompt': '{"prompt":"Where is the first span?"}',
81138
'ai.operationId': 'ai.generateText',
82139
'ai.pipeline.name': 'generateText',
83-
'ai.prompt_tokens.used': 10,
140+
'ai.prompt': '{"prompt":"Where is the first span?"}',
84141
'ai.response.finishReason': 'stop',
85-
'ai.input_messages': '{"prompt":"Where is the first span?"}',
142+
'ai.response.text': 'First span here!',
86143
'ai.settings.maxRetries': 2,
87144
'ai.settings.maxSteps': 1,
88145
'ai.streaming': false,
89-
'ai.total_tokens.used': 30,
90-
'ai.usage.completionTokens': 20,
91-
'ai.usage.promptTokens': 10,
146+
'gen_ai.prompt': '{"prompt":"Where is the first span?"}',
147+
'gen_ai.response.model': 'mock-model-id',
148+
'gen_ai.usage.input_tokens': 10,
149+
'gen_ai.usage.output_tokens': 20,
150+
'gen_ai.usage.total_tokens': 30,
92151
'operation.name': 'ai.generateText',
93152
'sentry.op': 'ai.pipeline.generateText',
94153
'sentry.origin': 'auto.vercelai.otel',
95-
}),
154+
},
96155
description: 'generateText',
97156
op: 'ai.pipeline.generateText',
98157
origin: 'auto.vercelai.otel',
99158
status: 'ok',
100159
}),
101-
// Second span - explicitly enabled telemetry, should record inputs/outputs regardless of sendDefaultPii
160+
// Second span - doGenerate for first call, should also include input/output fields when sendDefaultPii: true
161+
expect.objectContaining({
162+
data: {
163+
'ai.model.id': 'mock-model-id',
164+
'ai.model.provider': 'mock-provider',
165+
'ai.operationId': 'ai.generateText.doGenerate',
166+
'ai.pipeline.name': 'generateText.doGenerate',
167+
'ai.prompt.format': 'prompt',
168+
'ai.prompt.messages': '[{"role":"user","content":[{"type":"text","text":"Where is the first span?"}]}]',
169+
'ai.response.finishReason': 'stop',
170+
'ai.response.id': expect.any(String),
171+
'ai.response.model': 'mock-model-id',
172+
'ai.response.text': 'First span here!',
173+
'ai.response.timestamp': expect.any(String),
174+
'ai.settings.maxRetries': 2,
175+
'ai.streaming': false,
176+
'gen_ai.request.model': 'mock-model-id',
177+
'gen_ai.response.finish_reasons': ['stop'],
178+
'gen_ai.response.id': expect.any(String),
179+
'gen_ai.response.model': 'mock-model-id',
180+
'gen_ai.system': 'mock-provider',
181+
'gen_ai.usage.input_tokens': 10,
182+
'gen_ai.usage.output_tokens': 20,
183+
'gen_ai.usage.total_tokens': 30,
184+
'operation.name': 'ai.generateText.doGenerate',
185+
'sentry.op': 'ai.run.doGenerate',
186+
'sentry.origin': 'auto.vercelai.otel',
187+
},
188+
description: 'generateText.doGenerate',
189+
op: 'ai.run.doGenerate',
190+
origin: 'auto.vercelai.otel',
191+
status: 'ok',
192+
}),
193+
// Third span - explicitly enabled telemetry, should record inputs/outputs regardless of sendDefaultPii
102194
expect.objectContaining({
103-
data: expect.objectContaining({
195+
data: {
104196
'ai.model.id': 'mock-model-id',
105197
'ai.model.provider': 'mock-provider',
106198
'ai.operationId': 'ai.generateText',
@@ -119,14 +211,15 @@ describe('Vercel AI integration', () => {
119211
'operation.name': 'ai.generateText',
120212
'sentry.op': 'ai.pipeline.generateText',
121213
'sentry.origin': 'auto.vercelai.otel',
122-
}),
214+
},
123215
description: 'generateText',
124216
op: 'ai.pipeline.generateText',
125217
origin: 'auto.vercelai.otel',
126218
status: 'ok',
127219
}),
220+
// Fourth span - doGenerate for explicitly enabled telemetry call
128221
expect.objectContaining({
129-
data: expect.objectContaining({
222+
data: {
130223
'sentry.origin': 'auto.vercelai.otel',
131224
'sentry.op': 'ai.run.doGenerate',
132225
'operation.name': 'ai.generateText.doGenerate',
@@ -151,7 +244,7 @@ describe('Vercel AI integration', () => {
151244
'gen_ai.response.id': expect.any(String),
152245
'gen_ai.response.model': 'mock-model-id',
153246
'gen_ai.usage.total_tokens': 30,
154-
}),
247+
},
155248
description: 'generateText.doGenerate',
156249
op: 'ai.run.doGenerate',
157250
origin: 'auto.vercelai.otel',

packages/node/src/integrations/tracing/vercelai/index.ts

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,14 +15,16 @@ import {
1515
} from './attributes';
1616
import { INTEGRATION_NAME } from './constants';
1717
import { SentryVercelAiInstrumentation } from './instrumentation';
18+
import type { VercelAiOptions } from './types';
1819

1920
export const instrumentVercelAi = generateInstrumentOnce(INTEGRATION_NAME, () => new SentryVercelAiInstrumentation({}));
2021

21-
const _vercelAIIntegration = (() => {
22+
const _vercelAIIntegration = ((options: VercelAiOptions = {}) => {
2223
let instrumentation: undefined | SentryVercelAiInstrumentation;
2324

2425
return {
2526
name: INTEGRATION_NAME,
27+
options,
2628
setupOnce() {
2729
instrumentation = instrumentVercelAi();
2830
},

packages/node/src/integrations/tracing/vercelai/instrumentation.ts

Lines changed: 52 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ import type { InstrumentationConfig, InstrumentationModuleDefinition } from '@op
22
import { InstrumentationBase, InstrumentationNodeModuleDefinition } from '@opentelemetry/instrumentation';
33
import { getCurrentScope, SDK_VERSION } from '@sentry/core';
44
import { INTEGRATION_NAME } from './constants';
5-
import type { TelemetrySettings } from './types';
5+
import type { TelemetrySettings, VercelAiIntegration } from './types';
66

77
// List of patched methods
88
// From: https://sdk.vercel.ai/docs/ai-sdk-core/telemetry#collected-data
@@ -24,6 +24,47 @@ type MethodArgs = [MethodFirstArg, ...unknown[]];
2424
type PatchedModuleExports = Record<(typeof INSTRUMENTED_METHODS)[number], (...args: MethodArgs) => unknown> &
2525
Record<string, unknown>;
2626

27+
interface RecordingOptions {
28+
recordInputs?: boolean;
29+
recordOutputs?: boolean;
30+
}
31+
32+
/**
33+
* Determines whether to record inputs and outputs for Vercel AI telemetry based on the configuration hierarchy.
34+
*
35+
* The order of precedence is:
36+
* 1. The vercel ai integration options
37+
* 2. The experimental_telemetry options in the vercel ai method calls
38+
* 3. When telemetry is explicitly enabled (isEnabled: true), default to recording
39+
* 4. Otherwise, use the sendDefaultPii option from client options
40+
*/
41+
export function determineRecordingSettings(
42+
integrationRecordingOptions: RecordingOptions | undefined,
43+
methodTelemetryOptions: RecordingOptions,
44+
telemetryExplicitlyEnabled: boolean | undefined,
45+
defaultRecordingEnabled: boolean,
46+
): { recordInputs: boolean; recordOutputs: boolean } {
47+
const recordInputs =
48+
integrationRecordingOptions?.recordInputs !== undefined
49+
? integrationRecordingOptions.recordInputs
50+
: methodTelemetryOptions.recordInputs !== undefined
51+
? methodTelemetryOptions.recordInputs
52+
: telemetryExplicitlyEnabled === true
53+
? true // When telemetry is explicitly enabled, default to recording inputs
54+
: defaultRecordingEnabled;
55+
56+
const recordOutputs =
57+
integrationRecordingOptions?.recordOutputs !== undefined
58+
? integrationRecordingOptions.recordOutputs
59+
: methodTelemetryOptions.recordOutputs !== undefined
60+
? methodTelemetryOptions.recordOutputs
61+
: telemetryExplicitlyEnabled === true
62+
? true // When telemetry is explicitly enabled, default to recording outputs
63+
: defaultRecordingEnabled;
64+
65+
return { recordInputs, recordOutputs };
66+
}
67+
2768
/**
2869
* This detection is added by the Sentry Vercel AI Integration to detect if the integration should
2970
* be enabled.
@@ -73,19 +114,16 @@ export class SentryVercelAiInstrumentation extends InstrumentationBase {
73114
const isEnabled = existingExperimentalTelemetry.isEnabled;
74115

75116
const client = getCurrentScope().getClient();
76-
const shouldRecordInputsAndOutputs = client?.getIntegrationByName(INTEGRATION_NAME)
77-
? client.getOptions().sendDefaultPii
78-
: false;
79-
80-
// Set recordInputs and recordOutputs based on sendDefaultPii if not explicitly set
81-
const recordInputs =
82-
existingExperimentalTelemetry.recordInputs !== undefined
83-
? existingExperimentalTelemetry.recordInputs
84-
: shouldRecordInputsAndOutputs;
85-
const recordOutputs =
86-
existingExperimentalTelemetry.recordOutputs !== undefined
87-
? existingExperimentalTelemetry.recordOutputs
88-
: shouldRecordInputsAndOutputs;
117+
const integration = client?.getIntegrationByName<VercelAiIntegration>(INTEGRATION_NAME);
118+
const integrationOptions = integration?.options;
119+
const shouldRecordInputsAndOutputs = integration ? Boolean(client?.getOptions().sendDefaultPii) : false;
120+
121+
const { recordInputs, recordOutputs } = determineRecordingSettings(
122+
integrationOptions,
123+
existingExperimentalTelemetry,
124+
isEnabled,
125+
shouldRecordInputsAndOutputs,
126+
);
89127

90128
args[0].experimental_telemetry = {
91129
...existingExperimentalTelemetry,

packages/node/src/integrations/tracing/vercelai/types.ts

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
import type { Integration } from '@sentry/core';
2+
13
/**
24
* Telemetry configuration.
35
*/
@@ -42,3 +44,20 @@ export declare type AttributeValue =
4244
| Array<null | undefined | string>
4345
| Array<null | undefined | number>
4446
| Array<null | undefined | boolean>;
47+
48+
export interface VercelAiOptions {
49+
/**
50+
* Enable or disable input recording. Enabled if `sendDefaultPii` is `true`
51+
* or if you set `isEnabled` to `true` in your ai SDK method telemetry settings
52+
*/
53+
recordInputs?: boolean;
54+
/**
55+
* Enable or disable output recording. Enabled if `sendDefaultPii` is `true`
56+
* or if you set `isEnabled` to `true` in your ai SDK method telemetry settings
57+
*/
58+
recordOutputs?: boolean;
59+
}
60+
61+
export interface VercelAiIntegration extends Integration {
62+
options: VercelAiOptions;
63+
}

0 commit comments

Comments
 (0)