Skip to content
17 changes: 17 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,23 @@

Work in this release was contributed by @dmmulroy. Thank you for your contribution!

### Important Changes

- **feat(core): Add `streamGenAiSpans` option to stream gen_ai spans ([#20785](https://github.com/getsentry/sentry-javascript/pull/20785))**

Adds a new `streamGenAiSpans` option that controls how `gen_ai` spans are
sent to Sentry. When set, the SDK extracts all `gen_ai` spans out of a
transaction and sends them as v2 envelope items.
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

q: Is it an important change if practically no one knows yet what a v2 envelope item is?

Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think so — the justification for it is in the next line:

This avoids payload size limits of regular transactions.


Enable this option if gen_ai spans are being dropped because the transaction payload exceeds size limits.

```ts
Sentry.init({
dsn: 'https://examplePublicKey@o0.ingest.sentry.io/0',
streamGenAiSpans: true,
});
```

## 10.52.0

### Important Changes
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ export default Sentry.withSentry(
(env: Env) => ({
dsn: env.SENTRY_DSN,
tracesSampleRate: 1.0,
streamGenAiSpans: true,
}),
{
async fetch(_request, _env, _ctx) {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import { SEMANTIC_ATTRIBUTE_SENTRY_OP, SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN } from '@sentry/core';
import { expect, it } from 'vitest';
import {
GEN_AI_OPERATION_NAME_ATTRIBUTE,
Expand All @@ -21,30 +20,36 @@ it('traces a basic message creation request', async ({ signal }) => {
const runner = createRunner(__dirname)
.ignore('event')
.expect(envelope => {
// Transaction item (first item in envelope)
const transactionEvent = envelope[1]?.[0]?.[1] as any;

expect(transactionEvent.transaction).toBe('GET /');
expect(transactionEvent.spans).toEqual(
expect.arrayContaining([
expect.objectContaining({
data: expect.objectContaining({
[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'chat',
[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.chat',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.anthropic',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'anthropic',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'claude-3-haiku-20240307',
[GEN_AI_REQUEST_TEMPERATURE_ATTRIBUTE]: 0.7,
[GEN_AI_RESPONSE_MODEL_ATTRIBUTE]: 'claude-3-haiku-20240307',
[GEN_AI_RESPONSE_ID_ATTRIBUTE]: 'msg_mock123',
[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE]: 10,
[GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE]: 15,
}),
description: 'chat claude-3-haiku-20240307',
op: 'gen_ai.chat',
origin: 'auto.ai.anthropic',
}),
]),
);

// Span container item (second item in same envelope)
const container = envelope[1]?.[1]?.[1] as any;
expect(container).toBeDefined();

expect(container.items).toHaveLength(1);
const [firstSpan] = container.items;

// [0] chat claude-3-haiku-20240307
expect(firstSpan!.name).toBe('chat claude-3-haiku-20240307');
expect(firstSpan!.status).toBe('ok');
expect(firstSpan!.attributes[GEN_AI_OPERATION_NAME_ATTRIBUTE]).toEqual({ type: 'string', value: 'chat' });
expect(firstSpan!.attributes['sentry.op']).toEqual({ type: 'string', value: 'gen_ai.chat' });
expect(firstSpan!.attributes['sentry.origin']).toEqual({ type: 'string', value: 'auto.ai.anthropic' });
expect(firstSpan!.attributes[GEN_AI_SYSTEM_ATTRIBUTE]).toEqual({ type: 'string', value: 'anthropic' });
expect(firstSpan!.attributes[GEN_AI_REQUEST_MODEL_ATTRIBUTE]).toEqual({
type: 'string',
value: 'claude-3-haiku-20240307',
});
expect(firstSpan!.attributes[GEN_AI_REQUEST_TEMPERATURE_ATTRIBUTE]).toEqual({ type: 'double', value: 0.7 });
expect(firstSpan!.attributes[GEN_AI_RESPONSE_MODEL_ATTRIBUTE]).toEqual({
type: 'string',
value: 'claude-3-haiku-20240307',
});
expect(firstSpan!.attributes[GEN_AI_RESPONSE_ID_ATTRIBUTE]).toEqual({ type: 'string', value: 'msg_mock123' });
expect(firstSpan!.attributes[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE]).toEqual({ type: 'integer', value: 10 });
expect(firstSpan!.attributes[GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE]).toEqual({ type: 'integer', value: 15 });
})
.start(signal);
await runner.makeRequest('get', '/');
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ export default Sentry.withSentry(
(env: Env) => ({
dsn: env.SENTRY_DSN,
tracesSampleRate: 1.0,
streamGenAiSpans: true,
}),
{
async fetch(_request, _env, _ctx) {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import { SEMANTIC_ATTRIBUTE_SENTRY_OP, SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN } from '@sentry/core';
import { expect, it } from 'vitest';
import {
GEN_AI_OPERATION_NAME_ATTRIBUTE,
Expand All @@ -18,67 +17,69 @@ import { createRunner } from '../../../runner';
// want to test that the instrumentation does not break in our
// cloudflare SDK.

it('traces Google GenAI chat creation and message sending', async () => {
it('traces Google GenAI chat creation and message sending', async ({ signal }) => {
const runner = createRunner(__dirname)
.ignore('event')
.expect(envelope => {
// Transaction item (first item in envelope)
const transactionEvent = envelope[1]?.[0]?.[1] as any;

expect(transactionEvent.transaction).toBe('GET /');
expect(transactionEvent.spans).toEqual(
expect.arrayContaining([
// chat.sendMessage
expect.objectContaining({
data: expect.objectContaining({
[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'chat',
[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.chat',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'gemini-1.5-pro',
[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE]: 8,
[GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE]: 12,
[GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: 20,
}),
description: 'chat gemini-1.5-pro',
op: 'gen_ai.chat',
origin: 'auto.ai.google_genai',
}),
// models.generateContent
expect.objectContaining({
data: expect.objectContaining({
[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'generate_content',
[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_content',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'gemini-1.5-flash',
[GEN_AI_REQUEST_TEMPERATURE_ATTRIBUTE]: 0.7,
[GEN_AI_REQUEST_TOP_P_ATTRIBUTE]: 0.9,
[GEN_AI_REQUEST_MAX_TOKENS_ATTRIBUTE]: 100,
[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE]: 8,
[GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE]: 12,
[GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: 20,
}),
description: 'generate_content gemini-1.5-flash',
op: 'gen_ai.generate_content',
origin: 'auto.ai.google_genai',
}),
// models.embedContent
expect.objectContaining({
data: expect.objectContaining({
[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'embeddings',
[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.embeddings',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'text-embedding-004',
}),
description: 'embeddings text-embedding-004',
op: 'gen_ai.embeddings',
origin: 'auto.ai.google_genai',
}),
]),
);

// Span container item (second item in same envelope)
const container = envelope[1]?.[1]?.[1] as any;
expect(container).toBeDefined();
expect(container.items).toHaveLength(3);
const [firstSpan, secondSpan, thirdSpan] = container.items;

// [0] chat gemini-1.5-pro
expect(firstSpan!.name).toBe('chat gemini-1.5-pro');
expect(firstSpan!.status).toBe('ok');
expect(firstSpan!.attributes[GEN_AI_OPERATION_NAME_ATTRIBUTE]).toEqual({ type: 'string', value: 'chat' });
expect(firstSpan!.attributes['sentry.op']).toEqual({ type: 'string', value: 'gen_ai.chat' });
expect(firstSpan!.attributes['sentry.origin']).toEqual({ type: 'string', value: 'auto.ai.google_genai' });
expect(firstSpan!.attributes[GEN_AI_SYSTEM_ATTRIBUTE]).toEqual({ type: 'string', value: 'google_genai' });
expect(firstSpan!.attributes[GEN_AI_REQUEST_MODEL_ATTRIBUTE]).toEqual({
type: 'string',
value: 'gemini-1.5-pro',
});
expect(firstSpan!.attributes[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE]).toEqual({ type: 'integer', value: 8 });
expect(firstSpan!.attributes[GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE]).toEqual({ type: 'integer', value: 12 });
expect(firstSpan!.attributes[GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]).toEqual({ type: 'integer', value: 20 });

// [1] generate_content gemini-1.5-flash
expect(secondSpan!.name).toBe('generate_content gemini-1.5-flash');
expect(secondSpan!.status).toBe('ok');
expect(secondSpan!.attributes[GEN_AI_OPERATION_NAME_ATTRIBUTE]).toEqual({
type: 'string',
value: 'generate_content',
});
expect(secondSpan!.attributes['sentry.op']).toEqual({ type: 'string', value: 'gen_ai.generate_content' });
expect(secondSpan!.attributes['sentry.origin']).toEqual({ type: 'string', value: 'auto.ai.google_genai' });
expect(secondSpan!.attributes[GEN_AI_SYSTEM_ATTRIBUTE]).toEqual({ type: 'string', value: 'google_genai' });
expect(secondSpan!.attributes[GEN_AI_REQUEST_MODEL_ATTRIBUTE]).toEqual({
type: 'string',
value: 'gemini-1.5-flash',
});
expect(secondSpan!.attributes[GEN_AI_REQUEST_TEMPERATURE_ATTRIBUTE]).toEqual({ type: 'double', value: 0.7 });
expect(secondSpan!.attributes[GEN_AI_REQUEST_TOP_P_ATTRIBUTE]).toEqual({ type: 'double', value: 0.9 });
expect(secondSpan!.attributes[GEN_AI_REQUEST_MAX_TOKENS_ATTRIBUTE]).toEqual({ type: 'integer', value: 100 });
expect(secondSpan!.attributes[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE]).toEqual({ type: 'integer', value: 8 });
expect(secondSpan!.attributes[GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE]).toEqual({ type: 'integer', value: 12 });
expect(secondSpan!.attributes[GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]).toEqual({ type: 'integer', value: 20 });

// [2] embeddings text-embedding-004
expect(thirdSpan!.name).toBe('embeddings text-embedding-004');
expect(thirdSpan!.status).toBe('ok');
expect(thirdSpan!.attributes[GEN_AI_OPERATION_NAME_ATTRIBUTE]).toEqual({ type: 'string', value: 'embeddings' });
expect(thirdSpan!.attributes['sentry.op']).toEqual({ type: 'string', value: 'gen_ai.embeddings' });
expect(thirdSpan!.attributes['sentry.origin']).toEqual({ type: 'string', value: 'auto.ai.google_genai' });
expect(thirdSpan!.attributes[GEN_AI_SYSTEM_ATTRIBUTE]).toEqual({ type: 'string', value: 'google_genai' });
expect(thirdSpan!.attributes[GEN_AI_REQUEST_MODEL_ATTRIBUTE]).toEqual({
type: 'string',
value: 'text-embedding-004',
});
})
.start();
.start(signal);
await runner.makeRequest('get', '/');
await runner.completed();
});
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ export default Sentry.withSentry(
(env: Env) => ({
dsn: env.SENTRY_DSN,
tracesSampleRate: 1.0,
streamGenAiSpans: true,
}),
{
async fetch(_request, _env, _ctx) {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import { SEMANTIC_ATTRIBUTE_SENTRY_OP, SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN } from '@sentry/core';
import { expect, it } from 'vitest';
import {
GEN_AI_OPERATION_NAME_ATTRIBUTE,
Expand All @@ -22,53 +21,46 @@ it('traces langchain chat model, chain, and tool invocations', async ({ signal }
const runner = createRunner(__dirname)
.ignore('event')
.expect(envelope => {
// Transaction item (first item in envelope)
const transactionEvent = envelope[1]?.[0]?.[1] as any;

expect(transactionEvent.transaction).toBe('GET /');
expect(transactionEvent.spans).toEqual(
expect.arrayContaining([
// Chat model span
expect.objectContaining({
data: expect.objectContaining({
[GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'chat',
[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.chat',
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.langchain',
[GEN_AI_SYSTEM_ATTRIBUTE]: 'anthropic',
[GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'claude-3-5-sonnet-20241022',
[GEN_AI_REQUEST_TEMPERATURE_ATTRIBUTE]: 0.7,
[GEN_AI_REQUEST_MAX_TOKENS_ATTRIBUTE]: 100,
[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE]: 10,
[GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE]: 15,
[GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: 25,
}),
description: 'chat claude-3-5-sonnet-20241022',
op: 'gen_ai.chat',
origin: 'auto.ai.langchain',
}),
// Chain span
expect.objectContaining({
data: expect.objectContaining({
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.langchain',
[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
'langchain.chain.name': 'my_test_chain',
}),
description: 'chain my_test_chain',
op: 'gen_ai.invoke_agent',
origin: 'auto.ai.langchain',
}),
// Tool span
expect.objectContaining({
data: expect.objectContaining({
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.langchain',
[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.execute_tool',
[GEN_AI_TOOL_NAME_ATTRIBUTE]: 'search_tool',
}),
description: 'execute_tool search_tool',
op: 'gen_ai.execute_tool',
origin: 'auto.ai.langchain',
}),
]),
);

// Span container item (second item in same envelope)
const container = envelope[1]?.[1]?.[1] as any;
expect(container).toBeDefined();
expect(container.items).toHaveLength(3);
const [firstSpan, secondSpan, thirdSpan] = container.items;

// [0] chat claude-3-5-sonnet-20241022
expect(firstSpan!.name).toBe('chat claude-3-5-sonnet-20241022');
expect(firstSpan!.status).toBe('ok');
expect(firstSpan!.attributes[GEN_AI_OPERATION_NAME_ATTRIBUTE]).toEqual({ type: 'string', value: 'chat' });
expect(firstSpan!.attributes['sentry.op']).toEqual({ type: 'string', value: 'gen_ai.chat' });
expect(firstSpan!.attributes['sentry.origin']).toEqual({ type: 'string', value: 'auto.ai.langchain' });
expect(firstSpan!.attributes[GEN_AI_SYSTEM_ATTRIBUTE]).toEqual({ type: 'string', value: 'anthropic' });
expect(firstSpan!.attributes[GEN_AI_REQUEST_MODEL_ATTRIBUTE]).toEqual({
type: 'string',
value: 'claude-3-5-sonnet-20241022',
});
expect(firstSpan!.attributes[GEN_AI_REQUEST_TEMPERATURE_ATTRIBUTE]).toEqual({ type: 'double', value: 0.7 });
expect(firstSpan!.attributes[GEN_AI_REQUEST_MAX_TOKENS_ATTRIBUTE]).toEqual({ type: 'integer', value: 100 });
expect(firstSpan!.attributes[GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE]).toEqual({ type: 'integer', value: 10 });
expect(firstSpan!.attributes[GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE]).toEqual({ type: 'integer', value: 15 });
expect(firstSpan!.attributes[GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]).toEqual({ type: 'integer', value: 25 });

// [1] chain my_test_chain
expect(secondSpan!.name).toBe('chain my_test_chain');
expect(secondSpan!.status).toBe('ok');
expect(secondSpan!.attributes['sentry.origin']).toEqual({ type: 'string', value: 'auto.ai.langchain' });
expect(secondSpan!.attributes['sentry.op']).toEqual({ type: 'string', value: 'gen_ai.invoke_agent' });
expect(secondSpan!.attributes['langchain.chain.name']).toEqual({ type: 'string', value: 'my_test_chain' });

// [2] execute_tool search_tool
expect(thirdSpan!.name).toBe('execute_tool search_tool');
expect(thirdSpan!.status).toBe('ok');
expect(thirdSpan!.attributes['sentry.origin']).toEqual({ type: 'string', value: 'auto.ai.langchain' });
expect(thirdSpan!.attributes['sentry.op']).toEqual({ type: 'string', value: 'gen_ai.execute_tool' });
expect(thirdSpan!.attributes[GEN_AI_TOOL_NAME_ATTRIBUTE]).toEqual({ type: 'string', value: 'search_tool' });
})
.start(signal);
await runner.makeRequest('get', '/');
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ export default Sentry.withSentry(
dsn: env.SENTRY_DSN,
tracesSampleRate: 1.0,
sendDefaultPii: true,
streamGenAiSpans: true,
}),
{
async fetch(_request, _env, _ctx) {
Expand Down
Loading
Loading