Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,24 @@ export class MockGoogleGenAI {
},
};
},
embedContent: async (...args) => {
const params = args[0];
await new Promise(resolve => setTimeout(resolve, 10));

if (params.model === 'error-model') {
const error = new Error('Model not found');
error.status = 404;
throw error;
}

return {
embeddings: [
{
values: [0.1, 0.2, 0.3, 0.4, 0.5],
},
],
};
},
generateContentStream: async () => {
// Return a promise that resolves to an async generator
return (async function* () {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,3 +30,11 @@ const response = await chat.sendMessage({
});

console.log('Received response', response);

// Test embedContent — exercises the newly instrumented models.embedContent
// call (`client` is the GoogleGenAI instance created earlier in this scenario;
// the mock backend returns a fixed embedding vector).
const embedResponse = await client.models.embedContent({
model: 'text-embedding-004',
contents: 'Hello world',
});

console.log('Received embed response', embedResponse);
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
import { GoogleGenAI } from '@google/genai';
import * as Sentry from '@sentry/node';
import express from 'express';

/**
 * Starts an Express server that mimics the Google GenAI batch-embeddings
 * endpoint on a random free port.
 *
 * Resolves with the underlying http.Server once it is listening; callers read
 * the bound port from `server.address().port` and must call `server.close()`.
 */
function startMockGoogleGenAIServer() {
  const app = express();
  app.use(express.json());

  // '\\:' escapes the literal colon in the Express route so ':model' stays a
  // path parameter while ':batchEmbedContents' is matched verbatim.
  app.post('/v1beta/models/:model\\:batchEmbedContents', (req, res) => {
    // 'error-model' simulates a missing model so error handling can be tested.
    if (req.params.model === 'error-model') {
      res.status(404).set('x-request-id', 'mock-request-123').end('Model not found');
      return;
    }

    // Deterministic fake embedding vector.
    res.send({
      embeddings: [{ values: [0.1, 0.2, 0.3, 0.4, 0.5] }],
    });
  });

  return new Promise(resolve => {
    const server = app.listen(0, () => resolve(server));
  });
}

/**
 * Runs the embedContent scenarios against the mock server inside a Sentry
 * span named 'main', then shuts the server down.
 *
 * Fix: the original called `server.close()` only after the span body resolved,
 * so any thrown error (e.g. an unexpected client failure) leaked the listening
 * socket and could keep the process alive. The shutdown now lives in a
 * `finally` block so it runs on every path.
 */
async function run() {
  const server = await startMockGoogleGenAIServer();

  try {
    await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
      const client = new GoogleGenAI({
        apiKey: 'mock-api-key',
        httpOptions: { baseUrl: `http://localhost:${server.address().port}` },
      });

      // Test 1: Basic embedContent with string contents
      await client.models.embedContent({
        model: 'text-embedding-004',
        contents: 'What is the capital of France?',
      });

      // Test 2: Error handling
      try {
        await client.models.embedContent({
          model: 'error-model',
          contents: 'This will fail',
        });
      } catch {
        // Expected error
      }

      // Test 3: embedContent with array contents
      await client.models.embedContent({
        model: 'text-embedding-004',
        contents: [
          {
            role: 'user',
            parts: [{ text: 'First input text' }],
          },
          {
            role: 'user',
            parts: [{ text: 'Second input text' }],
          },
        ],
      });
    });
  } finally {
    // Always release the mock server so the process can exit cleanly.
    server.close();
  }
}

run();
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { SEMANTIC_ATTRIBUTE_SENTRY_OP, SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN } from '@sentry/core';
import { afterAll, describe, expect } from 'vitest';
import {
GEN_AI_EMBEDDINGS_INPUT_ATTRIBUTE,
GEN_AI_INPUT_MESSAGES_ATTRIBUTE,
GEN_AI_INPUT_MESSAGES_ORIGINAL_LENGTH_ATTRIBUTE,
GEN_AI_OPERATION_NAME_ATTRIBUTE,
Expand Down Expand Up @@ -601,4 +602,124 @@ describe('Google GenAI integration', () => {
});
},
);

// Expected spans when sendDefaultPii is false: operation/model attributes are
// recorded, but no embeddings input is attached to any span.
const EXPECTED_TRANSACTION_DEFAULT_PII_FALSE_EMBEDDINGS = {
  transaction: 'main',
  spans: expect.arrayContaining([
    // embedContent with plain string contents
    expect.objectContaining({
      description: 'embeddings text-embedding-004',
      op: 'gen_ai.embeddings',
      origin: 'auto.ai.google_genai',
      status: 'ok',
      data: {
        [GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
        [GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
        [GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'text-embedding-004',
      },
    }),
    // embedContent against the failing model — span status reflects the error
    expect.objectContaining({
      description: 'embeddings error-model',
      op: 'gen_ai.embeddings',
      origin: 'auto.ai.google_genai',
      status: 'internal_error',
      data: {
        [GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
        [GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
        [GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'error-model',
      },
    }),
    // embedContent with an array of content objects
    expect.objectContaining({
      description: 'embeddings text-embedding-004',
      op: 'gen_ai.embeddings',
      origin: 'auto.ai.google_genai',
      status: 'ok',
      data: {
        [GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
        [GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
        [GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'text-embedding-004',
      },
    }),
  ]),
};

// Expected spans when sendDefaultPii is true: identical to the non-PII case,
// plus the raw embeddings input recorded on each span (string inputs verbatim,
// array inputs JSON-stringified).
const EXPECTED_TRANSACTION_DEFAULT_PII_TRUE_EMBEDDINGS = {
  transaction: 'main',
  spans: expect.arrayContaining([
    // embedContent with string contents — input captured as-is
    expect.objectContaining({
      description: 'embeddings text-embedding-004',
      op: 'gen_ai.embeddings',
      origin: 'auto.ai.google_genai',
      status: 'ok',
      data: {
        [GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
        [GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
        [GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'text-embedding-004',
        [GEN_AI_EMBEDDINGS_INPUT_ATTRIBUTE]: 'What is the capital of France?',
      },
    }),
    // embedContent against the failing model — input still captured
    expect.objectContaining({
      description: 'embeddings error-model',
      op: 'gen_ai.embeddings',
      origin: 'auto.ai.google_genai',
      status: 'internal_error',
      data: {
        [GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
        [GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
        [GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'error-model',
        [GEN_AI_EMBEDDINGS_INPUT_ATTRIBUTE]: 'This will fail',
      },
    }),
    // embedContent with array contents — input captured as a JSON string
    expect.objectContaining({
      description: 'embeddings text-embedding-004',
      op: 'gen_ai.embeddings',
      origin: 'auto.ai.google_genai',
      status: 'ok',
      data: {
        [GEN_AI_OPERATION_NAME_ATTRIBUTE]: 'embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.embeddings',
        [SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.ai.google_genai',
        [GEN_AI_SYSTEM_ATTRIBUTE]: 'google_genai',
        [GEN_AI_REQUEST_MODEL_ATTRIBUTE]: 'text-embedding-004',
        [GEN_AI_EMBEDDINGS_INPUT_ATTRIBUTE]:
          '[{"role":"user","parts":[{"text":"First input text"}]},{"role":"user","parts":[{"text":"Second input text"}]}]',
      },
    }),
  ]),
};

// Runs the embeddings scenario without PII and checks the emitted transaction.
createEsmAndCjsTests(__dirname, 'scenario-embeddings.mjs', 'instrument.mjs', (createRunner, test) => {
  test('creates google genai embeddings spans with sendDefaultPii: false', async () => {
    const runner = createRunner()
      .ignore('event')
      .expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_FALSE_EMBEDDINGS });
    await runner.start().completed();
  });
});

// Runs the same scenario with PII enabled and expects captured inputs.
createEsmAndCjsTests(__dirname, 'scenario-embeddings.mjs', 'instrument-with-pii.mjs', (createRunner, test) => {
  test('creates google genai embeddings spans with sendDefaultPii: true', async () => {
    const runner = createRunner()
      .ignore('event')
      .expect({ transaction: EXPECTED_TRANSACTION_DEFAULT_PII_TRUE_EMBEDDINGS });
    await runner.start().completed();
  });
});
});
4 changes: 4 additions & 0 deletions packages/core/src/tracing/ai/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,10 @@ export function getFinalOperationName(methodPath: string): string {
if (methodPath.includes('generateContent')) {
return 'generate_content';
}
// Google GenAI: models.embedContent -> embeddings
if (methodPath.includes('embedContent')) {
return 'embeddings';
}
// Anthropic: models.get/retrieve -> models (metadata retrieval only)
if (methodPath.includes('models')) {
return 'models';
Expand Down
1 change: 1 addition & 0 deletions packages/core/src/tracing/google-genai/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ export const GOOGLE_GENAI_INTEGRATION_NAME = 'Google_GenAI';
export const GOOGLE_GENAI_INSTRUMENTED_METHODS = [
'models.generateContent',
'models.generateContentStream',
'models.embedContent',
'chats.create',
'sendMessage',
'sendMessageStream',
Expand Down
Loading
Loading