@@ -63,7 +63,58 @@ describe('Google GenAI integration', () => {
       origin: 'auto.ai.google_genai',
       status: 'ok',
     }),
-    // Fourth span - error handling
+    // Fourth span - models.generateContentStream (streaming)
+    expect.objectContaining({
+      data: expect.objectContaining({
+        'gen_ai.operation.name': 'models',
+        'sentry.op': 'gen_ai.models',
+        'sentry.origin': 'auto.ai.google_genai',
+        'gen_ai.system': 'google_genai',
+        'gen_ai.request.model': 'gemini-1.5-flash',
+        'gen_ai.request.temperature': 0.7,
+        'gen_ai.request.top_p': 0.9,
+        'gen_ai.request.max_tokens': 100,
+        'gen_ai.response.streaming': true,
+        'gen_ai.response.id': 'mock-response-id',
+        'gen_ai.response.model': 'gemini-1.5-pro',
+      }),
+      description: 'models gemini-1.5-flash stream-response',
+      op: 'gen_ai.models',
+      origin: 'auto.ai.google_genai',
+    }),
+    // Fifth span - chat.sendMessageStream (streaming)
+    expect.objectContaining({
+      data: expect.objectContaining({
+        'gen_ai.operation.name': 'chat',
+        'sentry.op': 'gen_ai.chat',
+        'sentry.origin': 'auto.ai.google_genai',
+        'gen_ai.system': 'google_genai',
+        'gen_ai.request.model': 'gemini-1.5-pro',
+        'gen_ai.response.streaming': true,
+        'gen_ai.response.id': 'mock-response-id',
+        'gen_ai.response.model': 'gemini-1.5-pro',
+      }),
+      description: 'chat gemini-1.5-pro stream-response',
+      op: 'gen_ai.chat',
+      origin: 'auto.ai.google_genai',
+    }),
+    // Sixth span - blocked content stream
+    expect.objectContaining({
+      data: expect.objectContaining({
+        'gen_ai.operation.name': 'models',
+        'sentry.op': 'gen_ai.models',
+        'sentry.origin': 'auto.ai.google_genai',
+        'gen_ai.system': 'google_genai',
+        'gen_ai.request.model': 'blocked-model',
+        'gen_ai.request.temperature': 0.7,
+        'gen_ai.response.streaming': true,
+      }),
+      description: 'models blocked-model stream-response',
+      op: 'gen_ai.models',
+      origin: 'auto.ai.google_genai',
+      status: 'unknown_error',
+    }),
+    // Seventh span - error handling
     expect.objectContaining({
       data: {
         'gen_ai.operation.name': 'models',
@@ -142,7 +193,57 @@ describe('Google GenAI integration', () => {
       origin: 'auto.ai.google_genai',
       status: 'ok',
     }),
-    // Fourth span - error handling with PII
+    // Fourth span - models.generateContentStream (streaming) with PII
+    expect.objectContaining({
+      data: expect.objectContaining({
+        'gen_ai.operation.name': 'models',
+        'sentry.op': 'gen_ai.models',
+        'sentry.origin': 'auto.ai.google_genai',
+        'gen_ai.system': 'google_genai',
+        'gen_ai.request.model': 'gemini-1.5-flash',
+        'gen_ai.request.messages': expect.any(String), // Should include contents when recordInputs: true
+        'gen_ai.response.streaming': true,
+        'gen_ai.response.id': 'mock-response-id',
+        'gen_ai.response.model': 'gemini-1.5-pro',
+      }),
+      description: 'models gemini-1.5-flash stream-response',
+      op: 'gen_ai.models',
+      origin: 'auto.ai.google_genai',
+    }),
+    // Fifth span - chat.sendMessageStream (streaming) with PII
+    expect.objectContaining({
+      data: expect.objectContaining({
+        'gen_ai.operation.name': 'chat',
+        'sentry.op': 'gen_ai.chat',
+        'sentry.origin': 'auto.ai.google_genai',
+        'gen_ai.system': 'google_genai',
+        'gen_ai.request.model': 'gemini-1.5-pro',
+        'gen_ai.request.messages': expect.any(String), // Should include message when recordInputs: true
+        'gen_ai.response.streaming': true,
+        'gen_ai.response.id': 'mock-response-id',
+        'gen_ai.response.model': 'gemini-1.5-pro',
+      }),
+      description: 'chat gemini-1.5-pro stream-response',
+      op: 'gen_ai.chat',
+      origin: 'auto.ai.google_genai',
+    }),
+    // Sixth span - blocked content stream with PII
+    expect.objectContaining({
+      data: expect.objectContaining({
+        'gen_ai.operation.name': 'models',
+        'sentry.op': 'gen_ai.models',
+        'sentry.origin': 'auto.ai.google_genai',
+        'gen_ai.system': 'google_genai',
+        'gen_ai.request.model': 'blocked-model',
+        'gen_ai.request.messages': expect.any(String), // Should include contents when recordInputs: true
+        'gen_ai.response.streaming': true,
+      }),
+      description: 'models blocked-model stream-response',
+      op: 'gen_ai.models',
+      origin: 'auto.ai.google_genai',
+      status: 'unknown_error',
+    }),
+    // Seventh span - error handling with PII
     expect.objectContaining({
       data: expect.objectContaining({
         'gen_ai.operation.name': 'models',
@@ -163,12 +264,22 @@ describe('Google GenAI integration', () => {
 const EXPECTED_TRANSACTION_WITH_OPTIONS = {
   transaction: 'main',
   spans: expect.arrayContaining([
-    // Check that custom options are respected
+    // Check that custom options are respected for non-streaming
     expect.objectContaining({
       data: expect.objectContaining({
         'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
         'gen_ai.response.text': expect.any(String), // Should include response text when recordOutputs: true
       }),
+      description: expect.not.stringContaining('stream-response'), // Non-streaming span
+    }),
+    // Check that custom options are respected for streaming
+    expect.objectContaining({
+      data: expect.objectContaining({
+        'gen_ai.response.streaming': true,
+        'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
+        'gen_ai.response.text': expect.stringContaining('streaming'), // Should include response text when recordOutputs: true
+      }),
+      description: expect.stringContaining('stream-response'),
     }),
   ]),
 };