@@ -279,9 +279,9 @@ def _set_output_data(span, response, kwargs, integration, finish_span=True):
 
     def new_iterator():
         # type: () -> Iterator[ChatCompletionChunk]
-        with capture_internal_exceptions():
-            count_tokens_manually = True
-            for x in old_iterator:
+        count_tokens_manually = True
+        for x in old_iterator:
+            with capture_internal_exceptions():
                 # OpenAI chat completion API
                 if hasattr(x, "choices"):
                     choice_index = 0
@@ -312,8 +312,9 @@ def new_iterator():
                     )
                     count_tokens_manually = False
 
-                yield x
+            yield x
 
+        with capture_internal_exceptions():
             if len(data_buf) > 0:
                 all_responses = ["".join(chunk) for chunk in data_buf]
                 if should_send_default_pii() and integration.include_prompts:
@@ -334,9 +335,9 @@ def new_iterator():
 
     async def new_iterator_async():
         # type: () -> AsyncIterator[ChatCompletionChunk]
-        with capture_internal_exceptions():
-            count_tokens_manually = True
-            async for x in old_iterator:
+        count_tokens_manually = True
+        async for x in old_iterator:
+            with capture_internal_exceptions():
                 # OpenAI chat completion API
                 if hasattr(x, "choices"):
                     choice_index = 0
@@ -367,8 +368,9 @@ async def new_iterator_async():
                     )
                     count_tokens_manually = False
 
-                yield x
+            yield x
 
+        with capture_internal_exceptions():
             if len(data_buf) > 0:
                 all_responses = ["".join(chunk) for chunk in data_buf]
                 if should_send_default_pii() and integration.include_prompts: