
Commit 4b29edc

models - openai - client context comment (#864)
1 parent 7226025 commit 4b29edc

File tree

1 file changed: +6 −0 lines

src/strands/models/openai.py

Lines changed: 6 additions & 0 deletions
@@ -378,6 +378,9 @@ async def stream(

         logger.debug("invoking model")

+        # We initialize an OpenAI context on every request so as to avoid connection sharing in the underlying httpx
+        # client. The asyncio event loop does not allow connections to be shared. For more details, please refer to
+        # https://github.com/encode/httpx/discussions/2959.
         async with openai.AsyncOpenAI(**self.client_args) as client:
             response = await client.chat.completions.create(**request)

@@ -449,6 +452,9 @@ async def structured_output(
         Yields:
             Model events with the last being the structured output.
         """
+        # We initialize an OpenAI context on every request so as to avoid connection sharing in the underlying httpx
+        # client. The asyncio event loop does not allow connections to be shared. For more details, please refer to
+        # https://github.com/encode/httpx/discussions/2959.
         async with openai.AsyncOpenAI(**self.client_args) as client:
             response: ParsedChatCompletion = await client.beta.chat.completions.parse(
                 model=self.get_config()["model_id"],
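For context, below is a minimal standalone sketch of the pattern the new comments document. It is not the strands OpenAIModel code itself: the stream_once helper, the api_key value, and the request payload are hypothetical stand-ins, and only the per-request async with openai.AsyncOpenAI(**client_args) usage mirrors the diff. Each call enters and exits its own client context, so the underlying httpx connection pool is created and torn down inside the same asyncio event loop.

# Minimal sketch; stream_once and the argument values are hypothetical,
# not part of the strands codebase.
import asyncio

import openai


async def stream_once(client_args: dict, request: dict) -> None:
    # A fresh AsyncOpenAI context per call: entering it builds a new httpx
    # connection pool, and exiting it closes that pool, so no connection is
    # reused across asyncio event loops (see the linked httpx discussion).
    async with openai.AsyncOpenAI(**client_args) as client:
        response = await client.chat.completions.create(**request)
        async for event in response:  # the request below sets stream=True
            print(event)


if __name__ == "__main__":
    asyncio.run(
        stream_once(
            {"api_key": "sk-..."},  # placeholder credentials
            {
                "model": "gpt-4o-mini",  # placeholder model id
                "messages": [{"role": "user", "content": "hi"}],
                "stream": True,
            },
        )
    )

This is the same per-call shape the commit comments in both stream() and structured_output(): the client is scoped to a single request rather than shared at module or instance level, which is the connection-sharing failure mode the referenced httpx discussion describes.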
