Skip to content
10 changes: 10 additions & 0 deletions libs/partners/openai/tests/unit_tests/llms/test_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,3 +108,13 @@ def test_stream_response_to_generation_chunk() -> None:
assert chunk == GenerationChunk(
text="", generation_info={"finish_reason": None, "logprobs": None}
)


def test_generate_streaming_multiple_prompts_error() -> None:
    """Streaming with more than one prompt must raise ValueError."""
    streaming_llm = OpenAI(streaming=True)

    expected = r"Cannot stream results with multiple prompts\."
    with pytest.raises(ValueError, match=expected):
        streaming_llm._generate(["foo", "bar"])