1 file changed: +9 −12 lines changed

@@ -74,18 +74,15 @@ async def test_bigger_truncation_size(client: openai.AsyncOpenAI):
     }

     with pytest.raises(openai.BadRequestError) as err:
-        err = await client.post(path="embeddings",
-                                cast_to=object,
-                                body={**kwargs})
-
-    assert str(err) == f"""openai.BadRequestError:
-Error code: 400 - {{'object': 'error',
-'message': 'truncate_prompt_tokens value
-({truncation_size})
-is greater than max_model_len ({max_model_len}).
-Please, select a smaller truncation size.',
-'type': 'BadRequestError',
-'param': None, 'code': 400}}"""
+        await client.post(path="embeddings", cast_to=object, body={**kwargs})
+
+    assert err.value.status_code == 400
+    error_details = err.value.response.json()["error"]
+    assert error_details["type"] == "BadRequestError"
+    expected_message = ("truncate_prompt_tokens value is "
+                        "greater than max_model_len."
+                        " Please, select a smaller truncation size.")
+    assert error_details["message"] == expected_message


 @pytest.mark.asyncio
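For reference, below is a minimal, self-contained sketch of the assertion pattern the added lines adopt, assuming a locally running OpenAI-compatible server; the base_url, api_key, model name, and truncation value are hypothetical placeholders rather than values taken from this diff. err.value is the raised openai.BadRequestError, and its response attribute is the underlying httpx response, so the server's JSON error payload can be inspected directly.

# Minimal sketch of the new assertion pattern; base_url, api_key, and the
# model name below are hypothetical placeholders, not values from the diff.
import openai
import pytest


@pytest.mark.asyncio
async def test_oversized_truncation_rejected_sketch():
    client = openai.AsyncOpenAI(base_url="http://localhost:8000/v1",
                                api_key="EMPTY")
    kwargs = {
        "model": "some-embedding-model",       # hypothetical model name
        "input": "hello world",
        "truncate_prompt_tokens": 1_000_000,   # assumed to exceed max_model_len
    }

    with pytest.raises(openai.BadRequestError) as err:
        await client.post(path="embeddings", cast_to=object, body={**kwargs})

    # err.value is the raised BadRequestError; its .response is the underlying
    # httpx.Response, so the server's JSON error body can be parsed directly
    # instead of string-matching the exception's repr.
    assert err.value.status_code == 400
    error_details = err.value.response.json()["error"]
    assert error_details["type"] == "BadRequestError"
    assert "truncate_prompt_tokens" in error_details["message"]

Asserting on the parsed payload rather than on str(err) keeps the test independent of how the client formats the exception and of the dynamic values (the requested truncation size and the model's max_model_len) that the old expected string embedded.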