Skip to content

Commit 79acf46

Browse files
author
Andrej Simurka
committed
Change unsupported MIME types to text/plain
1 parent f4f6684 commit 79acf46

File tree

4 files changed

+80
-5
lines changed

4 files changed

+80
-5
lines changed

src/app/endpoints/query.py

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818
from llama_stack_client.types.agents.turn_create_params import (
1919
Toolgroup,
2020
ToolgroupAgentToolGroupWithArgs,
21+
Document,
2122
)
2223
from llama_stack_client.types.model_list_response import ModelListResponse
2324
from llama_stack_client.types.shared.interleaved_content_item import TextContentItem
@@ -692,10 +693,20 @@ async def retrieve_response( # pylint: disable=too-many-locals,too-many-branche
692693
if not toolgroups:
693694
toolgroups = None
694695

696+
# TODO: LCORE-881 - Remove if Llama Stack starts to support these mime types
697+
documents: list[Document] = [
698+
(
699+
{"content": doc["content"], "mime_type": "text/plain"}
700+
if doc["mime_type"].lower() in ("application/json", "application/xml")
701+
else doc
702+
)
703+
for doc in query_request.get_documents()
704+
]
705+
695706
response = await agent.create_turn(
696707
messages=[UserMessage(role="user", content=query_request.query)],
697708
session_id=session_id,
698-
documents=query_request.get_documents(),
709+
documents=documents,
699710
stream=False,
700711
toolgroups=toolgroups,
701712
)

src/app/endpoints/streaming_query.py

Lines changed: 14 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
"""Handler for REST API call to provide answer to streaming query.""" # pylint: disable=too-many-lines
1+
"""Handler for REST API call to provide answer to streaming query.""" # pylint: disable=too-many-lines,too-many-locals,W0511
22

33
import ast
44
import json
@@ -21,6 +21,7 @@
2121
)
2222
from llama_stack_client.types.shared import ToolCall
2323
from llama_stack_client.types.shared.interleaved_content_item import TextContentItem
24+
from llama_stack_client.types.agents.turn_create_params import Document
2425

2526
from app.database import get_session
2627
from app.endpoints.query import (
@@ -62,6 +63,7 @@
6263
from utils.transcripts import store_transcript
6364
from utils.types import TurnSummary
6465

66+
6567
logger = logging.getLogger("app.endpoints.handlers")
6668
router = APIRouter(tags=["streaming_query"])
6769

@@ -1039,10 +1041,20 @@ async def retrieve_response(
10391041
if not toolgroups:
10401042
toolgroups = None
10411043

1044+
# TODO: LCORE-881 - Remove if Llama Stack starts to support these mime types
1045+
documents: list[Document] = [
1046+
(
1047+
{"content": doc["content"], "mime_type": "text/plain"}
1048+
if doc["mime_type"].lower() in ("application/json", "application/xml")
1049+
else doc
1050+
)
1051+
for doc in query_request.get_documents()
1052+
]
1053+
10421054
response = await agent.create_turn(
10431055
messages=[UserMessage(role="user", content=query_request.query)],
10441056
session_id=session_id,
1045-
documents=query_request.get_documents(),
1057+
documents=documents,
10461058
stream=True,
10471059
toolgroups=toolgroups,
10481060
)

tests/e2e/features/query.feature

Lines changed: 27 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -113,4 +113,30 @@ Scenario: Check if LLM responds for query request with error for missing query
113113
{"query": "Say hello"}
114114
"""
115115
Then The status code of the response is 500
116-
And The body of the response contains Unable to connect to Llama Stack
116+
And The body of the response contains Unable to connect to Llama Stack
117+
118+
Scenario: Check if LLM responds properly when XML and JSON attachments are sent
119+
Given The system is in default state
120+
And I set the Authorization header to Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6Ikpva
121+
When I use "query" to ask question with authorization header
122+
"""
123+
{
124+
"query": "Say hello",
125+
"attachments": [
126+
{
127+
"attachment_type": "configuration",
128+
"content": "<note><to>User</to><from>System</from><message>Hello</message></note>",
129+
"content_type": "application/xml"
130+
},
131+
{
132+
"attachment_type": "configuration",
133+
"content": "{\"foo\": \"bar\"}",
134+
"content_type": "application/json"
135+
}
136+
],
137+
"model": "{MODEL}",
138+
"provider": "{PROVIDER}",
139+
"system_prompt": "You are a helpful assistant"
140+
}
141+
"""
142+
Then The status code of the response is 200

tests/e2e/features/streaming_query.feature

Lines changed: 27 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -88,4 +88,30 @@ Feature: streaming_query endpoint API tests
8888
{"query": "Say hello", "model": "{MODEL}"}
8989
"""
9090
Then The status code of the response is 422
91-
And The body of the response contains Value error, Provider must be specified if model is specified
91+
And The body of the response contains Value error, Provider must be specified if model is specified
92+
93+
Scenario: Check if LLM responds properly when XML and JSON attachments are sent
94+
Given The system is in default state
95+
And I set the Authorization header to Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6Ikpva
96+
When I use "streaming_query" to ask question with authorization header
97+
"""
98+
{
99+
"query": "Say hello",
100+
"attachments": [
101+
{
102+
"attachment_type": "configuration",
103+
"content": "<note><to>User</to><from>System</from><message>Hello</message></note>",
104+
"content_type": "application/xml"
105+
},
106+
{
107+
"attachment_type": "configuration",
108+
"content": "{\"foo\": \"bar\"}",
109+
"content_type": "application/json"
110+
}
111+
],
112+
"model": "{MODEL}",
113+
"provider": "{PROVIDER}",
114+
"system_prompt": "You are a helpful assistant"
115+
}
116+
"""
117+
Then The status code of the response is 200

0 commit comments

Comments (0)