Skip to content

Commit 2f071fd

Browse files
authored
Merge pull request #716 from asimurka/support_for_xml_and_json_attatchment_types
LCORE-784: Changing unsupported mime types to text/plain
2 parents 45c7482 + 79acf46 commit 2f071fd

File tree

4 files changed

+80
-5
lines changed

4 files changed

+80
-5
lines changed

src/app/endpoints/query.py

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818
from llama_stack_client.types.agents.turn_create_params import (
1919
Toolgroup,
2020
ToolgroupAgentToolGroupWithArgs,
21+
Document,
2122
)
2223
from llama_stack_client.types.model_list_response import ModelListResponse
2324
from llama_stack_client.types.shared.interleaved_content_item import TextContentItem
@@ -692,10 +693,20 @@ async def retrieve_response( # pylint: disable=too-many-locals,too-many-branche
692693
if not toolgroups:
693694
toolgroups = None
694695

696+
# TODO: LCORE-881 - Remove if Llama Stack starts to support these mime types
697+
documents: list[Document] = [
698+
(
699+
{"content": doc["content"], "mime_type": "text/plain"}
700+
if doc["mime_type"].lower() in ("application/json", "application/xml")
701+
else doc
702+
)
703+
for doc in query_request.get_documents()
704+
]
705+
695706
response = await agent.create_turn(
696707
messages=[UserMessage(role="user", content=query_request.query)],
697708
session_id=session_id,
698-
documents=query_request.get_documents(),
709+
documents=documents,
699710
stream=False,
700711
toolgroups=toolgroups,
701712
)

src/app/endpoints/streaming_query.py

Lines changed: 14 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
"""Handler for REST API call to provide answer to streaming query.""" # pylint: disable=too-many-lines
1+
"""Handler for REST API call to provide answer to streaming query.""" # pylint: disable=too-many-lines,too-many-locals,W0511
22

33
import ast
44
import json
@@ -21,6 +21,7 @@
2121
)
2222
from llama_stack_client.types.shared import ToolCall
2323
from llama_stack_client.types.shared.interleaved_content_item import TextContentItem
24+
from llama_stack_client.types.agents.turn_create_params import Document
2425

2526
from app.database import get_session
2627
from app.endpoints.query import (
@@ -62,6 +63,7 @@
6263
from utils.transcripts import store_transcript
6364
from utils.types import TurnSummary
6465

66+
6567
logger = logging.getLogger("app.endpoints.handlers")
6668
router = APIRouter(tags=["streaming_query"])
6769

@@ -1039,10 +1041,20 @@ async def retrieve_response(
10391041
if not toolgroups:
10401042
toolgroups = None
10411043

1044+
# TODO: LCORE-881 - Remove if Llama Stack starts to support these mime types
1045+
documents: list[Document] = [
1046+
(
1047+
{"content": doc["content"], "mime_type": "text/plain"}
1048+
if doc["mime_type"].lower() in ("application/json", "application/xml")
1049+
else doc
1050+
)
1051+
for doc in query_request.get_documents()
1052+
]
1053+
10421054
response = await agent.create_turn(
10431055
messages=[UserMessage(role="user", content=query_request.query)],
10441056
session_id=session_id,
1045-
documents=query_request.get_documents(),
1057+
documents=documents,
10461058
stream=True,
10471059
toolgroups=toolgroups,
10481060
)

tests/e2e/features/query.feature

Lines changed: 27 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -111,4 +111,30 @@ Scenario: Check if LLM responds for query request with error for missing query
111111
{"query": "Say hello"}
112112
"""
113113
Then The status code of the response is 500
114-
And The body of the response contains Unable to connect to Llama Stack
114+
And The body of the response contains Unable to connect to Llama Stack
115+
116+
Scenario: Check if LLM responds properly when XML and JSON attachments are sent
117+
Given The system is in default state
118+
And I set the Authorization header to Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6Ikpva
119+
When I use "query" to ask question with authorization header
120+
"""
121+
{
122+
"query": "Say hello",
123+
"attachments": [
124+
{
125+
"attachment_type": "configuration",
126+
"content": "<note><to>User</to><from>System</from><message>Hello</message></note>",
127+
"content_type": "application/xml"
128+
},
129+
{
130+
"attachment_type": "configuration",
131+
"content": "{\"foo\": \"bar\"}",
132+
"content_type": "application/json"
133+
}
134+
],
135+
"model": "{MODEL}",
136+
"provider": "{PROVIDER}",
137+
"system_prompt": "You are a helpful assistant"
138+
}
139+
"""
140+
Then The status code of the response is 200

tests/e2e/features/streaming_query.feature

Lines changed: 27 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -86,4 +86,30 @@ Feature: streaming_query endpoint API tests
8686
{"query": "Say hello", "model": "{MODEL}"}
8787
"""
8888
Then The status code of the response is 422
89-
And The body of the response contains Value error, Provider must be specified if model is specified
89+
And The body of the response contains Value error, Provider must be specified if model is specified
90+
91+
Scenario: Check if LLM responds properly when XML and JSON attachments are sent
92+
Given The system is in default state
93+
And I set the Authorization header to Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6Ikpva
94+
When I use "streaming_query" to ask question with authorization header
95+
"""
96+
{
97+
"query": "Say hello",
98+
"attachments": [
99+
{
100+
"attachment_type": "configuration",
101+
"content": "<note><to>User</to><from>System</from><message>Hello</message></note>",
102+
"content_type": "application/xml"
103+
},
104+
{
105+
"attachment_type": "configuration",
106+
"content": "{\"foo\": \"bar\"}",
107+
"content_type": "application/json"
108+
}
109+
],
110+
"model": "{MODEL}",
111+
"provider": "{PROVIDER}",
112+
"system_prompt": "You are a helpful assistant"
113+
}
114+
"""
115+
Then The status code of the response is 200

0 commit comments

Comments (0)