From c1e352195bd90e4a80295088f17a5840422444cc Mon Sep 17 00:00:00 2001 From: Brian Dussault Date: Thu, 6 Feb 2025 12:15:14 -0500 Subject: [PATCH 1/2] Fix for non-streaming clients Fixes bug that returned improper json for non-streaming clients. This fix also ensures that the chat requests and responses are persisted properly for display in codegate admin UI. --- src/codegate/providers/formatting/input_pipeline.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/codegate/providers/formatting/input_pipeline.py b/src/codegate/providers/formatting/input_pipeline.py index ce28e7a7..053cf5c0 100644 --- a/src/codegate/providers/formatting/input_pipeline.py +++ b/src/codegate/providers/formatting/input_pipeline.py @@ -61,7 +61,6 @@ def _create_model_response( model=model, ) - async def _convert_to_stream( content: str, step_name: str, From 21405ca730b04df02de1ff55413a926db76a32e4 Mon Sep 17 00:00:00 2001 From: Brian Dussault Date: Thu, 6 Feb 2025 12:23:15 -0500 Subject: [PATCH 2/2] Fix for non-streaming clients Fixes bug that returned improper json for non-streaming clients. This fix also ensures that the chat requests and responses are persisted properly for display in codegate admin UI. 
--- src/codegate/providers/formatting/input_pipeline.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/codegate/providers/formatting/input_pipeline.py b/src/codegate/providers/formatting/input_pipeline.py index 053cf5c0..9891df0d 100644 --- a/src/codegate/providers/formatting/input_pipeline.py +++ b/src/codegate/providers/formatting/input_pipeline.py @@ -2,7 +2,7 @@ from typing import AsyncIterator, Union from litellm import ModelResponse -from litellm.types.utils import Delta, StreamingChoices +from litellm.types.utils import Choices, Delta, Message, StreamingChoices from codegate.db.connection import DbRecorder from codegate.pipeline.base import PipelineContext, PipelineResponse @@ -56,11 +56,17 @@ def _create_model_response( else: return ModelResponse( id=response_id, - choices=[{"text": content, "index": 0, "finish_reason": None}], + # choices=[{"text": content, "index": 0, "finish_reason": None}], + choices=[ + Choices( + message=Message(content=content, role="assistant"), + ) + ], created=created, model=model, ) + async def _convert_to_stream( content: str, step_name: str,