
Commit 7000db2

Fix Gemini tool calling via GeminiProvider workaround
Fixes #78

## Problem

Tool calling with Google Gemini models on OCI GenAI fails with a 500 error after a long timeout (~93 s). The OCI translation layer doesn't properly convert the Generic API format to Gemini's native functionResponse format.

## Solution

Added a GeminiProvider class that converts tool-related messages to regular user/assistant messages:

- AIMessage with tool_calls → AssistantMessage with descriptive text
- ToolMessage → UserMessage with tool result text

## Testing

- 8 parallel tool calling tests pass
- Cross-model compatibility tests pass (Gemini, Meta, xAI)
1 parent 17be3c4 commit 7000db2
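To make the conversion described above concrete, the snippet below mirrors the text rewriting that GeminiProvider applies, using only langchain_core message classes and no OCI client. It is an illustrative sketch, not code from this commit; the `describe_tool_turn` helper is hypothetical.

```python
# Illustrative sketch only (not part of the commit): reproduces the text
# conversion GeminiProvider performs on a tool-calling turn.
import json

from langchain_core.messages import AIMessage, ToolMessage


def describe_tool_turn(ai: AIMessage, tool: ToolMessage) -> tuple[str, str]:
    """Return the assistant/user text sent in place of tool_calls/ToolMessage."""
    descriptions = [
        f"I'll call {tc['name']} with arguments: {json.dumps(tc.get('args', {}))}"
        for tc in ai.tool_calls
    ]
    assistant_text = "\n".join(descriptions)
    user_text = f"Function {tool.name or 'tool'} returned: {tool.content}"
    return assistant_text, user_text


ai_msg = AIMessage(
    content="",
    tool_calls=[{"name": "get_weather", "args": {"city": "Tokyo"}, "id": "call_1"}],
)
tool_msg = ToolMessage(content="22C, clear", tool_call_id="call_1", name="get_weather")

assistant_text, user_text = describe_tool_turn(ai_msg, tool_msg)
print(assistant_text)  # I'll call get_weather with arguments: {"city": "Tokyo"}
print(user_text)       # Function get_weather returned: 22C, clear
```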

File tree

2 files changed, +835 -0 lines changed


libs/oci/langchain_oci/chat_models/oci_generative_ai.py

Lines changed: 102 additions & 0 deletions
@@ -1105,6 +1105,107 @@ class MetaProvider(GenericProvider):
     pass


+class GeminiProvider(GenericProvider):
+    """Provider for Google Gemini models.
+
+    This provider works around OCI's lack of support for ToolMessage and
+    tool_calls with Gemini models by converting tool interactions to
+    regular user/assistant messages.
+
+    The OCI GenAI service's translation layer for Gemini doesn't properly
+    handle:
+    - ToolMessage (role: TOOL)
+    - AssistantMessage with tool_calls
+
+    This provider converts:
+    - AIMessage with tool_calls → AssistantMessage with descriptive text
+    - ToolMessage → UserMessage with tool result
+    """
+
+    def messages_to_oci_params(
+        self, messages: List[BaseMessage], **kwargs: Any
+    ) -> Dict[str, Any]:
+        """Convert LangChain messages to OCI chat parameters for Gemini.
+
+        This method transforms tool-related messages into regular messages
+        that Gemini can process through OCI's translation layer.
+
+        Args:
+            messages: List of LangChain BaseMessage objects
+            **kwargs: Additional keyword arguments
+
+        Returns:
+            Dict containing OCI chat parameters
+        """
+        oci_messages = []
+
+        for message in messages:
+            if isinstance(message, ToolMessage):
+                # Convert ToolMessage to UserMessage with tool result
+                tool_name = message.name or "tool"
+                result_text = f"Function {tool_name} returned: {message.content}"
+                oci_message = self.oci_chat_message["USER"](
+                    content=[self.oci_chat_message_text_content(text=result_text)]
+                )
+            elif isinstance(message, AIMessage) and message.tool_calls:
+                # Convert AIMessage with tool_calls to regular AssistantMessage
+                # describing what tool would be called
+                tool_descriptions = []
+                for tc in message.tool_calls:
+                    args_str = json.dumps(tc.get("args", {}))
+                    tool_descriptions.append(
+                        f"I'll call {tc['name']} with arguments: {args_str}"
+                    )
+
+                # Handle content which may be string or list
+                raw_content = message.content
+                if isinstance(raw_content, list):
+                    # Extract text from list content
+                    content_text = " ".join(
+                        str(item)
+                        if isinstance(item, str)
+                        else str(item.get("text", ""))
+                        for item in raw_content
+                    )
+                else:
+                    content_text = str(raw_content) if raw_content else ""
+
+                if tool_descriptions:
+                    if content_text:
+                        content_text += "\n"
+                    content_text += "\n".join(tool_descriptions)
+
+                oci_message = self.oci_chat_message["ASSISTANT"](
+                    content=[self.oci_chat_message_text_content(text=content_text)],
+                    # Explicitly do NOT include tool_calls
+                )
+            elif isinstance(message, AIMessage):
+                # Regular AIMessage without tool_calls
+                content = self._process_message_content(message.content)
+                oci_message = self.oci_chat_message["ASSISTANT"](content=content)
+            elif isinstance(message, HumanMessage):
+                content = self._process_message_content(message.content)
+                oci_message = self.oci_chat_message["USER"](content=content)
+            elif isinstance(message, SystemMessage):
+                content = self._process_message_content(message.content)
+                oci_message = self.oci_chat_message["SYSTEM"](content=content)
+            else:
+                # Fall back to parent implementation for unknown types
+                content = self._process_message_content(message.content)
+                role = message.type.upper() if hasattr(message, "type") else "USER"
+                if role in self.oci_chat_message:
+                    oci_message = self.oci_chat_message[role](content=content)
+                else:
+                    oci_message = self.oci_chat_message["USER"](content=content)
+
+            oci_messages.append(oci_message)
+
+        return {
+            "messages": oci_messages,
+            "api_format": self.chat_api_format,
+        }
+
+
 class ChatOCIGenAI(BaseChatModel, OCIGenAIBase):
     """ChatOCIGenAI chat model integration.

@@ -1189,6 +1290,7 @@ def _provider_map(self) -> Mapping[str, Provider]:
         return {
             "cohere": CohereProvider(),
             "meta": MetaProvider(),
+            "google": GeminiProvider(),
             "generic": GenericProvider(),
         }

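As a usage sketch (not part of this commit): with the "google" entry in the provider map, a model id with the google prefix should be routed through GeminiProvider, so a standard LangChain tool-calling loop works unchanged. The model id, service endpoint, and compartment OCID below are placeholders, and the exact constructor arguments should be checked against the ChatOCIGenAI documentation.

```python
# Hedged usage sketch: placeholder model id / endpoint / compartment OCID.
# Assumes the "google" model-id prefix selects GeminiProvider (per the
# _provider_map change above) and that valid OCI credentials are configured.
from langchain_core.messages import HumanMessage, ToolMessage
from langchain_core.tools import tool
from langchain_oci.chat_models import ChatOCIGenAI


@tool
def get_weather(city: str) -> str:
    """Return the current weather for a city."""
    return f"Sunny in {city}"


llm = ChatOCIGenAI(
    model_id="google.gemini-2.5-flash",  # placeholder Gemini model id
    service_endpoint="https://inference.generativeai.us-chicago-1.oci.oraclecloud.com",
    compartment_id="ocid1.compartment.oc1..example",  # placeholder OCID
)
llm_with_tools = llm.bind_tools([get_weather])

messages = [HumanMessage(content="What's the weather in Tokyo?")]
ai_msg = llm_with_tools.invoke(messages)  # model requests get_weather(...)
messages.append(ai_msg)

# Run the requested tool and send its result back; GeminiProvider rewrites
# this ToolMessage into plain user text before it reaches the OCI service.
for tc in ai_msg.tool_calls:
    messages.append(
        ToolMessage(content=get_weather.invoke(tc["args"]), tool_call_id=tc["id"])
    )

print(llm_with_tools.invoke(messages).content)
```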
