Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
51 changes: 22 additions & 29 deletions python/samples/demos/travel_planning_system/agents.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
# Copyright (c) Microsoft. All rights reserved.

from semantic_kernel.agents import ChatCompletionAgent
from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion
from semantic_kernel.functions import kernel_function
from reasoning_agent import create_reasoning_compatible_agent

_BASE_SYSTEM_MSG = (
"You are a helpful travel planning assistant. Always be professional and provide accurate information."
Expand Down Expand Up @@ -52,59 +52,52 @@ def search_flights(self, origin: str, destination: str, date: str) -> str:

def get_agents() -> dict[str, ChatCompletionAgent]:
"""Creates and returns a set of agents for the travel planning system."""
# 1. Conversation Manager Agent
conversation_manager = ChatCompletionAgent(
print("Using ReasoningCompatibleAgent (auto-detects standard vs reasoning models)")

# Create reasoning-compatible agents that auto-detect model type
conversation_manager = create_reasoning_compatible_agent(
name="conversation_manager",
description="Manages conversation flow and coordinates between agents",
instructions=f"{_BASE_SYSTEM_MSG} You coordinate the conversation and ensure users get comprehensive help.",
service=AzureChatCompletion(),
instructions="You are a conversation manager for a travel planning system. "
"Coordinate between different agents to help users plan their trips.",
)

# 2. Travel Planner Agent
planner = ChatCompletionAgent(
planner = create_reasoning_compatible_agent(
name="planner",
description="Creates comprehensive travel plans including flights, hotels, and activities",
instructions=(
f"{_BASE_SYSTEM_MSG} You create detailed travel plans that include flights, hotels, and activities."
),
service=AzureChatCompletion(),
instructions="You are a travel planner. Create detailed travel itineraries "
"including flights, hotels, and activities based on user preferences.",
plugins=[PlanningPlugin()],
)

# 3. Router Agent
router = ChatCompletionAgent(
router = create_reasoning_compatible_agent(
name="router",
description="Routes tasks to appropriate specialized agents",
instructions=f"{_BASE_SYSTEM_MSG} You analyze plans and delegate tasks to the right specialized agents.",
service=AzureChatCompletion(),
instructions="You are a router agent. Analyze user requests and direct them "
"to the most appropriate specialist agent.",
)

# 4. Destination Expert Agent
destination_expert = ChatCompletionAgent(
destination_expert = create_reasoning_compatible_agent(
name="destination_expert",
description="Expert in destination recommendations and local information",
instructions=(
f"{_BASE_SYSTEM_MSG} You provide expert advice on destinations, attractions, and local experiences."
),
service=AzureChatCompletion(),
instructions="You are a destination expert. Provide detailed information about "
"travel destinations, local attractions, and travel tips.",
plugins=[PlanningPlugin()],
)

# 5. Flight Agent
flight_agent = ChatCompletionAgent(
flight_agent = create_reasoning_compatible_agent(
name="flight_agent",
description="Specializes in flight booking",
instructions=f"{_BASE_SYSTEM_MSG} You handle all flight-related tasks including booking.",
service=AzureChatCompletion(),
instructions="You are a flight booking specialist. Help users search for "
"and book flights that meet their travel needs.",
plugins=[FlightPlugin()],
)

# 6. Hotel Agent
hotel_agent = ChatCompletionAgent(
hotel_agent = create_reasoning_compatible_agent(
name="hotel_agent",
description="Specializes in hotel booking",
instructions=f"{_BASE_SYSTEM_MSG} You handle all hotel-related tasks including booking.",
service=AzureChatCompletion(),
instructions="You are a hotel booking specialist. Help users search for "
"and book hotels that meet their accommodation needs.",
plugins=[HotelPlugin()],
)

Expand Down
120 changes: 120 additions & 0 deletions python/samples/demos/travel_planning_system/azure_reasoning_service.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
"""Azure OpenAI Reasoning Service for o3/o1 models."""

import logging
import os
from typing import Any, List
import httpx
from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion
from semantic_kernel.contents import ChatHistory, ChatMessageContent, AuthorRole
from semantic_kernel.exceptions import ServiceResponseException

logger = logging.getLogger(__name__)


class AzureReasoningCompletion(AzureChatCompletion):
    """Azure OpenAI service for o1/o3 reasoning models.

    Overrides the standard chat-completion path to POST directly to the
    deployment's ``/reasoning/completions`` endpoint via ``httpx``, adding a
    light chain-of-thought prefix to user messages that look like complex
    planning tasks. Configuration (endpoint, key, deployment, API version)
    is read from the standard ``AZURE_OPENAI_*`` environment variables.
    """

    # User prompts containing any of these keywords get a CoT nudge prepended.
    _COT_KEYWORDS = ("plan", "book", "find", "search", "recommend", "budget", "analyze", "compare")

    async def get_chat_message_contents(
        self,
        chat_history: ChatHistory,
        settings,
        **kwargs: Any,
    ) -> List[ChatMessageContent]:
        """Call the Azure reasoning API and return the assistant's reply.

        Args:
            chat_history: Conversation so far. System messages are dropped
                because reasoning models do not accept the system role.
            settings: Execution settings; only ``max_tokens`` and
                ``temperature`` are read (falling back to 4000 / 0.7 when
                absent or ``None``).
            **kwargs: Accepted for interface compatibility; unused here.

        Returns:
            A single-element list containing the assistant message.

        Raises:
            ServiceResponseException: On any transport, HTTP, or payload error.
        """
        try:
            # Convert the chat history to the reasoning API's message format.
            messages = []
            for message in chat_history.messages:
                # Reasoning models reject the system role entirely.
                if message.role == AuthorRole.SYSTEM:
                    continue

                content = message.content
                # Nudge complex planning requests toward step-by-step reasoning.
                if message.role == AuthorRole.USER and content:
                    lowered = content.lower()
                    if any(keyword in lowered for keyword in self._COT_KEYWORDS):
                        content = f"Think step by step about this request. {content}"

                # Use the enum's value ("user"/"assistant"): str() on an Enum
                # mixin can yield "AuthorRole.USER" depending on Python
                # version, which the API would reject as an unknown role.
                messages.append({"role": message.role.value, "content": content})

            # SK settings commonly carry max_tokens/temperature as explicit
            # None; treat that the same as "absent" so we never send null.
            max_tokens = getattr(settings, "max_tokens", None) or 4000
            temperature = getattr(settings, "temperature", None)
            request_data = {
                "messages": messages,
                "max_completion_tokens": max_tokens,
                # NOTE(review): some reasoning deployments reject temperature
                # altogether — confirm against the target API version.
                "temperature": 0.7 if temperature is None else temperature,
            }

            # Connection details come from the environment, matching the
            # defaults AzureChatCompletion itself reads.
            endpoint = os.getenv("AZURE_OPENAI_ENDPOINT", "").rstrip("/")
            api_key = os.getenv("AZURE_OPENAI_API_KEY", "")
            deployment_name = os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME", "o3-mini")
            api_version = os.getenv("AZURE_OPENAI_API_VERSION", "2024-12-01-preview")

            # Reasoning endpoint instead of the usual chat/completions path.
            url = f"{endpoint}/openai/deployments/{deployment_name}/reasoning/completions"
            headers = {
                "Content-Type": "application/json",
                "api-key": api_key,
            }

            logger.info("Making reasoning API call to: %s", url)
            logger.info("Deployment: %s, API Version: %s", deployment_name, api_version)

            async with httpx.AsyncClient() as client:
                response = await client.post(
                    url=url,
                    headers=headers,
                    json=request_data,
                    params={"api-version": api_version},
                    timeout=120.0,  # reasoning models can take much longer than chat models
                )

            if response.status_code != 200:
                error_text = response.text
                logger.error(
                    "Azure Reasoning API error: %s - %s (url=%s, api-version=%s)",
                    response.status_code,
                    error_text,
                    url,
                    api_version,
                )
                raise ServiceResponseException(
                    f"Azure Reasoning API failed with status {response.status_code}: {error_text}"
                )

            response_data = response.json()
            logger.info("Reasoning API response received successfully")

            choices = response_data.get("choices")
            if not choices:
                raise ServiceResponseException("No valid response from Azure Reasoning API")

            content = choices[0]["message"]["content"]
            return [
                ChatMessageContent(
                    role=AuthorRole.ASSISTANT,
                    content=content,
                    model_id=deployment_name,
                )
            ]

        except ServiceResponseException:
            # Re-raise our own errors untouched; the generic handler below
            # would otherwise double-wrap them and obscure the status code.
            raise
        except httpx.RequestError as e:
            logger.error("Request error calling Azure Reasoning API: %s", e)
            raise ServiceResponseException(f"Request failed: {e}") from e
        except Exception as e:
            # logger.exception records the traceback for unexpected failures.
            logger.exception("Unexpected error in Azure Reasoning API")
            raise ServiceResponseException(f"Azure Reasoning service error: {e}") from e

    async def get_streaming_chat_message_contents(
        self,
        chat_history: ChatHistory,
        settings,
        **kwargs: Any,
    ):
        """Simulate streaming for reasoning models.

        Reasoning models don't support streaming, so the full non-streaming
        response is fetched once and its messages yielded one by one.
        """
        results = await self.get_chat_message_contents(chat_history, settings, **kwargs)
        for result in results:
            yield result
Loading