diff --git a/README.md b/README.md
index 0672dff..dd9c58d 100644
--- a/README.md
+++ b/README.md
@@ -110,6 +110,20 @@ AI: Searching across all indices... Found 47 results from 3 indices:
 - 'tutorials': 9 hands-on tutorials
 ```
 
+### 🤖 Chat Completions with RAG (v1.6+)
+
+```
+You: "Create a chat workspace for customer support with my products and FAQs indices"
+AI: I'll create that workspace... ✓ Chat workspace 'support-chat' created!
+
+You: "Using the support workspace, how do I return a defective product?"
+AI: Based on your FAQs and product policies, here's the return process...
+[Generates contextual response using indexed documents]
+
+You: "Generate a response about our warranty policy"
+AI: [Streams response] According to your documentation, the warranty covers...
+```
+
 ## 🔧 Installation
 
 ### Prerequisites
@@ -352,6 +366,14 @@ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file
 #### Search
 - `search`: Flexible search across single or multiple indices with filtering and sorting options
 
+#### Chat Completions (Meilisearch v1.6+)
+- `chat-completion`: Generate chat responses with RAG using indexed documents as context (supports streaming)
+- `create-chat-workspace`: Create a chat workspace with default settings for consistent interactions
+- `update-chat-workspace`: Modify existing chat workspace configurations
+- `list-chat-workspaces`: List all available chat workspaces
+- `get-chat-workspace`: Get details of a specific chat workspace
+- `delete-chat-workspace`: Remove a chat workspace
+
 #### Settings Management
 - `get-settings`: View current settings for an index
 - `update-settings`: Update index settings (ranking, faceting, etc.)
diff --git a/src/meilisearch_mcp/chat.py b/src/meilisearch_mcp/chat.py
new file mode 100644
index 0000000..9f07727
--- /dev/null
+++ b/src/meilisearch_mcp/chat.py
@@ -0,0 +1,278 @@
+from typing import Dict, Any, List, Optional, AsyncIterator
+from meilisearch import Client
+import httpx
+import json
+
+
+class ChatManager:
+    """Manage Meilisearch chat completions and workspaces"""
+
+    def __init__(self, client: Client):
+        self.client = client
+        self.base_url = client.config.url.rstrip("/")
+        self.headers = {
+            "Authorization": (
+                f"Bearer {client.config.api_key}" if client.config.api_key else None
+            ),
+            "Content-Type": "application/json",
+        }
+        # Remove None values from headers
+        self.headers = {k: v for k, v in self.headers.items() if v is not None}
+
+    async def chat_completion_stream(
+        self,
+        query: str,
+        model: Optional[str] = None,
+        temperature: Optional[float] = None,
+        max_tokens: Optional[int] = None,
+        index_uids: Optional[List[str]] = None,
+        workspace_uid: Optional[str] = None,
+    ) -> AsyncIterator[str]:
+        """
+        Stream chat completion responses from Meilisearch.
+ + Args: + query: The user's query/prompt + model: The model to use for chat completion (e.g., "gpt-4", "gpt-3.5-turbo") + temperature: Controls randomness (0-1) + max_tokens: Maximum tokens in response + index_uids: List of index UIDs to search for context + workspace_uid: Chat workspace UID to use + + Yields: + Streaming response chunks + """ + endpoint = f"{self.base_url}/chat/completions" + + payload = {"query": query, "stream": True} + + if model: + payload["model"] = model + if temperature is not None: + payload["temperature"] = temperature + if max_tokens: + payload["maxTokens"] = max_tokens + if index_uids: + payload["indexUids"] = index_uids + if workspace_uid: + payload["workspaceUid"] = workspace_uid + + async with httpx.AsyncClient() as client: + async with client.stream( + "POST", endpoint, headers=self.headers, json=payload, timeout=60.0 + ) as response: + response.raise_for_status() + async for line in response.aiter_lines(): + if line.startswith("data: "): + data = line[6:] # Remove "data: " prefix + if data == "[DONE]": + break + try: + chunk = json.loads(data) + if "choices" in chunk and chunk["choices"]: + content = ( + chunk["choices"][0] + .get("delta", {}) + .get("content", "") + ) + if content: + yield content + except json.JSONDecodeError: + continue + + def chat_completion( + self, + query: str, + model: Optional[str] = None, + temperature: Optional[float] = None, + max_tokens: Optional[int] = None, + index_uids: Optional[List[str]] = None, + workspace_uid: Optional[str] = None, + ) -> Dict[str, Any]: + """ + Get a non-streaming chat completion response. + + Args: + query: The user's query/prompt + model: The model to use for chat completion + temperature: Controls randomness (0-1) + max_tokens: Maximum tokens in response + index_uids: List of index UIDs to search for context + workspace_uid: Chat workspace UID to use + + Returns: + Chat completion response + """ + endpoint = f"{self.base_url}/chat/completions" + + payload = {"query": query, "stream": False} + + if model: + payload["model"] = model + if temperature is not None: + payload["temperature"] = temperature + if max_tokens: + payload["maxTokens"] = max_tokens + if index_uids: + payload["indexUids"] = index_uids + if workspace_uid: + payload["workspaceUid"] = workspace_uid + + with httpx.Client() as client: + response = client.post( + endpoint, headers=self.headers, json=payload, timeout=60.0 + ) + response.raise_for_status() + return response.json() + + def create_chat_workspace( + self, + uid: str, + name: str, + description: Optional[str] = None, + model: Optional[str] = None, + temperature: Optional[float] = None, + max_tokens: Optional[int] = None, + index_uids: Optional[List[str]] = None, + ) -> Dict[str, Any]: + """ + Create a new chat workspace. 
+ + Args: + uid: Unique identifier for the workspace + name: Name of the workspace + description: Description of the workspace + model: Default model for this workspace + temperature: Default temperature for this workspace + max_tokens: Default max tokens for this workspace + index_uids: Default index UIDs for this workspace + + Returns: + Created workspace information + """ + endpoint = f"{self.base_url}/chat/workspaces" + + payload = {"uid": uid, "name": name} + + if description: + payload["description"] = description + if model: + payload["model"] = model + if temperature is not None: + payload["temperature"] = temperature + if max_tokens: + payload["maxTokens"] = max_tokens + if index_uids: + payload["indexUids"] = index_uids + + with httpx.Client() as client: + response = client.post(endpoint, headers=self.headers, json=payload) + response.raise_for_status() + return response.json() + + def update_chat_workspace( + self, + uid: str, + name: Optional[str] = None, + description: Optional[str] = None, + model: Optional[str] = None, + temperature: Optional[float] = None, + max_tokens: Optional[int] = None, + index_uids: Optional[List[str]] = None, + ) -> Dict[str, Any]: + """ + Update an existing chat workspace. + + Args: + uid: Unique identifier of the workspace to update + name: New name for the workspace + description: New description for the workspace + model: New default model for this workspace + temperature: New default temperature for this workspace + max_tokens: New default max tokens for this workspace + index_uids: New default index UIDs for this workspace + + Returns: + Updated workspace information + """ + endpoint = f"{self.base_url}/chat/workspaces/{uid}" + + payload = {} + + if name: + payload["name"] = name + if description: + payload["description"] = description + if model: + payload["model"] = model + if temperature is not None: + payload["temperature"] = temperature + if max_tokens: + payload["maxTokens"] = max_tokens + if index_uids: + payload["indexUids"] = index_uids + + with httpx.Client() as client: + response = client.patch(endpoint, headers=self.headers, json=payload) + response.raise_for_status() + return response.json() + + def list_chat_workspaces( + self, limit: Optional[int] = None, offset: Optional[int] = None + ) -> Dict[str, Any]: + """ + List all chat workspaces. + + Args: + limit: Maximum number of workspaces to return + offset: Number of workspaces to skip + + Returns: + List of chat workspaces + """ + endpoint = f"{self.base_url}/chat/workspaces" + + params = {} + if limit: + params["limit"] = limit + if offset: + params["offset"] = offset + + with httpx.Client() as client: + response = client.get(endpoint, headers=self.headers, params=params) + response.raise_for_status() + return response.json() + + def get_chat_workspace(self, uid: str) -> Dict[str, Any]: + """ + Get details of a specific chat workspace. + + Args: + uid: Unique identifier of the workspace + + Returns: + Workspace details + """ + endpoint = f"{self.base_url}/chat/workspaces/{uid}" + + with httpx.Client() as client: + response = client.get(endpoint, headers=self.headers) + response.raise_for_status() + return response.json() + + def delete_chat_workspace(self, uid: str) -> Dict[str, Any]: + """ + Delete a chat workspace. 
+ + Args: + uid: Unique identifier of the workspace to delete + + Returns: + Deletion confirmation + """ + endpoint = f"{self.base_url}/chat/workspaces/{uid}" + + with httpx.Client() as client: + response = client.delete(endpoint, headers=self.headers) + response.raise_for_status() + return response.json() diff --git a/src/meilisearch_mcp/client.py b/src/meilisearch_mcp/client.py index c7c5b5a..33c1ea3 100644 --- a/src/meilisearch_mcp/client.py +++ b/src/meilisearch_mcp/client.py @@ -9,6 +9,7 @@ from .keys import KeyManager from .logging import MCPLogger from .monitoring import MonitoringManager +from .chat import ChatManager from .__version__ import __version__ logger = MCPLogger() @@ -31,6 +32,7 @@ def __init__( self.tasks = TaskManager(self.client) self.keys = KeyManager(self.client) self.monitoring = MonitoringManager(self.client) + self.chat = ChatManager(self.client) def health_check(self) -> bool: """Check if Meilisearch is healthy""" diff --git a/src/meilisearch_mcp/server.py b/src/meilisearch_mcp/server.py index db6cca6..f52735d 100644 --- a/src/meilisearch_mcp/server.py +++ b/src/meilisearch_mcp/server.py @@ -361,6 +361,175 @@ async def handle_list_tools() -> list[types.Tool]: "additionalProperties": False, }, ), + types.Tool( + name="chat-completion", + description="Generate a chat completion response using Meilisearch's chat feature with RAG", + inputSchema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "The user's query or prompt", + }, + "model": { + "type": "string", + "description": "The model to use (e.g., 'gpt-4', 'gpt-3.5-turbo')", + }, + "temperature": { + "type": "number", + "description": "Controls randomness (0-1)", + }, + "maxTokens": { + "type": "integer", + "description": "Maximum tokens in response", + }, + "indexUids": { + "type": "array", + "items": {"type": "string"}, + "description": "List of index UIDs to search for context", + }, + "workspaceUid": { + "type": "string", + "description": "Chat workspace UID to use", + }, + "stream": { + "type": "boolean", + "description": "Whether to stream the response", + "default": True, + }, + }, + "required": ["query"], + "additionalProperties": False, + }, + ), + types.Tool( + name="create-chat-workspace", + description="Create a new chat workspace for managing chat settings", + inputSchema={ + "type": "object", + "properties": { + "uid": { + "type": "string", + "description": "Unique identifier for the workspace", + }, + "name": { + "type": "string", + "description": "Name of the workspace", + }, + "description": { + "type": "string", + "description": "Description of the workspace", + }, + "model": { + "type": "string", + "description": "Default model for this workspace", + }, + "temperature": { + "type": "number", + "description": "Default temperature for this workspace", + }, + "maxTokens": { + "type": "integer", + "description": "Default max tokens for this workspace", + }, + "indexUids": { + "type": "array", + "items": {"type": "string"}, + "description": "Default index UIDs for this workspace", + }, + }, + "required": ["uid", "name"], + "additionalProperties": False, + }, + ), + types.Tool( + name="update-chat-workspace", + description="Update an existing chat workspace", + inputSchema={ + "type": "object", + "properties": { + "uid": { + "type": "string", + "description": "Unique identifier of the workspace to update", + }, + "name": { + "type": "string", + "description": "New name for the workspace", + }, + "description": { + "type": "string", + "description": "New 
description for the workspace", + }, + "model": { + "type": "string", + "description": "New default model for this workspace", + }, + "temperature": { + "type": "number", + "description": "New default temperature for this workspace", + }, + "maxTokens": { + "type": "integer", + "description": "New default max tokens for this workspace", + }, + "indexUids": { + "type": "array", + "items": {"type": "string"}, + "description": "New default index UIDs for this workspace", + }, + }, + "required": ["uid"], + "additionalProperties": False, + }, + ), + types.Tool( + name="list-chat-workspaces", + description="List all chat workspaces", + inputSchema={ + "type": "object", + "properties": { + "limit": { + "type": "integer", + "description": "Maximum number of workspaces to return", + }, + "offset": { + "type": "integer", + "description": "Number of workspaces to skip", + }, + }, + "additionalProperties": False, + }, + ), + types.Tool( + name="get-chat-workspace", + description="Get details of a specific chat workspace", + inputSchema={ + "type": "object", + "properties": { + "uid": { + "type": "string", + "description": "Unique identifier of the workspace", + } + }, + "required": ["uid"], + "additionalProperties": False, + }, + ), + types.Tool( + name="delete-chat-workspace", + description="Delete a chat workspace", + inputSchema={ + "type": "object", + "properties": { + "uid": { + "type": "string", + "description": "Unique identifier of the workspace to delete", + } + }, + "required": ["uid"], + "additionalProperties": False, + }, + ), ] @self.server.call_tool() @@ -408,9 +577,7 @@ async def handle_call_tool( ] elif name == "delete-index": - result = self.meili_client.indexes.delete_index( - arguments["uid"] - ) + result = self.meili_client.indexes.delete_index(arguments["uid"]) return [ types.TextContent( type="text", @@ -613,6 +780,133 @@ async def handle_call_tool( ) ] + elif name == "chat-completion": + stream = arguments.get("stream", True) + + if stream: + # For streaming, we need to collect all chunks and return them + response_chunks = [] + async for ( + chunk + ) in self.meili_client.chat.chat_completion_stream( + query=arguments["query"], + model=arguments.get("model"), + temperature=arguments.get("temperature"), + max_tokens=arguments.get("maxTokens"), + index_uids=arguments.get("indexUids"), + workspace_uid=arguments.get("workspaceUid"), + ): + response_chunks.append(chunk) + + full_response = "".join(response_chunks) + self.logger.info( + "Chat completion streamed", + query=arguments["query"], + response_length=len(full_response), + ) + return [types.TextContent(type="text", text=full_response)] + else: + # Non-streaming response + response = self.meili_client.chat.chat_completion( + query=arguments["query"], + model=arguments.get("model"), + temperature=arguments.get("temperature"), + max_tokens=arguments.get("maxTokens"), + index_uids=arguments.get("indexUids"), + workspace_uid=arguments.get("workspaceUid"), + ) + self.logger.info( + "Chat completion generated", query=arguments["query"] + ) + return [ + types.TextContent( + type="text", + text=json.dumps( + response, indent=2, default=json_serializer + ), + ) + ] + + elif name == "create-chat-workspace": + result = self.meili_client.chat.create_chat_workspace( + uid=arguments["uid"], + name=arguments["name"], + description=arguments.get("description"), + model=arguments.get("model"), + temperature=arguments.get("temperature"), + max_tokens=arguments.get("maxTokens"), + index_uids=arguments.get("indexUids"), + ) + self.logger.info( + 
"Chat workspace created", workspace_uid=arguments["uid"] + ) + return [ + types.TextContent( + type="text", + text=f"Chat workspace created: {json.dumps(result, indent=2, default=json_serializer)}", + ) + ] + + elif name == "update-chat-workspace": + result = self.meili_client.chat.update_chat_workspace( + uid=arguments["uid"], + name=arguments.get("name"), + description=arguments.get("description"), + model=arguments.get("model"), + temperature=arguments.get("temperature"), + max_tokens=arguments.get("maxTokens"), + index_uids=arguments.get("indexUids"), + ) + self.logger.info( + "Chat workspace updated", workspace_uid=arguments["uid"] + ) + return [ + types.TextContent( + type="text", + text=f"Chat workspace updated: {json.dumps(result, indent=2, default=json_serializer)}", + ) + ] + + elif name == "list-chat-workspaces": + result = self.meili_client.chat.list_chat_workspaces( + limit=arguments.get("limit"), offset=arguments.get("offset") + ) + self.logger.info("Chat workspaces listed") + return [ + types.TextContent( + type="text", + text=f"Chat workspaces: {json.dumps(result, indent=2, default=json_serializer)}", + ) + ] + + elif name == "get-chat-workspace": + result = self.meili_client.chat.get_chat_workspace( + uid=arguments["uid"] + ) + self.logger.info( + "Chat workspace retrieved", workspace_uid=arguments["uid"] + ) + return [ + types.TextContent( + type="text", + text=f"Chat workspace: {json.dumps(result, indent=2, default=json_serializer)}", + ) + ] + + elif name == "delete-chat-workspace": + result = self.meili_client.chat.delete_chat_workspace( + uid=arguments["uid"] + ) + self.logger.info( + "Chat workspace deleted", workspace_uid=arguments["uid"] + ) + return [ + types.TextContent( + type="text", + text=f"Chat workspace deleted: {json.dumps(result, indent=2, default=json_serializer)}", + ) + ] + raise ValueError(f"Unknown tool: {name}") except Exception as e: diff --git a/tests/test_chat_features.py b/tests/test_chat_features.py new file mode 100644 index 0000000..d62bc25 --- /dev/null +++ b/tests/test_chat_features.py @@ -0,0 +1,312 @@ +import pytest +import json +import asyncio +from unittest.mock import MagicMock, AsyncMock, patch +from mcp.types import CallToolRequest, CallToolRequestParams +from src.meilisearch_mcp.server import create_server + + +@pytest.fixture +def mock_server(): + """Create a mock server for testing chat features""" + server = create_server("http://localhost:7700", "test_key") + return server + + +async def simulate_mcp_call(server, tool_name, arguments=None): + """Simulate an MCP client call to the server""" + handler = server.server.request_handlers.get(CallToolRequest) + if not handler: + raise RuntimeError("No call_tool handler found") + + request = CallToolRequest( + method="tools/call", + params=CallToolRequestParams(name=tool_name, arguments=arguments or {}), + ) + + return await handler(request) + + +class TestChatCompletion: + """Test chat completion functionality""" + + @pytest.mark.asyncio + @patch("src.meilisearch_mcp.chat.httpx.AsyncClient") + async def test_chat_completion_streaming(self, mock_async_client, mock_server): + """Test streaming chat completion""" + # Mock the streaming response + mock_response = AsyncMock() + mock_response.raise_for_status = MagicMock() + + # Simulate SSE stream chunks + async def mock_aiter_lines(): + yield 'data: {"choices": [{"delta": {"content": "Hello"}}]}' + yield 'data: {"choices": [{"delta": {"content": " world"}}]}' + yield "data: [DONE]" + + mock_response.aiter_lines = mock_aiter_lines + 
mock_async_client.return_value.__aenter__.return_value.stream.return_value.__aenter__.return_value = ( + mock_response + ) + + # Call the tool + result = await simulate_mcp_call( + mock_server, + "chat-completion", + { + "query": "Test query", + "stream": True, + "model": "gpt-4", + "temperature": 0.7, + "indexUids": ["movies", "books"], + }, + ) + + # Verify the response + assert result.content + assert len(result.content) == 1 + assert result.content[0].type == "text" + assert "Hello world" in result.content[0].text + + @pytest.mark.asyncio + @patch("src.meilisearch_mcp.chat.httpx.Client") + async def test_chat_completion_non_streaming(self, mock_client, mock_server): + """Test non-streaming chat completion""" + # Mock the response + mock_response = MagicMock() + mock_response.raise_for_status = MagicMock() + mock_response.json.return_value = { + "choices": [{"message": {"content": "This is a complete response"}}], + "model": "gpt-4", + "usage": {"prompt_tokens": 10, "completion_tokens": 5, "total_tokens": 15}, + } + mock_client.return_value.__enter__.return_value.post.return_value = ( + mock_response + ) + + # Call the tool + result = await simulate_mcp_call( + mock_server, + "chat-completion", + {"query": "Test query", "stream": False, "model": "gpt-4"}, + ) + + # Verify the response + assert len(result.content) == 1 + assert result.content[0].type == "text" + response_data = json.loads(result.content[0].text) + assert "choices" in response_data + assert ( + response_data["choices"][0]["message"]["content"] + == "This is a complete response" + ) + + +class TestChatWorkspaces: + """Test chat workspace management""" + + @pytest.mark.asyncio + @patch("src.meilisearch_mcp.chat.httpx.Client") + async def test_create_chat_workspace(self, mock_client, mock_server): + """Test creating a chat workspace""" + # Mock the response + mock_response = MagicMock() + mock_response.raise_for_status = MagicMock() + mock_response.json.return_value = { + "uid": "support-chat", + "name": "Customer Support", + "description": "Workspace for customer support queries", + "model": "gpt-4", + "temperature": 0.5, + "maxTokens": 1000, + "indexUids": ["products", "faqs"], + "createdAt": "2024-01-01T00:00:00Z", + "updatedAt": "2024-01-01T00:00:00Z", + } + mock_client.return_value.__enter__.return_value.post.return_value = ( + mock_response + ) + + # Call the tool + result = await simulate_mcp_call( + mock_server, + "create-chat-workspace", + { + "uid": "support-chat", + "name": "Customer Support", + "description": "Workspace for customer support queries", + "model": "gpt-4", + "temperature": 0.5, + "maxTokens": 1000, + "indexUids": ["products", "faqs"], + }, + ) + + # Verify the response + assert len(result.content) == 1 + assert result.content[0].type == "text" + assert "Chat workspace created" in result.content[0].text + assert "support-chat" in result.content[0].text + + @pytest.mark.asyncio + @patch("src.meilisearch_mcp.chat.httpx.Client") + async def test_update_chat_workspace(self, mock_client, mock_server): + """Test updating a chat workspace""" + # Mock the response + mock_response = MagicMock() + mock_response.raise_for_status = MagicMock() + mock_response.json.return_value = { + "uid": "support-chat", + "name": "Updated Customer Support", + "temperature": 0.7, + "updatedAt": "2024-01-02T00:00:00Z", + } + mock_client.return_value.__enter__.return_value.patch.return_value = ( + mock_response + ) + + # Call the tool + result = await simulate_mcp_call( + mock_server, + "update-chat-workspace", + { + "uid": 
"support-chat", + "name": "Updated Customer Support", + "temperature": 0.7, + }, + ) + + # Verify the response + assert len(result.content) == 1 + assert result.content[0].type == "text" + assert "Chat workspace updated" in result.content[0].text + assert "Updated Customer Support" in result.content[0].text + + @pytest.mark.asyncio + @patch("src.meilisearch_mcp.chat.httpx.Client") + async def test_list_chat_workspaces(self, mock_client, mock_server): + """Test listing chat workspaces""" + # Mock the response + mock_response = MagicMock() + mock_response.raise_for_status = MagicMock() + mock_response.json.return_value = { + "results": [ + {"uid": "support-chat", "name": "Customer Support"}, + {"uid": "sales-chat", "name": "Sales Assistant"}, + ], + "offset": 0, + "limit": 20, + "total": 2, + } + mock_client.return_value.__enter__.return_value.get.return_value = mock_response + + # Call the tool + result = await simulate_mcp_call( + mock_server, "list-chat-workspaces", {"limit": 10, "offset": 0} + ) + + # Verify the response + assert len(result.content) == 1 + assert result.content[0].type == "text" + assert "Chat workspaces" in result.content[0].text + response_data = json.loads( + result.content[0].text.replace("Chat workspaces: ", "") + ) + assert len(response_data["results"]) == 2 + + @pytest.mark.asyncio + @patch("src.meilisearch_mcp.chat.httpx.Client") + async def test_get_chat_workspace(self, mock_client, mock_server): + """Test getting a specific chat workspace""" + # Mock the response + mock_response = MagicMock() + mock_response.raise_for_status = MagicMock() + mock_response.json.return_value = { + "uid": "support-chat", + "name": "Customer Support", + "description": "Workspace for customer support queries", + "model": "gpt-4", + "temperature": 0.5, + "maxTokens": 1000, + "indexUids": ["products", "faqs"], + } + mock_client.return_value.__enter__.return_value.get.return_value = mock_response + + # Call the tool + result = await simulate_mcp_call( + mock_server, "get-chat-workspace", {"uid": "support-chat"} + ) + + # Verify the response + assert len(result.content) == 1 + assert result.content[0].type == "text" + assert "Chat workspace" in result.content[0].text + assert "support-chat" in result.content[0].text + + @pytest.mark.asyncio + @patch("src.meilisearch_mcp.chat.httpx.Client") + async def test_delete_chat_workspace(self, mock_client, mock_server): + """Test deleting a chat workspace""" + # Mock the response + mock_response = MagicMock() + mock_response.raise_for_status = MagicMock() + mock_response.json.return_value = { + "taskUid": 12345, + "status": "enqueued", + "type": "workspaceDeletion", + } + mock_client.return_value.__enter__.return_value.delete.return_value = ( + mock_response + ) + + # Call the tool + result = await simulate_mcp_call( + mock_server, "delete-chat-workspace", {"uid": "support-chat"} + ) + + # Verify the response + assert len(result.content) == 1 + assert result.content[0].type == "text" + assert "Chat workspace deleted" in result.content[0].text + + +class TestChatErrorHandling: + """Test error handling in chat features""" + + @pytest.mark.asyncio + @patch("src.meilisearch_mcp.chat.httpx.Client") + async def test_chat_completion_error(self, mock_client, mock_server): + """Test error handling in chat completion""" + # Mock an error response + mock_client.return_value.__enter__.return_value.post.side_effect = Exception( + "API Error" + ) + + # Call the tool + result = await simulate_mcp_call( + mock_server, "chat-completion", {"query": "Test query", 
"stream": False} + ) + + # Verify error handling + assert len(result.content) == 1 + assert result.content[0].type == "text" + assert "Error" in result.content[0].text + + @pytest.mark.asyncio + @patch("src.meilisearch_mcp.chat.httpx.AsyncClient") + async def test_streaming_error(self, mock_async_client, mock_server): + """Test error handling in streaming""" + # Mock an error during streaming + mock_async_client.return_value.__aenter__.return_value.stream.side_effect = ( + Exception("Streaming Error") + ) + + # Call the tool + result = await simulate_mcp_call( + mock_server, "chat-completion", {"query": "Test query", "stream": True} + ) + + # Verify error handling + assert len(result.content) == 1 + assert result.content[0].type == "text" + assert "Error" in result.content[0].text diff --git a/tests/test_mcp_client.py b/tests/test_mcp_client.py index 9e5e88a..bd4fa3e 100644 --- a/tests/test_mcp_client.py +++ b/tests/test_mcp_client.py @@ -256,6 +256,12 @@ async def test_complete_tool_list(self, mcp_server): "get-health-status", "get-index-metrics", "get-system-info", + "chat-completion", + "create-chat-workspace", + "update-chat-workspace", + "list-chat-workspaces", + "get-chat-workspace", + "delete-chat-workspace", ] assert len(tools) == len(expected_tools)