Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 22 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,20 @@ AI: Searching across all indices... Found 47 results from 3 indices:
- 'tutorials': 9 hands-on tutorials
```

### 🤖 Chat Completions with RAG (v1.6+)

```
You: "Create a chat workspace for customer support with my products and FAQs indices"
AI: I'll create that workspace... ✓ Chat workspace 'support-chat' created!

You: "Using the support workspace, how do I return a defective product?"
AI: Based on your FAQs and product policies, here's the return process...
[Generates contextual response using indexed documents]

You: "Generate a response about our warranty policy"
AI: [Streams response] According to your documentation, the warranty covers...
```

## 🔧 Installation

### Prerequisites
Expand Down Expand Up @@ -352,6 +366,14 @@ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file
#### Search
- `search`: Flexible search across single or multiple indices with filtering and sorting options

#### Chat Completions (Meilisearch v1.6+)
- `chat-completion`: Generate chat responses with RAG using indexed documents as context (supports streaming)
- `create-chat-workspace`: Create a chat workspace with default settings for consistent interactions
- `update-chat-workspace`: Modify existing chat workspace configurations
- `list-chat-workspaces`: List all available chat workspaces
- `get-chat-workspace`: Get details of a specific chat workspace
- `delete-chat-workspace`: Remove a chat workspace

#### Settings Management
- `get-settings`: View current settings for an index
- `update-settings`: Update index settings (ranking, faceting, etc.)
Expand Down
278 changes: 278 additions & 0 deletions src/meilisearch_mcp/chat.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,278 @@
from typing import Dict, Any, List, Optional, AsyncIterator
from meilisearch import Client
import httpx
import json


class ChatManager:
    """Manage Meilisearch chat completions and chat workspaces.

    Wraps the Meilisearch HTTP endpoints ``/chat/completions`` and
    ``/chat/workspaces`` with raw httpx calls, reusing the base URL and API
    key from the provided meilisearch ``Client`` (the official SDK does not
    expose these endpoints directly).
    """

    def __init__(self, client: Client):
        """Initialize from an existing meilisearch client.

        Args:
            client: Configured meilisearch client; its URL and API key are
                reused for the raw HTTP calls made here.
        """
        self.client = client
        self.base_url = client.config.url.rstrip("/")
        # Only attach an Authorization header when an API key is configured,
        # instead of inserting None and filtering it back out.
        self.headers = {"Content-Type": "application/json"}
        if client.config.api_key:
            self.headers["Authorization"] = f"Bearer {client.config.api_key}"

    @staticmethod
    def _build_chat_payload(
        query: str,
        stream: bool,
        model: Optional[str],
        temperature: Optional[float],
        max_tokens: Optional[int],
        index_uids: Optional[List[str]],
        workspace_uid: Optional[str],
    ) -> Dict[str, Any]:
        """Build a ``/chat/completions`` request body, omitting unset options.

        Shared by the streaming and non-streaming completion methods so the
        two requests cannot drift apart.
        """
        payload: Dict[str, Any] = {"query": query, "stream": stream}
        if model:
            payload["model"] = model
        if temperature is not None:
            payload["temperature"] = temperature
        # `is not None` (not truthiness) so an explicit 0 is still sent.
        if max_tokens is not None:
            payload["maxTokens"] = max_tokens
        if index_uids:
            payload["indexUids"] = index_uids
        if workspace_uid:
            payload["workspaceUid"] = workspace_uid
        return payload

    @staticmethod
    def _build_workspace_payload(
        name: Optional[str],
        description: Optional[str],
        model: Optional[str],
        temperature: Optional[float],
        max_tokens: Optional[int],
        index_uids: Optional[List[str]],
    ) -> Dict[str, Any]:
        """Build a workspace create/update body containing only set fields."""
        payload: Dict[str, Any] = {}
        if name:
            payload["name"] = name
        if description:
            payload["description"] = description
        if model:
            payload["model"] = model
        if temperature is not None:
            payload["temperature"] = temperature
        # `is not None` (not truthiness) so an explicit 0 is still sent.
        if max_tokens is not None:
            payload["maxTokens"] = max_tokens
        if index_uids:
            payload["indexUids"] = index_uids
        return payload

    @staticmethod
    def _json_or_empty(response: httpx.Response) -> Dict[str, Any]:
        """Decode a JSON body, tolerating empty responses.

        DELETE endpoints commonly answer with no body (204); calling
        ``response.json()`` on an empty body raises ``JSONDecodeError``.
        """
        return response.json() if response.content else {}

    async def chat_completion_stream(
        self,
        query: str,
        model: Optional[str] = None,
        temperature: Optional[float] = None,
        max_tokens: Optional[int] = None,
        index_uids: Optional[List[str]] = None,
        workspace_uid: Optional[str] = None,
    ) -> AsyncIterator[str]:
        """
        Stream chat completion responses from Meilisearch.

        Args:
            query: The user's query/prompt
            model: The model to use for chat completion (e.g., "gpt-4", "gpt-3.5-turbo")
            temperature: Controls randomness (0-1)
            max_tokens: Maximum tokens in response
            index_uids: List of index UIDs to search for context
            workspace_uid: Chat workspace UID to use

        Yields:
            Content fragments extracted from each SSE ``data:`` chunk.
        """
        endpoint = f"{self.base_url}/chat/completions"
        payload = self._build_chat_payload(
            query, True, model, temperature, max_tokens, index_uids, workspace_uid
        )

        async with httpx.AsyncClient() as client:
            async with client.stream(
                "POST", endpoint, headers=self.headers, json=payload, timeout=60.0
            ) as response:
                response.raise_for_status()
                async for line in response.aiter_lines():
                    # Server-sent events: only "data: ..." lines carry payload.
                    if not line.startswith("data: "):
                        continue
                    data = line[len("data: "):]
                    if data == "[DONE]":
                        break
                    try:
                        chunk = json.loads(data)
                    except json.JSONDecodeError:
                        # Skip keep-alives / partial frames rather than abort.
                        continue
                    choices = chunk.get("choices")
                    if choices:
                        content = choices[0].get("delta", {}).get("content", "")
                        if content:
                            yield content

    def chat_completion(
        self,
        query: str,
        model: Optional[str] = None,
        temperature: Optional[float] = None,
        max_tokens: Optional[int] = None,
        index_uids: Optional[List[str]] = None,
        workspace_uid: Optional[str] = None,
    ) -> Dict[str, Any]:
        """
        Get a non-streaming chat completion response.

        Args:
            query: The user's query/prompt
            model: The model to use for chat completion
            temperature: Controls randomness (0-1)
            max_tokens: Maximum tokens in response
            index_uids: List of index UIDs to search for context
            workspace_uid: Chat workspace UID to use

        Returns:
            Chat completion response

        Raises:
            httpx.HTTPStatusError: On a non-2xx response.
        """
        endpoint = f"{self.base_url}/chat/completions"
        payload = self._build_chat_payload(
            query, False, model, temperature, max_tokens, index_uids, workspace_uid
        )

        with httpx.Client() as client:
            response = client.post(
                endpoint, headers=self.headers, json=payload, timeout=60.0
            )
            response.raise_for_status()
            return response.json()

    def create_chat_workspace(
        self,
        uid: str,
        name: str,
        description: Optional[str] = None,
        model: Optional[str] = None,
        temperature: Optional[float] = None,
        max_tokens: Optional[int] = None,
        index_uids: Optional[List[str]] = None,
    ) -> Dict[str, Any]:
        """
        Create a new chat workspace.

        Args:
            uid: Unique identifier for the workspace
            name: Name of the workspace
            description: Description of the workspace
            model: Default model for this workspace
            temperature: Default temperature for this workspace
            max_tokens: Default max tokens for this workspace
            index_uids: Default index UIDs for this workspace

        Returns:
            Created workspace information

        Raises:
            httpx.HTTPStatusError: On a non-2xx response.
        """
        endpoint = f"{self.base_url}/chat/workspaces"
        payload = self._build_workspace_payload(
            name, description, model, temperature, max_tokens, index_uids
        )
        payload["uid"] = uid  # uid is required on creation only

        with httpx.Client() as client:
            response = client.post(endpoint, headers=self.headers, json=payload)
            response.raise_for_status()
            return response.json()

    def update_chat_workspace(
        self,
        uid: str,
        name: Optional[str] = None,
        description: Optional[str] = None,
        model: Optional[str] = None,
        temperature: Optional[float] = None,
        max_tokens: Optional[int] = None,
        index_uids: Optional[List[str]] = None,
    ) -> Dict[str, Any]:
        """
        Update an existing chat workspace.

        Only the fields that are provided are sent, so unspecified settings
        are left untouched on the server.

        Args:
            uid: Unique identifier of the workspace to update
            name: New name for the workspace
            description: New description for the workspace
            model: New default model for this workspace
            temperature: New default temperature for this workspace
            max_tokens: New default max tokens for this workspace
            index_uids: New default index UIDs for this workspace

        Returns:
            Updated workspace information

        Raises:
            httpx.HTTPStatusError: On a non-2xx response.
        """
        endpoint = f"{self.base_url}/chat/workspaces/{uid}"
        payload = self._build_workspace_payload(
            name, description, model, temperature, max_tokens, index_uids
        )

        with httpx.Client() as client:
            response = client.patch(endpoint, headers=self.headers, json=payload)
            response.raise_for_status()
            return response.json()

    def list_chat_workspaces(
        self, limit: Optional[int] = None, offset: Optional[int] = None
    ) -> Dict[str, Any]:
        """
        List all chat workspaces.

        Args:
            limit: Maximum number of workspaces to return
            offset: Number of workspaces to skip

        Returns:
            List of chat workspaces

        Raises:
            httpx.HTTPStatusError: On a non-2xx response.
        """
        endpoint = f"{self.base_url}/chat/workspaces"

        # `is not None` so explicit 0 values are forwarded to the server.
        params: Dict[str, Any] = {}
        if limit is not None:
            params["limit"] = limit
        if offset is not None:
            params["offset"] = offset

        with httpx.Client() as client:
            response = client.get(endpoint, headers=self.headers, params=params)
            response.raise_for_status()
            return response.json()

    def get_chat_workspace(self, uid: str) -> Dict[str, Any]:
        """
        Get details of a specific chat workspace.

        Args:
            uid: Unique identifier of the workspace

        Returns:
            Workspace details

        Raises:
            httpx.HTTPStatusError: On a non-2xx response.
        """
        endpoint = f"{self.base_url}/chat/workspaces/{uid}"

        with httpx.Client() as client:
            response = client.get(endpoint, headers=self.headers)
            response.raise_for_status()
            return response.json()

    def delete_chat_workspace(self, uid: str) -> Dict[str, Any]:
        """
        Delete a chat workspace.

        Args:
            uid: Unique identifier of the workspace to delete

        Returns:
            Deletion confirmation, or ``{}`` when the server answers with an
            empty body (e.g. 204 No Content).

        Raises:
            httpx.HTTPStatusError: On a non-2xx response.
        """
        endpoint = f"{self.base_url}/chat/workspaces/{uid}"

        with httpx.Client() as client:
            response = client.delete(endpoint, headers=self.headers)
            response.raise_for_status()
            # DELETE may legitimately return no body; don't choke on it.
            return self._json_or_empty(response)
2 changes: 2 additions & 0 deletions src/meilisearch_mcp/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
from .keys import KeyManager
from .logging import MCPLogger
from .monitoring import MonitoringManager
from .chat import ChatManager
from .__version__ import __version__

logger = MCPLogger()
Expand All @@ -31,6 +32,7 @@ def __init__(
self.tasks = TaskManager(self.client)
self.keys = KeyManager(self.client)
self.monitoring = MonitoringManager(self.client)
self.chat = ChatManager(self.client)

def health_check(self) -> bool:
"""Check if Meilisearch is healthy"""
Expand Down
Loading
Loading