1 change: 1 addition & 0 deletions src/codegate/clients/clients.py
@@ -12,3 +12,4 @@ class ClientType(Enum):
COPILOT = "copilot" # Copilot client
OPEN_INTERPRETER = "open_interpreter" # Open Interpreter client
AIDER = "aider" # Aider client
CONTINUE = "continue" # Continue client
19 changes: 19 additions & 0 deletions src/codegate/clients/detector.py
@@ -160,6 +160,24 @@ def client_name(self) -> ClientType:
return ClientType.OPEN_INTERPRETER


class ContinueDetector(BaseClientDetector):
"""
Detector for Continue client based on message content
"""

def __init__(self):
super().__init__()
# This is a hack that really only detects Continue with DeepSeek
# we should get a header or user agent for this (upstream PR pending)
self.content_detector = ContentDetector(
"You are an AI programming assistant, utilizing the DeepSeek Coder model"
)

@property
def client_name(self) -> ClientType:
return ClientType.CONTINUE


class CopilotDetector(BaseClientDetector):
"""
Detector for Copilot client based on user agent
@@ -191,6 +209,7 @@ def __init__(self):
KoduDetector(),
OpenInterpreter(),
CopilotDetector(),
ContinueDetector(),
]

def __call__(self, func):
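For orientation, here is a minimal sketch of the content-based matching the new `ContinueDetector` relies on. This is an assumption-laden stand-in, not codegate's actual `ContentDetector`/`BaseClientDetector` implementation; the request-body shapes are inferred from the tests added in this PR.

```python
# Simplified stand-in (not codegate's real classes) for content-based client detection:
# look for a marker phrase in the request's "system" field or in message contents.
from typing import Any, Dict


class SimpleContentDetector:
    """Return True when the marker phrase appears in the system prompt or any message."""

    def __init__(self, marker: str):
        self.marker = marker

    def detect(self, body: Dict[str, Any]) -> bool:
        system = body.get("system")
        if isinstance(system, str) and self.marker in system:
            return True
        for message in body.get("messages", []):
            content = message.get("content")
            if isinstance(content, str) and self.marker in content:
                return True
        return False


if __name__ == "__main__":
    detector = SimpleContentDetector(
        "You are an AI programming assistant, utilizing the DeepSeek Coder model"
    )
    # Matches on an exact, case-sensitive substring, mirroring the tests added further down.
    assert detector.detect(
        {"system": "You are an AI programming assistant, utilizing the DeepSeek Coder model"}
    )
    assert not detector.detect({"system": "You are an AI assistant"})
```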
22 changes: 21 additions & 1 deletion src/codegate/pipeline/cli/cli.py
Expand Up @@ -13,6 +13,8 @@
)
from codegate.pipeline.cli.commands import CustomInstructions, Version, Workspace

codegate_regex = re.compile(r"^codegate(?:\s+(.*))?", re.IGNORECASE)

HELP_TEXT = """
## CodeGate CLI\n
**Usage**: `codegate [-h] <command> [args]`\n
@@ -77,6 +79,22 @@ def _get_cli_from_open_interpreter(last_user_message_str: str) -> Optional[re.Match[str]]:
return re.match(r"^codegate\s*(.*?)\s*$", last_user_block, re.IGNORECASE)


def _get_cli_from_continue(last_user_message_str: str) -> Optional[re.Match[str]]:
"""
Continue sends a differently formatted message to the CLI if DeepSeek is used
"""
deepseek_match = re.search(
Review comment (Member):

I saw this happening with another model (tinyllama:latest). I was testing muxing, so in reality I was hitting an OpenAI model, but that was what I had in my Continue config.

Anyway, just mentioning it because the variable name implies this is specific to DeepSeek, and it may be a gradual release in general for Continue.

r"utilizing the DeepSeek Coder model.*?### Instruction:\s*codegate\s+(.*?)\s*### Response:",
last_user_message_str,
re.DOTALL | re.IGNORECASE,
)
if deepseek_match:
command = deepseek_match.group(1).strip()
return re.match(r"^(.*?)$", command) # This creates a match object with the command

return codegate_regex.match(last_user_message_str)


class CodegateCli(PipelineStep):
"""Pipeline step that handles codegate cli."""

@@ -110,12 +128,14 @@ async def process(
if last_user_message is not None:
last_user_message_str, _ = last_user_message
last_user_message_str = last_user_message_str.strip()
codegate_regex = re.compile(r"^codegate(?:\s+(.*))?", re.IGNORECASE)

# Check client-specific matchers first
if context.client in [ClientType.CLINE, ClientType.KODU]:
match = _get_cli_from_cline(codegate_regex, last_user_message_str)
elif context.client in [ClientType.OPEN_INTERPRETER]:
match = _get_cli_from_open_interpreter(last_user_message_str)
elif context.client in [ClientType.CONTINUE]:
match = _get_cli_from_continue(last_user_message_str)
else:
# Check if "codegate" is the first word in the message
match = codegate_regex.match(last_user_message_str)
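To illustrate what the new `_get_cli_from_continue` regex extracts, here is a small standalone example. The sample prompt is a constructed approximation of Continue's DeepSeek-style message format, not captured traffic.

```python
import re

# Constructed example of the message shape the regex targets (an assumption).
sample = (
    "You are an AI programming assistant, utilizing the DeepSeek Coder model.\n"
    "### Instruction:\n"
    "codegate workspace list\n"
    "### Response:"
)

deepseek_match = re.search(
    r"utilizing the DeepSeek Coder model.*?### Instruction:\s*codegate\s+(.*?)\s*### Response:",
    sample,
    re.DOTALL | re.IGNORECASE,
)
if deepseek_match:
    # Prints "workspace list" -- the command handed on to the CodeGate CLI pipeline step.
    print(deepseek_match.group(1).strip())
```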
74 changes: 74 additions & 0 deletions tests/clients/test_detector.py
Expand Up @@ -10,6 +10,7 @@
BaseClientDetector,
ClineDetector,
ContentDetector,
ContinueDetector,
CopilotDetector,
DetectClient,
HeaderDetector,
@@ -291,6 +292,79 @@ async def test_missing_user_agent(self, mock_request):
assert await detector.detect(mock_request) is False


class TestContinueDetector:
@pytest.mark.asyncio
async def test_successful_detection_via_system_message(self, mock_request):
detector = ContinueDetector()

async def get_json():
return {
"system": "You are an AI programming assistant, utilizing the DeepSeek Coder model"
}

mock_request.json = get_json
assert await detector.detect(mock_request) is True
assert detector.client_name == ClientType.CONTINUE

@pytest.mark.asyncio
async def test_detection_in_message_content(self, mock_request):
detector = ContinueDetector()

async def get_json():
return {
"messages": [
{
"content": "You are an AI programming assistant, utilizing the DeepSeek Coder model" # noqa
}
]
}

mock_request.json = get_json
assert await detector.detect(mock_request) is True

@pytest.mark.asyncio
async def test_failed_detection_with_partial_match(self, mock_request):
detector = ContinueDetector()

async def get_json():
return {"system": "You are an AI assistant"}

mock_request.json = get_json
assert await detector.detect(mock_request) is False

@pytest.mark.asyncio
async def test_case_insensitive_match_handling(self, mock_request):
detector = ContinueDetector()

async def get_json():
return {
"system": "you ARE an ai programming assistant, UTILIZING the deepseek coder MODEL"
}

mock_request.json = get_json
assert await detector.detect(mock_request) is False # Should be case-sensitive

@pytest.mark.asyncio
async def test_empty_system_message(self, mock_request):
detector = ContinueDetector()

async def get_json():
return {"system": ""}

mock_request.json = get_json
assert await detector.detect(mock_request) is False

@pytest.mark.asyncio
async def test_malformed_system_field(self, mock_request):
detector = ContinueDetector()

async def get_json():
return {"system": {"nested": "You are an AI programming assistant"}}

mock_request.json = get_json
assert await detector.detect(mock_request) is False


class TestDetectClient:
@pytest.mark.asyncio
async def test_successful_client_detection(self, mock_request):
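The new tests rely on a `mock_request` fixture defined elsewhere in this test module; its body is outside the diff. A plausible minimal sketch, purely as an assumption, is:

```python
from unittest.mock import MagicMock

import pytest


@pytest.fixture
def mock_request():
    # Bare request mock; individual tests overwrite `json` with an async callable,
    # and user-agent based detectors read from `headers`.
    request = MagicMock()
    request.headers = {}
    return request
```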