
Commit 96e20ac

fix lint

1 parent 3df8971

7 files changed: 16 additions, 12 deletions

src/codegate/config.py

Lines changed: 1 addition & 1 deletion
@@ -20,7 +20,7 @@
     "anthropic": "https://api.anthropic.com/v1",
     "vllm": "http://localhost:8000",  # Base URL without /v1 path
     "ollama": "http://localhost:11434",  # Default Ollama server URL
-    "lm_studio": "http://localhost:1234"
+    "lm_studio": "http://localhost:1234",
 }
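The functional change is just the trailing comma. A brief aside on why lint tooling (Black's "magic trailing comma", for one) asks for it: with a comma after the last entry of a multi-line literal, adding another provider later shows up as a one-line diff. A minimal sketch, where the deepseek entry is hypothetical and purely illustrative:

# Without a trailing comma on the last entry, appending "deepseek" would
# also rewrite the "lm_studio" line; with it, the addition is one line.
provider_urls = {
    "lm_studio": "http://localhost:1234",
    "deepseek": "https://api.deepseek.com",  # hypothetical new entry
}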

src/codegate/pipeline/base.py

Lines changed: 4 additions & 2 deletions
@@ -321,7 +321,8 @@ async def process(

 class InputPipelineInstance:
     def __init__(
-        self, pipeline_steps: List[PipelineStep], secret_manager: SecretsManager, is_fim: bool):
+        self, pipeline_steps: List[PipelineStep], secret_manager: SecretsManager, is_fim: bool
+    ):
         self.pipeline_steps = pipeline_steps
         self.secret_manager = secret_manager
         self.is_fim = is_fim
@@ -384,7 +385,8 @@ async def process_request(

 class SequentialPipelineProcessor:
     def __init__(
-        self, pipeline_steps: List[PipelineStep], secret_manager: SecretsManager, is_fim: bool):
+        self, pipeline_steps: List[PipelineStep], secret_manager: SecretsManager, is_fim: bool
+    ):
         self.pipeline_steps = pipeline_steps
         self.secret_manager = secret_manager
         self.is_fim = is_fim
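Both constructors receive the same fix: the over-long def line is split so the parameters sit on an indented continuation line and the closing parenthesis drops back to the def's indentation, the way Black wraps signatures past the line limit. A toy before/after with simplified types (the real ones are List[PipelineStep] and SecretsManager):

class Example:
    # Before (over the limit, ")" glued to the parameter line):
    #     def __init__(
    #             self, pipeline_steps, secret_manager, is_fim):
    # After, as in the hunk above:
    def __init__(
        self, pipeline_steps: list, secret_manager: object, is_fim: bool
    ):
        self.pipeline_steps = pipeline_steps
        self.secret_manager = secret_manager
        self.is_fim = is_fim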

src/codegate/pipeline/codegate_context_retriever/codegate.py

Lines changed: 5 additions & 3 deletions
@@ -95,10 +95,12 @@ async def process(
         # in the rest of the user query/messsages
         user_messages = re.sub(r"```.*?```", "", user_message, flags=re.DOTALL)
         user_messages = re.sub(r"⋮...*?⋮...\n\n", "", user_messages, flags=re.DOTALL)
-        user_messages = re.sub(r"<environment_details>.*?</environment_details>", "", user_messages, flags=re.DOTALL)
+        user_messages = re.sub(
+            r"<environment_details>.*?</environment_details>", "", user_messages, flags=re.DOTALL
+        )

         # split messages into double newlines, to avoid passing so many content in the search
-        split_messages = re.split(r'</?task>|(\n\n)', user_messages)
+        split_messages = re.split(r"</?task>|(\n\n)", user_messages)
         collected_bad_packages = []
         for item_message in split_messages:
             # Vector search to find bad packages
@@ -143,7 +145,7 @@ async def process(
             # Combine the updated task block with the rest of the message
             context_msg = updated_task_content + rest_of_message
         else:
-            context_msg = f'Context: {context_str} \n\n Query: {message_str}'  # type: ignore
+            context_msg = f"Context: {context_str} \n\n Query: {message_str}"  # type: ignore
         message["content"] = context_msg

         logger.debug("Final context message", context_message=context_msg)
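One behavior worth calling out when reading this hunk: because (\n\n) is a capturing group, re.split keeps the blank-line separators in its output, while the uncaptured </?task> alternative contributes None entries, so the loop above iterates over a mix of text chunks, separators, and None. A standalone check of that semantics:

import re

parts = re.split(r"</?task>|(\n\n)", "<task>do x</task>\n\nextra details")
# The <task> tags split without being captured (None placeholders),
# while the captured "\n\n" separator is kept as its own list element.
print(parts)  # ['', None, 'do x', None, '', '\n\n', 'extra details']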

src/codegate/pipeline/secrets/secrets.py

Lines changed: 2 additions & 1 deletion
@@ -452,7 +452,8 @@ async def process_chunk(
             return [chunk]

         is_cline_client = any(
-            "Cline" in str(message.trigger_string or "") for message in input_context.alerts_raised or []
+            "Cline" in str(message.trigger_string or "")
+            for message in input_context.alerts_raised or []
         )

         # Check if this is the first chunk (delta role will be present, others will not)
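The reflow is behavior-identical: input_context.alerts_raised or [] guards against None, so the any() generator is safe even when no alerts were raised. A minimal reproduction of the pattern; the Alert class is a stand-in, not CodeGate's actual type:

from dataclasses import dataclass
from typing import Optional

@dataclass
class Alert:  # stand-in for the real alert type
    trigger_string: Optional[str]

alerts = [Alert("Cline wrapper"), Alert(None)]
is_cline_client = any(
    "Cline" in str(alert.trigger_string or "")
    for alert in alerts or []
)
assert is_cline_client

# When alerts_raised is None, "or []" keeps the generator from raising.
assert not any("Cline" in str(a.trigger_string or "") for a in None or [])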

src/codegate/providers/base.py

Lines changed: 2 additions & 1 deletion
@@ -199,7 +199,8 @@ async def _cleanup_after_streaming(
         context.sensitive.secure_cleanup()

     async def complete(
-        self, data: Dict, api_key: Optional[str], is_fim_request: bool) -> Union[ModelResponse, AsyncIterator[ModelResponse]]:
+        self, data: Dict, api_key: Optional[str], is_fim_request: bool
+    ) -> Union[ModelResponse, AsyncIterator[ModelResponse]]:
         """
         Main completion flow with pipeline integration
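The rewrap leaves the contract unchanged: complete() returns either a finished ModelResponse or an AsyncIterator[ModelResponse] when the request streams. A toy sketch of that dual-return shape, with a placeholder class standing in for the real response type:

from typing import AsyncIterator, Dict, Optional, Union

class ModelResponse:  # placeholder for the real response type
    pass

async def _stream() -> AsyncIterator[ModelResponse]:
    yield ModelResponse()

async def complete(
    data: Dict, api_key: Optional[str], is_fim_request: bool
) -> Union[ModelResponse, AsyncIterator[ModelResponse]]:
    # Streaming requests get back an async iterator; non-streaming
    # requests get a single response. Callers must check which one.
    if data.get("stream"):
        return _stream()
    return ModelResponse()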

src/codegate/providers/openai/provider.py

Lines changed: 1 addition & 1 deletion
@@ -56,7 +56,7 @@ async def create_completion(

         # if model starts with lm_studio, propagate it
         if data.get("model", "").startswith("lm_studio"):
-            data["base_url"] = self.lm_studio_url+"/v1/"
+            data["base_url"] = self.lm_studio_url + "/v1/"
         is_fim_request = self._is_fim_request(request, data)
         try:
             stream = await self.complete(data, api_key, is_fim_request=is_fim_request)
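Besides padding the + operator (PEP 8 / E225), the hunk shows the routing rule: when the requested model name is namespaced under lm_studio, the request's base_url is repointed at the local LM Studio server. A self-contained sketch using the default URL added in config.py above; the model name is hypothetical:

lm_studio_url = "http://localhost:1234"  # default from config.py

data = {"model": "lm_studio/qwen2.5-coder"}  # hypothetical model name
if data.get("model", "").startswith("lm_studio"):
    data["base_url"] = lm_studio_url + "/v1/"

assert data["base_url"] == "http://localhost:1234/v1/"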

src/codegate/storage/storage_engine.py

Lines changed: 1 addition & 3 deletions
@@ -53,9 +53,7 @@ def __init__(self, data_path="./sqlite_data"):
         self.inference_engine = LlamaCppInferenceEngine()
         conf = Config.get_config()
         if conf and conf.model_base_path and conf.embedding_model:
-            self.model_path = (
-                f"{conf.model_base_path}/{conf.embedding_model}"
-            )
+            self.model_path = f"{conf.model_base_path}/{conf.embedding_model}"
         else:
             self.model_path = ""
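Once the f-string fits on one line, the wrapping parentheses are redundant and the value is identical. A quick illustration with hypothetical stand-ins for the Config fields:

# Hypothetical values for conf.model_base_path / conf.embedding_model:
model_base_path = "./codegate_volume/models"
embedding_model = "all-minilm-L6-v2-q5_k_m.gguf"

model_path = f"{model_base_path}/{embedding_model}"
print(model_path)  # ./codegate_volume/models/all-minilm-L6-v2-q5_k_m.gguf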
