diff --git a/Dockerfile b/Dockerfile
index 68d8cbea..a20d1be4 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -111,6 +111,7 @@ ENV CODEGATE_VLLM_URL=
 ENV CODEGATE_OPENAI_URL=
 ENV CODEGATE_ANTHROPIC_URL=
 ENV CODEGATE_OLLAMA_URL=http://host.docker.internal:11434
+ENV CODEGATE_LM_STUDIO_URL=http://host.docker.internal:1234
 ENV CODEGATE_APP_LOG_LEVEL=WARNING
 ENV CODEGATE_LOG_FORMAT=TEXT
diff --git a/config.yaml.example b/config.yaml.example
index 05edcbc9..f5a954b5 100644
--- a/config.yaml.example
+++ b/config.yaml.example
@@ -26,6 +26,7 @@ provider_urls:
 # --vllm-url
 # --openai-url
 # --anthropic-url
+# --lm-studio-url
 
 # Certificate configuration
 certs_dir: "./certs"  # Directory for certificate files
diff --git a/docs/cli.md b/docs/cli.md
index 83c3d6aa..65021c83 100644
--- a/docs/cli.md
+++ b/docs/cli.md
@@ -71,6 +71,11 @@ codegate serve [OPTIONS]
   - Base URL for Ollama provider (/api path is added automatically)
   - Overrides configuration file and environment variables
 
+- `--lm-studio-url TEXT`: LM Studio provider URL (default: `http://localhost:1234`)
+  - Optional
+  - Base URL for LM Studio provider (/v1 path is added automatically)
+  - Overrides configuration file and environment variables
+
 - `--model-base-path TEXT`: Base path for loading models needed for the system
 
   - Optional
@@ -199,6 +204,12 @@ Start server with custom Ollama endpoint:
 codegate serve --ollama-url http://localhost:11434
 ```
 
+Start server with custom LM Studio endpoint:
+
+```bash
+codegate serve --lm-studio-url https://lmstudio.example.com
+```
+
 Show default system prompts:
 
 ```bash
diff --git a/docs/configuration.md b/docs/configuration.md
index 67058151..94675468 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -29,6 +29,7 @@ Values from higher-priority sources take precedence over lower-priority values.
   - OpenAI: `"https://api.openai.com/v1"`
   - Anthropic: `"https://api.anthropic.com/v1"`
   - Ollama: `"http://localhost:11434"`
+  - LM Studio: `"http://localhost:1234"`
 - Certificate configuration:
   - Certs directory: `"./certs"`
   - CA certificate: `"ca.crt"`
@@ -59,6 +60,7 @@ provider_urls:
   openai: "https://api.openai.com/v1"
   anthropic: "https://api.anthropic.com/v1"
   ollama: "http://localhost:11434"
+  lm_studio: "http://localhost:1234"
 certs_dir: "./certs"
 ca_cert: "ca.crt"
 ca_key: "ca.key"
@@ -80,6 +82,7 @@ Environment variables are automatically loaded with these mappings:
 - `CODEGATE_PROVIDER_OPENAI_URL`: OpenAI provider URL
 - `CODEGATE_PROVIDER_ANTHROPIC_URL`: Anthropic provider URL
 - `CODEGATE_PROVIDER_OLLAMA_URL`: Ollama provider URL
+- `CODEGATE_PROVIDER_LM_STUDIO_URL`: LM Studio provider URL
 - `CODEGATE_CERTS_DIR`: directory for certificate files
 - `CODEGATE_CA_CERT`: CA certificate file name
 - `CODEGATE_CA_KEY`: CA key file name
@@ -139,6 +142,7 @@ Provider URLs can be configured in several ways:
    export CODEGATE_PROVIDER_OPENAI_URL=https://api.openai.com/v1
    export CODEGATE_PROVIDER_ANTHROPIC_URL=https://api.anthropic.com/v1
    export CODEGATE_PROVIDER_OLLAMA_URL=http://localhost:11434
+   export CODEGATE_PROVIDER_LM_STUDIO_URL=http://localhost:1234
    ```
 
 3. CLI flags:
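For a quick local check of the new provider URL, a minimal sketch is shown below. It assumes LM Studio's local server is running on its default port 1234 with the OpenAI-compatible API enabled; the `/v1/models` request is only an illustrative reachability check and is not part of this change, while the `--lm-studio-url` flag and `CODEGATE_PROVIDER_LM_STUDIO_URL` variable are the ones introduced above.

```bash
# Assumes LM Studio's local server is running on its default port (1234)
# with the OpenAI-compatible API enabled; this request just confirms it is
# reachable before pointing CodeGate at it.
curl -s http://localhost:1234/v1/models

# Point CodeGate at it with the new CLI flag...
codegate serve --lm-studio-url http://localhost:1234

# ...or via the environment-variable mapping documented in docs/configuration.md.
export CODEGATE_PROVIDER_LM_STUDIO_URL=http://localhost:1234
codegate serve
```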
diff --git a/docs/development.md b/docs/development.md
index 4107611f..3f7668d9 100644
--- a/docs/development.md
+++ b/docs/development.md
@@ -232,6 +232,8 @@ docker run --name codegate -d -v /path/to/volume:/app/codegate_volume -p 8989:89
   [https://api.anthropic.com/v1](https://api.anthropic.com/v1))
 - CODEGATE_OLLAMA_URL: URL for OLlama inference engine (defaults to
   [http://localhost:11434/api](http://localhost:11434/api))
+- CODEGATE_LM_STUDIO_URL: URL for LM Studio inference engine (defaults to
+  [http://localhost:1234/api](http://localhost:1234/api))
 - CODEGATE_APP_LOG_LEVEL: Level of debug desired when running the codegate
   server (defaults to WARNING, can be ERROR/WARNING/INFO/DEBUG)
 - CODEGATE_LOG_FORMAT: Type of log formatting desired when running the codegate
@@ -312,6 +314,7 @@ Provider URLs can be configured through:
    export CODEGATE_PROVIDER_OPENAI_URL=https://api.openai.com/v1
    export CODEGATE_PROVIDER_ANTHROPIC_URL=https://api.anthropic.com/v1
    export CODEGATE_PROVIDER_OLLAMA_URL=http://localhost:11434
+   export CODEGATE_PROVIDER_LM_STUDIO_URL=http://localhost:1234
    ```
 
 3. CLI flags:
diff --git a/scripts/entrypoint.sh b/scripts/entrypoint.sh
index 90515787..45a6e3e2 100755
--- a/scripts/entrypoint.sh
+++ b/scripts/entrypoint.sh
@@ -44,6 +44,7 @@ start_application() {
     [ -n "$CODEGATE_ANTHROPIC_URL" ] && CMD_ARGS+=" --anthropic-url $CODEGATE_ANTHROPIC_URL"
     [ -n "$CODEGATE_OLLAMA_URL" ] && CMD_ARGS+=" --ollama-url $CODEGATE_OLLAMA_URL"
     [ -n "$CODEGATE_VLLM_URL" ] && CMD_ARGS+=" --vllm-url $CODEGATE_VLLM_URL"
+    [ -n "$CODEGATE_LM_STUDIO_URL" ] && CMD_ARGS+=" --lm-studio-url $CODEGATE_LM_STUDIO_URL"
 
     # Check and append debug level if set
     [ -n "$CODEGATE_APP_LOG_LEVEL" ] && CMD_ARGS+=" --log-level $CODEGATE_APP_LOG_LEVEL"
diff --git a/src/codegate/cli.py b/src/codegate/cli.py
index 374de39b..dc05ed25 100644
--- a/src/codegate/cli.py
+++ b/src/codegate/cli.py
@@ -192,6 +192,12 @@ def show_prompts(prompts: Optional[Path]) -> None:
     default=None,
     help="Ollama provider URL (default: http://localhost:11434/)",
 )
+@click.option(
+    "--lm-studio-url",
+    type=str,
+    default=None,
+    help="LM Studio provider URL (default: http://localhost:1234/)",
+)
 @click.option(
     "--model-base-path",
     type=str,
@@ -246,7 +252,7 @@ def show_prompts(prompts: Optional[Path]) -> None:
     default=None,
     help="Path to the vector SQLite database file (default: ./sqlite_data/vectordb.db)",
 )
-def serve(
+def serve(  # noqa: C901
     port: Optional[int],
     proxy_port: Optional[int],
     host: Optional[str],
@@ -258,6 +264,7 @@ def serve(
     openai_url: Optional[str],
     anthropic_url: Optional[str],
     ollama_url: Optional[str],
+    lm_studio_url: Optional[str],
     model_base_path: Optional[str],
     embedding_model: Optional[str],
     db_path: Optional[str],
@@ -280,6 +287,8 @@ def serve(
         cli_provider_urls["anthropic"] = anthropic_url
     if ollama_url:
         cli_provider_urls["ollama"] = ollama_url
+    if lm_studio_url:
+        cli_provider_urls["lm_studio"] = lm_studio_url
 
     # Load configuration with priority resolution
     cfg = Config.load(
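Taken together with the Dockerfile default and the entrypoint.sh argument handling, the container should pick the provider up from `CODEGATE_LM_STUDIO_URL` without further changes. A rough sketch of a container-level check follows; the image name, volume path, and port mapping are placeholders and are not defined by this diff, while the environment variable and its default value come from the changes above.

```bash
# Placeholder image name, volume path, and port mapping; only the -e override
# relates to this change. entrypoint.sh appends --lm-studio-url to CMD_ARGS
# only when CODEGATE_LM_STUDIO_URL is non-empty.
docker run --name codegate -d \
  -v /path/to/volume:/app/codegate_volume \
  -p 8989:8989 \
  -e CODEGATE_LM_STUDIO_URL=http://host.docker.internal:1234 \
  codegate:latest
```

Setting the variable to an empty string skips the flag entirely, so CodeGate falls back to its own built-in default rather than the Dockerfile value.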