LM Studio OSS Support (#2312)

## Overview

Adds LM Studio OSS support. Closes #1883


### Changes
This PR enhances the behavior of the `--oss` flag to support LM Studio as a
provider. Additionally, it introduces a new flag, `--local-provider`, which
can accept `lmstudio` or `ollama` as values if the user wants to
explicitly choose which one to use.

If no provider is specified, `codex --oss` will auto-select the provider
based on whichever one is running.

#### Additional enhancements 
The default can be set using `oss_provider` in config like:

```
oss_provider = "lmstudio"
```

Non-interactive users will need to either provide the provider as an
argument or set it in their `config.toml`.

### Notes
For best performance, [set the default context
length](https://lmstudio.ai/docs/app/advanced/per-model) for gpt-oss to
the maximum your machine can support

---------

Co-authored-by: Matt Clayton <matt@lmstudio.ai>
Co-authored-by: Eric Traut <etraut@openai.com>
This commit is contained in:
rugvedS07
2025-11-17 14:49:09 -05:00
committed by GitHub
parent 842a1b7fe7
commit 837bc98a1d
21 changed files with 1315 additions and 69 deletions

View File

@@ -7,18 +7,21 @@ use additional_dirs::add_dir_warning_message;
use app::App;
pub use app::AppExitInfo;
use codex_app_server_protocol::AuthMode;
use codex_common::oss::ensure_oss_provider_ready;
use codex_common::oss::get_default_model_for_oss_provider;
use codex_core::AuthManager;
use codex_core::BUILT_IN_OSS_MODEL_PROVIDER_ID;
use codex_core::CodexAuth;
use codex_core::INTERACTIVE_SESSION_SOURCES;
use codex_core::RolloutRecorder;
use codex_core::auth::enforce_login_restrictions;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::find_codex_home;
use codex_core::config::load_config_as_toml_with_cli_overrides;
use codex_core::config::resolve_oss_provider;
use codex_core::find_conversation_path_by_id_str;
use codex_core::get_platform_sandbox;
use codex_core::protocol::AskForApproval;
use codex_ollama::DEFAULT_OSS_MODEL;
use codex_protocol::config_types::SandboxMode;
use opentelemetry_appender_tracing::layer::OpenTelemetryTracingBridge;
use std::fs::OpenOptions;
@@ -56,6 +59,7 @@ mod markdown_render;
mod markdown_stream;
mod model_migration;
pub mod onboarding;
mod oss_selection;
mod pager_overlay;
pub mod public_widgets;
mod render;
@@ -124,21 +128,75 @@ pub async fn run_main(
// When using `--oss`, let the bootstrapper pick the model (defaulting to
// gpt-oss:20b) and ensure it is present locally. Also, force the builtin
let raw_overrides = cli.config_overrides.raw_overrides.clone();
// `oss` model provider.
let overrides_cli = codex_common::CliConfigOverrides { raw_overrides };
let cli_kv_overrides = match overrides_cli.parse_overrides() {
// Parse `-c` overrides from the CLI.
Ok(v) => v,
#[allow(clippy::print_stderr)]
Err(e) => {
eprintln!("Error parsing -c overrides: {e}");
std::process::exit(1);
}
};
// we load config.toml here to determine project state.
#[allow(clippy::print_stderr)]
let codex_home = match find_codex_home() {
Ok(codex_home) => codex_home.to_path_buf(),
Err(err) => {
eprintln!("Error finding codex home: {err}");
std::process::exit(1);
}
};
#[allow(clippy::print_stderr)]
let config_toml =
match load_config_as_toml_with_cli_overrides(&codex_home, cli_kv_overrides.clone()).await {
Ok(config_toml) => config_toml,
Err(err) => {
eprintln!("Error loading config.toml: {err}");
std::process::exit(1);
}
};
let model_provider_override = if cli.oss {
let resolved = resolve_oss_provider(
cli.oss_provider.as_deref(),
&config_toml,
cli.config_profile.clone(),
);
if let Some(provider) = resolved {
Some(provider)
} else {
// No provider configured, prompt the user
let provider = oss_selection::select_oss_provider(&codex_home).await?;
if provider == "__CANCELLED__" {
return Err(std::io::Error::other(
"OSS provider selection was cancelled by user",
));
}
Some(provider)
}
} else {
None
};
// When using `--oss`, let the bootstrapper pick the model based on selected provider
let model = if let Some(model) = &cli.model {
Some(model.clone())
} else if cli.oss {
Some(DEFAULT_OSS_MODEL.to_owned())
// Use the provider from model_provider_override
model_provider_override
.as_ref()
.and_then(|provider_id| get_default_model_for_oss_provider(provider_id))
.map(std::borrow::ToOwned::to_owned)
} else {
None // No model specified, will use the default.
};
let model_provider_override = if cli.oss {
Some(BUILT_IN_OSS_MODEL_PROVIDER_ID.to_owned())
} else {
None
};
// canonicalize the cwd
let cwd = cli.cwd.clone().map(|p| p.canonicalize().unwrap_or(p));
let additional_dirs = cli.add_dir.clone();
@@ -149,7 +207,7 @@ pub async fn run_main(
approval_policy,
sandbox_mode,
cwd,
model_provider: model_provider_override,
model_provider: model_provider_override.clone(),
config_profile: cli.config_profile.clone(),
codex_linux_sandbox_exe,
base_instructions: None,
@@ -161,16 +219,6 @@ pub async fn run_main(
experimental_sandbox_command_assessment: None,
additional_writable_roots: additional_dirs,
};
let raw_overrides = cli.config_overrides.raw_overrides.clone();
let overrides_cli = codex_common::CliConfigOverrides { raw_overrides };
let cli_kv_overrides = match overrides_cli.parse_overrides() {
Ok(v) => v,
#[allow(clippy::print_stderr)]
Err(e) => {
eprintln!("Error parsing -c overrides: {e}");
std::process::exit(1);
}
};
let config = load_config_or_exit(cli_kv_overrides.clone(), overrides.clone()).await;
@@ -232,10 +280,19 @@ pub async fn run_main(
.with_target(false)
.with_filter(targets);
if cli.oss {
codex_ollama::ensure_oss_ready(&config)
.await
.map_err(|e| std::io::Error::other(format!("OSS setup failed: {e}")))?;
if cli.oss && model_provider_override.is_some() {
// We're in the oss section, so provider_id should be Some
// Let's handle None case gracefully though just in case
let provider_id = match model_provider_override.as_ref() {
Some(id) => id,
None => {
error!("OSS provider unexpectedly not set when oss flag is used");
return Err(std::io::Error::other(
"OSS provider not set but oss flag was used",
));
}
};
ensure_oss_provider_ready(provider_id, &config).await?;
}
let otel = codex_core::otel_init::build_provider(&config, env!("CARGO_PKG_VERSION"));