mirror of
https://github.com/openai/codex.git
synced 2026-04-30 03:12:20 +03:00
make model optional in config (#7769)
- Make `Config.model` optional and centralize the default-selection logic in `ModelsManager`, including a `default_model` helper (preferring `codex-auto-balanced` when available), so sessions now carry an explicitly chosen model separate from the base config.
- Resolve `model` once in `core` and `tui` from the config, then store the resolved value on the structs that need it.
- Refresh the model list before resolving the default model.
This commit is contained in:
@@ -59,7 +59,6 @@ pub mod edit;
|
||||
pub mod profile;
|
||||
pub mod types;
|
||||
|
||||
pub const OPENAI_DEFAULT_MODEL: &str = "gpt-5.1-codex-max";
|
||||
const OPENAI_DEFAULT_REVIEW_MODEL: &str = "gpt-5.1-codex-max";
|
||||
|
||||
/// Maximum number of bytes of the documentation that will be embedded. Larger
|
||||
@@ -73,7 +72,7 @@ pub const CONFIG_TOML_FILE: &str = "config.toml";
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct Config {
|
||||
/// Optional override of model selection.
|
||||
pub model: String,
|
||||
pub model: Option<String>,
|
||||
|
||||
/// Model used specifically for review sessions. Defaults to "gpt-5.1-codex-max".
|
||||
pub review_model: String,
|
||||
@@ -1108,11 +1107,7 @@ impl Config {
|
||||
|
||||
let forced_login_method = cfg.forced_login_method;
|
||||
|
||||
// todo(aibrahim): make model optional
|
||||
let model = model
|
||||
.or(config_profile.model)
|
||||
.or(cfg.model)
|
||||
.unwrap_or_else(default_model);
|
||||
let model = model.or(config_profile.model).or(cfg.model);
|
||||
|
||||
let compact_prompt = compact_prompt.or(cfg.compact_prompt).and_then(|value| {
|
||||
let trimmed = value.trim();
|
||||
@@ -1313,10 +1308,6 @@ impl Config {
|
||||
}
|
||||
}
|
||||
|
||||
fn default_model() -> String {
|
||||
OPENAI_DEFAULT_MODEL.to_string()
|
||||
}
|
||||
|
||||
fn default_review_model() -> String {
|
||||
OPENAI_DEFAULT_REVIEW_MODEL.to_string()
|
||||
}
|
||||
@@ -2940,7 +2931,7 @@ model_verbosity = "high"
|
||||
)?;
|
||||
assert_eq!(
|
||||
Config {
|
||||
model: "o3".to_string(),
|
||||
model: Some("o3".to_string()),
|
||||
review_model: OPENAI_DEFAULT_REVIEW_MODEL.to_string(),
|
||||
model_context_window: None,
|
||||
model_auto_compact_token_limit: None,
|
||||
@@ -3014,7 +3005,7 @@ model_verbosity = "high"
|
||||
fixture.codex_home(),
|
||||
)?;
|
||||
let expected_gpt3_profile_config = Config {
|
||||
model: "gpt-3.5-turbo".to_string(),
|
||||
model: Some("gpt-3.5-turbo".to_string()),
|
||||
review_model: OPENAI_DEFAULT_REVIEW_MODEL.to_string(),
|
||||
model_context_window: None,
|
||||
model_auto_compact_token_limit: None,
|
||||
@@ -3103,7 +3094,7 @@ model_verbosity = "high"
|
||||
fixture.codex_home(),
|
||||
)?;
|
||||
let expected_zdr_profile_config = Config {
|
||||
model: "o3".to_string(),
|
||||
model: Some("o3".to_string()),
|
||||
review_model: OPENAI_DEFAULT_REVIEW_MODEL.to_string(),
|
||||
model_context_window: None,
|
||||
model_auto_compact_token_limit: None,
|
||||
@@ -3178,7 +3169,7 @@ model_verbosity = "high"
|
||||
fixture.codex_home(),
|
||||
)?;
|
||||
let expected_gpt5_profile_config = Config {
|
||||
model: "gpt-5.1".to_string(),
|
||||
model: Some("gpt-5.1".to_string()),
|
||||
review_model: OPENAI_DEFAULT_REVIEW_MODEL.to_string(),
|
||||
model_context_window: None,
|
||||
model_auto_compact_token_limit: None,
|
||||
|
||||
Reference in New Issue
Block a user