mirror of https://github.com/openai/codex.git
Migrate tui to use models manager (#7555)
- This PR treats the `ModelsManager` like `AuthManager` and propagates it into the tui, replacing `builtin_model_presets`.
- We also decrease the visibility of `builtin_model_presets`, based on https://github.com/openai/codex/pull/7552.
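Read together, the hunks below boil down to the following call shape. This is a minimal sketch assembled from the diff itself, not code lifted from the PR: the surrounding function is elided, and it assumes `available_models` is guarded by an async `RwLock` (implied by the `.read().await` in the diff).

```rust
// Sketch only; all identifiers are taken from the hunks below.
// Model presets now come from the shared ModelsManager instead of
// the global all_model_presets() helper.
let models_manager: Arc<ModelsManager> = conversation_manager.get_models_manager();

// Snapshot the currently available presets (assumed to be an
// async-RwLock-guarded Vec<ModelPreset>).
let available_models: Vec<ModelPreset> =
    models_manager.available_models.read().await.clone();

// The predicate now takes the preset list as a parameter, so tests can
// inject their own list instead of reaching for a global.
if should_show_model_migration_prompt(
    &config.model,
    &target_model,
    hide_prompt_flag,
    available_models.clone(),
) {
    // show the migration prompt
}
```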
@@ -29,7 +29,7 @@ use codex_core::features::Feature;
 use codex_core::model_family::find_family_for_model;
 use codex_core::openai_models::model_presets::HIDE_GPT_5_1_CODEX_MAX_MIGRATION_PROMPT_CONFIG;
 use codex_core::openai_models::model_presets::HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG;
-use codex_core::openai_models::model_presets::all_model_presets;
+use codex_core::openai_models::models_manager::ModelsManager;
 use codex_core::protocol::EventMsg;
 use codex_core::protocol::FinalOutput;
 use codex_core::protocol::Op;
@@ -38,6 +38,7 @@ use codex_core::protocol::TokenUsage;
 use codex_core::skills::load_skills;
 use codex_core::skills::model::SkillMetadata;
 use codex_protocol::ConversationId;
+use codex_protocol::openai_models::ModelPreset;
 use codex_protocol::openai_models::ModelUpgrade;
 use codex_protocol::openai_models::ReasoningEffort as ReasoningEffortConfig;
 use color_eyre::eyre::Result;
@@ -98,12 +99,13 @@ fn should_show_model_migration_prompt(
     current_model: &str,
     target_model: &str,
     hide_prompt_flag: Option<bool>,
+    available_models: Vec<ModelPreset>,
 ) -> bool {
     if target_model == current_model || hide_prompt_flag.unwrap_or(false) {
         return false;
     }

-    all_model_presets()
+    available_models
         .iter()
         .filter(|preset| preset.upgrade.is_some())
         .any(|preset| preset.model == current_model)
@@ -124,8 +126,10 @@ async fn handle_model_migration_prompt_if_needed(
     config: &mut Config,
     app_event_tx: &AppEventSender,
     auth_mode: Option<AuthMode>,
+    models_manager: Arc<ModelsManager>,
 ) -> Option<AppExitInfo> {
-    let upgrade = all_model_presets()
+    let available_models = models_manager.available_models.read().await.clone();
+    let upgrade = available_models
         .iter()
         .find(|preset| preset.model == config.model)
         .and_then(|preset| preset.upgrade.as_ref());
@@ -142,7 +146,12 @@ async fn handle_model_migration_prompt_if_needed(

     let target_model = target_model.to_string();
     let hide_prompt_flag = migration_prompt_hidden(config, migration_config_key);
-    if !should_show_model_migration_prompt(&config.model, &target_model, hide_prompt_flag) {
+    if !should_show_model_migration_prompt(
+        &config.model,
+        &target_model,
+        hide_prompt_flag,
+        available_models.clone(),
+    ) {
         return None;
     }

@@ -200,7 +209,6 @@ pub(crate) struct App {
     pub(crate) app_event_tx: AppEventSender,
     pub(crate) chat_widget: ChatWidget,
     pub(crate) auth_manager: Arc<AuthManager>,
-
     /// Config is stored here so we can recreate ChatWidgets as needed.
     pub(crate) config: Config,
     pub(crate) active_profile: Option<String>,
@@ -261,17 +269,21 @@ impl App {
         let app_event_tx = AppEventSender::new(app_event_tx);

         let auth_mode = auth_manager.auth().map(|auth| auth.mode);
-        let exit_info =
-            handle_model_migration_prompt_if_needed(tui, &mut config, &app_event_tx, auth_mode)
-                .await;
-        if let Some(exit_info) = exit_info {
-            return Ok(exit_info);
-        }
-
         let conversation_manager = Arc::new(ConversationManager::new(
             auth_manager.clone(),
             SessionSource::Cli,
         ));
+        let exit_info = handle_model_migration_prompt_if_needed(
+            tui,
+            &mut config,
+            &app_event_tx,
+            auth_mode,
+            conversation_manager.get_models_manager(),
+        )
+        .await;
+        if let Some(exit_info) = exit_info {
+            return Ok(exit_info);
+        }

         let skills_outcome = load_skills(&config);
         if !skills_outcome.errors.is_empty() {
@@ -305,6 +317,7 @@ impl App {
             initial_images: initial_images.clone(),
             enhanced_keys_supported,
             auth_manager: auth_manager.clone(),
+            models_manager: conversation_manager.get_models_manager(),
             feedback: feedback.clone(),
             skills: skills.clone(),
             is_first_run,
@@ -330,6 +343,7 @@ impl App {
             initial_images: initial_images.clone(),
             enhanced_keys_supported,
             auth_manager: auth_manager.clone(),
+            models_manager: conversation_manager.get_models_manager(),
             feedback: feedback.clone(),
             skills: skills.clone(),
             is_first_run,
@@ -349,7 +363,7 @@ impl App {
         let upgrade_version = crate::updates::get_upgrade_version(&config);

         let mut app = Self {
-            server: conversation_manager,
+            server: conversation_manager.clone(),
             app_event_tx,
             chat_widget,
             auth_manager: auth_manager.clone(),
@@ -486,6 +500,7 @@ impl App {
             initial_images: Vec::new(),
             enhanced_keys_supported: self.enhanced_keys_supported,
             auth_manager: self.auth_manager.clone(),
+            models_manager: self.server.get_models_manager(),
             feedback: self.feedback.clone(),
             skills: self.skills.clone(),
             is_first_run: false,
@@ -534,6 +549,7 @@ impl App {
             initial_images: Vec::new(),
             enhanced_keys_supported: self.enhanced_keys_supported,
             auth_manager: self.auth_manager.clone(),
+            models_manager: self.server.get_models_manager(),
             feedback: self.feedback.clone(),
             skills: self.skills.clone(),
             is_first_run: false,
@@ -1205,28 +1221,41 @@ mod tests {
         )
     }

+    fn all_model_presets() -> Vec<ModelPreset> {
+        codex_core::openai_models::model_presets::all_model_presets().clone()
+    }
+
     #[test]
     fn model_migration_prompt_only_shows_for_deprecated_models() {
-        assert!(should_show_model_migration_prompt("gpt-5", "gpt-5.1", None));
+        assert!(should_show_model_migration_prompt(
+            "gpt-5",
+            "gpt-5.1",
+            None,
+            all_model_presets()
+        ));
         assert!(should_show_model_migration_prompt(
             "gpt-5-codex",
             "gpt-5.1-codex",
-            None
+            None,
+            all_model_presets()
         ));
         assert!(should_show_model_migration_prompt(
             "gpt-5-codex-mini",
             "gpt-5.1-codex-mini",
-            None
+            None,
+            all_model_presets()
         ));
         assert!(should_show_model_migration_prompt(
             "gpt-5.1-codex",
             "gpt-5.1-codex-max",
-            None
+            None,
+            all_model_presets()
         ));
         assert!(!should_show_model_migration_prompt(
             "gpt-5.1-codex",
             "gpt-5.1-codex",
-            None
+            None,
+            all_model_presets()
         ));
     }

@@ -1235,10 +1264,14 @@ mod tests {
         assert!(!should_show_model_migration_prompt(
             "gpt-5",
             "gpt-5.1",
-            Some(true)
+            Some(true),
+            all_model_presets()
         ));
         assert!(!should_show_model_migration_prompt(
-            "gpt-5.1", "gpt-5.1", None
+            "gpt-5.1",
+            "gpt-5.1",
+            None,
+            all_model_presets()
         ));
     }
