make model optional in config (#7769)

- Make Config.model optional and centralize default-selection logic in
ModelsManager, including a default_model helper (with
codex-auto-balanced when available) so sessions now carry an explicit
chosen model separate from the base config.
- Resolve `model` once from config in `core` and `tui`, then store the
resolved value on the structs that need it.
- Refresh the model list before resolving the default model
This commit is contained in:
Ahmed Ibrahim
2025-12-10 11:19:00 -08:00
committed by GitHub
parent 8a71f8b634
commit cb9a189857
44 changed files with 838 additions and 429 deletions

View File

@@ -28,6 +28,7 @@ use core_test_support::responses::ev_assistant_message;
use core_test_support::responses::ev_completed;
use core_test_support::responses::ev_completed_with_tokens;
use core_test_support::responses::ev_function_call;
use core_test_support::responses::get_responses_requests;
use core_test_support::responses::mount_compact_json_once;
use core_test_support::responses::mount_sse_once;
use core_test_support::responses::mount_sse_once_match;
@@ -135,7 +136,10 @@ async fn summarize_context_three_requests_and_instructions() {
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
config.model_auto_compact_token_limit = Some(200_000);
let conversation_manager = ConversationManager::with_auth(CodexAuth::from_api_key("dummy"));
let conversation_manager = ConversationManager::with_models_provider(
CodexAuth::from_api_key("dummy"),
config.model_provider.clone(),
);
let NewConversation {
conversation: codex,
session_configured,
@@ -329,7 +333,10 @@ async fn manual_compact_uses_custom_prompt() {
config.model_provider = model_provider;
config.compact_prompt = Some(custom_prompt.to_string());
let conversation_manager = ConversationManager::with_auth(CodexAuth::from_api_key("dummy"));
let conversation_manager = ConversationManager::with_models_provider(
CodexAuth::from_api_key("dummy"),
config.model_provider.clone(),
);
let codex = conversation_manager
.new_conversation(config)
.await
@@ -344,7 +351,7 @@ async fn manual_compact_uses_custom_prompt() {
assert_eq!(message, COMPACT_WARNING_MESSAGE);
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
let requests = server.received_requests().await.expect("collect requests");
let requests = get_responses_requests(&server).await;
let body = requests
.iter()
.find_map(|req| req.body_json::<serde_json::Value>().ok())
@@ -409,7 +416,10 @@ async fn manual_compact_emits_api_and_local_token_usage_events() {
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
let conversation_manager = ConversationManager::with_auth(CodexAuth::from_api_key("dummy"));
let conversation_manager = ConversationManager::with_models_provider(
CodexAuth::from_api_key("dummy"),
config.model_provider.clone(),
);
let NewConversation {
conversation: codex,
..
@@ -570,7 +580,7 @@ async fn multiple_auto_compact_per_task_runs_after_token_limit_hit() {
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
// collect the requests payloads from the model
let requests_payloads = server.received_requests().await.unwrap();
let requests_payloads = get_responses_requests(&server).await;
let body = requests_payloads[0]
.body_json::<serde_json::Value>()
@@ -1050,7 +1060,10 @@ async fn auto_compact_runs_after_token_limit_hit() {
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
config.model_auto_compact_token_limit = Some(200_000);
let conversation_manager = ConversationManager::with_auth(CodexAuth::from_api_key("dummy"));
let conversation_manager = ConversationManager::with_models_provider(
CodexAuth::from_api_key("dummy"),
config.model_provider.clone(),
);
let codex = conversation_manager
.new_conversation(config)
.await
@@ -1090,7 +1103,7 @@ async fn auto_compact_runs_after_token_limit_hit() {
wait_for_event(&codex, |ev| matches!(ev, EventMsg::TaskComplete(_))).await;
let requests = server.received_requests().await.unwrap();
let requests = get_responses_requests(&server).await;
assert_eq!(
requests.len(),
5,
@@ -1295,7 +1308,10 @@ async fn auto_compact_persists_rollout_entries() {
let mut config = load_default_config_for_test(&home);
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
let conversation_manager = ConversationManager::with_auth(CodexAuth::from_api_key("dummy"));
let conversation_manager = ConversationManager::with_models_provider(
CodexAuth::from_api_key("dummy"),
config.model_provider.clone(),
);
let NewConversation {
conversation: codex,
session_configured,
@@ -1397,11 +1413,14 @@ async fn manual_compact_retries_after_context_window_error() {
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
config.model_auto_compact_token_limit = Some(200_000);
let codex = ConversationManager::with_auth(CodexAuth::from_api_key("dummy"))
.new_conversation(config)
.await
.unwrap()
.conversation;
let codex = ConversationManager::with_models_provider(
CodexAuth::from_api_key("dummy"),
config.model_provider.clone(),
)
.new_conversation(config)
.await
.unwrap()
.conversation;
codex
.submit(Op::UserInput {
@@ -1529,11 +1548,14 @@ async fn manual_compact_twice_preserves_latest_user_messages() {
let mut config = load_default_config_for_test(&home);
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
let codex = ConversationManager::with_auth(CodexAuth::from_api_key("dummy"))
.new_conversation(config)
.await
.unwrap()
.conversation;
let codex = ConversationManager::with_models_provider(
CodexAuth::from_api_key("dummy"),
config.model_provider.clone(),
)
.new_conversation(config)
.await
.unwrap()
.conversation;
codex
.submit(Op::UserInput {
@@ -1731,7 +1753,10 @@ async fn auto_compact_allows_multiple_attempts_when_interleaved_with_other_turn_
config.model_provider = model_provider;
set_test_compact_prompt(&mut config);
config.model_auto_compact_token_limit = Some(200);
let conversation_manager = ConversationManager::with_auth(CodexAuth::from_api_key("dummy"));
let conversation_manager = ConversationManager::with_models_provider(
CodexAuth::from_api_key("dummy"),
config.model_provider.clone(),
);
let codex = conversation_manager
.new_conversation(config)
.await
@@ -1771,10 +1796,8 @@ async fn auto_compact_allows_multiple_attempts_when_interleaved_with_other_turn_
"auto compact should not emit task lifecycle events"
);
let request_bodies: Vec<String> = server
.received_requests()
.await
.unwrap()
let requests = get_responses_requests(&server).await;
let request_bodies: Vec<String> = requests
.into_iter()
.map(|request| String::from_utf8(request.body).unwrap_or_default())
.collect();
@@ -1845,11 +1868,14 @@ async fn auto_compact_triggers_after_function_call_over_95_percent_usage() {
config.model_context_window = Some(context_window);
config.model_auto_compact_token_limit = Some(limit);
let codex = ConversationManager::with_auth(CodexAuth::from_api_key("dummy"))
.new_conversation(config)
.await
.unwrap()
.conversation;
let codex = ConversationManager::with_models_provider(
CodexAuth::from_api_key("dummy"),
config.model_provider.clone(),
)
.new_conversation(config)
.await
.unwrap()
.conversation;
codex
.submit(Op::UserInput {