mirror of
https://github.com/openai/codex.git
synced 2026-05-05 05:42:33 +03:00
Change model picker to include gpt-5.1 (#6569)
- Change the presets
- Change the tests that make sure we keep the list of tools updated
- Filter out deprecated models
This commit is contained in:
@@ -2,7 +2,7 @@
|
||||
source: tui/src/chatwidget/tests.rs
|
||||
expression: popup
|
||||
---
|
||||
Select Reasoning Level for gpt-5-codex
|
||||
Select Reasoning Level for gpt-5.1-codex
|
||||
|
||||
1. Low Fastest responses with limited reasoning
|
||||
2. Medium (default) Dynamically adjusts reasoning based on the task
|
||||
|
||||
@@ -5,8 +5,7 @@ expression: popup
|
||||
Select Model and Effort
|
||||
Access legacy models by running codex -m <model_name> or in your config
|
||||
|
||||
› 1. gpt-5-codex (current) Optimized for codex.
|
||||
2. gpt-5 Broad world knowledge with strong general
|
||||
reasoning.
|
||||
› 1. gpt-5.1-codex Optimized for codex.
|
||||
2. gpt-5.1 Broad world knowledge with strong general reasoning.
|
||||
|
||||
Press enter to select reasoning effort, or esc to dismiss.
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
---
|
||||
source: tui/src/chatwidget/tests.rs
|
||||
assertion_line: 500
|
||||
expression: popup
|
||||
---
|
||||
Approaching rate limits
|
||||
Switch to gpt-5-codex-mini for lower credit usage?
|
||||
Switch to gpt-5.1-codex-mini for lower credit usage?
|
||||
|
||||
› 1. Switch to gpt-5-codex-mini Optimized for codex. Cheaper,
|
||||
› 1. Switch to gpt-5.1-codex-mini Optimized for codex. Cheaper,
|
||||
faster, but less capable.
|
||||
2. Keep current model
|
||||
3. Keep current model (never show again) Hide future rate limit reminders
|
||||
|
||||
@@ -1504,13 +1504,13 @@ fn windows_auto_mode_instructions_popup_lists_install_steps() {
|
||||
fn model_reasoning_selection_popup_snapshot() {
|
||||
let (mut chat, _rx, _op_rx) = make_chatwidget_manual();
|
||||
|
||||
chat.config.model = "gpt-5-codex".to_string();
|
||||
chat.config.model = "gpt-5.1-codex".to_string();
|
||||
chat.config.model_reasoning_effort = Some(ReasoningEffortConfig::High);
|
||||
|
||||
let preset = builtin_model_presets(None)
|
||||
.into_iter()
|
||||
.find(|preset| preset.model == "gpt-5-codex")
|
||||
.expect("gpt-5-codex preset");
|
||||
.find(|preset| preset.model == "gpt-5.1-codex")
|
||||
.expect("gpt-5.1-codex preset");
|
||||
chat.open_reasoning_popup(preset);
|
||||
|
||||
let popup = render_bottom_popup(&chat, 80);
|
||||
@@ -1582,13 +1582,13 @@ fn feedback_upload_consent_popup_snapshot() {
|
||||
fn reasoning_popup_escape_returns_to_model_popup() {
|
||||
let (mut chat, _rx, _op_rx) = make_chatwidget_manual();
|
||||
|
||||
chat.config.model = "gpt-5".to_string();
|
||||
chat.config.model = "gpt-5.1".to_string();
|
||||
chat.open_model_popup();
|
||||
|
||||
let presets = builtin_model_presets(None)
|
||||
.into_iter()
|
||||
.find(|preset| preset.model == "gpt-5-codex")
|
||||
.expect("gpt-5-codex preset");
|
||||
.find(|preset| preset.model == "gpt-5.1-codex")
|
||||
.expect("gpt-5.1-codex preset");
|
||||
chat.open_reasoning_popup(presets);
|
||||
|
||||
let before_escape = render_bottom_popup(&chat, 80);
|
||||
|
||||
Reference in New Issue
Block a user