Compare commits

..

30 Commits

Author SHA1 Message Date
pap-openai
77329e0f34 Merge branch 'main' into pap/model-selection 2025-08-02 22:18:11 +01:00
easong-openai
da2294548a Merge branch 'main' into pap/model-selection 2025-08-01 20:51:40 -07:00
pap
a5eea9048a Merge branch 'main' into pap/model-selection 2025-08-01 18:18:18 +01:00
pap
a2fe6336b6 fixing /model dropdown if no args provided 2025-08-01 18:09:21 +01:00
pap-openai
5d4ade38a4 Merge branch 'main' into pap/model-selection 2025-08-01 01:48:42 +01:00
pap-openai
4f2f4dcf6f Merge branch 'main' into pap/model-selection 2025-08-01 00:05:35 +01:00
pap
8dea0e4cd2 fuzzy is now a common lib + toml alphabetical order 2025-08-01 00:04:24 +01:00
pap
145688f019 linter 2025-07-31 23:09:55 +01:00
pap
1afa537148 remove useless code 2025-07-31 22:48:51 +01:00
pap
507f79deac linter 2025-07-31 22:34:12 +01:00
pap
d207169ea6 Merge branch 'main' into pap/model-selection 2025-07-31 22:29:45 +01:00
pap
4e2cf0bb7a can set non default models 2025-07-31 22:28:54 +01:00
pap
56e95f7ec7 scrollable model list 2025-07-31 21:54:42 +01:00
pap
fbc1ee7d62 new model popup 2025-07-31 15:00:08 +01:00
pap
f8e5b02320 desired_height for model selection 2025-07-31 13:35:59 +01:00
pap
02d16813bf Merge branch 'main' into pap/model-selection 2025-07-31 13:35:42 +01:00
pap-openai
7cf524d8b9 Merge branch 'main' into pap/model-selection 2025-07-30 22:49:44 +01:00
pap
40cf8a819c lint test 2025-07-30 22:28:02 +01:00
pap
55659e351c fixing merge 2025-07-30 22:00:13 +01:00
pap
2326f99e03 Merge branch 'main' into pap/model-selection 2025-07-30 21:49:14 +01:00
pap
91aa683ae9 cleaner code 2025-07-30 20:43:31 +01:00
pap
9dce0d7882 don't show session information at each reconfiguration 2025-07-30 20:32:01 +01:00
pap
661a4ff3f9 fix: self.emit_last_history_entry() 2025-07-30 20:07:14 +01:00
pap-openai
da3f90fdad Merge branch 'main' into pap/model-selection 2025-07-30 20:02:25 +01:00
pap-openai
fcbe6495f1 Merge branch 'main' into pap/model-selection 2025-07-30 18:06:38 +01:00
pap
34edf573d7 linter 2025-07-30 18:06:11 +01:00
pap
f78f8d8c7c fmt 2025-07-29 23:20:24 +01:00
pap
1836614c06 remove current model if search doesn't match 2025-07-28 23:25:35 +01:00
pap
9db5c7af9e remove preference ranking as we don't get models dynamically 2025-07-28 23:21:49 +01:00
pap
b294004ea9 adding /model 2025-07-28 23:06:30 +01:00
21 changed files with 836 additions and 424 deletions

70
codex-rs/Cargo.lock generated
View File

@@ -764,8 +764,8 @@ version = "0.0.0"
dependencies = [
"anyhow",
"clap",
"codex-common",
"ignore",
"nucleo-matcher",
"serde",
"serde_json",
"tokio",
@@ -869,6 +869,7 @@ dependencies = [
"strum 0.27.2",
"strum_macros 0.27.2",
"tokio",
"toml 0.8.23",
"tracing",
"tracing-appender",
"tracing-subscriber",
@@ -878,7 +879,6 @@ dependencies = [
"unicode-segmentation",
"unicode-width 0.1.14",
"uuid",
"vt100",
]
[[package]]
@@ -1471,7 +1471,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad"
dependencies = [
"libc",
"windows-sys 0.52.0",
"windows-sys 0.60.2",
]
[[package]]
@@ -1551,7 +1551,7 @@ checksum = "0ce92ff622d6dadf7349484f42c93271a0d49b7cc4d466a936405bacbe10aa78"
dependencies = [
"cfg-if",
"rustix 1.0.8",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -1754,7 +1754,7 @@ version = "0.2.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cba6ae63eb948698e300f645f87c70f76630d505f23b8907cf1e193ee85048c1"
dependencies = [
"unicode-width 0.2.1",
"unicode-width 0.2.0",
]
[[package]]
@@ -2334,7 +2334,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
dependencies = [
"hermit-abi",
"libc",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -2804,16 +2804,6 @@ dependencies = [
"winapi",
]
[[package]]
name = "nucleo-matcher"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf33f538733d1a5a3494b836ba913207f14d9d4a1d3cd67030c5061bdd2cac85"
dependencies = [
"memchr",
"unicode-segmentation",
]
[[package]]
name = "num-bigint"
version = "0.4.6"
@@ -3384,7 +3374,7 @@ dependencies = [
[[package]]
name = "ratatui"
version = "0.29.0"
source = "git+https://github.com/easong-openai/ratatui?branch=nornagon-v0.29.0-patch#159c1978c2f829cd322ec778df4168815ed9af96"
source = "git+https://github.com/nornagon/ratatui?branch=nornagon-v0.29.0-patch#bca287ddc5d38fe088c79e2eda22422b96226f2e"
dependencies = [
"bitflags 2.9.1",
"cassowary",
@@ -3398,7 +3388,7 @@ dependencies = [
"strum 0.26.3",
"unicode-segmentation",
"unicode-truncate",
"unicode-width 0.2.1",
"unicode-width 0.2.0",
]
[[package]]
@@ -3712,7 +3702,7 @@ dependencies = [
"errno",
"libc",
"linux-raw-sys 0.4.15",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -3725,7 +3715,7 @@ dependencies = [
"errno",
"libc",
"linux-raw-sys 0.9.4",
"windows-sys 0.52.0",
"windows-sys 0.60.2",
]
[[package]]
@@ -4486,7 +4476,7 @@ dependencies = [
"getrandom 0.3.3",
"once_cell",
"rustix 1.0.8",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -4780,6 +4770,7 @@ dependencies = [
"serde",
"serde_spanned 0.6.9",
"toml_datetime 0.6.11",
"toml_write",
"winnow",
]
@@ -4792,6 +4783,12 @@ dependencies = [
"winnow",
]
[[package]]
name = "toml_write"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801"
[[package]]
name = "toml_writer"
version = "1.0.2"
@@ -4970,7 +4967,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "911e93158bf80bbc94bad533b2b16e3d711e1132d69a6a6980c3920a63422c19"
dependencies = [
"ratatui",
"unicode-width 0.2.1",
"unicode-width 0.2.0",
]
[[package]]
@@ -4997,7 +4994,7 @@ checksum = "0a5318dd619ed73c52a9417ad19046724effc1287fb75cdcc4eca1d6ac1acbae"
dependencies = [
"crossterm",
"ratatui",
"unicode-width 0.2.1",
"unicode-width 0.2.0",
]
[[package]]
@@ -5043,9 +5040,9 @@ checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
[[package]]
name = "unicode-width"
version = "0.2.1"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c"
checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd"
[[package]]
name = "unicode-xid"
@@ -5130,27 +5127,6 @@ version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
[[package]]
name = "vt100"
version = "0.16.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "054ff75fb8fa83e609e685106df4faeffdf3a735d3c74ebce97ec557d5d36fd9"
dependencies = [
"itoa",
"unicode-width 0.2.1",
"vte",
]
[[package]]
name = "vte"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a5924018406ce0063cd67f8e008104968b74b563ee1b85dde3ed1f7cb87d3dbd"
dependencies = [
"arrayvec",
"memchr",
]
[[package]]
name = "wait-timeout"
version = "0.2.1"
@@ -5339,7 +5315,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]

View File

@@ -44,4 +44,4 @@ codegen-units = 1
[patch.crates-io]
# ratatui = { path = "../../ratatui" }
ratatui = { git = "https://github.com/easong-openai/ratatui", branch = "nornagon-v0.29.0-patch" }
ratatui = { git = "https://github.com/nornagon/ratatui", branch = "nornagon-v0.29.0-patch" }

View File

@@ -0,0 +1,49 @@
/// Case-insensitive subsequence matcher used for fuzzy filtering.
///
/// On a match, yields the byte offsets (taken from `char_indices()` of a
/// lowercased copy of `haystack`) of the matched characters, together with
/// a score where smaller means better. An empty `needle` matches anything
/// with the worst possible score.
///
/// Note: for ASCII inputs these offsets coincide with character positions;
/// with wider Unicode input, byte vs. char indices may differ.
pub fn fuzzy_match(haystack: &str, needle: &str) -> Option<(Vec<usize>, i32)> {
    if needle.is_empty() {
        return Some((Vec::new(), i32::MAX));
    }
    let hay = haystack.to_lowercase();
    let pat = needle.to_lowercase();

    let mut positions: Vec<usize> = Vec::with_capacity(pat.len());
    let mut hay_chars = hay.char_indices();
    for wanted in pat.chars() {
        // Advance through the haystack until the next occurrence of `wanted`;
        // the shared iterator guarantees strictly increasing positions.
        match hay_chars.by_ref().find(|&(_, c)| c == wanted) {
            Some((pos, _)) => positions.push(pos),
            None => return None,
        }
    }

    // Tighter match windows score better (window length minus needle length),
    // and a match anchored at the start of the haystack gets a large bonus.
    let first = positions.first().copied().unwrap_or(0);
    let last = positions.last().copied().unwrap_or(first);
    let span = (last as i32 - first as i32 + 1) - pat.len() as i32;
    let score = span.max(0) - if first == 0 { 100 } else { 0 };
    Some((positions, score))
}
/// Convenience wrapper returning just the matched positions, discarding the score.
pub fn fuzzy_indices(haystack: &str, needle: &str) -> Option<Vec<usize>> {
    match fuzzy_match(haystack, needle) {
        Some((positions, _score)) => Some(positions),
        None => None,
    }
}

View File

@@ -23,3 +23,5 @@ mod sandbox_summary;
#[cfg(feature = "sandbox_summary")]
pub use sandbox_summary::summarize_sandbox_policy;
pub mod fuzzy_match;

View File

@@ -125,20 +125,8 @@ impl Codex {
let user_instructions = get_user_instructions(&config).await;
let configure_session = Op::ConfigureSession {
provider: config.model_provider.clone(),
model: config.model.clone(),
model_reasoning_effort: config.model_reasoning_effort,
model_reasoning_summary: config.model_reasoning_summary,
user_instructions,
base_instructions: config.base_instructions.clone(),
approval_policy: config.approval_policy,
sandbox_policy: config.sandbox_policy.clone(),
disable_response_storage: config.disable_response_storage,
notify: config.notify.clone(),
cwd: config.cwd.clone(),
resume_path: resume_path.clone(),
};
let configure_session =
config.to_configure_session_op(Some(config.model.clone()), user_instructions);
let config = Arc::new(config);
@@ -721,8 +709,14 @@ async fn submission_loop(
}
};
let client_config = {
let mut c = (*config).clone();
c.model = model.clone();
Arc::new(c)
};
let client = ModelClient::new(
config.clone(),
client_config,
auth.clone(),
provider.clone(),
model_reasoning_effort,

View File

@@ -14,6 +14,7 @@ use crate::model_provider_info::ModelProviderInfo;
use crate::model_provider_info::built_in_model_providers;
use crate::openai_model_info::get_model_info;
use crate::protocol::AskForApproval;
use crate::protocol::Op;
use crate::protocol::SandboxPolicy;
use dirs::home_dir;
use serde::Deserialize;
@@ -185,6 +186,32 @@ impl Config {
// Step 4: merge with the strongly-typed overrides.
Self::load_from_base_config_with_overrides(cfg, overrides, codex_home)
}
/// Build an `Op::ConfigureSession` from this `Config`.
///
/// * `override_model` — when `Some`, replaces `self.model` for the session.
/// * `user_instructions` — instructions forwarded verbatim into the op.
pub fn to_configure_session_op(
    &self,
    override_model: Option<String>,
    user_instructions: Option<String>,
) -> Op {
    // Prefer the caller-supplied model; otherwise fall back to the configured one.
    let model = match override_model {
        Some(m) => m,
        None => self.model.clone(),
    };
    Op::ConfigureSession {
        provider: self.model_provider.clone(),
        model,
        model_reasoning_effort: self.model_reasoning_effort,
        model_reasoning_summary: self.model_reasoning_summary,
        user_instructions,
        base_instructions: self.base_instructions.clone(),
        approval_policy: self.approval_policy,
        sandbox_policy: self.sandbox_policy.clone(),
        disable_response_storage: self.disable_response_storage,
        notify: self.notify.clone(),
        cwd: self.cwd.clone(),
        // `resume_path` comes from the experimental resume setting.
        resume_path: self.experimental_resume.clone(),
    }
}
}
/// Read `CODEX_HOME/config.toml` and return it as a generic TOML value. Returns

View File

@@ -32,7 +32,7 @@ pub use model_provider_info::ModelProviderInfo;
pub use model_provider_info::WireApi;
pub use model_provider_info::built_in_model_providers;
mod models;
mod openai_model_info;
pub mod openai_model_info;
mod openai_tools;
pub mod plan_tool;
mod project_doc;

View File

@@ -69,3 +69,8 @@ pub(crate) fn get_model_info(name: &str) -> Option<ModelInfo> {
_ => None,
}
}
/// Curated list of commonly-used OpenAI model names for selection UIs.
pub fn get_all_model_names() -> Vec<&'static str> {
    ["codex-mini-latest", "o3", "o4-mini", "gpt-4.1", "gpt-4o"].to_vec()
}

View File

@@ -14,8 +14,8 @@ path = "src/lib.rs"
[dependencies]
anyhow = "1"
clap = { version = "4", features = ["derive"] }
codex-common = { path = "../common" }
ignore = "0.4.23"
nucleo-matcher = "0.3.1"
serde = { version = "1", features = ["derive"] }
serde_json = "1.0.110"
tokio = { version = "1", features = ["full"] }

View File

@@ -1,14 +1,9 @@
use codex_common::fuzzy_match::fuzzy_indices as common_fuzzy_indices;
use codex_common::fuzzy_match::fuzzy_match as common_fuzzy_match;
use ignore::WalkBuilder;
use ignore::overrides::OverrideBuilder;
use nucleo_matcher::Matcher;
use nucleo_matcher::Utf32Str;
use nucleo_matcher::pattern::AtomKind;
use nucleo_matcher::pattern::CaseMatching;
use nucleo_matcher::pattern::Normalization;
use nucleo_matcher::pattern::Pattern;
use serde::Serialize;
use std::cell::UnsafeCell;
use std::cmp::Reverse;
use std::collections::BinaryHeap;
use std::num::NonZero;
use std::path::Path;
@@ -24,17 +19,13 @@ pub use cli::Cli;
/// A single match result returned from the search.
///
/// * `score` Relevance score returned by `nucleo_matcher`.
/// * `score` Relevance score from the fuzzy matcher (smaller is better).
/// * `path` Path to the matched file (relative to the search directory).
/// * `indices` Optional list of character indices that matched the query.
/// These are only filled when the caller of [`run`] sets
/// `compute_indices` to `true`. The indices vector follows the
/// guidance from `nucleo_matcher::Pattern::indices`: they are
/// unique and sorted in ascending order so that callers can use
/// them directly for highlighting.
/// * `indices` Optional list of character positions that matched the query.
/// These are unique and sorted so callers can use them directly for highlighting.
#[derive(Debug, Clone, Serialize)]
pub struct FileMatch {
pub score: u32,
pub score: i32,
pub path: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub indices: Option<Vec<u32>>, // Sorted & deduplicated when present
@@ -130,7 +121,6 @@ pub fn run(
cancel_flag: Arc<AtomicBool>,
compute_indices: bool,
) -> anyhow::Result<FileSearchResults> {
let pattern = create_pattern(pattern_text);
// Create one BestMatchesList per worker thread so that each worker can
// operate independently. The results across threads will be merged when
// the traversal is complete.
@@ -139,13 +129,7 @@ pub fn run(
num_best_matches_lists,
} = create_worker_count(threads);
let best_matchers_per_worker: Vec<UnsafeCell<BestMatchesList>> = (0..num_best_matches_lists)
.map(|_| {
UnsafeCell::new(BestMatchesList::new(
limit.get(),
pattern.clone(),
Matcher::new(nucleo_matcher::Config::DEFAULT),
))
})
.map(|_| UnsafeCell::new(BestMatchesList::new(limit.get(), pattern_text.to_string())))
.collect();
// Use the same tree-walker library that ripgrep uses. We use it directly so
@@ -220,47 +204,33 @@ pub fn run(
}
// Merge results across best_matchers_per_worker.
let mut global_heap: BinaryHeap<Reverse<(u32, String)>> = BinaryHeap::new();
let mut global_heap: BinaryHeap<(i32, String)> = BinaryHeap::new();
let mut total_match_count = 0;
for best_list_cell in best_matchers_per_worker.iter() {
let best_list = unsafe { &*best_list_cell.get() };
total_match_count += best_list.num_matches;
for &Reverse((score, ref line)) in best_list.binary_heap.iter() {
for &(score, ref line) in best_list.binary_heap.iter() {
if global_heap.len() < limit.get() {
global_heap.push(Reverse((score, line.clone())));
} else if let Some(min_element) = global_heap.peek() {
if score > min_element.0.0 {
global_heap.push((score, line.clone()));
} else if let Some(&(worst_score, _)) = global_heap.peek() {
if score < worst_score {
global_heap.pop();
global_heap.push(Reverse((score, line.clone())));
global_heap.push((score, line.clone()));
}
}
}
}
let mut raw_matches: Vec<(u32, String)> = global_heap.into_iter().map(|r| r.0).collect();
let mut raw_matches: Vec<(i32, String)> = global_heap.into_iter().collect();
sort_matches(&mut raw_matches);
// Transform into `FileMatch`, optionally computing indices.
let mut matcher = if compute_indices {
Some(Matcher::new(nucleo_matcher::Config::DEFAULT))
} else {
None
};
let matches: Vec<FileMatch> = raw_matches
.into_iter()
.map(|(score, path)| {
let indices = if compute_indices {
let mut buf = Vec::<char>::new();
let haystack: Utf32Str<'_> = Utf32Str::new(&path, &mut buf);
let mut idx_vec: Vec<u32> = Vec::new();
if let Some(ref mut m) = matcher {
// Ignore the score returned from indices we already have `score`.
pattern.indices(haystack, m, &mut idx_vec);
}
idx_vec.sort_unstable();
idx_vec.dedup();
Some(idx_vec)
common_fuzzy_indices(&path, pattern_text)
.map(|v| v.into_iter().map(|i| i as u32).collect())
} else {
None
};
@@ -279,9 +249,9 @@ pub fn run(
})
}
/// Sort matches in-place by descending score, then ascending path.
fn sort_matches(matches: &mut [(u32, String)]) {
matches.sort_by(|a, b| match b.0.cmp(&a.0) {
/// Sort matches in-place by ascending score, then ascending path.
fn sort_matches(matches: &mut [(i32, String)]) {
matches.sort_by(|a, b| match a.0.cmp(&b.0) {
std::cmp::Ordering::Equal => a.1.cmp(&b.1),
other => other,
});
@@ -291,39 +261,31 @@ fn sort_matches(matches: &mut [(u32, String)]) {
struct BestMatchesList {
max_count: usize,
num_matches: usize,
pattern: Pattern,
matcher: Matcher,
binary_heap: BinaryHeap<Reverse<(u32, String)>>,
/// Internal buffer for converting strings to UTF-32.
utf32buf: Vec<char>,
pattern: String,
binary_heap: BinaryHeap<(i32, String)>,
}
impl BestMatchesList {
fn new(max_count: usize, pattern: Pattern, matcher: Matcher) -> Self {
fn new(max_count: usize, pattern: String) -> Self {
Self {
max_count,
num_matches: 0,
pattern,
matcher,
binary_heap: BinaryHeap::new(),
utf32buf: Vec::<char>::new(),
}
}
fn insert(&mut self, line: &str) {
let haystack: Utf32Str<'_> = Utf32Str::new(line, &mut self.utf32buf);
if let Some(score) = self.pattern.score(haystack, &mut self.matcher) {
// In the tests below, we verify that score() returns None for a
// non-match, so we can categorically increment the count here.
if let Some((_indices, score)) = common_fuzzy_match(line, &self.pattern) {
// Count all matches; non-matches return None above.
self.num_matches += 1;
if self.binary_heap.len() < self.max_count {
self.binary_heap.push(Reverse((score, line.to_string())));
} else if let Some(min_element) = self.binary_heap.peek() {
if score > min_element.0.0 {
self.binary_heap.push((score, line.to_string()));
} else if let Some(&(worst_score, _)) = self.binary_heap.peek() {
if score < worst_score {
self.binary_heap.pop();
self.binary_heap.push(Reverse((score, line.to_string())));
self.binary_heap.push((score, line.to_string()));
}
}
}
@@ -354,28 +316,16 @@ fn create_worker_count(num_workers: NonZero<usize>) -> WorkerCount {
}
}
fn create_pattern(pattern: &str) -> Pattern {
Pattern::new(
pattern,
CaseMatching::Smart,
Normalization::Smart,
AtomKind::Fuzzy,
)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn verify_score_is_none_for_non_match() {
let mut utf32buf = Vec::<char>::new();
let line = "hello";
let mut matcher = Matcher::new(nucleo_matcher::Config::DEFAULT);
let haystack: Utf32Str<'_> = Utf32Str::new(line, &mut utf32buf);
let pattern = create_pattern("zzz");
let score = pattern.score(haystack, &mut matcher);
assert_eq!(score, None);
fn verify_no_match_does_not_increment_or_push() {
let mut list = BestMatchesList::new(5, "zzz".to_string());
list.insert("hello");
assert_eq!(list.num_matches, 0);
assert_eq!(list.binary_heap.len(), 0);
}
#[test]
@@ -388,11 +338,11 @@ mod tests {
sort_matches(&mut matches);
// Highest score first; ties broken alphabetically.
// Lowest score first; ties broken alphabetically.
let expected = vec![
(90, "zzz".to_string()),
(100, "a_path".to_string()),
(100, "b_path".to_string()),
(90, "zzz".to_string()),
];
assert_eq!(matches, expected);

View File

@@ -11,10 +11,6 @@ path = "src/main.rs"
name = "codex_tui"
path = "src/lib.rs"
[features]
# Enable vt100-based tests (emulator) when running with `--features vt100-tests`.
vt100-tests = ["dep:vt100"]
[lints]
workspace = true
@@ -59,6 +55,7 @@ tokio = { version = "1", features = [
"rt-multi-thread",
"signal",
] }
toml = "0.8"
tracing = { version = "0.1.41", features = ["log"] }
tracing-appender = "0.2.3"
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
@@ -68,13 +65,9 @@ tui-textarea = "0.7.0"
unicode-segmentation = "1.12.0"
unicode-width = "0.1"
uuid = "1"
vt100 = { version = "0.16.2", optional = true }
[dev-dependencies]
insta = "1.43.1"
pretty_assertions = "1"
#[target.'cfg(feature = "vt100-tests")'.dev-dependencies]
#vt100 = "0.16.2" # Revisit: conflicts with ratatui's pinned unicode-width (=0.2.0)

View File

@@ -45,6 +45,28 @@ enum AppState<'a> {
GitWarning { screen: GitWarningScreen },
}
/// Strip a single pair of surrounding quotes from the provided string if present.
/// Supports straight and common curly quotes: '…', "…", ‘…’, “…”.
///
/// The input is trimmed first; when the trimmed text is wrapped in a matching
/// open/close quote pair, exactly one layer of quotes is removed. Mismatched
/// or unpaired quotes are left untouched.
pub fn strip_surrounding_quotes(s: &str) -> &str {
    // Opening/closing pairs (note curly quotes differ on each side).
    // Fix: the single curly-quote pair was written as empty char literals
    // (`('', '')`), which does not compile; restored to ('‘', '’').
    const QUOTE_PAIRS: &[(char, char)] = &[('"', '"'), ('\'', '\''), ('‘', '’'), ('“', '”')];
    let t = s.trim();
    if t.len() < 2 {
        return t;
    }
    for &(open, close) in QUOTE_PAIRS {
        if t.starts_with(open) && t.ends_with(close) {
            // Safe slicing: `t` begins with `open` and ends with `close`, so
            // these byte offsets fall on UTF-8 character boundaries.
            let start = open.len_utf8();
            let end = t.len() - close.len_utf8();
            return &t[start..end];
        }
    }
    t
}
pub(crate) struct App<'a> {
app_event_tx: AppEventSender,
app_event_rx: Receiver<AppEvent>,
@@ -279,6 +301,16 @@ impl App<'_> {
AppEvent::ExitRequest => {
break;
}
AppEvent::SelectModel(model) => {
if let AppState::Chat { widget } = &mut self.app_state {
widget.update_model_and_reconfigure(model);
}
}
AppEvent::OpenModelSelector => {
if let AppState::Chat { widget } = &mut self.app_state {
widget.show_model_selector();
}
}
AppEvent::CodexOp(op) => match &mut self.app_state {
AppState::Chat { widget } => widget.submit_op(op),
AppState::GitWarning { .. } => {}
@@ -368,6 +400,37 @@ impl App<'_> {
),
}));
}
SlashCommand::Model => {
// Open the model selector when `/model` has no arguments.
if let AppState::Chat { widget } = &mut self.app_state {
widget.show_model_selector();
}
}
},
AppEvent::DispatchCommandWithArgs(command, args) => match command {
SlashCommand::Model => {
let arg = args.trim();
if let AppState::Chat { widget } = &mut self.app_state {
// Normalize commonly quoted inputs like \"o3\" or 'o3' or “o3”.
let normalized = strip_surrounding_quotes(arg).trim().to_string();
if !normalized.is_empty() {
widget.update_model_and_reconfigure(normalized);
}
}
}
#[cfg(debug_assertions)]
SlashCommand::TestApproval => {
// Ignore args; forward to the existing no-args handler
self.app_event_tx.send(AppEvent::DispatchCommand(command));
}
SlashCommand::New
| SlashCommand::Quit
| SlashCommand::Diff
| SlashCommand::Compact => {
// For other commands, fall back to existing handling.
// We can ignore args for now.
self.app_event_tx.send(AppEvent::DispatchCommand(command));
}
},
AppEvent::StartFileSearch(query) => {
self.file_search.on_user_query(query);
@@ -498,3 +561,44 @@ impl App<'_> {
}
}
}
#[cfg(test)]
mod tests {
    use super::strip_surrounding_quotes;

    #[test]
    fn strip_surrounding_quotes_cases() {
        // (input, expected) pairs covering quoted and unquoted model names.
        for (input, expected) in [
            ("o3", "o3"),
            ("\"codex-mini-latest\"", "codex-mini-latest"),
            ("another_model", "another_model"),
        ] {
            assert_eq!(strip_surrounding_quotes(input), expected);
        }
    }

    #[test]
    fn model_command_args_extraction_and_normalization() {
        // Mirrors the raw-argument extraction performed in chat_composer,
        // followed by the normalization applied by the app dispatch logic.
        let cases = [
            ("/model", "", ""),
            ("/model o3", "o3", "o3"),
            ("/model another_model", "another_model", "another_model"),
        ];
        for (line, raw_expected, norm_expected) in cases {
            let raw = match line.strip_prefix('/') {
                Some(stripped) => {
                    let token = stripped.trim_start();
                    let cmd_token = token.split_whitespace().next().unwrap_or("");
                    token[cmd_token.len()..].trim_start().to_string()
                }
                None => String::new(),
            };
            assert_eq!(raw, raw_expected, "raw args for '{line}'");
            let normalized = strip_surrounding_quotes(&raw).trim().to_string();
            assert_eq!(normalized, norm_expected, "normalized args for '{line}'");
        }
    }
}

View File

@@ -34,6 +34,10 @@ pub(crate) enum AppEvent {
/// layer so it can be handled centrally.
DispatchCommand(SlashCommand),
/// Dispatch a recognized slash command along with the raw argument string
/// following the command on the first line.
DispatchCommandWithArgs(SlashCommand, String),
/// Kick off an asynchronous file search for the given query (text after
/// the `@`). Previous searches may be cancelled by the app layer so there
/// is at most one in-flight search.
@@ -48,4 +52,10 @@ pub(crate) enum AppEvent {
},
InsertHistory(Vec<Line<'static>>),
/// User selected a model from the model-selection dropdown.
SelectModel(String),
/// Request the app to open the model selector (populate options and show popup).
OpenModelSelector,
}

View File

@@ -19,9 +19,11 @@ use tui_textarea::TextArea;
use super::chat_composer_history::ChatComposerHistory;
use super::command_popup::CommandPopup;
use super::file_search_popup::FileSearchPopup;
use super::model_selection_popup::ModelSelectionPopup;
use crate::app_event::AppEvent;
use crate::app_event_sender::AppEventSender;
use crate::slash_command::SlashCommand;
use codex_file_search::FileMatch;
const BASE_PLACEHOLDER_TEXT: &str = "...";
@@ -52,6 +54,7 @@ enum ActivePopup {
None,
Command(CommandPopup),
File(FileSearchPopup),
Model(ModelSelectionPopup),
}
impl ChatComposer<'_> {
@@ -87,6 +90,7 @@ impl ChatComposer<'_> {
ActivePopup::None => 1u16,
ActivePopup::Command(c) => c.calculate_required_height(),
ActivePopup::File(c) => c.calculate_required_height(),
ActivePopup::Model(c) => c.calculate_required_height(),
}
}
@@ -182,20 +186,47 @@ impl ChatComposer<'_> {
self.update_border(has_focus);
}
/// Open or update the model-selection popup with the provided options.
pub(crate) fn open_model_selector(&mut self, current_model: &str, options: Vec<String>) {
match &mut self.active_popup {
ActivePopup::Model(popup) => {
popup.set_options(current_model, options);
}
_ => {
self.active_popup =
ActivePopup::Model(ModelSelectionPopup::new(current_model, options));
}
}
// Initialize/update the query from the composer.
self.sync_model_popup();
}
/// Handle a key event coming from the main UI.
pub fn handle_key_event(&mut self, key_event: KeyEvent) -> (InputResult, bool) {
let result = match &mut self.active_popup {
ActivePopup::Command(_) => self.handle_key_event_with_slash_popup(key_event),
ActivePopup::File(_) => self.handle_key_event_with_file_popup(key_event),
ActivePopup::Model(_) => self.handle_key_event_with_model_popup(key_event),
ActivePopup::None => self.handle_key_event_without_popup(key_event),
};
// Update (or hide/show) popup after processing the key.
self.sync_command_popup();
if matches!(self.active_popup, ActivePopup::Command(_)) {
self.dismissed_file_popup_token = None;
} else {
self.sync_file_search_popup();
match &self.active_popup {
ActivePopup::Model(_) => {
// Only keep model popup in sync when active; do not interfere with other popups.
self.sync_model_popup();
}
ActivePopup::Command(_) => {
self.sync_command_popup();
// When slash popup active, suppress file popup.
self.dismissed_file_popup_token = None;
}
_ => {
self.sync_command_popup();
if !matches!(self.active_popup, ActivePopup::Command(_)) {
self.sync_file_search_popup();
}
}
}
result
@@ -244,10 +275,39 @@ impl ChatComposer<'_> {
ctrl: false,
} => {
if let Some(cmd) = popup.selected_command() {
// Send command to the app layer.
self.app_event_tx.send(AppEvent::DispatchCommand(*cmd));
// Extract arguments after the command from the first line.
let first_line = self
.textarea
.lines()
.first()
.map(|s| s.as_str())
.unwrap_or("");
// Clear textarea so no residual text remains.
let args = if let Some((_, args)) =
Self::parse_slash_command_and_args_from_line(first_line)
{
args
} else {
String::new()
};
// Special-case: for `/model` with no arguments, keep the composer as "/model "
// so the model selector opens and the user can type to filter.
if *cmd == SlashCommand::Model && args.trim().is_empty() {
// Replace the entire input with "/model " (with a trailing space).
self.textarea.select_all();
self.textarea.cut();
let _ = self.textarea.insert_str(format!("/{} ", cmd.command()));
// Hide the slash-command popup; sync logic will open the model selector.
self.active_popup = ActivePopup::None;
return (InputResult::None, true);
}
// Send command + args to the app layer.
self.app_event_tx
.send(AppEvent::DispatchCommandWithArgs(*cmd, args));
// Clear textarea so no residual text remains
self.textarea.select_all();
self.textarea.cut();
@@ -305,6 +365,80 @@ impl ChatComposer<'_> {
}
}
/// Handle key events when model selection popup is visible.
fn handle_key_event_with_model_popup(&mut self, key_event: KeyEvent) -> (InputResult, bool) {
let ActivePopup::Model(popup) = &mut self.active_popup else {
unreachable!();
};
match key_event.into() {
Input { key: Key::Up, .. } => {
popup.move_up();
(InputResult::None, true)
}
Input { key: Key::Down, .. } => {
popup.move_down();
(InputResult::None, true)
}
Input { key: Key::Esc, .. } => {
// Hide model popup; keep composer content unchanged.
self.active_popup = ActivePopup::None;
(InputResult::None, true)
}
Input {
key: Key::Enter,
ctrl: false,
alt: false,
shift: false,
}
| Input { key: Key::Tab, .. } => {
if let Some(model) = popup.selected_model() {
self.app_event_tx.send(AppEvent::SelectModel(model));
// Clear composer input and close the popup.
self.textarea.select_all();
self.textarea.cut();
self.pending_pastes.clear();
self.active_popup = ActivePopup::None;
return (InputResult::None, true);
}
// No selection in the list: treat the typed argument as the model name.
// Extract arguments after `/model` from the first line.
let first_line = self
.textarea
.lines()
.first()
.map(|s| s.as_str())
.unwrap_or("");
let args = if let Some((cmd_token, args)) =
Self::parse_slash_command_and_args_from_line(first_line)
{
if cmd_token == SlashCommand::Model.command() {
args
} else {
String::new()
}
} else {
String::new()
};
if !args.trim().is_empty() {
// Dispatch as a command with args so normalization is applied centrally.
self.app_event_tx
.send(AppEvent::DispatchCommandWithArgs(SlashCommand::Model, args));
// Clear composer input and close the popup.
self.textarea.select_all();
self.textarea.cut();
self.pending_pastes.clear();
self.active_popup = ActivePopup::None;
return (InputResult::None, true);
}
(InputResult::None, false)
}
input => self.handle_input_basic(input),
}
}
/// Extract the `@token` that the cursor is currently positioned on, if any.
///
/// The returned string **does not** include the leading `@`.
@@ -590,19 +724,47 @@ impl ChatComposer<'_> {
.unwrap_or("");
let input_starts_with_slash = first_line.starts_with('/');
// Special handling: if the user typed `/model ` (with a space), open the model selector
// and do not show the slash-command popup.
let should_open_model_selector = if let Some(stripped) = first_line.strip_prefix('/') {
let token = stripped.trim_start();
let cmd_token = token.split_whitespace().next().unwrap_or("");
if cmd_token == SlashCommand::Model.command() {
let rest = &token[cmd_token.len()..];
// Show model popup as soon as a whitespace after the command is present.
rest.chars().next().is_some_and(|c| c.is_whitespace())
} else {
false
}
} else {
false
};
match &mut self.active_popup {
ActivePopup::Command(popup) => {
if input_starts_with_slash {
popup.on_composer_text_change(first_line.to_string());
if should_open_model_selector {
// Switch away from command popup and request opening the model selector.
self.active_popup = ActivePopup::None;
self.app_event_tx.send(AppEvent::OpenModelSelector);
} else {
popup.on_composer_text_change(first_line.to_string());
}
} else {
self.active_popup = ActivePopup::None;
}
}
_ => {
if input_starts_with_slash {
let mut command_popup = CommandPopup::new();
command_popup.on_composer_text_change(first_line.to_string());
self.active_popup = ActivePopup::Command(command_popup);
if should_open_model_selector {
// Request the app to open the model selector; popup will render once options arrive.
self.app_event_tx.send(AppEvent::OpenModelSelector);
} else {
let mut command_popup = CommandPopup::new();
command_popup.on_composer_text_change(first_line.to_string());
self.active_popup = ActivePopup::Command(command_popup);
}
}
}
}
@@ -644,6 +806,48 @@ impl ChatComposer<'_> {
self.dismissed_file_popup_token = None;
}
/// Synchronize the model-selection popup filter with the current composer text.
/// When the first line starts with `/model`, everything after the command becomes the query.
fn sync_model_popup(&mut self) {
    let line = self
        .textarea
        .lines()
        .first()
        .map(String::as_str)
        .unwrap_or_default();

    // When the first line is still a `/model` command, forward the argument
    // text to the popup as its filter query.
    let is_model_line = match Self::parse_slash_command_and_args_from_line(line) {
        Some((cmd, args)) if cmd == SlashCommand::Model.command() => {
            if let ActivePopup::Model(popup) = &mut self.active_popup {
                popup.set_query(&args);
            }
            true
        }
        _ => false,
    };

    // The line no longer invokes `/model`; hide the model popup if visible.
    if !is_model_line && matches!(self.active_popup, ActivePopup::Model(_)) {
        self.active_popup = ActivePopup::None;
    }
}
/// Parse a leading "/command" and return (command_token, args_trimmed_left).
/// Returns None if the line does not start with a slash or the command is empty.
///
/// Whitespace between the slash and the command is tolerated (`/  model x`
/// parses the same as `/model x`); trailing whitespace in the args is kept.
fn parse_slash_command_and_args_from_line(line: &str) -> Option<(String, String)> {
    // `?` bails out when there is no leading slash at all.
    let body = line.strip_prefix('/')?.trim_start();
    // After trim_start, the first whitespace-split token starts at byte 0;
    // `next()` is None exactly when the command part is empty.
    let command = body.split_whitespace().next()?;
    // Everything after the command, with leading whitespace removed.
    let args = body[command.len()..].trim_start();
    Some((command.to_string(), args.to_string()))
}
fn update_border(&mut self, has_focus: bool) {
let border_style = if has_focus {
Style::default().fg(Color::Cyan)
@@ -705,6 +909,26 @@ impl WidgetRef for &ChatComposer<'_> {
popup.render(popup_rect, buf);
self.textarea.render(textarea_rect, buf);
}
ActivePopup::Model(popup) => {
let popup_height = popup.calculate_required_height();
let popup_height = popup_height.min(area.height);
let textarea_rect = Rect {
x: area.x,
y: area.y,
width: area.width,
height: area.height.saturating_sub(popup_height),
};
let popup_rect = Rect {
x: area.x,
y: area.y + textarea_rect.height,
width: area.width,
height: popup_height,
};
popup.render(popup_rect, buf);
self.textarea.render(textarea_rect, buf);
}
ActivePopup::None => {
let mut textarea_rect = area;
textarea_rect.height = textarea_rect.height.saturating_sub(1);

View File

@@ -18,6 +18,7 @@ mod chat_composer;
mod chat_composer_history;
mod command_popup;
mod file_search_popup;
mod model_selection_popup;
mod status_indicator_view;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -70,6 +71,12 @@ impl BottomPane<'_> {
}
}
/// Show the model-selection popup in the composer.
///
/// `current_model` is the model to pin/highlight; `options` is the full
/// candidate list (the popup does not dedupe it — see `ModelSelectionPopup`).
pub(crate) fn show_model_selector(&mut self, current_model: &str, options: Vec<String>) {
    self.composer.open_model_selector(current_model, options);
    // Repaint immediately so the popup appears without waiting for input.
    self.request_redraw();
}
pub fn desired_height(&self, width: u16) -> u16 {
self.active_view
.as_ref()

View File

@@ -0,0 +1,247 @@
use codex_common::fuzzy_match::fuzzy_indices;
use codex_common::fuzzy_match::fuzzy_match;
use ratatui::buffer::Buffer;
use ratatui::layout::Rect;
use ratatui::prelude::Constraint;
use ratatui::style::Color;
use ratatui::style::Modifier;
use ratatui::style::Style;
use ratatui::text::Line;
use ratatui::text::Span;
use ratatui::widgets::Block;
use ratatui::widgets::BorderType;
use ratatui::widgets::Borders;
use ratatui::widgets::Cell;
use ratatui::widgets::Row;
use ratatui::widgets::Table;
use ratatui::widgets::Widget;
use ratatui::widgets::WidgetRef;
/// Maximum number of options shown in the popup.
const MAX_RESULTS: usize = 8;

/// Visual state for the model-selection popup.
///
/// Holds only presentation state: the pinned current model, the raw option
/// list, the fuzzy filter query, and the cursor position among visible rows.
pub(crate) struct ModelSelectionPopup {
    /// The current model (pinned and color-coded when visible).
    current_model: String,
    /// All available model options (deduplicated externally as needed).
    options: Vec<String>,
    /// Current filter query (derived from the composer, e.g. after `/model`).
    query: String,
    /// Currently selected index among the visible rows (if any).
    /// `None` means no row is highlighted (e.g. empty filter result).
    selected_idx: Option<usize>,
}
impl ModelSelectionPopup {
    /// Create a popup with no active filter and no selection.
    pub(crate) fn new(current_model: &str, options: Vec<String>) -> Self {
        Self {
            current_model: current_model.to_string(),
            options,
            query: String::new(),
            selected_idx: None,
        }
    }

    /// Update the current model and option list. Resets/clamps selection as needed.
    pub(crate) fn set_options(&mut self, current_model: &str, options: Vec<String>) {
        self.current_model = current_model.to_string();
        self.options = options;
        // Keep the previous selection when still in range, otherwise clamp it
        // to the last visible row (or clear it when nothing is visible).
        let visible_len = self.visible_rows().len();
        self.selected_idx = match visible_len {
            0 => None,
            _ => Some(self.selected_idx.unwrap_or(0).min(visible_len - 1)),
        };
    }

    /// Update the fuzzy filter query (the text typed after `/model`).
    pub(crate) fn set_query(&mut self, query: &str) {
        if self.query == query {
            return;
        }
        self.query.clear();
        self.query.push_str(query);
        // The filtered list changed; reset the selection to the first row.
        let visible_len = self.visible_rows().len();
        self.selected_idx = match visible_len {
            0 => None,
            _ => Some(0),
        };
    }

    /// Move selection cursor up (selects the first row when nothing is selected).
    pub(crate) fn move_up(&mut self) {
        if let Some(idx) = self.selected_idx {
            if idx > 0 {
                self.selected_idx = Some(idx - 1);
            }
        } else if !self.visible_rows().is_empty() {
            self.selected_idx = Some(0);
        }
    }

    /// Move selection cursor down.
    ///
    /// NOTE(review): the index can advance past the `MAX_RESULTS` rows the
    /// renderer actually draws (render truncates with `take(MAX_RESULTS)`),
    /// so the highlight may leave the visible window — confirm whether
    /// navigation should clamp to the rendered rows instead.
    pub(crate) fn move_down(&mut self) {
        let len = self.visible_rows().len();
        if len == 0 {
            self.selected_idx = None;
            return;
        }
        match self.selected_idx {
            Some(idx) if idx + 1 < len => self.selected_idx = Some(idx + 1),
            None => self.selected_idx = Some(0),
            _ => {}
        }
    }

    /// Currently selected model name, if any.
    pub(crate) fn selected_model(&self) -> Option<String> {
        let rows = self.visible_rows();
        self.selected_idx.and_then(|idx| {
            rows.get(idx)
                .map(|DisplayRow::Model { name, .. }| name.clone())
        })
    }

    /// Preferred height in rows. The popup draws only a left border, which
    /// occupies no extra rows, so this is simply the clamped row count
    /// (at least 1 so the "no matches" placeholder fits).
    pub(crate) fn calculate_required_height(&self) -> u16 {
        self.visible_rows().len().clamp(1, MAX_RESULTS) as u16
    }

    /// Compute rows to display, applying fuzzy filtering and pinning the
    /// current model to the top when no query is active.
    fn visible_rows(&self) -> Vec<DisplayRow> {
        // Candidates excluding the current model; it is handled separately so
        // it can be pinned and color-coded by the renderer.
        let others: Vec<&str> = self
            .options
            .iter()
            .map(|s| s.as_str())
            .filter(|m| *m != self.current_model)
            .collect();

        if self.query.trim().is_empty() {
            // No filter: current model first, then the rest in original order.
            let mut rows = vec![DisplayRow::Model {
                name: self.current_model.clone(),
                match_indices: None,
                is_current: true,
            }];
            rows.extend(others.into_iter().map(|name| DisplayRow::Model {
                name: name.to_string(),
                match_indices: None,
                is_current: false,
            }));
            return rows;
        }

        // Searching: include the current model only if it matches the query.
        let mut rows: Vec<DisplayRow> = Vec::new();
        if let Some(indices) = fuzzy_indices(&self.current_model, &self.query) {
            rows.push(DisplayRow::Model {
                name: self.current_model.clone(),
                match_indices: Some(indices),
                is_current: true,
            });
        }

        // Fuzzy-match the rest, then order by score, name, and match
        // tightness (fewer matched positions first).
        let mut matches: Vec<(String, Vec<usize>, i32)> = others
            .into_iter()
            .filter_map(|name| {
                fuzzy_match(name, &self.query)
                    .map(|(indices, score)| (name.to_string(), indices, score))
            })
            .collect();
        matches.sort_by(|(a_name, a_idx, a_score), (b_name, b_idx, b_score)| {
            a_score
                .cmp(b_score)
                .then_with(|| a_name.cmp(b_name))
                .then_with(|| a_idx.len().cmp(&b_idx.len()))
        });
        // `others` already excludes the current model, so no per-item
        // re-check is needed here (the previous one was redundant).
        rows.extend(
            matches
                .into_iter()
                .map(|(name, indices, _score)| DisplayRow::Model {
                    name,
                    match_indices: Some(indices),
                    is_current: false,
                }),
        );
        rows
    }
}
/// Row in the model popup.
///
/// Currently a single-variant enum — presumably to leave room for non-model
/// rows (headers/separators) later; confirm before flattening to a struct.
enum DisplayRow {
    Model {
        /// Model name exactly as displayed.
        name: String,
        match_indices: Option<Vec<usize>>, // indices to bold (char positions)
        /// True when this row is the currently configured model.
        is_current: bool,
    },
}
impl WidgetRef for &ModelSelectionPopup {
    /// Render the popup as a single-column table with a left border.
    /// Fuzzy-matched characters are bolded; the selected row is yellow/bold
    /// and the current model is cyan when not selected.
    fn render_ref(&self, area: Rect, buf: &mut Buffer) {
        let visible = self.visible_rows();
        let mut table_rows: Vec<Row> = Vec::new();

        if visible.is_empty() {
            // Placeholder row when the filter matches nothing.
            let placeholder = Span::styled(
                "no matches",
                Style::default().add_modifier(Modifier::ITALIC | Modifier::DIM),
            );
            table_rows.push(Row::new(vec![Cell::from(Line::from(placeholder))]));
        } else {
            for (row_idx, display) in visible.into_iter().take(MAX_RESULTS).enumerate() {
                let DisplayRow::Model {
                    name,
                    match_indices,
                    is_current,
                } = display;

                // Bold the fuzzy-matched character positions when present;
                // the index list is consumed front-to-back in step with the
                // character walk.
                let spans: Vec<Span> = match match_indices.as_ref() {
                    Some(idxs) => {
                        let mut pending = idxs.iter().peekable();
                        name.chars()
                            .enumerate()
                            .map(|(char_idx, ch)| {
                                let mut style = Style::default();
                                if pending.peek().is_some_and(|next| **next == char_idx) {
                                    pending.next();
                                    style = style.add_modifier(Modifier::BOLD);
                                }
                                Span::styled(ch.to_string(), style)
                            })
                            .collect()
                    }
                    None => vec![Span::raw(name.clone())],
                };

                let mut cell = Cell::from(Line::from(spans));
                if Some(row_idx) == self.selected_idx {
                    cell = cell.style(
                        Style::default()
                            .fg(Color::Yellow)
                            .add_modifier(Modifier::BOLD),
                    );
                } else if is_current {
                    cell = cell.style(Style::default().fg(Color::Cyan));
                }
                table_rows.push(Row::new(vec![cell]));
            }
        }

        let table = Table::new(table_rows, vec![Constraint::Percentage(100)])
            .block(
                Block::default()
                    .borders(Borders::LEFT)
                    .border_type(BorderType::QuadrantOutside)
                    .border_style(Style::default().fg(Color::DarkGray)),
            )
            .widths([Constraint::Percentage(100)]);
        table.render(area, buf);
    }
}

View File

@@ -6,6 +6,8 @@ use std::time::Duration;
use codex_core::codex_wrapper::CodexConversation;
use codex_core::codex_wrapper::init_codex;
use codex_core::config::Config;
use codex_core::config::ConfigToml;
use codex_core::openai_model_info::get_all_model_names;
use codex_core::protocol::AgentMessageDeltaEvent;
use codex_core::protocol::AgentMessageEvent;
use codex_core::protocol::AgentReasoningDeltaEvent;
@@ -64,6 +66,7 @@ pub(crate) struct ChatWidget<'a> {
// We wait for the final AgentMessage event and then emit the full text
// at once into scrollback so the history contains a single message.
answer_buffer: String,
new_session: bool,
running_commands: HashMap<String, RunningCommand>,
}
@@ -151,6 +154,7 @@ impl ChatWidget<'_> {
token_usage: TokenUsage::default(),
reasoning_buffer: String::new(),
answer_buffer: String::new(),
new_session: true,
running_commands: HashMap::new(),
}
}
@@ -224,8 +228,12 @@ impl ChatWidget<'_> {
EventMsg::SessionConfigured(event) => {
self.bottom_pane
.set_history_metadata(event.history_log_id, event.history_entry_count);
// Record session information at the top of the conversation.
self.add_to_history(HistoryCell::new_session_info(&self.config, event, true));
if self.new_session {
self.add_to_history(HistoryCell::new_session_info(&self.config, event, true));
self.new_session = false;
}
if let Some(user_message) = self.initial_user_message.take() {
// If the user provided an initial message, add it to the
@@ -504,6 +512,62 @@ impl ChatWidget<'_> {
&self.token_usage
}
/// Open the model selection view in the bottom pane.
///
/// The candidate list is assembled from the built-in model names, the
/// currently configured model (covers custom values), and any models named
/// in `config.toml` (top-level `model` plus each profile's `model`). The
/// list is deduplicated here because the popup displays it verbatim.
pub(crate) fn show_model_selector(&mut self) {
    let current = self.config.model.clone();
    let mut options = get_all_model_names()
        .into_iter()
        .map(|s| s.to_string())
        .collect::<Vec<_>>();
    // Always include the currently configured model (covers custom values).
    options.push(current.clone());
    // Append any models found in config.toml profiles and top-level model.
    // Read/parse failures are ignored on purpose: the selector still works
    // with the built-in list alone.
    let config_path = self.config.codex_home.join("config.toml");
    if let Ok(contents) = std::fs::read_to_string(&config_path) {
        if let Ok(cfg) = toml::from_str::<ConfigToml>(&contents) {
            let mut config_models: Vec<String> = Vec::new();
            if let Some(m) = cfg.model {
                config_models.push(m);
            }
            for (_name, profile) in cfg.profiles.into_iter() {
                if let Some(m) = profile.model {
                    config_models.push(m);
                }
            }
            // Alphabetical ordering for config models.
            config_models.sort();
            options.extend(config_models);
        }
    }
    // Drop duplicates while keeping first-occurrence order: the current
    // model was pushed unconditionally and config.toml entries can repeat
    // built-ins, which would otherwise show up twice in the popup.
    let mut seen = std::collections::HashSet::new();
    options.retain(|m| seen.insert(m.clone()));
    self.bottom_pane.show_model_selector(&current, options);
}
/// Update the current model and reconfigure the running Codex session.
pub(crate) fn update_model_and_reconfigure(&mut self, model: String) {
    // Remember whether this is an actual change before overwriting config.
    let is_new_model = self.config.model != model;
    // Update local config so UI reflects the new model.
    self.config.model = model.clone();

    // Surface the change in the conversation log, but only when it changed.
    if is_new_model {
        let notice = format!("Set model to {model}.");
        self.add_to_history(HistoryCell::new_background_event(notice));
    }

    // Rebuild the session-configuration op from the config (same provider
    // and policies) so newly added fields are picked up automatically, then
    // resubmit it to the agent.
    let reconfigure_op = self
        .config
        .to_configure_session_op(None, self.config.user_instructions.clone());
    self.submit_op(reconfigure_op);
    self.request_redraw();
}
pub(crate) fn clear_token_usage(&mut self) {
self.token_usage = TokenUsage::default();
self.bottom_pane

View File

@@ -14,6 +14,7 @@ use crossterm::style::SetBackgroundColor;
use crossterm::style::SetColors;
use crossterm::style::SetForegroundColor;
use ratatui::layout::Size;
use ratatui::prelude::Backend;
use ratatui::style::Color;
use ratatui::style::Modifier;
use ratatui::text::Line;
@@ -21,20 +22,6 @@ use ratatui::text::Span;
/// Insert `lines` above the viewport.
pub(crate) fn insert_history_lines(terminal: &mut tui::Tui, lines: Vec<Line>) {
let mut out = std::io::stdout();
insert_history_lines_to_writer(terminal, &mut out, lines);
}
/// Like `insert_history_lines`, but writes ANSI to the provided writer. This
/// is intended for testing where a capture buffer is used instead of stdout.
pub fn insert_history_lines_to_writer<B, W>(
terminal: &mut crate::custom_terminal::Terminal<B>,
writer: &mut W,
lines: Vec<Line>,
) where
B: ratatui::backend::Backend,
W: Write,
{
let screen_size = terminal.backend().size().unwrap_or(Size::new(0, 0));
let cursor_pos = terminal.get_cursor_position().ok();
@@ -45,22 +32,10 @@ pub fn insert_history_lines_to_writer<B, W>(
// If the viewport is not at the bottom of the screen, scroll it down to make room.
// Don't scroll it past the bottom of the screen.
let scroll_amount = wrapped_lines.min(screen_size.height - area.bottom());
// Emit ANSI to scroll the lower region (from the top of the viewport to the bottom
// of the screen) downward by `scroll_amount` lines. We do this by:
// 1) Limiting the scroll region to [area.top()+1 .. screen_height] (1-based bounds)
// 2) Placing the cursor at the top margin of that region
// 3) Emitting Reverse Index (RI, ESC M) `scroll_amount` times
// 4) Resetting the scroll region back to full screen
let top_1based = area.top() + 1; // Convert 0-based row to 1-based for DECSTBM
queue!(writer, SetScrollRegion(top_1based..screen_size.height)).ok();
queue!(writer, MoveTo(0, area.top())).ok();
for _ in 0..scroll_amount {
// Reverse Index (RI): ESC M
queue!(writer, Print("\x1bM")).ok();
}
queue!(writer, ResetScrollRegion).ok();
terminal
.backend_mut()
.scroll_region_down(area.top()..screen_size.height, scroll_amount)
.ok();
let cursor_top = area.top().saturating_sub(1);
area.y += scroll_amount;
terminal.set_viewport_area(area);
@@ -84,23 +59,23 @@ pub fn insert_history_lines_to_writer<B, W>(
// ││ ││
// │╰────────────────────────────╯│
// └──────────────────────────────┘
queue!(writer, SetScrollRegion(1..area.top())).ok();
queue!(std::io::stdout(), SetScrollRegion(1..area.top())).ok();
// NB: we are using MoveTo instead of set_cursor_position here to avoid messing with the
// terminal's last_known_cursor_position, which hopefully will still be accurate after we
// fetch/restore the cursor position. insert_history_lines should be cursor-position-neutral :)
queue!(writer, MoveTo(0, cursor_top)).ok();
queue!(std::io::stdout(), MoveTo(0, cursor_top)).ok();
for line in lines {
queue!(writer, Print("\r\n")).ok();
write_spans(writer, line.iter()).ok();
queue!(std::io::stdout(), Print("\r\n")).ok();
write_spans(&mut std::io::stdout(), line.iter()).ok();
}
queue!(writer, ResetScrollRegion).ok();
queue!(std::io::stdout(), ResetScrollRegion).ok();
// Restore the cursor position to where it was before we started.
if let Some(cursor_pos) = cursor_pos {
queue!(writer, MoveTo(cursor_pos.x, cursor_pos.y)).ok();
queue!(std::io::stdout(), MoveTo(cursor_pos.x, cursor_pos.y)).ok();
}
}

View File

@@ -25,18 +25,12 @@ mod bottom_pane;
mod chatwidget;
mod citation_regex;
mod cli;
#[cfg(feature = "vt100-tests")]
pub mod custom_terminal;
#[cfg(not(feature = "vt100-tests"))]
mod custom_terminal;
mod exec_command;
mod file_search;
mod get_git_diff;
mod git_warning_screen;
mod history_cell;
#[cfg(feature = "vt100-tests")]
pub mod insert_history;
#[cfg(not(feature = "vt100-tests"))]
mod insert_history;
mod log_layer;
mod markdown;

View File

@@ -15,6 +15,7 @@ pub enum SlashCommand {
New,
Compact,
Diff,
Model,
Quit,
#[cfg(debug_assertions)]
TestApproval,
@@ -27,6 +28,7 @@ impl SlashCommand {
SlashCommand::New => "Start a new chat.",
SlashCommand::Compact => "Compact the chat history.",
SlashCommand::Quit => "Exit the application.",
SlashCommand::Model => "Select the model to use.",
SlashCommand::Diff => {
"Show git diff of the working directory (including untracked files)"
}

View File

@@ -1,211 +0,0 @@
#![cfg(feature = "vt100-tests")]
use ratatui::backend::TestBackend;
use ratatui::layout::Rect;
use ratatui::text::Line;
use ratatui::text::Span;
use ratatui::style::{Color, Style};
/// HIST-001: Basic insertion at bottom, no wrap.
///
/// This test captures the ANSI bytes produced by `insert_history_lines_to_writer`
/// when the viewport is at the bottom of the screen (so no pre-scroll is
/// required). It feeds the bytes into a vt100 parser and asserts that the
/// inserted lines are visible near the bottom of the screen.
#[test]
fn hist_001_basic_insertion_no_wrap() {
    // Screen of 20x6; viewport is the last row (height=1 at y=5)
    let backend = TestBackend::new(20, 6);
    let mut term = codex_tui::custom_terminal::Terminal::with_options(backend).unwrap();
    // Place the viewport at the bottom row
    let area = Rect::new(0, 5, 20, 1);
    term.set_viewport_area(area);
    let lines = vec![Line::from("first"), Line::from("second")];
    let mut buf: Vec<u8> = Vec::new();
    codex_tui::insert_history::insert_history_lines_to_writer(&mut term, &mut buf, lines);
    // Feed captured bytes into vt100 emulator
    let mut parser = vt100::Parser::new(6, 20, 0);
    parser.process(&buf);
    let screen = parser.screen();
    // Gather visible rows as strings
    let mut rows: Vec<String> = Vec::new();
    for row in 0..6 {
        let mut s = String::new();
        for col in 0..20 {
            if let Some(cell) = screen.cell(row, col) {
                let cont = cell.contents();
                if let Some(ch) = cont.chars().next() {
                    s.push(ch);
                } else {
                    // Cell exists but holds no character — render as blank.
                    s.push(' ');
                }
            } else {
                s.push(' ');
            }
        }
        rows.push(s);
    }
    // The inserted lines should appear somewhere above the viewport; in this
    // simple case, they will occupy the two rows immediately above the final
    // row of the scroll region.
    let joined = rows.join("\n");
    assert!(joined.contains("first"), "screen did not contain 'first'\n{joined}");
    assert!(joined.contains("second"), "screen did not contain 'second'\n{joined}");
}
/// HIST-002: Long token wraps across rows within the scroll region.
///
/// Asserts character conservation only: every 'A' written must appear
/// somewhere on the emulated screen after wrapping.
#[test]
fn hist_002_long_token_wraps() {
    let backend = TestBackend::new(20, 6);
    let mut term = codex_tui::custom_terminal::Terminal::with_options(backend).unwrap();
    let area = Rect::new(0, 5, 20, 1);
    term.set_viewport_area(area);
    let long = "A".repeat(45); // > 2 lines at width 20
    let lines = vec![Line::from(long.clone())];
    let mut buf: Vec<u8> = Vec::new();
    codex_tui::insert_history::insert_history_lines_to_writer(&mut term, &mut buf, lines);
    let mut parser = vt100::Parser::new(6, 20, 0);
    parser.process(&buf);
    let screen = parser.screen();
    // Count total A's on the screen
    let mut count_a = 0usize;
    for row in 0..6 {
        for col in 0..20 {
            if let Some(cell) = screen.cell(row, col) {
                if let Some(ch) = cell.contents().chars().next() {
                    if ch == 'A' { count_a += 1; }
                }
            }
        }
    }
    assert_eq!(count_a, long.len(), "wrapped content did not preserve all characters");
}
/// HIST-003: Emoji/CJK content renders fully (no broken graphemes).
///
/// Only presence of each non-space character is asserted, not its position —
/// wide-character layout may shift cells.
#[test]
fn hist_003_emoji_and_cjk() {
    let backend = TestBackend::new(20, 6);
    let mut term = codex_tui::custom_terminal::Terminal::with_options(backend).unwrap();
    let area = Rect::new(0, 5, 20, 1);
    term.set_viewport_area(area);
    let text = String::from("😀😀😀😀😀 你好世界");
    let lines = vec![Line::from(text.clone())];
    let mut buf: Vec<u8> = Vec::new();
    codex_tui::insert_history::insert_history_lines_to_writer(&mut term, &mut buf, lines);
    let mut parser = vt100::Parser::new(6, 20, 0);
    parser.process(&buf);
    let screen = parser.screen();
    // Reconstruct string by concatenating non-space cells; ensure all emojis and CJK are present.
    let mut reconstructed = String::new();
    for row in 0..6 {
        for col in 0..20 {
            if let Some(cell) = screen.cell(row, col) {
                let cont = cell.contents();
                if let Some(ch) = cont.chars().next() {
                    if ch != ' ' { reconstructed.push(ch); }
                }
            }
        }
    }
    for ch in text.chars().filter(|c| !c.is_whitespace()) {
        assert!(reconstructed.contains(ch), "missing character {:?} in reconstructed screen", ch);
    }
}
/// HIST-004: Mixed ANSI spans render textual content correctly (styles stripped in emulator).
///
/// Builds a line from a styled span plus a raw span and checks the two
/// render contiguously — styling must not split or reorder the text.
#[test]
fn hist_004_mixed_ansi_spans() {
    let backend = TestBackend::new(20, 6);
    let mut term = codex_tui::custom_terminal::Terminal::with_options(backend).unwrap();
    let area = Rect::new(0, 5, 20, 1);
    term.set_viewport_area(area);
    let line = Line::from(vec![
        Span::styled("red", Style::default().fg(Color::Red)),
        Span::raw("+plain"),
    ]);
    let mut buf: Vec<u8> = Vec::new();
    codex_tui::insert_history::insert_history_lines_to_writer(&mut term, &mut buf, vec![line]);
    let mut parser = vt100::Parser::new(6, 20, 0);
    parser.process(&buf);
    let screen = parser.screen();
    let mut rows: Vec<String> = Vec::new();
    for row in 0..6 {
        let mut s = String::new();
        for col in 0..20 {
            if let Some(cell) = screen.cell(row, col) {
                let cont = cell.contents();
                if let Some(ch) = cont.chars().next() { s.push(ch); } else { s.push(' '); }
            } else {
                s.push(' ');
            }
        }
        rows.push(s);
    }
    let joined = rows.join("\n");
    assert!(joined.contains("red+plain"), "styled text did not render as expected\n{joined}");
}
/// HIST-006: Cursor is restored after insertion (CUP to 1;1 when backend reports 0,0).
///
/// Inspects the raw ANSI byte stream (not an emulated screen) for the final
/// cursor-position and scroll-region-reset sequences.
#[test]
fn hist_006_cursor_restoration() {
    let backend = TestBackend::new(20, 6);
    let mut term = codex_tui::custom_terminal::Terminal::with_options(backend).unwrap();
    let area = Rect::new(0, 5, 20, 1);
    term.set_viewport_area(area);
    let lines = vec![Line::from("x")];
    let mut buf: Vec<u8> = Vec::new();
    codex_tui::insert_history::insert_history_lines_to_writer(&mut term, &mut buf, lines);
    let s = String::from_utf8_lossy(&buf);
    // CUP to 1;1 (ANSI: ESC[1;1H)
    assert!(s.contains("\u{1b}[1;1H"), "expected final CUP to 1;1 in output, got: {s:?}");
    // Reset scroll region
    assert!(s.contains("\u{1b}[r"), "expected reset scroll region in output, got: {s:?}");
}
/// HIST-005: Pre-scroll region is emitted via ANSI when viewport is not at bottom.
///
/// Checks the raw escape sequences: DECSTBM bounds, cursor placement, RI
/// count, and the subsequent insertion scroll region.
#[test]
fn hist_005_pre_scroll_region_down() {
    let backend = TestBackend::new(20, 6);
    let mut term = codex_tui::custom_terminal::Terminal::with_options(backend).unwrap();
    // Viewport not at bottom: y=3 (0-based), height=1
    let area = Rect::new(0, 3, 20, 1);
    term.set_viewport_area(area);
    let lines = vec![Line::from("first"), Line::from("second")];
    let mut buf: Vec<u8> = Vec::new();
    codex_tui::insert_history::insert_history_lines_to_writer(&mut term, &mut buf, lines);
    let s = String::from_utf8_lossy(&buf);
    // Expect we limited scroll region to [top+1 .. screen_height] => [4 .. 6] (1-based)
    assert!(s.contains("\u{1b}[4;6r"), "expected pre-scroll SetScrollRegion 4..6, got: {s:?}");
    // Expect we moved cursor to top of that region: row 3 (0-based) => CUP 4;1H
    assert!(s.contains("\u{1b}[4;1H"), "expected cursor at top of pre-scroll region, got: {s:?}");
    // Expect at least two Reverse Index commands (ESC M) for two inserted lines
    // NOTE(review): the comment above says "at least two" but the assertion
    // below only requires one — confirm which is intended.
    let ri_count = s.matches("\u{1b}M").count();
    assert!(ri_count >= 1, "expected at least one RI (ESC M), got: {s:?}");
    // After pre-scroll, we set insertion scroll region to [1 .. new_top] => [1 .. 5]
    assert!(s.contains("\u{1b}[1;5r"), "expected insertion SetScrollRegion 1..5, got: {s:?}");
}