59 KiB
PR #2696: Custom /prompts
- URL: https://github.com/openai/codex/pull/2696
- Author: dedrisian-oai
- Created: 2025-08-26 01:23:07 UTC
- Updated: 2025-08-29 02:16:51 UTC
- Changes: +448/-52, Files changed: 13, Commits: 25
Description
Adds custom /prompts to ~/.codex/prompts/<command>.md.
Details:
- Adds
Op::ListCustomPromptsto core. - Returns
ListCustomPromptsResponsewith list ofCustomPrompt(name, content). - TUI calls the operation on load, and populates the custom prompts (excluding prompts that collide with builtins).
- Selecting the custom prompt automatically sends the prompt to the agent.
Full Diff
diff --git a/codex-rs/core/src/codex.rs b/codex-rs/core/src/codex.rs
index 8443c534fb..40bcdc2c94 100644
--- a/codex-rs/core/src/codex.rs
+++ b/codex-rs/core/src/codex.rs
@@ -89,6 +89,7 @@ use crate::protocol::ExecCommandBeginEvent;
use crate::protocol::ExecCommandEndEvent;
use crate::protocol::FileChange;
use crate::protocol::InputItem;
+use crate::protocol::ListCustomPromptsResponseEvent;
use crate::protocol::Op;
use crate::protocol::PatchApplyBeginEvent;
use crate::protocol::PatchApplyEndEvent;
@@ -110,6 +111,7 @@ use crate::user_notification::UserNotification;
use crate::util::backoff;
use codex_protocol::config_types::ReasoningEffort as ReasoningEffortConfig;
use codex_protocol::config_types::ReasoningSummary as ReasoningSummaryConfig;
+use codex_protocol::custom_prompts::CustomPrompt;
use codex_protocol::models::ContentItem;
use codex_protocol::models::FunctionCallOutputPayload;
use codex_protocol::models::LocalShellAction;
@@ -1286,6 +1288,27 @@ async fn submission_loop(
warn!("failed to send McpListToolsResponse event: {e}");
}
}
+ Op::ListCustomPrompts => {
+ let tx_event = sess.tx_event.clone();
+ let sub_id = sub.id.clone();
+
+ let custom_prompts: Vec<CustomPrompt> =
+ if let Some(dir) = crate::custom_prompts::default_prompts_dir() {
+ crate::custom_prompts::discover_prompts_in(&dir).await
+ } else {
+ Vec::new()
+ };
+
+ let event = Event {
+ id: sub_id,
+ msg: EventMsg::ListCustomPromptsResponse(ListCustomPromptsResponseEvent {
+ custom_prompts,
+ }),
+ };
+ if let Err(e) = tx_event.send(event).await {
+ warn!("failed to send ListCustomPromptsResponse event: {e}");
+ }
+ }
Op::Compact => {
// Create a summarization request as user input
const SUMMARIZATION_PROMPT: &str = include_str!("prompt_for_compact_command.md");
diff --git a/codex-rs/core/src/custom_prompts.rs b/codex-rs/core/src/custom_prompts.rs
new file mode 100644
index 0000000000..4974e7c52d
--- /dev/null
+++ b/codex-rs/core/src/custom_prompts.rs
@@ -0,0 +1,127 @@
+use codex_protocol::custom_prompts::CustomPrompt;
+use std::collections::HashSet;
+use std::path::Path;
+use std::path::PathBuf;
+use tokio::fs;
+
+/// Return the default prompts directory: `$CODEX_HOME/prompts`.
+/// If `CODEX_HOME` cannot be resolved, returns `None`.
+pub fn default_prompts_dir() -> Option<PathBuf> {
+ crate::config::find_codex_home()
+ .ok()
+ .map(|home| home.join("prompts"))
+}
+
+/// Discover prompt files in the given directory, returning entries sorted by name.
+/// Non-files are ignored. If the directory does not exist or cannot be read, returns empty.
+pub async fn discover_prompts_in(dir: &Path) -> Vec<CustomPrompt> {
+ discover_prompts_in_excluding(dir, &HashSet::new()).await
+}
+
+/// Discover prompt files in the given directory, excluding any with names in `exclude`.
+/// Returns entries sorted by name. Non-files are ignored. Missing/unreadable dir yields empty.
+pub async fn discover_prompts_in_excluding(
+ dir: &Path,
+ exclude: &HashSet<String>,
+) -> Vec<CustomPrompt> {
+ let mut out: Vec<CustomPrompt> = Vec::new();
+ let mut entries = match fs::read_dir(dir).await {
+ Ok(entries) => entries,
+ Err(_) => return out,
+ };
+
+ while let Ok(Some(entry)) = entries.next_entry().await {
+ let path = entry.path();
+ let is_file = entry
+ .file_type()
+ .await
+ .map(|ft| ft.is_file())
+ .unwrap_or(false);
+ if !is_file {
+ continue;
+ }
+ // Only include Markdown files with a .md extension.
+ let is_md = path
+ .extension()
+ .and_then(|s| s.to_str())
+ .map(|ext| ext.eq_ignore_ascii_case("md"))
+ .unwrap_or(false);
+ if !is_md {
+ continue;
+ }
+ let Some(name) = path
+ .file_stem()
+ .and_then(|s| s.to_str())
+ .map(|s| s.to_string())
+ else {
+ continue;
+ };
+ if exclude.contains(&name) {
+ continue;
+ }
+ let content = match fs::read_to_string(&path).await {
+ Ok(s) => s,
+ Err(_) => continue,
+ };
+ out.push(CustomPrompt {
+ name,
+ path,
+ content,
+ });
+ }
+ out.sort_by(|a, b| a.name.cmp(&b.name));
+ out
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use std::fs;
+ use tempfile::tempdir;
+
+ #[tokio::test]
+ async fn empty_when_dir_missing() {
+ let tmp = tempdir().expect("create TempDir");
+ let missing = tmp.path().join("nope");
+ let found = discover_prompts_in(&missing).await;
+ assert!(found.is_empty());
+ }
+
+ #[tokio::test]
+ async fn discovers_and_sorts_files() {
+ let tmp = tempdir().expect("create TempDir");
+ let dir = tmp.path();
+ fs::write(dir.join("b.md"), b"b").unwrap();
+ fs::write(dir.join("a.md"), b"a").unwrap();
+ fs::create_dir(dir.join("subdir")).unwrap();
+ let found = discover_prompts_in(dir).await;
+ let names: Vec<String> = found.into_iter().map(|e| e.name).collect();
+ assert_eq!(names, vec!["a", "b"]);
+ }
+
+ #[tokio::test]
+ async fn excludes_builtins() {
+ let tmp = tempdir().expect("create TempDir");
+ let dir = tmp.path();
+ fs::write(dir.join("init.md"), b"ignored").unwrap();
+ fs::write(dir.join("foo.md"), b"ok").unwrap();
+ let mut exclude = HashSet::new();
+ exclude.insert("init".to_string());
+ let found = discover_prompts_in_excluding(dir, &exclude).await;
+ let names: Vec<String> = found.into_iter().map(|e| e.name).collect();
+ assert_eq!(names, vec!["foo"]);
+ }
+
+ #[tokio::test]
+ async fn skips_non_utf8_files() {
+ let tmp = tempdir().expect("create TempDir");
+ let dir = tmp.path();
+ // Valid UTF-8 file
+ fs::write(dir.join("good.md"), b"hello").unwrap();
+ // Invalid UTF-8 content in .md file (e.g., lone 0xFF byte)
+ fs::write(dir.join("bad.md"), vec![0xFF, 0xFE, b'\n']).unwrap();
+ let found = discover_prompts_in(dir).await;
+ let names: Vec<String> = found.into_iter().map(|e| e.name).collect();
+ assert_eq!(names, vec!["good"]);
+ }
+}
diff --git a/codex-rs/core/src/lib.rs b/codex-rs/core/src/lib.rs
index ae18332087..9f23420c7e 100644
--- a/codex-rs/core/src/lib.rs
+++ b/codex-rs/core/src/lib.rs
@@ -17,6 +17,7 @@ pub mod config;
pub mod config_profile;
pub mod config_types;
mod conversation_history;
+pub mod custom_prompts;
mod environment_context;
pub mod error;
pub mod exec;
diff --git a/codex-rs/exec/src/event_processor_with_human_output.rs b/codex-rs/exec/src/event_processor_with_human_output.rs
index a3558f7407..6d1bc123fd 100644
--- a/codex-rs/exec/src/event_processor_with_human_output.rs
+++ b/codex-rs/exec/src/event_processor_with_human_output.rs
@@ -533,6 +533,9 @@ impl EventProcessor for EventProcessorWithHumanOutput {
EventMsg::McpListToolsResponse(_) => {
// Currently ignored in exec output.
}
+ EventMsg::ListCustomPromptsResponse(_) => {
+ // Currently ignored in exec output.
+ }
EventMsg::TurnAborted(abort_reason) => match abort_reason.reason {
TurnAbortReason::Interrupted => {
ts_println!(self, "task interrupted");
diff --git a/codex-rs/mcp-server/src/codex_tool_runner.rs b/codex-rs/mcp-server/src/codex_tool_runner.rs
index 3a26c26cd3..72cac591e4 100644
--- a/codex-rs/mcp-server/src/codex_tool_runner.rs
+++ b/codex-rs/mcp-server/src/codex_tool_runner.rs
@@ -264,6 +264,7 @@ async fn run_codex_tool_session_inner(
| EventMsg::McpToolCallBegin(_)
| EventMsg::McpToolCallEnd(_)
| EventMsg::McpListToolsResponse(_)
+ | EventMsg::ListCustomPromptsResponse(_)
| EventMsg::ExecCommandBegin(_)
| EventMsg::ExecCommandOutputDelta(_)
| EventMsg::ExecCommandEnd(_)
diff --git a/codex-rs/protocol/src/custom_prompts.rs b/codex-rs/protocol/src/custom_prompts.rs
new file mode 100644
index 0000000000..8567c1c944
--- /dev/null
+++ b/codex-rs/protocol/src/custom_prompts.rs
@@ -0,0 +1,10 @@
+use serde::Deserialize;
+use serde::Serialize;
+use std::path::PathBuf;
+
+#[derive(Serialize, Deserialize, Debug, Clone)]
+pub struct CustomPrompt {
+ pub name: String,
+ pub path: PathBuf,
+ pub content: String,
+}
diff --git a/codex-rs/protocol/src/lib.rs b/codex-rs/protocol/src/lib.rs
index d7aceeb9c8..d5e7c6bde1 100644
--- a/codex-rs/protocol/src/lib.rs
+++ b/codex-rs/protocol/src/lib.rs
@@ -1,4 +1,5 @@
pub mod config_types;
+pub mod custom_prompts;
pub mod mcp_protocol;
pub mod message_history;
pub mod models;
diff --git a/codex-rs/protocol/src/protocol.rs b/codex-rs/protocol/src/protocol.rs
index 7f317bbfba..8707e1a836 100644
--- a/codex-rs/protocol/src/protocol.rs
+++ b/codex-rs/protocol/src/protocol.rs
@@ -10,6 +10,7 @@ use std::path::PathBuf;
use std::str::FromStr;
use std::time::Duration;
+use crate::custom_prompts::CustomPrompt;
use mcp_types::CallToolResult;
use mcp_types::Tool as McpTool;
use serde::Deserialize;
@@ -146,6 +147,9 @@ pub enum Op {
/// Reply is delivered via `EventMsg::McpListToolsResponse`.
ListMcpTools,
+ /// Request the list of available custom prompts.
+ ListCustomPrompts,
+
/// Request the agent to summarize the current conversation context.
/// The agent will use its existing context (either conversation history or previous response id)
/// to generate a summary which will be returned as an AgentMessage event.
@@ -472,6 +476,9 @@ pub enum EventMsg {
/// List of MCP tools available to the agent.
McpListToolsResponse(McpListToolsResponseEvent),
+ /// List of custom prompts available to the agent.
+ ListCustomPromptsResponse(ListCustomPromptsResponseEvent),
+
PlanUpdate(UpdatePlanArgs),
TurnAborted(TurnAbortedEvent),
@@ -806,6 +813,12 @@ pub struct McpListToolsResponseEvent {
pub tools: std::collections::HashMap<String, McpTool>,
}
+/// Response payload for `Op::ListCustomPrompts`.
+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub struct ListCustomPromptsResponseEvent {
+ pub custom_prompts: Vec<CustomPrompt>,
+}
+
#[derive(Debug, Default, Clone, Deserialize, Serialize)]
pub struct SessionConfiguredEvent {
/// Unique id for this session.
diff --git a/codex-rs/tui/src/bottom_pane/chat_composer.rs b/codex-rs/tui/src/bottom_pane/chat_composer.rs
index c21d5a4b5d..49a3478582 100644
--- a/codex-rs/tui/src/bottom_pane/chat_composer.rs
+++ b/codex-rs/tui/src/bottom_pane/chat_composer.rs
@@ -22,11 +22,13 @@ use ratatui::widgets::StatefulWidgetRef;
use ratatui::widgets::WidgetRef;
use super::chat_composer_history::ChatComposerHistory;
+use super::command_popup::CommandItem;
use super::command_popup::CommandPopup;
use super::file_search_popup::FileSearchPopup;
use super::paste_burst::CharDecision;
use super::paste_burst::PasteBurst;
use crate::slash_command::SlashCommand;
+use codex_protocol::custom_prompts::CustomPrompt;
use crate::app_event::AppEvent;
use crate::app_event_sender::AppEventSender;
@@ -47,6 +49,7 @@ use std::time::Instant;
const LARGE_PASTE_CHAR_THRESHOLD: usize = 1000;
/// Result returned when the user interacts with the text area.
+#[derive(Debug, PartialEq)]
pub enum InputResult {
Submitted(String),
Command(SlashCommand),
@@ -94,6 +97,7 @@ pub(crate) struct ChatComposer {
paste_burst: PasteBurst,
// When true, disables paste-burst logic and inserts characters immediately.
disable_paste_burst: bool,
+ custom_prompts: Vec<CustomPrompt>,
}
/// Popup state – at most one can be visible at any time.
@@ -131,6 +135,7 @@ impl ChatComposer {
placeholder_text,
paste_burst: PasteBurst::default(),
disable_paste_burst: false,
+ custom_prompts: Vec::new(),
};
// Apply configuration via the setter to keep side-effects centralized.
this.set_disable_paste_burst(disable_paste_burst);
@@ -391,16 +396,27 @@ impl ChatComposer {
KeyEvent {
code: KeyCode::Tab, ..
} => {
- if let Some(cmd) = popup.selected_command() {
+ if let Some(sel) = popup.selected_item() {
let first_line = self.textarea.text().lines().next().unwrap_or("");
- let starts_with_cmd = first_line
- .trim_start()
- .starts_with(&format!("/{}", cmd.command()));
-
- if !starts_with_cmd {
- self.textarea.set_text(&format!("/{} ", cmd.command()));
- self.textarea.set_cursor(self.textarea.text().len());
+ match sel {
+ CommandItem::Builtin(cmd) => {
+ let starts_with_cmd = first_line
+ .trim_start()
+ .starts_with(&format!("/{}", cmd.command()));
+ if !starts_with_cmd {
+ self.textarea.set_text(&format!("/{} ", cmd.command()));
+ }
+ }
+ CommandItem::UserPrompt(idx) => {
+ if let Some(name) = popup.prompt_name(idx) {
+ let starts_with_cmd =
+ first_line.trim_start().starts_with(&format!("/{name}"));
+ if !starts_with_cmd {
+ self.textarea.set_text(&format!("/{name} "));
+ }
+ }
+ }
}
// After completing the command, move cursor to the end.
if !self.textarea.text().is_empty() {
@@ -415,16 +431,30 @@ impl ChatComposer {
modifiers: KeyModifiers::NONE,
..
} => {
- if let Some(cmd) = popup.selected_command() {
+ if let Some(sel) = popup.selected_item() {
// Clear textarea so no residual text remains.
self.textarea.set_text("");
-
- let result = (InputResult::Command(*cmd), true);
-
- // Hide popup since the command has been dispatched.
+ // Capture any needed data from popup before clearing it.
+ let prompt_content = match sel {
+ CommandItem::UserPrompt(idx) => {
+ popup.prompt_content(idx).map(|s| s.to_string())
+ }
+ _ => None,
+ };
+ // Hide popup since an action has been dispatched.
self.active_popup = ActivePopup::None;
- return result;
+ match sel {
+ CommandItem::Builtin(cmd) => {
+ return (InputResult::Command(cmd), true);
+ }
+ CommandItem::UserPrompt(_) => {
+ if let Some(contents) = prompt_content {
+ return (InputResult::Submitted(contents), true);
+ }
+ return (InputResult::None, true);
+ }
+ }
}
// Fallback to default newline handling if no command selected.
self.handle_key_event_without_popup(key_event)
@@ -1117,7 +1147,7 @@ impl ChatComposer {
}
_ => {
if input_starts_with_slash {
- let mut command_popup = CommandPopup::new();
+ let mut command_popup = CommandPopup::new(self.custom_prompts.clone());
command_popup.on_composer_text_change(first_line.to_string());
self.active_popup = ActivePopup::Command(command_popup);
}
@@ -1125,6 +1155,13 @@ impl ChatComposer {
}
}
+ pub(crate) fn set_custom_prompts(&mut self, prompts: Vec<CustomPrompt>) {
+ self.custom_prompts = prompts.clone();
+ if let ActivePopup::Command(popup) = &mut self.active_popup {
+ popup.set_prompts(prompts);
+ }
+ }
+
/// Synchronize `self.file_search_popup` with the current text in the textarea.
/// Note this is only called when self.active_popup is NOT Command.
fn sync_file_search_popup(&mut self) {
@@ -2098,6 +2135,38 @@ mod tests {
assert_eq!(imgs, vec![tmp_path.clone()]);
}
+ #[test]
+ fn selecting_custom_prompt_submits_file_contents() {
+ let prompt_text = "Hello from saved prompt";
+
+ let (tx, _rx) = unbounded_channel::<AppEvent>();
+ let sender = AppEventSender::new(tx);
+ let mut composer = ChatComposer::new(
+ true,
+ sender,
+ false,
+ "Ask Codex to do anything".to_string(),
+ false,
+ );
+
+ // Inject prompts as if received via event.
+ composer.set_custom_prompts(vec![CustomPrompt {
+ name: "my-prompt".to_string(),
+ path: "/tmp/my-prompt.md".to_string().into(),
+ content: prompt_text.to_string(),
+ }]);
+
+ type_chars_humanlike(
+ &mut composer,
+ &['/', 'm', 'y', '-', 'p', 'r', 'o', 'm', 'p', 't'],
+ );
+
+ let (result, _needs_redraw) =
+ composer.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
+
+ assert_eq!(InputResult::Submitted(prompt_text.to_string()), result);
+ }
+
#[test]
fn burst_paste_fast_small_buffers_and_flushes_on_stop() {
use crossterm::event::KeyCode;
diff --git a/codex-rs/tui/src/bottom_pane/command_popup.rs b/codex-rs/tui/src/bottom_pane/command_popup.rs
index 9ae7ada81a..4e60e7eaa1 100644
--- a/codex-rs/tui/src/bottom_pane/command_popup.rs
+++ b/codex-rs/tui/src/bottom_pane/command_popup.rs
@@ -9,22 +9,58 @@ use super::selection_popup_common::render_rows;
use crate::slash_command::SlashCommand;
use crate::slash_command::built_in_slash_commands;
use codex_common::fuzzy_match::fuzzy_match;
+use codex_protocol::custom_prompts::CustomPrompt;
+use std::collections::HashSet;
+
+/// A selectable item in the popup: either a built-in command or a user prompt.
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub(crate) enum CommandItem {
+ Builtin(SlashCommand),
+ // Index into `prompts`
+ UserPrompt(usize),
+}
pub(crate) struct CommandPopup {
command_filter: String,
- all_commands: Vec<(&'static str, SlashCommand)>,
+ builtins: Vec<(&'static str, SlashCommand)>,
+ prompts: Vec<CustomPrompt>,
state: ScrollState,
}
impl CommandPopup {
- pub(crate) fn new() -> Self {
+ pub(crate) fn new(mut prompts: Vec<CustomPrompt>) -> Self {
+ let builtins = built_in_slash_commands();
+ // Exclude prompts that collide with builtin command names and sort by name.
+ let exclude: HashSet<String> = builtins.iter().map(|(n, _)| (*n).to_string()).collect();
+ prompts.retain(|p| !exclude.contains(&p.name));
+ prompts.sort_by(|a, b| a.name.cmp(&b.name));
Self {
command_filter: String::new(),
- all_commands: built_in_slash_commands(),
+ builtins,
+ prompts,
state: ScrollState::new(),
}
}
+ pub(crate) fn set_prompts(&mut self, mut prompts: Vec<CustomPrompt>) {
+ let exclude: HashSet<String> = self
+ .builtins
+ .iter()
+ .map(|(n, _)| (*n).to_string())
+ .collect();
+ prompts.retain(|p| !exclude.contains(&p.name));
+ prompts.sort_by(|a, b| a.name.cmp(&b.name));
+ self.prompts = prompts;
+ }
+
+ pub(crate) fn prompt_name(&self, idx: usize) -> Option<&str> {
+ self.prompts.get(idx).map(|p| p.name.as_str())
+ }
+
+ pub(crate) fn prompt_content(&self, idx: usize) -> Option<&str> {
+ self.prompts.get(idx).map(|p| p.content.as_str())
+ }
+
/// Update the filter string based on the current composer text. The text
/// passed in is expected to start with a leading '/'. Everything after the
/// *first* '/" on the *first* line becomes the active filter that is used
@@ -50,7 +86,7 @@ impl CommandPopup {
}
// Reset or clamp selected index based on new filtered list.
- let matches_len = self.filtered_commands().len();
+ let matches_len = self.filtered_items().len();
self.state.clamp_selection(matches_len);
self.state
.ensure_visible(matches_len, MAX_POPUP_ROWS.min(matches_len));
@@ -59,56 +95,76 @@ impl CommandPopup {
/// Determine the preferred height of the popup. This is the number of
/// rows required to show at most MAX_POPUP_ROWS commands.
pub(crate) fn calculate_required_height(&self) -> u16 {
- self.filtered_commands().len().clamp(1, MAX_POPUP_ROWS) as u16
+ self.filtered_items().len().clamp(1, MAX_POPUP_ROWS) as u16
}
- /// Compute fuzzy-filtered matches paired with optional highlight indices and score.
- /// Sorted by ascending score, then by command name for stability.
- fn filtered(&self) -> Vec<(&SlashCommand, Option<Vec<usize>>, i32)> {
+ /// Compute fuzzy-filtered matches over built-in commands and user prompts,
+ /// paired with optional highlight indices and score. Sorted by ascending
+ /// score, then by name for stability.
+ fn filtered(&self) -> Vec<(CommandItem, Option<Vec<usize>>, i32)> {
let filter = self.command_filter.trim();
- let mut out: Vec<(&SlashCommand, Option<Vec<usize>>, i32)> = Vec::new();
+ let mut out: Vec<(CommandItem, Option<Vec<usize>>, i32)> = Vec::new();
if filter.is_empty() {
- for (_, cmd) in self.all_commands.iter() {
- out.push((cmd, None, 0));
+ // Built-ins first, in presentation order.
+ for (_, cmd) in self.builtins.iter() {
+ out.push((CommandItem::Builtin(*cmd), None, 0));
+ }
+ // Then prompts, already sorted by name.
+ for idx in 0..self.prompts.len() {
+ out.push((CommandItem::UserPrompt(idx), None, 0));
}
- // Keep the original presentation order when no filter is applied.
return out;
- } else {
- for (_, cmd) in self.all_commands.iter() {
- if let Some((indices, score)) = fuzzy_match(cmd.command(), filter) {
- out.push((cmd, Some(indices), score));
- }
+ }
+
+ for (_, cmd) in self.builtins.iter() {
+ if let Some((indices, score)) = fuzzy_match(cmd.command(), filter) {
+ out.push((CommandItem::Builtin(*cmd), Some(indices), score));
+ }
+ }
+ for (idx, p) in self.prompts.iter().enumerate() {
+ if let Some((indices, score)) = fuzzy_match(&p.name, filter) {
+ out.push((CommandItem::UserPrompt(idx), Some(indices), score));
}
}
- // When filtering, sort by ascending score and then by command for stability.
- out.sort_by(|a, b| a.2.cmp(&b.2).then_with(|| a.0.command().cmp(b.0.command())));
+ // When filtering, sort by ascending score and then by name for stability.
+ out.sort_by(|a, b| {
+ a.2.cmp(&b.2).then_with(|| {
+ let an = match a.0 {
+ CommandItem::Builtin(c) => c.command(),
+ CommandItem::UserPrompt(i) => &self.prompts[i].name,
+ };
+ let bn = match b.0 {
+ CommandItem::Builtin(c) => c.command(),
+ CommandItem::UserPrompt(i) => &self.prompts[i].name,
+ };
+ an.cmp(bn)
+ })
+ });
out
}
- fn filtered_commands(&self) -> Vec<&SlashCommand> {
+ fn filtered_items(&self) -> Vec<CommandItem> {
self.filtered().into_iter().map(|(c, _, _)| c).collect()
}
/// Move the selection cursor one step up.
pub(crate) fn move_up(&mut self) {
- let matches = self.filtered_commands();
- let len = matches.len();
+ let len = self.filtered_items().len();
self.state.move_up_wrap(len);
self.state.ensure_visible(len, MAX_POPUP_ROWS.min(len));
}
/// Move the selection cursor one step down.
pub(crate) fn move_down(&mut self) {
- let matches = self.filtered_commands();
- let matches_len = matches.len();
+ let matches_len = self.filtered_items().len();
self.state.move_down_wrap(matches_len);
self.state
.ensure_visible(matches_len, MAX_POPUP_ROWS.min(matches_len));
}
/// Return currently selected command, if any.
- pub(crate) fn selected_command(&self) -> Option<&SlashCommand> {
- let matches = self.filtered_commands();
+ pub(crate) fn selected_item(&self) -> Option<CommandItem> {
+ let matches = self.filtered_items();
self.state
.selected_idx
.and_then(|idx| matches.get(idx).copied())
@@ -123,11 +179,19 @@ impl WidgetRef for CommandPopup {
} else {
matches
.into_iter()
- .map(|(cmd, indices, _)| GenericDisplayRow {
- name: format!("/{}", cmd.command()),
- match_indices: indices.map(|v| v.into_iter().map(|i| i + 1).collect()),
- is_current: false,
- description: Some(cmd.description().to_string()),
+ .map(|(item, indices, _)| match item {
+ CommandItem::Builtin(cmd) => GenericDisplayRow {
+ name: format!("/{}", cmd.command()),
+ match_indices: indices.map(|v| v.into_iter().map(|i| i + 1).collect()),
+ is_current: false,
+ description: Some(cmd.description().to_string()),
+ },
+ CommandItem::UserPrompt(i) => GenericDisplayRow {
+ name: format!("/{}", self.prompts[i].name),
+ match_indices: indices.map(|v| v.into_iter().map(|i| i + 1).collect()),
+ is_current: false,
+ description: Some("send saved prompt".to_string()),
+ },
})
.collect()
};
@@ -141,31 +205,82 @@ mod tests {
#[test]
fn filter_includes_init_when_typing_prefix() {
- let mut popup = CommandPopup::new();
+ let mut popup = CommandPopup::new(Vec::new());
// Simulate the composer line starting with '/in' so the popup filters
// matching commands by prefix.
popup.on_composer_text_change("/in".to_string());
// Access the filtered list via the selected command and ensure that
// one of the matches is the new "init" command.
- let matches = popup.filtered_commands();
+ let matches = popup.filtered_items();
+ let has_init = matches.iter().any(|item| match item {
+ CommandItem::Builtin(cmd) => cmd.command() == "init",
+ CommandItem::UserPrompt(_) => false,
+ });
assert!(
- matches.iter().any(|cmd| cmd.command() == "init"),
+ has_init,
"expected '/init' to appear among filtered commands"
);
}
#[test]
fn selecting_init_by_exact_match() {
- let mut popup = CommandPopup::new();
+ let mut popup = CommandPopup::new(Vec::new());
popup.on_composer_text_change("/init".to_string());
// When an exact match exists, the selected command should be that
// command by default.
- let selected = popup.selected_command();
+ let selected = popup.selected_item();
match selected {
- Some(cmd) => assert_eq!(cmd.command(), "init"),
+ Some(CommandItem::Builtin(cmd)) => assert_eq!(cmd.command(), "init"),
+ Some(CommandItem::UserPrompt(_)) => panic!("unexpected prompt selected for '/init'"),
None => panic!("expected a selected command for exact match"),
}
}
+
+ #[test]
+ fn prompt_discovery_lists_custom_prompts() {
+ let prompts = vec![
+ CustomPrompt {
+ name: "foo".to_string(),
+ path: "/tmp/foo.md".to_string().into(),
+ content: "hello from foo".to_string(),
+ },
+ CustomPrompt {
+ name: "bar".to_string(),
+ path: "/tmp/bar.md".to_string().into(),
+ content: "hello from bar".to_string(),
+ },
+ ];
+ let popup = CommandPopup::new(prompts);
+ let items = popup.filtered_items();
+ let mut prompt_names: Vec<String> = items
+ .into_iter()
+ .filter_map(|it| match it {
+ CommandItem::UserPrompt(i) => popup.prompt_name(i).map(|s| s.to_string()),
+ _ => None,
+ })
+ .collect();
+ prompt_names.sort();
+ assert_eq!(prompt_names, vec!["bar".to_string(), "foo".to_string()]);
+ }
+
+ #[test]
+ fn prompt_name_collision_with_builtin_is_ignored() {
+ // Create a prompt named like a builtin (e.g. "init").
+ let popup = CommandPopup::new(vec![CustomPrompt {
+ name: "init".to_string(),
+ path: "/tmp/init.md".to_string().into(),
+ content: "should be ignored".to_string(),
+ }]);
+ let items = popup.filtered_items();
+ let has_collision_prompt = items.into_iter().any(|it| match it {
+ CommandItem::UserPrompt(i) => popup.prompt_name(i) == Some("init"),
+ _ => false,
+ });
+ assert!(
+ !has_collision_prompt,
+ "prompt with builtin name should be ignored"
+ );
+ }
}
diff --git a/codex-rs/tui/src/bottom_pane/mod.rs b/codex-rs/tui/src/bottom_pane/mod.rs
index c1e84beeef..b94db79f1c 100644
--- a/codex-rs/tui/src/bottom_pane/mod.rs
+++ b/codex-rs/tui/src/bottom_pane/mod.rs
@@ -36,6 +36,7 @@ pub(crate) enum CancellationEvent {
pub(crate) use chat_composer::ChatComposer;
pub(crate) use chat_composer::InputResult;
+use codex_protocol::custom_prompts::CustomPrompt;
use crate::status_indicator_widget::StatusIndicatorWidget;
use approval_modal_view::ApprovalModalView;
@@ -336,6 +337,12 @@ impl BottomPane {
self.request_redraw();
}
+ /// Update custom prompts available for the slash popup.
+ pub(crate) fn set_custom_prompts(&mut self, prompts: Vec<CustomPrompt>) {
+ self.composer.set_custom_prompts(prompts);
+ self.request_redraw();
+ }
+
pub(crate) fn composer_is_empty(&self) -> bool {
self.composer.is_empty()
}
diff --git a/codex-rs/tui/src/chatwidget.rs b/codex-rs/tui/src/chatwidget.rs
index 5e1fd45fc6..2692854590 100644
--- a/codex-rs/tui/src/chatwidget.rs
+++ b/codex-rs/tui/src/chatwidget.rs
@@ -19,6 +19,7 @@ use codex_core::protocol::ExecApprovalRequestEvent;
use codex_core::protocol::ExecCommandBeginEvent;
use codex_core::protocol::ExecCommandEndEvent;
use codex_core::protocol::InputItem;
+use codex_core::protocol::ListCustomPromptsResponseEvent;
use codex_core::protocol::McpListToolsResponseEvent;
use codex_core::protocol::McpToolCallBeginEvent;
use codex_core::protocol::McpToolCallEndEvent;
@@ -153,6 +154,8 @@ impl ChatWidget {
event,
self.show_welcome_banner,
));
+ // Ask codex-core to enumerate custom prompts for this session.
+ self.submit_op(Op::ListCustomPrompts);
if let Some(user_message) = self.initial_user_message.take() {
self.submit_user_message(user_message);
}
@@ -991,6 +994,7 @@ impl ChatWidget {
EventMsg::WebSearchBegin(ev) => self.on_web_search_begin(ev),
EventMsg::GetHistoryEntryResponse(ev) => self.on_get_history_entry_response(ev),
EventMsg::McpListToolsResponse(ev) => self.on_list_mcp_tools(ev),
+ EventMsg::ListCustomPromptsResponse(ev) => self.on_list_custom_prompts(ev),
EventMsg::ShutdownComplete => self.on_shutdown_complete(),
EventMsg::TurnDiff(TurnDiffEvent { unified_diff }) => self.on_turn_diff(unified_diff),
EventMsg::BackgroundEvent(BackgroundEventEvent { message }) => {
@@ -1220,6 +1224,13 @@ impl ChatWidget {
self.add_to_history(history_cell::new_mcp_tools_output(&self.config, ev.tools));
}
+ fn on_list_custom_prompts(&mut self, ev: ListCustomPromptsResponseEvent) {
+ let len = ev.custom_prompts.len();
+ debug!("received {len} custom prompts");
+ // Forward to bottom pane so the slash popup can show them now.
+ self.bottom_pane.set_custom_prompts(ev.custom_prompts);
+ }
+
/// Programmatically submit a user text message as if typed in the
/// composer. The text will be added to conversation history and sent to
/// the agent.
diff --git a/docs/prompts.md b/docs/prompts.md
new file mode 100644
index 0000000000..b98240d2ad
--- /dev/null
+++ b/docs/prompts.md
@@ -0,0 +1,15 @@
+## Custom Prompts
+
+Save frequently used prompts as Markdown files and reuse them quickly from the slash menu.
+
+- Location: Put files in `$CODEX_HOME/prompts/` (defaults to `~/.codex/prompts/`).
+- File type: Only Markdown files with the `.md` extension are recognized.
+- Name: The filename without the `.md` extension becomes the slash entry. For a file named `my-prompt.md`, type `/my-prompt`.
+- Content: The file contents are sent as your message when you select the item in the slash popup and press Enter.
+- How to use:
+ - Start a new session (Codex loads custom prompts on session start).
+ - In the composer, type `/` to open the slash popup and begin typing your prompt name.
+ - Use Up/Down to select it. Press Enter to submit its contents, or Tab to autocomplete the name.
+- Notes:
+ - Files with names that collide with built‑in commands (e.g. `/init`) are ignored and won’t appear.
+ - New or changed files are discovered on session start. If you add a new prompt while Codex is running, start a new session to pick it up.
Review Comments
codex-rs/core/src/codex.rs
- Created: 2025-08-28 21:34:03 UTC | Link: https://github.com/openai/codex/pull/2696#discussion_r2308602762
@@ -1286,6 +1287,31 @@ async fn submission_loop(
warn!("failed to send McpListToolsResponse event: {e}");
}
}
+ Op::ListCustomPrompts => {
+ let tx_event = sess.tx_event.clone();
+ let sub_id = sub.id.clone();
+
+ // Discover prompts under the default prompts dir (includes content).
+ let custom_prompts: Vec<CustomPrompt> =
+ tokio::task::spawn_blocking(
+ || match crate::custom_prompts::default_prompts_dir() {
+ Some(dir) => crate::custom_prompts::discover_prompts_in(&dir),
+ None => Vec::new(),
+ },
+ )
+ .await
+ .unwrap_or_default();
I was surprised to see the use of `tokio::task::spawn_blocking()`. I see we depend on `find_codex_home()`, which I guess technically does a small amount of blocking I/O. I don't know that it merits scheduling this on a special thread pool, though...
6209d49520/codex-rs/core/src/config.rs (L887-L904)
I guess we could look into making `find_codex_home()` async in a follow-up PR...
- Created: 2025-08-28 21:37:57 UTC | Link: https://github.com/openai/codex/pull/2696#discussion_r2308608340
@@ -1286,6 +1287,31 @@ async fn submission_loop(
warn!("failed to send McpListToolsResponse event: {e}");
}
}
+ Op::ListCustomPrompts => {
+ let tx_event = sess.tx_event.clone();
+ let sub_id = sub.id.clone();
+
+ // Discover prompts under the default prompts dir (includes content).
+ let custom_prompts: Vec<CustomPrompt> =
+ tokio::task::spawn_blocking(
+ || match crate::custom_prompts::default_prompts_dir() {
+ Some(dir) => crate::custom_prompts::discover_prompts_in(&dir),
+ None => Vec::new(),
+ },
+ )
+ .await
+ .unwrap_or_default();
+
+ let event = Event {
+ id: sub_id,
+ msg: EventMsg::ListCustomPromptsResponse(
+ crate::protocol::ListCustomPromptsResponseEvent { custom_prompts },
I would import above so you can just do:
ListCustomPromptsResponseEvent { custom_prompts },
codex-rs/core/src/custom_prompts.rs
- Created: 2025-08-28 21:40:18 UTC | Link: https://github.com/openai/codex/pull/2696#discussion_r2308611377
@@ -0,0 +1,95 @@
+use codex_protocol::custom_prompts::CustomPrompt;
+use std::collections::HashSet;
+use std::path::Path;
+use std::path::PathBuf;
+
+/// Return the default prompts directory: `$CODEX_HOME/prompts`.
+/// If `CODEX_HOME` cannot be resolved, returns `None`.
+pub fn default_prompts_dir() -> Option<PathBuf> {
+ crate::config::find_codex_home()
+ .ok()
+ .map(|home| home.join("prompts"))
+}
+
+/// Discover prompt files in the given directory, returning entries sorted by name.
+/// Non-files are ignored. If the directory does not exist or cannot be read, returns empty.
+pub fn discover_prompts_in(dir: &Path) -> Vec<CustomPrompt> {
+ discover_prompts_in_excluding(dir, &HashSet::new())
+}
+
+/// Discover prompt files in the given directory, excluding any with names in `exclude`.
+/// Returns entries sorted by name. Non-files are ignored. Missing/unreadable dir yields empty.
+pub fn discover_prompts_in_excluding(dir: &Path, exclude: &HashSet<String>) -> Vec<CustomPrompt> {
Prefer `tokio::fs` and async operations.
- Created: 2025-08-28 21:41:32 UTC | Link: https://github.com/openai/codex/pull/2696#discussion_r2308612920
@@ -0,0 +1,95 @@
+use codex_protocol::custom_prompts::CustomPrompt;
+use std::collections::HashSet;
+use std::path::Path;
+use std::path::PathBuf;
+
+/// Return the default prompts directory: `$CODEX_HOME/prompts`.
+/// If `CODEX_HOME` cannot be resolved, returns `None`.
+pub fn default_prompts_dir() -> Option<PathBuf> {
+ crate::config::find_codex_home()
+ .ok()
+ .map(|home| home.join("prompts"))
+}
+
+/// Discover prompt files in the given directory, returning entries sorted by name.
+/// Non-files are ignored. If the directory does not exist or cannot be read, returns empty.
+pub fn discover_prompts_in(dir: &Path) -> Vec<CustomPrompt> {
+ discover_prompts_in_excluding(dir, &HashSet::new())
+}
+
+/// Discover prompt files in the given directory, excluding any with names in `exclude`.
+/// Returns entries sorted by name. Non-files are ignored. Missing/unreadable dir yields empty.
+pub fn discover_prompts_in_excluding(dir: &Path, exclude: &HashSet<String>) -> Vec<CustomPrompt> {
+ let mut out: Vec<CustomPrompt> = Vec::new();
+ if let Ok(entries) = std::fs::read_dir(dir) {
+ for entry in entries.flatten() {
+ let path = entry.path();
+ if !path.is_file() {
+ continue;
+ }
+ // Only include Markdown files with a .md extension.
+ let is_md = path
+ .extension()
+ .and_then(|s| s.to_str())
+ .map(|ext| ext.eq_ignore_ascii_case("md"))
+ .unwrap_or(false);
+ if !is_md {
+ continue;
+ }
+ let Some(name) = path
+ .file_stem()
+ .and_then(|s| s.to_str())
+ .map(|s| s.to_string())
+ else {
+ continue;
+ };
+ if exclude.contains(&name) {
+ continue;
+ }
+ let content = std::fs::read_to_string(&path).unwrap_or_default();
+ out.push(CustomPrompt { name, content });
+ }
+ out.sort_by(|a, b| a.name.cmp(&b.name));
+ }
+ out
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use std::fs;
+ use tempfile::tempdir;
+
+ #[test]
This will likely end up being `#[tokio::test]`.
codex-rs/protocol/src/custom_prompts.rs
- Created: 2025-08-28 21:42:46 UTC | Link: https://github.com/openai/codex/pull/2696#discussion_r2308614620
@@ -0,0 +1,8 @@
+use serde::Deserialize;
+use serde::Serialize;
+
+#[derive(Serialize, Deserialize, Debug, Clone)]
+pub struct CustomPrompt {
+ pub name: String,
+ pub content: String,
Maybe send the `Path` back now, as well, so it's easier to add support for opening the custom prompt's file from the client going forward.
- Created: 2025-08-29 01:55:05 UTC | Link: https://github.com/openai/codex/pull/2696#discussion_r2308928151
@@ -0,0 +1,9 @@
+use serde::Deserialize;
+use serde::Serialize;
+
+#[derive(Serialize, Deserialize, Debug, Clone)]
+pub struct CustomPrompt {
+ pub name: String,
+ pub path: String,
Please use `PathBuf` rather than `String`.
codex-rs/tui/src/bottom_pane/chat_composer.rs
- Created: 2025-08-28 21:44:42 UTC | Link: https://github.com/openai/codex/pull/2696#discussion_r2308618124
@@ -2098,6 +2134,40 @@ mod tests {
assert_eq!(imgs, vec![tmp_path.clone()]);
}
+ #[test]
+ fn selecting_custom_prompt_submits_file_contents() {
+ let prompt_text = "Hello from saved prompt";
+
+ let (tx, _rx) = unbounded_channel::<AppEvent>();
+ let sender = AppEventSender::new(tx);
+ let mut composer = ChatComposer::new(
+ true,
+ sender,
+ false,
+ "Ask Codex to do anything".to_string(),
+ false,
+ );
Maybe it's time to introduce `struct ChatComposerParams` in a follow-up...
- Created: 2025-08-28 21:45:43 UTC | Link: https://github.com/openai/codex/pull/2696#discussion_r2308619668
@@ -2098,6 +2134,40 @@ mod tests {
assert_eq!(imgs, vec![tmp_path.clone()]);
}
+ #[test]
+ fn selecting_custom_prompt_submits_file_contents() {
+ let prompt_text = "Hello from saved prompt";
+
+ let (tx, _rx) = unbounded_channel::<AppEvent>();
+ let sender = AppEventSender::new(tx);
+ let mut composer = ChatComposer::new(
+ true,
+ sender,
+ false,
+ "Ask Codex to do anything".to_string(),
+ false,
+ );
+
+ // Inject prompts as if received via event.
+ composer.set_custom_prompts(vec![CustomPrompt {
+ name: "my-prompt".to_string(),
+ content: prompt_text.to_string(),
+ }]);
+
+ type_chars_humanlike(
+ &mut composer,
+ &['/', 'm', 'y', '-', 'p', 'r', 'o', 'm', 'p', 't'],
+ );
+
+ let (result, _needs_redraw) =
+ composer.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
+
+ match result {
+ InputResult::Submitted(s) => assert_eq!(s, prompt_text),
+ _ => panic!("expected Submitted with prompt contents"),
+ }
one line?
assert_eq!(InputResult::Submitted(prompt_text), result);
codex-rs/tui/src/bottom_pane/command_popup.rs
- Created: 2025-08-28 21:46:22 UTC | Link: https://github.com/openai/codex/pull/2696#discussion_r2308620675
@@ -9,22 +9,58 @@ use super::selection_popup_common::render_rows;
use crate::slash_command::SlashCommand;
use crate::slash_command::built_in_slash_commands;
use codex_common::fuzzy_match::fuzzy_match;
+use codex_protocol::custom_prompts::CustomPrompt;
+
+/// A selectable item in the popup: either a built-in command or a user prompt.
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub(crate) enum CommandItem {
+ Builtin(SlashCommand),
+ // Index into `prompts`
+ Prompt(usize),
Can we name this `UserPrompt` instead of just `Prompt`, because `Prompt` can mean a lot of things in this codebase.
- Created: 2025-08-28 21:46:47 UTC | Link: https://github.com/openai/codex/pull/2696#discussion_r2308621244
@@ -9,22 +9,58 @@ use super::selection_popup_common::render_rows;
use crate::slash_command::SlashCommand;
use crate::slash_command::built_in_slash_commands;
use codex_common::fuzzy_match::fuzzy_match;
+use codex_protocol::custom_prompts::CustomPrompt;
+
+/// A selectable item in the popup: either a built-in command or a user prompt.
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub(crate) enum CommandItem {
+ Builtin(SlashCommand),
+ // Index into `prompts`
+ Prompt(usize),
+}
pub(crate) struct CommandPopup {
command_filter: String,
- all_commands: Vec<(&'static str, SlashCommand)>,
+ builtins: Vec<(&'static str, SlashCommand)>,
+ prompts: Vec<CustomPrompt>,
state: ScrollState,
}
impl CommandPopup {
- pub(crate) fn new() -> Self {
+ pub(crate) fn new(mut prompts: Vec<CustomPrompt>) -> Self {
+ let builtins = built_in_slash_commands();
+ // Exclude prompts that collide with builtin command names and sort by name.
+ let exclude: std::collections::HashSet<String> =
Use `HashSet` above?
- Created: 2025-08-28 21:47:00 UTC | Link: https://github.com/openai/codex/pull/2696#discussion_r2308621589
@@ -9,22 +9,58 @@ use super::selection_popup_common::render_rows;
use crate::slash_command::SlashCommand;
use crate::slash_command::built_in_slash_commands;
use codex_common::fuzzy_match::fuzzy_match;
+use codex_protocol::custom_prompts::CustomPrompt;
+
+/// A selectable item in the popup: either a built-in command or a user prompt.
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub(crate) enum CommandItem {
+ Builtin(SlashCommand),
+ // Index into `prompts`
+ Prompt(usize),
+}
pub(crate) struct CommandPopup {
command_filter: String,
- all_commands: Vec<(&'static str, SlashCommand)>,
+ builtins: Vec<(&'static str, SlashCommand)>,
+ prompts: Vec<CustomPrompt>,
state: ScrollState,
}
impl CommandPopup {
- pub(crate) fn new() -> Self {
+ pub(crate) fn new(mut prompts: Vec<CustomPrompt>) -> Self {
+ let builtins = built_in_slash_commands();
+ // Exclude prompts that collide with builtin command names and sort by name.
+ let exclude: std::collections::HashSet<String> =
+ builtins.iter().map(|(n, _)| (*n).to_string()).collect();
+ prompts.retain(|p| !exclude.contains(&p.name));
+ prompts.sort_by(|a, b| a.name.cmp(&b.name));
Self {
command_filter: String::new(),
- all_commands: built_in_slash_commands(),
+ builtins,
+ prompts,
state: ScrollState::new(),
}
}
+ pub(crate) fn set_prompts(&mut self, mut prompts: Vec<CustomPrompt>) {
+ let exclude: std::collections::HashSet<String> = self
here too
- Created: 2025-08-28 21:50:17 UTC | Link: https://github.com/openai/codex/pull/2696#discussion_r2308626116
@@ -59,56 +95,73 @@ impl CommandPopup {
/// Determine the preferred height of the popup. This is the number of
/// rows required to show at most MAX_POPUP_ROWS commands.
pub(crate) fn calculate_required_height(&self) -> u16 {
- self.filtered_commands().len().clamp(1, MAX_POPUP_ROWS) as u16
+ self.filtered_items().len().clamp(1, MAX_POPUP_ROWS) as u16
}
/// Compute fuzzy-filtered matches paired with optional highlight indices and score.
/// Sorted by ascending score, then by command name for stability.
- fn filtered(&self) -> Vec<(&SlashCommand, Option<Vec<usize>>, i32)> {
+ fn filtered(&self) -> Vec<(CommandItem, Option<Vec<usize>>, i32)> {
Does the docstring need to be updated? Make it clear that user prompts and built-in commands are mixed together? That we don't show built-in commands first (score is the dominating thing?)
- Created: 2025-08-28 21:51:32 UTC | Link: https://github.com/openai/codex/pull/2696#discussion_r2308627858
@@ -59,56 +95,73 @@ impl CommandPopup {
/// Determine the preferred height of the popup. This is the number of
/// rows required to show at most MAX_POPUP_ROWS commands.
pub(crate) fn calculate_required_height(&self) -> u16 {
- self.filtered_commands().len().clamp(1, MAX_POPUP_ROWS) as u16
+ self.filtered_items().len().clamp(1, MAX_POPUP_ROWS) as u16
}
/// Compute fuzzy-filtered matches paired with optional highlight indices and score.
/// Sorted by ascending score, then by command name for stability.
- fn filtered(&self) -> Vec<(&SlashCommand, Option<Vec<usize>>, i32)> {
+ fn filtered(&self) -> Vec<(CommandItem, Option<Vec<usize>>, i32)> {
let filter = self.command_filter.trim();
- let mut out: Vec<(&SlashCommand, Option<Vec<usize>>, i32)> = Vec::new();
+ let mut out: Vec<(CommandItem, Option<Vec<usize>>, i32)> = Vec::new();
if filter.is_empty() {
- for (_, cmd) in self.all_commands.iter() {
- out.push((cmd, None, 0));
+ // Built-ins first, in presentation order.
+ for (_, cmd) in self.builtins.iter() {
+ out.push((CommandItem::Builtin(*cmd), None, 0));
+ }
+ // Then prompts, already sorted by name.
+ for idx in 0..self.prompts.len() {
+ out.push((CommandItem::Prompt(idx), None, 0));
}
- // Keep the original presentation order when no filter is applied.
return out;
- } else {
- for (_, cmd) in self.all_commands.iter() {
- if let Some((indices, score)) = fuzzy_match(cmd.command(), filter) {
- out.push((cmd, Some(indices), score));
- }
+ }
+
+ for (_, cmd) in self.builtins.iter() {
+ if let Some((indices, score)) = fuzzy_match(cmd.command(), filter) {
+ out.push((CommandItem::Builtin(*cmd), Some(indices), score));
+ }
+ }
+ for (idx, p) in self.prompts.iter().enumerate() {
+ if let Some((indices, score)) = fuzzy_match(&p.name, filter) {
+ out.push((CommandItem::Prompt(idx), Some(indices), score));
}
}
- // When filtering, sort by ascending score and then by command for stability.
- out.sort_by(|a, b| a.2.cmp(&b.2).then_with(|| a.0.command().cmp(b.0.command())));
+ // When filtering, sort by ascending score and then by name for stability.
+ out.sort_by(|a, b| {
+ let an = match a.0 {
+ CommandItem::Builtin(c) => c.command(),
+ CommandItem::Prompt(i) => &self.prompts[i].name,
+ };
+ let bn = match b.0 {
+ CommandItem::Builtin(c) => c.command(),
+ CommandItem::Prompt(i) => &self.prompts[i].name,
+ };
+ a.2.cmp(&b.2).then_with(|| an.cmp(bn))
Compute `an` and `bn` only if necessary:

```rust
a.2.cmp(&b.2).then_with(|| {
    let an = match a.0 {
        CommandItem::Builtin(c) => c.command(),
        CommandItem::Prompt(i) => &self.prompts[i].name,
    };
    let bn = match b.0 {
        CommandItem::Builtin(c) => c.command(),
        CommandItem::Prompt(i) => &self.prompts[i].name,
    };
    an.cmp(bn)
})
```
- Created: 2025-08-28 21:51:59 UTC | Link: https://github.com/openai/codex/pull/2696#discussion_r2308628546
@@ -123,11 +176,19 @@ impl WidgetRef for CommandPopup {
} else {
matches
.into_iter()
- .map(|(cmd, indices, _)| GenericDisplayRow {
- name: format!("/{}", cmd.command()),
- match_indices: indices.map(|v| v.into_iter().map(|i| i + 1).collect()),
- is_current: false,
- description: Some(cmd.description().to_string()),
+ .map(|(item, indices, _)| match item {
+ CommandItem::Builtin(cmd) => GenericDisplayRow {
+ name: format!("/{}", cmd.command()),
+ match_indices: indices.map(|v| v.into_iter().map(|i| i + 1).collect()),
+ is_current: false,
+ description: Some(cmd.description().to_string()),
+ },
+ CommandItem::Prompt(i) => GenericDisplayRow {
+ name: format!("/{}", self.prompts[i].name),
+ match_indices: indices.map(|v| v.into_iter().map(|i| i + 1).collect()),
+ is_current: false,
+ description: Some("send saved prompt".to_string()),
Why don't we take the first line of the prompt?
- Created: 2025-08-28 21:53:37 UTC | Link: https://github.com/openai/codex/pull/2696#discussion_r2308631536
@@ -141,31 +202,79 @@ mod tests {
#[test]
fn filter_includes_init_when_typing_prefix() {
- let mut popup = CommandPopup::new();
+ let mut popup = CommandPopup::new(Vec::new());
// Simulate the composer line starting with '/in' so the popup filters
// matching commands by prefix.
popup.on_composer_text_change("/in".to_string());
// Access the filtered list via the selected command and ensure that
// one of the matches is the new "init" command.
- let matches = popup.filtered_commands();
+ let matches = popup.filtered_items();
+ let has_init = matches.iter().any(|item| match item {
+ CommandItem::Builtin(cmd) => cmd.command() == "init",
+ CommandItem::Prompt(_) => false,
+ });
assert!(
- matches.iter().any(|cmd| cmd.command() == "init"),
+ has_init,
"expected '/init' to appear among filtered commands"
);
}
#[test]
fn selecting_init_by_exact_match() {
- let mut popup = CommandPopup::new();
+ let mut popup = CommandPopup::new(Vec::new());
popup.on_composer_text_change("/init".to_string());
// When an exact match exists, the selected command should be that
// command by default.
- let selected = popup.selected_command();
+ let selected = popup.selected_item();
match selected {
- Some(cmd) => assert_eq!(cmd.command(), "init"),
+ Some(CommandItem::Builtin(cmd)) => assert_eq!(cmd.command(), "init"),
+ Some(CommandItem::Prompt(_)) => panic!("unexpected prompt selected for '/init'"),
None => panic!("expected a selected command for exact match"),
}
}
+
+ #[test]
+ fn prompt_discovery_lists_custom_prompts() {
+ let prompts = vec![
+ CustomPrompt {
+ name: "foo".to_string(),
+ content: "hello from foo".to_string(),
+ },
+ CustomPrompt {
+ name: "bar".to_string(),
+ content: "hello from bar".to_string(),
+ },
+ ];
+ let popup = CommandPopup::new(prompts);
+ let items = popup.filtered_items();
+ let mut prompt_names: Vec<String> = items
+ .into_iter()
+ .filter_map(|it| match it {
+ CommandItem::Prompt(i) => popup.prompt_name(i).map(|s| s.to_string()),
+ _ => None,
+ })
+ .collect();
+ prompt_names.sort();
+ assert_eq!(prompt_names, vec!["bar".to_string(), "foo".to_string()]);
+ }
+
+ #[test]
+ fn prompt_name_collision_with_builtin_is_ignored() {
👍
- Created: 2025-08-29 01:56:15 UTC | Link: https://github.com/openai/codex/pull/2696#discussion_r2308929431
@@ -123,11 +176,19 @@ impl WidgetRef for CommandPopup {
} else {
matches
.into_iter()
- .map(|(cmd, indices, _)| GenericDisplayRow {
- name: format!("/{}", cmd.command()),
- match_indices: indices.map(|v| v.into_iter().map(|i| i + 1).collect()),
- is_current: false,
- description: Some(cmd.description().to_string()),
+ .map(|(item, indices, _)| match item {
+ CommandItem::Builtin(cmd) => GenericDisplayRow {
+ name: format!("/{}", cmd.command()),
+ match_indices: indices.map(|v| v.into_iter().map(|i| i + 1).collect()),
+ is_current: false,
+ description: Some(cmd.description().to_string()),
+ },
+ CommandItem::Prompt(i) => GenericDisplayRow {
+ name: format!("/{}", self.prompts[i].name),
+ match_indices: indices.map(|v| v.into_iter().map(|i| i + 1).collect()),
+ is_current: false,
+ description: Some("send saved prompt".to_string()),
We can try it this way and change it if it doesn't feel right, I guess.
codex-rs/tui/src/chatwidget.rs
- Created: 2025-08-28 21:54:36 UTC | Link: https://github.com/openai/codex/pull/2696#discussion_r2308632989
@@ -1220,6 +1224,13 @@ impl ChatWidget {
self.add_to_history(history_cell::new_mcp_tools_output(&self.config, ev.tools));
}
+ fn on_list_custom_prompts(&mut self, ev: ListCustomPromptsResponseEvent) {
+ let len = ev.custom_prompts.len();
+ debug!("received {} custom prompts", len);
I believe our stricter lint checks should reject this now? Maybe it doesn't know about `tracing::debug()`.

```rust
debug!("received {len} custom prompts");
```