spawn prompt (#14362)

# External (non-OpenAI) Pull Request Requirements

Before opening this Pull Request, please read the dedicated
"Contributing" markdown file or your PR may be closed:
https://github.com/openai/codex/blob/main/docs/contributing.md

If your PR conforms to our contribution guidelines, replace this text
with a detailed, high-quality description of your changes.

Include a link to a bug report or enhancement request.
This commit is contained in:
Ahmed Ibrahim
2026-03-11 11:14:51 -07:00
committed by Michael Bolin
parent 65b325159d
commit 8f8a0f55ce
5 changed files with 318 additions and 2 deletions

View File

@@ -29,6 +29,7 @@ use codex_protocol::openai_models::ApplyPatchToolType;
use codex_protocol::openai_models::ConfigShellToolType;
use codex_protocol::openai_models::InputModality;
use codex_protocol::openai_models::ModelInfo;
use codex_protocol::openai_models::ModelPreset;
use codex_protocol::openai_models::WebSearchToolType;
use codex_protocol::protocol::SessionSource;
use codex_protocol::protocol::SubAgentSource;
@@ -90,6 +91,7 @@ pub enum UnifiedExecBackendConfig {
#[derive(Debug, Clone)]
pub(crate) struct ToolsConfig {
pub available_models: Vec<ModelPreset>,
pub shell_type: ConfigShellToolType,
shell_command_backend: ShellCommandBackendConfig,
pub unified_exec_backend: UnifiedExecBackendConfig,
@@ -117,6 +119,7 @@ pub(crate) struct ToolsConfig {
pub(crate) struct ToolsConfigParams<'a> {
pub(crate) model_info: &'a ModelInfo,
pub(crate) available_models: &'a Vec<ModelPreset>,
pub(crate) features: &'a Features,
pub(crate) web_search_mode: Option<WebSearchMode>,
pub(crate) session_source: SessionSource,
@@ -126,6 +129,7 @@ impl ToolsConfig {
pub fn new(params: &ToolsConfigParams) -> Self {
let ToolsConfigParams {
model_info,
available_models: available_models_ref,
features,
web_search_mode,
session_source,
@@ -195,6 +199,7 @@ impl ToolsConfig {
);
Self {
available_models: available_models_ref.to_vec(),
shell_type,
shell_command_backend,
unified_exec_backend,
@@ -765,6 +770,7 @@ fn create_collab_input_items_schema() -> JsonSchema {
}
fn create_spawn_agent_tool(config: &ToolsConfig) -> ToolSpec {
let available_models_description = spawn_agent_models_description(&config.available_models);
let properties = BTreeMap::from([
(
"message".to_string(),
@@ -815,8 +821,11 @@ fn create_spawn_agent_tool(config: &ToolsConfig) -> ToolSpec {
ToolSpec::Function(ResponsesApiTool {
name: "spawn_agent".to_string(),
description: r#"Spawn a sub-agent for a well-scoped task. Returns the agent id (and user-facing nickname when available) to use to communicate with this agent. This spawn_agent tool provides you access to smaller but more efficient sub-agents. A mini model can solve many tasks faster than the main model. You should follow the rules and guidelines below to use this tool.
description: format!(
r#"
Only use `spawn_agent` if and only if the user explicitly asked for sub-agents or parallel agent work. Spawn a sub-agent for a well-scoped task. Returns the agent id (and user-facing nickname when available) to use to communicate with this agent. This spawn_agent tool provides you access to smaller but more efficient sub-agents. A mini model can solve many tasks faster than the main model. You should follow the rules and guidelines below to use this tool.
{available_models_description}
### When to delegate vs. do the subtask yourself
- First, quickly analyze the overall user task and form a succinct high-level plan. Identify which tasks are immediate blockers on the critical path, and which tasks are sidecar tasks that are needed but can run in parallel without blocking the next local step. As part of that plan, explicitly decide what immediate task you should do locally right now. Do this planning step before delegating to agents so you do not hand off the immediate blocking task to a submodel and then waste time waiting on it.
- Use the smaller subagent when a subtask is easy enough for it to handle and can run in parallel with your local work. Prefer delegating concrete, bounded sidecar tasks that materially advance the main task without blocking your immediate next local step.
@@ -845,7 +854,7 @@ fn create_spawn_agent_tool(config: &ToolsConfig) -> ToolSpec {
- Split implementation into disjoint codebase slices and spawn multiple agents for them in parallel when the write scopes do not overlap.
- Delegate verification only when it can run in parallel with ongoing implementation and is likely to catch a concrete risk before final integration.
- The key is to find opportunities to spawn multiple independent subtasks in parallel within the same round, while ensuring each subtask is well-defined, self-contained, and materially advances the main task."#
.to_string(),
),
strict: false,
parameters: JsonSchema::Object {
properties,
@@ -856,6 +865,35 @@ fn create_spawn_agent_tool(config: &ToolsConfig) -> ToolSpec {
})
}
fn spawn_agent_models_description(models: &[ModelPreset]) -> String {
let visible_models: Vec<&ModelPreset> =
models.iter().filter(|model| model.show_in_picker).collect();
if visible_models.is_empty() {
return "No picker-visible models are currently loaded.".to_string();
}
visible_models
.into_iter()
.map(|model| {
let efforts = model
.supported_reasoning_efforts
.iter()
.map(|preset| format!("{} ({})", preset.effort, preset.description))
.collect::<Vec<_>>()
.join(", ");
format!(
"- {} (`{}`): {} Default reasoning effort: {}. Supported reasoning efforts: {}.",
model.display_name,
model.model,
model.description,
model.default_reasoning_effort,
efforts
)
})
.collect::<Vec<_>>()
.join("\n")
}
fn create_spawn_agents_on_csv_tool() -> ToolSpec {
let mut properties = BTreeMap::new();
properties.insert(
@@ -2734,8 +2772,10 @@ mod tests {
let model_info = model_info_from_models_json("gpt-5-codex");
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let available_models = Vec::new();
let config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Live),
session_source: SessionSource::Cli,
@@ -2805,8 +2845,10 @@ mod tests {
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::Collab);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -2827,8 +2869,10 @@ mod tests {
let mut features = Features::with_defaults();
features.enable(Feature::SpawnCsv);
features.normalize_dependencies();
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -2855,8 +2899,10 @@ mod tests {
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::Artifact);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -2874,8 +2920,10 @@ mod tests {
features.enable(Feature::SpawnCsv);
features.normalize_dependencies();
features.enable(Feature::Sqlite);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::SubAgent(SubAgentSource::Other(
@@ -2904,8 +2952,10 @@ mod tests {
let model_info =
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -2918,8 +2968,10 @@ mod tests {
);
features.enable(Feature::DefaultModeRequestUserInput);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -2940,8 +2992,10 @@ mod tests {
let model_info =
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let features = Features::with_defaults();
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -2951,8 +3005,10 @@ mod tests {
let mut features = Features::with_defaults();
features.enable(Feature::RequestPermissionsTool);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -2972,8 +3028,10 @@ mod tests {
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::RequestPermissions);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -2990,8 +3048,10 @@ mod tests {
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.disable(Feature::MemoryTool);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -3010,8 +3070,10 @@ mod tests {
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let features = Features::with_defaults();
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -3036,8 +3098,10 @@ mod tests {
let mut features = Features::with_defaults();
features.enable(Feature::JsRepl);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -3058,8 +3122,10 @@ mod tests {
let mut image_generation_features = default_features.clone();
image_generation_features.enable(Feature::ImageGeneration);
let available_models = Vec::new();
let default_tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &supported_model_info,
available_models: &available_models,
features: &default_features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -3074,6 +3140,7 @@ mod tests {
let supported_tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &supported_model_info,
available_models: &available_models,
features: &image_generation_features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -3091,6 +3158,7 @@ mod tests {
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &unsupported_model_info,
available_models: &available_models,
features: &image_generation_features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -3127,8 +3195,10 @@ mod tests {
) {
let _config = test_config();
let model_info = model_info_from_models_json(model_slug);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features,
web_search_mode,
session_source: SessionSource::Cli,
@@ -3161,8 +3231,10 @@ mod tests {
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let features = Features::with_defaults();
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -3189,8 +3261,10 @@ mod tests {
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let features = Features::with_defaults();
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Live),
session_source: SessionSource::Cli,
@@ -3230,8 +3304,10 @@ mod tests {
search_context_size: Some(codex_protocol::config_types::WebSearchContextSize::High),
};
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Live),
session_source: SessionSource::Cli,
@@ -3264,8 +3340,10 @@ mod tests {
model_info.web_search_tool_type = WebSearchToolType::TextAndImage;
let features = Features::with_defaults();
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Live),
session_source: SessionSource::Cli,
@@ -3296,8 +3374,10 @@ mod tests {
let model_info =
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let features = Features::with_defaults();
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -3319,8 +3399,10 @@ mod tests {
let model_info =
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let features = Features::with_defaults();
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -3510,8 +3592,10 @@ mod tests {
let model_info = ModelsManager::construct_model_info_offline_for_tests("o3", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Live),
session_source: SessionSource::Cli,
@@ -3534,8 +3618,10 @@ mod tests {
features.enable(Feature::UnifiedExec);
features.enable(Feature::ShellZshFork);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Live),
session_source: SessionSource::Cli,
@@ -3560,8 +3646,10 @@ mod tests {
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -3586,8 +3674,10 @@ mod tests {
"list_dir".to_string(),
];
let features = Features::with_defaults();
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -3618,8 +3708,10 @@ mod tests {
let model_info = ModelsManager::construct_model_info_offline_for_tests("o3", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Live),
session_source: SessionSource::Cli,
@@ -3706,8 +3798,10 @@ mod tests {
let model_info = ModelsManager::construct_model_info_offline_for_tests("o3", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -3752,8 +3846,10 @@ mod tests {
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::Apps);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -3837,8 +3933,10 @@ mod tests {
)]));
let features = Features::with_defaults();
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -3848,8 +3946,10 @@ mod tests {
let mut features = Features::with_defaults();
features.enable(Feature::Apps);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -3865,8 +3965,10 @@ mod tests {
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::Apps);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -3890,8 +3992,10 @@ mod tests {
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -3946,8 +4050,10 @@ mod tests {
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -3999,8 +4105,10 @@ mod tests {
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
features.enable(Feature::ApplyPatchFreeform);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -4054,8 +4162,10 @@ mod tests {
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -4261,8 +4371,10 @@ Examples of valid command strings:
ModelsManager::construct_model_info_offline_for_tests("gpt-5-codex", &config);
let mut features = Features::with_defaults();
features.enable(Feature::UnifiedExec);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -4368,8 +4480,10 @@ Examples of valid command strings:
let mut features = Features::with_defaults();
features.enable(Feature::CodeMode);
features.enable(Feature::UnifiedExec);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,
@@ -4396,8 +4510,10 @@ Examples of valid command strings:
let mut features = Features::with_defaults();
features.enable(Feature::CodeMode);
features.enable(Feature::UnifiedExec);
let available_models = Vec::new();
let tools_config = ToolsConfig::new(&ToolsConfigParams {
model_info: &model_info,
available_models: &available_models,
features: &features,
web_search_mode: Some(WebSearchMode::Cached),
session_source: SessionSource::Cli,