Compare commits

..

5 Commits

Author SHA1 Message Date
jif-oai
dee46fb200 Merge branch 'main' into jif/warning-plugins 2026-04-13 16:33:25 +01:00
friel-openai
776246c3f5 Make forked agent spawns keep parent model config (#17247)
## Summary

When a `spawn_agent` call does a full-history fork, keep the parent's
effective agent type and model configuration instead of applying child
role/model overrides.

This is the minimal config-inheritance slice of #16055. Prompt-cache key
inheritance and MCP tool-surface stability are split into follow-up PRs.

## Design

- Reject `agent_type`, `model`, and `reasoning_effort` for v1
`fork_context` spawns.
- Reject `agent_type`, `model`, and `reasoning_effort` for v2
`fork_turns = "all"` spawns.
- Keep v2 partial-history forks (`fork_turns = "N"`) configurable;
requested model/reasoning overrides and role config still apply there.
- Keep non-forked spawn behavior unchanged.

## Tests

- `cargo +1.93.1 test -p codex-core spawn_agent_fork_context --lib`
- `cargo +1.93.1 test -p codex-core multi_agent_v2_spawn_fork_turns
--lib`
- `cargo +1.93.1 test -p codex-core
multi_agent_v2_spawn_partial_fork_turns_allows_agent_type_override
--lib`
2026-04-13 15:28:40 +00:00
jif-oai
3f62b5cc61 fix: dedup compact (#17643) 2026-04-13 16:08:53 +01:00
jif-oai
ee8b43220d Merge branch 'main' into jif/warning-plugins 2026-04-13 14:52:18 +01:00
jif-oai
fc98e66a89 feat: Avoid reloading curated marketplaces for tool-suggest discoverable plugins 2026-04-13 13:26:49 +01:00
9 changed files with 423 additions and 75 deletions

View File

@@ -92,7 +92,7 @@ members = [
resolver = "2"
[workspace.package]
version = "0.121.0-alpha.4"
version = "0.0.0"
# Track the edition for all workspace crates in one place. Individual
# crates can still override this value, but keeping it here means new
# crates created with `cargo new -w ...` automatically inherit the 2024

View File

@@ -4,7 +4,6 @@ use tracing::warn;
use super::OPENAI_CURATED_MARKETPLACE_NAME;
use super::PluginCapabilitySummary;
use super::PluginReadRequest;
use super::PluginsManager;
use crate::config::Config;
use codex_config::types::ToolSuggestDiscoverableType;
@@ -47,6 +46,7 @@ pub(crate) fn list_tool_suggest_discoverable_plugins(
else {
return Ok(Vec::new());
};
let curated_marketplace_name = curated_marketplace.name;
let mut discoverable_plugins = Vec::<DiscoverablePluginInfo>::new();
for plugin in curated_marketplace.plugins {
@@ -58,17 +58,14 @@ pub(crate) fn list_tool_suggest_discoverable_plugins(
}
let plugin_id = plugin.id.clone();
let plugin_name = plugin.name.clone();
match plugins_manager.read_plugin_for_config(
match plugins_manager.read_plugin_detail_for_marketplace_plugin(
config,
&PluginReadRequest {
plugin_name,
marketplace_path: curated_marketplace.path.clone(),
},
&curated_marketplace_name,
plugin,
) {
Ok(plugin) => {
let plugin: PluginCapabilitySummary = plugin.plugin.into();
let plugin: PluginCapabilitySummary = plugin.into();
discoverable_plugins.push(DiscoverablePluginInfo {
id: plugin.config_name,
name: plugin.display_name,

View File

@@ -9,6 +9,9 @@ use codex_tools::DiscoverablePluginInfo;
use codex_utils_absolute_path::AbsolutePathBuf;
use pretty_assertions::assert_eq;
use tempfile::tempdir;
use tracing::Level;
use tracing_subscriber::fmt::format::FmtSpan;
use tracing_test::internal::MockWriter;
#[tokio::test]
async fn list_tool_suggest_discoverable_plugins_returns_uninstalled_curated_plugins() {
@@ -140,3 +143,60 @@ discoverables = [{ type = "plugin", id = "sample@openai-curated" }]
}]
);
}
// Regression test: listing tool-suggest discoverable plugins should read each
// curated plugin through the already-loaded marketplace rather than reloading
// it per plugin. WARN-level logs are captured below; each broken manifest must
// be warned about exactly once.
#[tokio::test]
async fn list_tool_suggest_discoverable_plugins_does_not_reload_marketplace_per_plugin() {
    let codex_home = tempdir().expect("tempdir should succeed");
    let curated_root = crate::plugins::curated_plugins_repo_path(codex_home.path());
    // Three curated plugins; two of them get deliberately invalid manifests below.
    write_openai_curated_marketplace(
        &curated_root,
        &["slack", "build-ios-apps", "life-science-research"],
    );
    write_plugins_feature_config(codex_home.path());
    // 129 chars — presumably one past the defaultPrompt length limit, so each
    // of the two plugins below triggers a warning when its manifest is read.
    let too_long_prompt = "x".repeat(129);
    for plugin_name in ["build-ios-apps", "life-science-research"] {
        write_file(
            &curated_root.join(format!("plugins/{plugin_name}/.codex-plugin/plugin.json")),
            &format!(
                r#"{{
"name": "{plugin_name}",
"description": "Plugin that includes skills, MCP servers, and app connectors",
"interface": {{
"defaultPrompt": "{too_long_prompt}"
}}
}}"#
            ),
        );
    }
    let config = load_plugins_config(codex_home.path()).await;
    // Leaked so the subscriber's writer can hold a 'static borrow of the buffer.
    let buffer: &'static std::sync::Mutex<Vec<u8>> =
        Box::leak(Box::new(std::sync::Mutex::new(Vec::new())));
    let subscriber = tracing_subscriber::fmt()
        .with_level(true)
        .with_ansi(false)
        .with_max_level(Level::WARN)
        .with_span_events(FmtSpan::NONE)
        .with_writer(MockWriter::new(buffer))
        .finish();
    // Scope the subscriber to this test only.
    let _guard = tracing::subscriber::set_default(subscriber);
    let discoverable_plugins = list_tool_suggest_discoverable_plugins(&config).unwrap();
    // Only "slack" kept a valid manifest, so it is the sole discoverable plugin.
    assert_eq!(discoverable_plugins.len(), 1);
    assert_eq!(discoverable_plugins[0].id, "slack@openai-curated");
    let logs = String::from_utf8(buffer.lock().expect("buffer lock").clone()).expect("utf8 logs");
    // One warning per broken plugin. If the marketplace were reloaded for every
    // plugin, these warnings would presumably repeat and the counts would grow.
    assert_eq!(logs.matches("ignoring interface.defaultPrompt").count(), 2);
    assert_eq!(
        logs.matches("build-ios-apps/.codex-plugin/plugin.json")
            .count(),
        1
    );
    assert_eq!(
        logs.matches("life-science-research/.codex-plugin/plugin.json")
            .count(),
        1
    );
}

View File

@@ -952,13 +952,6 @@ impl PluginsManager {
marketplace_name,
});
};
if !self.restriction_product_matches(plugin.policy.products.as_deref()) {
return Err(MarketplaceError::PluginNotFound {
plugin_name: request.plugin_name.clone(),
marketplace_name,
});
}
let plugin_id = PluginId::new(plugin.name.clone(), marketplace.name.clone()).map_err(
|err| match err {
PluginIdError::Invalid(message) => MarketplaceError::InvalidPlugin(message),
@@ -966,6 +959,51 @@ impl PluginsManager {
)?;
let plugin_key = plugin_id.as_key();
let (installed_plugins, enabled_plugins) = self.configured_plugin_states(config);
let plugin = self.read_plugin_detail_for_marketplace_plugin(
config,
&marketplace.name,
ConfiguredMarketplacePlugin {
id: plugin_key.clone(),
name: plugin.name,
source: plugin.source,
policy: plugin.policy,
interface: plugin.interface,
installed: installed_plugins.contains(&plugin_key),
enabled: enabled_plugins.contains(&plugin_key),
},
)?;
Ok(PluginReadOutcome {
marketplace_name: if marketplace.name == OPENAI_CURATED_MARKETPLACE_NAME {
OPENAI_CURATED_MARKETPLACE_DISPLAY_NAME.to_string()
} else {
marketplace.name
},
marketplace_path: marketplace.path,
plugin,
})
}
pub(crate) fn read_plugin_detail_for_marketplace_plugin(
&self,
config: &Config,
marketplace_name: &str,
plugin: ConfiguredMarketplacePlugin,
) -> Result<PluginDetail, MarketplaceError> {
if !self.restriction_product_matches(plugin.policy.products.as_deref()) {
return Err(MarketplaceError::PluginNotFound {
plugin_name: plugin.name,
marketplace_name: marketplace_name.to_string(),
});
}
let plugin_id =
PluginId::new(plugin.name.clone(), marketplace_name.to_string()).map_err(|err| {
match err {
PluginIdError::Invalid(message) => MarketplaceError::InvalidPlugin(message),
}
})?;
let plugin_key = plugin_id.as_key();
let source_path = match &plugin.source {
MarketplacePluginSource::Local { path } => path.clone(),
};
@@ -1001,27 +1039,19 @@ impl PluginsManager {
mcp_server_names.sort_unstable();
mcp_server_names.dedup();
Ok(PluginReadOutcome {
marketplace_name: if marketplace.name == OPENAI_CURATED_MARKETPLACE_NAME {
OPENAI_CURATED_MARKETPLACE_DISPLAY_NAME.to_string()
} else {
marketplace.name
},
marketplace_path: marketplace.path,
plugin: PluginDetail {
id: plugin_key.clone(),
name: plugin.name,
description,
source: plugin.source,
policy: plugin.policy,
interface: plugin.interface,
installed: installed_plugins.contains(&plugin_key),
enabled: enabled_plugins.contains(&plugin_key),
skills: resolved_skills.skills,
disabled_skill_paths: resolved_skills.disabled_skill_paths,
apps,
mcp_server_names,
},
Ok(PluginDetail {
id: plugin_key,
name: plugin.name,
description,
source: plugin.source,
policy: plugin.policy,
interface: plugin.interface,
installed: plugin.installed,
enabled: plugin.enabled,
skills: resolved_skills.skills,
disabled_skill_paths: resolved_skills.disabled_skill_paths,
apps,
mcp_server_names,
})
}

View File

@@ -2,11 +2,10 @@ use super::*;
use crate::agent::control::SpawnAgentForkMode;
use crate::agent::control::SpawnAgentOptions;
use crate::agent::control::render_input_preview;
use crate::agent::role::DEFAULT_ROLE_NAME;
use crate::agent::role::apply_role_to_config;
use crate::agent::exceeds_thread_spawn_depth_limit;
use crate::agent::next_thread_spawn_depth;
use crate::agent::role::DEFAULT_ROLE_NAME;
use crate::agent::role::apply_role_to_config;
pub(crate) struct Handler;
@@ -61,17 +60,25 @@ impl ToolHandler for Handler {
.await;
let mut config =
build_agent_spawn_config(&session.get_base_instructions().await, turn.as_ref())?;
apply_requested_spawn_agent_model_overrides(
&session,
turn.as_ref(),
&mut config,
args.model.as_deref(),
args.reasoning_effort,
)
.await?;
apply_role_to_config(&mut config, role_name)
.await
.map_err(FunctionCallError::RespondToModel)?;
if args.fork_context {
reject_full_fork_spawn_overrides(
role_name,
args.model.as_deref(),
args.reasoning_effort,
)?;
} else {
apply_requested_spawn_agent_model_overrides(
&session,
turn.as_ref(),
&mut config,
args.model.as_deref(),
args.reasoning_effort,
)
.await?;
apply_role_to_config(&mut config, role_name)
.await
.map_err(FunctionCallError::RespondToModel)?;
}
apply_spawn_agent_runtime_overrides(&mut config, turn.as_ref())?;
apply_spawn_agent_overrides(&mut config, child_depth);

View File

@@ -225,7 +225,9 @@ fn build_agent_shared_config(turn: &TurnContext) -> Result<Config, FunctionCallE
let mut config = (*base_config).clone();
config.model = Some(turn.model_info.slug.clone());
config.model_provider = turn.provider.clone();
config.model_reasoning_effort = turn.reasoning_effort;
config.model_reasoning_effort = turn
.reasoning_effort
.or(turn.model_info.default_reasoning_level);
config.model_reasoning_summary = Some(turn.reasoning_summary);
config.developer_instructions = turn.developer_instructions.clone();
config.compact_prompt = turn.compact_prompt.clone();
@@ -234,6 +236,19 @@ fn build_agent_shared_config(turn: &TurnContext) -> Result<Config, FunctionCallE
Ok(config)
}
/// Rejects explicit agent-type/model/reasoning overrides for a full-history
/// forked spawn: such children inherit the parent's effective configuration.
///
/// Returns `Ok(())` when none of the three overrides was supplied; otherwise
/// returns a `RespondToModel` error explaining which arguments to omit.
pub(crate) fn reject_full_fork_spawn_overrides(
    agent_type: Option<&str>,
    model: Option<&str>,
    reasoning_effort: Option<ReasoningEffort>,
) -> Result<(), FunctionCallError> {
    let no_overrides = agent_type.is_none() && model.is_none() && reasoning_effort.is_none();
    if no_overrides {
        Ok(())
    } else {
        Err(FunctionCallError::RespondToModel(
            "Full-history forked agents inherit the parent agent type, model, and reasoning effort; omit agent_type, model, and reasoning_effort, or spawn without fork_context/fork_turns=all.".to_string(),
        ))
    }
}
/// Copies runtime-only turn state onto a child config before it is handed to `AgentControl`.
///
/// These values are chosen by the live turn rather than persisted config, so leaving them stale

View File

@@ -2,6 +2,7 @@ use super::*;
use crate::CodexThread;
use crate::ThreadManager;
use crate::codex::make_session_and_context;
use crate::config::AgentRoleConfig;
use crate::config::DEFAULT_AGENT_MAX_DEPTH;
use crate::function_tool::FunctionCallError;
use crate::session_prefix::format_subagent_notification_message;
@@ -28,6 +29,7 @@ use codex_protocol::models::ContentItem;
use codex_protocol::models::FunctionCallOutputBody;
use codex_protocol::models::ResponseInputItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::openai_models::ReasoningEffort;
use codex_protocol::protocol::AgentStatus;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::EventMsg;
@@ -89,6 +91,36 @@ fn thread_manager() -> ThreadManager {
)
}
// Test helper: registers an agent role whose role config overrides the model,
// provider, and reasoning effort, and swaps the updated config into
// `turn.config`. Returns the role name so tests can pass it as `agent_type`.
async fn install_role_with_model_override(turn: &mut TurnContext) -> String {
    let role_name = "fork-context-role".to_string();
    tokio::fs::create_dir_all(&turn.config.codex_home)
        .await
        .expect("codex home should be created");
    let role_config_path = turn.config.codex_home.join("fork-context-role.toml");
    // Role-level overrides; a full-history fork must NOT apply these.
    tokio::fs::write(
        &role_config_path,
        r#"model = "gpt-5-role-override"
model_provider = "ollama"
model_reasoning_effort = "minimal"
"#,
    )
    .await
    .expect("role config should be written");
    // Clone-and-replace because `turn.config` is behind an Arc.
    let mut config = (*turn.config).clone();
    config.agent_roles.insert(
        role_name.clone(),
        AgentRoleConfig {
            description: Some("Role with model overrides".to_string()),
            config_file: Some(role_config_path),
            nickname_candidates: None,
        },
    );
    turn.config = Arc::new(config);
    role_name
}
fn history_contains_inter_agent_communication(
history_items: &[ResponseItem],
expected: &InterAgentCommunication,
@@ -365,6 +397,215 @@ async fn spawn_agent_uses_explorer_role_and_preserves_approval_policy() {
assert_eq!(snapshot.model_provider_id, "ollama");
}
// A v1 `fork_context` spawn must inherit the parent's agent type; supplying an
// explicit `agent_type` alongside `fork_context: true` is rejected.
#[tokio::test]
async fn spawn_agent_fork_context_rejects_agent_type_override() {
    let (mut session, mut turn) = make_session_and_context().await;
    let role_name = install_role_with_model_override(&mut turn).await;
    let manager = thread_manager();
    let root = manager
        .start_thread((*turn.config).clone())
        .await
        .expect("root thread should start");
    session.services.agent_control = manager.agent_control();
    session.conversation_id = root.thread_id;
    let err = SpawnAgentHandler
        .handle(invocation(
            Arc::new(session),
            Arc::new(turn),
            "spawn_agent",
            function_payload(json!({
                "message": "inspect this repo",
                "agent_type": role_name,
                "fork_context": true
            })),
        ))
        .await
        .expect_err("fork_context should reject agent_type overrides");
    // The rejection message must match the one produced by
    // reject_full_fork_spawn_overrides exactly.
    assert_eq!(
        err,
        FunctionCallError::RespondToModel(
            "Full-history forked agents inherit the parent agent type, model, and reasoning effort; omit agent_type, model, and reasoning_effort, or spawn without fork_context/fork_turns=all.".to_string(),
        )
    );
}
// A v1 `fork_context` spawn must inherit the parent's model configuration;
// explicit `model` / `reasoning_effort` arguments are rejected.
#[tokio::test]
async fn spawn_agent_fork_context_rejects_child_model_overrides() {
    let (mut session, turn) = make_session_and_context().await;
    let manager = thread_manager();
    let root = manager
        .start_thread((*turn.config).clone())
        .await
        .expect("root thread should start");
    session.services.agent_control = manager.agent_control();
    session.conversation_id = root.thread_id;
    let err = SpawnAgentHandler
        .handle(invocation(
            Arc::new(session),
            Arc::new(turn),
            "spawn_agent",
            function_payload(json!({
                "message": "inspect this repo",
                "model": "gpt-5-child-override",
                "reasoning_effort": "low",
                "fork_context": true
            })),
        ))
        .await
        .expect_err("forked spawn should reject child model overrides");
    assert_eq!(
        err,
        FunctionCallError::RespondToModel(
            "Full-history forked agents inherit the parent agent type, model, and reasoning effort; omit agent_type, model, and reasoning_effort, or spawn without fork_context/fork_turns=all.".to_string(),
        )
    );
}
// The v2 handler applies the same full-fork rule: `fork_turns = "all"` must
// inherit the parent agent type, so an explicit `agent_type` is rejected.
#[tokio::test]
async fn multi_agent_v2_spawn_fork_turns_all_rejects_agent_type_override() {
    let (mut session, mut turn) = make_session_and_context().await;
    let role_name = install_role_with_model_override(&mut turn).await;
    let manager = thread_manager();
    let root = manager
        .start_thread((*turn.config).clone())
        .await
        .expect("root thread should start");
    session.services.agent_control = manager.agent_control();
    session.conversation_id = root.thread_id;
    // The v2 spawn path is gated behind the MultiAgentV2 feature flag.
    let mut config = (*turn.config).clone();
    config
        .features
        .enable(Feature::MultiAgentV2)
        .expect("test config should allow feature update");
    let turn = TurnContext {
        config: Arc::new(config),
        ..turn
    };
    let err = SpawnAgentHandlerV2
        .handle(invocation(
            Arc::new(session),
            Arc::new(turn),
            "spawn_agent",
            function_payload(json!({
                "message": "inspect this repo",
                "task_name": "fork_context_v2",
                "agent_type": role_name,
                "fork_turns": "all"
            })),
        ))
        .await
        .expect_err("fork_turns=all should reject agent_type overrides");
    assert_eq!(
        err,
        FunctionCallError::RespondToModel(
            "Full-history forked agents inherit the parent agent type, model, and reasoning effort; omit agent_type, model, and reasoning_effort, or spawn without fork_context/fork_turns=all.".to_string(),
        )
    );
}
// The v2 handler must also reject explicit model/reasoning overrides when the
// spawn is a full-history fork (`fork_turns = "all"`).
#[tokio::test]
async fn multi_agent_v2_spawn_fork_turns_rejects_child_model_overrides() {
    let (mut session, mut turn) = make_session_and_context().await;
    let manager = thread_manager();
    let root = manager
        .start_thread((*turn.config).clone())
        .await
        .expect("root thread should start");
    session.services.agent_control = manager.agent_control();
    session.conversation_id = root.thread_id;
    // The v2 spawn path is gated behind the MultiAgentV2 feature flag.
    let mut config = (*turn.config).clone();
    config
        .features
        .enable(Feature::MultiAgentV2)
        .expect("test config should allow feature update");
    turn.config = Arc::new(config);
    let err = SpawnAgentHandlerV2
        .handle(invocation(
            Arc::new(session),
            Arc::new(turn),
            "spawn_agent",
            function_payload(json!({
                "message": "inspect this repo",
                "task_name": "fork_context_v2",
                "model": "gpt-5-child-override",
                "reasoning_effort": "low",
                "fork_turns": "all"
            })),
        ))
        .await
        .expect_err("forked spawn should reject child model overrides");
    assert_eq!(
        err,
        FunctionCallError::RespondToModel(
            "Full-history forked agents inherit the parent agent type, model, and reasoning effort; omit agent_type, model, and reasoning_effort, or spawn without fork_context/fork_turns=all.".to_string(),
        )
    );
}
// Partial-history forks (`fork_turns = "N"`) stay configurable: the requested
// role's model/provider/reasoning overrides must be applied to the child.
#[tokio::test]
async fn multi_agent_v2_spawn_partial_fork_turns_allows_agent_type_override() {
    let (mut session, mut turn) = make_session_and_context().await;
    let role_name = install_role_with_model_override(&mut turn).await;
    let manager = thread_manager();
    let root = manager
        .start_thread((*turn.config).clone())
        .await
        .expect("root thread should start");
    session.services.agent_control = manager.agent_control();
    session.conversation_id = root.thread_id;
    // The v2 spawn path is gated behind the MultiAgentV2 feature flag.
    let mut config = (*turn.config).clone();
    config
        .features
        .enable(Feature::MultiAgentV2)
        .expect("test config should allow feature update");
    let turn = TurnContext {
        config: Arc::new(config),
        ..turn
    };
    // fork_turns = "1" is a partial fork, so agent_type overrides are allowed.
    let output = SpawnAgentHandlerV2
        .handle(invocation(
            Arc::new(session),
            Arc::new(turn),
            "spawn_agent",
            function_payload(json!({
                "message": "inspect this repo",
                "task_name": "partial_fork",
                "agent_type": role_name,
                "fork_turns": "1"
            })),
        ))
        .await
        .expect("partial fork should allow agent_type overrides");
    let (content, _) = expect_text_output(output);
    let result: serde_json::Value =
        serde_json::from_str(&content).expect("spawn_agent result should be json");
    assert_eq!(result["task_name"], "/root/partial_fork");
    // Find the spawned child thread: the first captured op whose thread id is
    // not the root's.
    let agent_id = manager
        .captured_ops()
        .into_iter()
        .map(|(thread_id, _)| thread_id)
        .find(|thread_id| *thread_id != root.thread_id)
        .expect("spawned agent should receive an op");
    let snapshot = manager
        .get_thread(agent_id)
        .await
        .expect("spawned agent thread should exist")
        .config_snapshot()
        .await;
    // The child's snapshot must reflect the role-config overrides written by
    // install_role_with_model_override.
    assert_eq!(snapshot.model, "gpt-5-role-override");
    assert_eq!(snapshot.model_provider_id, "ollama");
    assert_eq!(snapshot.reasoning_effort, Some(ReasoningEffort::Minimal));
}
#[tokio::test]
async fn spawn_agent_returns_agent_id_without_task_name() {
let (mut session, turn) = make_session_and_context().await;

View File

@@ -70,17 +70,25 @@ impl ToolHandler for Handler {
.await;
let mut config =
build_agent_spawn_config(&session.get_base_instructions().await, turn.as_ref())?;
apply_requested_spawn_agent_model_overrides(
&session,
turn.as_ref(),
&mut config,
args.model.as_deref(),
args.reasoning_effort,
)
.await?;
apply_role_to_config(&mut config, role_name)
.await
.map_err(FunctionCallError::RespondToModel)?;
if matches!(fork_mode, Some(SpawnAgentForkMode::FullHistory)) {
reject_full_fork_spawn_overrides(
role_name,
args.model.as_deref(),
args.reasoning_effort,
)?;
} else {
apply_requested_spawn_agent_model_overrides(
&session,
turn.as_ref(),
&mut config,
args.model.as_deref(),
args.reasoning_effort,
)
.await?;
apply_role_to_config(&mut config, role_name)
.await
.map_err(FunctionCallError::RespondToModel)?;
}
apply_spawn_agent_runtime_overrides(&mut config, turn.as_ref())?;
apply_spawn_agent_overrides(&mut config, child_depth);
config.developer_instructions = Some(

View File

@@ -2197,16 +2197,6 @@ impl ChatWidget {
self.finalize_completed_assistant_message(Some(&message));
}
fn on_context_compacted(&mut self) {
self.flush_answer_stream_with_separator();
self.handle_stream_finished();
self.add_to_history(history_cell::new_info_event(
"Context compacted".to_owned(),
/*hint*/ None,
));
self.request_redraw();
}
fn on_agent_message_delta(&mut self, delta: String) {
self.handle_streaming_delta(delta);
}
@@ -6254,7 +6244,7 @@ impl ChatWidget {
| ServerNotification::WindowsWorldWritableWarning(_)
| ServerNotification::WindowsSandboxSetupCompleted(_)
| ServerNotification::AccountLoginCompleted(_) => {}
ServerNotification::ContextCompacted(_) => self.on_context_compacted(),
ServerNotification::ContextCompacted(_) => {}
}
}
@@ -6739,7 +6729,7 @@ impl ChatWidget {
self.on_entered_review_mode(review_request, from_replay)
}
EventMsg::ExitedReviewMode(review) => self.on_exited_review_mode(review),
EventMsg::ContextCompacted(_) => self.on_context_compacted(),
EventMsg::ContextCompacted(_) => {}
EventMsg::CollabAgentSpawnBegin(CollabAgentSpawnBeginEvent {
call_id,
model,