change collaboration mode to struct (#9793)

Shouldn't cause behavioral change
This commit is contained in:
Ahmed Ibrahim
2026-01-23 17:00:23 -08:00
committed by GitHub
parent 1167465bf6
commit 69cfc73dc6
16 changed files with 291 additions and 210 deletions

View File

@@ -23,6 +23,7 @@ use codex_core::protocol::SessionSource;
use codex_otel::OtelManager;
use codex_protocol::ThreadId;
use codex_protocol::config_types::CollaborationMode;
+use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::Settings;
use codex_protocol::config_types::Verbosity;
@@ -887,11 +888,14 @@ async fn user_turn_collaboration_mode_overrides_model_and_effort() -> anyhow::Re
.build(&server)
.await?;
-    let collaboration_mode = CollaborationMode::Custom(Settings {
-        model: "gpt-5.1".to_string(),
-        reasoning_effort: Some(ReasoningEffort::High),
-        developer_instructions: None,
-    });
+    let collaboration_mode = CollaborationMode {
+        mode: ModeKind::Custom,
+        settings: Settings {
+            model: "gpt-5.1".to_string(),
+            reasoning_effort: Some(ReasoningEffort::High),
+            developer_instructions: None,
+        },
+    };
codex
.submit(Op::UserTurn {

View File

@@ -4,6 +4,7 @@ use codex_core::protocol::COLLABORATION_MODE_OPEN_TAG;
use codex_core::protocol::EventMsg;
use codex_core::protocol::Op;
use codex_protocol::config_types::CollaborationMode;
+use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::Settings;
use codex_protocol::user_input::UserInput;
use core_test_support::responses::ev_completed;
@@ -22,11 +23,14 @@ fn sse_completed(id: &str) -> String {
}
fn collab_mode_with_instructions(instructions: Option<&str>) -> CollaborationMode {
-    CollaborationMode::Custom(Settings {
-        model: "gpt-5.1".to_string(),
-        reasoning_effort: None,
-        developer_instructions: instructions.map(str::to_string),
-    })
+    CollaborationMode {
+        mode: ModeKind::Custom,
+        settings: Settings {
+            model: "gpt-5.1".to_string(),
+            reasoning_effort: None,
+            developer_instructions: instructions.map(str::to_string),
+        },
+    }
}
fn developer_texts(input: &[Value]) -> Vec<String> {

View File

@@ -9,6 +9,7 @@ use codex_core::protocol::RolloutItem;
use codex_core::protocol::RolloutLine;
use codex_core::protocol::ENVIRONMENT_CONTEXT_OPEN_TAG;
use codex_protocol::config_types::CollaborationMode;
+use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::Settings;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseItem;
@@ -23,11 +24,14 @@ use std::time::Duration;
use tempfile::TempDir;
fn collab_mode_with_instructions(instructions: Option<&str>) -> CollaborationMode {
-    CollaborationMode::Custom(Settings {
-        model: "gpt-5.1".to_string(),
-        reasoning_effort: None,
-        developer_instructions: instructions.map(str::to_string),
-    })
+    CollaborationMode {
+        mode: ModeKind::Custom,
+        settings: Settings {
+            model: "gpt-5.1".to_string(),
+            reasoning_effort: None,
+            developer_instructions: instructions.map(str::to_string),
+        },
+    }
}
fn collab_xml(text: &str) -> String {

View File

@@ -12,6 +12,7 @@ use codex_core::protocol_config_types::ReasoningSummary;
use codex_core::shell::Shell;
use codex_core::shell::default_user_shell;
use codex_protocol::config_types::CollaborationMode;
+use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::Settings;
use codex_protocol::config_types::WebSearchMode;
use codex_protocol::openai_models::ReasoningEffort;
@@ -412,11 +413,14 @@ async fn override_before_first_turn_emits_environment_context() -> anyhow::Resul
let TestCodex { codex, .. } = test_codex().build(&server).await?;
-    let collaboration_mode = CollaborationMode::Custom(Settings {
-        model: "gpt-5.1".to_string(),
-        reasoning_effort: Some(ReasoningEffort::High),
-        developer_instructions: None,
-    });
+    let collaboration_mode = CollaborationMode {
+        mode: ModeKind::Custom,
+        settings: Settings {
+            model: "gpt-5.1".to_string(),
+            reasoning_effort: Some(ReasoningEffort::High),
+            developer_instructions: None,
+        },
+    };
codex
.submit(Op::OverrideTurnContext {

View File

@@ -8,6 +8,7 @@ use codex_core::protocol::EventMsg;
use codex_core::protocol::Op;
use codex_core::protocol::SandboxPolicy;
use codex_protocol::config_types::CollaborationMode;
+use codex_protocol::config_types::ModeKind;
use codex_protocol::config_types::ReasoningSummary;
use codex_protocol::config_types::Settings;
use codex_protocol::request_user_input::RequestUserInputAnswer;
@@ -132,11 +133,14 @@ async fn request_user_input_round_trip_resolves_pending() -> anyhow::Result<()>
model: session_model,
effort: None,
summary: ReasoningSummary::Auto,
-            collaboration_mode: Some(CollaborationMode::Plan(Settings {
-                model: session_configured.model.clone(),
-                reasoning_effort: None,
-                developer_instructions: None,
-            })),
+            collaboration_mode: Some(CollaborationMode {
+                mode: ModeKind::Plan,
+                settings: Settings {
+                    model: session_configured.model.clone(),
+                    reasoning_effort: None,
+                    developer_instructions: None,
+                },
+            }),
personality: None,
})
.await?;
@@ -269,24 +273,26 @@ where
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn request_user_input_rejected_in_execute_mode() -> anyhow::Result<()> {
-    assert_request_user_input_rejected("Execute", |model| {
-        CollaborationMode::Execute(Settings {
+    assert_request_user_input_rejected("Execute", |model| CollaborationMode {
+        mode: ModeKind::Execute,
+        settings: Settings {
             model,
             reasoning_effort: None,
             developer_instructions: None,
-        })
+        },
     })
.await
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn request_user_input_rejected_in_custom_mode() -> anyhow::Result<()> {
-    assert_request_user_input_rejected("Custom", |model| {
-        CollaborationMode::Custom(Settings {
+    assert_request_user_input_rejected("Custom", |model| CollaborationMode {
+        mode: ModeKind::Custom,
+        settings: Settings {
             model,
             reasoning_effort: None,
             developer_instructions: None,
-        })
+        },
     })
.await
}