Compare commits

...

7 Commits

Author SHA1 Message Date
Casey Chow
5b2eaca1e1 Add retries for file transfer network phases
Co-authored-by: Codex <noreply@openai.com>
2026-03-19 14:08:20 +00:00
Casey Chow
941f2ad34c Clarify file transfer tool intent
Co-authored-by: Codex <noreply@openai.com>
2026-03-19 13:51:01 +00:00
Casey Chow
d5028cf0fb Make file transfer tools non-experimental
Register upload_file and download_file as built-in tools instead of gating them on experimental_supported_tools, and add regression coverage for the ungated behavior.

Co-authored-by: Codex <noreply@openai.com>
2026-03-19 13:46:42 +00:00
Casey Chow
64eaa282ce Fix core expectations for file transfer tools
Add the new file-transfer tools to the core tool expectation suites and satisfy the argument-comment lint in the file transfer handler.

Co-authored-by: Codex <noreply@openai.com>
2026-03-19 13:34:30 +00:00
Casey Chow
74d0875e74 Harden file transfer download headers
Keep auth headers on the backend link request, but do not forward them to the backend-provided blob download URL. Add regression coverage for the blob fetch header set.

Co-authored-by: Codex <noreply@openai.com>
2026-03-19 12:40:11 +00:00
Casey Chow
ddc4efc19d Add file transfer edge-case coverage
Co-authored-by: Codex <noreply@openai.com>
2026-03-19 03:14:45 +00:00
Casey Chow
99dcf63956 Add sandboxed file transfer tools
Co-authored-by: Codex <noreply@openai.com>
2026-03-19 01:34:45 +00:00
18 changed files with 2662 additions and 14 deletions

19
codex-rs/Cargo.lock generated
View File

@@ -1557,6 +1557,7 @@ version = "0.0.0"
dependencies = [
"anyhow",
"codex-apply-patch",
"codex-file-transfer",
"codex-linux-sandbox",
"codex-shell-escalation",
"codex-utils-home-dir",
@@ -1844,6 +1845,7 @@ dependencies = [
"codex-environment",
"codex-execpolicy",
"codex-file-search",
"codex-file-transfer",
"codex-git",
"codex-hooks",
"codex-keyring-store",
@@ -2075,6 +2077,23 @@ dependencies = [
"tokio",
]
[[package]]
name = "codex-file-transfer"
version = "0.0.0"
dependencies = [
"anyhow",
"codex-client",
"mime_guess",
"pretty_assertions",
"reqwest",
"serde",
"serde_json",
"tempfile",
"tokio",
"tokio-util",
"wiremock",
]
[[package]]
name = "codex-git"
version = "0.0.0"

View File

@@ -1,6 +1,7 @@
[workspace]
members = [
"backend-client",
"file-transfer",
"ansi-escape",
"async-utils",
"app-server",
@@ -109,6 +110,7 @@ codex-exec = { path = "exec" }
codex-execpolicy = { path = "execpolicy" }
codex-experimental-api-macros = { path = "codex-experimental-api-macros" }
codex-feedback = { path = "feedback" }
codex-file-transfer = { path = "file-transfer" }
codex-file-search = { path = "file-search" }
codex-git = { path = "utils/git" }
codex-hooks = { path = "hooks" }

View File

@@ -14,6 +14,7 @@ workspace = true
[dependencies]
anyhow = { workspace = true }
codex-apply-patch = { workspace = true }
codex-file-transfer = { workspace = true }
codex-linux-sandbox = { workspace = true }
codex-shell-escalation = { workspace = true }
codex-utils-home-dir = { workspace = true }

View File

@@ -4,6 +4,7 @@ use std::path::Path;
use std::path::PathBuf;
use codex_apply_patch::CODEX_CORE_APPLY_PATCH_ARG1;
use codex_file_transfer::CODEX_CORE_FILE_TRANSFER_ARG1;
use codex_utils_home_dir::find_codex_home;
#[cfg(unix)]
use std::os::unix::fs::symlink;
@@ -105,6 +106,43 @@ pub fn arg0_dispatch() -> Option<Arg0PathEntryGuard> {
};
std::process::exit(exit_code);
}
if argv1 == CODEX_CORE_FILE_TRANSFER_ARG1 {
let request_arg = args.next().and_then(|s| s.to_str().map(str::to_owned));
let runtime = match tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
{
Ok(runtime) => runtime,
Err(_) => std::process::exit(1),
};
let exit_code = match request_arg {
Some(request_arg) => runtime.block_on(async move {
match codex_file_transfer::run_from_arg(&request_arg).await {
Ok(output) => match output.to_json() {
Ok(json) => {
println!("{json}");
0
}
Err(err) => {
eprintln!("failed to serialize file transfer output: {err}");
1
}
},
Err(err) => {
eprintln!("file transfer helper failed: {err}");
1
}
}
}),
None => {
eprintln!(
"Error: {CODEX_CORE_FILE_TRANSFER_ARG1} requires a UTF-8 request argument."
);
1
}
};
std::process::exit(exit_code);
}
// This modifies the environment, which is not thread-safe, so do this
// before creating any threads/the Tokio runtime.

View File

@@ -38,6 +38,7 @@ codex-environment = { workspace = true }
codex-shell-command = { workspace = true }
codex-skills = { workspace = true }
codex-execpolicy = { workspace = true }
codex-file-transfer = { workspace = true }
codex-file-search = { workspace = true }
codex-git = { workspace = true }
codex-hooks = { workspace = true }

File diff suppressed because one or more lines are too long

View File

@@ -478,7 +478,17 @@ impl ModelsManager {
.iter()
.position(|existing| existing.slug == model.slug)
{
existing_models[existing_index] = model;
let existing = &existing_models[existing_index];
existing_models[existing_index] = if model.experimental_supported_tools.is_empty()
&& !existing.experimental_supported_tools.is_empty()
{
ModelInfo {
experimental_supported_tools: existing.experimental_supported_tools.clone(),
..model
}
} else {
model
};
} else {
existing_models.push(model);
}

View File

@@ -351,6 +351,45 @@ async fn refresh_available_models_uses_cache_when_fresh() {
);
}
// Regression test: when the remote models payload returns an empty
// `experimental_supported_tools` list for a slug, the refresh must keep the
// non-empty list already known for that slug instead of overwriting it.
// NOTE(review): the expected `upload_file`/`download_file` entries presumably
// come from the locally bundled model catalog for gpt-5.4 — confirm.
#[tokio::test]
async fn refresh_available_models_preserves_local_experimental_tools_when_remote_empty() {
let server = MockServer::start().await;
// Remote advertises gpt-5.4 with no experimental tools.
let remote_models = vec![remote_model("gpt-5.4", "gpt-5.4", 0)];
let _models_mock = mount_models_once(
&server,
ModelsResponse {
models: remote_models,
},
)
.await;
let codex_home = tempdir().expect("temp dir");
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
let provider = provider_for(server.uri());
let manager = ModelsManager::with_provider_for_tests(
codex_home.path().to_path_buf(),
auth_manager,
provider,
);
manager
.refresh_available_models(RefreshStrategy::OnlineIfUncached)
.await
.expect("refresh succeeds");
let gpt_54 = manager
.get_remote_models()
.await
.into_iter()
.find(|model| model.slug == "gpt-5.4")
.expect("gpt-5.4 should exist");
// The locally known tool list must survive the refresh.
assert_eq!(
gpt_54.experimental_supported_tools,
vec!["upload_file".to_string(), "download_file".to_string()]
);
}
#[tokio::test]
async fn refresh_available_models_refetches_when_cache_stale() {
let server = MockServer::start().await;

View File

@@ -0,0 +1,778 @@
use crate::codex::Session;
use crate::codex::TurnContext;
use crate::default_client::get_codex_user_agent;
use crate::error::CodexErr;
use crate::error::SandboxErr;
use crate::function_tool::FunctionCallError;
use crate::sandboxing::SandboxPermissions;
use crate::sandboxing::effective_file_system_sandbox_policy;
use crate::sandboxing::merge_permission_profiles;
use crate::sandboxing::normalize_additional_permissions;
use crate::tools::context::FunctionToolOutput;
use crate::tools::context::ToolInvocation;
use crate::tools::context::ToolPayload;
use crate::tools::handlers::parse_arguments;
use crate::tools::handlers::resolve_workdir_base_path;
use crate::tools::orchestrator::ToolOrchestrator;
use crate::tools::registry::ToolHandler;
use crate::tools::registry::ToolKind;
use crate::tools::runtimes::file_transfer::FileTransferRuntime;
use crate::tools::runtimes::file_transfer::InternalFileTransferRequest;
use crate::tools::sandboxing::ToolCtx;
use async_trait::async_trait;
use codex_file_transfer::DownloadFileToolResult;
use codex_file_transfer::FILE_TRANSFER_ACCOUNT_ID_ENV;
use codex_file_transfer::FILE_TRANSFER_BASE_URL_ENV;
use codex_file_transfer::FILE_TRANSFER_BEARER_TOKEN_ENV;
use codex_file_transfer::FILE_TRANSFER_USER_AGENT_ENV;
use codex_file_transfer::FileTransferRequest;
use codex_file_transfer::UploadFileToolResult;
use codex_protocol::models::FileSystemPermissions;
use codex_protocol::models::PermissionProfile;
use codex_utils_absolute_path::AbsolutePathBuf;
use serde::Deserialize;
use serde::Serialize;
use serde_json::Value;
use std::collections::HashMap;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
/// Handler for the `upload_file` and `download_file` built-in tools; delegates
/// the actual network transfer to a sandboxed helper process.
pub struct FileTransferHandler;
/// Arguments accepted by the `upload_file` tool call.
#[derive(Debug, Deserialize)]
struct UploadFileArgs {
// Local file to upload; resolved against the turn's working directory.
path: String,
}
/// Arguments accepted by the `download_file` tool call.
#[derive(Debug, Deserialize)]
struct DownloadFileArgs {
// Bare file id or an `openai-file://v1/{file_id}` URI.
file_id: String,
// Destination path; resolved against the turn's working directory.
path: String,
}
/// Result of checking a path against the base sandbox policy and the
/// effective policy (base merged with granted session/turn permissions).
#[derive(Debug)]
struct EffectivePathAccess {
// Allowed by the turn's base file-system sandbox policy alone.
base_allowed: bool,
// Allowed once granted permissions are merged in.
effective_allowed: bool,
}
#[async_trait]
impl ToolHandler for FileTransferHandler {
type Output = FunctionToolOutput;
fn kind(&self) -> ToolKind {
ToolKind::Function
}
// Both upload and download are treated as mutating operations.
async fn is_mutating(&self, _invocation: &ToolInvocation) -> bool {
true
}
/// Dispatches a function-call payload to the upload or download flow.
///
/// Returns a model-visible error for non-function payloads, malformed
/// arguments, an unparseable file id, or an unknown tool name.
async fn handle(&self, invocation: ToolInvocation) -> Result<Self::Output, FunctionCallError> {
let ToolInvocation {
session,
turn,
call_id,
tool_name,
payload,
..
} = invocation;
// Only plain function-call payloads are supported.
let arguments = match payload {
ToolPayload::Function { arguments } => arguments,
_ => {
return Err(FunctionCallError::RespondToModel(
"file transfer handler received unsupported payload".to_string(),
));
}
};
match tool_name.as_str() {
"upload_file" => {
// Resolve the relative `path` argument against the effective cwd.
let cwd = resolve_workdir_base_path(&arguments, turn.cwd.as_path())?;
let args: UploadFileArgs = parse_arguments(&arguments)?;
let path = crate::util::resolve_path(cwd.as_path(), &PathBuf::from(args.path));
handle_upload(&session, &turn, &call_id, &tool_name, path).await
}
"download_file" => {
let cwd = resolve_workdir_base_path(&arguments, turn.cwd.as_path())?;
let args: DownloadFileArgs = parse_arguments(&arguments)?;
let path = crate::util::resolve_path(cwd.as_path(), &PathBuf::from(args.path));
// Accept both bare ids and `openai-file://v1/...` URIs.
let file_id = parse_file_id(&args.file_id).ok_or_else(|| {
FunctionCallError::RespondToModel(
"download_file.file_id must be a bare file id or openai-file://v1/{file_id}"
.to_string(),
)
})?;
handle_download(&session, &turn, &call_id, &tool_name, file_id, path).await
}
_ => Err(FunctionCallError::RespondToModel(format!(
"unsupported file transfer tool `{tool_name}`"
))),
}
}
}
/// Validates an upload request and, if it passes all local checks, launches
/// the sandboxed helper to perform the transfer.
///
/// Pre-flight checks (each failure is returned as a structured JSON tool
/// result, not a hard error): sandbox path access, file existence, and that
/// the path is not a directory; then ChatGPT auth is required.
async fn handle_upload(
session: &std::sync::Arc<Session>,
turn: &std::sync::Arc<TurnContext>,
call_id: &str,
tool_name: &str,
path: PathBuf,
) -> Result<FunctionToolOutput, FunctionCallError> {
// Uploads read the source file, so check read access.
let access =
effective_path_access(session.as_ref(), turn.as_ref(), &path, AccessKind::Read).await;
if !access.effective_allowed {
return json_output(
&UploadFileToolResult {
ok: false,
file_id: None,
uri: None,
file_name: None,
file_size_bytes: None,
mime_type: None,
error_code: Some("sandbox_path_denied".to_string()),
message: Some(format!(
"upload path `{}` is outside the current sandbox",
path.display()
)),
retryable: Some(false),
http_status_code: None,
path: Some(path.display().to_string()),
},
/*success*/ false,
);
}
// Map I/O failure kinds onto the tool's error codes.
let metadata = match tokio::fs::metadata(&path).await {
Ok(metadata) => metadata,
Err(err) => {
let error_code = match err.kind() {
ErrorKind::NotFound => "path_not_found",
ErrorKind::PermissionDenied => "sandbox_path_denied",
_ => "upload_failed",
};
return json_output(
&UploadFileToolResult {
ok: false,
file_id: None,
uri: None,
file_name: path
.file_name()
.and_then(|name| name.to_str())
.map(str::to_string),
file_size_bytes: None,
mime_type: None,
error_code: Some(error_code.to_string()),
message: Some(format!("failed to inspect `{}`: {err}", path.display())),
retryable: Some(false),
http_status_code: None,
path: Some(path.display().to_string()),
},
/*success*/ false,
);
}
};
// Only regular files can be uploaded.
if metadata.is_dir() {
return json_output(
&UploadFileToolResult {
ok: false,
file_id: None,
uri: None,
file_name: None,
file_size_bytes: None,
mime_type: None,
error_code: Some("path_is_directory".to_string()),
message: Some(format!("upload path `{}` is a directory", path.display())),
retryable: Some(false),
http_status_code: None,
path: Some(path.display().to_string()),
},
/*success*/ false,
);
}
// Uploads require ChatGPT authentication specifically.
let auth = session.services.auth_manager.auth().await;
let Some(auth) = auth else {
return json_output(&upload_auth_required(&path), /*success*/ false);
};
if !auth.is_chatgpt_auth() {
return json_output(&upload_auth_required(&path), /*success*/ false);
}
let bearer_token = auth.get_token().map_err(|err| {
FunctionCallError::RespondToModel(format!("failed to load chatgpt auth token: {err}"))
})?;
// If the path is only reachable via granted (not base) permissions, the
// helper sandbox needs an explicit read grant for it.
let additional_permissions =
additional_permissions_for_access(&path, AccessKind::Read, &access).map_err(|err| {
FunctionCallError::RespondToModel(format!(
"failed to derive file transfer permissions: {err}"
))
})?;
let request = InternalFileTransferRequest {
request: FileTransferRequest::Upload { path: path.clone() },
cwd: turn.cwd.clone(),
env: helper_env(
turn.as_ref(),
&bearer_token,
auth.get_account_id().as_deref(),
),
network: turn.network.clone(),
sandbox_permissions: SandboxPermissions::UseDefault,
additional_permissions,
codex_exe: turn.codex_linux_sandbox_exe.clone(),
};
run_transfer(
request,
session,
turn,
call_id,
tool_name,
TransferKind::Upload,
)
.await
}
/// Validates a download request and, if it passes all local checks, launches
/// the sandboxed helper to perform the transfer.
///
/// When the destination is an existing directory the helper is told so (it
/// presumably derives the file name itself — confirm in the helper crate);
/// otherwise the destination's parent directory must already exist. Write
/// access is checked against the effective sandbox policy, and ChatGPT auth
/// is required. Pre-flight failures are returned as structured JSON results.
async fn handle_download(
session: &std::sync::Arc<Session>,
turn: &std::sync::Arc<TurnContext>,
call_id: &str,
tool_name: &str,
file_id: String,
path: PathBuf,
) -> Result<FunctionToolOutput, FunctionCallError> {
// A metadata failure (e.g. missing path) is treated as "not a directory".
let path_is_directory = tokio::fs::metadata(&path)
.await
.map(|metadata| metadata.is_dir())
.unwrap_or(false);
let access_path = path.clone();
if !path_is_directory {
// Writing a file requires an existing parent directory.
let Some(parent) = path.parent() else {
return json_output(
&DownloadFileToolResult {
ok: false,
file_id: Some(file_id.clone()),
uri: Some(openai_file_uri(&file_id)),
file_name: None,
mime_type: None,
destination_path: Some(path.display().to_string()),
bytes_written: None,
error_code: Some("destination_parent_missing".to_string()),
message: Some(format!(
"download destination `{}` has no parent directory",
path.display()
)),
retryable: Some(false),
http_status_code: None,
},
/*success*/ false,
);
};
if !parent.exists() {
return json_output(
&DownloadFileToolResult {
ok: false,
file_id: Some(file_id.clone()),
uri: Some(openai_file_uri(&file_id)),
file_name: None,
mime_type: None,
destination_path: Some(path.display().to_string()),
bytes_written: None,
error_code: Some("destination_parent_missing".to_string()),
message: Some(format!(
"download destination parent `{}` does not exist",
parent.display()
)),
retryable: Some(false),
http_status_code: None,
},
/*success*/ false,
);
}
}
// Downloads write to the destination, so check write access.
let access = effective_path_access(
session.as_ref(),
turn.as_ref(),
&access_path,
AccessKind::Write,
)
.await;
if !access.effective_allowed {
return json_output(
&DownloadFileToolResult {
ok: false,
file_id: Some(file_id.clone()),
uri: Some(openai_file_uri(&file_id)),
file_name: None,
mime_type: None,
destination_path: Some(path.display().to_string()),
bytes_written: None,
error_code: Some("sandbox_path_denied".to_string()),
message: Some(format!(
"download destination `{}` is outside the current sandbox",
path.display()
)),
retryable: Some(false),
http_status_code: None,
},
/*success*/ false,
);
}
// Downloads require ChatGPT authentication specifically.
let auth = session.services.auth_manager.auth().await;
let Some(auth) = auth else {
return json_output(
&download_auth_required(&file_id, &path),
/*success*/ false,
);
};
if !auth.is_chatgpt_auth() {
return json_output(
&download_auth_required(&file_id, &path),
/*success*/ false,
);
}
let bearer_token = auth.get_token().map_err(|err| {
FunctionCallError::RespondToModel(format!("failed to load chatgpt auth token: {err}"))
})?;
// If the destination is only reachable via granted (not base) permissions,
// the helper sandbox needs an explicit write grant for it.
let additional_permissions =
additional_permissions_for_access(&access_path, AccessKind::Write, &access).map_err(
|err| {
FunctionCallError::RespondToModel(format!(
"failed to derive file transfer permissions: {err}"
))
},
)?;
let request = InternalFileTransferRequest {
request: FileTransferRequest::Download {
file_id: file_id.clone(),
path: path.clone(),
path_is_directory,
},
cwd: turn.cwd.clone(),
env: helper_env(
turn.as_ref(),
&bearer_token,
auth.get_account_id().as_deref(),
),
network: turn.network.clone(),
sandbox_permissions: SandboxPermissions::UseDefault,
additional_permissions,
codex_exe: turn.codex_linux_sandbox_exe.clone(),
};
run_transfer(
request,
session,
turn,
call_id,
tool_name,
TransferKind::Download,
)
.await
}
/// Runs the sandboxed helper via the tool orchestrator and converts every
/// outcome — success, sandbox denial, rejection, or other error — into a
/// structured JSON tool result. Never surfaces orchestrator errors directly.
async fn run_transfer(
request: InternalFileTransferRequest,
session: &std::sync::Arc<Session>,
turn: &std::sync::Arc<TurnContext>,
call_id: &str,
tool_name: &str,
kind: TransferKind,
) -> Result<FunctionToolOutput, FunctionCallError> {
let mut orchestrator = ToolOrchestrator::new();
let mut runtime = FileTransferRuntime::new();
let tool_ctx = ToolCtx {
session: session.clone(),
turn: turn.clone(),
call_id: call_id.to_string(),
tool_name: tool_name.to_string(),
};
let output = orchestrator
.run(
&mut runtime,
&request,
&tool_ctx,
turn.as_ref(),
turn.approval_policy.value(),
)
.await;
match output {
// Helper ran; its stdout carries the real result (or a failure).
Ok(result) => parse_helper_output(result.output, kind, &request),
Err(crate::tools::sandboxing::ToolError::Codex(CodexErr::Sandbox(
SandboxErr::Denied {
network_policy_decision,
..
},
))) => {
// Distinguish a network-policy denial from other sandbox denials.
let content = if network_policy_decision.is_some() {
transfer_network_denied(kind, &request)
} else {
transfer_internal_error(
kind,
&request,
"file transfer helper could not run inside the sandbox".to_string(),
)
};
json_output_value(content, /*success*/ false)
}
Err(crate::tools::sandboxing::ToolError::Rejected(message)) => {
json_output_value(
transfer_internal_error(kind, &request, message),
/*success*/ false,
)
}
Err(crate::tools::sandboxing::ToolError::Codex(err)) => json_output_value(
transfer_internal_error(kind, &request, err.to_string()),
/*success*/ false,
),
}
}
/// Interprets the helper process output.
///
/// A non-zero exit becomes an `internal_helper_failed` result that keeps the
/// request context (path/file id). Otherwise stdout must be a JSON object;
/// its top-level `ok` boolean (defaulting to false when absent or non-bool)
/// drives the tool-output success flag.
fn parse_helper_output(
output: crate::exec::ExecToolCallOutput,
kind: TransferKind,
request: &InternalFileTransferRequest,
) -> Result<FunctionToolOutput, FunctionCallError> {
if output.exit_code != 0 {
return json_output_value(
transfer_internal_error(
kind,
request,
format!(
"file transfer helper exited with status {}: {}",
output.exit_code, output.stderr.text
),
),
/*success*/ false,
);
}
let parsed: Value = serde_json::from_str(&output.stdout.text).map_err(|err| {
FunctionCallError::RespondToModel(format!(
"file transfer helper returned invalid JSON: {err}"
))
})?;
let success = parsed.get("ok").and_then(Value::as_bool).unwrap_or(false);
let content = serde_json::to_string(&parsed).map_err(|err| {
FunctionCallError::RespondToModel(format!("failed to encode file transfer output: {err}"))
})?;
Ok(FunctionToolOutput::from_text(content, Some(success)))
}
/// Builds the environment variable map handed to the file-transfer helper
/// process: ChatGPT base URL, bearer token, user agent, and — when known —
/// the account id.
fn helper_env(
    turn: &TurnContext,
    bearer_token: &str,
    account_id: Option<&str>,
) -> HashMap<String, String> {
    let mut env = HashMap::new();
    env.insert(
        FILE_TRANSFER_BASE_URL_ENV.to_string(),
        turn.config.chatgpt_base_url.clone(),
    );
    env.insert(
        FILE_TRANSFER_BEARER_TOKEN_ENV.to_string(),
        bearer_token.to_string(),
    );
    env.insert(
        FILE_TRANSFER_USER_AGENT_ENV.to_string(),
        get_codex_user_agent(),
    );
    // The account id is optional; omit the variable entirely when unknown.
    if let Some(account_id) = account_id {
        env.insert(
            FILE_TRANSFER_ACCOUNT_ID_ENV.to_string(),
            account_id.to_string(),
        );
    }
    env
}
/// Evaluates a path against both the turn's base file-system sandbox policy
/// and the effective policy obtained by merging in permissions granted for
/// the session and the current turn.
///
/// The two flags let callers decide whether extra permissions must be passed
/// to the helper sandbox (allowed only via grants, not by the base policy).
async fn effective_path_access(
session: &Session,
turn: &TurnContext,
path: &Path,
kind: AccessKind,
) -> EffectivePathAccess {
let granted_permissions = merge_permission_profiles(
session.granted_session_permissions().await.as_ref(),
session.granted_turn_permissions().await.as_ref(),
);
let effective_policy = effective_file_system_sandbox_policy(
&turn.file_system_sandbox_policy,
granted_permissions.as_ref(),
);
// Base policy check, without any granted permissions.
let base_allowed = match kind {
AccessKind::Read => turn
.file_system_sandbox_policy
.can_read_path_with_cwd(path, turn.cwd.as_path()),
AccessKind::Write => turn
.file_system_sandbox_policy
.can_write_path_with_cwd(path, turn.cwd.as_path()),
};
// Same check against the merged (effective) policy.
let effective_allowed = match kind {
AccessKind::Read => effective_policy.can_read_path_with_cwd(path, turn.cwd.as_path()),
AccessKind::Write => effective_policy.can_write_path_with_cwd(path, turn.cwd.as_path()),
};
EffectivePathAccess {
base_allowed,
effective_allowed,
}
}
/// Derives the extra permission profile the helper sandbox needs for `path`.
///
/// Returns `Ok(None)` when the base policy already allows the access (no
/// extra grant needed) or when even the effective policy denies it (the
/// caller rejects the request before launching the helper). Otherwise builds
/// a minimal profile granting read or write access to exactly this path.
fn additional_permissions_for_access(
path: &Path,
kind: AccessKind,
access: &EffectivePathAccess,
) -> Result<Option<PermissionProfile>, String> {
if access.base_allowed || !access.effective_allowed {
return Ok(None);
}
// The profile requires an absolute path; relative paths are an error here.
let absolute_path = AbsolutePathBuf::from_absolute_path(path)
.map_err(|err| format!("invalid absolute path `{}`: {err}", path.display()))?;
let file_system = match kind {
AccessKind::Read => FileSystemPermissions {
read: Some(vec![absolute_path]),
write: None,
},
AccessKind::Write => FileSystemPermissions {
// Write grants deliberately carry an empty (not absent) read list.
read: Some(vec![]),
write: Some(vec![absolute_path]),
},
};
normalize_additional_permissions(PermissionProfile {
file_system: Some(file_system),
..Default::default()
})
.map(Some)
}
/// Extracts a bare file id from either a plain id or an
/// `openai-file://v1/{file_id}` URI.
///
/// Surrounding whitespace is ignored and anything after the first `/` in the
/// remaining text is discarded. Returns `None` when no non-empty id remains.
fn parse_file_id(value: &str) -> Option<String> {
    let candidate = value.trim();
    // Accept the URI form by stripping the scheme prefix when present.
    let without_scheme = match candidate.strip_prefix("openai-file://v1/") {
        Some(rest) => rest,
        None => candidate,
    };
    let file_id = without_scheme.split('/').next()?;
    if file_id.is_empty() {
        None
    } else {
        Some(file_id.to_owned())
    }
}
/// Builds the `chatgpt_auth_required` error result for an upload attempt
/// made without (or with non-ChatGPT) authentication.
fn upload_auth_required(path: &Path) -> UploadFileToolResult {
UploadFileToolResult {
ok: false,
file_id: None,
uri: None,
file_name: None,
file_size_bytes: None,
mime_type: None,
error_code: Some("chatgpt_auth_required".to_string()),
message: Some("chatgpt authentication is required to upload files".to_string()),
retryable: Some(false),
http_status_code: None,
path: Some(path.display().to_string()),
}
}
/// Builds the `chatgpt_auth_required` error result for a download attempt
/// made without (or with non-ChatGPT) authentication.
fn download_auth_required(file_id: &str, path: &Path) -> DownloadFileToolResult {
DownloadFileToolResult {
ok: false,
file_id: Some(file_id.to_string()),
uri: Some(openai_file_uri(file_id)),
file_name: None,
mime_type: None,
destination_path: Some(path.display().to_string()),
bytes_written: None,
error_code: Some("chatgpt_auth_required".to_string()),
message: Some("chatgpt authentication is required to download files".to_string()),
retryable: Some(false),
http_status_code: None,
}
}
/// Builds the `network_denied` error payload for the given transfer kind,
/// carrying over path/file-id context from the original request.
fn transfer_network_denied(kind: TransferKind, request: &InternalFileTransferRequest) -> Value {
match kind {
TransferKind::Upload => serialize_output_value(UploadFileToolResult {
ok: false,
file_id: None,
uri: None,
file_name: None,
file_size_bytes: None,
mime_type: None,
error_code: Some("network_denied".to_string()),
message: Some(
"network access for file transfer was denied by sandbox policy".to_string(),
),
retryable: Some(false),
http_status_code: None,
path: request_path(request),
}),
TransferKind::Download => serialize_output_value(DownloadFileToolResult {
ok: false,
// Downloads echo the requested file id and its canonical URI.
file_id: request_file_id(request),
uri: request_file_id(request)
.as_ref()
.map(|file_id| openai_file_uri(file_id)),
file_name: None,
mime_type: None,
destination_path: request_path(request),
bytes_written: None,
error_code: Some("network_denied".to_string()),
message: Some(
"network access for file transfer was denied by sandbox policy".to_string(),
),
retryable: Some(false),
http_status_code: None,
}),
}
}
/// Builds the `internal_helper_failed` error payload for the given transfer
/// kind with `message` as the human-readable detail, carrying over
/// path/file-id context from the original request.
fn transfer_internal_error(
kind: TransferKind,
request: &InternalFileTransferRequest,
message: String,
) -> Value {
match kind {
TransferKind::Upload => serialize_output_value(UploadFileToolResult {
ok: false,
// `request_file_id` is always `None` for uploads.
file_id: request_file_id(request),
uri: request_file_id(request)
.as_ref()
.map(|file_id| openai_file_uri(file_id)),
file_name: None,
file_size_bytes: None,
mime_type: None,
error_code: Some("internal_helper_failed".to_string()),
message: Some(message),
retryable: Some(false),
http_status_code: None,
path: request_path(request),
}),
TransferKind::Download => serialize_output_value(DownloadFileToolResult {
ok: false,
file_id: request_file_id(request),
uri: request_file_id(request)
.as_ref()
.map(|file_id| openai_file_uri(file_id)),
file_name: None,
mime_type: None,
destination_path: request_path(request),
bytes_written: None,
error_code: Some("internal_helper_failed".to_string()),
message: Some(message),
retryable: Some(false),
http_status_code: None,
}),
}
}
/// Display form of the local path carried by either request variant.
fn request_path(request: &InternalFileTransferRequest) -> Option<String> {
    let path = match &request.request {
        FileTransferRequest::Upload { path } => path,
        FileTransferRequest::Download { path, .. } => path,
    };
    Some(path.display().to_string())
}
/// File id associated with the request: downloads carry one, uploads do not.
fn request_file_id(request: &InternalFileTransferRequest) -> Option<String> {
    if let FileTransferRequest::Download { file_id, .. } = &request.request {
        Some(file_id.clone())
    } else {
        None
    }
}
/// Serializes `value` to a JSON string and wraps it in a `FunctionToolOutput`
/// carrying the given success flag. Serialization failure becomes a
/// model-visible error.
fn json_output<T: Serialize>(
value: &T,
success: bool,
) -> Result<FunctionToolOutput, FunctionCallError> {
let content = serde_json::to_string(value).map_err(|err| {
FunctionCallError::RespondToModel(format!("failed to encode file transfer output: {err}"))
})?;
Ok(FunctionToolOutput::from_text(content, Some(success)))
}
/// Like `json_output`, but for an already-built `serde_json::Value`.
// NOTE(review): this duplicates `json_output`'s encoding path; since `Value`
// implements `Serialize`, it could simply delegate to `json_output(&value,
// success)` — consider consolidating.
fn json_output_value(value: Value, success: bool) -> Result<FunctionToolOutput, FunctionCallError> {
let content = serde_json::to_string(&value).map_err(|err| {
FunctionCallError::RespondToModel(format!("failed to encode file transfer output: {err}"))
})?;
Ok(FunctionToolOutput::from_text(content, Some(success)))
}
/// Best-effort serialization to a JSON value: on failure, degrades to a JSON
/// string describing the error instead of propagating it, so error payloads
/// can always be emitted.
fn serialize_output_value<T: Serialize>(value: T) -> Value {
serde_json::to_value(value).unwrap_or_else(|err| {
Value::String(format!("failed to serialize file transfer output: {err}"))
})
}
/// Canonical `openai-file://v1/{file_id}` URI for a file id.
fn openai_file_uri(file_id: &str) -> String {
    const PREFIX: &str = "openai-file://v1/";
    // Single allocation: prefix length plus id length.
    let mut uri = String::with_capacity(PREFIX.len() + file_id.len());
    uri.push_str(PREFIX);
    uri.push_str(file_id);
    uri
}
/// Direction of a path access check: reading (upload source) or writing
/// (download destination).
#[derive(Clone, Copy, Debug)]
enum AccessKind {
Read,
Write,
}
/// Which transfer operation is in flight; selects the output payload shape.
#[derive(Clone, Copy, Debug)]
enum TransferKind {
Upload,
Download,
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::exec::ExecToolCallOutput;
    use crate::exec::StreamOutput;
    use pretty_assertions::assert_eq;

    /// Representative download request used as the context when parsing
    /// helper output in tests.
    fn download_request() -> InternalFileTransferRequest {
        InternalFileTransferRequest {
            request: FileTransferRequest::Download {
                file_id: "file-123".to_string(),
                path: PathBuf::from("/tmp/output.txt"),
                path_is_directory: false,
            },
            cwd: PathBuf::from("/tmp"),
            env: HashMap::new(),
            network: None,
            sandbox_permissions: SandboxPermissions::UseDefault,
            additional_permissions: None,
            codex_exe: None,
        }
    }

    /// A non-zero helper exit must become an `internal_helper_failed` payload
    /// that still carries the request's file id and destination path.
    #[test]
    fn parse_helper_output_preserves_request_context_on_nonzero_exit() {
        let output = ExecToolCallOutput {
            exit_code: 23,
            stdout: StreamOutput::new(String::new()),
            stderr: StreamOutput::new("boom".to_string()),
            aggregated_output: StreamOutput::new("boom".to_string()),
            duration: std::time::Duration::ZERO,
            timed_out: false,
        };
        let result =
            parse_helper_output(output, TransferKind::Download, &download_request()).unwrap();
        let payload: DownloadFileToolResult =
            serde_json::from_str(&result.into_text()).expect("valid json");
        // Use `assert!` on the negated bool rather than comparing against
        // `false` (clippy::bool_assert_comparison).
        assert!(!payload.ok);
        assert_eq!(payload.file_id, Some("file-123".to_string()));
        assert_eq!(
            payload.destination_path,
            Some("/tmp/output.txt".to_string())
        );
        assert_eq!(
            payload.error_code,
            Some("internal_helper_failed".to_string())
        );
        assert!(
            payload
                .message
                .as_deref()
                .is_some_and(|message| message.contains("status 23"))
        );
    }
}

View File

@@ -2,6 +2,7 @@ pub(crate) mod agent_jobs;
pub mod apply_patch;
mod artifacts;
mod dynamic;
mod file_transfer;
mod grep_files;
mod js_repl;
mod list_dir;
@@ -38,6 +39,7 @@ pub use artifacts::ArtifactsHandler;
use codex_protocol::models::PermissionProfile;
use codex_protocol::protocol::AskForApproval;
pub use dynamic::DynamicToolHandler;
pub use file_transfer::FileTransferHandler;
pub use grep_files::GrepFilesHandler;
pub use js_repl::JsReplHandler;
pub use js_repl::JsReplResetHandler;

View File

@@ -0,0 +1,162 @@
use crate::exec::ExecExpiration;
use crate::exec::ExecToolCallOutput;
use crate::sandboxing::CommandSpec;
use crate::sandboxing::SandboxPermissions;
use crate::sandboxing::execute_env;
use crate::tools::network_approval::NetworkApprovalMode;
use crate::tools::network_approval::NetworkApprovalSpec;
use crate::tools::sandboxing::Approvable;
use crate::tools::sandboxing::ApprovalCtx;
use crate::tools::sandboxing::ExecApprovalRequirement;
use crate::tools::sandboxing::SandboxAttempt;
use crate::tools::sandboxing::Sandboxable;
use crate::tools::sandboxing::SandboxablePreference;
use crate::tools::sandboxing::ToolCtx;
use crate::tools::sandboxing::ToolError;
use crate::tools::sandboxing::ToolRuntime;
use codex_file_transfer::CODEX_CORE_FILE_TRANSFER_ARG1;
use codex_file_transfer::FileTransferRequest;
use codex_network_proxy::NetworkProxy;
use codex_protocol::models::PermissionProfile;
use codex_protocol::protocol::AskForApproval;
use codex_protocol::protocol::ReviewDecision;
use futures::future::BoxFuture;
use std::collections::HashMap;
use std::path::PathBuf;
const DEFAULT_FILE_TRANSFER_TIMEOUT_MS: u64 = 120_000;
/// Fully-resolved file transfer request plus everything needed to launch the
/// sandboxed helper process that performs it.
#[derive(Clone, Debug)]
pub struct InternalFileTransferRequest {
// Upload/download payload; JSON-encoded and passed to the helper as an argument.
pub request: FileTransferRequest,
// Working directory for the helper process.
pub cwd: PathBuf,
// Environment variables for the helper (base URL, token, user agent, ...).
pub env: HashMap<String, String>,
// Optional network proxy; when `None`, no network approval spec is produced.
pub network: Option<NetworkProxy>,
// Sandbox permission mode to run the helper under.
pub sandbox_permissions: SandboxPermissions,
// Extra permissions granting access to the specific transfer path, if any.
pub additional_permissions: Option<PermissionProfile>,
// Explicit codex executable override; falls back to the current executable.
pub codex_exe: Option<PathBuf>,
}
/// Tool runtime that re-invokes the codex binary in its file-transfer helper
/// mode inside the sandbox.
#[derive(Default)]
pub struct FileTransferRuntime;
impl FileTransferRuntime {
pub fn new() -> Self {
Self
}
/// Builds the sandboxed command: the codex executable invoked with the
/// file-transfer arg1 marker and the JSON-encoded request, subject to the
/// default transfer timeout.
fn build_command_spec(req: &InternalFileTransferRequest) -> Result<CommandSpec, ToolError> {
// Prefer an explicit override; otherwise locate the running executable
// (Windows uses a platform-specific resolution helper).
let exe = if let Some(path) = &req.codex_exe {
path.clone()
} else {
#[cfg(target_os = "windows")]
{
codex_windows_sandbox::resolve_current_exe_for_launch(&req.cwd, "codex.exe")
}
#[cfg(not(target_os = "windows"))]
{
std::env::current_exe().map_err(|err| {
ToolError::Rejected(format!("failed to determine codex exe: {err}"))
})?
}
};
let request_json = serde_json::to_string(&req.request).map_err(|err| {
ToolError::Rejected(format!("failed to encode file transfer request: {err}"))
})?;
Ok(CommandSpec {
program: exe.to_string_lossy().to_string(),
args: vec![CODEX_CORE_FILE_TRANSFER_ARG1.to_string(), request_json],
cwd: req.cwd.clone(),
expiration: ExecExpiration::Timeout(std::time::Duration::from_millis(
DEFAULT_FILE_TRANSFER_TIMEOUT_MS,
)),
env: req.env.clone(),
sandbox_permissions: req.sandbox_permissions,
additional_permissions: req.additional_permissions.clone(),
justification: None,
})
}
/// Stream descriptor so the helper's stdout is forwarded as events for
/// this tool call.
fn stdout_stream(ctx: &ToolCtx) -> Option<crate::exec::StdoutStream> {
Some(crate::exec::StdoutStream {
sub_id: ctx.turn.sub_id.clone(),
call_id: ctx.call_id.clone(),
tx_event: ctx.session.get_tx_event(),
})
}
}
impl Sandboxable for FileTransferRuntime {
fn sandbox_preference(&self) -> SandboxablePreference {
SandboxablePreference::Auto
}
// NOTE(review): returning true presumably allows re-running outside the
// sandbox after a sandboxed failure — confirm orchestrator semantics.
fn escalate_on_failure(&self) -> bool {
true
}
}
impl Approvable<InternalFileTransferRequest> for FileTransferRuntime {
type ApprovalKey = ();
// No approval keys: transfers are never cached against a prior approval.
fn approval_keys(&self, _req: &InternalFileTransferRequest) -> Vec<Self::ApprovalKey> {
vec![]
}
// File transfers are auto-approved; path/auth checks happen in the handler.
fn start_approval_async<'a>(
&'a mut self,
_req: &'a InternalFileTransferRequest,
_ctx: ApprovalCtx<'a>,
) -> BoxFuture<'a, ReviewDecision> {
Box::pin(async { ReviewDecision::Approved })
}
// Whether a no-sandbox retry may prompt the user under each approval policy.
fn wants_no_sandbox_approval(&self, policy: AskForApproval) -> bool {
match policy {
AskForApproval::Never => false,
AskForApproval::Granular(granular_config) => granular_config.allows_sandbox_approval(),
AskForApproval::OnFailure => true,
AskForApproval::OnRequest => true,
AskForApproval::UnlessTrusted => true,
}
}
// Skip exec approval entirely, without bypassing the sandbox.
fn exec_approval_requirement(
&self,
_req: &InternalFileTransferRequest,
) -> Option<ExecApprovalRequirement> {
Some(ExecApprovalRequirement::Skip {
bypass_sandbox: false,
proposed_execpolicy_amendment: None,
})
}
}
impl ToolRuntime<InternalFileTransferRequest, ExecToolCallOutput> for FileTransferRuntime {
/// Deferred network approval when a proxy is configured; `None` otherwise.
fn network_approval_spec(
&self,
req: &InternalFileTransferRequest,
_ctx: &ToolCtx,
) -> Option<NetworkApprovalSpec> {
// Early-return None when no network proxy is configured.
req.network.as_ref()?;
Some(NetworkApprovalSpec {
network: req.network.clone(),
mode: NetworkApprovalMode::Deferred,
})
}
/// Launches the helper with the sandbox attempt's environment and streams
/// its stdout back to the session.
async fn run(
&mut self,
req: &InternalFileTransferRequest,
attempt: &SandboxAttempt<'_>,
ctx: &ToolCtx,
) -> Result<ExecToolCallOutput, ToolError> {
let spec = Self::build_command_spec(req)?;
let env = attempt
.env_for(spec, req.network.as_ref())
.map_err(|err| ToolError::Codex(err.into()))?;
execute_env(env, Self::stdout_stream(ctx))
.await
.map_err(ToolError::Codex)
}
}

View File

@@ -16,6 +16,7 @@ use std::collections::HashMap;
use std::path::Path;
pub mod apply_patch;
pub mod file_transfer;
pub mod shell;
pub mod unified_exec;

View File

@@ -98,6 +98,87 @@ fn unified_exec_output_schema() -> JsonValue {
})
}
/// Schema properties shared by every file-transfer error payload:
/// `error_code`, `message`, `retryable`, and `http_status_code`.
fn file_transfer_error_properties() -> serde_json::Map<String, JsonValue> {
    let mut properties = serde_json::Map::new();
    properties.insert("error_code".to_string(), json!({"type": "string"}));
    properties.insert("message".to_string(), json!({"type": "string"}));
    properties.insert("retryable".to_string(), json!({"type": "boolean"}));
    properties.insert("http_status_code".to_string(), json!({"type": "number"}));
    properties
}
/// JSON output schema for the `upload_file` tool: a `oneOf` over a success
/// object (`ok: true` plus file metadata) and an error object (`ok: false`
/// plus the shared error properties).
fn upload_file_output_schema() -> JsonValue {
    let mut success_properties = serde_json::Map::from_iter([
        ("ok".to_string(), json!({"type": "boolean", "enum": [true]})),
        ("file_id".to_string(), json!({"type": "string"})),
        ("uri".to_string(), json!({"type": "string"})),
        ("file_name".to_string(), json!({"type": "string"})),
        ("file_size_bytes".to_string(), json!({"type": "number"})),
        ("mime_type".to_string(), json!({"type": "string"})),
        ("path".to_string(), json!({"type": "string"})),
    ]);
    let mut error_properties = file_transfer_error_properties();
    error_properties.insert(
        "ok".to_string(),
        json!({"type": "boolean", "enum": [false]}),
    );
    error_properties.insert("path".to_string(), json!({"type": "string"}));
    // Let the success arm (additionalProperties: false) also accept the
    // error-only keys, but WITHOUT clobbering its existing definitions: a
    // plain `extend` would overwrite the success arm's `"ok"` property
    // (enum [true]) with the error arm's enum [false], making the success
    // branch unsatisfiable for genuine `ok: true` payloads.
    for (key, schema) in error_properties.clone() {
        success_properties.entry(key).or_insert(schema);
    }
    json!({
        "oneOf": [
            {
                "type": "object",
                "properties": success_properties,
                "required": ["ok", "file_id", "uri", "file_name", "file_size_bytes", "mime_type", "path"],
                "additionalProperties": false
            },
            {
                "type": "object",
                "properties": error_properties,
                "required": ["ok", "error_code", "message", "retryable"],
                "additionalProperties": false
            }
        ]
    })
}
/// JSON output schema for the `download_file` tool: a `oneOf` over a success
/// object (`ok: true` plus download metadata) and an error object
/// (`ok: false` plus the shared error properties and request echo fields).
fn download_file_output_schema() -> JsonValue {
    let mut success_properties = serde_json::Map::from_iter([
        ("ok".to_string(), json!({"type": "boolean", "enum": [true]})),
        ("file_id".to_string(), json!({"type": "string"})),
        ("uri".to_string(), json!({"type": "string"})),
        ("file_name".to_string(), json!({"type": "string"})),
        ("mime_type".to_string(), json!({"type": "string"})),
        ("destination_path".to_string(), json!({"type": "string"})),
        ("bytes_written".to_string(), json!({"type": "number"})),
    ]);
    let mut error_properties = file_transfer_error_properties();
    error_properties.insert(
        "ok".to_string(),
        json!({"type": "boolean", "enum": [false]}),
    );
    error_properties.insert("file_id".to_string(), json!({"type": "string"}));
    error_properties.insert("uri".to_string(), json!({"type": "string"}));
    error_properties.insert("destination_path".to_string(), json!({"type": "string"}));
    // Let the success arm (additionalProperties: false) also accept the
    // error-only keys, but WITHOUT clobbering its existing definitions: a
    // plain `extend` would overwrite the success arm's `"ok"` property
    // (enum [true]) with the error arm's enum [false], making the success
    // branch unsatisfiable for genuine `ok: true` payloads.
    for (key, schema) in error_properties.clone() {
        success_properties.entry(key).or_insert(schema);
    }
    json!({
        "oneOf": [
            {
                "type": "object",
                "properties": success_properties,
                "required": ["ok", "file_id", "uri", "file_name", "mime_type", "destination_path", "bytes_written"],
                "additionalProperties": false
            },
            {
                "type": "object",
                "properties": error_properties,
                "required": ["ok", "file_id", "uri", "destination_path", "error_code", "message", "retryable"],
                "additionalProperties": false
            }
        ]
    })
}
fn agent_status_output_schema() -> JsonValue {
json!({
"oneOf": [
@@ -1972,6 +2053,67 @@ fn create_read_file_tool() -> ToolSpec {
})
}
/// Declares the `upload_file` tool spec: takes a local sandboxed `path` and
/// returns an `openai-file://v1/{file_id}` URI for Codex Apps MCP tools.
fn create_upload_file_tool() -> ToolSpec {
    let mut properties = BTreeMap::new();
    properties.insert(
        "path".to_string(),
        JsonSchema::String {
            description: Some(
                "Path to a local file to upload. May be relative to the current working directory or absolute. Directories are not allowed. Use this when a Codex Apps MCP tool explicitly expects an `openai-file://` URI rather than raw file contents or a local path."
                    .to_string(),
            ),
        },
    );
    // `path` is the sole, mandatory argument.
    let parameters = JsonSchema::Object {
        properties,
        required: Some(vec!["path".to_string()]),
        additional_properties: Some(false.into()),
    };
    ToolSpec::Function(ResponsesApiTool {
        name: "upload_file".to_string(),
        description: "Uploads a local sandboxed file to OpenAI file storage for use with Codex Apps MCP tools that explicitly accept `openai-file://` URIs, and returns an `openai-file://v1/{file_id}` URI.".to_string(),
        strict: false,
        defer_loading: None,
        parameters,
        output_schema: Some(upload_file_output_schema()),
    })
}
/// Declares the `download_file` tool spec: takes a `file_id` (bare id or
/// `openai-file://v1/{file_id}`) and a destination `path`, and downloads the
/// file into the local sandbox with strict `cp` destination semantics.
fn create_download_file_tool() -> ToolSpec {
    let mut properties = BTreeMap::new();
    properties.insert(
        "file_id".to_string(),
        JsonSchema::String {
            description: Some(
                "File id to download. Accepts either a bare id or `openai-file://v1/{file_id}`. Use this for files returned by Codex Apps MCP tools in `openai-file://` form."
                    .to_string(),
            ),
        },
    );
    properties.insert(
        "path".to_string(),
        JsonSchema::String {
            description: Some(
                "Destination path. If this path is an existing directory, the downloaded file is written into it using the remote file name; otherwise the file is written exactly to this path."
                    .to_string(),
            ),
        },
    );
    // Both arguments are mandatory.
    let parameters = JsonSchema::Object {
        properties,
        required: Some(vec!["file_id".to_string(), "path".to_string()]),
        additional_properties: Some(false.into()),
    };
    ToolSpec::Function(ResponsesApiTool {
        name: "download_file".to_string(),
        description: "Downloads an OpenAI file, typically one returned by a Codex Apps MCP tool as an `openai-file://` URI, into the local sandbox using strict `cp` destination semantics.".to_string(),
        strict: false,
        defer_loading: None,
        parameters,
        output_schema: Some(download_file_output_schema()),
    })
}
fn create_list_dir_tool() -> ToolSpec {
let properties = BTreeMap::from([
(
@@ -2516,6 +2658,7 @@ pub(crate) fn build_specs_with_discoverable_tools(
use crate::tools::handlers::CodeModeExecuteHandler;
use crate::tools::handlers::CodeModeWaitHandler;
use crate::tools::handlers::DynamicToolHandler;
use crate::tools::handlers::FileTransferHandler;
use crate::tools::handlers::GrepFilesHandler;
use crate::tools::handlers::JsReplHandler;
use crate::tools::handlers::JsReplResetHandler;
@@ -2555,6 +2698,7 @@ pub(crate) fn build_specs_with_discoverable_tools(
let request_user_input_handler = Arc::new(RequestUserInputHandler {
default_mode_request_user_input: config.default_mode_request_user_input,
});
let file_transfer_handler = Arc::new(FileTransferHandler);
let tool_suggest_handler = Arc::new(ToolSuggestHandler);
let code_mode_handler = Arc::new(CodeModeExecuteHandler);
let code_mode_wait_handler = Arc::new(CodeModeWaitHandler);
@@ -2817,6 +2961,22 @@ pub(crate) fn build_specs_with_discoverable_tools(
builder.register_handler("read_file", read_file_handler);
}
push_tool_spec(
&mut builder,
create_upload_file_tool(),
/*supports_parallel_tool_calls*/ true,
config.code_mode_enabled,
);
builder.register_handler("upload_file", file_transfer_handler.clone());
push_tool_spec(
&mut builder,
create_download_file_tool(),
/*supports_parallel_tool_calls*/ true,
config.code_mode_enabled,
);
builder.register_handler("download_file", file_transfer_handler);
if config
.experimental_supported_tools
.iter()

View File

@@ -455,6 +455,8 @@ fn test_full_toolset_specs_for_gpt5_codex_unified_exec_web_search() {
PLAN_TOOL.clone(),
create_request_user_input_tool(CollaborationModesConfig::default()),
create_apply_patch_freeform_tool(),
create_upload_file_tool(),
create_download_file_tool(),
ToolSpec::WebSearch {
external_web_access: Some(true),
filters: None,
@@ -1191,6 +1193,8 @@ fn test_build_specs_gpt5_codex_default() {
"update_plan",
"request_user_input",
"apply_patch",
"upload_file",
"download_file",
"web_search",
"view_image",
"spawn_agent",
@@ -1214,6 +1218,8 @@ fn test_build_specs_gpt51_codex_default() {
"update_plan",
"request_user_input",
"apply_patch",
"upload_file",
"download_file",
"web_search",
"view_image",
"spawn_agent",
@@ -1239,6 +1245,8 @@ fn test_build_specs_gpt5_codex_unified_exec_web_search() {
"update_plan",
"request_user_input",
"apply_patch",
"upload_file",
"download_file",
"web_search",
"view_image",
"spawn_agent",
@@ -1264,6 +1272,8 @@ fn test_build_specs_gpt51_codex_unified_exec_web_search() {
"update_plan",
"request_user_input",
"apply_patch",
"upload_file",
"download_file",
"web_search",
"view_image",
"spawn_agent",
@@ -1287,6 +1297,8 @@ fn test_gpt_5_1_codex_max_defaults() {
"update_plan",
"request_user_input",
"apply_patch",
"upload_file",
"download_file",
"web_search",
"view_image",
"spawn_agent",
@@ -1310,6 +1322,8 @@ fn test_codex_5_1_mini_defaults() {
"update_plan",
"request_user_input",
"apply_patch",
"upload_file",
"download_file",
"web_search",
"view_image",
"spawn_agent",
@@ -1332,6 +1346,8 @@ fn test_gpt_5_defaults() {
&[
"update_plan",
"request_user_input",
"upload_file",
"download_file",
"web_search",
"view_image",
"spawn_agent",
@@ -1355,6 +1371,8 @@ fn test_gpt_5_1_defaults() {
"update_plan",
"request_user_input",
"apply_patch",
"upload_file",
"download_file",
"web_search",
"view_image",
"spawn_agent",
@@ -1380,6 +1398,8 @@ fn test_gpt_5_1_codex_max_unified_exec_web_search() {
"update_plan",
"request_user_input",
"apply_patch",
"upload_file",
"download_file",
"web_search",
"view_image",
"spawn_agent",
@@ -1504,6 +1524,8 @@ fn test_parallel_support_flags() {
assert!(find_tool(&tools, "grep_files").supports_parallel_tool_calls);
assert!(find_tool(&tools, "list_dir").supports_parallel_tool_calls);
assert!(find_tool(&tools, "read_file").supports_parallel_tool_calls);
assert!(find_tool(&tools, "upload_file").supports_parallel_tool_calls);
assert!(find_tool(&tools, "download_file").supports_parallel_tool_calls);
}
#[test]
@@ -1515,6 +1537,8 @@ fn test_test_model_info_includes_sync_tool() {
"read_file".to_string(),
"grep_files".to_string(),
"list_dir".to_string(),
"upload_file".to_string(),
"download_file".to_string(),
];
let features = Features::with_defaults();
let available_models = Vec::new();
@@ -1539,6 +1563,16 @@ fn test_test_model_info_includes_sync_tool() {
.iter()
.any(|tool| tool_name(&tool.spec) == "read_file")
);
assert!(
tools
.iter()
.any(|tool| tool_name(&tool.spec) == "upload_file")
);
assert!(
tools
.iter()
.any(|tool| tool_name(&tool.spec) == "download_file")
);
assert!(
tools
.iter()
@@ -1547,6 +1581,67 @@ fn test_test_model_info_includes_sync_tool() {
assert!(tools.iter().any(|tool| tool_name(&tool.spec) == "list_dir"));
}
#[test]
fn test_models_json_default_model_includes_file_transfer_tools() {
    // test_config() is invoked for its setup side effects; the value itself
    // is unused (hence the leading underscore) — TODO confirm against
    // test_config's definition.
    let _config = test_config();
    let model_info = model_info_from_models_json("gpt-5.3-codex");
    let features = Features::with_defaults();
    let available_models = Vec::new();
    let tools_config = ToolsConfig::new(&ToolsConfigParams {
        model_info: &model_info,
        available_models: &available_models,
        features: &features,
        web_search_mode: Some(WebSearchMode::Cached),
        session_source: SessionSource::Cli,
        sandbox_policy: &SandboxPolicy::DangerFullAccess,
        windows_sandbox_level: WindowsSandboxLevel::Disabled,
    });
    let (tools, _) = build_specs(&tools_config, None, None, &[]).build();
    // Both file-transfer tools must be part of the default toolset.
    for expected in ["upload_file", "download_file"] {
        assert!(tools.iter().any(|tool| tool_name(&tool.spec) == expected));
    }
}
#[test]
fn test_file_transfer_tools_are_not_experimentally_gated() {
    // test_config() is invoked for its setup side effects; the value itself
    // is unused (hence the leading underscore) — TODO confirm against
    // test_config's definition.
    let _config = test_config();
    // Strip the file-transfer tools from the experimental allowlist to prove
    // their registration does not depend on that gate.
    let mut model_info = model_info_from_models_json("gpt-5-codex");
    model_info
        .experimental_supported_tools
        .retain(|tool| tool != "upload_file" && tool != "download_file");
    let features = Features::with_defaults();
    let available_models = Vec::new();
    let tools_config = ToolsConfig::new(&ToolsConfigParams {
        model_info: &model_info,
        available_models: &available_models,
        features: &features,
        web_search_mode: Some(WebSearchMode::Cached),
        session_source: SessionSource::Cli,
        sandbox_policy: &SandboxPolicy::DangerFullAccess,
        windows_sandbox_level: WindowsSandboxLevel::Disabled,
    });
    let (tools, _) = build_specs(&tools_config, None, None, &[]).build();
    // The tools must still be registered even without the experimental gate.
    for expected in ["upload_file", "download_file"] {
        assert!(tools.iter().any(|tool| tool_name(&tool.spec) == expected));
    }
}
#[test]
fn test_build_specs_mcp_tools_converted() {
let config = test_config();

View File

@@ -175,6 +175,8 @@ async fn prompt_tools_are_consistent_across_requests() -> anyhow::Result<()> {
"update_plan",
"request_user_input",
"apply_patch",
"upload_file",
"download_file",
"web_search",
"view_image",
"spawn_agent",

View File

@@ -0,0 +1,6 @@
# Bazel target for the codex-file-transfer Rust crate (crate name
# `codex_file_transfer`), built via the shared codex_rust_crate macro.
load("//:defs.bzl", "codex_rust_crate")
codex_rust_crate(
    name = "file-transfer",
    crate_name = "codex_file_transfer",
)

View File

@@ -0,0 +1,29 @@
# Manifest for the codex-file-transfer crate, which backs the sandboxed
# upload_file / download_file tools.
[package]
name = "codex-file-transfer"
version.workspace = true
edition.workspace = true
license.workspace = true
publish = false
[lib]
name = "codex_file_transfer"
path = "src/lib.rs"
[lints]
workspace = true
# HTTP plumbing: reqwest with streaming bodies, tokio fs/io for local file
# access, tokio-util's io feature to bridge AsyncRead into request streams.
[dependencies]
anyhow = { workspace = true }
codex-client = { workspace = true }
mime_guess = { workspace = true }
reqwest = { workspace = true, features = ["json", "stream"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["fs", "io-util"] }
tokio-util = { workspace = true, features = ["io"] }
# Tests exercise HTTP flows against a wiremock server in temp directories.
[dev-dependencies]
pretty_assertions = { workspace = true }
tempfile = { workspace = true }
tokio = { workspace = true, features = ["macros", "rt-multi-thread"] }
wiremock = { workspace = true }

File diff suppressed because it is too large Load Diff