Compare commits

...

1 Commits

Author SHA1 Message Date
Xin Lin
369d383199 feat: fallback curated plugin download from backend endpoint. 2026-04-06 12:39:05 -07:00
4 changed files with 540 additions and 10 deletions

View File

@@ -9,6 +9,7 @@ mod mentions;
mod remote;
mod render;
mod startup_sync;
mod startup_sync_export;
mod store;
#[cfg(test)]
pub(crate) mod test_support;

View File

@@ -20,6 +20,9 @@ use codex_login::AuthManager;
use codex_login::default_client::build_reqwest_client;
use super::PluginsManager;
use super::startup_sync_export::CURATED_PLUGINS_EXPORT_API_URL;
use super::startup_sync_export::fetch_curated_repo_export_zip;
use super::startup_sync_export::read_extracted_repo_git_sha;
const GITHUB_API_BASE_URL: &str = "https://api.github.com";
const GITHUB_API_ACCEPT_HEADER: &str = "application/vnd.github+json";
@@ -28,6 +31,7 @@ const OPENAI_PLUGINS_OWNER: &str = "openai";
const OPENAI_PLUGINS_REPO: &str = "plugins";
const CURATED_PLUGINS_RELATIVE_DIR: &str = ".tmp/plugins";
const CURATED_PLUGINS_SHA_FILE: &str = ".tmp/plugins.sha";
const CURATED_PLUGINS_EXPORT_FALLBACK_VERSION: &str = "export-backup";
const CURATED_PLUGINS_GIT_TIMEOUT: Duration = Duration::from_secs(30);
const CURATED_PLUGINS_HTTP_TIMEOUT: Duration = Duration::from_secs(30);
// Keep this comfortably above a normal sync attempt so we do not race another Codex process.
@@ -59,13 +63,19 @@ pub(crate) fn read_curated_plugins_sha(codex_home: &Path) -> Option<String> {
}
pub(crate) fn sync_openai_plugins_repo(codex_home: &Path) -> Result<String, String> {
sync_openai_plugins_repo_with_transport_overrides(codex_home, "git", GITHUB_API_BASE_URL)
sync_openai_plugins_repo_with_transport_overrides(
codex_home,
"git",
GITHUB_API_BASE_URL,
CURATED_PLUGINS_EXPORT_API_URL,
)
}
fn sync_openai_plugins_repo_with_transport_overrides(
codex_home: &Path,
git_binary: &str,
api_base_url: &str,
export_api_url: &str,
) -> Result<String, String> {
match sync_openai_plugins_repo_via_git(codex_home, git_binary) {
Ok(remote_sha) => {
@@ -80,11 +90,42 @@ fn sync_openai_plugins_repo_with_transport_overrides(
git_binary,
"git sync failed for curated plugin sync; falling back to GitHub HTTP"
);
let result = sync_openai_plugins_repo_via_http(codex_home, api_base_url);
let status = if result.is_ok() { "success" } else { "failure" };
emit_curated_plugins_startup_sync_metric("http", status);
emit_curated_plugins_startup_sync_final_metric("http", status);
result
match sync_openai_plugins_repo_via_http(codex_home, api_base_url) {
Ok(remote_sha) => {
emit_curated_plugins_startup_sync_metric("http", "success");
emit_curated_plugins_startup_sync_final_metric("http", "success");
Ok(remote_sha)
}
Err(http_err) => {
emit_curated_plugins_startup_sync_metric("http", "failure");
if has_local_curated_plugins_snapshot(codex_home) {
emit_curated_plugins_startup_sync_final_metric("http", "failure");
warn!(
error = %http_err,
"GitHub HTTP sync failed for curated plugin sync; skipping export archive fallback because a local curated plugins snapshot already exists"
);
Err(format!(
"git sync failed for curated plugin sync: {err}; GitHub HTTP sync failed for curated plugin sync: {http_err}; export archive fallback skipped because a local curated plugins snapshot already exists"
))
} else {
warn!(
error = %http_err,
export_api_url,
"GitHub HTTP sync failed for curated plugin sync; falling back to export archive"
);
let result =
sync_openai_plugins_repo_via_export_archive(codex_home, export_api_url);
let status = if result.is_ok() { "success" } else { "failure" };
emit_curated_plugins_startup_sync_metric("export_archive", status);
emit_curated_plugins_startup_sync_final_metric("export_archive", status);
result.map_err(|export_err| {
format!(
"git sync failed for curated plugin sync: {err}; GitHub HTTP sync failed for curated plugin sync: {http_err}; export archive sync failed for curated plugin sync: {export_err}"
)
})
}
}
}
}
}
}
@@ -152,6 +193,27 @@ fn sync_openai_plugins_repo_via_http(
Ok(remote_sha)
}
/// Last-resort curated plugins sync: downloads the repo as a zip archive from the
/// backend export endpoint, bypassing both git and the GitHub API.
///
/// Returns the synced version string — the git SHA recorded inside the extracted
/// archive when one is present, otherwise the fixed fallback marker
/// `CURATED_PLUGINS_EXPORT_FALLBACK_VERSION`.
fn sync_openai_plugins_repo_via_export_archive(
    codex_home: &Path,
    export_api_url: &str,
) -> Result<String, String> {
    let repo_path = curated_plugins_repo_path(codex_home);
    let sha_path = codex_home.join(CURATED_PLUGINS_SHA_FILE);
    // This function is synchronous, so build a small current-thread runtime just
    // to drive the async HTTP download below.
    let runtime = tokio::runtime::Builder::new_current_thread()
        .enable_all()
        .build()
        .map_err(|err| format!("failed to create curated plugins sync runtime: {err}"))?;
    // Extract into a staging temp dir first; activate_curated_repo later swaps it
    // into the final repo_path, so a failed download never corrupts live state.
    let staged_repo_dir = prepare_curated_repo_parent_and_temp_dir(&repo_path)?;
    let zipball_bytes = runtime.block_on(fetch_curated_repo_export_zip(export_api_url))?;
    extract_zipball_to_dir(&zipball_bytes, staged_repo_dir.path())?;
    // Sanity-check the archive contents before activating them.
    ensure_marketplace_manifest_exists(staged_repo_dir.path())?;
    let export_version = read_extracted_repo_git_sha(staged_repo_dir.path())?
        .unwrap_or_else(|| CURATED_PLUGINS_EXPORT_FALLBACK_VERSION.to_string());
    // Activate first, then record the version, so the SHA file is only written
    // once the new snapshot is actually in place.
    activate_curated_repo(&repo_path, staged_repo_dir)?;
    write_curated_plugins_sha(&sha_path, &export_version)?;
    Ok(export_version)
}
pub(super) fn start_startup_remote_plugin_sync_once(
manager: Arc<PluginsManager>,
codex_home: PathBuf,
@@ -213,11 +275,15 @@ fn startup_remote_plugin_sync_marker_path(codex_home: &Path) -> PathBuf {
codex_home.join(STARTUP_REMOTE_PLUGIN_SYNC_MARKER_FILE)
}
fn startup_remote_plugin_sync_prerequisites_ready(codex_home: &Path) -> bool {
codex_home
.join(".tmp/plugins/.agents/plugins/marketplace.json")
fn has_local_curated_plugins_snapshot(codex_home: &Path) -> bool {
curated_plugins_repo_path(codex_home)
.join(".agents/plugins/marketplace.json")
.is_file()
&& codex_home.join(".tmp/plugins.sha").is_file()
&& codex_home.join(CURATED_PLUGINS_SHA_FILE).is_file()
}
fn startup_remote_plugin_sync_prerequisites_ready(codex_home: &Path) -> bool {
has_local_curated_plugins_snapshot(codex_home)
}
async fn wait_for_startup_remote_plugin_sync_prerequisites(codex_home: &Path) -> bool {

View File

@@ -0,0 +1,142 @@
use std::fs;
use std::path::Path;
use std::time::Duration;
use codex_login::default_client::build_reqwest_client;
use reqwest::Client;
use serde::Deserialize;
pub(super) const CURATED_PLUGINS_EXPORT_API_URL: &str =
"https://chatgpt.com/backend-api/plugins/export/curated";
const CURATED_PLUGINS_EXPORT_TIMEOUT: Duration = Duration::from_secs(30);
/// JSON body returned by the curated plugins export endpoint.
#[derive(Debug, Deserialize)]
struct CuratedPluginsExportResponse {
    /// URL of the zip archive to download. An empty string is treated as missing
    /// by `fetch_curated_repo_export_zip`.
    download_url: String,
}
/// Downloads the curated plugins zip archive advertised by the export endpoint.
///
/// First fetches the export metadata JSON from `export_api_url`, then follows
/// the `download_url` it contains and returns the raw archive bytes.
pub(super) async fn fetch_curated_repo_export_zip(export_api_url: &str) -> Result<Vec<u8>, String> {
    let client = build_reqwest_client();
    let metadata_json = fetch_public_text(
        &client,
        export_api_url,
        "get curated plugins export archive metadata",
    )
    .await?;
    let parsed: CuratedPluginsExportResponse =
        serde_json::from_str(&metadata_json).map_err(|err| {
            format!("failed to parse curated plugins export response from {export_api_url}: {err}")
        })?;
    let download_url = parsed.download_url;
    if download_url.is_empty() {
        return Err(format!(
            "curated plugins export response from {export_api_url} did not include a download URL"
        ));
    }
    fetch_public_bytes(
        &client,
        &download_url,
        "download curated plugins export archive",
    )
    .await
}
/// Best-effort extraction of the commit SHA recorded in an extracted repo's `.git`.
///
/// Returns `Ok(None)` when there is no `.git` directory at all. Otherwise `HEAD`
/// is resolved either directly (detached HEAD already holds the SHA) or through
/// the symbolic ref it points at.
pub(super) fn read_extracted_repo_git_sha(repo_path: &Path) -> Result<Option<String>, String> {
    let git_dir = repo_path.join(".git");
    if !git_dir.is_dir() {
        return Ok(None);
    }
    let head_path = git_dir.join("HEAD");
    let raw_head = fs::read_to_string(&head_path).map_err(|err| {
        format!(
            "failed to read curated plugins export git HEAD {}: {err}",
            head_path.display()
        )
    })?;
    match raw_head.trim() {
        "" => Err(format!(
            "curated plugins export git HEAD is empty at {}",
            head_path.display()
        )),
        head => match head.strip_prefix("ref: ") {
            // Symbolic HEAD: follow the named ref to its SHA.
            Some(reference) => read_git_ref_sha(&git_dir, reference.trim()).map(Some),
            // Detached HEAD: the file already contains the SHA itself.
            None => Ok(Some(head.to_owned())),
        },
    }
}
/// Resolves `reference` (e.g. `refs/heads/main`) inside `git_dir` to a commit SHA.
///
/// Checks the loose ref file first, then falls back to scanning `packed-refs`;
/// errors if the ref cannot be resolved either way.
fn read_git_ref_sha(git_dir: &Path, reference: &str) -> Result<String, String> {
    let ref_path = git_dir.join(reference);
    if let Ok(contents) = fs::read_to_string(&ref_path) {
        let sha = contents.trim();
        return if sha.is_empty() {
            Err(format!(
                "curated plugins export git ref {reference} is empty at {}",
                ref_path.display()
            ))
        } else {
            Ok(sha.to_owned())
        };
    }
    // No loose ref: scan packed-refs, skipping comment (#) and peeled (^) lines.
    if let Ok(packed_refs) = fs::read_to_string(git_dir.join("packed-refs")) {
        for line in packed_refs.lines() {
            let entry = line.trim();
            if entry.is_empty() || entry.starts_with('#') || entry.starts_with('^') {
                continue;
            }
            if let Some((sha, candidate_ref)) = entry.split_once(' ') {
                if candidate_ref == reference {
                    return Ok(sha.to_owned());
                }
            }
        }
    }
    Err(format!(
        "failed to resolve curated plugins export git ref {reference} from {}",
        git_dir.display()
    ))
}
/// GETs `url` without auth and returns the response body as text.
///
/// Non-2xx responses become an error carrying the status plus whatever body could
/// be read (best-effort). A body-read failure on a *successful* response is now
/// reported as an error instead of being silently treated as an empty body
/// (previously `text().await.unwrap_or_default()` turned it into `Ok("")`),
/// matching the error handling of `fetch_public_bytes`.
async fn fetch_public_text(client: &Client, url: &str, context: &str) -> Result<String, String> {
    let response = client
        .get(url)
        .timeout(CURATED_PLUGINS_EXPORT_TIMEOUT)
        .send()
        .await
        .map_err(|err| format!("failed to {context} from {url}: {err}"))?;
    let status = response.status();
    if !status.is_success() {
        // Body is only used to enrich the error message; ignore read failures here.
        let body = response.text().await.unwrap_or_default();
        return Err(format!(
            "{context} from {url} failed with status {status}: {body}"
        ));
    }
    response
        .text()
        .await
        .map_err(|err| format!("failed to read {context} response from {url}: {err}"))
}
/// GETs `url` without auth and returns the raw response body bytes.
///
/// Non-2xx responses become an error carrying the status plus a lossy UTF-8
/// rendering of the returned body; a body-read failure is propagated as-is.
async fn fetch_public_bytes(client: &Client, url: &str, context: &str) -> Result<Vec<u8>, String> {
    let response = client
        .get(url)
        .timeout(CURATED_PLUGINS_EXPORT_TIMEOUT)
        .send()
        .await
        .map_err(|err| format!("failed to {context} from {url}: {err}"))?;
    let status = response.status();
    let payload = response
        .bytes()
        .await
        .map_err(|err| format!("failed to read {context} response from {url}: {err}"))?;
    if status.is_success() {
        return Ok(payload.to_vec());
    }
    let body_text = String::from_utf8_lossy(&payload);
    Err(format!(
        "{context} from {url} failed with status {status}: {body_text}"
    ))
}

View File

@@ -147,6 +147,7 @@ exit 1
tmp.path(),
git_path.to_str().expect("utf8 path"),
"http://127.0.0.1:9",
"http://127.0.0.1:9/backend-api/plugins/export/curated",
)
.expect("git sync should succeed");
@@ -160,6 +161,137 @@ exit 1
assert_eq!(read_curated_plugins_sha(tmp.path()).as_deref(), Some(sha));
}
#[cfg(unix)]
#[test]
fn sync_openai_plugins_repo_via_git_succeeds_with_local_rewritten_remote() {
use std::os::unix::fs::PermissionsExt;
let tmp = tempdir().expect("tempdir");
let repo_root = tempfile::Builder::new()
.prefix("curated-repo-success-")
.tempdir()
.expect("tempdir");
let work_repo = repo_root.path().join("work/plugins");
let remote_repo = repo_root.path().join("remotes/openai/plugins.git");
std::fs::create_dir_all(work_repo.join(".agents/plugins")).expect("create marketplace dir");
std::fs::create_dir_all(work_repo.join("plugins/gmail/.codex-plugin"))
.expect("create plugin dir");
std::fs::write(
work_repo.join(".agents/plugins/marketplace.json"),
r#"{"name":"openai-curated","plugins":[{"name":"gmail","source":{"source":"local","path":"./plugins/gmail"}}]}"#,
)
.expect("write marketplace");
std::fs::write(
work_repo.join("plugins/gmail/.codex-plugin/plugin.json"),
r#"{"name":"gmail"}"#,
)
.expect("write plugin manifest");
let init_status = Command::new("git")
.arg("-C")
.arg(&work_repo)
.arg("init")
.status()
.expect("run git init");
assert!(init_status.success());
let add_status = Command::new("git")
.arg("-C")
.arg(&work_repo)
.arg("add")
.arg(".")
.status()
.expect("run git add");
assert!(add_status.success());
let commit_status = Command::new("git")
.arg("-C")
.arg(&work_repo)
.arg("-c")
.arg("user.name=Codex Test")
.arg("-c")
.arg("user.email=codex@example.com")
.arg("commit")
.arg("-m")
.arg("init")
.status()
.expect("run git commit");
assert!(commit_status.success());
std::fs::create_dir_all(remote_repo.parent().expect("remote parent"))
.expect("create remote parent");
let clone_status = Command::new("git")
.arg("clone")
.arg("--bare")
.arg(&work_repo)
.arg(&remote_repo)
.status()
.expect("run git clone --bare");
assert!(clone_status.success());
let sha_output = Command::new("git")
.arg("-C")
.arg(&work_repo)
.arg("rev-parse")
.arg("HEAD")
.output()
.expect("run git rev-parse");
assert!(sha_output.status.success());
let sha = String::from_utf8_lossy(&sha_output.stdout)
.trim()
.to_string();
let git_config_path = repo_root.path().join("git-rewrite.conf");
std::fs::write(
&git_config_path,
format!(
"[url \"file://{}/\"]\n insteadOf = https://github.com/\n",
repo_root.path().join("remotes").display()
),
)
.expect("write git config");
let bin_dir = tempfile::Builder::new()
.prefix("git-rewrite-wrapper-")
.tempdir()
.expect("tempdir");
let git_wrapper = bin_dir.path().join("git");
std::fs::write(
&git_wrapper,
format!(
"#!/bin/sh\nGIT_CONFIG_GLOBAL='{}' exec git \"$@\"\n",
git_config_path.display()
),
)
.expect("write git wrapper");
let mut permissions = std::fs::metadata(&git_wrapper)
.expect("metadata")
.permissions();
permissions.set_mode(0o755);
std::fs::set_permissions(&git_wrapper, permissions).expect("chmod");
let synced_sha =
sync_openai_plugins_repo_via_git(tmp.path(), git_wrapper.to_str().expect("utf8 path"))
.expect("git sync should succeed");
assert_eq!(synced_sha, sha);
assert!(
curated_plugins_repo_path(tmp.path())
.join(".agents/plugins/marketplace.json")
.is_file()
);
assert!(
curated_plugins_repo_path(tmp.path())
.join("plugins/gmail/.codex-plugin/plugin.json")
.is_file()
);
assert_eq!(
read_curated_plugins_sha(tmp.path()).as_deref(),
Some(sha.as_str())
);
assert!(!has_plugins_clone_dirs(tmp.path()));
}
#[tokio::test]
async fn sync_openai_plugins_repo_falls_back_to_http_when_git_is_unavailable() {
let tmp = tempdir().expect("tempdir");
@@ -196,6 +328,7 @@ async fn sync_openai_plugins_repo_falls_back_to_http_when_git_is_unavailable() {
tmp_path.as_path(),
"missing-git-for-test",
&server_uri,
"http://127.0.0.1:9/backend-api/plugins/export/curated",
)
})
.await
@@ -271,6 +404,7 @@ exit 1
tmp_path.as_path(),
git_path.to_str().expect("utf8 path"),
&server_uri,
"http://127.0.0.1:9/backend-api/plugins/export/curated",
)
})
.await
@@ -412,6 +546,7 @@ async fn sync_openai_plugins_repo_skips_archive_download_when_sha_matches() {
tmp_path.as_path(),
"missing-git-for-test",
&server_uri,
"http://127.0.0.1:9/backend-api/plugins/export/curated",
)
})
.await
@@ -422,6 +557,146 @@ async fn sync_openai_plugins_repo_skips_archive_download_when_sha_matches() {
assert!(repo_path.join(".agents/plugins/marketplace.json").is_file());
}
// When git is unavailable AND the GitHub HTTP lookup fails AND no local snapshot
// exists yet, the sync should fall through to the backend export archive and
// record the SHA embedded in the extracted repo.
#[tokio::test]
async fn sync_openai_plugins_repo_falls_back_to_export_archive_when_no_snapshot_exists() {
    let tmp = tempdir().expect("tempdir");
    let server = MockServer::start().await;
    let export_api_url = format!("{}/backend-api/plugins/export/curated", server.uri());
    let export_sha = "1111111111111111111111111111111111111111";
    // GitHub repo metadata endpoint fails, forcing the export fallback.
    Mock::given(method("GET"))
        .and(path("/repos/openai/plugins"))
        .respond_with(ResponseTemplate::new(500).set_body_string("github repo lookup failed"))
        .mount(&server)
        .await;
    // Export metadata endpoint points at the zip download mounted below.
    Mock::given(method("GET"))
        .and(path("/backend-api/plugins/export/curated"))
        .respond_with(ResponseTemplate::new(200).set_body_string(format!(
            r#"{{"download_url":"{}/files/curated-plugins.zip"}}"#,
            server.uri()
        )))
        .mount(&server)
        .await;
    Mock::given(method("GET"))
        .and(path("/files/curated-plugins.zip"))
        .respond_with(
            ResponseTemplate::new(200)
                .insert_header("content-type", "application/zip")
                .set_body_bytes(curated_repo_export_zip_bytes(export_sha)),
        )
        .mount(&server)
        .await;
    // The sync is blocking, so run it off the async test runtime.
    let server_uri = server.uri();
    let tmp_path = tmp.path().to_path_buf();
    let synced_sha = tokio::task::spawn_blocking(move || {
        sync_openai_plugins_repo_with_transport_overrides(
            tmp_path.as_path(),
            "missing-git-for-test",
            &server_uri,
            &export_api_url,
        )
    })
    .await
    .expect("sync task should join")
    .expect("export fallback sync should succeed");
    // The archive contents were activated and its SHA recorded.
    let repo_path = curated_plugins_repo_path(tmp.path());
    assert_eq!(synced_sha, export_sha);
    assert!(repo_path.join(".agents/plugins/marketplace.json").is_file());
    assert!(
        repo_path
            .join("plugins/gmail/.codex-plugin/plugin.json")
            .is_file()
    );
    assert_eq!(
        read_curated_plugins_sha(tmp.path()).as_deref(),
        Some(export_sha)
    );
}
// The export-archive fallback must NOT run when a local curated snapshot already
// exists: the sync should fail with an explanatory error and leave the existing
// snapshot and recorded SHA untouched.
#[tokio::test]
async fn sync_openai_plugins_repo_skips_export_archive_when_snapshot_exists() {
    let tmp = tempdir().expect("tempdir");
    // Seed a pre-existing snapshot plus its recorded SHA.
    let curated_root = curated_plugins_repo_path(tmp.path());
    write_openai_curated_marketplace(&curated_root, &["linear"]);
    write_curated_plugin_sha(tmp.path());
    let plugin_manifest_path = curated_root.join("plugins/linear/.codex-plugin/plugin.json");
    let original_manifest =
        std::fs::read_to_string(&plugin_manifest_path).expect("read existing plugin manifest");
    let server = MockServer::start().await;
    let export_api_url = format!("{}/backend-api/plugins/export/curated", server.uri());
    // GitHub HTTP path fails...
    Mock::given(method("GET"))
        .and(path("/repos/openai/plugins"))
        .respond_with(ResponseTemplate::new(500).set_body_string("github repo lookup failed"))
        .mount(&server)
        .await;
    // ...and the export endpoints are mounted (serving a DIFFERENT sha) but are
    // expected to go unused because a snapshot already exists.
    Mock::given(method("GET"))
        .and(path("/backend-api/plugins/export/curated"))
        .respond_with(ResponseTemplate::new(200).set_body_string(format!(
            r#"{{"download_url":"{}/files/curated-plugins.zip"}}"#,
            server.uri()
        )))
        .mount(&server)
        .await;
    Mock::given(method("GET"))
        .and(path("/files/curated-plugins.zip"))
        .respond_with(
            ResponseTemplate::new(200)
                .insert_header("content-type", "application/zip")
                .set_body_bytes(curated_repo_export_zip_bytes(
                    "2222222222222222222222222222222222222222",
                )),
        )
        .mount(&server)
        .await;
    // The sync is blocking, so run it off the async test runtime.
    let server_uri = server.uri();
    let tmp_path = tmp.path().to_path_buf();
    let err = tokio::task::spawn_blocking(move || {
        sync_openai_plugins_repo_with_transport_overrides(
            tmp_path.as_path(),
            "missing-git-for-test",
            &server_uri,
            &export_api_url,
        )
    })
    .await
    .expect("sync task should join")
    .expect_err("existing snapshot should suppress export fallback");
    // Error explains the skip; local manifest and SHA are unchanged.
    assert!(err.contains("export archive fallback skipped"));
    assert_eq!(
        std::fs::read_to_string(&plugin_manifest_path).expect("read plugin manifest after sync"),
        original_manifest
    );
    assert_eq!(
        read_curated_plugins_sha(tmp.path()).as_deref(),
        Some(TEST_CURATED_PLUGIN_SHA)
    );
}
// A symbolic HEAD ("ref: refs/heads/main") must be resolved through the loose
// ref file to the SHA it contains.
#[test]
fn read_extracted_repo_git_sha_reads_head_ref_from_extracted_repo() {
    let tmp = tempdir().expect("tempdir");
    let heads_dir = tmp.path().join(".git/refs/heads");
    std::fs::create_dir_all(&heads_dir).expect("create git ref dir");
    std::fs::write(tmp.path().join(".git/HEAD"), "ref: refs/heads/main\n").expect("write HEAD");
    std::fs::write(
        heads_dir.join("main"),
        "3333333333333333333333333333333333333333\n",
    )
    .expect("write main ref");
    let resolved = read_extracted_repo_git_sha(tmp.path()).expect("read extracted repo git sha");
    assert_eq!(
        resolved,
        Some("3333333333333333333333333333333333333333".to_string())
    );
}
#[tokio::test]
async fn startup_remote_plugin_sync_writes_marker_and_reconciles_state() {
let tmp = tempdir().expect("tempdir");
@@ -528,3 +803,49 @@ fn curated_repo_zipball_bytes(sha: &str) -> Vec<u8> {
writer.finish().expect("finish zip writer").into_inner()
}
// Builds an in-memory zip matching the backend export layout: all repo contents
// under a top-level `plugins/` directory, including a minimal `.git` whose HEAD
// resolves to `sha`, a marketplace manifest, and one plugin manifest.
fn curated_repo_export_zip_bytes(sha: &str) -> Vec<u8> {
    let head_ref = format!("{sha}\n");
    // (zip path, contents, start-file message, write message) per archive entry.
    let entries: [(&str, &[u8], &str, &str); 4] = [
        (
            "plugins/.git/HEAD",
            b"ref: refs/heads/main\n",
            "start HEAD entry",
            "write HEAD",
        ),
        (
            "plugins/.git/refs/heads/main",
            head_ref.as_bytes(),
            "start main ref entry",
            "write main ref",
        ),
        (
            "plugins/.agents/plugins/marketplace.json",
            br#"{
"name": "openai-curated",
"plugins": [
{
"name": "gmail",
"source": {
"source": "local",
"path": "./plugins/gmail"
}
}
]
}"#,
            "start marketplace entry",
            "write marketplace",
        ),
        (
            "plugins/plugins/gmail/.codex-plugin/plugin.json",
            br#"{"name":"gmail"}"#,
            "start plugin manifest entry",
            "write plugin manifest",
        ),
    ];
    let mut writer = ZipWriter::new(std::io::Cursor::new(Vec::new()));
    let options = SimpleFileOptions::default();
    for (entry_path, contents, start_msg, write_msg) in entries {
        writer.start_file(entry_path, options).expect(start_msg);
        writer.write_all(contents).expect(write_msg);
    }
    writer.finish().expect("finish zip writer").into_inner()
}