Compare commits

...

1 Commits

Author SHA1 Message Date
xl-openai
e5f4d1fef5 feat: prefer git for curated plugin sync (#15275)
Start with a shallow `git clone`; fall back to the GitHub HTTP zipball download if git fails.
2026-03-20 00:06:24 -07:00
6 changed files with 938 additions and 597 deletions

View File

@@ -1,356 +0,0 @@
use crate::default_client::build_reqwest_client;
use reqwest::Client;
use serde::Deserialize;
use std::fs;
use std::io::Cursor;
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::path::Component;
use std::path::Path;
use std::path::PathBuf;
use std::time::Duration;
use zip::ZipArchive;
const GITHUB_API_BASE_URL: &str = "https://api.github.com";
const GITHUB_API_ACCEPT_HEADER: &str = "application/vnd.github+json";
const GITHUB_API_VERSION_HEADER: &str = "2022-11-28";
const OPENAI_PLUGINS_OWNER: &str = "openai";
const OPENAI_PLUGINS_REPO: &str = "plugins";
const CURATED_PLUGINS_RELATIVE_DIR: &str = ".tmp/plugins";
const CURATED_PLUGINS_SHA_FILE: &str = ".tmp/plugins.sha";
const CURATED_PLUGINS_HTTP_TIMEOUT: Duration = Duration::from_secs(30);
#[derive(Debug, Deserialize)]
struct GitHubRepositorySummary {
default_branch: String,
}
#[derive(Debug, Deserialize)]
struct GitHubGitRefSummary {
object: GitHubGitRefObject,
}
#[derive(Debug, Deserialize)]
struct GitHubGitRefObject {
sha: String,
}
/// Location of the locally synced curated plugins checkout under `codex_home`.
pub(crate) fn curated_plugins_repo_path(codex_home: &Path) -> PathBuf {
    let mut path = codex_home.to_path_buf();
    path.push(CURATED_PLUGINS_RELATIVE_DIR);
    path
}
/// Reads the sha recorded by the last successful curated plugins sync, if any.
pub(crate) fn read_curated_plugins_sha(codex_home: &Path) -> Option<String> {
    let sha_path = codex_home.join(CURATED_PLUGINS_SHA_FILE);
    read_sha_file(&sha_path)
}
/// Syncs the curated plugins repo using the production GitHub API endpoint.
pub(crate) fn sync_openai_plugins_repo(codex_home: &Path) -> Result<String, String> {
    let api_base_url = GITHUB_API_BASE_URL;
    sync_openai_plugins_repo_with_api_base_url(codex_home, api_base_url)
}
/// Syncs the curated `openai/plugins` repo into `<codex_home>/.tmp/plugins`
/// by downloading the default-branch zipball from the given GitHub API base
/// URL. Returns the remote HEAD sha on success.
///
/// The download is skipped (fast path) when the recorded local sha already
/// matches the remote HEAD and the repo directory exists. An existing repo is
/// replaced via rename-with-backup so a failed activation can be rolled back.
fn sync_openai_plugins_repo_with_api_base_url(
    codex_home: &Path,
    api_base_url: &str,
) -> Result<String, String> {
    let repo_path = curated_plugins_repo_path(codex_home);
    let sha_path = codex_home.join(CURATED_PLUGINS_SHA_FILE);
    // Called from blocking contexts, so the async HTTP calls are driven on a
    // private single-threaded runtime.
    let runtime = tokio::runtime::Builder::new_current_thread()
        .enable_all()
        .build()
        .map_err(|err| format!("failed to create curated plugins sync runtime: {err}"))?;
    let remote_sha = runtime.block_on(fetch_curated_repo_remote_sha(api_base_url))?;
    let local_sha = read_sha_file(&sha_path);
    // Fast path: already up to date.
    if local_sha.as_deref() == Some(remote_sha.as_str()) && repo_path.is_dir() {
        return Ok(remote_sha);
    }
    let Some(parent) = repo_path.parent() else {
        return Err(format!(
            "failed to determine curated plugins parent directory for {}",
            repo_path.display()
        ));
    };
    fs::create_dir_all(parent).map_err(|err| {
        format!(
            "failed to create curated plugins parent directory {}: {err}",
            parent.display()
        )
    })?;
    // Stage the download in a temp dir next to the final location so the
    // later renames stay on the same filesystem.
    let clone_dir = tempfile::Builder::new()
        .prefix("plugins-clone-")
        .tempdir_in(parent)
        .map_err(|err| {
            format!(
                "failed to create temporary curated plugins directory in {}: {err}",
                parent.display()
            )
        })?;
    let cloned_repo_path = clone_dir.path().join("repo");
    let zipball_bytes = runtime.block_on(fetch_curated_repo_zipball(api_base_url, &remote_sha))?;
    extract_zipball_to_dir(&zipball_bytes, &cloned_repo_path)?;
    // Sanity check: a valid curated repo must ship the marketplace manifest.
    if !cloned_repo_path
        .join(".agents/plugins/marketplace.json")
        .is_file()
    {
        return Err(format!(
            "curated plugins archive missing marketplace manifest at {}",
            cloned_repo_path
                .join(".agents/plugins/marketplace.json")
                .display()
        ));
    }
    if repo_path.exists() {
        // Move the old repo aside first so we can roll back if activating the
        // new one fails.
        let backup_dir = tempfile::Builder::new()
            .prefix("plugins-backup-")
            .tempdir_in(parent)
            .map_err(|err| {
                format!(
                    "failed to create curated plugins backup directory in {}: {err}",
                    parent.display()
                )
            })?;
        let backup_repo_path = backup_dir.path().join("repo");
        fs::rename(&repo_path, &backup_repo_path).map_err(|err| {
            format!(
                "failed to move previous curated plugins repo out of the way at {}: {err}",
                repo_path.display()
            )
        })?;
        if let Err(err) = fs::rename(&cloned_repo_path, &repo_path) {
            // Activation failed: try to restore the backup. If even the
            // rollback fails, keep the backup on disk and report both errors.
            let rollback_result = fs::rename(&backup_repo_path, &repo_path);
            return match rollback_result {
                Ok(()) => Err(format!(
                    "failed to activate new curated plugins repo at {}: {err}",
                    repo_path.display()
                )),
                Err(rollback_err) => {
                    let backup_path = backup_dir.keep().join("repo");
                    Err(format!(
                        "failed to activate new curated plugins repo at {}: {err}; failed to restore previous repo (left at {}): {rollback_err}",
                        repo_path.display(),
                        backup_path.display()
                    ))
                }
            };
        }
    } else {
        fs::rename(&cloned_repo_path, &repo_path).map_err(|err| {
            format!(
                "failed to activate curated plugins repo at {}: {err}",
                repo_path.display()
            )
        })?;
    }
    // Record the synced sha so the next run can take the fast path.
    if let Some(parent) = sha_path.parent() {
        fs::create_dir_all(parent).map_err(|err| {
            format!(
                "failed to create curated plugins sha directory {}: {err}",
                parent.display()
            )
        })?;
    }
    fs::write(&sha_path, format!("{remote_sha}\n")).map_err(|err| {
        format!(
            "failed to write curated plugins sha file {}: {err}",
            sha_path.display()
        )
    })?;
    Ok(remote_sha)
}
/// Resolves the sha of the curated plugins repo's default-branch HEAD via the
/// GitHub REST API: first a repository lookup, then a git-ref lookup.
async fn fetch_curated_repo_remote_sha(api_base_url: &str) -> Result<String, String> {
    let base = api_base_url.trim_end_matches('/');
    let repo_url = format!("{base}/repos/{OPENAI_PLUGINS_OWNER}/{OPENAI_PLUGINS_REPO}");
    let client = build_reqwest_client();

    // Step 1: find the default branch name.
    let repo_body = fetch_github_text(&client, &repo_url, "get curated plugins repository").await?;
    let repo_summary = serde_json::from_str::<GitHubRepositorySummary>(&repo_body)
        .map_err(|err| {
            format!("failed to parse curated plugins repository response from {repo_url}: {err}")
        })?;
    let default_branch = repo_summary.default_branch;
    if default_branch.is_empty() {
        return Err(format!(
            "curated plugins repository response from {repo_url} did not include a default branch"
        ));
    }

    // Step 2: resolve that branch's HEAD commit sha.
    let git_ref_url = format!("{repo_url}/git/ref/heads/{default_branch}");
    let git_ref_body =
        fetch_github_text(&client, &git_ref_url, "get curated plugins HEAD ref").await?;
    let git_ref = serde_json::from_str::<GitHubGitRefSummary>(&git_ref_body).map_err(|err| {
        format!("failed to parse curated plugins ref response from {git_ref_url}: {err}")
    })?;
    let sha = git_ref.object.sha;
    if sha.is_empty() {
        return Err(format!(
            "curated plugins ref response from {git_ref_url} did not include a HEAD sha"
        ));
    }
    Ok(sha)
}
/// Downloads the zipball for `remote_sha` from the GitHub API
/// (`GET /repos/{owner}/{repo}/zipball/{ref}`).
async fn fetch_curated_repo_zipball(
    api_base_url: &str,
    remote_sha: &str,
) -> Result<Vec<u8>, String> {
    let base = api_base_url.trim_end_matches('/');
    let zipball_url = format!(
        "{base}/repos/{OPENAI_PLUGINS_OWNER}/{OPENAI_PLUGINS_REPO}/zipball/{remote_sha}"
    );
    let client = build_reqwest_client();
    fetch_github_bytes(&client, &zipball_url, "download curated plugins archive").await
}
/// Issues a GitHub GET and returns the body as text; non-2xx statuses become
/// errors that include the (best-effort) response body for context.
async fn fetch_github_text(client: &Client, url: &str, context: &str) -> Result<String, String> {
    let response = match github_request(client, url).send().await {
        Ok(response) => response,
        Err(err) => return Err(format!("failed to {context} from {url}: {err}")),
    };
    let status = response.status();
    // Read the body even on failure so the error message can include it.
    let body = response.text().await.unwrap_or_default();
    if status.is_success() {
        Ok(body)
    } else {
        Err(format!(
            "{context} from {url} failed with status {status}: {body}"
        ))
    }
}
/// Issues a GitHub GET and returns the raw body bytes; non-2xx statuses
/// become errors that include a lossy-UTF-8 rendering of the body.
async fn fetch_github_bytes(client: &Client, url: &str, context: &str) -> Result<Vec<u8>, String> {
    let send_result = github_request(client, url).send().await;
    let response = send_result.map_err(|err| format!("failed to {context} from {url}: {err}"))?;
    let status = response.status();
    let bytes = response
        .bytes()
        .await
        .map_err(|err| format!("failed to read {context} response from {url}: {err}"))?;
    if status.is_success() {
        Ok(bytes.to_vec())
    } else {
        let body_text = String::from_utf8_lossy(&bytes);
        Err(format!(
            "{context} from {url} failed with status {status}: {body_text}"
        ))
    }
}
/// Builds a GET request with the timeout and API headers shared by every
/// GitHub call in this module.
fn github_request(client: &Client, url: &str) -> reqwest::RequestBuilder {
    let builder = client.get(url).timeout(CURATED_PLUGINS_HTTP_TIMEOUT);
    let builder = builder.header("accept", GITHUB_API_ACCEPT_HEADER);
    builder.header("x-github-api-version", GITHUB_API_VERSION_HEADER)
}
/// Reads and trims a sha marker file. Any I/O failure or an all-whitespace
/// file yields `None` (best-effort read).
fn read_sha_file(sha_path: &Path) -> Option<String> {
    let contents = fs::read_to_string(sha_path).ok()?;
    let sha = contents.trim();
    if sha.is_empty() {
        None
    } else {
        Some(sha.to_string())
    }
}
/// Extracts a GitHub zipball into `destination`, stripping the single
/// top-level `owner-repo-sha/` directory GitHub prepends to every entry.
///
/// Entries whose names would escape the extraction root are rejected, and
/// unix permission bits recorded in the archive are re-applied to files.
fn extract_zipball_to_dir(bytes: &[u8], destination: &Path) -> Result<(), String> {
    fs::create_dir_all(destination).map_err(|err| {
        format!(
            "failed to create curated plugins extraction directory {}: {err}",
            destination.display()
        )
    })?;
    let cursor = Cursor::new(bytes);
    let mut archive = ZipArchive::new(cursor)
        .map_err(|err| format!("failed to open curated plugins zip archive: {err}"))?;
    for index in 0..archive.len() {
        let mut entry = archive
            .by_index(index)
            .map_err(|err| format!("failed to read curated plugins zip entry: {err}"))?;
        // `enclosed_name` returns None for entries with `..`/absolute paths,
        // i.e. potential zip-slip attacks — reject those outright.
        let Some(relative_path) = entry.enclosed_name() else {
            return Err(format!(
                "curated plugins zip entry `{}` escapes extraction root",
                entry.name()
            ));
        };
        // Drop the leading `owner-repo-sha/` component GitHub adds.
        let mut components = relative_path.components();
        let Some(Component::Normal(_)) = components.next() else {
            continue;
        };
        // Rebuild the remaining path from Normal components only.
        let output_relative = components.fold(PathBuf::new(), |mut path, component| {
            if let Component::Normal(segment) = component {
                path.push(segment);
            }
            path
        });
        // The root directory entry itself reduces to an empty path; skip it.
        if output_relative.as_os_str().is_empty() {
            continue;
        }
        let output_path = destination.join(&output_relative);
        if entry.is_dir() {
            fs::create_dir_all(&output_path).map_err(|err| {
                format!(
                    "failed to create curated plugins directory {}: {err}",
                    output_path.display()
                )
            })?;
            continue;
        }
        // File entry: ensure the parent directory exists, then stream the
        // entry's bytes to disk.
        if let Some(parent) = output_path.parent() {
            fs::create_dir_all(parent).map_err(|err| {
                format!(
                    "failed to create curated plugins directory {}: {err}",
                    parent.display()
                )
            })?;
        }
        let mut output = fs::File::create(&output_path).map_err(|err| {
            format!(
                "failed to create curated plugins file {}: {err}",
                output_path.display()
            )
        })?;
        std::io::copy(&mut entry, &mut output).map_err(|err| {
            format!(
                "failed to write curated plugins file {}: {err}",
                output_path.display()
            )
        })?;
        apply_zip_permissions(&entry, &output_path)?;
    }
    Ok(())
}
/// Re-applies the unix mode bits recorded in the zip entry (e.g. the
/// executable bit) to the extracted file. Entries without a recorded mode
/// keep whatever default permissions the file was created with.
#[cfg(unix)]
fn apply_zip_permissions(entry: &zip::read::ZipFile<'_>, output_path: &Path) -> Result<(), String> {
    let Some(mode) = entry.unix_mode() else {
        return Ok(());
    };
    fs::set_permissions(output_path, fs::Permissions::from_mode(mode)).map_err(|err| {
        format!(
            "failed to set permissions on curated plugins file {}: {err}",
            output_path.display()
        )
    })
}
/// No-op on non-unix targets: zip unix mode bits have no meaning there.
#[cfg(not(unix))]
fn apply_zip_permissions(
    _entry: &zip::read::ZipFile<'_>,
    _output_path: &Path,
) -> Result<(), String> {
    Ok(())
}
#[cfg(test)]
#[path = "curated_repo_tests.rs"]
mod tests;

View File

@@ -1,159 +0,0 @@
use super::*;
use pretty_assertions::assert_eq;
use std::io::Write;
use tempfile::tempdir;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
use wiremock::matchers::method;
use wiremock::matchers::path;
use zip::ZipWriter;
use zip::write::SimpleFileOptions;
#[test]
fn curated_plugins_repo_path_uses_codex_home_tmp_dir() {
    // The curated checkout must live under `<codex_home>/.tmp/plugins`.
    let codex_home = tempdir().expect("tempdir");
    let expected = codex_home.path().join(".tmp/plugins");
    assert_eq!(curated_plugins_repo_path(codex_home.path()), expected);
}
#[test]
fn read_curated_plugins_sha_reads_trimmed_sha_file() {
    let codex_home = tempdir().expect("tempdir");
    let tmp_dir = codex_home.path().join(".tmp");
    fs::create_dir_all(&tmp_dir).expect("create tmp");
    // The trailing newline must be trimmed away when the sha is read back.
    fs::write(tmp_dir.join("plugins.sha"), "abc123\n").expect("write sha");
    let sha = read_curated_plugins_sha(codex_home.path());
    assert_eq!(sha.as_deref(), Some("abc123"));
}
/// End-to-end happy path: mocks the three GitHub endpoints (repo lookup, ref
/// lookup, zipball download) and verifies the archive is extracted into the
/// repo directory and the sha is recorded.
#[tokio::test]
async fn sync_openai_plugins_repo_downloads_zipball_and_records_sha() {
    let tmp = tempdir().expect("tempdir");
    let server = MockServer::start().await;
    let sha = "0123456789abcdef0123456789abcdef01234567";
    Mock::given(method("GET"))
        .and(path("/repos/openai/plugins"))
        .respond_with(ResponseTemplate::new(200).set_body_string(r#"{"default_branch":"main"}"#))
        .mount(&server)
        .await;
    Mock::given(method("GET"))
        .and(path("/repos/openai/plugins/git/ref/heads/main"))
        .respond_with(
            ResponseTemplate::new(200)
                .set_body_string(format!(r#"{{"object":{{"sha":"{sha}"}}}}"#)),
        )
        .mount(&server)
        .await;
    Mock::given(method("GET"))
        .and(path(format!("/repos/openai/plugins/zipball/{sha}")))
        .respond_with(
            ResponseTemplate::new(200)
                .insert_header("content-type", "application/zip")
                .set_body_bytes(curated_repo_zipball_bytes(sha)),
        )
        .mount(&server)
        .await;
    let server_uri = server.uri();
    let tmp_path = tmp.path().to_path_buf();
    // The sync builds its own runtime internally, so it must run on a
    // blocking thread rather than inside this test's tokio runtime.
    tokio::task::spawn_blocking(move || {
        sync_openai_plugins_repo_with_api_base_url(tmp_path.as_path(), &server_uri)
    })
    .await
    .expect("sync task should join")
    .expect("sync should succeed");
    let repo_path = curated_plugins_repo_path(tmp.path());
    assert!(repo_path.join(".agents/plugins/marketplace.json").is_file());
    assert!(
        repo_path
            .join("plugins/gmail/.codex-plugin/plugin.json")
            .is_file()
    );
    assert_eq!(read_curated_plugins_sha(tmp.path()).as_deref(), Some(sha));
}
/// Fast-path test: when the recorded sha matches remote HEAD and the repo
/// directory exists, the zipball must not be downloaded — no zipball endpoint
/// is mocked, so a download attempt would fail the sync.
#[tokio::test]
async fn sync_openai_plugins_repo_skips_archive_download_when_sha_matches() {
    let tmp = tempdir().expect("tempdir");
    let repo_path = curated_plugins_repo_path(tmp.path());
    fs::create_dir_all(repo_path.join(".agents/plugins")).expect("create repo");
    fs::write(
        repo_path.join(".agents/plugins/marketplace.json"),
        r#"{"name":"openai-curated","plugins":[]}"#,
    )
    .expect("write marketplace");
    fs::create_dir_all(tmp.path().join(".tmp")).expect("create tmp");
    // Pre-record the same sha the mocked ref endpoint will report.
    let sha = "fedcba9876543210fedcba9876543210fedcba98";
    fs::write(tmp.path().join(".tmp/plugins.sha"), format!("{sha}\n")).expect("write sha");
    let server = MockServer::start().await;
    Mock::given(method("GET"))
        .and(path("/repos/openai/plugins"))
        .respond_with(ResponseTemplate::new(200).set_body_string(r#"{"default_branch":"main"}"#))
        .mount(&server)
        .await;
    Mock::given(method("GET"))
        .and(path("/repos/openai/plugins/git/ref/heads/main"))
        .respond_with(
            ResponseTemplate::new(200)
                .set_body_string(format!(r#"{{"object":{{"sha":"{sha}"}}}}"#)),
        )
        .mount(&server)
        .await;
    let server_uri = server.uri();
    let tmp_path = tmp.path().to_path_buf();
    // Run on a blocking thread: the sync builds its own runtime internally.
    tokio::task::spawn_blocking(move || {
        sync_openai_plugins_repo_with_api_base_url(tmp_path.as_path(), &server_uri)
    })
    .await
    .expect("sync task should join")
    .expect("sync should succeed");
    assert_eq!(read_curated_plugins_sha(tmp.path()).as_deref(), Some(sha));
    assert!(repo_path.join(".agents/plugins/marketplace.json").is_file());
}
/// Builds an in-memory zipball mimicking GitHub's layout: a single
/// `openai-plugins-<sha>/` root directory containing the marketplace
/// manifest and one local plugin manifest.
fn curated_repo_zipball_bytes(sha: &str) -> Vec<u8> {
    let cursor = Cursor::new(Vec::new());
    let mut writer = ZipWriter::new(cursor);
    let options = SimpleFileOptions::default();
    // GitHub zipballs prefix every entry with `<owner>-<repo>-<sha>/`.
    let root = format!("openai-plugins-{sha}");
    writer
        .start_file(format!("{root}/.agents/plugins/marketplace.json"), options)
        .expect("start marketplace entry");
    writer
        .write_all(
            br#"{
  "name": "openai-curated",
  "plugins": [
    {
      "name": "gmail",
      "source": {
        "source": "local",
        "path": "./plugins/gmail"
      }
    }
  ]
}"#,
        )
        .expect("write marketplace");
    writer
        .start_file(
            format!("{root}/plugins/gmail/.codex-plugin/plugin.json"),
            options,
        )
        .expect("start plugin manifest entry");
    writer
        .write_all(br#"{"name":"gmail"}"#)
        .expect("write plugin manifest");
    writer.finish().expect("finish zip writer").into_inner()
}

View File

@@ -60,6 +60,7 @@ use std::sync::RwLock;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
use std::time::Instant;
use tokio::sync::Mutex;
use toml_edit::value;
use tracing::info;
use tracing::warn;
@@ -463,6 +464,7 @@ pub struct PluginsManager {
store: PluginStore,
featured_plugin_ids_cache: RwLock<Option<CachedFeaturedPluginIds>>,
cached_enabled_outcome: RwLock<Option<PluginLoadOutcome>>,
remote_sync_lock: Mutex<()>,
restriction_product: Option<Product>,
analytics_events_client: RwLock<Option<AnalyticsEventsClient>>,
}
@@ -488,6 +490,7 @@ impl PluginsManager {
store: PluginStore::new(codex_home),
featured_plugin_ids_cache: RwLock::new(None),
cached_enabled_outcome: RwLock::new(None),
remote_sync_lock: Mutex::new(()),
restriction_product,
analytics_events_client: RwLock::new(None),
}
@@ -777,6 +780,8 @@ impl PluginsManager {
auth: Option<&CodexAuth>,
additive_only: bool,
) -> Result<RemotePluginSyncResult, PluginRemoteSyncError> {
let _remote_sync_guard = self.remote_sync_lock.lock().await;
if !config.features.enabled(Feature::Plugins) {
return Ok(RemotePluginSyncResult::default());
}

View File

@@ -1,4 +1,3 @@
mod curated_repo;
mod discoverable;
mod injection;
mod manager;
@@ -12,9 +11,6 @@ mod store;
pub(crate) mod test_support;
mod toggles;
pub(crate) use curated_repo::curated_plugins_repo_path;
pub(crate) use curated_repo::read_curated_plugins_sha;
pub(crate) use curated_repo::sync_openai_plugins_repo;
pub(crate) use discoverable::list_tool_suggest_discoverable_plugins;
pub(crate) use injection::build_plugin_injections;
pub use manager::AppConnectorId;
@@ -52,5 +48,8 @@ pub use remote::RemotePluginFetchError;
pub use remote::fetch_remote_featured_plugin_ids;
pub(crate) use render::render_explicit_plugin_instructions;
pub(crate) use render::render_plugins_section;
pub(crate) use startup_sync::curated_plugins_repo_path;
pub(crate) use startup_sync::read_curated_plugins_sha;
pub(crate) use startup_sync::sync_openai_plugins_repo;
pub use store::PluginId;
pub use toggles::collect_plugin_enabled_candidates;

View File

@@ -1,19 +1,143 @@
use crate::default_client::build_reqwest_client;
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use std::process::Output;
use std::process::Stdio;
use std::sync::Arc;
use std::time::Duration;
use reqwest::Client;
use serde::Deserialize;
use tracing::info;
use tracing::warn;
use zip::ZipArchive;
use crate::AuthManager;
use crate::config::Config;
use super::PluginsManager;
const GITHUB_API_BASE_URL: &str = "https://api.github.com";
const GITHUB_API_ACCEPT_HEADER: &str = "application/vnd.github+json";
const GITHUB_API_VERSION_HEADER: &str = "2022-11-28";
const OPENAI_PLUGINS_OWNER: &str = "openai";
const OPENAI_PLUGINS_REPO: &str = "plugins";
const CURATED_PLUGINS_RELATIVE_DIR: &str = ".tmp/plugins";
const CURATED_PLUGINS_SHA_FILE: &str = ".tmp/plugins.sha";
const CURATED_PLUGINS_GIT_TIMEOUT: Duration = Duration::from_secs(30);
const CURATED_PLUGINS_HTTP_TIMEOUT: Duration = Duration::from_secs(30);
const STARTUP_REMOTE_PLUGIN_SYNC_MARKER_FILE: &str = ".tmp/app-server-remote-plugin-sync-v1";
const STARTUP_REMOTE_PLUGIN_SYNC_PREREQUISITE_TIMEOUT: Duration = Duration::from_secs(5);
#[derive(Debug, Deserialize)]
struct GitHubRepositorySummary {
default_branch: String,
}
#[derive(Debug, Deserialize)]
struct GitHubGitRefSummary {
object: GitHubGitRefObject,
}
#[derive(Debug, Deserialize)]
struct GitHubGitRefObject {
sha: String,
}
/// Filesystem location of the synced curated plugins checkout.
pub(crate) fn curated_plugins_repo_path(codex_home: &Path) -> PathBuf {
    let mut repo_path = codex_home.to_path_buf();
    repo_path.push(CURATED_PLUGINS_RELATIVE_DIR);
    repo_path
}
/// Returns the sha recorded by the last successful sync, if one exists.
pub(crate) fn read_curated_plugins_sha(codex_home: &Path) -> Option<String> {
    let sha_path = codex_home.join(CURATED_PLUGINS_SHA_FILE);
    read_sha_file(&sha_path)
}
/// Syncs with the default transports: `git` found on PATH, falling back to
/// the production GitHub API.
pub(crate) fn sync_openai_plugins_repo(codex_home: &Path) -> Result<String, String> {
    let git_binary = "git";
    sync_openai_plugins_repo_with_transport_overrides(codex_home, git_binary, GITHUB_API_BASE_URL)
}
/// Syncs the curated plugins repo, preferring a shallow git clone and falling
/// back to the GitHub HTTP zipball on any git failure (missing binary,
/// network policy, timeout, ...).
fn sync_openai_plugins_repo_with_transport_overrides(
    codex_home: &Path,
    git_binary: &str,
    api_base_url: &str,
) -> Result<String, String> {
    let git_result = sync_openai_plugins_repo_via_git(codex_home, git_binary);
    if let Err(err) = &git_result {
        warn!(
            error = %err,
            git_binary,
            "git sync failed for curated plugin sync; falling back to GitHub HTTP"
        );
        return sync_openai_plugins_repo_via_http(codex_home, api_base_url);
    }
    git_result
}
/// Syncs the curated plugins repo by shallow-cloning it with `git`.
///
/// Returns the remote HEAD sha on success. Skips the clone entirely when the
/// local checkout already matches the `git ls-remote` HEAD.
fn sync_openai_plugins_repo_via_git(codex_home: &Path, git_binary: &str) -> Result<String, String> {
    let repo_path = curated_plugins_repo_path(codex_home);
    let sha_path = codex_home.join(CURATED_PLUGINS_SHA_FILE);
    let remote_sha = git_ls_remote_head_sha(git_binary)?;
    let local_sha = read_local_git_or_sha_file(&repo_path, &sha_path, git_binary);
    // Fast path: up to date and the checkout is a real git repo.
    if local_sha.as_deref() == Some(remote_sha.as_str()) && repo_path.join(".git").is_dir() {
        return Ok(remote_sha);
    }
    let cloned_repo_path = prepare_curated_repo_parent_and_temp_dir(&repo_path)?;
    // GIT_OPTIONAL_LOCKS=0 keeps git from taking optional locks we don't need.
    let clone_output = run_git_command_with_timeout(
        Command::new(git_binary)
            .env("GIT_OPTIONAL_LOCKS", "0")
            .arg("clone")
            .arg("--depth")
            .arg("1")
            .arg("https://github.com/openai/plugins.git")
            .arg(&cloned_repo_path),
        "git clone curated plugins repo",
        CURATED_PLUGINS_GIT_TIMEOUT,
    )?;
    ensure_git_success(&clone_output, "git clone curated plugins repo")?;
    // Guard against the remote moving between ls-remote and clone.
    let cloned_sha = git_head_sha(&cloned_repo_path, git_binary)?;
    if cloned_sha != remote_sha {
        return Err(format!(
            "curated plugins clone HEAD mismatch: expected {remote_sha}, got {cloned_sha}"
        ));
    }
    ensure_marketplace_manifest_exists(&cloned_repo_path)?;
    activate_curated_repo(&repo_path, &cloned_repo_path)?;
    write_curated_plugins_sha(&sha_path, &remote_sha)?;
    Ok(remote_sha)
}
/// HTTP fallback: downloads the default-branch zipball via the GitHub API and
/// extracts it into the curated plugins directory. Returns the remote sha.
fn sync_openai_plugins_repo_via_http(
    codex_home: &Path,
    api_base_url: &str,
) -> Result<String, String> {
    let repo_path = curated_plugins_repo_path(codex_home);
    let sha_path = codex_home.join(CURATED_PLUGINS_SHA_FILE);
    // Called from blocking contexts, so drive the async HTTP calls on a
    // private single-threaded runtime.
    let runtime = tokio::runtime::Builder::new_current_thread()
        .enable_all()
        .build()
        .map_err(|err| format!("failed to create curated plugins sync runtime: {err}"))?;
    let remote_sha = runtime.block_on(fetch_curated_repo_remote_sha(api_base_url))?;
    let local_sha = read_sha_file(&sha_path);
    // Fast path: recorded sha already matches remote HEAD.
    if local_sha.as_deref() == Some(remote_sha.as_str()) && repo_path.is_dir() {
        return Ok(remote_sha);
    }
    let cloned_repo_path = prepare_curated_repo_parent_and_temp_dir(&repo_path)?;
    let zipball_bytes = runtime.block_on(fetch_curated_repo_zipball(api_base_url, &remote_sha))?;
    extract_zipball_to_dir(&zipball_bytes, &cloned_repo_path)?;
    ensure_marketplace_manifest_exists(&cloned_repo_path)?;
    activate_curated_repo(&repo_path, &cloned_repo_path)?;
    write_curated_plugins_sha(&sha_path, &remote_sha)?;
    Ok(remote_sha)
}
pub(super) fn start_startup_remote_plugin_sync_once(
manager: Arc<PluginsManager>,
codex_home: PathBuf,
@@ -103,93 +227,438 @@ async fn write_startup_remote_plugin_sync_marker(codex_home: &Path) -> std::io::
tokio::fs::write(marker_path, b"ok\n").await
}
#[cfg(test)]
mod tests {
use super::*;
use crate::auth::CodexAuth;
use crate::config::CONFIG_TOML_FILE;
use crate::plugins::curated_plugins_repo_path;
use crate::plugins::test_support::TEST_CURATED_PLUGIN_SHA;
use crate::plugins::test_support::write_curated_plugin_sha;
use crate::plugins::test_support::write_file;
use crate::plugins::test_support::write_openai_curated_marketplace;
use pretty_assertions::assert_eq;
use tempfile::tempdir;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
use wiremock::matchers::header;
use wiremock::matchers::method;
use wiremock::matchers::path;
fn prepare_curated_repo_parent_and_temp_dir(repo_path: &Path) -> Result<PathBuf, String> {
let Some(parent) = repo_path.parent() else {
return Err(format!(
"failed to determine curated plugins parent directory for {}",
repo_path.display()
));
};
std::fs::create_dir_all(parent).map_err(|err| {
format!(
"failed to create curated plugins parent directory {}: {err}",
parent.display()
)
})?;
#[tokio::test]
async fn startup_remote_plugin_sync_writes_marker_and_reconciles_state() {
let tmp = tempdir().expect("tempdir");
let curated_root = curated_plugins_repo_path(tmp.path());
write_openai_curated_marketplace(&curated_root, &["linear"]);
write_curated_plugin_sha(tmp.path());
write_file(
&tmp.path().join(CONFIG_TOML_FILE),
r#"[features]
plugins = true
let clone_dir = tempfile::Builder::new()
.prefix("plugins-clone-")
.tempdir_in(parent)
.map_err(|err| {
format!(
"failed to create temporary curated plugins directory in {}: {err}",
parent.display()
)
})?;
Ok(clone_dir.keep())
}
[plugins."linear@openai-curated"]
enabled = false
"#,
);
/// Verifies that a curated plugins checkout contains the marketplace manifest
/// at `.agents/plugins/marketplace.json`.
fn ensure_marketplace_manifest_exists(repo_path: &Path) -> Result<(), String> {
    let manifest_path = repo_path.join(".agents/plugins/marketplace.json");
    if manifest_path.is_file() {
        Ok(())
    } else {
        Err(format!(
            "curated plugins archive missing marketplace manifest at {}",
            manifest_path.display()
        ))
    }
}
let server = MockServer::start().await;
Mock::given(method("GET"))
.and(path("/backend-api/plugins/list"))
.and(header("authorization", "Bearer Access Token"))
.and(header("chatgpt-account-id", "account_id"))
.respond_with(ResponseTemplate::new(200).set_body_string(
r#"[
{"id":"1","name":"linear","marketplace_name":"openai-curated","version":"1.0.0","enabled":true}
]"#,
))
.mount(&server)
.await;
fn activate_curated_repo(repo_path: &Path, staged_repo_path: &Path) -> Result<(), String> {
if repo_path.exists() {
let parent = repo_path.parent().ok_or_else(|| {
format!(
"failed to determine curated plugins parent directory for {}",
repo_path.display()
)
})?;
let backup_dir = tempfile::Builder::new()
.prefix("plugins-backup-")
.tempdir_in(parent)
.map_err(|err| {
format!(
"failed to create curated plugins backup directory in {}: {err}",
parent.display()
)
})?;
let backup_repo_path = backup_dir.path().join("repo");
let mut config = crate::plugins::test_support::load_plugins_config(tmp.path()).await;
config.chatgpt_base_url = format!("{}/backend-api/", server.uri());
let manager = Arc::new(PluginsManager::new(tmp.path().to_path_buf()));
let auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
std::fs::rename(repo_path, &backup_repo_path).map_err(|err| {
format!(
"failed to move previous curated plugins repo out of the way at {}: {err}",
repo_path.display()
)
})?;
start_startup_remote_plugin_sync_once(
Arc::clone(&manager),
tmp.path().to_path_buf(),
config,
auth_manager,
);
let marker_path = tmp.path().join(STARTUP_REMOTE_PLUGIN_SYNC_MARKER_FILE);
tokio::time::timeout(Duration::from_secs(5), async {
loop {
if marker_path.is_file() {
break;
if let Err(err) = std::fs::rename(staged_repo_path, repo_path) {
let rollback_result = std::fs::rename(&backup_repo_path, repo_path);
return match rollback_result {
Ok(()) => Err(format!(
"failed to activate new curated plugins repo at {}: {err}",
repo_path.display()
)),
Err(rollback_err) => {
let backup_path = backup_dir.keep().join("repo");
Err(format!(
"failed to activate new curated plugins repo at {}: {err}; failed to restore previous repo (left at {}): {rollback_err}",
repo_path.display(),
backup_path.display()
))
}
tokio::time::sleep(Duration::from_millis(10)).await;
};
}
} else {
std::fs::rename(staged_repo_path, repo_path).map_err(|err| {
format!(
"failed to activate curated plugins repo at {}: {err}",
repo_path.display()
)
})?;
}
Ok(())
}
/// Persists the synced sha (newline-terminated) to the sha marker file,
/// creating its parent directory first when needed.
fn write_curated_plugins_sha(sha_path: &Path, remote_sha: &str) -> Result<(), String> {
    if let Some(parent) = sha_path.parent() {
        let create_result = std::fs::create_dir_all(parent);
        create_result.map_err(|err| {
            format!(
                "failed to create curated plugins sha directory {}: {err}",
                parent.display()
            )
        })?;
    }
    let contents = format!("{remote_sha}\n");
    std::fs::write(sha_path, contents).map_err(|err| {
        format!(
            "failed to write curated plugins sha file {}: {err}",
            sha_path.display()
        )
    })
}
fn read_local_git_or_sha_file(
repo_path: &Path,
sha_path: &Path,
git_binary: &str,
) -> Option<String> {
if repo_path.join(".git").is_dir()
&& let Ok(sha) = git_head_sha(repo_path, git_binary)
{
return Some(sha);
}
read_sha_file(sha_path)
}
/// Asks the remote for its HEAD sha via `git ls-remote`.
fn git_ls_remote_head_sha(git_binary: &str) -> Result<String, String> {
    let context = "git ls-remote curated plugins repo";
    let mut command = Command::new(git_binary);
    command
        .env("GIT_OPTIONAL_LOCKS", "0")
        .arg("ls-remote")
        .arg("https://github.com/openai/plugins.git")
        .arg("HEAD");
    let output = run_git_command_with_timeout(&mut command, context, CURATED_PLUGINS_GIT_TIMEOUT)?;
    ensure_git_success(&output, context)?;
    // ls-remote prints `<sha>\t<ref>` per line; HEAD is the only ref asked for.
    let stdout = String::from_utf8_lossy(&output.stdout);
    let Some(first_line) = stdout.lines().next() else {
        return Err("git ls-remote returned empty output for curated plugins repo".to_string());
    };
    let Some((sha, _)) = first_line.split_once('\t') else {
        return Err(format!(
            "unexpected git ls-remote output for curated plugins repo: {first_line}"
        ));
    };
    if sha.is_empty() {
        return Err("git ls-remote returned empty sha for curated plugins repo".to_string());
    }
    Ok(sha.to_string())
}
/// Resolves the HEAD commit sha of a local checkout via `git rev-parse`.
fn git_head_sha(repo_path: &Path, git_binary: &str) -> Result<String, String> {
    let output = Command::new(git_binary)
        .env("GIT_OPTIONAL_LOCKS", "0")
        .arg("-C")
        .arg(repo_path)
        .arg("rev-parse")
        .arg("HEAD")
        .output()
        .map_err(|err| {
            format!(
                "failed to run git rev-parse HEAD in {}: {err}",
                repo_path.display()
            )
        })?;
    ensure_git_success(&output, "git rev-parse HEAD")?;
    let stdout = String::from_utf8_lossy(&output.stdout);
    let sha = stdout.trim();
    if sha.is_empty() {
        Err(format!(
            "git rev-parse HEAD returned empty output in {}",
            repo_path.display()
        ))
    } else {
        Ok(sha.to_string())
    }
}
fn run_git_command_with_timeout(
command: &mut Command,
context: &str,
timeout: Duration,
) -> Result<Output, String> {
let mut child = command
.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
.map_err(|err| format!("failed to run {context}: {err}"))?;
let start = std::time::Instant::now();
loop {
match child.try_wait() {
Ok(Some(_)) => {
return child
.wait_with_output()
.map_err(|err| format!("failed to wait for {context}: {err}"));
}
})
.await
.expect("marker should be written");
Ok(None) => {}
Err(err) => return Err(format!("failed to poll {context}: {err}")),
}
assert!(
tmp.path()
.join(format!(
"plugins/cache/openai-curated/linear/{TEST_CURATED_PLUGIN_SHA}"
if start.elapsed() >= timeout {
match child.try_wait() {
Ok(Some(_)) => {
return child
.wait_with_output()
.map_err(|err| format!("failed to wait for {context}: {err}"));
}
Ok(None) => {}
Err(err) => return Err(format!("failed to poll {context}: {err}")),
}
let _ = child.kill();
let output = child
.wait_with_output()
.map_err(|err| format!("failed to wait for {context} after timeout: {err}"))?;
let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string();
return if stderr.is_empty() {
Err(format!("{context} timed out after {}s", timeout.as_secs()))
} else {
Err(format!(
"{context} timed out after {}s: {stderr}",
timeout.as_secs()
))
.is_dir()
);
let config = std::fs::read_to_string(tmp.path().join(CONFIG_TOML_FILE))
.expect("config should exist");
assert!(config.contains(r#"[plugins."linear@openai-curated"]"#));
assert!(config.contains("enabled = true"));
};
}
let marker_contents =
std::fs::read_to_string(marker_path).expect("marker should be readable");
assert_eq!(marker_contents, "ok\n");
std::thread::sleep(Duration::from_millis(100));
}
}
/// Maps a non-zero git exit status into a descriptive error, including any
/// stderr output the command produced.
fn ensure_git_success(output: &Output, context: &str) -> Result<(), String> {
    if output.status.success() {
        return Ok(());
    }
    let status = output.status;
    let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string();
    if stderr.is_empty() {
        return Err(format!("{context} failed with status {status}"));
    }
    Err(format!("{context} failed with status {status}: {stderr}"))
}
/// Resolves the default-branch HEAD sha of the curated plugins repo through
/// the GitHub REST API (repository lookup, then git-ref lookup).
async fn fetch_curated_repo_remote_sha(api_base_url: &str) -> Result<String, String> {
    let base = api_base_url.trim_end_matches('/');
    let repo_url = format!("{base}/repos/{OPENAI_PLUGINS_OWNER}/{OPENAI_PLUGINS_REPO}");
    let client = build_reqwest_client();

    // Step 1: discover the default branch name.
    let repo_body = fetch_github_text(&client, &repo_url, "get curated plugins repository").await?;
    let repo_summary = serde_json::from_str::<GitHubRepositorySummary>(&repo_body)
        .map_err(|err| {
            format!("failed to parse curated plugins repository response from {repo_url}: {err}")
        })?;
    let default_branch = repo_summary.default_branch;
    if default_branch.is_empty() {
        return Err(format!(
            "curated plugins repository response from {repo_url} did not include a default branch"
        ));
    }

    // Step 2: resolve that branch's HEAD sha.
    let git_ref_url = format!("{repo_url}/git/ref/heads/{default_branch}");
    let git_ref_body =
        fetch_github_text(&client, &git_ref_url, "get curated plugins HEAD ref").await?;
    let git_ref = serde_json::from_str::<GitHubGitRefSummary>(&git_ref_body).map_err(|err| {
        format!("failed to parse curated plugins ref response from {git_ref_url}: {err}")
    })?;
    let sha = git_ref.object.sha;
    if sha.is_empty() {
        return Err(format!(
            "curated plugins ref response from {git_ref_url} did not include a HEAD sha"
        ));
    }
    Ok(sha)
}
/// Downloads the zipball for `remote_sha` via the GitHub API
/// (`GET /repos/{owner}/{repo}/zipball/{ref}`).
async fn fetch_curated_repo_zipball(
    api_base_url: &str,
    remote_sha: &str,
) -> Result<Vec<u8>, String> {
    let base = api_base_url.trim_end_matches('/');
    let zipball_url = format!(
        "{base}/repos/{OPENAI_PLUGINS_OWNER}/{OPENAI_PLUGINS_REPO}/zipball/{remote_sha}"
    );
    let client = build_reqwest_client();
    fetch_github_bytes(&client, &zipball_url, "download curated plugins archive").await
}
/// Performs a GitHub GET and returns the body as text. Non-2xx responses are
/// turned into errors that include the (best-effort) body.
async fn fetch_github_text(client: &Client, url: &str, context: &str) -> Result<String, String> {
    let response = match github_request(client, url).send().await {
        Ok(response) => response,
        Err(err) => return Err(format!("failed to {context} from {url}: {err}")),
    };
    let status = response.status();
    // Read the body even on failure so the error message can include it.
    let body = response.text().await.unwrap_or_default();
    if status.is_success() {
        Ok(body)
    } else {
        Err(format!(
            "{context} from {url} failed with status {status}: {body}"
        ))
    }
}
/// Issue a GET to `url` and return the raw response bytes.
///
/// `context` labels the operation in error messages. On a non-2xx status the
/// body is decoded lossily and folded into the error string.
async fn fetch_github_bytes(client: &Client, url: &str, context: &str) -> Result<Vec<u8>, String> {
    let response = github_request(client, url)
        .send()
        .await
        .map_err(|err| format!("failed to {context} from {url}: {err}"))?;
    let status = response.status();
    let body = response
        .bytes()
        .await
        .map_err(|err| format!("failed to read {context} response from {url}: {err}"))?;
    if status.is_success() {
        return Ok(body.to_vec());
    }
    let body_text = String::from_utf8_lossy(&body);
    Err(format!(
        "{context} from {url} failed with status {status}: {body_text}"
    ))
}
/// Build a GET request for the GitHub API: applies the curated-plugins HTTP
/// timeout plus the standard Accept and API-version headers.
fn github_request(client: &Client, url: &str) -> reqwest::RequestBuilder {
    let request = client.get(url).timeout(CURATED_PLUGINS_HTTP_TIMEOUT);
    request
        .header("accept", GITHUB_API_ACCEPT_HEADER)
        .header("x-github-api-version", GITHUB_API_VERSION_HEADER)
}
/// Read a sha marker file and return its trimmed contents.
///
/// Returns `None` when the file is missing, unreadable, or contains only
/// whitespace.
fn read_sha_file(sha_path: &Path) -> Option<String> {
    let contents = std::fs::read_to_string(sha_path).ok()?;
    let sha = contents.trim();
    if sha.is_empty() {
        None
    } else {
        Some(sha.to_string())
    }
}
fn extract_zipball_to_dir(bytes: &[u8], destination: &Path) -> Result<(), String> {
std::fs::create_dir_all(destination).map_err(|err| {
format!(
"failed to create curated plugins extraction directory {}: {err}",
destination.display()
)
})?;
let cursor = std::io::Cursor::new(bytes);
let mut archive = ZipArchive::new(cursor)
.map_err(|err| format!("failed to open curated plugins zip archive: {err}"))?;
for index in 0..archive.len() {
let mut entry = archive
.by_index(index)
.map_err(|err| format!("failed to read curated plugins zip entry: {err}"))?;
let Some(relative_path) = entry.enclosed_name() else {
return Err(format!(
"curated plugins zip entry `{}` escapes extraction root",
entry.name()
));
};
let mut components = relative_path.components();
let Some(std::path::Component::Normal(_)) = components.next() else {
continue;
};
let output_relative = components.fold(PathBuf::new(), |mut path, component| {
if let std::path::Component::Normal(segment) = component {
path.push(segment);
}
path
});
if output_relative.as_os_str().is_empty() {
continue;
}
let output_path = destination.join(&output_relative);
if entry.is_dir() {
std::fs::create_dir_all(&output_path).map_err(|err| {
format!(
"failed to create curated plugins directory {}: {err}",
output_path.display()
)
})?;
continue;
}
if let Some(parent) = output_path.parent() {
std::fs::create_dir_all(parent).map_err(|err| {
format!(
"failed to create curated plugins directory {}: {err}",
parent.display()
)
})?;
}
let mut output = std::fs::File::create(&output_path).map_err(|err| {
format!(
"failed to create curated plugins file {}: {err}",
output_path.display()
)
})?;
std::io::copy(&mut entry, &mut output).map_err(|err| {
format!(
"failed to write curated plugins file {}: {err}",
output_path.display()
)
})?;
apply_zip_permissions(&entry, &output_path)?;
}
Ok(())
}
/// Re-apply the Unix mode recorded for a zip entry to the extracted file.
/// Entries with no recorded mode are left with default permissions.
#[cfg(unix)]
fn apply_zip_permissions(entry: &zip::read::ZipFile<'_>, output_path: &Path) -> Result<(), String> {
    use std::os::unix::fs::PermissionsExt;
    match entry.unix_mode() {
        None => Ok(()),
        Some(mode) => {
            let permissions = std::fs::Permissions::from_mode(mode);
            std::fs::set_permissions(output_path, permissions).map_err(|err| {
                format!(
                    "failed to set permissions on curated plugins file {}: {err}",
                    output_path.display()
                )
            })
        }
    }
}
/// No-op on non-Unix targets: zip-recorded Unix modes have no meaning there.
#[cfg(not(unix))]
fn apply_zip_permissions(
    _entry: &zip::read::ZipFile<'_>,
    _output_path: &Path,
) -> Result<(), String> {
    Ok(())
}
// Unit tests live in a sibling file rather than an inline `mod tests` block.
#[cfg(test)]
#[path = "startup_sync_tests.rs"]
mod tests;

View File

@@ -0,0 +1,383 @@
use super::*;
use crate::auth::CodexAuth;
use crate::config::CONFIG_TOML_FILE;
use crate::plugins::test_support::TEST_CURATED_PLUGIN_SHA;
use crate::plugins::test_support::write_curated_plugin_sha;
use crate::plugins::test_support::write_file;
use crate::plugins::test_support::write_openai_curated_marketplace;
use pretty_assertions::assert_eq;
use std::io::Write;
use tempfile::tempdir;
use wiremock::Mock;
use wiremock::MockServer;
use wiremock::ResponseTemplate;
use wiremock::matchers::header;
use wiremock::matchers::method;
use wiremock::matchers::path;
use zip::ZipWriter;
use zip::write::SimpleFileOptions;
/// The curated repo checkout lives under `<codex_home>/.tmp/plugins`.
#[test]
fn curated_plugins_repo_path_uses_codex_home_tmp_dir() {
    let codex_home = tempdir().expect("tempdir");
    let expected = codex_home.path().join(".tmp/plugins");
    assert_eq!(curated_plugins_repo_path(codex_home.path()), expected);
}
/// The recorded sha is read from `<codex_home>/.tmp/plugins.sha` with
/// surrounding whitespace trimmed.
#[test]
fn read_curated_plugins_sha_reads_trimmed_sha_file() {
    let codex_home = tempdir().expect("tempdir");
    let tmp_dir = codex_home.path().join(".tmp");
    std::fs::create_dir_all(&tmp_dir).expect("create tmp");
    std::fs::write(tmp_dir.join("plugins.sha"), "abc123\n").expect("write sha");
    assert_eq!(
        read_curated_plugins_sha(codex_home.path()).as_deref(),
        Some("abc123")
    );
}
/// Happy path: with a working `git` binary available, sync clones the
/// curated repo and records its HEAD sha without ever using HTTP — the
/// API base URL below points at an unroutable port, so any HTTP attempt
/// would fail the test.
#[cfg(unix)]
#[test]
fn sync_openai_plugins_repo_prefers_git_when_available() {
    use std::os::unix::fs::PermissionsExt;
    let tmp = tempdir().expect("tempdir");
    // Stand up a fake `git` executable scripting the three subcommands the
    // sync path issues: `ls-remote`, `clone`, and `-C <dir> rev-parse HEAD`.
    let bin_dir = tempfile::Builder::new()
        .prefix("fake-git-")
        .tempdir()
        .expect("tempdir");
    let git_path = bin_dir.path().join("git");
    let sha = "0123456789abcdef0123456789abcdef01234567";
    std::fs::write(
        &git_path,
        format!(
            r#"#!/bin/sh
if [ "$1" = "ls-remote" ]; then
printf '%s\tHEAD\n' "{sha}"
exit 0
fi
if [ "$1" = "clone" ]; then
dest="$5"
mkdir -p "$dest/.git" "$dest/.agents/plugins" "$dest/plugins/gmail/.codex-plugin"
cat > "$dest/.agents/plugins/marketplace.json" <<'EOF'
{{"name":"openai-curated","plugins":[{{"name":"gmail","source":{{"source":"local","path":"./plugins/gmail"}}}}]}}
EOF
printf '%s\n' '{{"name":"gmail"}}' > "$dest/plugins/gmail/.codex-plugin/plugin.json"
exit 0
fi
if [ "$1" = "-C" ] && [ "$3" = "rev-parse" ] && [ "$4" = "HEAD" ]; then
printf '%s\n' "{sha}"
exit 0
fi
echo "unexpected git invocation: $@" >&2
exit 1
"#
        ),
    )
    .expect("write fake git");
    // Mark the stub executable so the sync path can spawn it.
    let mut permissions = std::fs::metadata(&git_path)
        .expect("metadata")
        .permissions();
    permissions.set_mode(0o755);
    std::fs::set_permissions(&git_path, permissions).expect("chmod");
    let synced_sha = sync_openai_plugins_repo_with_transport_overrides(
        tmp.path(),
        git_path.to_str().expect("utf8 path"),
        "http://127.0.0.1:9",
    )
    .expect("git sync should succeed");
    // The git path leaves a `.git` dir behind, unlike the zipball fallback.
    assert_eq!(synced_sha, sha);
    assert!(curated_plugins_repo_path(tmp.path()).join(".git").is_dir());
    assert!(
        curated_plugins_repo_path(tmp.path())
            .join(".agents/plugins/marketplace.json")
            .is_file()
    );
    assert_eq!(read_curated_plugins_sha(tmp.path()).as_deref(), Some(sha));
}
/// When the configured `git` binary does not exist, sync should fall back to
/// the GitHub HTTP API: repository metadata -> branch HEAD sha -> zipball.
#[tokio::test]
async fn sync_openai_plugins_repo_falls_back_to_http_when_git_is_unavailable() {
    let tmp = tempdir().expect("tempdir");
    let server = MockServer::start().await;
    let sha = "0123456789abcdef0123456789abcdef01234567";
    // Mock the three GitHub API calls the HTTP fallback makes.
    Mock::given(method("GET"))
        .and(path("/repos/openai/plugins"))
        .respond_with(ResponseTemplate::new(200).set_body_string(r#"{"default_branch":"main"}"#))
        .mount(&server)
        .await;
    Mock::given(method("GET"))
        .and(path("/repos/openai/plugins/git/ref/heads/main"))
        .respond_with(
            ResponseTemplate::new(200)
                .set_body_string(format!(r#"{{"object":{{"sha":"{sha}"}}}}"#)),
        )
        .mount(&server)
        .await;
    Mock::given(method("GET"))
        .and(path(format!("/repos/openai/plugins/zipball/{sha}")))
        .respond_with(
            ResponseTemplate::new(200)
                .insert_header("content-type", "application/zip")
                .set_body_bytes(curated_repo_zipball_bytes(sha)),
        )
        .mount(&server)
        .await;
    let server_uri = server.uri();
    let tmp_path = tmp.path().to_path_buf();
    // The sync entry point is blocking; run it off the async test runtime.
    let synced_sha = tokio::task::spawn_blocking(move || {
        sync_openai_plugins_repo_with_transport_overrides(
            tmp_path.as_path(),
            "missing-git-for-test",
            &server_uri,
        )
    })
    .await
    .expect("sync task should join")
    .expect("fallback sync should succeed");
    let repo_path = curated_plugins_repo_path(tmp.path());
    assert_eq!(synced_sha, sha);
    assert!(repo_path.join(".agents/plugins/marketplace.json").is_file());
    assert!(
        repo_path
            .join("plugins/gmail/.codex-plugin/plugin.json")
            .is_file()
    );
    assert_eq!(read_curated_plugins_sha(tmp.path()).as_deref(), Some(sha));
}
/// When `git` exists but every invocation fails, sync should still succeed
/// by falling back to the HTTP zipball download path.
#[cfg(unix)]
#[tokio::test]
async fn sync_openai_plugins_repo_falls_back_to_http_when_git_sync_fails() {
    use std::os::unix::fs::PermissionsExt;
    let tmp = tempdir().expect("tempdir");
    // Fake `git` that always exits non-zero, forcing the HTTP fallback.
    let bin_dir = tempfile::Builder::new()
        .prefix("fake-git-fail-")
        .tempdir()
        .expect("tempdir");
    let git_path = bin_dir.path().join("git");
    let sha = "0123456789abcdef0123456789abcdef01234567";
    std::fs::write(
        &git_path,
        r#"#!/bin/sh
echo "simulated git failure" >&2
exit 1
"#,
    )
    .expect("write fake git");
    // Mark the stub executable so the sync path can spawn it.
    let mut permissions = std::fs::metadata(&git_path)
        .expect("metadata")
        .permissions();
    permissions.set_mode(0o755);
    std::fs::set_permissions(&git_path, permissions).expect("chmod");
    // Mock the three GitHub API calls the HTTP fallback makes.
    let server = MockServer::start().await;
    Mock::given(method("GET"))
        .and(path("/repos/openai/plugins"))
        .respond_with(ResponseTemplate::new(200).set_body_string(r#"{"default_branch":"main"}"#))
        .mount(&server)
        .await;
    Mock::given(method("GET"))
        .and(path("/repos/openai/plugins/git/ref/heads/main"))
        .respond_with(
            ResponseTemplate::new(200)
                .set_body_string(format!(r#"{{"object":{{"sha":"{sha}"}}}}"#)),
        )
        .mount(&server)
        .await;
    Mock::given(method("GET"))
        .and(path(format!("/repos/openai/plugins/zipball/{sha}")))
        .respond_with(
            ResponseTemplate::new(200)
                .insert_header("content-type", "application/zip")
                .set_body_bytes(curated_repo_zipball_bytes(sha)),
        )
        .mount(&server)
        .await;
    let server_uri = server.uri();
    let tmp_path = tmp.path().to_path_buf();
    // The sync entry point is blocking; run it off the async test runtime.
    let synced_sha = tokio::task::spawn_blocking(move || {
        sync_openai_plugins_repo_with_transport_overrides(
            tmp_path.as_path(),
            git_path.to_str().expect("utf8 path"),
            &server_uri,
        )
    })
    .await
    .expect("sync task should join")
    .expect("fallback sync should succeed");
    let repo_path = curated_plugins_repo_path(tmp.path());
    assert_eq!(synced_sha, sha);
    assert!(repo_path.join(".agents/plugins/marketplace.json").is_file());
    assert!(
        repo_path
            .join("plugins/gmail/.codex-plugin/plugin.json")
            .is_file()
    );
    assert_eq!(read_curated_plugins_sha(tmp.path()).as_deref(), Some(sha));
}
/// When the remote HEAD sha matches the locally recorded sha, sync must not
/// download the archive: no zipball mock is registered, so any download
/// attempt would make the sync fail.
#[tokio::test]
async fn sync_openai_plugins_repo_skips_archive_download_when_sha_matches() {
    let tmp = tempdir().expect("tempdir");
    // Pre-populate an extracted repo plus a sha marker matching the remote.
    let repo_path = curated_plugins_repo_path(tmp.path());
    std::fs::create_dir_all(repo_path.join(".agents/plugins")).expect("create repo");
    std::fs::write(
        repo_path.join(".agents/plugins/marketplace.json"),
        r#"{"name":"openai-curated","plugins":[]}"#,
    )
    .expect("write marketplace");
    std::fs::create_dir_all(tmp.path().join(".tmp")).expect("create tmp");
    let sha = "fedcba9876543210fedcba9876543210fedcba98";
    std::fs::write(tmp.path().join(".tmp/plugins.sha"), format!("{sha}\n")).expect("write sha");
    // Only the metadata and ref endpoints are mocked — no zipball endpoint.
    let server = MockServer::start().await;
    Mock::given(method("GET"))
        .and(path("/repos/openai/plugins"))
        .respond_with(ResponseTemplate::new(200).set_body_string(r#"{"default_branch":"main"}"#))
        .mount(&server)
        .await;
    Mock::given(method("GET"))
        .and(path("/repos/openai/plugins/git/ref/heads/main"))
        .respond_with(
            ResponseTemplate::new(200)
                .set_body_string(format!(r#"{{"object":{{"sha":"{sha}"}}}}"#)),
        )
        .mount(&server)
        .await;
    let server_uri = server.uri();
    let tmp_path = tmp.path().to_path_buf();
    // The sync entry point is blocking; run it off the async test runtime.
    tokio::task::spawn_blocking(move || {
        sync_openai_plugins_repo_with_transport_overrides(
            tmp_path.as_path(),
            "missing-git-for-test",
            &server_uri,
        )
    })
    .await
    .expect("sync task should join")
    .expect("sync should succeed");
    assert_eq!(read_curated_plugins_sha(tmp.path()).as_deref(), Some(sha));
    assert!(repo_path.join(".agents/plugins/marketplace.json").is_file());
}
/// End-to-end startup sync: the background task should reconcile remote
/// plugin enablement into config.toml, populate the plugin cache, and write
/// the completion marker file.
#[tokio::test]
async fn startup_remote_plugin_sync_writes_marker_and_reconciles_state() {
    let tmp = tempdir().expect("tempdir");
    let curated_root = curated_plugins_repo_path(tmp.path());
    write_openai_curated_marketplace(&curated_root, &["linear"]);
    write_curated_plugin_sha(tmp.path());
    // Local config starts with the plugin disabled; the remote says enabled.
    write_file(
        &tmp.path().join(CONFIG_TOML_FILE),
        r#"[features]
plugins = true
[plugins."linear@openai-curated"]
enabled = false
"#,
    );
    // Remote plugin list reporting `linear` as enabled; the header matchers
    // pin the auth token and account id the sync is expected to send.
    let server = MockServer::start().await;
    Mock::given(method("GET"))
        .and(path("/backend-api/plugins/list"))
        .and(header("authorization", "Bearer Access Token"))
        .and(header("chatgpt-account-id", "account_id"))
        .respond_with(ResponseTemplate::new(200).set_body_string(
            r#"[
{"id":"1","name":"linear","marketplace_name":"openai-curated","version":"1.0.0","enabled":true}
]"#,
        ))
        .mount(&server)
        .await;
    let mut config = crate::plugins::test_support::load_plugins_config(tmp.path()).await;
    config.chatgpt_base_url = format!("{}/backend-api/", server.uri());
    let manager = Arc::new(PluginsManager::new(tmp.path().to_path_buf()));
    let auth_manager =
        AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
    start_startup_remote_plugin_sync_once(
        Arc::clone(&manager),
        tmp.path().to_path_buf(),
        config,
        auth_manager,
    );
    // The sync runs in the background; poll for the marker file with a
    // timeout instead of sleeping a fixed interval.
    let marker_path = tmp.path().join(STARTUP_REMOTE_PLUGIN_SYNC_MARKER_FILE);
    tokio::time::timeout(Duration::from_secs(5), async {
        loop {
            if marker_path.is_file() {
                break;
            }
            tokio::time::sleep(Duration::from_millis(10)).await;
        }
    })
    .await
    .expect("marker should be written");
    // The plugin cache should be keyed by the curated sha.
    assert!(
        tmp.path()
            .join(format!(
                "plugins/cache/openai-curated/linear/{TEST_CURATED_PLUGIN_SHA}"
            ))
            .is_dir()
    );
    // The remote `enabled: true` should have been written back to config.
    let config =
        std::fs::read_to_string(tmp.path().join(CONFIG_TOML_FILE)).expect("config should exist");
    assert!(config.contains(r#"[plugins."linear@openai-curated"]"#));
    assert!(config.contains("enabled = true"));
    let marker_contents = std::fs::read_to_string(marker_path).expect("marker should be readable");
    assert_eq!(marker_contents, "ok\n");
}
/// Build an in-memory zip matching GitHub's zipball layout: a single
/// synthetic `openai-plugins-{sha}/` root directory containing a curated
/// marketplace manifest and one plugin manifest.
fn curated_repo_zipball_bytes(sha: &str) -> Vec<u8> {
    let cursor = std::io::Cursor::new(Vec::new());
    let mut writer = ZipWriter::new(cursor);
    let options = SimpleFileOptions::default();
    // GitHub prefixes every entry with an `owner-repo-sha` root directory,
    // which the extractor is expected to strip.
    let root = format!("openai-plugins-{sha}");
    writer
        .start_file(format!("{root}/.agents/plugins/marketplace.json"), options)
        .expect("start marketplace entry");
    writer
        .write_all(
            br#"{
"name": "openai-curated",
"plugins": [
{
"name": "gmail",
"source": {
"source": "local",
"path": "./plugins/gmail"
}
}
]
}"#,
        )
        .expect("write marketplace");
    writer
        .start_file(
            format!("{root}/plugins/gmail/.codex-plugin/plugin.json"),
            options,
        )
        .expect("start plugin manifest entry");
    writer
        .write_all(br#"{"name":"gmail"}"#)
        .expect("write plugin manifest");
    writer.finish().expect("finish zip writer").into_inner()
}