Compare commits

...

7 Commits

Author SHA1 Message Date
Ahmed Ibrahim
c69b7fa2c5 codex: fix CI failure on PR #15333
Co-authored-by: Codex <noreply@openai.com>
2026-03-20 19:10:25 +00:00
Ahmed Ibrahim
8efafe10c2 codex: fix lint regressions in capability extraction
Co-authored-by: Codex <noreply@openai.com>
2026-03-20 18:56:13 +00:00
Ahmed Ibrahim
51904eabe4 codex: enable tokio test macros in capabilities
Co-authored-by: Codex <noreply@openai.com>
2026-03-20 18:27:22 +00:00
Ahmed Ibrahim
a541fa7346 codex: fix CI failure on PR #15333
Co-authored-by: Codex <noreply@openai.com>
2026-03-20 18:09:10 +00:00
Ahmed Ibrahim
9381ab4ea0 Update lockfile for manifest cleanup
Refresh Cargo.lock after removing stale dependency declarations from the extraction refactor.

Co-authored-by: Codex <noreply@openai.com>
2026-03-20 17:28:36 +00:00
Ahmed Ibrahim
8bdbc4c85f Remove stale manifest deps from capabilities split
Drop unused dependency declarations uncovered by the crate extraction follow-up.

Co-authored-by: Codex <noreply@openai.com>
2026-03-20 17:27:04 +00:00
Ahmed Ibrahim
9d91be7713 Extract plugins, apps, and skills into codex-capabilities
Move capability-owned apps, plugins, and skills logic into a new workspace crate and keep codex-core as the integration layer.

Co-authored-by: Codex <noreply@openai.com>
2026-03-20 17:19:04 +00:00
53 changed files with 8634 additions and 3906 deletions

31
codex-rs/Cargo.lock generated
View File

@@ -1623,6 +1623,35 @@ dependencies = [
"serde_with",
]
[[package]]
name = "codex-capabilities"
version = "0.0.0"
dependencies = [
"anyhow",
"async-trait",
"codex-app-server-protocol",
"codex-config",
"codex-login",
"codex-protocol",
"codex-skills",
"codex-utils-absolute-path",
"dirs",
"dunce",
"pretty_assertions",
"reqwest",
"schemars 0.8.22",
"serde",
"serde_json",
"serde_yaml",
"tempfile",
"thiserror 2.0.18",
"tokio",
"toml 0.9.11+spec-1.1.0",
"tracing",
"url",
"zip",
]
[[package]]
name = "codex-chatgpt"
version = "0.0.0"
@@ -1845,6 +1874,7 @@ dependencies = [
"codex-arg0",
"codex-artifacts",
"codex-async-utils",
"codex-capabilities",
"codex-config",
"codex-connectors",
"codex-exec-server",
@@ -1861,7 +1891,6 @@ dependencies = [
"codex-secrets",
"codex-shell-command",
"codex-shell-escalation",
"codex-skills",
"codex-state",
"codex-terminal-detection",
"codex-test-macros",

View File

@@ -17,6 +17,7 @@ members = [
"cloud-tasks",
"cloud-tasks-client",
"cli",
"capabilities",
"connectors",
"config",
"shell-command",
@@ -99,6 +100,7 @@ codex-apply-patch = { path = "apply-patch" }
codex-arg0 = { path = "arg0" }
codex-async-utils = { path = "async-utils" }
codex-backend-client = { path = "backend-client" }
codex-capabilities = { path = "capabilities" }
codex-chatgpt = { path = "chatgpt" }
codex-cli = { path = "cli" }
codex-client = { path = "codex-client" }

View File

@@ -0,0 +1,15 @@
load("//:defs.bzl", "codex_rust_crate")

# Bazel build target for the `codex-capabilities` workspace crate.
codex_rust_crate(
    name = "capabilities",
    crate_name = "codex_capabilities",
    # Include all crate files as compile data, except build metadata and any
    # paths containing spaces (which would break downstream tooling).
    compile_data = glob(
        include = ["**"],
        exclude = [
            "**/* *",
            "BUILD.bazel",
            "Cargo.toml",
        ],
        # The glob may legitimately match nothing for a source-only crate.
        allow_empty = True,
    ),
)

View File

@@ -0,0 +1,41 @@
# Manifest for the `codex-capabilities` crate: apps, plugins, and skills
# logic extracted from codex-core into its own workspace member.
[package]
edition.workspace = true
license.workspace = true
name = "codex-capabilities"
version.workspace = true

[lib]
doctest = false
name = "codex_capabilities"
path = "src/lib.rs"

[lints]
workspace = true

[dependencies]
anyhow = { workspace = true }
async-trait = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-config = { workspace = true }
codex-login = { workspace = true }
codex-protocol = { workspace = true }
codex-skills = { workspace = true }
codex-utils-absolute-path = { workspace = true }
dirs = { workspace = true }
dunce = { workspace = true }
reqwest = { workspace = true, features = ["json", "stream"] }
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
serde_yaml = { workspace = true }
tempfile = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["fs", "macros", "rt-multi-thread", "sync", "time"] }
toml = { workspace = true }
tracing = { workspace = true, features = ["log"] }
url = { workspace = true }
zip = { workspace = true }

[dev-dependencies]
pretty_assertions = { workspace = true }
tempfile = { workspace = true }

View File

@@ -0,0 +1,3 @@
mod render;
pub use render::render_apps_section;

View File

@@ -0,0 +1,11 @@
use codex_protocol::protocol::APPS_INSTRUCTIONS_CLOSE_TAG;
use codex_protocol::protocol::APPS_INSTRUCTIONS_OPEN_TAG;
const CODEX_APPS_MCP_SERVER_NAME: &str = "codex_apps";

/// Render the Apps (Connectors) instruction block that is injected into the
/// prompt, wrapped in the protocol-defined open/close tags.
pub fn render_apps_section() -> String {
    // The instruction text explains explicit `[$app-name](app://...)` triggers
    // and implicit discovery via the `tool_search` tool.
    let instructions = format!(
        "## Apps (Connectors)\nApps (Connectors) can be explicitly triggered in user messages in the format `[$app-name](app://{{connector_id}})`. Apps can also be implicitly triggered as long as the context suggests usage of available apps, the available apps will be listed by the `tool_search` tool.\nAn app is equivalent to a set of MCP tools within the `{CODEX_APPS_MCP_SERVER_NAME}` MCP.\nAn installed app's MCP tools are either provided to you already, or can be lazy-loaded through the `tool_search` tool.\nDo not additionally call list_mcp_resources or list_mcp_resource_templates for apps."
    );
    format!("{APPS_INSTRUCTIONS_OPEN_TAG}\n{instructions}\n{APPS_INSTRUCTIONS_CLOSE_TAG}")
}

View File

@@ -0,0 +1,292 @@
use codex_config::RequirementSource;
use codex_utils_absolute_path::AbsolutePathBuf;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Deserializer;
use serde::Serialize;
use serde::de::Error as SerdeError;
use std::collections::HashMap;
use std::fmt;
use std::path::PathBuf;
use std::time::Duration;
/// Why an MCP server entry is currently disabled.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum McpServerDisabledReason {
    /// Disabled with no recorded cause.
    Unknown,
    /// Disabled because a config requirements layer (identified by `source`)
    /// forbids it.
    Requirements { source: RequirementSource },
}

impl fmt::Display for McpServerDisabledReason {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Unknown => write!(f, "unknown"),
            Self::Requirements { source } => {
                write!(f, "requirements ({source})")
            }
        }
    }
}

/// Validated configuration for one MCP server.
///
/// Deserialization goes through the custom `Deserialize` impl below, which
/// accepts the flat `RawMcpServerConfig` shape and derives the transport.
#[derive(Serialize, Debug, Clone, PartialEq)]
pub struct McpServerConfig {
    // Transport fields share the same table as the rest of the config.
    #[serde(flatten)]
    pub transport: McpServerTransportConfig,
    // Servers default to enabled unless explicitly turned off.
    #[serde(default = "default_enabled")]
    pub enabled: bool,
    // Only serialized when true.
    #[serde(default, skip_serializing_if = "std::ops::Not::not")]
    pub required: bool,
    // Runtime-only state: never serialized, populated by requirement checks.
    #[serde(skip)]
    pub disabled_reason: Option<McpServerDisabledReason>,
    // Timeouts are (de)serialized as fractional seconds via `option_duration_secs`.
    #[serde(
        default,
        with = "option_duration_secs",
        skip_serializing_if = "Option::is_none"
    )]
    pub startup_timeout_sec: Option<Duration>,
    #[serde(default, with = "option_duration_secs")]
    pub tool_timeout_sec: Option<Duration>,
    // Optional allow/deny lists filtering the server's tools.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub enabled_tools: Option<Vec<String>>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub disabled_tools: Option<Vec<String>>,
    // OAuth settings; `oauth_resource` is rejected for the stdio transport
    // during deserialization (see the Deserialize impl).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub scopes: Option<Vec<String>>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub oauth_resource: Option<String>,
}
/// Raw, flat wire shape for an MCP server entry as written in config files.
///
/// Every field is optional here; the custom `Deserialize` impl for
/// `McpServerConfig` validates which combinations are legal for each
/// transport (stdio vs. streamable HTTP).
#[derive(Deserialize, Clone, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub struct RawMcpServerConfig {
    // Present => stdio transport.
    pub command: Option<String>,
    #[serde(default)]
    pub args: Option<Vec<String>>,
    #[serde(default)]
    pub env: Option<HashMap<String, String>>,
    #[serde(default)]
    pub env_vars: Option<Vec<String>>,
    #[serde(default)]
    pub cwd: Option<PathBuf>,
    pub http_headers: Option<HashMap<String, String>>,
    #[serde(default)]
    pub env_http_headers: Option<HashMap<String, String>>,
    // Present (and `command` absent) => streamable HTTP transport.
    pub url: Option<String>,
    pub bearer_token: Option<String>,
    pub bearer_token_env_var: Option<String>,
    // Two spellings of the startup timeout; seconds take precedence over
    // milliseconds when both are given (see the Deserialize impl).
    #[serde(default)]
    pub startup_timeout_sec: Option<f64>,
    #[serde(default)]
    pub startup_timeout_ms: Option<u64>,
    #[serde(default, with = "option_duration_secs")]
    #[schemars(with = "Option<f64>")]
    pub tool_timeout_sec: Option<Duration>,
    #[serde(default)]
    pub enabled: Option<bool>,
    #[serde(default)]
    pub required: Option<bool>,
    #[serde(default)]
    pub enabled_tools: Option<Vec<String>>,
    #[serde(default)]
    pub disabled_tools: Option<Vec<String>>,
    #[serde(default)]
    pub scopes: Option<Vec<String>>,
    #[serde(default)]
    pub oauth_resource: Option<String>,
}
impl<'de> Deserialize<'de> for McpServerConfig {
    /// Deserialize from the flat `RawMcpServerConfig` shape, deriving the
    /// transport from which of `command` / `url` is present and rejecting
    /// fields that do not apply to the chosen transport.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let mut raw = RawMcpServerConfig::deserialize(deserializer)?;
        // `startup_timeout_sec` (fractional seconds) wins over `startup_timeout_ms`.
        let startup_timeout_sec = match (raw.startup_timeout_sec, raw.startup_timeout_ms) {
            (Some(sec), _) => {
                // Rejects negative, NaN, and out-of-range values.
                let duration = Duration::try_from_secs_f64(sec).map_err(SerdeError::custom)?;
                Some(duration)
            }
            (None, Some(ms)) => Some(Duration::from_millis(ms)),
            (None, None) => None,
        };
        let tool_timeout_sec = raw.tool_timeout_sec;
        let enabled = raw.enabled.unwrap_or_else(default_enabled);
        let required = raw.required.unwrap_or_default();
        let enabled_tools = raw.enabled_tools.clone();
        let disabled_tools = raw.disabled_tools.clone();
        let scopes = raw.scopes.clone();
        let oauth_resource = raw.oauth_resource.clone();
        // Local helper: error out when a field belonging to the *other*
        // transport was set alongside the chosen one.
        fn throw_if_set<E, T>(transport: &str, field: &str, value: Option<&T>) -> Result<(), E>
        where
            E: SerdeError,
        {
            if value.is_none() {
                return Ok(());
            }
            Err(E::custom(format!(
                "{field} is not supported for {transport}",
            )))
        }
        // A `command` selects stdio; otherwise a `url` selects streamable HTTP.
        let transport = if let Some(command) = raw.command.clone() {
            throw_if_set("stdio", "url", raw.url.as_ref())?;
            throw_if_set(
                "stdio",
                "bearer_token_env_var",
                raw.bearer_token_env_var.as_ref(),
            )?;
            throw_if_set("stdio", "bearer_token", raw.bearer_token.as_ref())?;
            throw_if_set("stdio", "http_headers", raw.http_headers.as_ref())?;
            throw_if_set("stdio", "env_http_headers", raw.env_http_headers.as_ref())?;
            throw_if_set("stdio", "oauth_resource", raw.oauth_resource.as_ref())?;
            McpServerTransportConfig::Stdio {
                command,
                args: raw.args.clone().unwrap_or_default(),
                env: raw.env.clone(),
                env_vars: raw.env_vars.clone().unwrap_or_default(),
                cwd: raw.cwd.take(),
            }
        } else if let Some(url) = raw.url.clone() {
            throw_if_set("streamable_http", "args", raw.args.as_ref())?;
            throw_if_set("streamable_http", "env", raw.env.as_ref())?;
            throw_if_set("streamable_http", "env_vars", raw.env_vars.as_ref())?;
            throw_if_set("streamable_http", "cwd", raw.cwd.as_ref())?;
            throw_if_set("streamable_http", "bearer_token", raw.bearer_token.as_ref())?;
            McpServerTransportConfig::StreamableHttp {
                url,
                bearer_token_env_var: raw.bearer_token_env_var.clone(),
                http_headers: raw.http_headers.clone(),
                env_http_headers: raw.env_http_headers.take(),
            }
        } else {
            // Neither `command` nor `url` was provided.
            return Err(SerdeError::custom("invalid transport"));
        };
        Ok(Self {
            transport,
            startup_timeout_sec,
            tool_timeout_sec,
            enabled,
            required,
            disabled_reason: None,
            enabled_tools,
            disabled_tools,
            scopes,
            oauth_resource,
        })
    }
}
/// Serde default: servers and plugins are enabled unless configured otherwise.
const fn default_enabled() -> bool {
    true
}

/// How to reach an MCP server: spawn a local process (stdio) or talk to a
/// streamable HTTP endpoint. `untagged`: the variant is inferred from which
/// fields are present when this type is deserialized directly.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema)]
#[serde(untagged, deny_unknown_fields, rename_all = "snake_case")]
pub enum McpServerTransportConfig {
    Stdio {
        command: String,
        #[serde(default)]
        args: Vec<String>,
        // Extra environment for the spawned process.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        env: Option<HashMap<String, String>>,
        // Names of host environment variables to pass through.
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        env_vars: Vec<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        cwd: Option<PathBuf>,
    },
    StreamableHttp {
        url: String,
        // Name of the env var holding the bearer token (the token itself is
        // never stored in config).
        #[serde(default, skip_serializing_if = "Option::is_none")]
        bearer_token_env_var: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        http_headers: Option<HashMap<String, String>>,
        // Header name -> env var name; values are resolved at request time.
        // NOTE(review): presumed semantics from the field name — confirm.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        env_http_headers: Option<HashMap<String, String>>,
    },
}
mod option_duration_secs {
use serde::Deserialize;
use serde::Deserializer;
use serde::Serializer;
use std::time::Duration;
pub fn serialize<S>(value: &Option<Duration>, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match value {
Some(duration) => serializer.serialize_some(&duration.as_secs_f64()),
None => serializer.serialize_none(),
}
}
pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<Duration>, D::Error>
where
D: Deserializer<'de>,
{
let secs = Option::<f64>::deserialize(deserializer)?;
secs.map(|secs| Duration::try_from_secs_f64(secs).map_err(serde::de::Error::custom))
.transpose()
}
}
/// Kind of capability that `tool_suggest` can surface.
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum ToolSuggestDiscoverableType {
    Connector,
    Plugin,
}

/// One discoverable entry: its kind plus an identifier.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub struct ToolSuggestDiscoverable {
    // Serialized as `type`; named `kind` to avoid the Rust keyword.
    #[serde(rename = "type")]
    pub kind: ToolSuggestDiscoverableType,
    pub id: String,
}

/// Configuration for tool suggestions: the list of discoverable entries.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub struct ToolSuggestConfig {
    #[serde(default)]
    pub discoverables: Vec<ToolSuggestDiscoverable>,
}

/// A single configured skill: where it lives on disk and whether it is active.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub struct SkillConfig {
    pub path: AbsolutePathBuf,
    pub enabled: bool,
}

/// Per-plugin configuration; plugins default to enabled.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub struct PluginConfig {
    #[serde(default = "default_enabled")]
    pub enabled: bool,
}

/// Top-level skills configuration: bundled-skill toggle plus explicit entries.
#[derive(Serialize, Deserialize, Debug, Clone, Default, PartialEq, Eq, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub struct SkillsConfig {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub bundled: Option<BundledSkillsConfig>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub config: Vec<SkillConfig>,
}

/// Toggle for the skills bundled with Codex; enabled by default.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub struct BundledSkillsConfig {
    #[serde(default = "default_enabled")]
    pub enabled: bool,
}

impl Default for BundledSkillsConfig {
    fn default() -> Self {
        Self { enabled: true }
    }
}

View File

@@ -0,0 +1,7 @@
//! Shared capability-domain logic for apps, plugins, and skills.

/// Prompt-section rendering for Apps (Connectors).
pub mod apps;
/// Config types for MCP servers, plugins, skills, and tool suggestions.
pub mod config_types;
/// Parsing of `@name` / `[@name](scheme://...)` tool mentions in user text.
pub mod mentions;
/// Plugin marketplace, install/uninstall, and manifest handling.
pub mod plugins;
/// Skill discovery and loading.
pub mod skills;

View File

@@ -0,0 +1,176 @@
use std::collections::HashSet;
/// Classification of a tool-mention path by its scheme.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ToolMentionKind {
    App,
    Mcp,
    Plugin,
    Skill,
    /// Any path that matches none of the known schemes.
    Other,
}

// URI-style scheme prefixes recognized in mention paths.
const APP_PATH_PREFIX: &str = "app://";
const MCP_PATH_PREFIX: &str = "mcp://";
const PLUGIN_PATH_PREFIX: &str = "plugin://";
const SKILL_PATH_PREFIX: &str = "skill://";
// A bare reference to this file name is also treated as a skill mention.
const SKILL_FILENAME: &str = "SKILL.md";
/// Classify a mention path by its scheme prefix; paths whose final segment is
/// `SKILL.md` also count as skills even without the `skill://` scheme.
pub fn tool_kind_for_path(path: &str) -> ToolMentionKind {
    let by_prefix = [
        (APP_PATH_PREFIX, ToolMentionKind::App),
        (MCP_PATH_PREFIX, ToolMentionKind::Mcp),
        (PLUGIN_PATH_PREFIX, ToolMentionKind::Plugin),
        (SKILL_PATH_PREFIX, ToolMentionKind::Skill),
    ];
    for (prefix, kind) in by_prefix {
        if path.starts_with(prefix) {
            return kind;
        }
    }
    if is_skill_filename(path) {
        ToolMentionKind::Skill
    } else {
        ToolMentionKind::Other
    }
}
/// Extract the connector id from an `app://` path.
/// Returns `None` when the prefix is missing or the id is empty.
pub fn app_id_from_path(path: &str) -> Option<&str> {
    match path.strip_prefix(APP_PATH_PREFIX) {
        Some(id) if !id.is_empty() => Some(id),
        _ => None,
    }
}

/// Extract the plugin config name from a `plugin://` path.
/// Returns `None` when the prefix is missing or the name is empty.
pub fn plugin_config_name_from_path(path: &str) -> Option<&str> {
    match path.strip_prefix(PLUGIN_PATH_PREFIX) {
        Some(name) if !name.is_empty() => Some(name),
        _ => None,
    }
}

/// Drop a leading `skill://` scheme; any other path is returned unchanged.
pub fn normalize_skill_path(path: &str) -> &str {
    match path.strip_prefix(SKILL_PATH_PREFIX) {
        Some(stripped) => stripped,
        None => path,
    }
}
/// Scan `text` for tool mentions written with the given sigil (e.g. `@`).
///
/// Two forms are recognized:
/// - linked mentions `[<sigil>name](path)`, whose path is always collected and
///   whose name is collected only when the path is not an app/MCP/plugin URI;
/// - plain mentions `<sigil>name`, collected as both a name and a plain name.
///
/// Names matching common environment variables (e.g. `PATH`) are ignored to
/// avoid false positives. The returned sets borrow from `text`.
pub fn extract_tool_mentions_with_sigil(text: &str, sigil: char) -> ToolMentions<'_> {
    let text_bytes = text.as_bytes();
    let mut mentioned_names: HashSet<&str> = HashSet::new();
    let mut mentioned_paths: HashSet<&str> = HashSet::new();
    let mut plain_names: HashSet<&str> = HashSet::new();
    let mut index = 0;
    while index < text_bytes.len() {
        let byte = text_bytes[index];
        // Try the linked form first: `[<sigil>name](path)`.
        if byte == b'['
            && let Some((name, path, end_index)) =
                parse_linked_tool_mention(text, text_bytes, index, sigil)
        {
            if !is_common_env_var(name) {
                // App/MCP/plugin mentions are identified by path only; other
                // kinds (skills etc.) are also tracked by name.
                if !matches!(
                    tool_kind_for_path(path),
                    ToolMentionKind::App | ToolMentionKind::Mcp | ToolMentionKind::Plugin
                ) {
                    mentioned_names.insert(name);
                }
                mentioned_paths.insert(path);
            }
            // Resume scanning just past the closing `)`.
            index = end_index;
            continue;
        }
        if byte != sigil as u8 {
            index += 1;
            continue;
        }
        // Plain form: the sigil must be followed by at least one name char.
        let name_start = index + 1;
        let Some(first_name_byte) = text_bytes.get(name_start) else {
            index += 1;
            continue;
        };
        if !is_mention_name_char(*first_name_byte) {
            index += 1;
            continue;
        }
        // Extend the name over the maximal run of name characters.
        let mut name_end = name_start + 1;
        while let Some(byte) = text_bytes.get(name_end) {
            if !is_mention_name_char(*byte) {
                break;
            }
            name_end += 1;
        }
        let name = &text[name_start..name_end];
        if !is_common_env_var(name) {
            mentioned_names.insert(name);
            plain_names.insert(name);
        }
        index = name_end;
    }
    ToolMentions {
        mentioned_names,
        mentioned_paths,
        plain_names,
    }
}

/// Mentions collected from a single text scan; all strings borrow from the
/// scanned input.
pub struct ToolMentions<'a> {
    // Names from plain mentions plus non-app/MCP/plugin linked mentions.
    mentioned_names: HashSet<&'a str>,
    // Paths from linked mentions.
    mentioned_paths: HashSet<&'a str>,
    // Names that appeared as plain `<sigil>name` mentions only.
    plain_names: HashSet<&'a str>,
}

impl<'a> ToolMentions<'a> {
    /// All mentioned names (unordered).
    pub fn names(&self) -> impl Iterator<Item = &'a str> {
        self.mentioned_names.iter().copied()
    }
    /// All linked-mention paths (unordered).
    pub fn paths(&self) -> impl Iterator<Item = &'a str> {
        self.mentioned_paths.iter().copied()
    }
    /// Names that appeared without a link (unordered).
    pub fn plain_names(&self) -> impl Iterator<Item = &'a str> {
        self.plain_names.iter().copied()
    }
}
/// True when the final path segment (split on `/` or `\`) equals `SKILL.md`,
/// compared case-insensitively.
fn is_skill_filename(path: &str) -> bool {
    let last_segment = match path.rsplit(['/', '\\']).next() {
        Some(segment) => segment,
        None => path,
    };
    last_segment.eq_ignore_ascii_case(SKILL_FILENAME)
}
/// Try to parse a linked mention `[<sigil>name](path)` starting at the `[`
/// located at `index`.
///
/// Returns `(name, path, end_index)` where `end_index` is the position just
/// past the closing `)`, or `None` if the text at `index` is not a complete
/// linked mention (the caller then treats `[` as ordinary text).
fn parse_linked_tool_mention<'a>(
    text: &'a str,
    text_bytes: &[u8],
    index: usize,
    sigil: char,
) -> Option<(&'a str, &'a str, usize)> {
    // Layout: index -> '[', index+1 -> sigil, name starts at index+2.
    let name_start = index.checked_add(2)?;
    if text_bytes.get(index + 1).copied()? != sigil as u8 {
        return None;
    }
    // The name runs until `]`; any non-name character aborts the parse.
    let mut name_end = name_start;
    while let Some(byte) = text_bytes.get(name_end) {
        if *byte == b']' {
            break;
        }
        if !is_mention_name_char(*byte) {
            return None;
        }
        name_end += 1;
    }
    // Require the literal `](` between name and path.
    if text_bytes.get(name_end).copied()? != b']' {
        return None;
    }
    if text_bytes.get(name_end + 1).copied()? != b'(' {
        return None;
    }
    let path_start = name_end + 2;
    // The path is everything up to the first `)`.
    let path_end = text[path_start..].find(')')? + path_start;
    let name = &text[name_start..name_end];
    let path = &text[path_start..path_end];
    Some((name, path, path_end + 1))
}
/// Returns true for environment-variable names common enough that a sigil
/// mention such as `$PATH` is almost certainly not a tool reference.
fn is_common_env_var(name: &str) -> bool {
    const COMMON_ENV_VARS: [&str; 8] =
        ["PATH", "HOME", "PWD", "SHELL", "USER", "TMPDIR", "TMP", "TEMP"];
    COMMON_ENV_VARS.contains(&name)
}
/// A byte that may appear inside a mention name: ASCII alphanumerics plus
/// `_`, `-`, `.`, and `/`.
fn is_mention_name_char(byte: u8) -> bool {
    match byte {
        b'_' | b'-' | b'.' | b'/' => true,
        other => other.is_ascii_alphanumeric(),
    }
}

View File

@@ -0,0 +1,78 @@
use anyhow::Context;
use std::collections::HashSet;
use tracing::warn;
use super::OPENAI_CURATED_MARKETPLACE_NAME;
use super::PluginCapabilitySummary;
use super::PluginLoadRequest;
use super::PluginReadRequest;
use super::PluginsManager;
use crate::config_types::ToolSuggestDiscoverable;
use crate::config_types::ToolSuggestDiscoverableType;
/// Curated-marketplace plugins that may always be surfaced as `tool_suggest`
/// discoverables, even when not listed in the user's configuration.
const TOOL_SUGGEST_DISCOVERABLE_PLUGIN_ALLOWLIST: &[&str] = &[
    "github@openai-curated",
    "notion@openai-curated",
    "slack@openai-curated",
    "gmail@openai-curated",
    "google-calendar@openai-curated",
    "google-docs@openai-curated",
    "google-drive@openai-curated",
    "google-sheets@openai-curated",
    "google-slides@openai-curated",
];
/// List not-yet-installed plugins from the OpenAI curated marketplace that
/// should be offered by `tool_search` suggestions.
///
/// A plugin qualifies if it is on the static allowlist or explicitly named as
/// a `plugin` discoverable in `discoverables`. Returns an empty list when
/// plugins are disabled or the curated marketplace is not available; results
/// are sorted by display name, then config name.
pub fn list_tool_suggest_discoverable_plugins(
    plugins_manager: &PluginsManager,
    request: &PluginLoadRequest,
    discoverables: &[ToolSuggestDiscoverable],
) -> anyhow::Result<Vec<PluginCapabilitySummary>> {
    if !request.plugins_enabled {
        return Ok(Vec::new());
    }
    // Only `plugin`-kind discoverables matter here; connectors are handled elsewhere.
    let configured_plugin_ids = discoverables
        .iter()
        .filter(|discoverable| discoverable.kind == ToolSuggestDiscoverableType::Plugin)
        .map(|discoverable| discoverable.id.as_str())
        .collect::<HashSet<_>>();
    let marketplaces = plugins_manager
        .list_marketplaces(request, &[])
        .context("failed to list plugin marketplaces for tool suggestions")?;
    // Suggestions only come from the OpenAI curated marketplace.
    let Some(curated_marketplace) = marketplaces
        .into_iter()
        .find(|marketplace| marketplace.name == OPENAI_CURATED_MARKETPLACE_NAME)
    else {
        return Ok(Vec::new());
    };
    let mut discoverable_plugins = Vec::<PluginCapabilitySummary>::new();
    for plugin in curated_marketplace.plugins {
        // Skip already-installed plugins and anything neither allowlisted nor
        // explicitly configured as discoverable.
        if plugin.installed
            || (!TOOL_SUGGEST_DISCOVERABLE_PLUGIN_ALLOWLIST.contains(&plugin.id.as_str())
                && !configured_plugin_ids.contains(plugin.id.as_str()))
        {
            continue;
        }
        let plugin_id = plugin.id.clone();
        let plugin_name = plugin.name.clone();
        // Best effort: a plugin that fails to load is logged and skipped
        // rather than failing the whole suggestion list.
        match plugins_manager.read_plugin(
            request,
            &PluginReadRequest {
                plugin_name,
                marketplace_path: curated_marketplace.path.clone(),
            },
        ) {
            Ok(plugin) => discoverable_plugins.push(plugin.plugin.into()),
            Err(err) => warn!("failed to load discoverable plugin suggestion {plugin_id}: {err:#}"),
        }
    }
    // Stable, user-friendly ordering.
    discoverable_plugins.sort_by(|left, right| {
        left.display_name
            .cmp(&right.display_name)
            .then_with(|| left.config_name.cmp(&right.config_name))
    });
    Ok(discoverable_plugins)
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,277 @@
use super::*;
use async_trait::async_trait;
use codex_app_server_protocol::ConfigLayerSource;
use codex_config::ConfigLayerEntry;
use codex_config::ConfigLayerStack;
use codex_config::ConfigRequirements;
use codex_config::ConfigRequirementsToml;
use codex_utils_absolute_path::AbsolutePathBuf;
use pretty_assertions::assert_eq;
use std::fs;
use std::path::Path;
use std::sync::Arc;
use std::sync::Mutex;
use tempfile::tempdir;
use toml::Value as TomlValue;
/// Test double that records every plugin-config edit it is asked to persist
/// instead of writing to disk.
#[derive(Default)]
struct RecordingConfigPersister {
    edits: Mutex<Vec<PluginConfigEdit>>,
}

impl RecordingConfigPersister {
    /// Snapshot of all recorded edits; tolerates a poisoned lock so a failed
    /// test elsewhere does not cascade.
    fn edits(&self) -> Vec<PluginConfigEdit> {
        self.edits
            .lock()
            .unwrap_or_else(std::sync::PoisonError::into_inner)
            .clone()
    }
}

#[async_trait]
impl PluginConfigPersister for RecordingConfigPersister {
    // Records a SetEnabled { enabled: true } edit.
    async fn enable_plugin(&self, plugin_id: &str) -> anyhow::Result<()> {
        self.edits
            .lock()
            .unwrap_or_else(std::sync::PoisonError::into_inner)
            .push(PluginConfigEdit::SetEnabled {
                plugin_id: plugin_id.to_string(),
                enabled: true,
            });
        Ok(())
    }
    // Records a ClearPlugin edit.
    async fn clear_plugin(&self, plugin_id: &str) -> anyhow::Result<()> {
        self.edits
            .lock()
            .unwrap_or_else(std::sync::PoisonError::into_inner)
            .push(PluginConfigEdit::ClearPlugin {
                plugin_id: plugin_id.to_string(),
            });
        Ok(())
    }
    // Records a batch of edits verbatim.
    async fn apply_plugin_edits(&self, edits: &[PluginConfigEdit]) -> anyhow::Result<()> {
        self.edits
            .lock()
            .unwrap_or_else(std::sync::PoisonError::into_inner)
            .extend_from_slice(edits);
        Ok(())
    }
}
/// Test double that records plugin install/uninstall telemetry events.
#[derive(Default)]
struct RecordingAnalyticsHook {
    installed: Mutex<Vec<PluginTelemetryMetadata>>,
    uninstalled: Mutex<Vec<PluginTelemetryMetadata>>,
}

impl RecordingAnalyticsHook {
    /// Snapshot of recorded install events (poisoned-lock tolerant).
    fn installed(&self) -> Vec<PluginTelemetryMetadata> {
        self.installed
            .lock()
            .unwrap_or_else(std::sync::PoisonError::into_inner)
            .clone()
    }
    /// Snapshot of recorded uninstall events (poisoned-lock tolerant).
    fn uninstalled(&self) -> Vec<PluginTelemetryMetadata> {
        self.uninstalled
            .lock()
            .unwrap_or_else(std::sync::PoisonError::into_inner)
            .clone()
    }
}

impl PluginAnalyticsHook for RecordingAnalyticsHook {
    fn track_plugin_installed(&self, metadata: PluginTelemetryMetadata) {
        self.installed
            .lock()
            .unwrap_or_else(std::sync::PoisonError::into_inner)
            .push(metadata);
    }
    fn track_plugin_uninstalled(&self, metadata: PluginTelemetryMetadata) {
        self.uninstalled
            .lock()
            .unwrap_or_else(std::sync::PoisonError::into_inner)
            .push(metadata);
    }
}
/// Write `contents` to `path`, creating any missing parent directories first.
/// Panics on I/O failure, which is acceptable inside this test module.
fn write_file(path: &Path, contents: &str) {
    match path.parent() {
        Some(parent) => fs::create_dir_all(parent).expect("create parent directories"),
        None => {}
    }
    fs::write(path, contents).expect("write file");
}
/// Lay out a local marketplace under `codex_home` containing one plugin
/// ("sample-plugin") with a manifest, a skill, an MCP server, and an app.
/// Returns the absolute path to the marketplace manifest.
fn write_marketplace_plugin(codex_home: &Path) -> AbsolutePathBuf {
    let marketplace_root = codex_home.join("marketplace");
    // Plugin manifest.
    write_file(
        &marketplace_root.join("plugins/sample-plugin/.codex-plugin/plugin.json"),
        r#"{
"name": "sample-plugin",
"description": "Plugin that includes the sample MCP server and Skills"
}"#,
    );
    // One skill with YAML front matter.
    write_file(
        &marketplace_root.join("plugins/sample-plugin/skills/sample-search/SKILL.md"),
        "---\nname: sample-search\ndescription: search sample data\n---\n",
    );
    // One streamable-HTTP MCP server.
    write_file(
        &marketplace_root.join("plugins/sample-plugin/.mcp.json"),
        r#"{
"mcpServers": {
"sample": {
"type": "http",
"url": "https://sample.example/mcp"
}
}
}"#,
    );
    // One app connector.
    write_file(
        &marketplace_root.join("plugins/sample-plugin/.app.json"),
        r#"{
"apps": {
"example": {
"id": "connector_example"
}
}
}"#,
    );
    // Marketplace index named "debug" pointing at the local plugin.
    write_file(
        &marketplace_root.join(".agents/plugins/marketplace.json"),
        r#"{
"name": "debug",
"plugins": [
{
"name": "sample-plugin",
"source": {
"source": "local",
"path": "./plugins/sample-plugin"
}
}
]
}"#,
    );
    AbsolutePathBuf::try_from(marketplace_root.join(".agents/plugins/marketplace.json"))
        .expect("marketplace path")
}
/// Build a `PluginLoadRequest` whose user config layer enables
/// `sample-plugin@debug`, rooted at `codex_home`.
fn plugin_load_request(codex_home: &Path) -> PluginLoadRequest {
    let config_layer_stack = ConfigLayerStack::new(
        vec![ConfigLayerEntry::new(
            ConfigLayerSource::User {
                file: AbsolutePathBuf::from_absolute_path(codex_home.join("config.toml"))
                    .expect("absolute config path"),
            },
            toml::from_str::<TomlValue>(
                r#"
[plugins."sample-plugin@debug"]
enabled = true
"#,
            )
            .expect("parse config"),
        )],
        ConfigRequirements::default(),
        ConfigRequirementsToml::default(),
    )
    .expect("config layer stack");
    PluginLoadRequest {
        plugins_enabled: true,
        config_layer_stack,
    }
}
// End-to-end install path: installing a marketplace plugin should persist an
// "enabled" config edit, emit one install telemetry event, and afterwards load
// with its skills, MCP server, and app connector visible.
#[tokio::test]
async fn install_plugin_enables_config_tracks_analytics_and_loads_capabilities() {
    let codex_home = tempdir().expect("tempdir");
    let marketplace_path = write_marketplace_plugin(codex_home.path());
    let persister = RecordingConfigPersister::default();
    let analytics = Arc::new(RecordingAnalyticsHook::default());
    let manager = PluginsManager::new(codex_home.path().to_path_buf());
    manager.set_analytics_hook(analytics.clone());
    let outcome = manager
        .install_plugin(
            PluginInstallRequest {
                plugin_name: "sample-plugin".to_string(),
                marketplace_path,
            },
            &persister,
        )
        .await
        .expect("install plugin");
    // Plugin ids are keyed as "<plugin>@<marketplace>".
    assert_eq!(outcome.plugin_id.as_key(), "sample-plugin@debug");
    // Exactly one config edit: enable the installed plugin.
    assert_eq!(
        persister.edits(),
        vec![PluginConfigEdit::SetEnabled {
            plugin_id: "sample-plugin@debug".to_string(),
            enabled: true,
        }]
    );
    assert_eq!(analytics.installed().len(), 1);
    // Loading with the plugin enabled must surface all three capability kinds.
    let load_outcome = manager.plugins_for_request(&plugin_load_request(codex_home.path()));
    assert_eq!(
        load_outcome.capability_summaries(),
        &[PluginCapabilitySummary {
            config_name: "sample-plugin@debug".to_string(),
            display_name: "sample-plugin".to_string(),
            description: Some("Plugin that includes the sample MCP server and Skills".to_string(),),
            has_skills: true,
            mcp_server_names: vec!["sample".to_string()],
            app_connector_ids: vec![AppConnectorId("connector_example".to_string())],
        }]
    );
    // Skills are served from the plugin cache directory.
    assert_eq!(
        load_outcome.effective_skill_roots(),
        vec![
            codex_home
                .path()
                .join("plugins/cache/debug/sample-plugin/local/skills")
        ]
    );
}
// Uninstall path: removing an installed plugin should persist a ClearPlugin
// config edit, emit one uninstall telemetry event, and delete its cache dir.
#[tokio::test]
async fn uninstall_plugin_clears_config_tracks_analytics_and_removes_cache() {
    let codex_home = tempdir().expect("tempdir");
    let marketplace_path = write_marketplace_plugin(codex_home.path());
    // Separate persisters so the uninstall assertions see only uninstall edits.
    let install_persister = RecordingConfigPersister::default();
    let uninstall_persister = RecordingConfigPersister::default();
    let analytics = Arc::new(RecordingAnalyticsHook::default());
    let manager = PluginsManager::new(codex_home.path().to_path_buf());
    manager.set_analytics_hook(analytics.clone());
    manager
        .install_plugin(
            PluginInstallRequest {
                plugin_name: "sample-plugin".to_string(),
                marketplace_path,
            },
            &install_persister,
        )
        .await
        .expect("install plugin");
    manager
        .uninstall_plugin("sample-plugin@debug".to_string(), &uninstall_persister)
        .await
        .expect("uninstall plugin");
    assert_eq!(
        uninstall_persister.edits(),
        vec![PluginConfigEdit::ClearPlugin {
            plugin_id: "sample-plugin@debug".to_string(),
        }]
    );
    assert_eq!(analytics.uninstalled().len(), 1);
    // The cached plugin payload must be gone after uninstall.
    assert!(
        !codex_home
            .path()
            .join("plugins/cache/debug/sample-plugin/local")
            .exists()
    );
}

View File

@@ -0,0 +1,477 @@
use codex_utils_absolute_path::AbsolutePathBuf;
use serde::Deserialize;
use serde_json::Value as JsonValue;
use std::fs;
use std::path::Component;
use std::path::Path;
/// Location of a plugin's manifest, relative to the plugin root.
pub(crate) const PLUGIN_MANIFEST_PATH: &str = ".codex-plugin/plugin.json";
/// Cap on how many default prompts a manifest may declare.
const MAX_DEFAULT_PROMPT_COUNT: usize = 3;
/// Cap on the length of each default prompt, in characters/bytes.
/// NOTE(review): the enforcement site is past this chunk — confirm units.
const MAX_DEFAULT_PROMPT_LEN: usize = 128;

/// Raw JSON shape of `.codex-plugin/plugin.json` before validation.
#[derive(Debug, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
struct RawPluginManifest {
    #[serde(default)]
    name: String,
    #[serde(default)]
    description: Option<String>,
    // Keep manifest paths as raw strings so we can validate the required `./...` syntax before
    // resolving them under the plugin root.
    #[serde(default)]
    skills: Option<String>,
    #[serde(default)]
    mcp_servers: Option<String>,
    #[serde(default)]
    apps: Option<String>,
    #[serde(default)]
    interface: Option<RawPluginManifestInterface>,
}
/// A validated plugin manifest with all paths resolved to absolute paths.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PluginManifest {
    pub name: String,
    pub description: Option<String>,
    pub paths: PluginManifestPaths,
    /// Present only when the raw manifest declared at least one interface field.
    pub interface: Option<PluginManifestInterface>,
}

/// Resolved locations of the plugin's capability payloads, when declared.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PluginManifestPaths {
    pub skills: Option<AbsolutePathBuf>,
    pub mcp_servers: Option<AbsolutePathBuf>,
    pub apps: Option<AbsolutePathBuf>,
}

/// Marketing/UI metadata for a plugin; all fields optional, asset paths
/// resolved under the plugin root.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct PluginManifestInterface {
    pub display_name: Option<String>,
    pub short_description: Option<String>,
    pub long_description: Option<String>,
    pub developer_name: Option<String>,
    pub category: Option<String>,
    pub capabilities: Vec<String>,
    pub website_url: Option<String>,
    pub privacy_policy_url: Option<String>,
    pub terms_of_service_url: Option<String>,
    pub default_prompt: Option<Vec<String>>,
    pub brand_color: Option<String>,
    pub composer_icon: Option<AbsolutePathBuf>,
    pub logo: Option<AbsolutePathBuf>,
    pub screenshots: Vec<AbsolutePathBuf>,
}
/// Raw JSON shape of the manifest `interface` section.
#[derive(Debug, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
struct RawPluginManifestInterface {
    #[serde(default)]
    display_name: Option<String>,
    #[serde(default)]
    short_description: Option<String>,
    #[serde(default)]
    long_description: Option<String>,
    #[serde(default)]
    developer_name: Option<String>,
    #[serde(default)]
    category: Option<String>,
    #[serde(default)]
    capabilities: Vec<String>,
    // URL fields accept both `...Url` (camelCase) and `...URL` spellings.
    #[serde(default)]
    #[serde(alias = "websiteURL")]
    website_url: Option<String>,
    #[serde(default)]
    #[serde(alias = "privacyPolicyURL")]
    privacy_policy_url: Option<String>,
    #[serde(default)]
    #[serde(alias = "termsOfServiceURL")]
    terms_of_service_url: Option<String>,
    #[serde(default)]
    default_prompt: Option<RawPluginManifestDefaultPrompt>,
    #[serde(default)]
    brand_color: Option<String>,
    // Asset paths stay raw strings until resolved under the plugin root.
    #[serde(default)]
    composer_icon: Option<String>,
    #[serde(default)]
    logo: Option<String>,
    #[serde(default)]
    screenshots: Vec<String>,
}

/// `defaultPrompt` may be a single string or a list; anything else is
/// captured as `Invalid` so a bad value doesn't fail the whole manifest.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum RawPluginManifestDefaultPrompt {
    String(String),
    List(Vec<RawPluginManifestDefaultPromptEntry>),
    Invalid(JsonValue),
}

/// One list entry of `defaultPrompt`; non-string entries become `Invalid`.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum RawPluginManifestDefaultPromptEntry {
    String(String),
    Invalid(JsonValue),
}
/// Load and validate `.codex-plugin/plugin.json` under `plugin_root`.
///
/// Returns `None` when the manifest is missing, unreadable, or fails to
/// parse (a parse failure is logged). An empty/blank manifest `name` falls
/// back to the plugin root's directory name. The `interface` section is kept
/// only if at least one of its fields survived validation.
pub fn load_plugin_manifest(plugin_root: &Path) -> Option<PluginManifest> {
    let manifest_path = plugin_root.join(PLUGIN_MANIFEST_PATH);
    if !manifest_path.is_file() {
        return None;
    }
    let contents = fs::read_to_string(&manifest_path).ok()?;
    match serde_json::from_str::<RawPluginManifest>(&contents) {
        Ok(manifest) => {
            let RawPluginManifest {
                name: raw_name,
                description,
                skills,
                mcp_servers,
                apps,
                interface,
            } = manifest;
            // Prefer the directory name only when the declared name is blank.
            let name = plugin_root
                .file_name()
                .and_then(|entry| entry.to_str())
                .filter(|_| raw_name.trim().is_empty())
                .unwrap_or(&raw_name)
                .to_string();
            let interface = interface.and_then(|interface| {
                // Destructure so new raw fields cannot be silently dropped.
                let RawPluginManifestInterface {
                    display_name,
                    short_description,
                    long_description,
                    developer_name,
                    category,
                    capabilities,
                    website_url,
                    privacy_policy_url,
                    terms_of_service_url,
                    default_prompt,
                    brand_color,
                    composer_icon,
                    logo,
                    screenshots,
                } = interface;
                let interface = PluginManifestInterface {
                    display_name,
                    short_description,
                    long_description,
                    developer_name,
                    category,
                    capabilities,
                    website_url,
                    privacy_policy_url,
                    terms_of_service_url,
                    default_prompt: resolve_default_prompts(plugin_root, default_prompt.as_ref()),
                    brand_color,
                    // Asset paths are validated/resolved under the plugin
                    // root; invalid entries are dropped individually.
                    composer_icon: resolve_interface_asset_path(
                        plugin_root,
                        "interface.composerIcon",
                        composer_icon.as_deref(),
                    ),
                    logo: resolve_interface_asset_path(
                        plugin_root,
                        "interface.logo",
                        logo.as_deref(),
                    ),
                    screenshots: screenshots
                        .iter()
                        .filter_map(|screenshot| {
                            resolve_interface_asset_path(
                                plugin_root,
                                "interface.screenshots",
                                Some(screenshot),
                            )
                        })
                        .collect(),
                };
                // Drop the section entirely when every field ended up empty.
                let has_fields = interface.display_name.is_some()
                    || interface.short_description.is_some()
                    || interface.long_description.is_some()
                    || interface.developer_name.is_some()
                    || interface.category.is_some()
                    || !interface.capabilities.is_empty()
                    || interface.website_url.is_some()
                    || interface.privacy_policy_url.is_some()
                    || interface.terms_of_service_url.is_some()
                    || interface.default_prompt.is_some()
                    || interface.brand_color.is_some()
                    || interface.composer_icon.is_some()
                    || interface.logo.is_some()
                    || !interface.screenshots.is_empty();
                has_fields.then_some(interface)
            });
            Some(PluginManifest {
                name,
                description,
                paths: PluginManifestPaths {
                    skills: resolve_manifest_path(plugin_root, "skills", skills.as_deref()),
                    mcp_servers: resolve_manifest_path(
                        plugin_root,
                        "mcpServers",
                        mcp_servers.as_deref(),
                    ),
                    apps: resolve_manifest_path(plugin_root, "apps", apps.as_deref()),
                },
                interface,
            })
        }
        Err(err) => {
            // A malformed manifest is logged and treated as absent.
            tracing::warn!(
                path = %manifest_path.display(),
                "failed to parse plugin manifest: {err}"
            );
            None
        }
    }
}
/// Resolves an interface asset path (icon, logo, screenshot) declared in
/// `plugin.json` to an absolute path under the plugin root.
///
/// Currently identical to [`resolve_manifest_path`]; presumably kept as a
/// separate entry point so asset-specific validation can be added later
/// without touching the generic resolver.
fn resolve_interface_asset_path(
    plugin_root: &Path,
    field: &'static str,
    path: Option<&str>,
) -> Option<AbsolutePathBuf> {
    resolve_manifest_path(plugin_root, field, path)
}
/// Normalizes `interface.defaultPrompt` into a list of validated prompt
/// strings.
///
/// Supports the legacy single-string form and the array form. Entries that
/// are empty, too long, or not strings are skipped with a warning; at most
/// `MAX_DEFAULT_PROMPT_COUNT` prompts are kept. Returns `None` when nothing
/// valid remains or the field had an unsupported shape.
fn resolve_default_prompts(
    plugin_root: &Path,
    value: Option<&RawPluginManifestDefaultPrompt>,
) -> Option<Vec<String>> {
    match value? {
        RawPluginManifestDefaultPrompt::String(prompt) => {
            resolve_default_prompt_str(plugin_root, "interface.defaultPrompt", prompt)
                .map(|prompt| vec![prompt])
        }
        RawPluginManifestDefaultPrompt::List(values) => {
            let mut prompts = Vec::new();
            for (index, item) in values.iter().enumerate() {
                // Stop collecting once the cap is reached; warn once so the
                // author knows trailing entries were dropped.
                if prompts.len() >= MAX_DEFAULT_PROMPT_COUNT {
                    warn_invalid_default_prompt(
                        plugin_root,
                        "interface.defaultPrompt",
                        &format!("maximum of {MAX_DEFAULT_PROMPT_COUNT} prompts is supported"),
                    );
                    break;
                }
                match item {
                    RawPluginManifestDefaultPromptEntry::String(prompt) => {
                        let field = format!("interface.defaultPrompt[{index}]");
                        if let Some(prompt) =
                            resolve_default_prompt_str(plugin_root, &field, prompt)
                        {
                            prompts.push(prompt);
                        }
                    }
                    RawPluginManifestDefaultPromptEntry::Invalid(value) => {
                        let field = format!("interface.defaultPrompt[{index}]");
                        warn_invalid_default_prompt(
                            plugin_root,
                            &field,
                            &format!("expected a string, found {}", json_value_type(value)),
                        );
                    }
                }
            }
            (!prompts.is_empty()).then_some(prompts)
        }
        RawPluginManifestDefaultPrompt::Invalid(value) => {
            warn_invalid_default_prompt(
                plugin_root,
                "interface.defaultPrompt",
                &format!(
                    "expected a string or array of strings, found {}",
                    json_value_type(value)
                ),
            );
            None
        }
    }
}
/// Normalizes a single default-prompt string: collapses whitespace runs to
/// single spaces and trims the ends, then rejects empty or over-long prompts
/// (with a warning) by returning `None`.
fn resolve_default_prompt_str(plugin_root: &Path, field: &str, prompt: &str) -> Option<String> {
    let normalized = prompt.split_whitespace().collect::<Vec<_>>().join(" ");
    if normalized.is_empty() {
        warn_invalid_default_prompt(plugin_root, field, "prompt must not be empty");
        None
    } else if normalized.chars().count() > MAX_DEFAULT_PROMPT_LEN {
        warn_invalid_default_prompt(
            plugin_root,
            field,
            &format!("prompt must be at most {MAX_DEFAULT_PROMPT_LEN} characters"),
        );
        None
    } else {
        Some(normalized)
    }
}
/// Logs a warning for a rejected default-prompt value, pointing the operator
/// at the manifest file that contains it.
fn warn_invalid_default_prompt(plugin_root: &Path, field: &str, message: &str) {
    let manifest = plugin_root.join(PLUGIN_MANIFEST_PATH);
    tracing::warn!(path = %manifest.display(), "ignoring {field}: {message}");
}
fn json_value_type(value: &JsonValue) -> &'static str {
match value {
JsonValue::Null => "null",
JsonValue::Bool(_) => "boolean",
JsonValue::Number(_) => "number",
JsonValue::String(_) => "string",
JsonValue::Array(_) => "array",
JsonValue::Object(_) => "object",
}
}
/// Resolves a `plugin.json` path field to an absolute path under the plugin
/// root, or `None` (with a warning) when the value is missing or invalid.
///
/// Paths must start with `./`, must not be `./` alone, and may only contain
/// normal components — no `..` or absolute prefixes — so a manifest can never
/// point outside its own plugin directory.
fn resolve_manifest_path(
    plugin_root: &Path,
    field: &'static str,
    path: Option<&str>,
) -> Option<AbsolutePathBuf> {
    // `plugin.json` paths are required to be relative to the plugin root and we return the
    // normalized absolute path to the rest of the system.
    let path = path?;
    if path.is_empty() {
        return None;
    }
    let Some(relative_path) = path.strip_prefix("./") else {
        tracing::warn!("ignoring {field}: path must start with `./` relative to plugin root");
        return None;
    };
    if relative_path.is_empty() {
        tracing::warn!("ignoring {field}: path must not be `./`");
        return None;
    }
    let mut normalized = std::path::PathBuf::new();
    for component in Path::new(relative_path).components() {
        match component {
            Component::Normal(component) => normalized.push(component),
            Component::ParentDir => {
                tracing::warn!("ignoring {field}: path must not contain '..'");
                return None;
            }
            // NOTE(review): this also rejects `.` (CurDir) components and any
            // root/prefix component — looks like deliberate strictness;
            // confirm before relaxing.
            _ => {
                tracing::warn!("ignoring {field}: path must stay within the plugin root");
                return None;
            }
        }
    }
    AbsolutePathBuf::try_from(plugin_root.join(normalized))
        .map_err(|err| {
            tracing::warn!("ignoring {field}: path must resolve to an absolute path: {err}");
            err
        })
        .ok()
}
#[cfg(test)]
mod tests {
    use super::MAX_DEFAULT_PROMPT_LEN;
    use super::PluginManifest;
    use super::load_plugin_manifest;
    use pretty_assertions::assert_eq;
    use std::fs;
    use std::path::Path;
    use tempfile::tempdir;

    // Writes a minimal `.codex-plugin/plugin.json` whose `interface` object is
    // the caller-supplied JSON snippet.
    fn write_manifest(plugin_root: &Path, interface: &str) {
        fs::create_dir_all(plugin_root.join(".codex-plugin")).expect("create manifest dir");
        fs::write(
            plugin_root.join(".codex-plugin/plugin.json"),
            format!(
                r#"{{
    "name": "demo-plugin",
    "interface": {interface}
}}"#
            ),
        )
        .expect("write manifest");
    }

    // Loads the manifest written by `write_manifest`, panicking on failure.
    fn load_manifest(plugin_root: &Path) -> PluginManifest {
        load_plugin_manifest(plugin_root).expect("load plugin manifest")
    }

    // Legacy string-valued `defaultPrompt` is accepted and whitespace-trimmed.
    #[test]
    fn plugin_interface_accepts_legacy_default_prompt_string() {
        let tmp = tempdir().expect("tempdir");
        let plugin_root = tmp.path().join("demo-plugin");
        write_manifest(
            &plugin_root,
            r#"{
    "displayName": "Demo Plugin",
    "defaultPrompt": "  Summarize   my inbox  "
}"#,
        );
        let manifest = load_manifest(&plugin_root);
        let interface = manifest.interface.expect("plugin interface");
        assert_eq!(
            interface.default_prompt,
            Some(vec!["Summarize my inbox".to_string()])
        );
    }

    // Array form: whitespace is normalized, non-strings / empty / over-long
    // entries are dropped, and at most five prompts are kept.
    #[test]
    fn plugin_interface_normalizes_default_prompt_array() {
        let tmp = tempdir().expect("tempdir");
        let plugin_root = tmp.path().join("demo-plugin");
        let too_long = "x".repeat(MAX_DEFAULT_PROMPT_LEN + 1);
        write_manifest(
            &plugin_root,
            &format!(
                r#"{{
    "displayName": "Demo Plugin",
    "defaultPrompt": [
        "  Summarize   my inbox  ",
        123,
        "{too_long}",
        "   ",
        "Draft the reply ",
        "Find my next action",
        "Archive old mail"
    ]
}}"#
            ),
        );
        let manifest = load_manifest(&plugin_root);
        let interface = manifest.interface.expect("plugin interface");
        assert_eq!(
            interface.default_prompt,
            Some(vec![
                "Summarize my inbox".to_string(),
                "Draft the reply".to_string(),
                "Find my next action".to_string(),
            ])
        );
    }

    // A `defaultPrompt` that is neither string nor array is ignored entirely.
    #[test]
    fn plugin_interface_ignores_invalid_default_prompt_shape() {
        let tmp = tempdir().expect("tempdir");
        let plugin_root = tmp.path().join("demo-plugin");
        write_manifest(
            &plugin_root,
            r#"{
    "displayName": "Demo Plugin",
    "defaultPrompt": { "text": "Summarize my inbox" }
}"#,
        );
        let manifest = load_manifest(&plugin_root);
        let interface = manifest.interface.expect("plugin interface");
        assert_eq!(interface.default_prompt, None);
    }
}

View File

@@ -0,0 +1,488 @@
use super::PluginManifestInterface;
use super::load_plugin_manifest;
use super::store::PluginId;
use super::store::PluginIdError;
use codex_app_server_protocol::PluginAuthPolicy;
use codex_app_server_protocol::PluginInstallPolicy;
use codex_protocol::protocol::Product;
use codex_utils_absolute_path::AbsolutePathBuf;
use dirs::home_dir;
use serde::Deserialize;
use std::fs;
use std::io;
use std::path::Component;
use std::path::Path;
use std::path::PathBuf;
use tracing::warn;
/// Location of a marketplace manifest relative to its root (home directory or
/// repository root).
const MARKETPLACE_RELATIVE_PATH: &str = ".agents/plugins/marketplace.json";
/// A marketplace plugin resolved to a concrete, installable source path.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ResolvedMarketplacePlugin {
    // Identity combining the plugin and marketplace names.
    pub plugin_id: PluginId,
    // Absolute path of the plugin directory on disk.
    pub source_path: AbsolutePathBuf,
    // When the plugin is expected to authenticate (on install vs. on use).
    pub auth_policy: MarketplacePluginAuthPolicy,
}
/// A loaded marketplace manifest plus the path it was read from.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Marketplace {
    pub name: String,
    // Absolute path of the `marketplace.json` this was parsed from.
    pub path: AbsolutePathBuf,
    pub interface: Option<MarketplaceInterface>,
    pub plugins: Vec<MarketplacePlugin>,
}
/// Presentation metadata for a marketplace as a whole.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MarketplaceInterface {
    pub display_name: Option<String>,
}
/// One plugin entry listed by a marketplace.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MarketplacePlugin {
    pub name: String,
    pub source: MarketplacePluginSource,
    pub policy: MarketplacePluginPolicy,
    // Interface metadata loaded from the plugin's own `plugin.json`, possibly
    // overridden by the marketplace (e.g. `category`).
    pub interface: Option<PluginManifestInterface>,
}
/// Where a marketplace plugin's files come from. Only local directories are
/// supported in this module.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MarketplacePluginSource {
    Local { path: AbsolutePathBuf },
}
/// Install/auth/product gating declared by the marketplace for one plugin.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MarketplacePluginPolicy {
    pub installation: MarketplacePluginInstallPolicy,
    pub authentication: MarketplacePluginAuthPolicy,
    // TODO: Surface or enforce product gating at the Codex/plugin consumer boundary instead of
    // only carrying it through core marketplace metadata.
    pub products: Option<Vec<Product>>,
}
/// Whether a plugin may be installed; defaults to `Available` when the
/// marketplace omits the field.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Deserialize)]
pub enum MarketplacePluginInstallPolicy {
    #[serde(rename = "NOT_AVAILABLE")]
    NotAvailable,
    #[default]
    #[serde(rename = "AVAILABLE")]
    Available,
    #[serde(rename = "INSTALLED_BY_DEFAULT")]
    InstalledByDefault,
}
/// When a plugin should authenticate; defaults to `OnInstall` when the
/// marketplace omits the field.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Deserialize)]
pub enum MarketplacePluginAuthPolicy {
    #[default]
    #[serde(rename = "ON_INSTALL")]
    OnInstall,
    #[serde(rename = "ON_USE")]
    OnUse,
}
/// Maps the marketplace-local install policy onto the app-server protocol
/// type, variant for variant.
impl From<MarketplacePluginInstallPolicy> for PluginInstallPolicy {
    fn from(value: MarketplacePluginInstallPolicy) -> Self {
        match value {
            MarketplacePluginInstallPolicy::NotAvailable => Self::NotAvailable,
            MarketplacePluginInstallPolicy::Available => Self::Available,
            MarketplacePluginInstallPolicy::InstalledByDefault => Self::InstalledByDefault,
        }
    }
}
/// Maps the marketplace-local auth policy onto the app-server protocol type,
/// variant for variant.
impl From<MarketplacePluginAuthPolicy> for PluginAuthPolicy {
    fn from(value: MarketplacePluginAuthPolicy) -> Self {
        match value {
            MarketplacePluginAuthPolicy::OnInstall => Self::OnInstall,
            MarketplacePluginAuthPolicy::OnUse => Self::OnUse,
        }
    }
}
/// Failures surfaced while reading marketplace manifests or resolving their
/// plugins.
#[derive(Debug, thiserror::Error)]
pub enum MarketplaceError {
    // Generic I/O failure with a short, static context string.
    #[error("{context}: {source}")]
    Io {
        context: &'static str,
        #[source]
        source: io::Error,
    },
    #[error("marketplace file `{path}` does not exist")]
    MarketplaceNotFound { path: PathBuf },
    #[error("invalid marketplace file `{path}`: {message}")]
    InvalidMarketplaceFile { path: PathBuf, message: String },
    #[error("plugin `{plugin_name}` was not found in marketplace `{marketplace_name}`")]
    PluginNotFound {
        plugin_name: String,
        marketplace_name: String,
    },
    // Raised both for NOT_AVAILABLE install policies and for product gating.
    #[error(
        "plugin `{plugin_name}` is not available for install in marketplace `{marketplace_name}`"
    )]
    PluginNotAvailable {
        plugin_name: String,
        marketplace_name: String,
    },
    #[error("plugins feature is disabled")]
    PluginsDisabled,
    #[error("{0}")]
    InvalidPlugin(String),
}
impl MarketplaceError {
    /// Convenience constructor for the `Io` variant.
    fn io(context: &'static str, source: io::Error) -> Self {
        Self::Io { context, source }
    }
}
/// Resolves `plugin_name` from the marketplace manifest at `marketplace_path`
/// into an installable plugin reference.
///
/// Always reads the specified marketplace file from disk so installs see the
/// latest `marketplace.json` contents without any in-memory cache
/// invalidation.
///
/// # Errors
/// `PluginNotFound` when the name is absent from the manifest;
/// `PluginNotAvailable` when the install policy is `NOT_AVAILABLE` or product
/// gating excludes `restriction_product`; manifest read/parse errors otherwise.
pub fn resolve_marketplace_plugin(
    marketplace_path: &AbsolutePathBuf,
    plugin_name: &str,
    restriction_product: Option<Product>,
) -> Result<ResolvedMarketplacePlugin, MarketplaceError> {
    let marketplace = load_raw_marketplace_manifest(marketplace_path)?;
    let marketplace_name = marketplace.name;
    let plugin = marketplace
        .plugins
        .into_iter()
        .find(|plugin| plugin.name == plugin_name);
    let Some(plugin) = plugin else {
        return Err(MarketplaceError::PluginNotFound {
            plugin_name: plugin_name.to_string(),
            marketplace_name,
        });
    };
    let RawMarketplaceManifestPlugin {
        name,
        source,
        policy,
        ..
    } = plugin;
    let install_policy = policy.installation;
    // Missing `products` means no gating; an explicit empty list gates the
    // plugin off for every product.
    let product_allowed = match policy.products.as_deref() {
        None => true,
        Some([]) => false,
        Some(products) => {
            restriction_product.is_some_and(|product| product.matches_product_restriction(products))
        }
    };
    if install_policy == MarketplacePluginInstallPolicy::NotAvailable || !product_allowed {
        return Err(MarketplaceError::PluginNotAvailable {
            plugin_name: name,
            marketplace_name,
        });
    }
    let plugin_id = PluginId::new(name, marketplace_name).map_err(|err| match err {
        PluginIdError::Invalid(message) => MarketplaceError::InvalidPlugin(message),
    })?;
    Ok(ResolvedMarketplacePlugin {
        plugin_id,
        source_path: resolve_plugin_source_path(marketplace_path, source)?,
        auth_policy: policy.authentication,
    })
}
/// Lists all discoverable marketplaces, searching the current user's home
/// directory plus `additional_roots` (see `list_marketplaces_with_home`).
pub fn list_marketplaces(
    additional_roots: &[AbsolutePathBuf],
) -> Result<Vec<Marketplace>, MarketplaceError> {
    list_marketplaces_with_home(additional_roots, home_dir().as_deref())
}
/// Loads and materializes a marketplace manifest: resolves each plugin's
/// local source path and merges in interface metadata from the plugin's own
/// `plugin.json` when present.
pub(crate) fn load_marketplace(path: &AbsolutePathBuf) -> Result<Marketplace, MarketplaceError> {
    let marketplace = load_raw_marketplace_manifest(path)?;
    let mut plugins = Vec::new();
    for plugin in marketplace.plugins {
        let RawMarketplaceManifestPlugin {
            name,
            source,
            policy,
            category,
        } = plugin;
        let source_path = resolve_plugin_source_path(path, source)?;
        let source = MarketplacePluginSource::Local {
            path: source_path.clone(),
        };
        let mut interface =
            load_plugin_manifest(source_path.as_path()).and_then(|manifest| manifest.interface);
        if let Some(category) = category {
            // Marketplace taxonomy wins when both sources provide a category.
            interface
                .get_or_insert_with(PluginManifestInterface::default)
                .category = Some(category);
        }
        plugins.push(MarketplacePlugin {
            name,
            source,
            policy: MarketplacePluginPolicy {
                installation: policy.installation,
                authentication: policy.authentication,
                products: policy.products,
            },
            interface,
        });
    }
    Ok(Marketplace {
        name: marketplace.name,
        path: path.clone(),
        interface: resolve_marketplace_interface(marketplace.interface),
        plugins,
    })
}
/// Discovers and loads marketplaces under an explicit home directory and the
/// additional roots. Marketplaces that fail to load are skipped with a
/// warning so one broken manifest cannot hide the rest.
fn list_marketplaces_with_home(
    additional_roots: &[AbsolutePathBuf],
    home_dir: Option<&Path>,
) -> Result<Vec<Marketplace>, MarketplaceError> {
    let mut marketplaces = Vec::new();
    for marketplace_path in discover_marketplace_paths_from_roots(additional_roots, home_dir) {
        match load_marketplace(&marketplace_path) {
            Ok(marketplace) => marketplaces.push(marketplace),
            Err(err) => {
                warn!(
                    path = %marketplace_path.display(),
                    error = %err,
                    "skipping marketplace that failed to load"
                );
            }
        }
    }
    Ok(marketplaces)
}
/// Collects candidate `marketplace.json` paths: home directory first, then
/// each additional root (directly, or via its enclosing git repository root).
/// Duplicate paths are dropped while preserving first-seen order.
fn discover_marketplace_paths_from_roots(
    additional_roots: &[AbsolutePathBuf],
    home_dir: Option<&Path>,
) -> Vec<AbsolutePathBuf> {
    let mut paths = Vec::new();
    if let Some(home) = home_dir {
        let path = home.join(MARKETPLACE_RELATIVE_PATH);
        if path.is_file()
            && let Ok(path) = AbsolutePathBuf::try_from(path)
        {
            paths.push(path);
        }
    }
    for root in additional_roots {
        // Curated marketplaces can now come from an HTTP-downloaded directory that is not a git
        // checkout, so check the root directly before falling back to repo-root discovery.
        if let Ok(path) = root.join(MARKETPLACE_RELATIVE_PATH)
            && path.as_path().is_file()
            && !paths.contains(&path)
        {
            paths.push(path);
            continue;
        }
        if let Some(repo_root) = get_git_repo_root(root.as_path())
            && let Ok(repo_root) = AbsolutePathBuf::try_from(repo_root)
            && let Ok(path) = repo_root.join(MARKETPLACE_RELATIVE_PATH)
            && path.as_path().is_file()
            && !paths.contains(&path)
        {
            paths.push(path);
        }
    }
    paths
}
fn get_git_repo_root(base_dir: &Path) -> Option<PathBuf> {
let base = if base_dir.is_dir() {
base_dir
} else {
base_dir.parent()?
};
find_ancestor_git_entry(base).map(canonicalize_or_raw)
}
/// Walks from `base_dir` toward the filesystem root and returns the first
/// directory containing a `.git` entry (file or directory), if any.
fn find_ancestor_git_entry(base_dir: &Path) -> Option<PathBuf> {
    base_dir
        .ancestors()
        .find(|candidate| candidate.join(".git").exists())
        .map(Path::to_path_buf)
}
/// Canonicalizes `path` when possible, falling back to the unmodified input
/// when canonicalization fails (e.g. the path does not exist).
fn canonicalize_or_raw(path: PathBuf) -> PathBuf {
    match std::fs::canonicalize(&path) {
        Ok(canonical) => canonical,
        Err(_) => path,
    }
}
/// Reads and deserializes a raw `marketplace.json`, distinguishing a missing
/// file (`MarketplaceNotFound`) from other I/O failures and mapping JSON
/// errors to `InvalidMarketplaceFile`.
fn load_raw_marketplace_manifest(
    path: &AbsolutePathBuf,
) -> Result<RawMarketplaceManifest, MarketplaceError> {
    let contents = fs::read_to_string(path.as_path()).map_err(|err| {
        if err.kind() == io::ErrorKind::NotFound {
            MarketplaceError::MarketplaceNotFound {
                path: path.to_path_buf(),
            }
        } else {
            MarketplaceError::io("failed to read marketplace file", err)
        }
    })?;
    serde_json::from_str(&contents).map_err(|err| MarketplaceError::InvalidMarketplaceFile {
        path: path.to_path_buf(),
        message: err.to_string(),
    })
}
/// Resolves a plugin's declared source into an absolute path under the
/// marketplace root.
///
/// Local source paths must start with `./`, be non-empty, and contain only
/// normal components so they cannot escape the marketplace root; violations
/// are reported as `InvalidMarketplaceFile`.
fn resolve_plugin_source_path(
    marketplace_path: &AbsolutePathBuf,
    source: RawMarketplaceManifestPluginSource,
) -> Result<AbsolutePathBuf, MarketplaceError> {
    match source {
        RawMarketplaceManifestPluginSource::Local { path } => {
            let Some(path) = path.strip_prefix("./") else {
                return Err(MarketplaceError::InvalidMarketplaceFile {
                    path: marketplace_path.to_path_buf(),
                    message: "local plugin source path must start with `./`".to_string(),
                });
            };
            if path.is_empty() {
                return Err(MarketplaceError::InvalidMarketplaceFile {
                    path: marketplace_path.to_path_buf(),
                    message: "local plugin source path must not be empty".to_string(),
                });
            }
            let relative_source_path = Path::new(path);
            // Reject `..`, `.`, and absolute components so the plugin cannot
            // point outside the marketplace root.
            if relative_source_path
                .components()
                .any(|component| !matches!(component, Component::Normal(_)))
            {
                return Err(MarketplaceError::InvalidMarketplaceFile {
                    path: marketplace_path.to_path_buf(),
                    message: "local plugin source path must stay within the marketplace root"
                        .to_string(),
                });
            }
            // `marketplace.json` lives under `<root>/.agents/plugins/`, but local plugin paths
            // are resolved relative to `<root>`, not relative to the `plugins/` directory.
            marketplace_root_dir(marketplace_path)?
                .join(relative_source_path)
                .map_err(|err| MarketplaceError::InvalidMarketplaceFile {
                    path: marketplace_path.to_path_buf(),
                    message: format!("plugin source path must resolve to an absolute path: {err}"),
                })
        }
    }
}
/// Derives the marketplace root (`<root>`) from the path of its manifest,
/// which must live at `<root>/.agents/plugins/marketplace.json`.
///
/// # Errors
/// Returns [`MarketplaceError::InvalidMarketplaceFile`] when the manifest is
/// not nested under a `.agents/plugins/` directory.
fn marketplace_root_dir(
    marketplace_path: &AbsolutePathBuf,
) -> Result<AbsolutePathBuf, MarketplaceError> {
    // Every failure mode shares the same diagnostic, so build it in one place
    // instead of repeating the literal four times.
    let invalid_location = || MarketplaceError::InvalidMarketplaceFile {
        path: marketplace_path.to_path_buf(),
        message: "marketplace file must live under `<root>/.agents/plugins/`".to_string(),
    };
    // Walk up: marketplace.json -> plugins/ -> .agents/ -> <root>.
    let plugins_dir = marketplace_path.parent().ok_or_else(invalid_location)?;
    let dot_agents_dir = plugins_dir.parent().ok_or_else(invalid_location)?;
    let marketplace_root = dot_agents_dir.parent().ok_or_else(invalid_location)?;
    // The intermediate directories must be named exactly `plugins` and
    // `.agents`; anything else means the manifest is in the wrong place.
    if plugins_dir.as_path().file_name().and_then(|s| s.to_str()) != Some("plugins")
        || dot_agents_dir
            .as_path()
            .file_name()
            .and_then(|s| s.to_str())
            != Some(".agents")
    {
        return Err(invalid_location());
    }
    Ok(marketplace_root)
}
/// On-disk shape of `marketplace.json`.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct RawMarketplaceManifest {
    name: String,
    #[serde(default)]
    interface: Option<RawMarketplaceManifestInterface>,
    plugins: Vec<RawMarketplaceManifestPlugin>,
}
/// On-disk `interface` object of a marketplace manifest.
#[derive(Debug, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
struct RawMarketplaceManifestInterface {
    #[serde(default)]
    display_name: Option<String>,
}
/// On-disk plugin entry of a marketplace manifest. Unknown/legacy top-level
/// policy fields are ignored; policy lives under the nested `policy` object.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct RawMarketplaceManifestPlugin {
    name: String,
    source: RawMarketplaceManifestPluginSource,
    #[serde(default)]
    policy: RawMarketplaceManifestPluginPolicy,
    // Optional marketplace-supplied category that overrides the plugin's own.
    #[serde(default)]
    category: Option<String>,
}
/// On-disk `policy` object of a plugin entry; all fields optional.
#[derive(Debug, Clone, Default, PartialEq, Eq, Deserialize)]
#[serde(rename_all = "camelCase")]
struct RawMarketplaceManifestPluginPolicy {
    #[serde(default)]
    installation: MarketplacePluginInstallPolicy,
    #[serde(default)]
    authentication: MarketplacePluginAuthPolicy,
    products: Option<Vec<Product>>,
}
/// On-disk `source` object, tagged by its `source` field (currently only
/// `"local"`).
#[derive(Debug, Deserialize)]
#[serde(tag = "source", rename_all = "lowercase")]
enum RawMarketplaceManifestPluginSource {
    Local { path: String },
}
fn resolve_marketplace_interface(
interface: Option<RawMarketplaceManifestInterface>,
) -> Option<MarketplaceInterface> {
let interface = interface?;
if interface.display_name.is_some() {
Some(MarketplaceInterface {
display_name: interface.display_name,
})
} else {
None
}
}
// Tests live in a sibling file to keep this module focused on implementation.
#[cfg(test)]
#[path = "marketplace_tests.rs"]
mod tests;

View File

@@ -0,0 +1,859 @@
use super::*;
use codex_protocol::protocol::Product;
use pretty_assertions::assert_eq;
use tempfile::tempdir;
// A manifest at `<repo>/.agents/plugins/marketplace.json` resolves a local
// plugin to `<repo>/<path>` with the default ON_INSTALL auth policy.
// (Removed a stray `create_dir_all(repo_root.join("nested"))` that nothing in
// this test referenced.)
#[test]
fn resolve_marketplace_plugin_finds_repo_marketplace_plugin() {
    let tmp = tempdir().unwrap();
    let repo_root = tmp.path().join("repo");
    fs::create_dir_all(repo_root.join(".git")).unwrap();
    fs::create_dir_all(repo_root.join(".agents/plugins")).unwrap();
    fs::write(
        repo_root.join(".agents/plugins/marketplace.json"),
        r#"{
    "name": "codex-curated",
    "plugins": [
        {
            "name": "local-plugin",
            "source": {
                "source": "local",
                "path": "./plugin-1"
            }
        }
    ]
}"#,
    )
    .unwrap();
    let resolved = resolve_marketplace_plugin(
        &AbsolutePathBuf::try_from(repo_root.join(".agents/plugins/marketplace.json")).unwrap(),
        "local-plugin",
        Some(Product::Codex),
    )
    .unwrap();
    assert_eq!(
        resolved,
        ResolvedMarketplacePlugin {
            plugin_id: PluginId::new("local-plugin".to_string(), "codex-curated".to_string())
                .unwrap(),
            source_path: AbsolutePathBuf::try_from(repo_root.join("plugin-1")).unwrap(),
            auth_policy: MarketplacePluginAuthPolicy::OnInstall,
        }
    );
}
// Asking for a plugin name the marketplace does not list yields the
// `PluginNotFound` error with both names in the message.
#[test]
fn resolve_marketplace_plugin_reports_missing_plugin() {
    let tmp = tempdir().unwrap();
    let repo_root = tmp.path().join("repo");
    fs::create_dir_all(repo_root.join(".git")).unwrap();
    fs::create_dir_all(repo_root.join(".agents/plugins")).unwrap();
    fs::write(
        repo_root.join(".agents/plugins/marketplace.json"),
        r#"{"name":"codex-curated","plugins":[]}"#,
    )
    .unwrap();
    let err = resolve_marketplace_plugin(
        &AbsolutePathBuf::try_from(repo_root.join(".agents/plugins/marketplace.json")).unwrap(),
        "missing",
        Some(Product::Codex),
    )
    .unwrap_err();
    assert_eq!(
        err.to_string(),
        "plugin `missing` was not found in marketplace `codex-curated`"
    );
}
// Marketplaces from both `$HOME` and a git repo root are returned — home
// first — with each plugin's path resolved against its own root, even when
// both marketplaces share a name and a plugin name.
#[test]
fn list_marketplaces_returns_home_and_repo_marketplaces() {
    let tmp = tempdir().unwrap();
    let home_root = tmp.path().join("home");
    let repo_root = tmp.path().join("repo");
    fs::create_dir_all(repo_root.join(".git")).unwrap();
    fs::create_dir_all(home_root.join(".agents/plugins")).unwrap();
    fs::create_dir_all(repo_root.join(".agents/plugins")).unwrap();
    fs::write(
        home_root.join(".agents/plugins/marketplace.json"),
        r#"{
    "name": "codex-curated",
    "plugins": [
        {
            "name": "shared-plugin",
            "source": {
                "source": "local",
                "path": "./home-shared"
            }
        },
        {
            "name": "home-only",
            "source": {
                "source": "local",
                "path": "./home-only"
            }
        }
    ]
}"#,
    )
    .unwrap();
    fs::write(
        repo_root.join(".agents/plugins/marketplace.json"),
        r#"{
    "name": "codex-curated",
    "plugins": [
        {
            "name": "shared-plugin",
            "source": {
                "source": "local",
                "path": "./repo-shared"
            }
        },
        {
            "name": "repo-only",
            "source": {
                "source": "local",
                "path": "./repo-only"
            }
        }
    ]
}"#,
    )
    .unwrap();
    let marketplaces = list_marketplaces_with_home(
        &[AbsolutePathBuf::try_from(repo_root.clone()).unwrap()],
        Some(&home_root),
    )
    .unwrap();
    assert_eq!(
        marketplaces,
        vec![
            Marketplace {
                name: "codex-curated".to_string(),
                path:
                    AbsolutePathBuf::try_from(home_root.join(".agents/plugins/marketplace.json"),)
                        .unwrap(),
                interface: None,
                plugins: vec![
                    MarketplacePlugin {
                        name: "shared-plugin".to_string(),
                        source: MarketplacePluginSource::Local {
                            path: AbsolutePathBuf::try_from(home_root.join("home-shared")).unwrap(),
                        },
                        policy: MarketplacePluginPolicy {
                            installation: MarketplacePluginInstallPolicy::Available,
                            authentication: MarketplacePluginAuthPolicy::OnInstall,
                            products: None,
                        },
                        interface: None,
                    },
                    MarketplacePlugin {
                        name: "home-only".to_string(),
                        source: MarketplacePluginSource::Local {
                            path: AbsolutePathBuf::try_from(home_root.join("home-only")).unwrap(),
                        },
                        policy: MarketplacePluginPolicy {
                            installation: MarketplacePluginInstallPolicy::Available,
                            authentication: MarketplacePluginAuthPolicy::OnInstall,
                            products: None,
                        },
                        interface: None,
                    },
                ],
            },
            Marketplace {
                name: "codex-curated".to_string(),
                path:
                    AbsolutePathBuf::try_from(repo_root.join(".agents/plugins/marketplace.json"),)
                        .unwrap(),
                interface: None,
                plugins: vec![
                    MarketplacePlugin {
                        name: "shared-plugin".to_string(),
                        source: MarketplacePluginSource::Local {
                            path: AbsolutePathBuf::try_from(repo_root.join("repo-shared")).unwrap(),
                        },
                        policy: MarketplacePluginPolicy {
                            installation: MarketplacePluginInstallPolicy::Available,
                            authentication: MarketplacePluginAuthPolicy::OnInstall,
                            products: None,
                        },
                        interface: None,
                    },
                    MarketplacePlugin {
                        name: "repo-only".to_string(),
                        source: MarketplacePluginSource::Local {
                            path: AbsolutePathBuf::try_from(repo_root.join("repo-only")).unwrap(),
                        },
                        policy: MarketplacePluginPolicy {
                            installation: MarketplacePluginInstallPolicy::Available,
                            authentication: MarketplacePluginAuthPolicy::OnInstall,
                            products: None,
                        },
                        interface: None,
                    },
                ],
            },
        ]
    );
}
// Two marketplaces with the same `name` but different paths stay distinct in
// the listing, and resolving against the repo copy uses the repo's plugin.
#[test]
fn list_marketplaces_keeps_distinct_entries_for_same_name() {
    let tmp = tempdir().unwrap();
    let home_root = tmp.path().join("home");
    let repo_root = tmp.path().join("repo");
    let home_marketplace = home_root.join(".agents/plugins/marketplace.json");
    let repo_marketplace = repo_root.join(".agents/plugins/marketplace.json");
    fs::create_dir_all(repo_root.join(".git")).unwrap();
    fs::create_dir_all(home_root.join(".agents/plugins")).unwrap();
    fs::create_dir_all(repo_root.join(".agents/plugins")).unwrap();
    fs::write(
        home_marketplace.clone(),
        r#"{
    "name": "codex-curated",
    "plugins": [
        {
            "name": "local-plugin",
            "source": {
                "source": "local",
                "path": "./home-plugin"
            }
        }
    ]
}"#,
    )
    .unwrap();
    fs::write(
        repo_marketplace.clone(),
        r#"{
    "name": "codex-curated",
    "plugins": [
        {
            "name": "local-plugin",
            "source": {
                "source": "local",
                "path": "./repo-plugin"
            }
        }
    ]
}"#,
    )
    .unwrap();
    let marketplaces = list_marketplaces_with_home(
        &[AbsolutePathBuf::try_from(repo_root.clone()).unwrap()],
        Some(&home_root),
    )
    .unwrap();
    assert_eq!(
        marketplaces,
        vec![
            Marketplace {
                name: "codex-curated".to_string(),
                path: AbsolutePathBuf::try_from(home_marketplace).unwrap(),
                interface: None,
                plugins: vec![MarketplacePlugin {
                    name: "local-plugin".to_string(),
                    source: MarketplacePluginSource::Local {
                        path: AbsolutePathBuf::try_from(home_root.join("home-plugin")).unwrap(),
                    },
                    policy: MarketplacePluginPolicy {
                        installation: MarketplacePluginInstallPolicy::Available,
                        authentication: MarketplacePluginAuthPolicy::OnInstall,
                        products: None,
                    },
                    interface: None,
                }],
            },
            Marketplace {
                name: "codex-curated".to_string(),
                path: AbsolutePathBuf::try_from(repo_marketplace.clone()).unwrap(),
                interface: None,
                plugins: vec![MarketplacePlugin {
                    name: "local-plugin".to_string(),
                    source: MarketplacePluginSource::Local {
                        path: AbsolutePathBuf::try_from(repo_root.join("repo-plugin")).unwrap(),
                    },
                    policy: MarketplacePluginPolicy {
                        installation: MarketplacePluginInstallPolicy::Available,
                        authentication: MarketplacePluginAuthPolicy::OnInstall,
                        products: None,
                    },
                    interface: None,
                }],
            },
        ]
    );
    let resolved = resolve_marketplace_plugin(
        &AbsolutePathBuf::try_from(repo_marketplace).unwrap(),
        "local-plugin",
        Some(Product::Codex),
    )
    .unwrap();
    assert_eq!(
        resolved.source_path,
        AbsolutePathBuf::try_from(repo_root.join("repo-plugin")).unwrap()
    );
}
// Two search roots inside the same git repository resolve to the same
// marketplace file, which must appear only once in the listing.
#[test]
fn list_marketplaces_dedupes_multiple_roots_in_same_repo() {
    let tmp = tempdir().unwrap();
    let repo_root = tmp.path().join("repo");
    let nested_root = repo_root.join("nested/project");
    fs::create_dir_all(repo_root.join(".git")).unwrap();
    fs::create_dir_all(repo_root.join(".agents/plugins")).unwrap();
    fs::create_dir_all(&nested_root).unwrap();
    fs::write(
        repo_root.join(".agents/plugins/marketplace.json"),
        r#"{
    "name": "codex-curated",
    "plugins": [
        {
            "name": "local-plugin",
            "source": {
                "source": "local",
                "path": "./plugin"
            }
        }
    ]
}"#,
    )
    .unwrap();
    let marketplaces = list_marketplaces_with_home(
        &[
            AbsolutePathBuf::try_from(repo_root.clone()).unwrap(),
            AbsolutePathBuf::try_from(nested_root).unwrap(),
        ],
        None,
    )
    .unwrap();
    assert_eq!(
        marketplaces,
        vec![Marketplace {
            name: "codex-curated".to_string(),
            path: AbsolutePathBuf::try_from(repo_root.join(".agents/plugins/marketplace.json"))
                .unwrap(),
            interface: None,
            plugins: vec![MarketplacePlugin {
                name: "local-plugin".to_string(),
                source: MarketplacePluginSource::Local {
                    path: AbsolutePathBuf::try_from(repo_root.join("plugin")).unwrap(),
                },
                policy: MarketplacePluginPolicy {
                    installation: MarketplacePluginInstallPolicy::Available,
                    authentication: MarketplacePluginAuthPolicy::OnInstall,
                    products: None,
                },
                interface: None,
            }],
        }]
    );
}
// A marketplace-level `interface.displayName` survives loading as
// `MarketplaceInterface::display_name`.
#[test]
fn list_marketplaces_reads_marketplace_display_name() {
    let tmp = tempdir().unwrap();
    let repo_root = tmp.path().join("repo");
    fs::create_dir_all(repo_root.join(".git")).unwrap();
    fs::create_dir_all(repo_root.join(".agents/plugins")).unwrap();
    fs::write(
        repo_root.join(".agents/plugins/marketplace.json"),
        r#"{
    "name": "openai-curated",
    "interface": {
        "displayName": "ChatGPT Official"
    },
    "plugins": [
        {
            "name": "local-plugin",
            "source": {
                "source": "local",
                "path": "./plugin"
            }
        }
    ]
}"#,
    )
    .unwrap();
    let marketplaces =
        list_marketplaces_with_home(&[AbsolutePathBuf::try_from(repo_root).unwrap()], None)
            .unwrap();
    assert_eq!(
        marketplaces[0].interface,
        Some(MarketplaceInterface {
            display_name: Some("ChatGPT Official".to_string()),
        })
    );
}
// A marketplace whose plugin source path is invalid (missing `./` prefix)
// fails to load and is skipped, while valid marketplaces are still returned.
#[test]
fn list_marketplaces_skips_marketplaces_that_fail_to_load() {
    let tmp = tempdir().unwrap();
    let valid_repo_root = tmp.path().join("valid-repo");
    let invalid_repo_root = tmp.path().join("invalid-repo");
    fs::create_dir_all(valid_repo_root.join(".git")).unwrap();
    fs::create_dir_all(valid_repo_root.join(".agents/plugins")).unwrap();
    fs::create_dir_all(invalid_repo_root.join(".git")).unwrap();
    fs::create_dir_all(invalid_repo_root.join(".agents/plugins")).unwrap();
    fs::write(
        valid_repo_root.join(".agents/plugins/marketplace.json"),
        r#"{
    "name": "valid-marketplace",
    "plugins": [
        {
            "name": "valid-plugin",
            "source": {
                "source": "local",
                "path": "./plugin"
            }
        }
    ]
}"#,
    )
    .unwrap();
    fs::write(
        invalid_repo_root.join(".agents/plugins/marketplace.json"),
        r#"{
    "name": "invalid-marketplace",
    "plugins": [
        {
            "name": "broken-plugin",
            "source": {
                "source": "local",
                "path": "plugin-without-dot-slash"
            }
        }
    ]
}"#,
    )
    .unwrap();
    let marketplaces = list_marketplaces_with_home(
        &[
            AbsolutePathBuf::try_from(valid_repo_root).unwrap(),
            AbsolutePathBuf::try_from(invalid_repo_root).unwrap(),
        ],
        None,
    )
    .unwrap();
    assert_eq!(marketplaces.len(), 1);
    assert_eq!(marketplaces[0].name, "valid-marketplace");
}
// Plugin-manifest interface asset paths are made absolute under the plugin
// root, policy fields deserialize from their SCREAMING_SNAKE names, and the
// marketplace's `category` overrides the plugin's own.
#[test]
fn list_marketplaces_resolves_plugin_interface_paths_to_absolute() {
    let tmp = tempdir().unwrap();
    let repo_root = tmp.path().join("repo");
    let plugin_root = repo_root.join("plugins/demo-plugin");
    fs::create_dir_all(repo_root.join(".git")).unwrap();
    fs::create_dir_all(repo_root.join(".agents/plugins")).unwrap();
    fs::create_dir_all(plugin_root.join(".codex-plugin")).unwrap();
    fs::write(
        repo_root.join(".agents/plugins/marketplace.json"),
        r#"{
    "name": "codex-curated",
    "plugins": [
        {
            "name": "demo-plugin",
            "source": {
                "source": "local",
                "path": "./plugins/demo-plugin"
            },
            "policy": {
                "installation": "AVAILABLE",
                "authentication": "ON_INSTALL",
                "products": ["CODEX", "CHATGPT", "ATLAS"]
            },
            "category": "Design"
        }
    ]
}"#,
    )
    .unwrap();
    fs::write(
        plugin_root.join(".codex-plugin/plugin.json"),
        r#"{
    "name": "demo-plugin",
    "interface": {
        "displayName": "Demo",
        "category": "Productivity",
        "capabilities": ["Interactive", "Write"],
        "composerIcon": "./assets/icon.png",
        "logo": "./assets/logo.png",
        "screenshots": ["./assets/shot1.png"]
    }
}"#,
    )
    .unwrap();
    let marketplaces =
        list_marketplaces_with_home(&[AbsolutePathBuf::try_from(repo_root).unwrap()], None)
            .unwrap();
    assert_eq!(
        marketplaces[0].plugins[0].policy.installation,
        MarketplacePluginInstallPolicy::Available
    );
    assert_eq!(
        marketplaces[0].plugins[0].policy.authentication,
        MarketplacePluginAuthPolicy::OnInstall
    );
    assert_eq!(
        marketplaces[0].plugins[0].policy.products,
        Some(vec![Product::Codex, Product::Chatgpt, Product::Atlas])
    );
    assert_eq!(
        marketplaces[0].plugins[0].interface,
        Some(PluginManifestInterface {
            display_name: Some("Demo".to_string()),
            short_description: None,
            long_description: None,
            developer_name: None,
            category: Some("Design".to_string()),
            capabilities: vec!["Interactive".to_string(), "Write".to_string()],
            website_url: None,
            privacy_policy_url: None,
            terms_of_service_url: None,
            default_prompt: None,
            brand_color: None,
            composer_icon: Some(
                AbsolutePathBuf::try_from(plugin_root.join("assets/icon.png")).unwrap(),
            ),
            logo: Some(AbsolutePathBuf::try_from(plugin_root.join("assets/logo.png")).unwrap()),
            screenshots: vec![
                AbsolutePathBuf::try_from(plugin_root.join("assets/shot1.png")).unwrap(),
            ],
        })
    );
}
// Legacy top-level `installPolicy`/`authPolicy` keys are ignored; policy
// falls back to its defaults (AVAILABLE / ON_INSTALL, no product gating).
#[test]
fn list_marketplaces_ignores_legacy_top_level_policy_fields() {
    let tmp = tempdir().unwrap();
    let repo_root = tmp.path().join("repo");
    fs::create_dir_all(repo_root.join(".git")).unwrap();
    fs::create_dir_all(repo_root.join(".agents/plugins")).unwrap();
    fs::write(
        repo_root.join(".agents/plugins/marketplace.json"),
        r#"{
    "name": "codex-curated",
    "plugins": [
        {
            "name": "demo-plugin",
            "source": {
                "source": "local",
                "path": "./plugins/demo-plugin"
            },
            "installPolicy": "NOT_AVAILABLE",
            "authPolicy": "ON_USE"
        }
    ]
}"#,
    )
    .unwrap();
    let marketplaces =
        list_marketplaces_with_home(&[AbsolutePathBuf::try_from(repo_root).unwrap()], None)
            .unwrap();
    assert_eq!(
        marketplaces[0].plugins[0].policy.installation,
        MarketplacePluginInstallPolicy::Available
    );
    assert_eq!(
        marketplaces[0].plugins[0].policy.authentication,
        MarketplacePluginAuthPolicy::OnInstall
    );
    assert_eq!(marketplaces[0].plugins[0].policy.products, None);
}
#[test]
fn list_marketplaces_ignores_plugin_interface_assets_without_dot_slash() {
    // Interface asset references must be `./`-prefixed, plugin-root-relative
    // paths. Bare relative paths ("assets/icon.png") and absolute paths
    // ("/tmp/logo.png") are silently dropped while the rest of the interface
    // still loads.
    let tmp = tempdir().unwrap();
    let repo_root = tmp.path().join("repo");
    let plugin_root = repo_root.join("plugins/demo-plugin");
    // `.git` marks the repo root so marketplace discovery picks it up.
    fs::create_dir_all(repo_root.join(".git")).unwrap();
    fs::create_dir_all(repo_root.join(".agents/plugins")).unwrap();
    fs::create_dir_all(plugin_root.join(".codex-plugin")).unwrap();
    fs::write(
        repo_root.join(".agents/plugins/marketplace.json"),
        r#"{
            "name": "codex-curated",
            "plugins": [
                {
                    "name": "demo-plugin",
                    "source": {
                        "source": "local",
                        "path": "./plugins/demo-plugin"
                    }
                }
            ]
        }"#,
    )
    .unwrap();
    fs::write(
        plugin_root.join(".codex-plugin/plugin.json"),
        r#"{
            "name": "demo-plugin",
            "interface": {
                "displayName": "Demo",
                "capabilities": ["Interactive"],
                "composerIcon": "assets/icon.png",
                "logo": "/tmp/logo.png",
                "screenshots": ["assets/shot1.png"]
            }
        }"#,
    )
    .unwrap();
    let marketplaces =
        list_marketplaces_with_home(&[AbsolutePathBuf::try_from(repo_root).unwrap()], None)
            .unwrap();
    // All three asset fields are dropped; `category` is None because neither
    // the marketplace entry nor the manifest supplied one.
    assert_eq!(
        marketplaces[0].plugins[0].interface,
        Some(PluginManifestInterface {
            display_name: Some("Demo".to_string()),
            short_description: None,
            long_description: None,
            developer_name: None,
            category: None,
            capabilities: vec!["Interactive".to_string()],
            website_url: None,
            privacy_policy_url: None,
            terms_of_service_url: None,
            default_prompt: None,
            brand_color: None,
            composer_icon: None,
            logo: None,
            screenshots: Vec::new(),
        })
    );
    // Missing `policy` object falls back to the defaults.
    assert_eq!(
        marketplaces[0].plugins[0].policy.installation,
        MarketplacePluginInstallPolicy::Available
    );
    assert_eq!(
        marketplaces[0].plugins[0].policy.authentication,
        MarketplacePluginAuthPolicy::OnInstall
    );
    assert_eq!(marketplaces[0].plugins[0].policy.products, None);
}
#[test]
fn resolve_marketplace_plugin_rejects_non_relative_local_paths() {
    // Local plugin sources must start with `./` so they cannot point outside
    // the marketplace root; `../plugin-1` must be rejected with a descriptive
    // error naming the offending marketplace file.
    let tmp = tempdir().unwrap();
    let repo_root = tmp.path().join("repo");
    fs::create_dir_all(repo_root.join(".git")).unwrap();
    fs::create_dir_all(repo_root.join(".agents/plugins")).unwrap();
    fs::write(
        repo_root.join(".agents/plugins/marketplace.json"),
        r#"{
            "name": "codex-curated",
            "plugins": [
                {
                    "name": "local-plugin",
                    "source": {
                        "source": "local",
                        "path": "../plugin-1"
                    }
                }
            ]
        }"#,
    )
    .unwrap();
    let err = resolve_marketplace_plugin(
        &AbsolutePathBuf::try_from(repo_root.join(".agents/plugins/marketplace.json")).unwrap(),
        "local-plugin",
        Some(Product::Codex),
    )
    .unwrap_err();
    assert_eq!(
        err.to_string(),
        format!(
            "invalid marketplace file `{}`: local plugin source path must start with `./`",
            repo_root.join(".agents/plugins/marketplace.json").display()
        )
    );
}
#[test]
fn resolve_marketplace_plugin_uses_first_duplicate_entry() {
    // When two entries share the same plugin name, resolution is deterministic:
    // the first entry in document order wins.
    let tmp = tempdir().unwrap();
    let repo_root = tmp.path().join("repo");
    fs::create_dir_all(repo_root.join(".git")).unwrap();
    fs::create_dir_all(repo_root.join(".agents/plugins")).unwrap();
    fs::write(
        repo_root.join(".agents/plugins/marketplace.json"),
        r#"{
            "name": "codex-curated",
            "plugins": [
                {
                    "name": "local-plugin",
                    "source": {
                        "source": "local",
                        "path": "./first"
                    }
                },
                {
                    "name": "local-plugin",
                    "source": {
                        "source": "local",
                        "path": "./second"
                    }
                }
            ]
        }"#,
    )
    .unwrap();
    let resolved = resolve_marketplace_plugin(
        &AbsolutePathBuf::try_from(repo_root.join(".agents/plugins/marketplace.json")).unwrap(),
        "local-plugin",
        Some(Product::Codex),
    )
    .unwrap();
    // `./first` resolves relative to the repo root, proving the first entry won.
    assert_eq!(
        resolved.source_path,
        AbsolutePathBuf::try_from(repo_root.join("first")).unwrap()
    );
}
#[test]
fn resolve_marketplace_plugin_rejects_disallowed_product() {
    // A plugin whose policy lists only CHATGPT must not be installable when
    // resolving on behalf of a different product (Atlas here).
    let tmp = tempdir().unwrap();
    let repo_root = tmp.path().join("repo");
    fs::create_dir_all(repo_root.join(".git")).unwrap();
    fs::create_dir_all(repo_root.join(".agents/plugins")).unwrap();
    fs::write(
        repo_root.join(".agents/plugins/marketplace.json"),
        r#"{
            "name": "codex-curated",
            "plugins": [
                {
                    "name": "chatgpt-plugin",
                    "source": {
                        "source": "local",
                        "path": "./plugin"
                    },
                    "policy": {
                        "products": ["CHATGPT"]
                    }
                }
            ]
        }"#,
    )
    .unwrap();
    let err = resolve_marketplace_plugin(
        &AbsolutePathBuf::try_from(repo_root.join(".agents/plugins/marketplace.json")).unwrap(),
        "chatgpt-plugin",
        Some(Product::Atlas),
    )
    .unwrap_err();
    assert_eq!(
        err.to_string(),
        "plugin `chatgpt-plugin` is not available for install in marketplace `codex-curated`"
    );
}
#[test]
fn resolve_marketplace_plugin_allows_missing_products_field() {
    // An empty `policy` object (no `products` key) means "allowed everywhere":
    // resolution for any product should succeed.
    let tmp = tempdir().unwrap();
    let repo_root = tmp.path().join("repo");
    fs::create_dir_all(repo_root.join(".git")).unwrap();
    fs::create_dir_all(repo_root.join(".agents/plugins")).unwrap();
    fs::write(
        repo_root.join(".agents/plugins/marketplace.json"),
        r#"{
            "name": "codex-curated",
            "plugins": [
                {
                    "name": "default-plugin",
                    "source": {
                        "source": "local",
                        "path": "./plugin"
                    },
                    "policy": {}
                }
            ]
        }"#,
    )
    .unwrap();
    let resolved = resolve_marketplace_plugin(
        &AbsolutePathBuf::try_from(repo_root.join(".agents/plugins/marketplace.json")).unwrap(),
        "default-plugin",
        Some(Product::Codex),
    )
    .unwrap();
    // Plugin ids are keyed as `<plugin-name>@<marketplace-name>`.
    assert_eq!(resolved.plugin_id.as_key(), "default-plugin@codex-curated");
}
#[test]
fn resolve_marketplace_plugin_rejects_explicit_empty_products() {
    // Unlike a missing `products` key (allowed everywhere), an explicit empty
    // list means the plugin is available to no product at all.
    let tmp = tempdir().unwrap();
    let repo_root = tmp.path().join("repo");
    fs::create_dir_all(repo_root.join(".git")).unwrap();
    fs::create_dir_all(repo_root.join(".agents/plugins")).unwrap();
    fs::write(
        repo_root.join(".agents/plugins/marketplace.json"),
        r#"{
            "name": "codex-curated",
            "plugins": [
                {
                    "name": "disabled-plugin",
                    "source": {
                        "source": "local",
                        "path": "./plugin"
                    },
                    "policy": {
                        "products": []
                    }
                }
            ]
        }"#,
    )
    .unwrap();
    let err = resolve_marketplace_plugin(
        &AbsolutePathBuf::try_from(repo_root.join(".agents/plugins/marketplace.json")).unwrap(),
        "disabled-plugin",
        Some(Product::Codex),
    )
    .unwrap_err();
    assert_eq!(
        err.to_string(),
        "plugin `disabled-plugin` is not available for install in marketplace `codex-curated`"
    );
}

View File

@@ -0,0 +1,57 @@
mod discoverable;
mod manager;
mod manifest;
mod marketplace;
mod remote;
mod render;
mod runtime;
mod startup_sync;
mod store;
mod toggles;
pub use discoverable::list_tool_suggest_discoverable_plugins;
pub use manager::AppConnectorId;
pub use manager::ConfiguredMarketplace;
pub use manager::ConfiguredMarketplacePlugin;
pub use manager::LoadedPlugin;
pub use manager::OPENAI_CURATED_MARKETPLACE_NAME;
pub use manager::PluginCapabilitySummary;
pub use manager::PluginDetail;
pub use manager::PluginInstallError;
pub use manager::PluginInstallOutcome;
pub use manager::PluginInstallRequest;
pub use manager::PluginLoadOutcome;
pub use manager::PluginReadOutcome;
pub use manager::PluginReadRequest;
pub use manager::PluginRemoteSyncError;
pub use manager::PluginTelemetryMetadata;
pub use manager::PluginUninstallError;
pub use manager::PluginsManager;
pub use manager::RemotePluginSyncResult;
pub use manager::installed_plugin_telemetry_metadata;
pub use manager::load_plugin_apps;
pub use manager::load_plugin_mcp_servers;
pub use manager::plugin_namespace_for_skill_path;
pub use manager::plugin_telemetry_metadata_from_root;
pub use manifest::PluginManifestInterface;
pub use manifest::PluginManifestPaths;
pub use manifest::load_plugin_manifest;
pub use marketplace::MarketplaceError;
pub use marketplace::MarketplacePluginAuthPolicy;
pub use marketplace::MarketplacePluginInstallPolicy;
pub use marketplace::MarketplacePluginPolicy;
pub use marketplace::MarketplacePluginSource;
pub use remote::RemotePluginFetchError;
pub use remote::fetch_remote_featured_plugin_ids;
pub use render::render_explicit_plugin_instructions;
pub use render::render_plugins_section;
pub use runtime::PluginAnalyticsHook;
pub use runtime::PluginConfigEdit;
pub use runtime::PluginConfigPersister;
pub use runtime::PluginLoadRequest;
pub use runtime::PluginRemoteRequest;
pub use startup_sync::curated_plugins_repo_path;
pub use startup_sync::read_curated_plugins_sha;
pub use startup_sync::sync_openai_plugins_repo;
pub use store::PluginId;
pub use toggles::collect_plugin_enabled_candidates;

View File

@@ -0,0 +1,307 @@
use super::runtime::PluginRemoteRequest;
use codex_login::CodexAuth;
use codex_login::default_client::build_reqwest_client;
use serde::Deserialize;
use std::time::Duration;
use url::Url;
/// Marketplace name assumed when a remote status entry omits one.
const DEFAULT_REMOTE_MARKETPLACE_NAME: &str = "openai-curated";
/// Timeout for listing the account's remote plugin status.
const REMOTE_PLUGIN_FETCH_TIMEOUT: Duration = Duration::from_secs(30);
/// Shorter timeout for the best-effort featured-plugins lookup.
const REMOTE_FEATURED_PLUGIN_FETCH_TIMEOUT: Duration = Duration::from_secs(10);
/// Timeout for enable/uninstall mutations against the plugins backend.
const REMOTE_PLUGIN_MUTATION_TIMEOUT: Duration = Duration::from_secs(30);
/// One entry in the remote plugin status list returned by `/plugins/list`.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
pub(crate) struct RemotePluginStatusSummary {
    pub(crate) name: String,
    /// Defaults to `openai-curated` when the server omits the field.
    #[serde(default = "default_remote_marketplace_name")]
    pub(crate) marketplace_name: String,
    pub(crate) enabled: bool,
}
/// Response payload of a plugin enable/uninstall mutation; echoed back for
/// validation against the requested plugin id and action.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
#[serde(rename_all = "camelCase")]
struct RemotePluginMutationResponse {
    pub id: String,
    pub enabled: bool,
}
/// Errors surfaced while enabling or uninstalling a plugin on the remote
/// ChatGPT-backed plugins service. The `#[error]` strings are the user-facing
/// messages.
#[derive(Debug, thiserror::Error)]
pub enum RemotePluginMutationError {
    #[error("chatgpt authentication required for remote plugin mutation")]
    AuthRequired,
    #[error(
        "chatgpt authentication required for remote plugin mutation; api key auth is not supported"
    )]
    UnsupportedAuthMode,
    #[error("failed to read auth token for remote plugin mutation: {0}")]
    AuthToken(#[source] std::io::Error),
    #[error("invalid chatgpt base url for remote plugin mutation: {0}")]
    InvalidBaseUrl(#[source] url::ParseError),
    /// The base url cannot carry path segments (e.g. a `cannot-be-a-base` url).
    #[error("chatgpt base url cannot be used for plugin mutation")]
    InvalidBaseUrlPath,
    #[error("failed to send remote plugin mutation request to {url}: {source}")]
    Request {
        url: String,
        #[source]
        source: reqwest::Error,
    },
    #[error("remote plugin mutation failed with status {status} from {url}: {body}")]
    UnexpectedStatus {
        url: String,
        status: reqwest::StatusCode,
        body: String,
    },
    #[error("failed to parse remote plugin mutation response from {url}: {source}")]
    Decode {
        url: String,
        #[source]
        source: serde_json::Error,
    },
    /// Server echoed a different plugin id than the one mutated.
    #[error(
        "remote plugin mutation returned unexpected plugin id: expected `{expected}`, got `{actual}`"
    )]
    UnexpectedPluginId { expected: String, actual: String },
    /// Server reported an enabled state inconsistent with the requested action.
    #[error(
        "remote plugin mutation returned unexpected enabled state for `{plugin_id}`: expected {expected_enabled}, got {actual_enabled}"
    )]
    UnexpectedEnabledState {
        plugin_id: String,
        expected_enabled: bool,
        actual_enabled: bool,
    },
}
/// Errors surfaced while fetching remote plugin state (status list or
/// featured plugin ids) from the ChatGPT-backed plugins service.
#[derive(Debug, thiserror::Error)]
pub enum RemotePluginFetchError {
    #[error("chatgpt authentication required to sync remote plugins")]
    AuthRequired,
    #[error(
        "chatgpt authentication required to sync remote plugins; api key auth is not supported"
    )]
    UnsupportedAuthMode,
    #[error("failed to read auth token for remote plugin sync: {0}")]
    AuthToken(#[source] std::io::Error),
    #[error("failed to send remote plugin sync request to {url}: {source}")]
    Request {
        url: String,
        #[source]
        source: reqwest::Error,
    },
    #[error("remote plugin sync request to {url} failed with status {status}: {body}")]
    UnexpectedStatus {
        url: String,
        status: reqwest::StatusCode,
        body: String,
    },
    #[error("failed to parse remote plugin sync response from {url}: {source}")]
    Decode {
        url: String,
        #[source]
        source: serde_json::Error,
    },
}
/// Fetches the account's remote plugin status list from `<base>/plugins/list`.
///
/// Requires ChatGPT auth: returns `AuthRequired` when no auth is present and
/// `UnsupportedAuthMode` for non-ChatGPT (API-key) auth. Non-2xx responses and
/// undecodable bodies map to the corresponding [`RemotePluginFetchError`]
/// variants, each carrying the request url for diagnostics.
pub(crate) async fn fetch_remote_plugin_status(
    request: &PluginRemoteRequest,
    auth: Option<&CodexAuth>,
) -> Result<Vec<RemotePluginStatusSummary>, RemotePluginFetchError> {
    let Some(auth) = auth else {
        return Err(RemotePluginFetchError::AuthRequired);
    };
    if !auth.is_chatgpt_auth() {
        return Err(RemotePluginFetchError::UnsupportedAuthMode);
    }
    // Normalize a trailing slash so we never emit `...//plugins/list`.
    let base_url = request.chatgpt_base_url.trim_end_matches('/');
    let url = format!("{base_url}/plugins/list");
    let client = build_reqwest_client();
    let token = auth
        .get_token()
        .map_err(RemotePluginFetchError::AuthToken)?;
    // Named `http_request` (not `request`) to avoid shadowing the parameter.
    let mut http_request = client
        .get(&url)
        .timeout(REMOTE_PLUGIN_FETCH_TIMEOUT)
        .bearer_auth(token);
    if let Some(account_id) = auth.get_account_id() {
        http_request = http_request.header("chatgpt-account-id", account_id);
    }
    let response = http_request
        .send()
        .await
        .map_err(|source| RemotePluginFetchError::Request {
            url: url.clone(),
            source,
        })?;
    let status = response.status();
    // Read the body even on failure so the error can include it.
    let body = response.text().await.unwrap_or_default();
    if !status.is_success() {
        return Err(RemotePluginFetchError::UnexpectedStatus { url, status, body });
    }
    // `url` is no longer needed on the success path, so move it into the
    // error closure instead of cloning.
    serde_json::from_str(&body).map_err(|source| RemotePluginFetchError::Decode { url, source })
}
/// Fetches the featured plugin id list from `<base>/plugins/featured`.
///
/// Unlike [`fetch_remote_plugin_status`], this endpoint does not require
/// authentication: a bearer token and `chatgpt-account-id` header are attached
/// only when ChatGPT auth happens to be available.
pub async fn fetch_remote_featured_plugin_ids(
    request: &PluginRemoteRequest,
    auth: Option<&CodexAuth>,
) -> Result<Vec<String>, RemotePluginFetchError> {
    // Normalize a trailing slash so we never emit `...//plugins/featured`.
    let base_url = request.chatgpt_base_url.trim_end_matches('/');
    let url = format!("{base_url}/plugins/featured");
    let client = build_reqwest_client();
    // Named `http_request` (not `request`) to avoid shadowing the parameter.
    let mut http_request = client
        .get(&url)
        .timeout(REMOTE_FEATURED_PLUGIN_FETCH_TIMEOUT);
    if let Some(auth) = auth.filter(|auth| auth.is_chatgpt_auth()) {
        let token = auth
            .get_token()
            .map_err(RemotePluginFetchError::AuthToken)?;
        http_request = http_request.bearer_auth(token);
        if let Some(account_id) = auth.get_account_id() {
            http_request = http_request.header("chatgpt-account-id", account_id);
        }
    }
    let response = http_request
        .send()
        .await
        .map_err(|source| RemotePluginFetchError::Request {
            url: url.clone(),
            source,
        })?;
    let status = response.status();
    let body = response.text().await.unwrap_or_default();
    if !status.is_success() {
        return Err(RemotePluginFetchError::UnexpectedStatus { url, status, body });
    }
    // `url` is dead after this point; move it into the error instead of cloning.
    serde_json::from_str(&body).map_err(|source| RemotePluginFetchError::Decode { url, source })
}
/// Enables `plugin_id` on the remote plugins service, discarding the echoed
/// mutation response once it has been validated.
pub(crate) async fn enable_remote_plugin(
    request: &PluginRemoteRequest,
    auth: Option<&CodexAuth>,
    plugin_id: &str,
) -> Result<(), RemotePluginMutationError> {
    post_remote_plugin_mutation(request, auth, plugin_id, "enable")
        .await
        .map(|_response| ())
}
/// Uninstalls `plugin_id` on the remote plugins service, discarding the echoed
/// mutation response once it has been validated.
pub(crate) async fn uninstall_remote_plugin(
    request: &PluginRemoteRequest,
    auth: Option<&CodexAuth>,
    plugin_id: &str,
) -> Result<(), RemotePluginMutationError> {
    post_remote_plugin_mutation(request, auth, plugin_id, "uninstall")
        .await
        .map(|_response| ())
}
/// Validates that ChatGPT auth is present, mapping the two failure modes
/// (no auth at all vs. non-ChatGPT auth) to their mutation errors.
fn ensure_chatgpt_auth(auth: Option<&CodexAuth>) -> Result<&CodexAuth, RemotePluginMutationError> {
    match auth {
        Some(auth) if auth.is_chatgpt_auth() => Ok(auth),
        Some(_) => Err(RemotePluginMutationError::UnsupportedAuthMode),
        None => Err(RemotePluginMutationError::AuthRequired),
    }
}
/// Serde default used when a remote status entry omits `marketplace_name`.
fn default_remote_marketplace_name() -> String {
    String::from(DEFAULT_REMOTE_MARKETPLACE_NAME)
}
/// Sends `POST <base>/plugins/<plugin_id>/<action>` and validates the echo.
///
/// `action` is `"enable"` or `"uninstall"`. The server response must echo the
/// same plugin id and the enabled state implied by the action (`enable` =>
/// enabled, anything else => disabled); a mismatch is reported as an error
/// rather than trusted.
async fn post_remote_plugin_mutation(
    request: &PluginRemoteRequest,
    auth: Option<&CodexAuth>,
    plugin_id: &str,
    action: &str,
) -> Result<RemotePluginMutationResponse, RemotePluginMutationError> {
    let auth = ensure_chatgpt_auth(auth)?;
    let url = remote_plugin_mutation_url(request, plugin_id, action)?;
    let client = build_reqwest_client();
    let token = auth
        .get_token()
        .map_err(RemotePluginMutationError::AuthToken)?;
    let mut request = client
        .post(url.clone())
        .timeout(REMOTE_PLUGIN_MUTATION_TIMEOUT)
        .bearer_auth(token);
    if let Some(account_id) = auth.get_account_id() {
        request = request.header("chatgpt-account-id", account_id);
    }
    let response = request
        .send()
        .await
        .map_err(|source| RemotePluginMutationError::Request {
            url: url.clone(),
            source,
        })?;
    let status = response.status();
    // Read the body even on failure so the error can carry it.
    let body = response.text().await.unwrap_or_default();
    if !status.is_success() {
        return Err(RemotePluginMutationError::UnexpectedStatus { url, status, body });
    }
    let parsed: RemotePluginMutationResponse =
        serde_json::from_str(&body).map_err(|source| RemotePluginMutationError::Decode {
            url: url.clone(),
            source,
        })?;
    // "enable" must leave the plugin enabled; any other action must not.
    let expected_enabled = action == "enable";
    if parsed.id != plugin_id {
        return Err(RemotePluginMutationError::UnexpectedPluginId {
            expected: plugin_id.to_string(),
            actual: parsed.id,
        });
    }
    if parsed.enabled != expected_enabled {
        return Err(RemotePluginMutationError::UnexpectedEnabledState {
            plugin_id: plugin_id.to_string(),
            expected_enabled,
            actual_enabled: parsed.enabled,
        });
    }
    Ok(parsed)
}
/// Builds the mutation url `<base>/plugins/<plugin_id>/<action>`.
///
/// Uses `Url::path_segments_mut` so `plugin_id` is appended as a proper path
/// segment instead of being string-concatenated into the url.
fn remote_plugin_mutation_url(
    request: &PluginRemoteRequest,
    plugin_id: &str,
    action: &str,
) -> Result<String, RemotePluginMutationError> {
    let mut url = Url::parse(request.chatgpt_base_url.trim_end_matches('/'))
        .map_err(RemotePluginMutationError::InvalidBaseUrl)?;
    {
        // Inner scope ends the mutable borrow of `url` before serializing it.
        let mut segments = url
            .path_segments_mut()
            // `path_segments_mut` fails for cannot-be-a-base urls.
            .map_err(|()| RemotePluginMutationError::InvalidBaseUrlPath)?;
        // Drop a trailing empty segment (base path ending in `/`) so we do
        // not produce a double slash.
        segments.pop_if_empty();
        segments.push("plugins");
        segments.push(plugin_id);
        segments.push(action);
    }
    Ok(url.to_string())
}

View File

@@ -0,0 +1,92 @@
use crate::plugins::PluginCapabilitySummary;
use codex_protocol::protocol::PLUGINS_INSTRUCTIONS_CLOSE_TAG;
use codex_protocol::protocol::PLUGINS_INSTRUCTIONS_OPEN_TAG;
/// Renders the `<plugins_instructions>` prompt section listing the enabled
/// plugins plus usage guidance, or `None` when no plugins are available.
pub fn render_plugins_section(plugins: &[PluginCapabilitySummary]) -> Option<String> {
    if plugins.is_empty() {
        return None;
    }
    let mut lines = vec![
        "## Plugins".to_string(),
        "A plugin is a local bundle of skills, MCP servers, and apps. Below is the list of plugins that are enabled and available in this session.".to_string(),
        "### Available plugins".to_string(),
    ];
    // One bullet per plugin; the description is appended when present.
    lines.extend(
        plugins
            .iter()
            .map(|plugin| match plugin.description.as_deref() {
                Some(description) => format!("- `{}`: {description}", plugin.display_name),
                None => format!("- `{}`", plugin.display_name),
            }),
    );
    lines.push("### How to use plugins".to_string());
    lines.push(
        r###"- Discovery: The list above is the plugins available in this session.
- Skill naming: If a plugin contributes skills, those skill entries are prefixed with `plugin_name:` in the Skills list.
- Trigger rules: If the user explicitly names a plugin, prefer capabilities associated with that plugin for that turn.
- Relationship to capabilities: Plugins are not invoked directly. Use their underlying skills, MCP tools, and app tools to help solve the task.
- Preference: When a relevant plugin is available, prefer using capabilities associated with that plugin over standalone capabilities that provide similar functionality.
- Missing/blocked: If the user requests a plugin that is not listed above, or the plugin does not have relevant callable capabilities for the task, say so briefly and continue with the best fallback."###
            .to_string(),
    );
    let body = lines.join("\n");
    // Wrap in the protocol's instruction tags so the model can locate it.
    Some(format!(
        "{PLUGINS_INSTRUCTIONS_OPEN_TAG}\n{body}\n{PLUGINS_INSTRUCTIONS_CLOSE_TAG}"
    ))
}
/// Renders per-plugin guidance when the user explicitly named a plugin,
/// listing its skills prefix, MCP servers, and apps available this session.
/// Returns `None` when the plugin contributes none of those capabilities.
pub fn render_explicit_plugin_instructions(
    plugin: &PluginCapabilitySummary,
    available_mcp_servers: &[String],
    available_apps: &[String],
) -> Option<String> {
    // Formats a name list as comma-separated backticked entries.
    let backtick_list = |names: &[String]| {
        names
            .iter()
            .map(|name| format!("`{name}`"))
            .collect::<Vec<_>>()
            .join(", ")
    };
    let mut bullets: Vec<String> = Vec::new();
    if plugin.has_skills {
        bullets.push(format!(
            "- Skills from this plugin are prefixed with `{}:`.",
            plugin.display_name
        ));
    }
    if !available_mcp_servers.is_empty() {
        bullets.push(format!(
            "- MCP servers from this plugin available in this session: {}.",
            backtick_list(available_mcp_servers)
        ));
    }
    if !available_apps.is_empty() {
        bullets.push(format!(
            "- Apps from this plugin available in this session: {}.",
            backtick_list(available_apps)
        ));
    }
    // Nothing to point at: emit no instructions at all.
    if bullets.is_empty() {
        return None;
    }
    let mut lines = vec![format!(
        "Capabilities from the `{}` plugin:",
        plugin.display_name
    )];
    lines.extend(bullets);
    lines.push("Use these plugin-associated capabilities to help solve the task.".to_string());
    Some(lines.join("\n"))
}
#[cfg(test)]
#[path = "render_tests.rs"]
mod tests;

View File

@@ -0,0 +1,23 @@
use super::*;
use pretty_assertions::assert_eq;
#[test]
fn render_plugins_section_returns_none_for_empty_plugins() {
    // No plugins means no section at all, not an empty wrapper.
    let rendered = render_plugins_section(&[]);
    assert_eq!(rendered, None);
}
#[test]
fn render_plugins_section_includes_descriptions_and_skill_naming_guidance() {
    // Golden test: one plugin with a description should produce the full
    // tagged section, byte for byte.
    let rendered = render_plugins_section(&[PluginCapabilitySummary {
        config_name: "sample@test".to_string(),
        display_name: "sample".to_string(),
        description: Some("inspect sample data".to_string()),
        has_skills: true,
        ..PluginCapabilitySummary::default()
    }])
    .expect("plugin section should render");
    let expected = "<plugins_instructions>\n## Plugins\nA plugin is a local bundle of skills, MCP servers, and apps. Below is the list of plugins that are enabled and available in this session.\n### Available plugins\n- `sample`: inspect sample data\n### How to use plugins\n- Discovery: The list above is the plugins available in this session.\n- Skill naming: If a plugin contributes skills, those skill entries are prefixed with `plugin_name:` in the Skills list.\n- Trigger rules: If the user explicitly names a plugin, prefer capabilities associated with that plugin for that turn.\n- Relationship to capabilities: Plugins are not invoked directly. Use their underlying skills, MCP tools, and app tools to help solve the task.\n- Preference: When a relevant plugin is available, prefer using capabilities associated with that plugin over standalone capabilities that provide similar functionality.\n- Missing/blocked: If the user requests a plugin that is not listed above, or the plugin does not have relevant callable capabilities for the task, say so briefly and continue with the best fallback.\n</plugins_instructions>";
    assert_eq!(rendered, expected);
}

View File

@@ -0,0 +1,39 @@
use anyhow::Result;
use async_trait::async_trait;
use codex_config::ConfigLayerStack;
use super::PluginTelemetryMetadata;
/// Inputs needed to load plugins from local configuration.
#[derive(Debug, Clone)]
pub struct PluginLoadRequest {
    /// Global feature toggle; when false, plugin loading should be skipped.
    pub plugins_enabled: bool,
    pub config_layer_stack: ConfigLayerStack,
}
/// Inputs needed for plugin operations that also talk to the remote
/// ChatGPT-backed plugins service.
#[derive(Debug, Clone)]
pub struct PluginRemoteRequest {
    pub plugins_enabled: bool,
    pub config_layer_stack: ConfigLayerStack,
    /// Base url of the ChatGPT backend against which `/plugins/...` endpoints
    /// are resolved.
    pub chatgpt_base_url: String,
}
/// A single persisted change to plugin configuration, applied in batches via
/// `PluginConfigPersister::apply_plugin_edits`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum PluginConfigEdit {
    /// Set the enabled flag for `plugin_id`.
    SetEnabled { plugin_id: String, enabled: bool },
    /// Remove the persisted entry for `plugin_id`.
    ClearPlugin { plugin_id: String },
}
/// Persists plugin configuration changes on behalf of the plugins manager
/// (e.g. to the user's config file).
#[async_trait]
pub trait PluginConfigPersister: Send + Sync {
    async fn enable_plugin(&self, plugin_id: &str) -> Result<()>;
    async fn clear_plugin(&self, plugin_id: &str) -> Result<()>;
    /// Applies a batch of [`PluginConfigEdit`]s.
    async fn apply_plugin_edits(&self, edits: &[PluginConfigEdit]) -> Result<()>;
}
/// Analytics callbacks fired when a plugin is installed or uninstalled.
pub trait PluginAnalyticsHook: Send + Sync {
    fn track_plugin_installed(&self, metadata: PluginTelemetryMetadata);
    fn track_plugin_uninstalled(&self, metadata: PluginTelemetryMetadata);
}

View File

@@ -0,0 +1,666 @@
use crate::plugins::PluginConfigPersister;
use crate::plugins::PluginRemoteRequest;
use codex_login::AuthManager;
use codex_login::default_client::build_reqwest_client;
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use std::process::Output;
use std::process::Stdio;
use std::sync::Arc;
use std::time::Duration;
use reqwest::Client;
use serde::Deserialize;
use tracing::info;
use tracing::warn;
use zip::ZipArchive;
use super::PluginsManager;
// GitHub REST API endpoint and headers for the HTTP fallback sync path.
const GITHUB_API_BASE_URL: &str = "https://api.github.com";
const GITHUB_API_ACCEPT_HEADER: &str = "application/vnd.github+json";
const GITHUB_API_VERSION_HEADER: &str = "2022-11-28";
// Owner/repo of the curated plugins repository on GitHub.
const OPENAI_PLUGINS_OWNER: &str = "openai";
const OPENAI_PLUGINS_REPO: &str = "plugins";
// Locations under CODEX_HOME for the curated checkout and its recorded sha.
const CURATED_PLUGINS_RELATIVE_DIR: &str = ".tmp/plugins";
const CURATED_PLUGINS_SHA_FILE: &str = ".tmp/plugins.sha";
// Per-operation timeouts for the git and HTTP sync transports.
const CURATED_PLUGINS_GIT_TIMEOUT: Duration = Duration::from_secs(30);
const CURATED_PLUGINS_HTTP_TIMEOUT: Duration = Duration::from_secs(30);
// Marker file whose presence suppresses the one-time startup remote sync.
const STARTUP_REMOTE_PLUGIN_SYNC_MARKER_FILE: &str = ".tmp/app-server-remote-plugin-sync-v1";
// How long the startup sync waits for the curated marketplace to appear.
const STARTUP_REMOTE_PLUGIN_SYNC_PREREQUISITE_TIMEOUT: Duration = Duration::from_secs(5);
/// Subset of GitHub's "get a repository" response; only the default branch
/// name is needed.
#[derive(Debug, Deserialize)]
struct GitHubRepositorySummary {
    default_branch: String,
}
/// Subset of GitHub's "get a reference" response.
#[derive(Debug, Deserialize)]
struct GitHubGitRefSummary {
    object: GitHubGitRefObject,
}
/// The object a git ref points at; only the commit sha is needed.
#[derive(Debug, Deserialize)]
struct GitHubGitRefObject {
    sha: String,
}
/// Absolute path of the curated plugins checkout under `codex_home`.
pub fn curated_plugins_repo_path(codex_home: &Path) -> PathBuf {
    let mut repo_path = codex_home.to_path_buf();
    repo_path.push(CURATED_PLUGINS_RELATIVE_DIR);
    repo_path
}
/// Reads the recorded curated-plugins sha, if the sha file exists.
pub fn read_curated_plugins_sha(codex_home: &Path) -> Option<String> {
    let sha_path = codex_home.join(CURATED_PLUGINS_SHA_FILE);
    read_sha_file(&sha_path)
}
/// Syncs the curated `openai/plugins` repo into `codex_home`, returning the
/// remote HEAD sha on success. Uses the real `git` binary and GitHub API.
pub fn sync_openai_plugins_repo(codex_home: &Path) -> Result<String, String> {
    sync_openai_plugins_repo_with_transport_overrides(codex_home, "git", GITHUB_API_BASE_URL)
}
/// Tries the git-based sync first and, on any git failure, falls back to the
/// GitHub HTTP API. The git binary and API base url are parameters so tests
/// can substitute fakes.
fn sync_openai_plugins_repo_with_transport_overrides(
    codex_home: &Path,
    git_binary: &str,
    api_base_url: &str,
) -> Result<String, String> {
    match sync_openai_plugins_repo_via_git(codex_home, git_binary) {
        Ok(remote_sha) => Ok(remote_sha),
        Err(err) => {
            // git may be missing, blocked, or time out; log and fall back.
            warn!(
                error = %err,
                git_binary,
                "git sync failed for curated plugin sync; falling back to GitHub HTTP"
            );
            sync_openai_plugins_repo_via_http(codex_home, api_base_url)
        }
    }
}
/// Git transport for the curated plugins sync.
///
/// Flow: read the remote HEAD sha, short-circuit when the local checkout is
/// already at that sha, otherwise shallow-clone into a staging directory,
/// verify the clone's HEAD and manifest, then atomically swap it into place
/// and record the sha.
fn sync_openai_plugins_repo_via_git(codex_home: &Path, git_binary: &str) -> Result<String, String> {
    let repo_path = curated_plugins_repo_path(codex_home);
    let sha_path = codex_home.join(CURATED_PLUGINS_SHA_FILE);
    let remote_sha = git_ls_remote_head_sha(git_binary)?;
    let local_sha = read_local_git_or_sha_file(&repo_path, &sha_path, git_binary);
    // Up to date AND the checkout is a real git repo: nothing to do.
    if local_sha.as_deref() == Some(remote_sha.as_str()) && repo_path.join(".git").is_dir() {
        return Ok(remote_sha);
    }
    let cloned_repo_path = prepare_curated_repo_parent_and_temp_dir(&repo_path)?;
    let clone_output = run_git_command_with_timeout(
        Command::new(git_binary)
            // Avoid background lock contention with other git processes.
            .env("GIT_OPTIONAL_LOCKS", "0")
            .arg("clone")
            .arg("--depth")
            .arg("1")
            .arg("https://github.com/openai/plugins.git")
            .arg(&cloned_repo_path),
        "git clone curated plugins repo",
        CURATED_PLUGINS_GIT_TIMEOUT,
    )?;
    ensure_git_success(&clone_output, "git clone curated plugins repo")?;
    // Guard against the remote moving between ls-remote and clone.
    let cloned_sha = git_head_sha(&cloned_repo_path, git_binary)?;
    if cloned_sha != remote_sha {
        return Err(format!(
            "curated plugins clone HEAD mismatch: expected {remote_sha}, got {cloned_sha}"
        ));
    }
    ensure_marketplace_manifest_exists(&cloned_repo_path)?;
    activate_curated_repo(&repo_path, &cloned_repo_path)?;
    write_curated_plugins_sha(&sha_path, &remote_sha)?;
    Ok(remote_sha)
}
/// HTTP fallback transport for the curated plugins sync: resolves the remote
/// sha via the GitHub API, downloads a zipball, and swaps it into place.
///
/// NOTE(review): this builds its own current-thread Tokio runtime and calls
/// `block_on`, so it must only be invoked from a non-async context —
/// `block_on` panics when called inside a running runtime.
fn sync_openai_plugins_repo_via_http(
    codex_home: &Path,
    api_base_url: &str,
) -> Result<String, String> {
    let repo_path = curated_plugins_repo_path(codex_home);
    let sha_path = codex_home.join(CURATED_PLUGINS_SHA_FILE);
    let runtime = tokio::runtime::Builder::new_current_thread()
        .enable_all()
        .build()
        .map_err(|err| format!("failed to create curated plugins sync runtime: {err}"))?;
    let remote_sha = runtime.block_on(fetch_curated_repo_remote_sha(api_base_url))?;
    let local_sha = read_sha_file(&sha_path);
    // HTTP checkouts have no `.git` dir, so only the sha file is consulted.
    if local_sha.as_deref() == Some(remote_sha.as_str()) && repo_path.is_dir() {
        return Ok(remote_sha);
    }
    let cloned_repo_path = prepare_curated_repo_parent_and_temp_dir(&repo_path)?;
    let zipball_bytes = runtime.block_on(fetch_curated_repo_zipball(api_base_url, &remote_sha))?;
    extract_zipball_to_dir(&zipball_bytes, &cloned_repo_path)?;
    ensure_marketplace_manifest_exists(&cloned_repo_path)?;
    activate_curated_repo(&repo_path, &cloned_repo_path)?;
    write_curated_plugins_sha(&sha_path, &remote_sha)?;
    Ok(remote_sha)
}
/// Spawns a one-time background task that syncs remote plugin state on app
/// startup. A marker file records success so subsequent startups skip the
/// sync; on failure the marker is not written and the sync retries next start.
///
/// NOTE(review): the marker check happens both before spawning (cheap fast
/// path) and inside the task; the two checks are not synchronized, so this is
/// best-effort "once", not a guarantee under concurrent callers.
pub(super) fn start_startup_remote_plugin_sync_once(
    manager: Arc<PluginsManager>,
    codex_home: PathBuf,
    request: PluginRemoteRequest,
    auth_manager: Arc<AuthManager>,
    config_persister: Arc<dyn PluginConfigPersister>,
) {
    let marker_path = startup_remote_plugin_sync_marker_path(codex_home.as_path());
    if marker_path.is_file() {
        return;
    }
    tokio::spawn(async move {
        if marker_path.is_file() {
            return;
        }
        // The remote sync needs the curated marketplace files on disk first.
        if !wait_for_startup_remote_plugin_sync_prerequisites(codex_home.as_path()).await {
            warn!(
                codex_home = %codex_home.display(),
                "skipping startup remote plugin sync because curated marketplace is not ready"
            );
            return;
        }
        let auth = auth_manager.auth().await;
        match manager
            .sync_plugins_from_remote(
                &request,
                auth.as_ref(),
                /*additive_only*/ true,
                config_persister.as_ref(),
            )
            .await
        {
            Ok(sync_result) => {
                info!(
                    installed_plugin_ids = ?sync_result.installed_plugin_ids,
                    enabled_plugin_ids = ?sync_result.enabled_plugin_ids,
                    disabled_plugin_ids = ?sync_result.disabled_plugin_ids,
                    uninstalled_plugin_ids = ?sync_result.uninstalled_plugin_ids,
                    "completed startup remote plugin sync"
                );
                // A failed marker write is non-fatal; the sync will just
                // run again (additively) on the next start.
                if let Err(err) =
                    write_startup_remote_plugin_sync_marker(codex_home.as_path()).await
                {
                    warn!(
                        error = %err,
                        path = %marker_path.display(),
                        "failed to persist startup remote plugin sync marker"
                    );
                }
            }
            Err(err) => {
                warn!(
                    error = %err,
                    "startup remote plugin sync failed; will retry on next app-server start"
                );
            }
        }
    });
}
/// Path of the marker file that records a completed startup remote sync.
fn startup_remote_plugin_sync_marker_path(codex_home: &Path) -> PathBuf {
    let mut marker_path = codex_home.to_path_buf();
    marker_path.push(STARTUP_REMOTE_PLUGIN_SYNC_MARKER_FILE);
    marker_path
}
fn startup_remote_plugin_sync_prerequisites_ready(codex_home: &Path) -> bool {
codex_home
.join(".tmp/plugins/.agents/plugins/marketplace.json")
.is_file()
&& codex_home.join(".tmp/plugins.sha").is_file()
}
/// Polls every 50ms until the curated marketplace files exist or the
/// prerequisite timeout elapses; returns whether they became ready in time.
async fn wait_for_startup_remote_plugin_sync_prerequisites(codex_home: &Path) -> bool {
    let deadline = tokio::time::Instant::now() + STARTUP_REMOTE_PLUGIN_SYNC_PREREQUISITE_TIMEOUT;
    loop {
        if startup_remote_plugin_sync_prerequisites_ready(codex_home) {
            return true;
        }
        // Check the deadline after the readiness probe so an already-ready
        // state succeeds even with a zero timeout.
        if tokio::time::Instant::now() >= deadline {
            return false;
        }
        tokio::time::sleep(Duration::from_millis(50)).await;
    }
}
/// Writes the startup-sync marker file, creating parent directories as needed.
/// The file's existence (not its content) is what suppresses future syncs.
async fn write_startup_remote_plugin_sync_marker(codex_home: &Path) -> std::io::Result<()> {
    let marker_path = startup_remote_plugin_sync_marker_path(codex_home);
    if let Some(parent) = marker_path.parent() {
        tokio::fs::create_dir_all(parent).await?;
    }
    tokio::fs::write(marker_path, b"ok\n").await
}
/// Ensures the curated repo's parent directory exists and creates a uniquely
/// named staging directory beside it (same filesystem, so a later rename-swap
/// into place is atomic).
///
/// NOTE(review): `keep()` detaches the TempDir guard, so the staging directory
/// is NOT auto-deleted — the caller must activate or discard it.
fn prepare_curated_repo_parent_and_temp_dir(repo_path: &Path) -> Result<PathBuf, String> {
    let Some(parent) = repo_path.parent() else {
        return Err(format!(
            "failed to determine curated plugins parent directory for {}",
            repo_path.display()
        ));
    };
    std::fs::create_dir_all(parent).map_err(|err| {
        format!(
            "failed to create curated plugins parent directory {}: {err}",
            parent.display()
        )
    })?;
    let clone_dir = tempfile::Builder::new()
        .prefix("plugins-clone-")
        .tempdir_in(parent)
        .map_err(|err| {
            format!(
                "failed to create temporary curated plugins directory in {}: {err}",
                parent.display()
            )
        })?;
    Ok(clone_dir.keep())
}
/// Validates that a freshly staged curated checkout actually contains the
/// marketplace manifest; an incomplete clone/archive must not be activated.
fn ensure_marketplace_manifest_exists(repo_path: &Path) -> Result<(), String> {
    let manifest_path = repo_path.join(".agents/plugins/marketplace.json");
    if manifest_path.is_file() {
        Ok(())
    } else {
        Err(format!(
            "curated plugins archive missing marketplace manifest at {}",
            manifest_path.display()
        ))
    }
}
/// Swaps a staged curated checkout into its final location.
///
/// When a previous checkout exists it is first moved into a sibling backup
/// directory; if activating the new checkout then fails, the backup is moved
/// back. If even that rollback fails, the backup is kept on disk and its path
/// is reported so no data is lost.
fn activate_curated_repo(repo_path: &Path, staged_repo_path: &Path) -> Result<(), String> {
    if repo_path.exists() {
        let parent = repo_path.parent().ok_or_else(|| {
            format!(
                "failed to determine curated plugins parent directory for {}",
                repo_path.display()
            )
        })?;
        // Backup lives next to the repo so the rename stays on one filesystem.
        let backup_dir = tempfile::Builder::new()
            .prefix("plugins-backup-")
            .tempdir_in(parent)
            .map_err(|err| {
                format!(
                    "failed to create curated plugins backup directory in {}: {err}",
                    parent.display()
                )
            })?;
        let backup_repo_path = backup_dir.path().join("repo");
        std::fs::rename(repo_path, &backup_repo_path).map_err(|err| {
            format!(
                "failed to move previous curated plugins repo out of the way at {}: {err}",
                repo_path.display()
            )
        })?;
        if let Err(err) = std::fs::rename(staged_repo_path, repo_path) {
            // Activation failed: try to restore the old checkout.
            let rollback_result = std::fs::rename(&backup_repo_path, repo_path);
            return match rollback_result {
                Ok(()) => Err(format!(
                    "failed to activate new curated plugins repo at {}: {err}",
                    repo_path.display()
                )),
                Err(rollback_err) => {
                    // Rollback failed too: keep the backup dir (disable its
                    // auto-cleanup) and tell the user where it is.
                    let backup_path = backup_dir.keep().join("repo");
                    Err(format!(
                        "failed to activate new curated plugins repo at {}: {err}; failed to restore previous repo (left at {}): {rollback_err}",
                        repo_path.display(),
                        backup_path.display()
                    ))
                }
            };
        }
    } else {
        std::fs::rename(staged_repo_path, repo_path).map_err(|err| {
            format!(
                "failed to activate curated plugins repo at {}: {err}",
                repo_path.display()
            )
        })?;
    }
    Ok(())
}
/// Persist the curated plugins HEAD sha (newline-terminated) at `sha_path`,
/// creating any missing parent directories first.
fn write_curated_plugins_sha(sha_path: &Path, remote_sha: &str) -> Result<(), String> {
    if let Some(parent) = sha_path.parent() {
        std::fs::create_dir_all(parent).map_err(|err| {
            format!(
                "failed to create curated plugins sha directory {}: {err}",
                parent.display()
            )
        })?;
    }
    let contents = format!("{remote_sha}\n");
    std::fs::write(sha_path, contents).map_err(|err| {
        format!(
            "failed to write curated plugins sha file {}: {err}",
            sha_path.display()
        )
    })
}
fn read_local_git_or_sha_file(
repo_path: &Path,
sha_path: &Path,
git_binary: &str,
) -> Option<String> {
if repo_path.join(".git").is_dir()
&& let Ok(sha) = git_head_sha(repo_path, git_binary)
{
return Some(sha);
}
read_sha_file(sha_path)
}
/// Resolve the remote HEAD sha of the curated plugins repository via
/// `git ls-remote`, enforcing the configured timeout.
fn git_ls_remote_head_sha(git_binary: &str) -> Result<String, String> {
    let mut command = Command::new(git_binary);
    command
        .env("GIT_OPTIONAL_LOCKS", "0")
        .arg("ls-remote")
        .arg("https://github.com/openai/plugins.git")
        .arg("HEAD");
    let output = run_git_command_with_timeout(
        &mut command,
        "git ls-remote curated plugins repo",
        CURATED_PLUGINS_GIT_TIMEOUT,
    )?;
    ensure_git_success(&output, "git ls-remote curated plugins repo")?;
    // Output is `<sha>\t<ref>` per line; only the first line matters for HEAD.
    let stdout = String::from_utf8_lossy(&output.stdout);
    let first_line = stdout
        .lines()
        .next()
        .ok_or_else(|| "git ls-remote returned empty output for curated plugins repo".to_string())?;
    let (sha, _ref_name) = first_line.split_once('\t').ok_or_else(|| {
        format!("unexpected git ls-remote output for curated plugins repo: {first_line}")
    })?;
    if sha.is_empty() {
        return Err("git ls-remote returned empty sha for curated plugins repo".to_string());
    }
    Ok(sha.to_string())
}
/// Read the HEAD commit sha of the local checkout at `repo_path` using
/// `git rev-parse HEAD`.
fn git_head_sha(repo_path: &Path, git_binary: &str) -> Result<String, String> {
    let mut command = Command::new(git_binary);
    command
        .env("GIT_OPTIONAL_LOCKS", "0")
        .arg("-C")
        .arg(repo_path)
        .arg("rev-parse")
        .arg("HEAD");
    let output = command.output().map_err(|err| {
        format!(
            "failed to run git rev-parse HEAD in {}: {err}",
            repo_path.display()
        )
    })?;
    ensure_git_success(&output, "git rev-parse HEAD")?;
    let sha = String::from_utf8_lossy(&output.stdout).trim().to_string();
    if sha.is_empty() {
        Err(format!(
            "git rev-parse HEAD returned empty output in {}",
            repo_path.display()
        ))
    } else {
        Ok(sha)
    }
}
/// Spawn a git command with piped stdio and poll it every 100ms, killing the
/// child once `timeout` elapses.
///
/// On timeout the returned error includes `context`, the elapsed seconds, and
/// any stderr the child produced before being killed.
fn run_git_command_with_timeout(
    command: &mut Command,
    context: &str,
    timeout: Duration,
) -> Result<Output, String> {
    let mut child = command
        .stdin(Stdio::null())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn()
        .map_err(|err| format!("failed to run {context}: {err}"))?;
    let start = std::time::Instant::now();
    loop {
        match child.try_wait() {
            Ok(Some(_)) => {
                return child
                    .wait_with_output()
                    .map_err(|err| format!("failed to wait for {context}: {err}"));
            }
            Ok(None) => {}
            Err(err) => return Err(format!("failed to poll {context}: {err}")),
        }
        if start.elapsed() >= timeout {
            // One last poll before killing: the child may have exited between
            // the check above and the deadline firing.
            match child.try_wait() {
                Ok(Some(_)) => {
                    return child
                        .wait_with_output()
                        .map_err(|err| format!("failed to wait for {context}: {err}"));
                }
                Ok(None) => {}
                Err(err) => return Err(format!("failed to poll {context}: {err}")),
            }
            // Kill failure is deliberately ignored; wait_with_output below
            // reports the child's final state either way.
            let _ = child.kill();
            let output = child
                .wait_with_output()
                .map_err(|err| format!("failed to wait for {context} after timeout: {err}"))?;
            let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string();
            return if stderr.is_empty() {
                Err(format!("{context} timed out after {}s", timeout.as_secs()))
            } else {
                Err(format!(
                    "{context} timed out after {}s: {stderr}",
                    timeout.as_secs()
                ))
            };
        }
        std::thread::sleep(Duration::from_millis(100));
    }
}
/// Turn a non-zero git exit status into a descriptive error, appending the
/// trimmed stderr when it is non-empty.
fn ensure_git_success(output: &Output, context: &str) -> Result<(), String> {
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string();
        let message = if stderr.is_empty() {
            format!("{context} failed with status {}", output.status)
        } else {
            format!(
                "{context} failed with status {}: {stderr}",
                output.status
            )
        };
        return Err(message);
    }
    Ok(())
}
/// Resolve the curated plugins repo's remote HEAD sha via the GitHub REST API:
/// first fetch the repository metadata to learn the default branch, then fetch
/// that branch's git ref to obtain the commit sha.
async fn fetch_curated_repo_remote_sha(api_base_url: &str) -> Result<String, String> {
    // Normalize so `{base}/repos/...` never produces a double slash.
    let api_base_url = api_base_url.trim_end_matches('/');
    let repo_url = format!("{api_base_url}/repos/{OPENAI_PLUGINS_OWNER}/{OPENAI_PLUGINS_REPO}");
    let client = build_reqwest_client();
    let repo_body = fetch_github_text(&client, &repo_url, "get curated plugins repository").await?;
    let repo_summary: GitHubRepositorySummary =
        serde_json::from_str(&repo_body).map_err(|err| {
            format!("failed to parse curated plugins repository response from {repo_url}: {err}")
        })?;
    if repo_summary.default_branch.is_empty() {
        return Err(format!(
            "curated plugins repository response from {repo_url} did not include a default branch"
        ));
    }
    let git_ref_url = format!("{repo_url}/git/ref/heads/{}", repo_summary.default_branch);
    let git_ref_body =
        fetch_github_text(&client, &git_ref_url, "get curated plugins HEAD ref").await?;
    let git_ref: GitHubGitRefSummary = serde_json::from_str(&git_ref_body).map_err(|err| {
        format!("failed to parse curated plugins ref response from {git_ref_url}: {err}")
    })?;
    if git_ref.object.sha.is_empty() {
        return Err(format!(
            "curated plugins ref response from {git_ref_url} did not include a HEAD sha"
        ));
    }
    Ok(git_ref.object.sha)
}
/// Download the zipball of the curated plugins repo pinned at `remote_sha`
/// through the GitHub REST API.
async fn fetch_curated_repo_zipball(
    api_base_url: &str,
    remote_sha: &str,
) -> Result<Vec<u8>, String> {
    let base = api_base_url.trim_end_matches('/');
    let zipball_url = format!(
        "{base}/repos/{OPENAI_PLUGINS_OWNER}/{OPENAI_PLUGINS_REPO}/zipball/{remote_sha}"
    );
    let client = build_reqwest_client();
    fetch_github_bytes(&client, &zipball_url, "download curated plugins archive").await
}
/// GET `url` and return the response body as text; transport failures and
/// non-success statuses become `String` errors tagged with `context`.
async fn fetch_github_text(client: &Client, url: &str, context: &str) -> Result<String, String> {
    let response = github_request(client, url)
        .send()
        .await
        .map_err(|err| format!("failed to {context} from {url}: {err}"))?;
    let status = response.status();
    // Read the body even on failure so the error message can include it.
    let body = response.text().await.unwrap_or_default();
    if status.is_success() {
        Ok(body)
    } else {
        Err(format!(
            "{context} from {url} failed with status {status}: {body}"
        ))
    }
}
/// GET `url` and return the raw response bytes; non-success statuses become
/// `String` errors carrying the (lossily decoded) body for diagnostics.
async fn fetch_github_bytes(client: &Client, url: &str, context: &str) -> Result<Vec<u8>, String> {
    let response = github_request(client, url)
        .send()
        .await
        .map_err(|err| format!("failed to {context} from {url}: {err}"))?;
    let status = response.status();
    let body = response
        .bytes()
        .await
        .map_err(|err| format!("failed to read {context} response from {url}: {err}"))?;
    if status.is_success() {
        Ok(body.to_vec())
    } else {
        let body_text = String::from_utf8_lossy(&body);
        Err(format!(
            "{context} from {url} failed with status {status}: {body_text}"
        ))
    }
}
/// Build a GET request carrying the GitHub API accept/version headers and the
/// curated-plugins HTTP timeout.
fn github_request(client: &Client, url: &str) -> reqwest::RequestBuilder {
    let builder = client.get(url).timeout(CURATED_PLUGINS_HTTP_TIMEOUT);
    builder
        .header("accept", GITHUB_API_ACCEPT_HEADER)
        .header("x-github-api-version", GITHUB_API_VERSION_HEADER)
}
/// Read a previously recorded sha from disk; returns `None` when the file is
/// unreadable or contains only whitespace.
fn read_sha_file(sha_path: &Path) -> Option<String> {
    let raw = std::fs::read_to_string(sha_path).ok()?;
    let sha = raw.trim();
    if sha.is_empty() {
        None
    } else {
        Some(sha.to_string())
    }
}
/// Extract a GitHub zipball into `destination`, stripping the archive's single
/// top-level directory (GitHub zipballs wrap everything in `<owner>-<repo>-<sha>/`).
///
/// Entries whose names escape the extraction root are rejected; entries that
/// consist only of the top-level wrapper are skipped. On unix, file permission
/// bits recorded in the archive are re-applied after extraction.
fn extract_zipball_to_dir(bytes: &[u8], destination: &Path) -> Result<(), String> {
    std::fs::create_dir_all(destination).map_err(|err| {
        format!(
            "failed to create curated plugins extraction directory {}: {err}",
            destination.display()
        )
    })?;
    let cursor = std::io::Cursor::new(bytes);
    let mut archive = ZipArchive::new(cursor)
        .map_err(|err| format!("failed to open curated plugins zip archive: {err}"))?;
    for index in 0..archive.len() {
        let mut entry = archive
            .by_index(index)
            .map_err(|err| format!("failed to read curated plugins zip entry: {err}"))?;
        // `enclosed_name` yields None for absolute paths or `..` traversal.
        let Some(relative_path) = entry.enclosed_name() else {
            return Err(format!(
                "curated plugins zip entry `{}` escapes extraction root",
                entry.name()
            ));
        };
        // Drop the first path component (the zipball's wrapper directory).
        let mut components = relative_path.components();
        let Some(std::path::Component::Normal(_)) = components.next() else {
            continue;
        };
        let output_relative = components.fold(PathBuf::new(), |mut path, component| {
            if let std::path::Component::Normal(segment) = component {
                path.push(segment);
            }
            path
        });
        if output_relative.as_os_str().is_empty() {
            continue;
        }
        let output_path = destination.join(&output_relative);
        if entry.is_dir() {
            std::fs::create_dir_all(&output_path).map_err(|err| {
                format!(
                    "failed to create curated plugins directory {}: {err}",
                    output_path.display()
                )
            })?;
            continue;
        }
        // Files may appear before their parent directory's entry; create it.
        if let Some(parent) = output_path.parent() {
            std::fs::create_dir_all(parent).map_err(|err| {
                format!(
                    "failed to create curated plugins directory {}: {err}",
                    parent.display()
                )
            })?;
        }
        let mut output = std::fs::File::create(&output_path).map_err(|err| {
            format!(
                "failed to create curated plugins file {}: {err}",
                output_path.display()
            )
        })?;
        std::io::copy(&mut entry, &mut output).map_err(|err| {
            format!(
                "failed to write curated plugins file {}: {err}",
                output_path.display()
            )
        })?;
        apply_zip_permissions(&entry, &output_path)?;
    }
    Ok(())
}
/// Re-apply the unix mode bits recorded for a zip entry to the extracted file;
/// entries without recorded mode bits are left untouched.
#[cfg(unix)]
fn apply_zip_permissions(entry: &zip::read::ZipFile<'_>, output_path: &Path) -> Result<(), String> {
    use std::os::unix::fs::PermissionsExt;
    match entry.unix_mode() {
        None => Ok(()),
        Some(mode) => {
            let permissions = std::fs::Permissions::from_mode(mode);
            std::fs::set_permissions(output_path, permissions).map_err(|err| {
                format!(
                    "failed to set permissions on curated plugins file {}: {err}",
                    output_path.display()
                )
            })
        }
    }
}
/// Non-unix fallback: zip unix mode bits have no meaning here, so this is a
/// deliberate no-op.
#[cfg(not(unix))]
fn apply_zip_permissions(
    _entry: &zip::read::ZipFile<'_>,
    _output_path: &Path,
) -> Result<(), String> {
    Ok(())
}

View File

@@ -0,0 +1,345 @@
use super::load_plugin_manifest;
use super::manifest::PLUGIN_MANIFEST_PATH;
use codex_utils_absolute_path::AbsolutePathBuf;
use std::fs;
use std::io;
use std::path::Path;
use std::path::PathBuf;
// Cache version label used for plugins installed from a local directory.
pub(crate) const DEFAULT_PLUGIN_VERSION: &str = "local";
// Plugin cache location, relative to the codex home directory.
pub(crate) const PLUGINS_CACHE_DIR: &str = "plugins/cache";
/// Error returned when a plugin id segment or key fails validation.
#[derive(Debug, thiserror::Error)]
pub enum PluginIdError {
    #[error("{0}")]
    Invalid(String),
}
/// Identifies a plugin by its name and the marketplace it came from;
/// rendered as `<plugin>@<marketplace>` (see `as_key`).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PluginId {
    pub plugin_name: String,
    pub marketplace_name: String,
}
impl PluginId {
    /// Build a plugin id after validating both segments against the
    /// filesystem-safe character rules.
    pub fn new(plugin_name: String, marketplace_name: String) -> Result<Self, PluginIdError> {
        validate_plugin_segment(&plugin_name, "plugin name").map_err(PluginIdError::Invalid)?;
        validate_plugin_segment(&marketplace_name, "marketplace name")
            .map_err(PluginIdError::Invalid)?;
        Ok(Self {
            plugin_name,
            marketplace_name,
        })
    }
    /// Parse a `<plugin>@<marketplace>` key, splitting on the last `@`.
    /// Validation errors are annotated with the full offending key.
    pub fn parse(plugin_key: &str) -> Result<Self, PluginIdError> {
        let invalid_key = || {
            PluginIdError::Invalid(format!(
                "invalid plugin key `{plugin_key}`; expected <plugin>@<marketplace>"
            ))
        };
        let (plugin_name, marketplace_name) =
            plugin_key.rsplit_once('@').ok_or_else(invalid_key)?;
        if plugin_name.is_empty() || marketplace_name.is_empty() {
            return Err(invalid_key());
        }
        Self::new(plugin_name.to_string(), marketplace_name.to_string()).map_err(|err| {
            let PluginIdError::Invalid(message) = err;
            PluginIdError::Invalid(format!("{message} in `{plugin_key}`"))
        })
    }
    /// Render the id back into its `<plugin>@<marketplace>` key form.
    pub fn as_key(&self) -> String {
        format!("{}@{}", self.plugin_name, self.marketplace_name)
    }
}
/// Outcome of a successful plugin installation: the id, the cache version the
/// copy was installed under, and its absolute on-disk location.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PluginInstallResult {
    pub plugin_id: PluginId,
    pub plugin_version: String,
    pub installed_path: AbsolutePathBuf,
}
/// On-disk plugin cache rooted at `<codex_home>/plugins/cache`, laid out as
/// `<marketplace>/<plugin>/<version>`.
#[derive(Debug, Clone)]
pub struct PluginStore {
    // Absolute cache root; guaranteed absolute at construction time.
    root: AbsolutePathBuf,
}
impl PluginStore {
    /// Create a store rooted at `<codex_home>/plugins/cache`.
    ///
    /// Panics if the joined path is not absolute — a broken invariant, since
    /// callers are expected to pass an absolute codex home.
    pub fn new(codex_home: PathBuf) -> Self {
        Self {
            root: AbsolutePathBuf::try_from(codex_home.join(PLUGINS_CACHE_DIR))
                .unwrap_or_else(|err| panic!("plugin cache root should be absolute: {err}")),
        }
    }
    /// Absolute root of the plugin cache.
    pub fn root(&self) -> &AbsolutePathBuf {
        &self.root
    }
    /// Cache directory for a plugin across all versions:
    /// `<root>/<marketplace>/<plugin>`.
    pub fn plugin_base_root(&self, plugin_id: &PluginId) -> AbsolutePathBuf {
        AbsolutePathBuf::try_from(
            self.root
                .as_path()
                .join(&plugin_id.marketplace_name)
                .join(&plugin_id.plugin_name),
        )
        .unwrap_or_else(|err| panic!("plugin cache path should resolve to an absolute path: {err}"))
    }
    /// Cache directory for one specific version of a plugin:
    /// `<root>/<marketplace>/<plugin>/<version>`.
    pub fn plugin_root(&self, plugin_id: &PluginId, plugin_version: &str) -> AbsolutePathBuf {
        AbsolutePathBuf::try_from(
            self.plugin_base_root(plugin_id)
                .as_path()
                .join(plugin_version),
        )
        .unwrap_or_else(|err| panic!("plugin cache path should resolve to an absolute path: {err}"))
    }
    /// The installed version of a plugin, determined by scanning its base
    /// directory for validly named version subdirectories. Returns `Some`
    /// only when exactly one such directory exists — zero or multiple
    /// versions yield `None`.
    pub fn active_plugin_version(&self, plugin_id: &PluginId) -> Option<String> {
        let mut discovered_versions = fs::read_dir(self.plugin_base_root(plugin_id).as_path())
            .ok()?
            .filter_map(Result::ok)
            .filter_map(|entry| {
                // Keep only directories whose names are valid UTF-8.
                entry.file_type().ok().filter(std::fs::FileType::is_dir)?;
                entry.file_name().into_string().ok()
            })
            .filter(|version| validate_plugin_segment(version, "plugin version").is_ok())
            .collect::<Vec<_>>();
        discovered_versions.sort_unstable();
        if discovered_versions.len() == 1 {
            discovered_versions.pop()
        } else {
            None
        }
    }
    /// Absolute path of the single installed version, when one exists.
    pub fn active_plugin_root(&self, plugin_id: &PluginId) -> Option<AbsolutePathBuf> {
        self.active_plugin_version(plugin_id)
            .map(|plugin_version| self.plugin_root(plugin_id, &plugin_version))
    }
    /// Whether the plugin has exactly one installed version.
    pub fn is_installed(&self, plugin_id: &PluginId) -> bool {
        self.active_plugin_version(plugin_id).is_some()
    }
    /// Install a plugin from a local directory under the default
    /// (`"local"`) cache version.
    pub fn install(
        &self,
        source_path: AbsolutePathBuf,
        plugin_id: PluginId,
    ) -> Result<PluginInstallResult, PluginStoreError> {
        self.install_with_version(source_path, plugin_id, DEFAULT_PLUGIN_VERSION.to_string())
    }
    /// Install a plugin from `source_path` under an explicit cache version.
    ///
    /// Validates that the source is a directory, that its manifest name
    /// matches `plugin_id.plugin_name`, and that the version string is
    /// filesystem-safe; then atomically replaces the plugin's cache entry.
    pub fn install_with_version(
        &self,
        source_path: AbsolutePathBuf,
        plugin_id: PluginId,
        plugin_version: String,
    ) -> Result<PluginInstallResult, PluginStoreError> {
        if !source_path.as_path().is_dir() {
            return Err(PluginStoreError::Invalid(format!(
                "plugin source path is not a directory: {}",
                source_path.display()
            )));
        }
        let plugin_name = plugin_name_for_source(source_path.as_path())?;
        if plugin_name != plugin_id.plugin_name {
            return Err(PluginStoreError::Invalid(format!(
                "plugin manifest name `{plugin_name}` does not match marketplace plugin name `{}`",
                plugin_id.plugin_name
            )));
        }
        validate_plugin_segment(&plugin_version, "plugin version")
            .map_err(PluginStoreError::Invalid)?;
        let installed_path = self.plugin_root(&plugin_id, &plugin_version);
        replace_plugin_root_atomically(
            source_path.as_path(),
            self.plugin_base_root(&plugin_id).as_path(),
            &plugin_version,
        )?;
        Ok(PluginInstallResult {
            plugin_id,
            plugin_version,
            installed_path,
        })
    }
    /// Remove every cached version of the plugin.
    pub fn uninstall(&self, plugin_id: &PluginId) -> Result<(), PluginStoreError> {
        remove_existing_target(self.plugin_base_root(plugin_id).as_path())
    }
}
/// Errors surfaced by [`PluginStore`] operations: filesystem failures with a
/// static context string, or validation failures with a full message.
#[derive(Debug, thiserror::Error)]
pub enum PluginStoreError {
    #[error("{context}: {source}")]
    Io {
        context: &'static str,
        #[source]
        source: io::Error,
    },
    #[error("{0}")]
    Invalid(String),
}
impl PluginStoreError {
    // Shorthand constructor for the `Io` variant.
    fn io(context: &'static str, source: io::Error) -> Self {
        Self::Io { context, source }
    }
}
/// Read and validate the plugin name declared in the manifest under
/// `source_path`, distinguishing a missing manifest file from an unparsable one.
fn plugin_name_for_source(source_path: &Path) -> Result<String, PluginStoreError> {
    let manifest_path = source_path.join(PLUGIN_MANIFEST_PATH);
    if !manifest_path.is_file() {
        return Err(PluginStoreError::Invalid(format!(
            "missing plugin manifest: {}",
            manifest_path.display()
        )));
    }
    let Some(manifest) = load_plugin_manifest(source_path) else {
        return Err(PluginStoreError::Invalid(format!(
            "missing or invalid plugin manifest: {}",
            manifest_path.display()
        )));
    };
    let plugin_name = manifest.name;
    validate_plugin_segment(&plugin_name, "plugin name").map_err(PluginStoreError::Invalid)?;
    Ok(plugin_name)
}
/// Validate one path segment of a plugin identifier (`kind` names the segment
/// in error messages): non-empty, ASCII alphanumerics plus `-` and `_` only,
/// which also blocks path separators and `..` traversal.
fn validate_plugin_segment(segment: &str, kind: &str) -> Result<(), String> {
    if segment.is_empty() {
        return Err(format!("invalid {kind}: must not be empty"));
    }
    let allowed = |ch: char| ch.is_ascii_alphanumeric() || matches!(ch, '-' | '_');
    if segment.chars().all(allowed) {
        Ok(())
    } else {
        Err(format!(
            "invalid {kind}: only ASCII letters, digits, `_`, and `-` are allowed"
        ))
    }
}
/// Delete a cache entry if present, handling both directory and file targets;
/// a missing path is a successful no-op.
fn remove_existing_target(path: &Path) -> Result<(), PluginStoreError> {
    if !path.exists() {
        return Ok(());
    }
    let removal = if path.is_dir() {
        fs::remove_dir_all(path)
    } else {
        fs::remove_file(path)
    };
    removal.map_err(|err| PluginStoreError::io("failed to remove existing plugin cache entry", err))
}
/// Replace the plugin's cache entry at `target_root` with a copy of `source`
/// installed under `<target_root>/<plugin_version>`.
///
/// The copy is first staged into a temp directory beside `target_root` so the
/// final activation is a rename (staying on the same filesystem). An existing
/// entry is renamed into a temp backup before activation; on a failed
/// activation the backup is restored, and if even the restore fails the backup
/// directory is kept on disk and its path reported in the error.
fn replace_plugin_root_atomically(
    source: &Path,
    target_root: &Path,
    plugin_version: &str,
) -> Result<(), PluginStoreError> {
    let Some(parent) = target_root.parent() else {
        return Err(PluginStoreError::Invalid(format!(
            "plugin cache path has no parent: {}",
            target_root.display()
        )));
    };
    fs::create_dir_all(parent)
        .map_err(|err| PluginStoreError::io("failed to create plugin cache directory", err))?;
    let Some(plugin_dir_name) = target_root.file_name() else {
        return Err(PluginStoreError::Invalid(format!(
            "plugin cache path has no directory name: {}",
            target_root.display()
        )));
    };
    // Stage the new copy next to the target so the activation rename is cheap.
    let staged_dir = tempfile::Builder::new()
        .prefix("plugin-install-")
        .tempdir_in(parent)
        .map_err(|err| {
            PluginStoreError::io("failed to create temporary plugin cache directory", err)
        })?;
    let staged_root = staged_dir.path().join(plugin_dir_name);
    let staged_version_root = staged_root.join(plugin_version);
    copy_dir_recursive(source, &staged_version_root)?;
    if target_root.exists() {
        let backup_dir = tempfile::Builder::new()
            .prefix("plugin-backup-")
            .tempdir_in(parent)
            .map_err(|err| {
                PluginStoreError::io("failed to create plugin cache backup directory", err)
            })?;
        let backup_root = backup_dir.path().join(plugin_dir_name);
        // Move the old entry aside, then swing the staged copy into place.
        fs::rename(target_root, &backup_root)
            .map_err(|err| PluginStoreError::io("failed to back up plugin cache entry", err))?;
        if let Err(err) = fs::rename(&staged_root, target_root) {
            // Activation failed: try to restore the previous entry.
            let rollback_result = fs::rename(&backup_root, target_root);
            return match rollback_result {
                Ok(()) => Err(PluginStoreError::io(
                    "failed to activate updated plugin cache entry",
                    err,
                )),
                Err(rollback_err) => {
                    // Rollback failed too: keep the backup dir on disk
                    // (disabling its auto-cleanup) and report its location.
                    let backup_path = backup_dir.keep().join(plugin_dir_name);
                    Err(PluginStoreError::Invalid(format!(
                        "failed to activate updated plugin cache entry at {}: {err}; failed to restore previous cache entry (left at {}): {rollback_err}",
                        target_root.display(),
                        backup_path.display()
                    )))
                }
            };
        }
    } else {
        fs::rename(&staged_root, target_root)
            .map_err(|err| PluginStoreError::io("failed to activate plugin cache entry", err))?;
    }
    Ok(())
}
/// Recursively copy directories and regular files from `source` into `target`;
/// entries of any other type are skipped.
fn copy_dir_recursive(source: &Path, target: &Path) -> Result<(), PluginStoreError> {
    fs::create_dir_all(target)
        .map_err(|err| PluginStoreError::io("failed to create plugin target directory", err))?;
    let entries = fs::read_dir(source)
        .map_err(|err| PluginStoreError::io("failed to read plugin source directory", err))?;
    for entry in entries {
        let entry =
            entry.map_err(|err| PluginStoreError::io("failed to enumerate plugin source", err))?;
        let file_type = entry
            .file_type()
            .map_err(|err| PluginStoreError::io("failed to inspect plugin source entry", err))?;
        let from = entry.path();
        let to = target.join(entry.file_name());
        if file_type.is_dir() {
            copy_dir_recursive(&from, &to)?;
        } else if file_type.is_file() {
            fs::copy(&from, &to)
                .map_err(|err| PluginStoreError::io("failed to copy plugin file", err))?;
        }
    }
    Ok(())
}
#[cfg(test)]
#[path = "store_tests.rs"]
mod tests;

View File

@@ -0,0 +1,192 @@
use super::*;
use pretty_assertions::assert_eq;
use tempfile::tempdir;
/// Lay down a minimal plugin fixture at `root/dir_name`: a manifest declaring
/// `manifest_name`, one skill file, and an empty MCP server config.
fn write_plugin(root: &Path, dir_name: &str, manifest_name: &str) {
    let plugin_root = root.join(dir_name);
    let manifest_dir = plugin_root.join(".codex-plugin");
    let skills_dir = plugin_root.join("skills");
    fs::create_dir_all(&manifest_dir).unwrap();
    fs::create_dir_all(&skills_dir).unwrap();
    let manifest = format!(r#"{{"name":"{manifest_name}"}}"#);
    fs::write(manifest_dir.join("plugin.json"), manifest).unwrap();
    fs::write(skills_dir.join("SKILL.md"), "skill").unwrap();
    fs::write(plugin_root.join(".mcp.json"), r#"{"mcpServers":{}}"#).unwrap();
}
// Installing from a local directory should land the copy under
// `plugins/cache/<marketplace>/<plugin>/local` and report that path.
#[test]
fn install_copies_plugin_into_default_marketplace() {
    let tmp = tempdir().unwrap();
    write_plugin(tmp.path(), "sample-plugin", "sample-plugin");
    let plugin_id = PluginId::new("sample-plugin".to_string(), "debug".to_string()).unwrap();
    let result = PluginStore::new(tmp.path().to_path_buf())
        .install(
            AbsolutePathBuf::try_from(tmp.path().join("sample-plugin")).unwrap(),
            plugin_id.clone(),
        )
        .unwrap();
    let installed_path = tmp.path().join("plugins/cache/debug/sample-plugin/local");
    assert_eq!(
        result,
        PluginInstallResult {
            plugin_id,
            plugin_version: "local".to_string(),
            installed_path: AbsolutePathBuf::try_from(installed_path.clone()).unwrap(),
        }
    );
    assert!(installed_path.join(".codex-plugin/plugin.json").is_file());
    assert!(installed_path.join("skills/SKILL.md").is_file());
}
// The cache directory is keyed on the manifest name, not the source dir name.
#[test]
fn install_uses_manifest_name_for_destination_and_key() {
    let tmp = tempdir().unwrap();
    write_plugin(tmp.path(), "source-dir", "manifest-name");
    let plugin_id = PluginId::new("manifest-name".to_string(), "market".to_string()).unwrap();
    let result = PluginStore::new(tmp.path().to_path_buf())
        .install(
            AbsolutePathBuf::try_from(tmp.path().join("source-dir")).unwrap(),
            plugin_id.clone(),
        )
        .unwrap();
    assert_eq!(
        result,
        PluginInstallResult {
            plugin_id,
            plugin_version: "local".to_string(),
            installed_path: AbsolutePathBuf::try_from(
                tmp.path().join("plugins/cache/market/manifest-name/local"),
            )
            .unwrap(),
        }
    );
}
// `plugin_root` is a pure path computation; no installation needed.
#[test]
fn plugin_root_derives_path_from_key_and_version() {
    let tmp = tempdir().unwrap();
    let store = PluginStore::new(tmp.path().to_path_buf());
    let plugin_id = PluginId::new("sample".to_string(), "debug".to_string()).unwrap();
    assert_eq!(
        store.plugin_root(&plugin_id, "local").as_path(),
        tmp.path().join("plugins/cache/debug/sample/local")
    );
}
// An explicit version (e.g. a pinned sha) replaces the default `local` label
// in the cache path and in the reported install result.
#[test]
fn install_with_version_uses_requested_cache_version() {
    let tmp = tempdir().unwrap();
    write_plugin(tmp.path(), "sample-plugin", "sample-plugin");
    let plugin_id =
        PluginId::new("sample-plugin".to_string(), "openai-curated".to_string()).unwrap();
    let plugin_version = "0123456789abcdef".to_string();
    let result = PluginStore::new(tmp.path().to_path_buf())
        .install_with_version(
            AbsolutePathBuf::try_from(tmp.path().join("sample-plugin")).unwrap(),
            plugin_id.clone(),
            plugin_version.clone(),
        )
        .unwrap();
    let installed_path = tmp.path().join(format!(
        "plugins/cache/openai-curated/sample-plugin/{plugin_version}"
    ));
    assert_eq!(
        result,
        PluginInstallResult {
            plugin_id,
            plugin_version,
            installed_path: AbsolutePathBuf::try_from(installed_path.clone()).unwrap(),
        }
    );
    assert!(installed_path.join(".codex-plugin/plugin.json").is_file());
}
// The active version is discovered from the single version directory on disk,
// without any install bookkeeping.
#[test]
fn active_plugin_version_reads_version_directory_name() {
    let tmp = tempdir().unwrap();
    write_plugin(
        &tmp.path().join("plugins/cache/debug"),
        "sample-plugin/local",
        "sample-plugin",
    );
    let store = PluginStore::new(tmp.path().to_path_buf());
    let plugin_id = PluginId::new("sample-plugin".to_string(), "debug".to_string()).unwrap();
    assert_eq!(
        store.active_plugin_version(&plugin_id),
        Some("local".to_string())
    );
    assert_eq!(
        store.active_plugin_root(&plugin_id).unwrap().as_path(),
        tmp.path().join("plugins/cache/debug/sample-plugin/local")
    );
}
// Path traversal attempts in either key segment must be rejected at parse
// time, before any filesystem path is derived.
#[test]
fn plugin_root_rejects_path_separators_in_key_segments() {
    let err = PluginId::parse("../../etc@debug").unwrap_err();
    assert_eq!(
        err.to_string(),
        "invalid plugin name: only ASCII letters, digits, `_`, and `-` are allowed in `../../etc@debug`"
    );
    let err = PluginId::parse("sample@../../etc").unwrap_err();
    assert_eq!(
        err.to_string(),
        "invalid marketplace name: only ASCII letters, digits, `_`, and `-` are allowed in `sample@../../etc`"
    );
}
// A hostile manifest name cannot steer the install destination.
#[test]
fn install_rejects_manifest_names_with_path_separators() {
    let tmp = tempdir().unwrap();
    write_plugin(tmp.path(), "source-dir", "../../etc");
    let err = PluginStore::new(tmp.path().to_path_buf())
        .install(
            AbsolutePathBuf::try_from(tmp.path().join("source-dir")).unwrap(),
            PluginId::new("source-dir".to_string(), "debug".to_string()).unwrap(),
        )
        .unwrap_err();
    assert_eq!(
        err.to_string(),
        "invalid plugin name: only ASCII letters, digits, `_`, and `-` are allowed"
    );
}
// Marketplace names are validated by `PluginId::new` itself.
#[test]
fn install_rejects_marketplace_names_with_path_separators() {
    let err = PluginId::new("sample-plugin".to_string(), "../../etc".to_string()).unwrap_err();
    assert_eq!(
        err.to_string(),
        "invalid marketplace name: only ASCII letters, digits, `_`, and `-` are allowed"
    );
}
// The manifest's declared name must agree with the marketplace plugin name.
#[test]
fn install_rejects_manifest_names_that_do_not_match_marketplace_plugin_name() {
    let tmp = tempdir().unwrap();
    write_plugin(tmp.path(), "source-dir", "manifest-name");
    let err = PluginStore::new(tmp.path().to_path_buf())
        .install(
            AbsolutePathBuf::try_from(tmp.path().join("source-dir")).unwrap(),
            PluginId::new("different-name".to_string(), "debug".to_string()).unwrap(),
        )
        .unwrap_err();
    assert_eq!(
        err.to_string(),
        "plugin manifest name `manifest-name` does not match marketplace plugin name `different-name`"
    );
}

View File

@@ -0,0 +1,100 @@
use serde_json::Value as JsonValue;
use std::collections::BTreeMap;
/// Scan flattened config edits for changes to plugin `enabled` flags.
///
/// Recognizes three key shapes: `plugins.<id>.enabled` with a boolean value,
/// `plugins.<id>` with an object carrying a boolean `enabled`, and a whole
/// `plugins` table of such objects. Later edits to the same plugin id win.
pub fn collect_plugin_enabled_candidates<'a>(
    edits: impl Iterator<Item = (&'a String, &'a JsonValue)>,
) -> BTreeMap<String, bool> {
    let mut pending_changes = BTreeMap::new();
    for (key_path, value) in edits {
        let segments: Vec<&str> = key_path.split('.').collect();
        match segments.as_slice() {
            ["plugins", plugin_id, "enabled"] => {
                if let Some(enabled) = value.as_bool() {
                    pending_changes.insert((*plugin_id).to_string(), enabled);
                }
            }
            ["plugins", plugin_id] => {
                if let Some(enabled) = value.get("enabled").and_then(JsonValue::as_bool) {
                    pending_changes.insert((*plugin_id).to_string(), enabled);
                }
            }
            ["plugins"] => {
                if let Some(entries) = value.as_object() {
                    for (plugin_id, plugin_value) in entries {
                        if let Some(enabled) =
                            plugin_value.get("enabled").and_then(JsonValue::as_bool)
                        {
                            pending_changes.insert(plugin_id.clone(), enabled);
                        }
                    }
                }
            }
            _ => {}
        }
    }
    pending_changes
}
#[cfg(test)]
mod tests {
    use super::collect_plugin_enabled_candidates;
    use pretty_assertions::assert_eq;
    use serde_json::json;
    use std::collections::BTreeMap;
    // All three supported key shapes contribute; non-boolean `enabled` values
    // and unrelated fields are ignored.
    #[test]
    fn collect_plugin_enabled_candidates_tracks_direct_and_table_writes() {
        let candidates = collect_plugin_enabled_candidates(
            [
                (&"plugins.sample@test.enabled".to_string(), &json!(true)),
                (
                    &"plugins.other@test".to_string(),
                    &json!({ "enabled": false, "ignored": true }),
                ),
                (
                    &"plugins".to_string(),
                    &json!({
                        "nested@test": { "enabled": true },
                        "skip@test": { "name": "skip" },
                    }),
                ),
            ]
            .into_iter(),
        );
        assert_eq!(
            candidates,
            BTreeMap::from([
                ("nested@test".to_string(), true),
                ("other@test".to_string(), false),
                ("sample@test".to_string(), true),
            ])
        );
    }
    // When the same plugin id is written twice, the later edit wins.
    #[test]
    fn collect_plugin_enabled_candidates_uses_last_write_for_same_plugin() {
        let candidates = collect_plugin_enabled_candidates(
            [
                (&"plugins.sample@test.enabled".to_string(), &json!(true)),
                (
                    &"plugins.sample@test".to_string(),
                    &json!({ "enabled": false }),
                ),
            ]
            .into_iter(),
        );
        assert_eq!(
            candidates,
            BTreeMap::from([("sample@test".to_string(), false)])
        );
    }
}

View File

@@ -0,0 +1,951 @@
use crate::plugins::plugin_namespace_for_skill_path;
use crate::skills::model::SkillDependencies;
use crate::skills::model::SkillError;
use crate::skills::model::SkillInterface;
use crate::skills::model::SkillLoadOutcome;
use crate::skills::model::SkillManagedNetworkOverride;
use crate::skills::model::SkillMetadata;
use crate::skills::model::SkillPolicy;
use crate::skills::model::SkillToolDependency;
use crate::skills::system::system_cache_root_dir;
use codex_app_server_protocol::ConfigLayerSource;
use codex_config::ConfigLayerStack;
use codex_config::ConfigLayerStackOrdering;
use codex_config::merge_toml_values;
use codex_protocol::models::FileSystemPermissions;
use codex_protocol::models::MacOsSeatbeltProfileExtensions;
use codex_protocol::models::NetworkPermissions;
use codex_protocol::models::PermissionProfile;
use codex_protocol::protocol::Product;
use codex_protocol::protocol::SkillScope;
use codex_utils_absolute_path::AbsolutePathBufGuard;
use dirs::home_dir;
use dunce::canonicalize as canonicalize_path;
use serde::Deserialize;
use std::collections::HashSet;
use std::collections::VecDeque;
use std::error::Error;
use std::fmt;
use std::fs;
use std::io;
use std::path::Component;
use std::path::Path;
use std::path::PathBuf;
use toml::Value as TomlValue;
use tracing::error;
/// YAML frontmatter parsed from the top of a `SKILL.md` file.
#[derive(Debug, Deserialize)]
struct SkillFrontmatter {
    #[serde(default)]
    name: Option<String>,
    #[serde(default)]
    description: Option<String>,
    #[serde(default)]
    metadata: SkillFrontmatterMetadata,
}
/// Optional `metadata` table inside the frontmatter.
#[derive(Debug, Default, Deserialize)]
struct SkillFrontmatterMetadata {
    // Serialized as `short-description` (kebab-case) in the YAML.
    #[serde(default, rename = "short-description")]
    short_description: Option<String>,
}
/// Raw deserialized shape of a skill's `agents/openai.yaml` metadata file;
/// every section is optional.
#[derive(Debug, Default, Deserialize)]
struct SkillMetadataFile {
    #[serde(default)]
    interface: Option<Interface>,
    #[serde(default)]
    dependencies: Option<Dependencies>,
    #[serde(default)]
    policy: Option<Policy>,
    #[serde(default)]
    permissions: Option<SkillPermissionProfile>,
}
/// Metadata after conversion into the crate's skill model types.
#[derive(Default)]
struct LoadedSkillMetadata {
    interface: Option<SkillInterface>,
    dependencies: Option<SkillDependencies>,
    policy: Option<SkillPolicy>,
    permission_profile: Option<PermissionProfile>,
    managed_network_override: Option<SkillManagedNetworkOverride>,
}
/// `permissions` section of the skill metadata file: optional network,
/// filesystem, and macOS seatbelt settings.
#[derive(Debug, Default, Deserialize, PartialEq, Eq)]
struct SkillPermissionProfile {
    #[serde(default)]
    network: Option<SkillNetworkPermissions>,
    #[serde(default)]
    file_system: Option<FileSystemPermissions>,
    #[serde(default)]
    macos: Option<MacOsSeatbeltProfileExtensions>,
}
/// Network permission settings declared by a skill; each field is optional so
/// absent keys can be distinguished from explicit values.
#[derive(Debug, Default, Deserialize, PartialEq, Eq)]
struct SkillNetworkPermissions {
    #[serde(default)]
    enabled: Option<bool>,
    #[serde(default)]
    allowed_domains: Option<Vec<String>>,
    #[serde(default)]
    denied_domains: Option<Vec<String>>,
}
/// `interface` section: display-oriented fields for presenting the skill.
#[derive(Debug, Default, Deserialize)]
struct Interface {
    display_name: Option<String>,
    short_description: Option<String>,
    icon_small: Option<PathBuf>,
    icon_large: Option<PathBuf>,
    brand_color: Option<String>,
    default_prompt: Option<String>,
}
/// `dependencies` section: currently only a list of tool dependencies.
#[derive(Debug, Default, Deserialize)]
struct Dependencies {
    #[serde(default)]
    tools: Vec<DependencyTool>,
}
/// `policy` section: invocation and product gating for the skill.
#[derive(Debug, Deserialize)]
struct Policy {
    #[serde(default)]
    allow_implicit_invocation: Option<bool>,
    #[serde(default)]
    products: Vec<Product>,
}
/// One entry of `dependencies.tools`; all fields optional in the raw YAML.
#[derive(Debug, Default, Deserialize)]
struct DependencyTool {
    // Serialized as `type`; renamed because `type` is a Rust keyword.
    #[serde(rename = "type")]
    kind: Option<String>,
    value: Option<String>,
    description: Option<String>,
    transport: Option<String>,
    command: Option<String>,
    url: Option<String>,
}
// Well-known file and directory names for skill discovery.
const SKILLS_FILENAME: &str = "SKILL.md";
const AGENTS_DIR_NAME: &str = ".agents";
const SKILLS_METADATA_DIR: &str = "agents";
const SKILLS_METADATA_FILENAME: &str = "openai.yaml";
const SKILLS_DIR_NAME: &str = "skills";
// Field length limits applied while parsing skill metadata; description-sized
// fields share one cap, name-sized fields another.
const MAX_NAME_LEN: usize = 64;
const MAX_DESCRIPTION_LEN: usize = 1024;
const MAX_SHORT_DESCRIPTION_LEN: usize = MAX_DESCRIPTION_LEN;
const MAX_DEFAULT_PROMPT_LEN: usize = MAX_DESCRIPTION_LEN;
const MAX_DEPENDENCY_TYPE_LEN: usize = MAX_NAME_LEN;
const MAX_DEPENDENCY_TRANSPORT_LEN: usize = MAX_NAME_LEN;
const MAX_DEPENDENCY_VALUE_LEN: usize = MAX_DESCRIPTION_LEN;
const MAX_DEPENDENCY_DESCRIPTION_LEN: usize = MAX_DESCRIPTION_LEN;
const MAX_DEPENDENCY_COMMAND_LEN: usize = MAX_DESCRIPTION_LEN;
const MAX_DEPENDENCY_URL_LEN: usize = MAX_DESCRIPTION_LEN;
// Traversal depth from the skills root.
const MAX_SCAN_DEPTH: usize = 6;
// Cap on directories visited per root, bounding scan cost on large trees.
const MAX_SKILLS_DIRS_PER_ROOT: usize = 2000;
// Filesystem markers treated as evidence of a project root.
const DEFAULT_PROJECT_ROOT_MARKERS: &[&str] =
    &[".git", ".hg", ".svn", "package.json", "Cargo.toml"];
/// Reasons a `SKILL.md` file can fail to parse into a skill definition.
#[derive(Debug)]
enum SkillParseError {
    Read(std::io::Error),
    MissingFrontmatter,
    InvalidYaml(serde_yaml::Error),
    MissingField(&'static str),
    InvalidField { field: &'static str, reason: String },
}
impl fmt::Display for SkillParseError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
SkillParseError::Read(e) => write!(f, "failed to read file: {e}"),
SkillParseError::MissingFrontmatter => {
write!(f, "missing YAML frontmatter delimited by ---")
}
SkillParseError::InvalidYaml(e) => write!(f, "invalid YAML: {e}"),
SkillParseError::MissingField(field) => write!(f, "missing field `{field}`"),
SkillParseError::InvalidField { field, reason } => {
write!(f, "invalid {field}: {reason}")
}
}
}
}
impl Error for SkillParseError {}
/// A directory to scan for skills together with the scope its skills belong to.
pub struct SkillRoot {
    pub path: PathBuf,
    pub scope: SkillScope,
}
/// Discover skills under every root, deduplicate by `SKILL.md` path (first
/// root wins, so root order encodes precedence), and sort the result by
/// scope priority, then name, then path for a stable ordering.
pub(crate) fn load_skills_from_roots<I>(roots: I) -> SkillLoadOutcome
where
    I: IntoIterator<Item = SkillRoot>,
{
    let mut outcome = SkillLoadOutcome::default();
    for root in roots {
        discover_skills_under_root(&root.path, root.scope, &mut outcome);
    }
    // Keep only the first occurrence of each SKILL.md path.
    let mut seen: HashSet<PathBuf> = HashSet::new();
    outcome
        .skills
        .retain(|skill| seen.insert(skill.path_to_skills_md.clone()));
    fn scope_rank(scope: SkillScope) -> u8 {
        // Higher-priority scopes first (matches root scan order for dedupe).
        match scope {
            SkillScope::Repo => 0,
            SkillScope::User => 1,
            SkillScope::System => 2,
            SkillScope::Admin => 3,
        }
    }
    outcome.skills.sort_by(|a, b| {
        scope_rank(a.scope)
            .cmp(&scope_rank(b.scope))
            .then_with(|| a.name.cmp(&b.name))
            .then_with(|| a.path_to_skills_md.cmp(&b.path_to_skills_md))
    });
    outcome
}
/// Compute the ordered skill roots for `cwd` using the real home directory.
pub(crate) fn skill_roots(
    config_layer_stack: &ConfigLayerStack,
    cwd: &Path,
    plugin_skill_roots: Vec<PathBuf>,
) -> Vec<SkillRoot> {
    let home = home_dir();
    skill_roots_with_home_dir(config_layer_stack, cwd, home.as_deref(), plugin_skill_roots)
}
/// Assemble skill roots from the config layer stack, plugin-provided roots
/// (user scope), and repo `.agents/skills` directories, deduping by path.
fn skill_roots_with_home_dir(
    config_layer_stack: &ConfigLayerStack,
    cwd: &Path,
    home_dir: Option<&Path>,
    plugin_skill_roots: Vec<PathBuf>,
) -> Vec<SkillRoot> {
    let plugin_roots = plugin_skill_roots.into_iter().map(|path| SkillRoot {
        path,
        scope: SkillScope::User,
    });

    let mut roots = skill_roots_from_layer_stack_inner(config_layer_stack, home_dir);
    roots.extend(plugin_roots);
    roots.extend(repo_agents_skill_roots(config_layer_stack, cwd));
    dedupe_skill_roots_by_path(&mut roots);
    roots
}
/// Collect skill roots contributed by each config layer, iterating the stack
/// highest-precedence first (disabled layers included). Project layers yield
/// repo-scoped roots, user layers yield user- and system-scoped roots, and
/// system layers yield admin-scoped roots; other layer kinds contribute none.
fn skill_roots_from_layer_stack_inner(
    config_layer_stack: &ConfigLayerStack,
    home_dir: Option<&Path>,
) -> Vec<SkillRoot> {
    let mut roots = Vec::new();
    for layer in config_layer_stack.get_layers(
        ConfigLayerStackOrdering::HighestPrecedenceFirst,
        /*include_disabled*/ true,
    ) {
        // Layers without a config folder on disk cannot host skills.
        let Some(config_folder) = layer.config_folder() else {
            continue;
        };
        match &layer.name {
            ConfigLayerSource::Project { .. } => {
                roots.push(SkillRoot {
                    path: config_folder.as_path().join(SKILLS_DIR_NAME),
                    scope: SkillScope::Repo,
                });
            }
            ConfigLayerSource::User { .. } => {
                // Deprecated user skills location (`$CODEX_HOME/skills`), kept for backward
                // compatibility.
                roots.push(SkillRoot {
                    path: config_folder.as_path().join(SKILLS_DIR_NAME),
                    scope: SkillScope::User,
                });
                // `$HOME/.agents/skills` (user-installed skills).
                if let Some(home_dir) = home_dir {
                    roots.push(SkillRoot {
                        path: home_dir.join(AGENTS_DIR_NAME).join(SKILLS_DIR_NAME),
                        scope: SkillScope::User,
                    });
                }
                // Embedded system skills are cached under `$CODEX_HOME/skills/.system` and are a
                // special case (not a config layer).
                roots.push(SkillRoot {
                    path: system_cache_root_dir(config_folder.as_path()),
                    scope: SkillScope::System,
                });
            }
            ConfigLayerSource::System { .. } => {
                // The system config layer lives under `/etc/codex/` on Unix, so treat
                // `/etc/codex/skills` as admin-scoped skills.
                roots.push(SkillRoot {
                    path: config_folder.as_path().join(SKILLS_DIR_NAME),
                    scope: SkillScope::Admin,
                });
            }
            ConfigLayerSource::Mdm { .. }
            | ConfigLayerSource::SessionFlags
            | ConfigLayerSource::LegacyManagedConfigTomlFromFile { .. }
            | ConfigLayerSource::LegacyManagedConfigTomlFromMdm => {}
        }
    }
    roots
}
/// Find `.agents/skills` directories between the project root and `cwd`
/// (inclusive); each existing directory becomes a repo-scoped skill root.
fn repo_agents_skill_roots(config_layer_stack: &ConfigLayerStack, cwd: &Path) -> Vec<SkillRoot> {
    let markers = project_root_markers_from_stack(config_layer_stack);
    let project_root = find_project_root(cwd, &markers);
    dirs_between_project_root_and_cwd(cwd, &project_root)
        .into_iter()
        .map(|dir| dir.join(AGENTS_DIR_NAME).join(SKILLS_DIR_NAME))
        .filter(|candidate| candidate.is_dir())
        .map(|path| SkillRoot {
            path,
            scope: SkillScope::Repo,
        })
        .collect()
}
/// Determine the project-root marker list from the merged (non-project)
/// config layers, falling back to `DEFAULT_PROJECT_ROOT_MARKERS` when the key
/// is absent or invalid.
fn project_root_markers_from_stack(config_layer_stack: &ConfigLayerStack) -> Vec<String> {
    // Merge enabled layers lowest-precedence first so later layers override.
    let mut merged = TomlValue::Table(toml::map::Map::new());
    for layer in config_layer_stack.get_layers(
        ConfigLayerStackOrdering::LowestPrecedenceFirst,
        /*include_disabled*/ false,
    ) {
        // Project layers are skipped: markers define where the project root
        // is, so they must come from outside the project itself.
        if matches!(layer.name, ConfigLayerSource::Project { .. }) {
            continue;
        }
        merge_toml_values(&mut merged, &layer.config);
    }
    match project_root_markers_from_config(&merged) {
        Ok(Some(markers)) => markers,
        Ok(None) => default_project_root_markers(),
        Err(err) => {
            tracing::warn!("invalid project_root_markers: {err}");
            default_project_root_markers()
        }
    }
}
/// Read `project_root_markers` from a merged config value.
///
/// Returns `Ok(None)` when the key (or a top-level table) is absent,
/// `Ok(Some(markers))` for a valid string array (possibly empty), and an
/// `InvalidData` error when the value is not an array of strings.
fn project_root_markers_from_config(config: &TomlValue) -> io::Result<Option<Vec<String>>> {
    let invalid = || {
        io::Error::new(
            io::ErrorKind::InvalidData,
            "project_root_markers must be an array of strings",
        )
    };
    let Some(markers_value) = config
        .as_table()
        .and_then(|table| table.get("project_root_markers"))
    else {
        return Ok(None);
    };
    let TomlValue::Array(entries) = markers_value else {
        return Err(invalid());
    };
    entries
        .iter()
        .map(|entry| entry.as_str().map(str::to_string).ok_or_else(invalid))
        .collect::<io::Result<Vec<String>>>()
        .map(Some)
}
/// Owned copy of the built-in project-root marker list.
fn default_project_root_markers() -> Vec<String> {
    DEFAULT_PROJECT_ROOT_MARKERS
        .iter()
        .map(|marker| (*marker).to_string())
        .collect()
}
/// Walk up from `cwd` looking for the first ancestor containing any marker
/// entry; fall back to `cwd` itself when the marker list is empty or no
/// ancestor matches.
fn find_project_root(cwd: &Path, project_root_markers: &[String]) -> PathBuf {
    if project_root_markers.is_empty() {
        return cwd.to_path_buf();
    }
    cwd.ancestors()
        .find(|ancestor| {
            project_root_markers
                .iter()
                .any(|marker| ancestor.join(marker).exists())
        })
        .map(Path::to_path_buf)
        .unwrap_or_else(|| cwd.to_path_buf())
}
/// List the directories from `project_root` down to `cwd`, inclusive on both
/// ends and ordered root-first. When `project_root` is not an ancestor of
/// `cwd`, every ancestor of `cwd` is returned.
fn dirs_between_project_root_and_cwd(cwd: &Path, project_root: &Path) -> Vec<PathBuf> {
    let mut dirs: Vec<PathBuf> = Vec::new();
    for ancestor in cwd.ancestors() {
        dirs.push(ancestor.to_path_buf());
        // Include the project root itself, then stop climbing.
        if ancestor == project_root {
            break;
        }
    }
    dirs.reverse();
    dirs
}
/// Drop later roots whose path duplicates an earlier one, preserving order.
fn dedupe_skill_roots_by_path(roots: &mut Vec<SkillRoot>) {
    let mut seen_paths: HashSet<PathBuf> = HashSet::new();
    roots.retain(|root| seen_paths.insert(root.path.clone()));
}
/// Breadth-first scan of `root` for SKILL.md files, appending parsed skills
/// (and, for non-system scopes, parse errors) to `outcome`.
///
/// Hidden entries are skipped; symlinked directories are followed only for
/// repo/user/admin scopes; traversal is bounded by `MAX_SCAN_DEPTH` and
/// `MAX_SKILLS_DIRS_PER_ROOT`, and canonicalized paths in `visited_dirs`
/// prevent revisiting directories (e.g. via symlink cycles).
fn discover_skills_under_root(root: &Path, scope: SkillScope, outcome: &mut SkillLoadOutcome) {
    // A root that cannot be canonicalized or is not a directory yields nothing.
    let Ok(root) = canonicalize_path(root) else {
        return;
    };
    if !root.is_dir() {
        return;
    }
    // Enqueue a directory for scanning, enforcing the depth/count caps and
    // deduplicating via `visited_dirs`.
    fn enqueue_dir(
        queue: &mut VecDeque<(PathBuf, usize)>,
        visited_dirs: &mut HashSet<PathBuf>,
        truncated_by_dir_limit: &mut bool,
        path: PathBuf,
        depth: usize,
    ) {
        if depth > MAX_SCAN_DEPTH {
            return;
        }
        if visited_dirs.len() >= MAX_SKILLS_DIRS_PER_ROOT {
            *truncated_by_dir_limit = true;
            return;
        }
        if visited_dirs.insert(path.clone()) {
            queue.push_back((path, depth));
        }
    }
    // Follow symlinked directories for user, admin, and repo skills. System skills are written by Codex itself.
    let follow_symlinks = matches!(
        scope,
        SkillScope::Repo | SkillScope::User | SkillScope::Admin
    );
    let mut visited_dirs: HashSet<PathBuf> = HashSet::new();
    visited_dirs.insert(root.clone());
    let mut queue: VecDeque<(PathBuf, usize)> = VecDeque::from([(root.clone(), 0)]);
    let mut truncated_by_dir_limit = false;
    while let Some((dir, depth)) = queue.pop_front() {
        let entries = match fs::read_dir(&dir) {
            Ok(entries) => entries,
            Err(e) => {
                // An unreadable directory is logged and skipped, not fatal.
                error!("failed to read skills dir {}: {e:#}", dir.display());
                continue;
            }
        };
        for entry in entries.flatten() {
            let path = entry.path();
            // Entries with non-UTF-8 names are skipped.
            let file_name = match path.file_name().and_then(|f| f.to_str()) {
                Some(name) => name,
                None => continue,
            };
            // Skip hidden files and directories.
            if file_name.starts_with('.') {
                continue;
            }
            let Ok(file_type) = entry.file_type() else {
                continue;
            };
            if file_type.is_symlink() {
                if !follow_symlinks {
                    continue;
                }
                // Follow the symlink to determine what it points to.
                let metadata = match fs::metadata(&path) {
                    Ok(metadata) => metadata,
                    Err(e) => {
                        error!(
                            "failed to stat skills entry {} (symlink): {e:#}",
                            path.display()
                        );
                        continue;
                    }
                };
                if metadata.is_dir() {
                    let Ok(resolved_dir) = canonicalize_path(&path) else {
                        continue;
                    };
                    enqueue_dir(
                        &mut queue,
                        &mut visited_dirs,
                        &mut truncated_by_dir_limit,
                        resolved_dir,
                        depth + 1,
                    );
                    continue;
                }
                // Symlinks to non-directories are ignored.
                continue;
            }
            if file_type.is_dir() {
                let Ok(resolved_dir) = canonicalize_path(&path) else {
                    continue;
                };
                enqueue_dir(
                    &mut queue,
                    &mut visited_dirs,
                    &mut truncated_by_dir_limit,
                    resolved_dir,
                    depth + 1,
                );
                continue;
            }
            if file_type.is_file() && file_name == SKILLS_FILENAME {
                match parse_skill_file(&path, scope) {
                    Ok(skill) => {
                        outcome.skills.push(skill);
                    }
                    Err(err) => {
                        // Parse errors for system-scoped skills are not
                        // surfaced in the outcome.
                        if scope != SkillScope::System {
                            outcome.errors.push(SkillError {
                                path,
                                message: err.to_string(),
                            });
                        }
                    }
                }
            }
        }
    }
    if truncated_by_dir_limit {
        tracing::warn!(
            "skills scan truncated after {} directories (root: {})",
            MAX_SKILLS_DIRS_PER_ROOT,
            root.display()
        );
    }
}
/// Parse the skill file at `path` into `SkillMetadata`.
///
/// The YAML frontmatter supplies `name` (falling back to the parent directory
/// name) and `description`; optional sidecar metadata is merged in via
/// `load_skill_metadata`. Name/description lengths are validated before the
/// metadata is returned.
fn parse_skill_file(path: &Path, scope: SkillScope) -> Result<SkillMetadata, SkillParseError> {
    let contents = fs::read_to_string(path).map_err(SkillParseError::Read)?;
    let frontmatter = extract_frontmatter(&contents).ok_or(SkillParseError::MissingFrontmatter)?;
    let parsed: SkillFrontmatter =
        serde_yaml::from_str(&frontmatter).map_err(SkillParseError::InvalidYaml)?;
    // Use the directory-derived name when `name` is missing or blank.
    let base_name = parsed
        .name
        .as_deref()
        .map(sanitize_single_line)
        .filter(|value| !value.is_empty())
        .unwrap_or_else(|| default_skill_name(path));
    // Plugin-provided skills get a `namespace:` prefix.
    let name = namespaced_skill_name(path, &base_name);
    let description = parsed
        .description
        .as_deref()
        .map(sanitize_single_line)
        .unwrap_or_default();
    let short_description = parsed
        .metadata
        .short_description
        .as_deref()
        .map(sanitize_single_line)
        .filter(|value| !value.is_empty());
    let LoadedSkillMetadata {
        interface,
        dependencies,
        policy,
        permission_profile,
        managed_network_override,
    } = load_skill_metadata(path);
    validate_len(&name, MAX_NAME_LEN, "name")?;
    validate_len(&description, MAX_DESCRIPTION_LEN, "description")?;
    if let Some(short_description) = short_description.as_deref() {
        validate_len(
            short_description,
            MAX_SHORT_DESCRIPTION_LEN,
            "metadata.short-description",
        )?;
    }
    // Store the canonical path when resolvable (used for dedupe and lookups).
    let resolved_path = canonicalize_path(path).unwrap_or_else(|_| path.to_path_buf());
    Ok(SkillMetadata {
        name,
        description,
        short_description,
        interface,
        dependencies,
        policy,
        permission_profile,
        managed_network_override,
        path_to_skills_md: resolved_path,
        scope,
    })
}
/// Derive a skill name from the parent directory of the skill file, falling
/// back to the literal "skill" when no usable name exists.
fn default_skill_name(path: &Path) -> String {
    let from_parent_dir = path
        .parent()
        .and_then(Path::file_name)
        .and_then(|name| name.to_str())
        .map(sanitize_single_line)
        .filter(|value| !value.is_empty());
    from_parent_dir.unwrap_or_else(|| "skill".to_string())
}
/// Prefix `base_name` with its plugin namespace (as `namespace:name`) when
/// the skill path belongs to a plugin; otherwise return `base_name` as-is.
fn namespaced_skill_name(path: &Path, base_name: &str) -> String {
    match plugin_namespace_for_skill_path(path) {
        Some(namespace) => format!("{namespace}:{base_name}"),
        None => base_name.to_string(),
    }
}
/// Load optional sidecar metadata for a skill (interface, dependencies,
/// policy, permissions) from the metadata file next to SKILL.md.
///
/// Fail open: any read or parse failure is logged and yields the default
/// (empty) metadata rather than blocking the skill from loading.
fn load_skill_metadata(skill_path: &Path) -> LoadedSkillMetadata {
    // Fail open: optional metadata should not block loading SKILL.md.
    let Some(skill_dir) = skill_path.parent() else {
        return LoadedSkillMetadata::default();
    };
    let metadata_path = skill_dir
        .join(SKILLS_METADATA_DIR)
        .join(SKILLS_METADATA_FILENAME);
    if !metadata_path.exists() {
        return LoadedSkillMetadata::default();
    }
    let contents = match fs::read_to_string(&metadata_path) {
        Ok(contents) => contents,
        Err(error) => {
            tracing::warn!(
                "ignoring {path}: failed to read {label}: {error}",
                path = metadata_path.display(),
                label = SKILLS_METADATA_FILENAME
            );
            return LoadedSkillMetadata::default();
        }
    };
    let parsed: SkillMetadataFile = {
        // Scope relative-path resolution during deserialization to the skill
        // directory.
        let _guard = AbsolutePathBufGuard::new(skill_dir);
        match serde_yaml::from_str(&contents) {
            Ok(parsed) => parsed,
            Err(error) => {
                tracing::warn!(
                    "ignoring {path}: invalid {label}: {error}",
                    path = metadata_path.display(),
                    label = SKILLS_METADATA_FILENAME
                );
                return LoadedSkillMetadata::default();
            }
        }
    };
    let SkillMetadataFile {
        interface,
        dependencies,
        policy,
        permissions,
    } = parsed;
    let (permission_profile, managed_network_override) = normalize_permissions(permissions);
    LoadedSkillMetadata {
        interface: resolve_interface(interface, skill_dir),
        dependencies: resolve_dependencies(dependencies),
        policy: resolve_policy(policy),
        permission_profile,
        managed_network_override,
    }
}
/// Split raw skill permissions into a `PermissionProfile` and a managed
/// network override, dropping each when it carries no effective settings.
fn normalize_permissions(
    permissions: Option<SkillPermissionProfile>,
) -> (
    Option<PermissionProfile>,
    Option<SkillManagedNetworkOverride>,
) {
    let Some(permissions) = permissions else {
        return (None, None);
    };
    // The override is kept only when at least one domain list is present.
    let managed_network_override = permissions
        .network
        .as_ref()
        .map(|network| SkillManagedNetworkOverride {
            allowed_domains: network.allowed_domains.clone(),
            denied_domains: network.denied_domains.clone(),
        })
        .filter(SkillManagedNetworkOverride::has_domain_overrides);
    // Empty sub-sections are normalized away so an all-empty profile becomes
    // `None` below.
    let permission_profile = PermissionProfile {
        network: permissions.network.and_then(|network| {
            let network = NetworkPermissions {
                enabled: network.enabled,
            };
            (!network.is_empty()).then_some(network)
        }),
        file_system: permissions
            .file_system
            .filter(|file_system| !file_system.is_empty()),
        macos: permissions.macos,
    };
    (
        (!permission_profile.is_empty()).then_some(permission_profile),
        managed_network_override,
    )
}
/// Validate and normalize the optional interface metadata; returns `None`
/// when the section is absent or every field is rejected/empty.
fn resolve_interface(interface: Option<Interface>, skill_dir: &Path) -> Option<SkillInterface> {
    let raw = interface?;
    let resolved = SkillInterface {
        display_name: resolve_str(raw.display_name, MAX_NAME_LEN, "interface.display_name"),
        short_description: resolve_str(
            raw.short_description,
            MAX_SHORT_DESCRIPTION_LEN,
            "interface.short_description",
        ),
        icon_small: resolve_asset_path(skill_dir, "interface.icon_small", raw.icon_small),
        icon_large: resolve_asset_path(skill_dir, "interface.icon_large", raw.icon_large),
        brand_color: resolve_color_str(raw.brand_color, "interface.brand_color"),
        default_prompt: resolve_str(
            raw.default_prompt,
            MAX_DEFAULT_PROMPT_LEN,
            "interface.default_prompt",
        ),
    };
    let is_empty = resolved.display_name.is_none()
        && resolved.short_description.is_none()
        && resolved.icon_small.is_none()
        && resolved.icon_large.is_none()
        && resolved.brand_color.is_none()
        && resolved.default_prompt.is_none();
    if is_empty { None } else { Some(resolved) }
}
/// Validate declared tool dependencies, dropping invalid entries; returns
/// `None` when the section is absent or no tool survives validation.
fn resolve_dependencies(dependencies: Option<Dependencies>) -> Option<SkillDependencies> {
    let tools: Vec<SkillToolDependency> = dependencies?
        .tools
        .into_iter()
        .filter_map(resolve_dependency_tool)
        .collect();
    (!tools.is_empty()).then_some(SkillDependencies { tools })
}
/// Convert the raw policy section into a `SkillPolicy`, preserving fields
/// verbatim.
fn resolve_policy(policy: Option<Policy>) -> Option<SkillPolicy> {
    let policy = policy?;
    Some(SkillPolicy {
        allow_implicit_invocation: policy.allow_implicit_invocation,
        products: policy.products,
    })
}
/// Convert a raw `DependencyTool` entry into a `SkillToolDependency`.
///
/// `type` and `value` are required — the whole entry is dropped (with a
/// warning from the resolver helpers) when either is missing or invalid.
/// Optional fields are dropped individually on validation failure.
fn resolve_dependency_tool(tool: DependencyTool) -> Option<SkillToolDependency> {
    let r#type = resolve_required_str(
        tool.kind,
        MAX_DEPENDENCY_TYPE_LEN,
        "dependencies.tools.type",
    )?;
    let value = resolve_required_str(
        tool.value,
        MAX_DEPENDENCY_VALUE_LEN,
        "dependencies.tools.value",
    )?;
    let description = resolve_str(
        tool.description,
        MAX_DEPENDENCY_DESCRIPTION_LEN,
        "dependencies.tools.description",
    );
    let transport = resolve_str(
        tool.transport,
        MAX_DEPENDENCY_TRANSPORT_LEN,
        "dependencies.tools.transport",
    );
    let command = resolve_str(
        tool.command,
        MAX_DEPENDENCY_COMMAND_LEN,
        "dependencies.tools.command",
    );
    let url = resolve_str(tool.url, MAX_DEPENDENCY_URL_LEN, "dependencies.tools.url");
    Some(SkillToolDependency {
        r#type,
        value,
        description,
        transport,
        command,
        url,
    })
}
/// Resolve an icon path from skill metadata to a path under the skill's
/// `assets/` directory.
///
/// Absolute paths, `..` components, non-normal components, and paths not
/// rooted at `assets/` are rejected (logged and dropped) so icons cannot
/// escape the skill directory.
fn resolve_asset_path(
    skill_dir: &Path,
    field: &'static str,
    path: Option<PathBuf>,
) -> Option<PathBuf> {
    // Icons must be relative paths under the skill's assets/ directory; otherwise return None.
    let path = path?;
    if path.as_os_str().is_empty() {
        return None;
    }
    let assets_dir = skill_dir.join("assets");
    if path.is_absolute() {
        tracing::warn!(
            "ignoring {field}: icon must be a relative assets path (not {})",
            assets_dir.display()
        );
        return None;
    }
    // Strip `.` components; reject `..` and anything else non-normal.
    let mut normalized = PathBuf::new();
    for component in path.components() {
        match component {
            Component::CurDir => {}
            Component::Normal(component) => normalized.push(component),
            Component::ParentDir => {
                tracing::warn!("ignoring {field}: icon path must not contain '..'");
                return None;
            }
            _ => {
                tracing::warn!("ignoring {field}: icon path must be under assets/");
                return None;
            }
        }
    }
    // The first remaining component must be `assets`.
    let mut components = normalized.components();
    match components.next() {
        Some(Component::Normal(component)) if component == "assets" => {}
        _ => {
            tracing::warn!("ignoring {field}: icon path must be under assets/");
            return None;
        }
    }
    Some(skill_dir.join(normalized))
}
/// Collapse every run of whitespace (including newlines) into a single space
/// and drop leading/trailing whitespace.
fn sanitize_single_line(raw: &str) -> String {
    let mut out = String::with_capacity(raw.len());
    for word in raw.split_whitespace() {
        if !out.is_empty() {
            out.push(' ');
        }
        out.push_str(word);
    }
    out
}
/// Validate that `value` is non-empty and at most `max_len` characters
/// (Unicode scalar values, via `chars().count()`).
///
/// # Errors
/// - `SkillParseError::MissingField` when `value` is empty.
/// - `SkillParseError::InvalidField` when `value` exceeds `max_len`; the
///   reason reports the actual limit that was exceeded.
fn validate_len(
    value: &str,
    max_len: usize,
    field_name: &'static str,
) -> Result<(), SkillParseError> {
    if value.is_empty() {
        return Err(SkillParseError::MissingField(field_name));
    }
    if value.chars().count() > max_len {
        return Err(SkillParseError::InvalidField {
            field: field_name,
            // Report the real limit for this field, not a hard-coded figure.
            reason: format!("exceeds maximum length of {max_len} characters"),
        });
    }
    Ok(())
}
/// Sanitize an optional string to a single line and enforce `max_len`
/// (counted in Unicode scalar values).
///
/// Returns `None` — logging a warning — when the value is absent, blank after
/// sanitizing, or longer than `max_len`.
fn resolve_str(value: Option<String>, max_len: usize, field: &'static str) -> Option<String> {
    let value = value?;
    let value = sanitize_single_line(&value);
    if value.is_empty() {
        tracing::warn!("ignoring {field}: value is empty");
        return None;
    }
    if value.chars().count() > max_len {
        // Report the actual limit for this field, not a hard-coded figure.
        tracing::warn!("ignoring {field}: exceeds maximum length of {max_len} characters");
        return None;
    }
    Some(value)
}
/// Like `resolve_str`, but logs a warning when the value is missing entirely.
fn resolve_required_str(
    value: Option<String>,
    max_len: usize,
    field: &'static str,
) -> Option<String> {
    if value.is_none() {
        tracing::warn!("ignoring {field}: value is missing");
        return None;
    }
    resolve_str(value, max_len, field)
}
/// Validate a brand color as a `#RRGGBB` hex string.
///
/// Accepts exactly a `#` followed by six ASCII hex digits — the 7-byte length
/// check combined with the per-character hex check enforces this; anything
/// else is logged and dropped.
fn resolve_color_str(value: Option<String>, field: &'static str) -> Option<String> {
    let value = value?;
    let value = value.trim();
    if value.is_empty() {
        tracing::warn!("ignoring {field}: value is empty");
        return None;
    }
    let mut chars = value.chars();
    if value.len() == 7 && chars.next() == Some('#') && chars.all(|c| c.is_ascii_hexdigit()) {
        Some(value.to_string())
    } else {
        tracing::warn!("ignoring {field}: expected #RRGGBB, got {value}");
        None
    }
}
/// Extract the YAML frontmatter delimited by `---` lines at the start of
/// `contents`.
///
/// Returns `None` when the opening delimiter is missing, the closing
/// delimiter is never found, or the frontmatter body is empty.
fn extract_frontmatter(contents: &str) -> Option<String> {
    let mut lines = contents.lines();
    let first = lines.next()?;
    if first.trim() != "---" {
        return None;
    }
    let mut body: Vec<&str> = Vec::new();
    let mut closed = false;
    for line in lines {
        if line.trim() == "---" {
            closed = true;
            break;
        }
        body.push(line);
    }
    (closed && !body.is_empty()).then(|| body.join("\n"))
}

View File

@@ -0,0 +1,302 @@
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::RwLock;
use codex_app_server_protocol::ConfigLayerSource;
use codex_config::ConfigLayerStack;
use codex_config::ConfigLayerStackOrdering;
use codex_protocol::protocol::Product;
use codex_protocol::protocol::SkillScope;
use tracing::info;
use tracing::warn;
use crate::config_types::SkillsConfig;
use crate::skills::loader::SkillRoot;
use crate::skills::loader::load_skills_from_roots;
use crate::skills::loader::skill_roots;
use crate::skills::model::SkillLoadOutcome;
use crate::skills::model::SkillMetadata;
use crate::skills::runtime::SkillLoadRequest;
use crate::skills::system::install_system_skills;
use crate::skills::system::uninstall_system_skills;
/// Loads and caches skills, keyed either by working directory or by the
/// (roots, disabled-paths) configuration fingerprint.
pub struct SkillsManager {
    // When set, loaded skills are filtered to those allowed for this product.
    restriction_product: Option<Product>,
    cache_by_cwd: RwLock<HashMap<PathBuf, SkillLoadOutcome>>,
    cache_by_config: RwLock<HashMap<ConfigSkillsCacheKey, SkillLoadOutcome>>,
}
impl SkillsManager {
    /// Create a manager with the default restriction product (`Product::Codex`).
    pub fn new(codex_home: PathBuf, bundled_skills_enabled: bool) -> Self {
        Self::new_with_restriction_product(codex_home, bundled_skills_enabled, Some(Product::Codex))
    }
    /// Create a manager, installing or removing bundled system skills under
    /// `codex_home` depending on `bundled_skills_enabled`. Install failures
    /// are logged, not propagated.
    pub fn new_with_restriction_product(
        codex_home: PathBuf,
        bundled_skills_enabled: bool,
        restriction_product: Option<Product>,
    ) -> Self {
        let manager = Self {
            restriction_product,
            cache_by_cwd: RwLock::new(HashMap::new()),
            cache_by_config: RwLock::new(HashMap::new()),
        };
        if !bundled_skills_enabled {
            uninstall_system_skills(&codex_home);
        } else if let Err(err) = install_system_skills(&codex_home) {
            tracing::error!("failed to install system skills: {err}");
        }
        manager
    }
    /// Load skills for `request`, caching by the (roots, disabled-paths)
    /// configuration fingerprint.
    pub fn skills_for_request(&self, request: &SkillLoadRequest) -> SkillLoadOutcome {
        let roots = self.skill_roots_for_request(request);
        let cache_key = config_skills_cache_key(&roots, &request.config_layer_stack);
        if let Some(outcome) = self.cached_outcome_for_config(&cache_key) {
            return outcome;
        }
        let outcome = self.build_skill_outcome(roots, &request.config_layer_stack);
        // A poisoned lock still holds valid cache data; recover instead of panicking.
        let mut cache = self
            .cache_by_config
            .write()
            .unwrap_or_else(std::sync::PoisonError::into_inner);
        cache.insert(cache_key, outcome.clone());
        outcome
    }
    /// Compute skill roots for `request`: config-derived roots, minus system
    /// roots when bundled skills are disabled, plus normalized extra user roots.
    pub fn skill_roots_for_request(&self, request: &SkillLoadRequest) -> Vec<SkillRoot> {
        let mut roots = skill_roots(
            &request.config_layer_stack,
            &request.cwd,
            request.plugin_skill_roots.clone(),
        );
        if !request.bundled_skills_enabled {
            roots.retain(|root| root.scope != SkillScope::System);
        }
        roots.extend(
            normalize_extra_user_roots(&request.extra_user_roots)
                .into_iter()
                .map(|path| SkillRoot {
                    path,
                    scope: SkillScope::User,
                }),
        );
        roots
    }
    /// Load skills for `request`, caching by `request.cwd`. `force_reload`
    /// bypasses the cache lookup but still refreshes the cached entry.
    pub fn skills_for_cwd_request(
        &self,
        request: &SkillLoadRequest,
        force_reload: bool,
    ) -> SkillLoadOutcome {
        if !force_reload && let Some(outcome) = self.cached_outcome_for_cwd(&request.cwd) {
            return outcome;
        }
        let outcome = self.build_skill_outcome(
            self.skill_roots_for_request(request),
            &request.config_layer_stack,
        );
        let mut cache = self
            .cache_by_cwd
            .write()
            .unwrap_or_else(std::sync::PoisonError::into_inner);
        cache.insert(request.cwd.clone(), outcome.clone());
        outcome
    }
    // Load skills from `roots`, finalize (disabled paths + implicit indexes),
    // then apply the manager's product filter.
    fn build_skill_outcome(
        &self,
        roots: Vec<SkillRoot>,
        config_layer_stack: &ConfigLayerStack,
    ) -> SkillLoadOutcome {
        crate::skills::filter_skill_load_outcome_for_product(
            finalize_skill_outcome(load_skills_from_roots(roots), config_layer_stack),
            self.restriction_product,
        )
    }
    /// Drop both caches, logging how many entries were removed in total.
    pub fn clear_cache(&self) {
        let cleared_cwd = {
            let mut cache = self
                .cache_by_cwd
                .write()
                .unwrap_or_else(std::sync::PoisonError::into_inner);
            let cleared = cache.len();
            cache.clear();
            cleared
        };
        let cleared_config = {
            let mut cache = self
                .cache_by_config
                .write()
                .unwrap_or_else(std::sync::PoisonError::into_inner);
            let cleared = cache.len();
            cache.clear();
            cleared
        };
        let cleared = cleared_cwd + cleared_config;
        info!("skills cache cleared ({cleared} entries)");
    }
    // Look up the per-cwd cache, tolerating a poisoned lock.
    fn cached_outcome_for_cwd(&self, cwd: &Path) -> Option<SkillLoadOutcome> {
        match self.cache_by_cwd.read() {
            Ok(cache) => cache.get(cwd).cloned(),
            Err(err) => err.into_inner().get(cwd).cloned(),
        }
    }
    // Look up the per-config cache, tolerating a poisoned lock.
    fn cached_outcome_for_config(
        &self,
        cache_key: &ConfigSkillsCacheKey,
    ) -> Option<SkillLoadOutcome> {
        match self.cache_by_config.read() {
            Ok(cache) => cache.get(cache_key).cloned(),
            Err(err) => err.into_inner().get(cache_key).cloned(),
        }
    }
}
/// Cache key for `SkillsManager::cache_by_config`: the ordered roots (path
/// plus scope rank) and the sorted list of disabled skill paths.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct ConfigSkillsCacheKey {
    roots: Vec<(PathBuf, u8)>,
    disabled_paths: Vec<PathBuf>,
}
/// Read `skills.bundled.enabled` from the effective config, defaulting to
/// `true` when the `skills` table is absent or fails to deserialize.
pub fn bundled_skills_enabled_from_stack(config_layer_stack: &ConfigLayerStack) -> bool {
    let effective_config = config_layer_stack.effective_config();
    let skills_value = match effective_config.as_table().and_then(|t| t.get("skills")) {
        Some(value) => value.clone(),
        None => return true,
    };
    let skills: SkillsConfig = match skills_value.try_into() {
        Ok(skills) => skills,
        Err(err) => {
            warn!("invalid skills config: {err}");
            return true;
        }
    };
    skills.bundled.unwrap_or_default().enabled
}
/// Collect skill paths explicitly disabled via `skills.config` entries in
/// user-scoped and session-flag config layers.
///
/// Layers are scanned lowest-precedence first so later (higher-precedence)
/// layers override earlier entries for the same normalized path; only paths
/// whose final state is `enabled = false` are returned.
fn disabled_paths_from_stack(config_layer_stack: &ConfigLayerStack) -> HashSet<PathBuf> {
    let mut configs = HashMap::new();
    for layer in config_layer_stack.get_layers(
        ConfigLayerStackOrdering::LowestPrecedenceFirst,
        /*include_disabled*/ true,
    ) {
        // Only user and session-flag layers may toggle individual skills.
        if !matches!(
            layer.name,
            ConfigLayerSource::User { .. } | ConfigLayerSource::SessionFlags
        ) {
            continue;
        }
        let Some(skills_value) = layer.config.get("skills") else {
            continue;
        };
        let skills: SkillsConfig = match skills_value.clone().try_into() {
            Ok(skills) => skills,
            Err(err) => {
                warn!("invalid skills config: {err}");
                continue;
            }
        };
        for entry in skills.config {
            let path = normalize_override_path(entry.path.as_path());
            configs.insert(path, entry.enabled);
        }
    }
    configs
        .into_iter()
        .filter_map(|(path, enabled)| (!enabled).then_some(path))
        .collect()
}
fn config_skills_cache_key(
roots: &[SkillRoot],
config_layer_stack: &ConfigLayerStack,
) -> ConfigSkillsCacheKey {
let mut disabled_paths: Vec<PathBuf> = disabled_paths_from_stack(config_layer_stack)
.into_iter()
.collect();
disabled_paths.sort_unstable();
ConfigSkillsCacheKey {
roots: roots
.iter()
.map(|root| {
let scope_rank = match root.scope {
SkillScope::Repo => 0,
SkillScope::User => 1,
SkillScope::System => 2,
SkillScope::Admin => 3,
};
(root.path.clone(), scope_rank)
})
.collect(),
disabled_paths,
}
}
/// Attach the disabled-path set and build the implicit-invocation lookup
/// indexes on a freshly loaded outcome.
fn finalize_skill_outcome(
    mut outcome: SkillLoadOutcome,
    config_layer_stack: &ConfigLayerStack,
) -> SkillLoadOutcome {
    outcome.disabled_paths = disabled_paths_from_stack(config_layer_stack);
    // Indexes cover only skills that are enabled and allow implicit invocation.
    let (by_scripts_dir, by_doc_path) =
        build_implicit_skill_path_indexes(outcome.allowed_skills_for_implicit_invocation());
    outcome.implicit_skills_by_scripts_dir = Arc::new(by_scripts_dir);
    outcome.implicit_skills_by_doc_path = Arc::new(by_doc_path);
    outcome
}
/// Index skills by their normalized `scripts/` directory and by their
/// normalized skill-doc path, for implicit-invocation lookups.
fn build_implicit_skill_path_indexes(
    skills: Vec<SkillMetadata>,
) -> (
    HashMap<PathBuf, SkillMetadata>,
    HashMap<PathBuf, SkillMetadata>,
) {
    let mut by_scripts_dir = HashMap::new();
    let mut by_doc_path = HashMap::new();
    for skill in skills {
        let doc_path = normalize_override_path(skill.path_to_skills_md.as_path());
        let scripts_dir = skill
            .path_to_skills_md
            .parent()
            .map(|dir| normalize_override_path(&dir.join("scripts")));
        by_doc_path.insert(doc_path, skill.clone());
        if let Some(scripts_dir) = scripts_dir {
            by_scripts_dir.insert(scripts_dir, skill);
        }
    }
    (by_scripts_dir, by_doc_path)
}
/// Canonicalize `path` (without Windows UNC prefixes), falling back to the
/// input when canonicalization fails.
fn normalize_override_path(path: &Path) -> PathBuf {
    match dunce::canonicalize(path) {
        Ok(canonical) => canonical,
        Err(_) => path.to_path_buf(),
    }
}
/// Canonicalize the extra user roots (best effort), then sort and dedupe for
/// a deterministic, duplicate-free list.
fn normalize_extra_user_roots(extra_user_roots: &[PathBuf]) -> Vec<PathBuf> {
    let mut normalized: Vec<PathBuf> = Vec::with_capacity(extra_user_roots.len());
    for path in extra_user_roots {
        normalized.push(dunce::canonicalize(path).unwrap_or_else(|_| path.clone()));
    }
    normalized.sort_unstable();
    normalized.dedup();
    normalized
}
#[cfg(test)]
#[path = "manager_request_tests.rs"]
mod tests;

View File

@@ -0,0 +1,85 @@
use super::*;
use codex_app_server_protocol::ConfigLayerSource;
use codex_config::ConfigLayerEntry;
use codex_config::ConfigLayerStack;
use codex_config::ConfigRequirements;
use codex_config::ConfigRequirementsToml;
use codex_protocol::protocol::SkillScope;
use codex_utils_absolute_path::AbsolutePathBuf;
use pretty_assertions::assert_eq;
use std::fs;
use std::path::Path;
use tempfile::tempdir;
use toml::Value as TomlValue;
/// Test helper: create `root/dir/SKILL.md` with minimal frontmatter using
/// `name` for both the name and the description.
fn write_skill(root: &Path, dir: &str, name: &str) {
    let skill_dir = root.join(dir);
    fs::create_dir_all(&skill_dir).expect("create skill dir");
    let contents = format!("---\nname: {name}\ndescription: {name}\n---\n");
    fs::write(skill_dir.join("SKILL.md"), contents).expect("write skill");
}
// Build a minimal layer stack containing a single, empty user config layer
// whose config file lives under `codex_home`.
fn user_config_layer_stack(codex_home: &Path) -> ConfigLayerStack {
    ConfigLayerStack::new(
        vec![ConfigLayerEntry::new(
            ConfigLayerSource::User {
                file: AbsolutePathBuf::from_absolute_path(codex_home.join("config.toml"))
                    .expect("absolute config path"),
            },
            TomlValue::Table(toml::map::Map::new()),
        )],
        ConfigRequirements::default(),
        ConfigRequirementsToml::default(),
    )
    .expect("config layer stack")
}
#[test]
fn skill_roots_for_request_omits_system_scope_when_bundled_skills_are_disabled() {
    let codex_home = tempdir().expect("tempdir");
    let manager = SkillsManager::new(codex_home.path().to_path_buf(), false);
    let request = SkillLoadRequest {
        cwd: codex_home.path().to_path_buf(),
        config_layer_stack: user_config_layer_stack(codex_home.path()),
        bundled_skills_enabled: false,
        plugin_skill_roots: Vec::new(),
        extra_user_roots: Vec::new(),
    };
    let roots = manager.skill_roots_for_request(&request);
    // Disabling bundled skills must strip every system-scoped root while
    // keeping the user-scoped `$CODEX_HOME/skills` root.
    assert!(roots.iter().all(|root| root.scope != SkillScope::System));
    assert!(roots.iter().any(
        |root| root.scope == SkillScope::User && root.path == codex_home.path().join("skills")
    ));
}
#[test]
fn skills_for_request_loads_plugin_and_extra_user_roots() {
    let codex_home = tempdir().expect("tempdir");
    let plugin_root = tempdir().expect("tempdir");
    let extra_root = tempdir().expect("tempdir");
    write_skill(plugin_root.path(), "plugin-skill", "plugin-skill");
    write_skill(extra_root.path(), "extra-skill", "extra-skill");
    let manager = SkillsManager::new(codex_home.path().to_path_buf(), false);
    let outcome = manager.skills_for_request(&SkillLoadRequest {
        cwd: codex_home.path().to_path_buf(),
        config_layer_stack: ConfigLayerStack::default(),
        bundled_skills_enabled: false,
        plugin_skill_roots: vec![plugin_root.path().to_path_buf()],
        extra_user_roots: vec![extra_root.path().to_path_buf()],
    });
    // Both roots contribute one skill each; results are sorted by name.
    assert_eq!(
        outcome
            .skills
            .iter()
            .map(|skill| skill.name.clone())
            .collect::<Vec<_>>(),
        vec!["extra-skill".to_string(), "plugin-skill".to_string()]
    );
}

View File

@@ -0,0 +1,18 @@
// Submodules of the skills subsystem.
pub mod loader;
pub mod manager;
pub mod model;
pub mod remote;
pub mod render;
mod runtime;
pub mod system;
// Public re-exports forming the skills API surface.
pub use manager::SkillsManager;
pub use manager::bundled_skills_enabled_from_stack;
pub use model::SkillError;
pub use model::SkillLoadOutcome;
pub use model::SkillMetadata;
pub use model::SkillPolicy;
pub use model::filter_skill_load_outcome_for_product;
pub use render::render_skills_section;
pub use runtime::RemoteSkillRequest;
pub use runtime::SkillLoadRequest;

View File

@@ -0,0 +1,185 @@
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use codex_protocol::models::PermissionProfile;
use codex_protocol::protocol::Product;
use codex_protocol::protocol::SkillScope;
use serde::Deserialize;
/// Per-skill overrides for the managed-network allow/deny domain lists.
#[derive(Debug, Clone, Default, Deserialize, PartialEq, Eq)]
pub struct SkillManagedNetworkOverride {
    pub allowed_domains: Option<Vec<String>>,
    pub denied_domains: Option<Vec<String>>,
}
impl SkillManagedNetworkOverride {
    /// True when at least one of the domain lists is present.
    pub fn has_domain_overrides(&self) -> bool {
        self.allowed_domains.is_some() || self.denied_domains.is_some()
    }
}
/// Parsed metadata for a single loaded skill.
#[derive(Debug, Clone, PartialEq)]
pub struct SkillMetadata {
    pub name: String,
    pub description: String,
    pub short_description: Option<String>,
    pub interface: Option<SkillInterface>,
    pub dependencies: Option<SkillDependencies>,
    pub policy: Option<SkillPolicy>,
    pub permission_profile: Option<PermissionProfile>,
    pub managed_network_override: Option<SkillManagedNetworkOverride>,
    /// Path to the SKILL.md file that declares this skill.
    pub path_to_skills_md: PathBuf,
    pub scope: SkillScope,
}
impl SkillMetadata {
    /// Whether this skill may be invoked implicitly; defaults to `true` when
    /// no policy (or no explicit setting) is present.
    fn allow_implicit_invocation(&self) -> bool {
        self.policy
            .as_ref()
            .and_then(|policy| policy.allow_implicit_invocation)
            .unwrap_or(true)
    }
    /// Whether this skill is available for `restriction_product`. Skills with
    /// no policy or an empty product list are available everywhere.
    pub fn matches_product_restriction_for_product(
        &self,
        restriction_product: Option<Product>,
    ) -> bool {
        match &self.policy {
            Some(policy) => {
                policy.products.is_empty()
                    || restriction_product.is_some_and(|product| {
                        product.matches_product_restriction(&policy.products)
                    })
            }
            None => true,
        }
    }
}
/// Policy controls parsed from skill metadata.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct SkillPolicy {
    pub allow_implicit_invocation: Option<bool>,
    // TODO: Enforce product gating in Codex skill selection/injection instead of only parsing and
    // storing this metadata.
    pub products: Vec<Product>,
}
/// Optional presentation metadata for a skill (display name, icons, brand
/// color, default prompt).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillInterface {
    pub display_name: Option<String>,
    pub short_description: Option<String>,
    pub icon_small: Option<PathBuf>,
    pub icon_large: Option<PathBuf>,
    pub brand_color: Option<String>,
    pub default_prompt: Option<String>,
}
/// Tool dependencies declared by a skill.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillDependencies {
    pub tools: Vec<SkillToolDependency>,
}
/// A single tool dependency; `type` and `value` are required, the rest are
/// optional.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillToolDependency {
    pub r#type: String,
    pub value: String,
    pub description: Option<String>,
    pub transport: Option<String>,
    pub command: Option<String>,
    pub url: Option<String>,
}
/// A skill file that failed to load, with a human-readable message.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillError {
    pub path: PathBuf,
    pub message: String,
}
/// Result of loading skills from a set of roots.
#[derive(Debug, Clone, Default)]
pub struct SkillLoadOutcome {
    pub skills: Vec<SkillMetadata>,
    pub errors: Vec<SkillError>,
    /// Skill doc paths disabled via configuration.
    pub disabled_paths: HashSet<PathBuf>,
    // Lookup indexes for implicit invocation, keyed by a skill's scripts
    // directory and by its skill-doc path respectively; shared via `Arc` so
    // cloning the outcome is cheap.
    pub(crate) implicit_skills_by_scripts_dir: Arc<HashMap<PathBuf, SkillMetadata>>,
    pub(crate) implicit_skills_by_doc_path: Arc<HashMap<PathBuf, SkillMetadata>>,
}
impl SkillLoadOutcome {
pub fn with_implicit_skill_indexes(
implicit_skills_by_scripts_dir: HashMap<PathBuf, SkillMetadata>,
implicit_skills_by_doc_path: HashMap<PathBuf, SkillMetadata>,
) -> Self {
Self {
implicit_skills_by_scripts_dir: Arc::new(implicit_skills_by_scripts_dir),
implicit_skills_by_doc_path: Arc::new(implicit_skills_by_doc_path),
..Self::default()
}
}
pub fn is_skill_enabled(&self, skill: &SkillMetadata) -> bool {
!self.disabled_paths.contains(&skill.path_to_skills_md)
}
pub fn is_skill_allowed_for_implicit_invocation(&self, skill: &SkillMetadata) -> bool {
self.is_skill_enabled(skill) && skill.allow_implicit_invocation()
}
pub fn allowed_skills_for_implicit_invocation(&self) -> Vec<SkillMetadata> {
self.skills
.iter()
.filter(|skill| self.is_skill_allowed_for_implicit_invocation(skill))
.cloned()
.collect()
}
pub fn skills_with_enabled(&self) -> impl Iterator<Item = (&SkillMetadata, bool)> {
self.skills
.iter()
.map(|skill| (skill, self.is_skill_enabled(skill)))
}
pub fn implicit_skill_for_script_path(&self, script_path: &Path) -> Option<SkillMetadata> {
for ancestor in script_path.ancestors() {
if let Some(skill) = self.implicit_skills_by_scripts_dir.get(ancestor) {
return Some(skill.clone());
}
}
None
}
pub fn implicit_skill_for_doc_path(&self, skill_doc_path: &Path) -> Option<SkillMetadata> {
self.implicit_skills_by_doc_path
.get(skill_doc_path)
.cloned()
}
}
pub fn filter_skill_load_outcome_for_product(
mut outcome: SkillLoadOutcome,
restriction_product: Option<Product>,
) -> SkillLoadOutcome {
outcome
.skills
.retain(|skill| skill.matches_product_restriction_for_product(restriction_product));
outcome.implicit_skills_by_scripts_dir = Arc::new(
outcome
.implicit_skills_by_scripts_dir
.iter()
.filter(|(_, skill)| skill.matches_product_restriction_for_product(restriction_product))
.map(|(path, skill)| (path.clone(), skill.clone()))
.collect(),
);
outcome.implicit_skills_by_doc_path = Arc::new(
outcome
.implicit_skills_by_doc_path
.iter()
.filter(|(_, skill)| skill.matches_product_restriction_for_product(restriction_product))
.map(|(path, skill)| (path.clone(), skill.clone()))
.collect(),
);
outcome
}

View File

@@ -0,0 +1,269 @@
use super::runtime::RemoteSkillRequest;
use anyhow::Context;
use anyhow::Result;
use codex_login::CodexAuth;
use codex_login::default_client::build_reqwest_client;
use serde::Deserialize;
use std::path::Component;
use std::path::Path;
use std::path::PathBuf;
use std::time::Duration;
const REMOTE_SKILLS_API_TIMEOUT: Duration = Duration::from_secs(30);
// Low-level client for the remote skill API. This is intentionally kept around for
// future wiring, but it is not used yet by any active product surface.
/// Scope filter for listing remote skills; serialized into the `scope` query
/// parameter by `as_query_scope`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RemoteSkillScope {
    WorkspaceShared,
    AllShared,
    Personal,
    Example,
}
/// Product surface issuing the request; serialized into the mandatory
/// `product_surface` query parameter by `as_query_product_surface`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RemoteSkillProductSurface {
    Chatgpt,
    Codex,
    Api,
    Atlas,
}
/// Wire value for the `scope` query parameter. Currently every scope has a
/// value; the `Option` return is kept so callers can omit the parameter for
/// future scopes without a wire representation.
fn as_query_scope(scope: RemoteSkillScope) -> Option<&'static str> {
    let value = match scope {
        RemoteSkillScope::WorkspaceShared => "workspace-shared",
        RemoteSkillScope::AllShared => "all-shared",
        RemoteSkillScope::Personal => "personal",
        RemoteSkillScope::Example => "example",
    };
    Some(value)
}
/// Wire value for the `product_surface` query parameter (arms kept in
/// alphabetical order of their wire value).
fn as_query_product_surface(product_surface: RemoteSkillProductSurface) -> &'static str {
    match product_surface {
        RemoteSkillProductSurface::Api => "api",
        RemoteSkillProductSurface::Atlas => "atlas",
        RemoteSkillProductSurface::Chatgpt => "chatgpt",
        RemoteSkillProductSurface::Codex => "codex",
    }
}
/// Validates that ChatGPT auth is present; API-key auth and missing auth are
/// both rejected with distinct messages.
fn ensure_chatgpt_auth(auth: Option<&CodexAuth>) -> Result<&CodexAuth> {
    match auth {
        Some(auth) if auth.is_chatgpt_auth() => Ok(auth),
        Some(_) => anyhow::bail!(
            "chatgpt authentication required for remote skill scopes; api key auth is not supported"
        ),
        None => anyhow::bail!("chatgpt authentication required for remote skill scopes"),
    }
}
/// Public summary of one remote skill, as returned by `list_remote_skills`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RemoteSkillSummary {
    pub id: String,
    pub name: String,
    pub description: String,
}
/// Result of `export_remote_skill`: the skill id and the local directory the
/// archive was extracted into.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RemoteSkillDownloadResult {
    pub id: String,
    pub path: PathBuf,
}
/// Wire shape of the list response; the server names the array `hazelnuts`.
#[derive(Debug, Deserialize)]
struct RemoteSkillsResponse {
    #[serde(rename = "hazelnuts")]
    skills: Vec<RemoteSkill>,
}
/// Wire shape of one skill entry in the list response.
#[derive(Debug, Deserialize)]
struct RemoteSkill {
    id: String,
    name: String,
    description: String,
}
/// Lists remote skills via `GET {chatgpt_base_url}/hazelnuts`.
///
/// Sends the mandatory `product_surface` query parameter and, when
/// applicable, `scope` and `enabled`. Requires ChatGPT auth
/// (`ensure_chatgpt_auth` rejects API-key auth).
///
/// # Errors
/// Fails when auth is missing or unsuitable, the token cannot be read, the
/// request cannot be sent, the server returns a non-success status, or the
/// response body cannot be parsed.
pub async fn list_remote_skills(
    request: &RemoteSkillRequest,
    auth: Option<&CodexAuth>,
    scope: RemoteSkillScope,
    product_surface: RemoteSkillProductSurface,
    enabled: Option<bool>,
) -> Result<Vec<RemoteSkillSummary>> {
    // Tolerate a trailing slash in the configured base URL.
    let base_url = request.chatgpt_base_url.trim_end_matches('/');
    let auth = ensure_chatgpt_auth(auth)?;
    let url = format!("{base_url}/hazelnuts");
    let product_surface = as_query_product_surface(product_surface);
    let mut query_params = vec![("product_surface", product_surface)];
    if let Some(scope) = as_query_scope(scope) {
        query_params.push(("scope", scope));
    }
    if let Some(enabled) = enabled {
        let enabled = if enabled { "true" } else { "false" };
        query_params.push(("enabled", enabled));
    }
    let client = build_reqwest_client();
    let mut request = client
        .get(&url)
        .timeout(REMOTE_SKILLS_API_TIMEOUT)
        .query(&query_params);
    let token = auth
        .get_token()
        .context("Failed to read auth token for remote skills")?;
    request = request.bearer_auth(token);
    // Forward the account id when available.
    if let Some(account_id) = auth.get_account_id() {
        request = request.header("chatgpt-account-id", account_id);
    }
    let response = request
        .send()
        .await
        .with_context(|| format!("Failed to send request to {url}"))?;
    let status = response.status();
    // Read the body before checking status so the error message can include it.
    let body = response.text().await.unwrap_or_default();
    if !status.is_success() {
        anyhow::bail!("Request failed with status {status} from {url}: {body}");
    }
    let parsed: RemoteSkillsResponse =
        serde_json::from_str(&body).context("Failed to parse skills response")?;
    Ok(parsed
        .skills
        .into_iter()
        .map(|skill| RemoteSkillSummary {
            id: skill.id,
            name: skill.name,
            description: skill.description,
        })
        .collect())
}
/// Downloads a remote skill archive via
/// `GET {chatgpt_base_url}/hazelnuts/{skill_id}/export` and extracts it under
/// `{codex_home}/skills/{skill_id}`.
///
/// The payload is sniffed with `is_zip_payload` before extraction, and the
/// (synchronous) zip extraction runs on a blocking thread via
/// `spawn_blocking`.
///
/// # Errors
/// Fails on missing/unsuitable auth, transport errors, non-success statuses,
/// non-zip payloads, or extraction failures.
pub async fn export_remote_skill(
    request: &RemoteSkillRequest,
    auth: Option<&CodexAuth>,
    skill_id: &str,
) -> Result<RemoteSkillDownloadResult> {
    let auth = ensure_chatgpt_auth(auth)?;
    let client = build_reqwest_client();
    // Tolerate a trailing slash in the configured base URL.
    let base_url = request.chatgpt_base_url.trim_end_matches('/');
    let url = format!("{base_url}/hazelnuts/{skill_id}/export");
    let mut download_request = client.get(&url).timeout(REMOTE_SKILLS_API_TIMEOUT);
    let token = auth
        .get_token()
        .context("Failed to read auth token for remote skills")?;
    download_request = download_request.bearer_auth(token);
    // Forward the account id when available.
    if let Some(account_id) = auth.get_account_id() {
        download_request = download_request.header("chatgpt-account-id", account_id);
    }
    let response = download_request
        .send()
        .await
        .with_context(|| format!("Failed to send download request to {url}"))?;
    let status = response.status();
    // Read the body before checking status so error messages can include it.
    let body = response.bytes().await.context("Failed to read download")?;
    if !status.is_success() {
        let body_text = String::from_utf8_lossy(&body);
        anyhow::bail!("Download failed with status {status} from {url}: {body_text}");
    }
    if !is_zip_payload(&body) {
        anyhow::bail!("Downloaded remote skill payload is not a zip archive");
    }
    let output_dir = request.codex_home.join("skills").join(skill_id);
    tokio::fs::create_dir_all(&output_dir)
        .await
        .context("Failed to create downloaded skills directory")?;
    let zip_bytes = body.to_vec();
    let output_dir_clone = output_dir.clone();
    // The archive may wrap everything in a `{skill_id}/` folder; strip it.
    let prefix_candidates = vec![skill_id.to_string()];
    // Double `?`: outer for the join error, inner for the extraction result.
    tokio::task::spawn_blocking(move || {
        extract_zip_to_dir(zip_bytes, &output_dir_clone, &prefix_candidates)
    })
    .await
    .context("Zip extraction task failed")??;
    Ok(RemoteSkillDownloadResult {
        id: skill_id.to_string(),
        path: output_dir,
    })
}
/// Joins `name` onto `base`, rejecting any path that contains a non-normal
/// component (`..`, a root, a Windows drive prefix, or `.`) so zip entries
/// cannot escape the extraction directory.
fn safe_join(base: &Path, name: &str) -> Result<PathBuf> {
    let relative = Path::new(name);
    let is_safe = relative
        .components()
        .all(|component| matches!(component, Component::Normal(_)));
    if !is_safe {
        anyhow::bail!("Invalid file path in remote skill payload: {name}");
    }
    Ok(base.join(relative))
}
/// Returns `true` when `bytes` begins with one of the zip magic prefixes
/// (`PK\x03\x04`, `PK\x05\x06`, or `PK\x07\x08`).
fn is_zip_payload(bytes: &[u8]) -> bool {
    const ZIP_MAGICS: [&[u8]; 3] = [b"PK\x03\x04", b"PK\x05\x06", b"PK\x07\x08"];
    ZIP_MAGICS.iter().any(|magic| bytes.starts_with(magic))
}
/// Extracts the in-memory zip archive `bytes` into `output_dir`.
///
/// Directory entries are skipped (parent directories are created on demand
/// from file paths instead). Entry names are normalized via
/// `normalize_zip_name` — dropping leading `./` segments and a matching
/// `{prefix}/` from `prefix_candidates` — and `safe_join` rejects entries
/// that would escape `output_dir`. This is blocking I/O; callers run it via
/// `spawn_blocking`.
fn extract_zip_to_dir(
    bytes: Vec<u8>,
    output_dir: &Path,
    prefix_candidates: &[String],
) -> Result<()> {
    let cursor = std::io::Cursor::new(bytes);
    let mut archive = zip::ZipArchive::new(cursor).context("Failed to open zip archive")?;
    for i in 0..archive.len() {
        let mut file = archive.by_index(i).context("Failed to read zip entry")?;
        if file.is_dir() {
            continue;
        }
        let raw_name = file.name().to_string();
        let normalized = normalize_zip_name(&raw_name, prefix_candidates);
        // A `None` means the entry collapsed to nothing after normalization
        // (e.g. it was just the stripped prefix); skip it.
        let Some(normalized) = normalized else {
            continue;
        };
        let file_path = safe_join(output_dir, &normalized)?;
        if let Some(parent) = file_path.parent() {
            std::fs::create_dir_all(parent)
                .with_context(|| format!("Failed to create parent dir for {normalized}"))?;
        }
        let mut out = std::fs::File::create(&file_path)
            .with_context(|| format!("Failed to create file {normalized}"))?;
        std::io::copy(&mut file, &mut out)
            .with_context(|| format!("Failed to write skill file {normalized}"))?;
    }
    Ok(())
}
/// Normalizes a zip entry name: strips all leading `./` occurrences, then
/// removes the first matching `{prefix}/` from `prefix_candidates` (empty
/// candidates are ignored). Returns `None` when nothing remains.
fn normalize_zip_name(name: &str, prefix_candidates: &[String]) -> Option<String> {
    let without_dot_segments = name.trim_start_matches("./");
    let stripped = prefix_candidates
        .iter()
        .filter(|prefix| !prefix.is_empty())
        .find_map(|prefix| without_dot_segments.strip_prefix(&format!("{prefix}/")))
        .unwrap_or(without_dot_segments);
    if stripped.is_empty() {
        None
    } else {
        Some(stripped.to_string())
    }
}

View File

@@ -0,0 +1,48 @@
use crate::skills::model::SkillMetadata;
use codex_protocol::protocol::SKILLS_INSTRUCTIONS_CLOSE_TAG;
use codex_protocol::protocol::SKILLS_INSTRUCTIONS_OPEN_TAG;
/// Renders the skills section that is injected into the model prompt.
///
/// Returns `None` when `skills` is empty; otherwise returns a markdown body
/// (one bullet per skill: name, description, and `SKILL.md` path, followed by
/// fixed usage instructions) wrapped in the protocol's skills-instruction
/// open/close tags.
pub fn render_skills_section(skills: &[SkillMetadata]) -> Option<String> {
    if skills.is_empty() {
        return None;
    }
    let mut lines: Vec<String> = Vec::new();
    lines.push("## Skills".to_string());
    lines.push("A skill is a set of local instructions to follow that is stored in a `SKILL.md` file. Below is the list of skills that can be used. Each entry includes a name, description, and file path so you can open the source for full instructions when using a specific skill.".to_string());
    lines.push("### Available skills".to_string());
    for skill in skills {
        // Render with forward slashes so paths look uniform across platforms.
        let path_str = skill.path_to_skills_md.to_string_lossy().replace('\\', "/");
        let name = skill.name.as_str();
        let description = skill.description.as_str();
        lines.push(format!("- {name}: {description} (file: {path_str})"));
    }
    lines.push("### How to use skills".to_string());
    // Fixed instruction text; kept as a single raw literal so the prompt
    // wording stays byte-stable.
    lines.push(
        r###"- Discovery: The list above is the skills available in this session (name + description + file path). Skill bodies live on disk at the listed paths.
- Trigger rules: If the user names a skill (with `$SkillName` or plain text) OR the task clearly matches a skill's description shown above, you must use that skill for that turn. Multiple mentions mean use them all. Do not carry skills across turns unless re-mentioned.
- Missing/blocked: If a named skill isn't in the list or the path can't be read, say so briefly and continue with the best fallback.
- How to use a skill (progressive disclosure):
  1) After deciding to use a skill, open its `SKILL.md`. Read only enough to follow the workflow.
  2) When `SKILL.md` references relative paths (e.g., `scripts/foo.py`), resolve them relative to the skill directory listed above first, and only consider other paths if needed.
  3) If `SKILL.md` points to extra folders such as `references/`, load only the specific files needed for the request; don't bulk-load everything.
  4) If `scripts/` exist, prefer running or patching them instead of retyping large code blocks.
  5) If `assets/` or templates exist, reuse them instead of recreating from scratch.
- Coordination and sequencing:
  - If multiple skills apply, choose the minimal set that covers the request and state the order you'll use them.
  - Announce which skill(s) you're using and why (one short line). If you skip an obvious skill, say why.
- Context hygiene:
  - Keep context small: summarize long sections instead of pasting them; only load extra files when needed.
  - Avoid deep reference-chasing: prefer opening only files directly linked from `SKILL.md` unless you're blocked.
  - When variants exist (frameworks, providers, domains), pick only the relevant reference file(s) and note that choice.
- Safety and fallback: If a skill can't be applied cleanly (missing files, unclear instructions), state the issue, pick the next-best approach, and continue."###
            .to_string(),
    );
    let body = lines.join("\n");
    Some(format!(
        "{SKILLS_INSTRUCTIONS_OPEN_TAG}\n{body}\n{SKILLS_INSTRUCTIONS_CLOSE_TAG}"
    ))
}

View File

@@ -0,0 +1,17 @@
use codex_config::ConfigLayerStack;
use std::path::PathBuf;
/// Inputs for loading skills from disk.
#[derive(Debug, Clone)]
pub struct SkillLoadRequest {
    /// Working directory the load is anchored to.
    pub cwd: PathBuf,
    pub config_layer_stack: ConfigLayerStack,
    /// Whether skills bundled with Codex should be loaded.
    pub bundled_skills_enabled: bool,
    /// Skill roots contributed by installed plugins.
    pub plugin_skill_roots: Vec<PathBuf>,
    /// Additional user-level skill roots to scan.
    pub extra_user_roots: Vec<PathBuf>,
}
/// Connection/location inputs shared by the remote skill API calls.
#[derive(Debug, Clone)]
pub struct RemoteSkillRequest {
    /// Base URL of the ChatGPT backend; a trailing slash is tolerated.
    pub chatgpt_base_url: String,
    /// Codex home directory; exported skills land under `{codex_home}/skills`.
    pub codex_home: PathBuf,
}

View File

@@ -0,0 +1,9 @@
pub use codex_skills::install_system_skills;
pub use codex_skills::system_cache_root_dir;
use std::path::Path;
/// Removes the cached system-skills directory under `codex_home`.
/// Best-effort: any removal error (including the directory not existing) is
/// deliberately ignored.
pub fn uninstall_system_skills(codex_home: &Path) {
    let _ = std::fs::remove_dir_all(system_cache_root_dir(codex_home));
}

View File

@@ -29,6 +29,7 @@ chrono = { workspace = true, features = ["serde"] }
clap = { workspace = true, features = ["derive"] }
codex-api = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-capabilities = { workspace = true }
codex-apply-patch = { workspace = true }
codex-async-utils = { workspace = true }
codex-connectors = { workspace = true }
@@ -37,7 +38,6 @@ codex-exec-server = { workspace = true }
codex-features = { workspace = true }
codex-login = { workspace = true }
codex-shell-command = { workspace = true }
codex-skills = { workspace = true }
codex-execpolicy = { workspace = true }
codex-file-search = { workspace = true }
codex-git = { workspace = true }

View File

@@ -1,3 +1 @@
mod render;
pub(crate) use render::render_apps_section;
pub(crate) use codex_capabilities::apps::render_apps_section;

View File

@@ -3,7 +3,17 @@
// Note this file should generally be restricted to simple struct/enum
// definitions that do not contain business logic.
use crate::config_loader::RequirementSource;
pub use codex_capabilities::config_types::BundledSkillsConfig;
pub use codex_capabilities::config_types::McpServerConfig;
pub use codex_capabilities::config_types::McpServerDisabledReason;
pub use codex_capabilities::config_types::McpServerTransportConfig;
pub use codex_capabilities::config_types::PluginConfig;
pub use codex_capabilities::config_types::RawMcpServerConfig;
pub use codex_capabilities::config_types::SkillConfig;
pub use codex_capabilities::config_types::SkillsConfig;
pub use codex_capabilities::config_types::ToolSuggestConfig;
pub use codex_capabilities::config_types::ToolSuggestDiscoverable;
pub use codex_capabilities::config_types::ToolSuggestDiscoverableType;
pub use codex_protocol::config_types::AltScreenMode;
pub use codex_protocol::config_types::ApprovalsReviewer;
pub use codex_protocol::config_types::ModeKind;
@@ -14,15 +24,11 @@ use codex_utils_absolute_path::AbsolutePathBuf;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::fmt;
use std::path::PathBuf;
use std::time::Duration;
use wildmatch::WildMatchPattern;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Deserializer;
use serde::Serialize;
use serde::de::Error as SerdeError;
pub const DEFAULT_OTEL_ENVIRONMENT: &str = "dev";
pub const DEFAULT_MEMORIES_MAX_ROLLOUTS_PER_STARTUP: usize = 16;
@@ -31,6 +37,10 @@ pub const DEFAULT_MEMORIES_MIN_ROLLOUT_IDLE_HOURS: i64 = 6;
pub const DEFAULT_MEMORIES_MAX_RAW_MEMORIES_FOR_CONSOLIDATION: usize = 256;
pub const DEFAULT_MEMORIES_MAX_UNUSED_DAYS: i64 = 30;
const fn default_enabled() -> bool {
true
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, JsonSchema)]
#[serde(rename_all = "kebab-case")]
pub enum WindowsSandboxModeToml {
@@ -47,261 +57,6 @@ pub struct WindowsToml {
pub sandbox_private_desktop: Option<bool>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum McpServerDisabledReason {
Unknown,
Requirements { source: RequirementSource },
}
impl fmt::Display for McpServerDisabledReason {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
McpServerDisabledReason::Unknown => write!(f, "unknown"),
McpServerDisabledReason::Requirements { source } => {
write!(f, "requirements ({source})")
}
}
}
}
#[derive(Serialize, Debug, Clone, PartialEq)]
pub struct McpServerConfig {
#[serde(flatten)]
pub transport: McpServerTransportConfig,
/// When `false`, Codex skips initializing this MCP server.
#[serde(default = "default_enabled")]
pub enabled: bool,
/// When `true`, `codex exec` exits with an error if this MCP server fails to initialize.
#[serde(default, skip_serializing_if = "std::ops::Not::not")]
pub required: bool,
/// Reason this server was disabled after applying requirements.
#[serde(skip)]
pub disabled_reason: Option<McpServerDisabledReason>,
/// Startup timeout in seconds for initializing MCP server & initially listing tools.
#[serde(
default,
with = "option_duration_secs",
skip_serializing_if = "Option::is_none"
)]
pub startup_timeout_sec: Option<Duration>,
/// Default timeout for MCP tool calls initiated via this server.
#[serde(default, with = "option_duration_secs")]
pub tool_timeout_sec: Option<Duration>,
/// Explicit allow-list of tools exposed from this server. When set, only these tools will be registered.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub enabled_tools: Option<Vec<String>>,
/// Explicit deny-list of tools. These tools will be removed after applying `enabled_tools`.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub disabled_tools: Option<Vec<String>>,
/// Optional OAuth scopes to request during MCP login.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub scopes: Option<Vec<String>>,
/// Optional OAuth resource parameter to include during MCP login (RFC 8707).
#[serde(default, skip_serializing_if = "Option::is_none")]
pub oauth_resource: Option<String>,
}
// Raw MCP config shape used for deserialization and JSON Schema generation.
// Keep this in sync with the validation logic in `McpServerConfig`.
#[derive(Deserialize, Clone, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub(crate) struct RawMcpServerConfig {
// stdio
pub command: Option<String>,
#[serde(default)]
pub args: Option<Vec<String>>,
#[serde(default)]
pub env: Option<HashMap<String, String>>,
#[serde(default)]
pub env_vars: Option<Vec<String>>,
#[serde(default)]
pub cwd: Option<PathBuf>,
pub http_headers: Option<HashMap<String, String>>,
#[serde(default)]
pub env_http_headers: Option<HashMap<String, String>>,
// streamable_http
pub url: Option<String>,
pub bearer_token: Option<String>,
pub bearer_token_env_var: Option<String>,
// shared
#[serde(default)]
pub startup_timeout_sec: Option<f64>,
#[serde(default)]
pub startup_timeout_ms: Option<u64>,
#[serde(default, with = "option_duration_secs")]
#[schemars(with = "Option<f64>")]
pub tool_timeout_sec: Option<Duration>,
#[serde(default)]
pub enabled: Option<bool>,
#[serde(default)]
pub required: Option<bool>,
#[serde(default)]
pub enabled_tools: Option<Vec<String>>,
#[serde(default)]
pub disabled_tools: Option<Vec<String>>,
#[serde(default)]
pub scopes: Option<Vec<String>>,
#[serde(default)]
pub oauth_resource: Option<String>,
}
impl<'de> Deserialize<'de> for McpServerConfig {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let mut raw = RawMcpServerConfig::deserialize(deserializer)?;
let startup_timeout_sec = match (raw.startup_timeout_sec, raw.startup_timeout_ms) {
(Some(sec), _) => {
let duration = Duration::try_from_secs_f64(sec).map_err(SerdeError::custom)?;
Some(duration)
}
(None, Some(ms)) => Some(Duration::from_millis(ms)),
(None, None) => None,
};
let tool_timeout_sec = raw.tool_timeout_sec;
let enabled = raw.enabled.unwrap_or_else(default_enabled);
let required = raw.required.unwrap_or_default();
let enabled_tools = raw.enabled_tools.clone();
let disabled_tools = raw.disabled_tools.clone();
let scopes = raw.scopes.clone();
let oauth_resource = raw.oauth_resource.clone();
fn throw_if_set<E, T>(transport: &str, field: &str, value: Option<&T>) -> Result<(), E>
where
E: SerdeError,
{
if value.is_none() {
return Ok(());
}
Err(E::custom(format!(
"{field} is not supported for {transport}",
)))
}
let transport = if let Some(command) = raw.command.clone() {
throw_if_set("stdio", "url", raw.url.as_ref())?;
throw_if_set(
"stdio",
"bearer_token_env_var",
raw.bearer_token_env_var.as_ref(),
)?;
throw_if_set("stdio", "bearer_token", raw.bearer_token.as_ref())?;
throw_if_set("stdio", "http_headers", raw.http_headers.as_ref())?;
throw_if_set("stdio", "env_http_headers", raw.env_http_headers.as_ref())?;
throw_if_set("stdio", "oauth_resource", raw.oauth_resource.as_ref())?;
McpServerTransportConfig::Stdio {
command,
args: raw.args.clone().unwrap_or_default(),
env: raw.env.clone(),
env_vars: raw.env_vars.clone().unwrap_or_default(),
cwd: raw.cwd.take(),
}
} else if let Some(url) = raw.url.clone() {
throw_if_set("streamable_http", "args", raw.args.as_ref())?;
throw_if_set("streamable_http", "env", raw.env.as_ref())?;
throw_if_set("streamable_http", "env_vars", raw.env_vars.as_ref())?;
throw_if_set("streamable_http", "cwd", raw.cwd.as_ref())?;
throw_if_set("streamable_http", "bearer_token", raw.bearer_token.as_ref())?;
McpServerTransportConfig::StreamableHttp {
url,
bearer_token_env_var: raw.bearer_token_env_var.clone(),
http_headers: raw.http_headers.clone(),
env_http_headers: raw.env_http_headers.take(),
}
} else {
return Err(SerdeError::custom("invalid transport"));
};
Ok(Self {
transport,
startup_timeout_sec,
tool_timeout_sec,
enabled,
required,
disabled_reason: None,
enabled_tools,
disabled_tools,
scopes,
oauth_resource,
})
}
}
const fn default_enabled() -> bool {
true
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema)]
#[serde(untagged, deny_unknown_fields, rename_all = "snake_case")]
pub enum McpServerTransportConfig {
/// https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#stdio
Stdio {
command: String,
#[serde(default)]
args: Vec<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
env: Option<HashMap<String, String>>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
env_vars: Vec<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
cwd: Option<PathBuf>,
},
/// https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#streamable-http
StreamableHttp {
url: String,
/// Name of the environment variable to read for an HTTP bearer token.
/// When set, requests will include the token via `Authorization: Bearer <token>`.
/// The actual secret value must be provided via the environment.
#[serde(default, skip_serializing_if = "Option::is_none")]
bearer_token_env_var: Option<String>,
/// Additional HTTP headers to include in requests to this server.
#[serde(default, skip_serializing_if = "Option::is_none")]
http_headers: Option<HashMap<String, String>>,
/// HTTP headers where the value is sourced from an environment variable.
#[serde(default, skip_serializing_if = "Option::is_none")]
env_http_headers: Option<HashMap<String, String>>,
},
}
mod option_duration_secs {
use serde::Deserialize;
use serde::Deserializer;
use serde::Serializer;
use std::time::Duration;
pub fn serialize<S>(value: &Option<Duration>, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match value {
Some(duration) => serializer.serialize_some(&duration.as_secs_f64()),
None => serializer.serialize_none(),
}
}
pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<Duration>, D::Error>
where
D: Deserializer<'de>,
{
let secs = Option::<f64>::deserialize(deserializer)?;
secs.map(|secs| Duration::try_from_secs_f64(secs).map_err(serde::de::Error::custom))
.transpose()
}
}
#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, JsonSchema)]
pub enum UriBasedFileOpener {
#[serde(rename = "vscode")]
@@ -372,28 +127,6 @@ pub struct FeedbackConfigToml {
pub enabled: Option<bool>,
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum ToolSuggestDiscoverableType {
Connector,
Plugin,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub struct ToolSuggestDiscoverable {
#[serde(rename = "type")]
pub kind: ToolSuggestDiscoverableType,
pub id: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub struct ToolSuggestConfig {
#[serde(default)]
pub discoverables: Vec<ToolSuggestDiscoverable>,
}
/// Memories settings loaded from config.toml.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema)]
#[schemars(deny_unknown_fields)]
@@ -801,43 +534,6 @@ impl Notice {
pub(crate) const TABLE_KEY: &'static str = "notice";
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub struct SkillConfig {
pub path: AbsolutePathBuf,
pub enabled: bool,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub struct PluginConfig {
#[serde(default = "default_enabled")]
pub enabled: bool,
}
#[derive(Serialize, Deserialize, Debug, Clone, Default, PartialEq, Eq, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub struct SkillsConfig {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub bundled: Option<BundledSkillsConfig>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub config: Vec<SkillConfig>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub struct BundledSkillsConfig {
#[serde(default = "default_enabled")]
pub enabled: bool,
}
impl Default for BundledSkillsConfig {
fn default() -> Self {
Self { enabled: true }
}
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, JsonSchema)]
#[schemars(deny_unknown_fields)]
pub struct SandboxWorkspaceWrite {

View File

@@ -1,81 +1,17 @@
use anyhow::Context;
use std::collections::HashSet;
use tracing::warn;
use super::OPENAI_CURATED_MARKETPLACE_NAME;
use super::PluginCapabilitySummary;
use super::PluginReadRequest;
use super::PluginsManager;
use crate::config::Config;
use crate::config::types::ToolSuggestDiscoverableType;
use codex_features::Feature;
const TOOL_SUGGEST_DISCOVERABLE_PLUGIN_ALLOWLIST: &[&str] = &[
"github@openai-curated",
"notion@openai-curated",
"slack@openai-curated",
"gmail@openai-curated",
"google-calendar@openai-curated",
"google-docs@openai-curated",
"google-drive@openai-curated",
"google-sheets@openai-curated",
"google-slides@openai-curated",
];
use codex_capabilities::plugins::list_tool_suggest_discoverable_plugins as list_discoverable;
pub(crate) fn list_tool_suggest_discoverable_plugins(
config: &Config,
) -> anyhow::Result<Vec<PluginCapabilitySummary>> {
if !config.features.enabled(Feature::Plugins) {
return Ok(Vec::new());
}
let plugins_manager = PluginsManager::new(config.codex_home.clone());
let configured_plugin_ids = config
.tool_suggest
.discoverables
.iter()
.filter(|discoverable| discoverable.kind == ToolSuggestDiscoverableType::Plugin)
.map(|discoverable| discoverable.id.as_str())
.collect::<HashSet<_>>();
let marketplaces = plugins_manager
.list_marketplaces_for_config(config, &[])
.context("failed to list plugin marketplaces for tool suggestions")?;
let Some(curated_marketplace) = marketplaces
.into_iter()
.find(|marketplace| marketplace.name == OPENAI_CURATED_MARKETPLACE_NAME)
else {
return Ok(Vec::new());
};
let mut discoverable_plugins = Vec::<PluginCapabilitySummary>::new();
for plugin in curated_marketplace.plugins {
if plugin.installed
|| (!TOOL_SUGGEST_DISCOVERABLE_PLUGIN_ALLOWLIST.contains(&plugin.id.as_str())
&& !configured_plugin_ids.contains(plugin.id.as_str()))
{
continue;
}
let plugin_id = plugin.id.clone();
let plugin_name = plugin.name.clone();
match plugins_manager.read_plugin_for_config(
config,
&PluginReadRequest {
plugin_name,
marketplace_path: curated_marketplace.path.clone(),
},
) {
Ok(plugin) => discoverable_plugins.push(plugin.plugin.into()),
Err(err) => warn!("failed to load discoverable plugin suggestion {plugin_id}: {err:#}"),
}
}
discoverable_plugins.sort_by(|left, right| {
left.display_name
.cmp(&right.display_name)
.then_with(|| left.config_name.cmp(&right.config_name))
});
Ok(discoverable_plugins)
list_discoverable(
plugins_manager.inner(),
&plugins_manager.load_request(config),
&config.tool_suggest.discoverables,
)
}
#[cfg(test)]

View File

@@ -6,6 +6,7 @@ use crate::plugins::test_support::write_file;
use crate::plugins::test_support::write_openai_curated_marketplace;
use crate::plugins::test_support::write_plugins_feature_config;
use crate::tools::discoverable::DiscoverablePluginInfo;
use codex_capabilities::plugins::curated_plugins_repo_path;
use codex_utils_absolute_path::AbsolutePathBuf;
use pretty_assertions::assert_eq;
use tempfile::tempdir;
@@ -13,7 +14,7 @@ use tempfile::tempdir;
#[tokio::test]
async fn list_tool_suggest_discoverable_plugins_returns_uninstalled_curated_plugins() {
let codex_home = tempdir().expect("tempdir should succeed");
let curated_root = crate::plugins::curated_plugins_repo_path(codex_home.path());
let curated_root = curated_plugins_repo_path(codex_home.path());
write_openai_curated_marketplace(&curated_root, &["sample", "slack"]);
write_plugins_feature_config(codex_home.path());
@@ -42,7 +43,7 @@ async fn list_tool_suggest_discoverable_plugins_returns_uninstalled_curated_plug
#[tokio::test]
async fn list_tool_suggest_discoverable_plugins_returns_empty_when_plugins_feature_disabled() {
let codex_home = tempdir().expect("tempdir should succeed");
let curated_root = crate::plugins::curated_plugins_repo_path(codex_home.path());
let curated_root = curated_plugins_repo_path(codex_home.path());
write_openai_curated_marketplace(&curated_root, &["slack"]);
let config = load_plugins_config(codex_home.path()).await;
@@ -58,7 +59,7 @@ async fn list_tool_suggest_discoverable_plugins_returns_empty_when_plugins_featu
#[tokio::test]
async fn list_tool_suggest_discoverable_plugins_normalizes_description() {
let codex_home = tempdir().expect("tempdir should succeed");
let curated_root = crate::plugins::curated_plugins_repo_path(codex_home.path());
let curated_root = curated_plugins_repo_path(codex_home.path());
write_openai_curated_marketplace(&curated_root, &["slack"]);
write_plugins_feature_config(codex_home.path());
write_file(
@@ -92,7 +93,7 @@ async fn list_tool_suggest_discoverable_plugins_normalizes_description() {
#[tokio::test]
async fn list_tool_suggest_discoverable_plugins_omits_installed_curated_plugins() {
let codex_home = tempdir().expect("tempdir should succeed");
let curated_root = crate::plugins::curated_plugins_repo_path(codex_home.path());
let curated_root = curated_plugins_repo_path(codex_home.path());
write_openai_curated_marketplace(&curated_root, &["slack"]);
write_curated_plugin_sha(codex_home.path());
write_plugins_feature_config(codex_home.path());
@@ -121,7 +122,7 @@ async fn list_tool_suggest_discoverable_plugins_omits_installed_curated_plugins(
#[tokio::test]
async fn list_tool_suggest_discoverable_plugins_includes_configured_plugin_ids() {
let codex_home = tempdir().expect("tempdir should succeed");
let curated_root = crate::plugins::curated_plugins_repo_path(codex_home.path());
let curated_root = curated_plugins_repo_path(codex_home.path());
write_openai_curated_marketplace(&curated_root, &["sample"]);
write_file(
&codex_home.path().join(crate::config::CONFIG_TOML_FILE),

File diff suppressed because it is too large Load Diff

View File

@@ -13,8 +13,12 @@ use crate::plugins::test_support::write_curated_plugin_sha_with as write_curated
use crate::plugins::test_support::write_file;
use crate::plugins::test_support::write_openai_curated_marketplace;
use codex_app_server_protocol::ConfigLayerSource;
use codex_capabilities::plugins::curated_plugins_repo_path;
use codex_capabilities::plugins::plugin_namespace_for_skill_path;
use pretty_assertions::assert_eq;
use std::collections::HashMap;
use std::fs;
use std::path::Path;
use tempfile::TempDir;
use toml::Value;
use wiremock::Mock;

View File

@@ -1,16 +1,20 @@
mod discoverable;
mod injection;
mod manager;
mod manifest;
mod marketplace;
mod remote;
mod render;
mod startup_sync;
mod store;
#[cfg(test)]
pub(crate) mod test_support;
mod toggles;
pub use codex_capabilities::plugins::MarketplaceError;
pub use codex_capabilities::plugins::MarketplacePluginAuthPolicy;
pub use codex_capabilities::plugins::MarketplacePluginInstallPolicy;
pub use codex_capabilities::plugins::MarketplacePluginPolicy;
pub use codex_capabilities::plugins::MarketplacePluginSource;
pub use codex_capabilities::plugins::PluginId;
pub use codex_capabilities::plugins::PluginManifestInterface;
pub use codex_capabilities::plugins::RemotePluginFetchError;
pub use codex_capabilities::plugins::collect_plugin_enabled_candidates;
pub use codex_capabilities::plugins::fetch_remote_featured_plugin_ids;
pub(crate) use discoverable::list_tool_suggest_discoverable_plugins;
pub(crate) use injection::build_plugin_injections;
pub use manager::AppConnectorId;
@@ -34,22 +38,6 @@ pub use manager::RemotePluginSyncResult;
pub use manager::installed_plugin_telemetry_metadata;
pub use manager::load_plugin_apps;
pub use manager::load_plugin_mcp_servers;
pub(crate) use manager::plugin_namespace_for_skill_path;
pub use manager::plugin_telemetry_metadata_from_root;
pub use manifest::PluginManifestInterface;
pub(crate) use manifest::PluginManifestPaths;
pub(crate) use manifest::load_plugin_manifest;
pub use marketplace::MarketplaceError;
pub use marketplace::MarketplacePluginAuthPolicy;
pub use marketplace::MarketplacePluginInstallPolicy;
pub use marketplace::MarketplacePluginPolicy;
pub use marketplace::MarketplacePluginSource;
pub use remote::RemotePluginFetchError;
pub use remote::fetch_remote_featured_plugin_ids;
pub(crate) use render::render_explicit_plugin_instructions;
pub(crate) use render::render_plugins_section;
pub(crate) use startup_sync::curated_plugins_repo_path;
pub(crate) use startup_sync::read_curated_plugins_sha;
pub(crate) use startup_sync::sync_openai_plugins_repo;
pub use store::PluginId;
pub use toggles::collect_plugin_enabled_candidates;

View File

@@ -1,91 +1,5 @@
use crate::plugins::PluginCapabilitySummary;
use codex_protocol::protocol::PLUGINS_INSTRUCTIONS_CLOSE_TAG;
use codex_protocol::protocol::PLUGINS_INSTRUCTIONS_OPEN_TAG;
/// Renders the "## Plugins" section of the system prompt for the given plugins.
///
/// Returns `None` when `plugins` is empty; otherwise lists each plugin (with
/// its optional description) followed by fixed usage guidance, wrapped in the
/// protocol's plugin-instruction open/close tags.
pub(crate) fn render_plugins_section(plugins: &[PluginCapabilitySummary]) -> Option<String> {
    if plugins.is_empty() {
        return None;
    }
    let mut lines = vec![
        "## Plugins".to_string(),
        "A plugin is a local bundle of skills, MCP servers, and apps. Below is the list of plugins that are enabled and available in this session.".to_string(),
        "### Available plugins".to_string(),
    ];
    // One bullet per plugin; the description is appended only when present.
    lines.extend(
        plugins
            .iter()
            .map(|plugin| match plugin.description.as_deref() {
                Some(description) => format!("- `{}`: {description}", plugin.display_name),
                None => format!("- `{}`", plugin.display_name),
            }),
    );
    lines.push("### How to use plugins".to_string());
    lines.push(
        r###"- Discovery: The list above is the plugins available in this session.
- Skill naming: If a plugin contributes skills, those skill entries are prefixed with `plugin_name:` in the Skills list.
- Trigger rules: If the user explicitly names a plugin, prefer capabilities associated with that plugin for that turn.
- Relationship to capabilities: Plugins are not invoked directly. Use their underlying skills, MCP tools, and app tools to help solve the task.
- Preference: When a relevant plugin is available, prefer using capabilities associated with that plugin over standalone capabilities that provide similar functionality.
- Missing/blocked: If the user requests a plugin that is not listed above, or the plugin does not have relevant callable capabilities for the task, say so briefly and continue with the best fallback."###
            .to_string(),
    );
    let body = lines.join("\n");
    Some(format!(
        "{PLUGINS_INSTRUCTIONS_OPEN_TAG}\n{body}\n{PLUGINS_INSTRUCTIONS_CLOSE_TAG}"
    ))
}
/// Renders per-turn instructions for a plugin the user explicitly named.
///
/// Lists the plugin's skill prefix, and any MCP servers / apps it contributes
/// that are available this session. Returns `None` when the plugin has no
/// callable capabilities to advertise (only the header line would remain).
pub(crate) fn render_explicit_plugin_instructions(
    plugin: &PluginCapabilitySummary,
    available_mcp_servers: &[String],
    available_apps: &[String],
) -> Option<String> {
    let mut lines = vec![format!(
        "Capabilities from the `{}` plugin:",
        plugin.display_name
    )];
    if plugin.has_skills {
        lines.push(format!(
            "- Skills from this plugin are prefixed with `{}:`.",
            plugin.display_name
        ));
    }
    if !available_mcp_servers.is_empty() {
        lines.push(format!(
            "- MCP servers from this plugin available in this session: {}.",
            available_mcp_servers
                .iter()
                .map(|server| format!("`{server}`"))
                .collect::<Vec<_>>()
                .join(", ")
        ));
    }
    if !available_apps.is_empty() {
        lines.push(format!(
            "- Apps from this plugin available in this session: {}.",
            available_apps
                .iter()
                .map(|app| format!("`{app}`"))
                .collect::<Vec<_>>()
                .join(", ")
        ));
    }
    // Only the header line present means the plugin contributed nothing usable.
    if lines.len() == 1 {
        return None;
    }
    lines.push("Use these plugin-associated capabilities to help solve the task.".to_string());
    Some(lines.join("\n"))
}
pub(crate) use codex_capabilities::plugins::render_explicit_plugin_instructions;
pub(crate) use codex_capabilities::plugins::render_plugins_section;
#[cfg(test)]
#[path = "render_tests.rs"]

View File

@@ -173,13 +173,7 @@ fn detect_skill_script_run(
};
let script_path = normalize_path(script_path.as_path());
for ancestor in script_path.ancestors() {
if let Some(candidate) = outcome.implicit_skills_by_scripts_dir.get(ancestor) {
return Some(candidate.clone());
}
}
None
outcome.implicit_skill_for_script_path(script_path.as_path())
}
fn detect_skill_doc_read(
@@ -201,8 +195,8 @@ fn detect_skill_doc_read(
} else {
normalize_path(&workdir.join(path))
};
if let Some(candidate) = outcome.implicit_skills_by_doc_path.get(&candidate_path) {
return Some(candidate.clone());
if let Some(candidate) = outcome.implicit_skill_for_doc_path(candidate_path.as_path()) {
return Some(candidate);
}
}

View File

@@ -8,7 +8,6 @@ use pretty_assertions::assert_eq;
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
fn test_skill_metadata(skill_doc_path: PathBuf) -> SkillMetadata {
SkillMetadata {
@@ -52,11 +51,10 @@ fn skill_doc_read_detection_matches_absolute_path() {
let skill_doc_path = PathBuf::from("/tmp/skill-test/SKILL.md");
let normalized_skill_doc_path = normalize_path(skill_doc_path.as_path());
let skill = test_skill_metadata(skill_doc_path);
let outcome = SkillLoadOutcome {
implicit_skills_by_scripts_dir: Arc::new(HashMap::new()),
implicit_skills_by_doc_path: Arc::new(HashMap::from([(normalized_skill_doc_path, skill)])),
..Default::default()
};
let outcome = SkillLoadOutcome::with_implicit_skill_indexes(
HashMap::new(),
HashMap::from([(normalized_skill_doc_path, skill)]),
);
let tokens = vec![
"cat".to_string(),
@@ -77,11 +75,10 @@ fn skill_script_run_detection_matches_relative_path_from_skill_root() {
let skill_doc_path = PathBuf::from("/tmp/skill-test/SKILL.md");
let scripts_dir = normalize_path(Path::new("/tmp/skill-test/scripts"));
let skill = test_skill_metadata(skill_doc_path);
let outcome = SkillLoadOutcome {
implicit_skills_by_scripts_dir: Arc::new(HashMap::from([(scripts_dir, skill)])),
implicit_skills_by_doc_path: Arc::new(HashMap::new()),
..Default::default()
};
let outcome = SkillLoadOutcome::with_implicit_skill_indexes(
HashMap::from([(scripts_dir, skill)]),
HashMap::new(),
);
let tokens = vec![
"python3".to_string(),
"scripts/fetch_comments.py".to_string(),
@@ -100,11 +97,10 @@ fn skill_script_run_detection_matches_absolute_path_from_any_workdir() {
let skill_doc_path = PathBuf::from("/tmp/skill-test/SKILL.md");
let scripts_dir = normalize_path(Path::new("/tmp/skill-test/scripts"));
let skill = test_skill_metadata(skill_doc_path);
let outcome = SkillLoadOutcome {
implicit_skills_by_scripts_dir: Arc::new(HashMap::from([(scripts_dir, skill)])),
implicit_skills_by_doc_path: Arc::new(HashMap::new()),
..Default::default()
};
let outcome = SkillLoadOutcome::with_implicit_skill_indexes(
HashMap::from([(scripts_dir, skill)]),
HashMap::new(),
);
let tokens = vec![
"python3".to_string(),
"/tmp/skill-test/scripts/fetch_comments.py".to_string(),

View File

@@ -1,926 +1 @@
use crate::config_loader::ConfigLayerStack;
use crate::config_loader::ConfigLayerStackOrdering;
use crate::config_loader::default_project_root_markers;
use crate::config_loader::merge_toml_values;
use crate::config_loader::project_root_markers_from_config;
use crate::plugins::plugin_namespace_for_skill_path;
use crate::skills::model::SkillDependencies;
use crate::skills::model::SkillError;
use crate::skills::model::SkillInterface;
use crate::skills::model::SkillLoadOutcome;
use crate::skills::model::SkillManagedNetworkOverride;
use crate::skills::model::SkillMetadata;
use crate::skills::model::SkillPolicy;
use crate::skills::model::SkillToolDependency;
use crate::skills::system::system_cache_root_dir;
use codex_app_server_protocol::ConfigLayerSource;
use codex_protocol::models::FileSystemPermissions;
use codex_protocol::models::MacOsSeatbeltProfileExtensions;
use codex_protocol::models::NetworkPermissions;
use codex_protocol::models::PermissionProfile;
use codex_protocol::protocol::Product;
use codex_protocol::protocol::SkillScope;
use codex_utils_absolute_path::AbsolutePathBufGuard;
use dirs::home_dir;
use dunce::canonicalize as canonicalize_path;
use serde::Deserialize;
use std::collections::HashSet;
use std::collections::VecDeque;
use std::error::Error;
use std::fmt;
use std::fs;
use std::path::Component;
use std::path::Path;
use std::path::PathBuf;
use toml::Value as TomlValue;
use tracing::error;
#[cfg(test)]
use crate::config::Config;
/// YAML frontmatter parsed from the top of a `SKILL.md` file.
#[derive(Debug, Deserialize)]
struct SkillFrontmatter {
    // Optional display name; parsing falls back to the directory name.
    #[serde(default)]
    name: Option<String>,
    #[serde(default)]
    description: Option<String>,
    // Nested `metadata:` table (e.g. `short-description`).
    #[serde(default)]
    metadata: SkillFrontmatterMetadata,
}
/// The `metadata:` table inside the SKILL.md frontmatter.
#[derive(Debug, Default, Deserialize)]
struct SkillFrontmatterMetadata {
    // Serialized as `short-description` (kebab-case) in YAML.
    #[serde(default, rename = "short-description")]
    short_description: Option<String>,
}
/// On-disk shape of the optional `agents/openai.yaml` metadata file that can
/// accompany a skill. All sections are optional; parsing fails open.
#[derive(Debug, Default, Deserialize)]
struct SkillMetadataFile {
    #[serde(default)]
    interface: Option<Interface>,
    #[serde(default)]
    dependencies: Option<Dependencies>,
    #[serde(default)]
    policy: Option<Policy>,
    #[serde(default)]
    permissions: Option<SkillPermissionProfile>,
}
/// Validated, resolved form of [`SkillMetadataFile`], ready to be attached to a
/// `SkillMetadata`. Defaults to all-`None` when the metadata file is absent or invalid.
#[derive(Default)]
struct LoadedSkillMetadata {
    interface: Option<SkillInterface>,
    dependencies: Option<SkillDependencies>,
    policy: Option<SkillPolicy>,
    permission_profile: Option<PermissionProfile>,
    managed_network_override: Option<SkillManagedNetworkOverride>,
}
/// `permissions:` section of the skill metadata file.
#[derive(Debug, Default, Deserialize, PartialEq, Eq)]
struct SkillPermissionProfile {
    #[serde(default)]
    network: Option<SkillNetworkPermissions>,
    #[serde(default)]
    file_system: Option<FileSystemPermissions>,
    // macOS-specific seatbelt profile extensions.
    #[serde(default)]
    macos: Option<MacOsSeatbeltProfileExtensions>,
}
/// `permissions.network:` section; domain lists feed the managed-network override.
#[derive(Debug, Default, Deserialize, PartialEq, Eq)]
struct SkillNetworkPermissions {
    #[serde(default)]
    enabled: Option<bool>,
    #[serde(default)]
    allowed_domains: Option<Vec<String>>,
    #[serde(default)]
    denied_domains: Option<Vec<String>>,
}
/// `interface:` section — optional presentation fields for the skill.
#[derive(Debug, Default, Deserialize)]
struct Interface {
    display_name: Option<String>,
    short_description: Option<String>,
    // Icon paths must be relative paths under the skill's `assets/` directory.
    icon_small: Option<PathBuf>,
    icon_large: Option<PathBuf>,
    // Expected `#RRGGBB` hex color.
    brand_color: Option<String>,
    default_prompt: Option<String>,
}
/// `dependencies:` section — tools the skill relies on.
#[derive(Debug, Default, Deserialize)]
struct Dependencies {
    #[serde(default)]
    tools: Vec<DependencyTool>,
}
/// `policy:` section — invocation and product-restriction rules.
#[derive(Debug, Deserialize)]
struct Policy {
    #[serde(default)]
    allow_implicit_invocation: Option<bool>,
    #[serde(default)]
    products: Vec<Product>,
}
/// A single entry under `dependencies.tools`. `type` is renamed because it is a
/// Rust keyword.
#[derive(Debug, Default, Deserialize)]
struct DependencyTool {
    #[serde(rename = "type")]
    kind: Option<String>,
    value: Option<String>,
    description: Option<String>,
    transport: Option<String>,
    command: Option<String>,
    url: Option<String>,
}
// File and directory names used during skill discovery.
const SKILLS_FILENAME: &str = "SKILL.md";
const AGENTS_DIR_NAME: &str = ".agents";
const SKILLS_METADATA_DIR: &str = "agents";
const SKILLS_METADATA_FILENAME: &str = "openai.yaml";
const SKILLS_DIR_NAME: &str = "skills";
// Character-count limits enforced on parsed skill fields.
const MAX_NAME_LEN: usize = 64;
const MAX_DESCRIPTION_LEN: usize = 1024;
const MAX_SHORT_DESCRIPTION_LEN: usize = MAX_DESCRIPTION_LEN;
const MAX_DEFAULT_PROMPT_LEN: usize = MAX_DESCRIPTION_LEN;
const MAX_DEPENDENCY_TYPE_LEN: usize = MAX_NAME_LEN;
const MAX_DEPENDENCY_TRANSPORT_LEN: usize = MAX_NAME_LEN;
const MAX_DEPENDENCY_VALUE_LEN: usize = MAX_DESCRIPTION_LEN;
const MAX_DEPENDENCY_DESCRIPTION_LEN: usize = MAX_DESCRIPTION_LEN;
const MAX_DEPENDENCY_COMMAND_LEN: usize = MAX_DESCRIPTION_LEN;
const MAX_DEPENDENCY_URL_LEN: usize = MAX_DESCRIPTION_LEN;
// Traversal depth from the skills root.
const MAX_SCAN_DEPTH: usize = 6;
// Safety valve so a pathological directory tree cannot stall discovery.
const MAX_SKILLS_DIRS_PER_ROOT: usize = 2000;
/// Reasons a `SKILL.md` file could not be parsed into `SkillMetadata`.
#[derive(Debug)]
enum SkillParseError {
    Read(std::io::Error),
    MissingFrontmatter,
    InvalidYaml(serde_yaml::Error),
    // A required field was absent or empty.
    MissingField(&'static str),
    // A field was present but failed validation (e.g. too long).
    InvalidField { field: &'static str, reason: String },
}
// Human-readable messages; surfaced to users via `SkillError::message`.
impl fmt::Display for SkillParseError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            SkillParseError::Read(e) => write!(f, "failed to read file: {e}"),
            SkillParseError::MissingFrontmatter => {
                write!(f, "missing YAML frontmatter delimited by ---")
            }
            SkillParseError::InvalidYaml(e) => write!(f, "invalid YAML: {e}"),
            SkillParseError::MissingField(field) => write!(f, "missing field `{field}`"),
            SkillParseError::InvalidField { field, reason } => {
                write!(f, "invalid {field}: {reason}")
            }
        }
    }
}
impl Error for SkillParseError {}
/// A directory to scan for skills, tagged with the scope its skills belong to.
pub(crate) struct SkillRoot {
    pub(crate) path: PathBuf,
    pub(crate) scope: SkillScope,
}
/// Discovers and parses skills under every root, deduplicates by `SKILL.md`
/// path (first occurrence wins, so root order matters), and sorts the result
/// by scope priority, then name, then path.
pub(crate) fn load_skills_from_roots<I>(roots: I) -> SkillLoadOutcome
where
    I: IntoIterator<Item = SkillRoot>,
{
    let mut outcome = SkillLoadOutcome::default();
    for root in roots {
        discover_skills_under_root(&root.path, root.scope, &mut outcome);
    }
    // Drop later duplicates of the same SKILL.md file.
    let mut seen: HashSet<PathBuf> = HashSet::new();
    outcome
        .skills
        .retain(|skill| seen.insert(skill.path_to_skills_md.clone()));
    fn scope_rank(scope: SkillScope) -> u8 {
        // Higher-priority scopes first (matches root scan order for dedupe).
        match scope {
            SkillScope::Repo => 0,
            SkillScope::User => 1,
            SkillScope::System => 2,
            SkillScope::Admin => 3,
        }
    }
    outcome.skills.sort_by(|a, b| {
        scope_rank(a.scope)
            .cmp(&scope_rank(b.scope))
            .then_with(|| a.name.cmp(&b.name))
            .then_with(|| a.path_to_skills_md.cmp(&b.path_to_skills_md))
    });
    outcome
}
/// Computes the full list of skill roots for the current user, delegating to
/// [`skill_roots_with_home_dir`] with the real home directory.
pub(crate) fn skill_roots(
    config_layer_stack: &ConfigLayerStack,
    cwd: &Path,
    plugin_skill_roots: Vec<PathBuf>,
) -> Vec<SkillRoot> {
    skill_roots_with_home_dir(
        config_layer_stack,
        cwd,
        home_dir().as_deref(),
        plugin_skill_roots,
    )
}
/// Assembles skill roots from the config-layer stack, plugin-contributed roots
/// (treated as user scope), and repo-local `.agents/skills` directories, then
/// dedupes by path while preserving precedence order.
fn skill_roots_with_home_dir(
    config_layer_stack: &ConfigLayerStack,
    cwd: &Path,
    home_dir: Option<&Path>,
    plugin_skill_roots: Vec<PathBuf>,
) -> Vec<SkillRoot> {
    let mut roots = skill_roots_from_layer_stack_inner(config_layer_stack, home_dir);
    roots.extend(plugin_skill_roots.into_iter().map(|path| SkillRoot {
        path,
        scope: SkillScope::User,
    }));
    roots.extend(repo_agents_skill_roots(config_layer_stack, cwd));
    dedupe_skill_roots_by_path(&mut roots);
    roots
}
/// Maps each config layer to its skill directories, highest precedence first.
/// Project layers contribute repo-scoped roots, the user layer contributes the
/// legacy `$CODEX_HOME/skills`, `$HOME/.agents/skills`, and the system cache,
/// and the system layer contributes admin-scoped `/etc/codex/skills`.
fn skill_roots_from_layer_stack_inner(
    config_layer_stack: &ConfigLayerStack,
    home_dir: Option<&Path>,
) -> Vec<SkillRoot> {
    let mut roots = Vec::new();
    for layer in config_layer_stack.get_layers(
        ConfigLayerStackOrdering::HighestPrecedenceFirst,
        /*include_disabled*/ true,
    ) {
        let Some(config_folder) = layer.config_folder() else {
            continue;
        };
        match &layer.name {
            ConfigLayerSource::Project { .. } => {
                roots.push(SkillRoot {
                    path: config_folder.as_path().join(SKILLS_DIR_NAME),
                    scope: SkillScope::Repo,
                });
            }
            ConfigLayerSource::User { .. } => {
                // Deprecated user skills location (`$CODEX_HOME/skills`), kept for backward
                // compatibility.
                roots.push(SkillRoot {
                    path: config_folder.as_path().join(SKILLS_DIR_NAME),
                    scope: SkillScope::User,
                });
                // `$HOME/.agents/skills` (user-installed skills).
                if let Some(home_dir) = home_dir {
                    roots.push(SkillRoot {
                        path: home_dir.join(AGENTS_DIR_NAME).join(SKILLS_DIR_NAME),
                        scope: SkillScope::User,
                    });
                }
                // Embedded system skills are cached under `$CODEX_HOME/skills/.system` and are a
                // special case (not a config layer).
                roots.push(SkillRoot {
                    path: system_cache_root_dir(config_folder.as_path()),
                    scope: SkillScope::System,
                });
            }
            ConfigLayerSource::System { .. } => {
                // The system config layer lives under `/etc/codex/` on Unix, so treat
                // `/etc/codex/skills` as admin-scoped skills.
                roots.push(SkillRoot {
                    path: config_folder.as_path().join(SKILLS_DIR_NAME),
                    scope: SkillScope::Admin,
                });
            }
            // These layers never contribute skill directories.
            ConfigLayerSource::Mdm { .. }
            | ConfigLayerSource::SessionFlags
            | ConfigLayerSource::LegacyManagedConfigTomlFromFile { .. }
            | ConfigLayerSource::LegacyManagedConfigTomlFromMdm => {}
        }
    }
    roots
}
/// Finds `.agents/skills` directories between the detected project root and
/// `cwd` (inclusive), returning each existing one as a repo-scoped root.
fn repo_agents_skill_roots(config_layer_stack: &ConfigLayerStack, cwd: &Path) -> Vec<SkillRoot> {
    let project_root_markers = project_root_markers_from_stack(config_layer_stack);
    let project_root = find_project_root(cwd, &project_root_markers);
    let dirs = dirs_between_project_root_and_cwd(cwd, &project_root);
    let mut roots = Vec::new();
    for dir in dirs {
        let agents_skills = dir.join(AGENTS_DIR_NAME).join(SKILLS_DIR_NAME);
        // Only include directories that actually exist on disk.
        if agents_skills.is_dir() {
            roots.push(SkillRoot {
                path: agents_skills,
                scope: SkillScope::Repo,
            });
        }
    }
    roots
}
/// Merges all non-project, enabled config layers (lowest precedence first) and
/// reads `project_root_markers` from the result, falling back to the defaults
/// when the setting is absent or invalid. Project layers are excluded so that
/// a repo cannot redefine how its own root is detected.
fn project_root_markers_from_stack(config_layer_stack: &ConfigLayerStack) -> Vec<String> {
    let mut merged = TomlValue::Table(toml::map::Map::new());
    for layer in config_layer_stack.get_layers(
        ConfigLayerStackOrdering::LowestPrecedenceFirst,
        /*include_disabled*/ false,
    ) {
        if matches!(layer.name, ConfigLayerSource::Project { .. }) {
            continue;
        }
        merge_toml_values(&mut merged, &layer.config);
    }
    match project_root_markers_from_config(&merged) {
        Ok(Some(markers)) => markers,
        Ok(None) => default_project_root_markers(),
        Err(err) => {
            tracing::warn!("invalid project_root_markers: {err}");
            default_project_root_markers()
        }
    }
}
/// Walks from `cwd` up through its ancestors and returns the first directory
/// containing any of the marker entries (e.g. `.git`). Falls back to `cwd`
/// itself when no markers are configured or none is found.
fn find_project_root(cwd: &Path, project_root_markers: &[String]) -> PathBuf {
    if project_root_markers.is_empty() {
        return cwd.to_path_buf();
    }
    cwd.ancestors()
        .find(|ancestor| {
            project_root_markers
                .iter()
                .any(|marker| ancestor.join(marker).exists())
        })
        .map(Path::to_path_buf)
        .unwrap_or_else(|| cwd.to_path_buf())
}
/// Returns every directory from the project root down to `cwd`, inclusive,
/// ordered root-first. If `project_root` is not an ancestor of `cwd`, all of
/// `cwd`'s ancestors are returned (matching the original scan-until-match
/// behavior).
fn dirs_between_project_root_and_cwd(cwd: &Path, project_root: &Path) -> Vec<PathBuf> {
    let mut collected: Vec<PathBuf> = Vec::new();
    for ancestor in cwd.ancestors() {
        collected.push(ancestor.to_path_buf());
        // Include the project root itself, then stop climbing.
        if ancestor == project_root {
            break;
        }
    }
    collected.reverse();
    collected
}
fn dedupe_skill_roots_by_path(roots: &mut Vec<SkillRoot>) {
let mut seen: HashSet<PathBuf> = HashSet::new();
roots.retain(|root| seen.insert(root.path.clone()));
}
/// Breadth-first scan of `root` for `SKILL.md` files, appending parsed skills
/// (and non-system parse errors) to `outcome`.
///
/// Bounded by `MAX_SCAN_DEPTH` and `MAX_SKILLS_DIRS_PER_ROOT`; hidden entries
/// are skipped; directories are canonicalized and tracked in a visited set so
/// symlink cycles cannot loop the scan.
fn discover_skills_under_root(root: &Path, scope: SkillScope, outcome: &mut SkillLoadOutcome) {
    // A root that cannot be canonicalized (e.g. does not exist) yields nothing.
    let Ok(root) = canonicalize_path(root) else {
        return;
    };
    if !root.is_dir() {
        return;
    }
    // Pushes a directory onto the BFS queue, enforcing depth/count limits and
    // skipping directories already visited (cycle protection).
    fn enqueue_dir(
        queue: &mut VecDeque<(PathBuf, usize)>,
        visited_dirs: &mut HashSet<PathBuf>,
        truncated_by_dir_limit: &mut bool,
        path: PathBuf,
        depth: usize,
    ) {
        if depth > MAX_SCAN_DEPTH {
            return;
        }
        if visited_dirs.len() >= MAX_SKILLS_DIRS_PER_ROOT {
            *truncated_by_dir_limit = true;
            return;
        }
        if visited_dirs.insert(path.clone()) {
            queue.push_back((path, depth));
        }
    }
    // Follow symlinked directories for user, admin, and repo skills. System skills are written by Codex itself.
    let follow_symlinks = matches!(
        scope,
        SkillScope::Repo | SkillScope::User | SkillScope::Admin
    );
    let mut visited_dirs: HashSet<PathBuf> = HashSet::new();
    visited_dirs.insert(root.clone());
    let mut queue: VecDeque<(PathBuf, usize)> = VecDeque::from([(root.clone(), 0)]);
    let mut truncated_by_dir_limit = false;
    while let Some((dir, depth)) = queue.pop_front() {
        let entries = match fs::read_dir(&dir) {
            Ok(entries) => entries,
            Err(e) => {
                // Unreadable directories are logged and skipped, not fatal.
                error!("failed to read skills dir {}: {e:#}", dir.display());
                continue;
            }
        };
        for entry in entries.flatten() {
            let path = entry.path();
            let file_name = match path.file_name().and_then(|f| f.to_str()) {
                Some(name) => name,
                None => continue,
            };
            // Skip hidden files and directories.
            if file_name.starts_with('.') {
                continue;
            }
            let Ok(file_type) = entry.file_type() else {
                continue;
            };
            if file_type.is_symlink() {
                if !follow_symlinks {
                    continue;
                }
                // Follow the symlink to determine what it points to.
                let metadata = match fs::metadata(&path) {
                    Ok(metadata) => metadata,
                    Err(e) => {
                        error!(
                            "failed to stat skills entry {} (symlink): {e:#}",
                            path.display()
                        );
                        continue;
                    }
                };
                if metadata.is_dir() {
                    let Ok(resolved_dir) = canonicalize_path(&path) else {
                        continue;
                    };
                    enqueue_dir(
                        &mut queue,
                        &mut visited_dirs,
                        &mut truncated_by_dir_limit,
                        resolved_dir,
                        depth + 1,
                    );
                    continue;
                }
                // Symlinks to files are ignored.
                continue;
            }
            if file_type.is_dir() {
                let Ok(resolved_dir) = canonicalize_path(&path) else {
                    continue;
                };
                enqueue_dir(
                    &mut queue,
                    &mut visited_dirs,
                    &mut truncated_by_dir_limit,
                    resolved_dir,
                    depth + 1,
                );
                continue;
            }
            if file_type.is_file() && file_name == SKILLS_FILENAME {
                match parse_skill_file(&path, scope) {
                    Ok(skill) => {
                        outcome.skills.push(skill);
                    }
                    Err(err) => {
                        // System skills are Codex-managed; their parse errors
                        // are not surfaced to users.
                        if scope != SkillScope::System {
                            outcome.errors.push(SkillError {
                                path,
                                message: err.to_string(),
                            });
                        }
                    }
                }
            }
        }
    }
    if truncated_by_dir_limit {
        tracing::warn!(
            "skills scan truncated after {} directories (root: {})",
            MAX_SKILLS_DIRS_PER_ROOT,
            root.display()
        );
    }
}
/// Parses a single `SKILL.md` into `SkillMetadata`.
///
/// Reads the YAML frontmatter, derives and namespaces the skill name, loads the
/// optional sidecar metadata file, and validates field lengths. Errors cover
/// read failures, missing/invalid frontmatter, and field validation.
fn parse_skill_file(path: &Path, scope: SkillScope) -> Result<SkillMetadata, SkillParseError> {
    let contents = fs::read_to_string(path).map_err(SkillParseError::Read)?;
    let frontmatter = extract_frontmatter(&contents).ok_or(SkillParseError::MissingFrontmatter)?;
    let parsed: SkillFrontmatter =
        serde_yaml::from_str(&frontmatter).map_err(SkillParseError::InvalidYaml)?;
    // Frontmatter name if non-empty after sanitization, else the directory name.
    let base_name = parsed
        .name
        .as_deref()
        .map(sanitize_single_line)
        .filter(|value| !value.is_empty())
        .unwrap_or_else(|| default_skill_name(path));
    // Plugin-owned skills get a `plugin:` prefix.
    let name = namespaced_skill_name(path, &base_name);
    let description = parsed
        .description
        .as_deref()
        .map(sanitize_single_line)
        .unwrap_or_default();
    let short_description = parsed
        .metadata
        .short_description
        .as_deref()
        .map(sanitize_single_line)
        .filter(|value| !value.is_empty());
    // Optional sidecar metadata (agents/openai.yaml); fails open to defaults.
    let LoadedSkillMetadata {
        interface,
        dependencies,
        policy,
        permission_profile,
        managed_network_override,
    } = load_skill_metadata(path);
    validate_len(&name, MAX_NAME_LEN, "name")?;
    validate_len(&description, MAX_DESCRIPTION_LEN, "description")?;
    if let Some(short_description) = short_description.as_deref() {
        validate_len(
            short_description,
            MAX_SHORT_DESCRIPTION_LEN,
            "metadata.short-description",
        )?;
    }
    let resolved_path = canonicalize_path(path).unwrap_or_else(|_| path.to_path_buf());
    Ok(SkillMetadata {
        name,
        description,
        short_description,
        interface,
        dependencies,
        policy,
        permission_profile,
        managed_network_override,
        path_to_skills_md: resolved_path,
        scope,
    })
}
fn default_skill_name(path: &Path) -> String {
path.parent()
.and_then(Path::file_name)
.and_then(|name| name.to_str())
.map(sanitize_single_line)
.filter(|value| !value.is_empty())
.unwrap_or_else(|| "skill".to_string())
}
/// Prefixes `base_name` with its plugin namespace (`namespace:base_name`) when
/// the skill lives inside a plugin; otherwise returns `base_name` unchanged.
fn namespaced_skill_name(path: &Path, base_name: &str) -> String {
    match plugin_namespace_for_skill_path(path) {
        Some(namespace) => format!("{namespace}:{base_name}"),
        None => base_name.to_string(),
    }
}
/// Loads the optional `agents/openai.yaml` sidecar next to a `SKILL.md`.
///
/// Fail open: optional metadata should not block loading SKILL.md — any read
/// or parse problem is logged and yields the all-`None` default.
fn load_skill_metadata(skill_path: &Path) -> LoadedSkillMetadata {
    // Fail open: optional metadata should not block loading SKILL.md.
    let Some(skill_dir) = skill_path.parent() else {
        return LoadedSkillMetadata::default();
    };
    let metadata_path = skill_dir
        .join(SKILLS_METADATA_DIR)
        .join(SKILLS_METADATA_FILENAME);
    if !metadata_path.exists() {
        return LoadedSkillMetadata::default();
    }
    let contents = match fs::read_to_string(&metadata_path) {
        Ok(contents) => contents,
        Err(error) => {
            tracing::warn!(
                "ignoring {path}: failed to read {label}: {error}",
                path = metadata_path.display(),
                label = SKILLS_METADATA_FILENAME
            );
            return LoadedSkillMetadata::default();
        }
    };
    let parsed: SkillMetadataFile = {
        // Guard so relative paths inside the YAML resolve against the skill dir.
        let _guard = AbsolutePathBufGuard::new(skill_dir);
        match serde_yaml::from_str(&contents) {
            Ok(parsed) => parsed,
            Err(error) => {
                tracing::warn!(
                    "ignoring {path}: invalid {label}: {error}",
                    path = metadata_path.display(),
                    label = SKILLS_METADATA_FILENAME
                );
                return LoadedSkillMetadata::default();
            }
        }
    };
    let SkillMetadataFile {
        interface,
        dependencies,
        policy,
        permissions,
    } = parsed;
    let (permission_profile, managed_network_override) = normalize_permissions(permissions);
    LoadedSkillMetadata {
        interface: resolve_interface(interface, skill_dir),
        dependencies: resolve_dependencies(dependencies),
        policy: resolve_policy(policy),
        permission_profile,
        managed_network_override,
    }
}
/// Splits the raw `permissions:` section into a sandbox `PermissionProfile`
/// and, separately, a managed-network domain override. Empty sub-sections are
/// dropped so an all-empty profile normalizes to `None`.
fn normalize_permissions(
    permissions: Option<SkillPermissionProfile>,
) -> (
    Option<PermissionProfile>,
    Option<SkillManagedNetworkOverride>,
) {
    let Some(permissions) = permissions else {
        return (None, None);
    };
    // Domain allow/deny lists only count when at least one is present.
    let managed_network_override = permissions
        .network
        .as_ref()
        .map(|network| SkillManagedNetworkOverride {
            allowed_domains: network.allowed_domains.clone(),
            denied_domains: network.denied_domains.clone(),
        })
        .filter(SkillManagedNetworkOverride::has_domain_overrides);
    let permission_profile = PermissionProfile {
        network: permissions.network.and_then(|network| {
            let network = NetworkPermissions {
                enabled: network.enabled,
            };
            (!network.is_empty()).then_some(network)
        }),
        file_system: permissions
            .file_system
            .filter(|file_system| !file_system.is_empty()),
        macos: permissions.macos,
    };
    (
        (!permission_profile.is_empty()).then_some(permission_profile),
        managed_network_override,
    )
}
/// Validates the `interface:` section field-by-field (lengths, asset paths,
/// color format); invalid fields are individually dropped with a warning.
/// Returns `None` when no field survives validation.
fn resolve_interface(interface: Option<Interface>, skill_dir: &Path) -> Option<SkillInterface> {
    let interface = interface?;
    let interface = SkillInterface {
        display_name: resolve_str(
            interface.display_name,
            MAX_NAME_LEN,
            "interface.display_name",
        ),
        short_description: resolve_str(
            interface.short_description,
            MAX_SHORT_DESCRIPTION_LEN,
            "interface.short_description",
        ),
        icon_small: resolve_asset_path(skill_dir, "interface.icon_small", interface.icon_small),
        icon_large: resolve_asset_path(skill_dir, "interface.icon_large", interface.icon_large),
        brand_color: resolve_color_str(interface.brand_color, "interface.brand_color"),
        default_prompt: resolve_str(
            interface.default_prompt,
            MAX_DEFAULT_PROMPT_LEN,
            "interface.default_prompt",
        ),
    };
    // An interface with every field stripped is treated as absent.
    let has_fields = interface.display_name.is_some()
        || interface.short_description.is_some()
        || interface.icon_small.is_some()
        || interface.icon_large.is_some()
        || interface.brand_color.is_some()
        || interface.default_prompt.is_some();
    if has_fields { Some(interface) } else { None }
}
fn resolve_dependencies(dependencies: Option<Dependencies>) -> Option<SkillDependencies> {
let dependencies = dependencies?;
let tools: Vec<SkillToolDependency> = dependencies
.tools
.into_iter()
.filter_map(resolve_dependency_tool)
.collect();
if tools.is_empty() {
None
} else {
Some(SkillDependencies { tools })
}
}
fn resolve_policy(policy: Option<Policy>) -> Option<SkillPolicy> {
policy.map(|policy| SkillPolicy {
allow_implicit_invocation: policy.allow_implicit_invocation,
products: policy.products,
})
}
/// Validates a single `dependencies.tools` entry. `type` and `value` are
/// required — missing/invalid values drop the whole entry (with a warning from
/// the resolver); the remaining fields are optional and dropped individually.
fn resolve_dependency_tool(tool: DependencyTool) -> Option<SkillToolDependency> {
    let r#type = resolve_required_str(
        tool.kind,
        MAX_DEPENDENCY_TYPE_LEN,
        "dependencies.tools.type",
    )?;
    let value = resolve_required_str(
        tool.value,
        MAX_DEPENDENCY_VALUE_LEN,
        "dependencies.tools.value",
    )?;
    let description = resolve_str(
        tool.description,
        MAX_DEPENDENCY_DESCRIPTION_LEN,
        "dependencies.tools.description",
    );
    let transport = resolve_str(
        tool.transport,
        MAX_DEPENDENCY_TRANSPORT_LEN,
        "dependencies.tools.transport",
    );
    let command = resolve_str(
        tool.command,
        MAX_DEPENDENCY_COMMAND_LEN,
        "dependencies.tools.command",
    );
    let url = resolve_str(tool.url, MAX_DEPENDENCY_URL_LEN, "dependencies.tools.url");
    Some(SkillToolDependency {
        r#type,
        value,
        description,
        transport,
        command,
        url,
    })
}
/// Validates an icon path from the metadata file and resolves it against the
/// skill directory.
///
/// Icons must be relative paths whose first component is `assets/`; absolute
/// paths, `..` segments, and other non-normal components are rejected (with a
/// warning) to keep icon lookups confined to the skill's own assets.
fn resolve_asset_path(
    skill_dir: &Path,
    field: &'static str,
    path: Option<PathBuf>,
) -> Option<PathBuf> {
    // Icons must be relative paths under the skill's assets/ directory; otherwise return None.
    let path = path?;
    if path.as_os_str().is_empty() {
        return None;
    }
    let assets_dir = skill_dir.join("assets");
    if path.is_absolute() {
        tracing::warn!(
            "ignoring {field}: icon must be a relative assets path (not {})",
            assets_dir.display()
        );
        return None;
    }
    // Normalize: keep plain components, drop `.`, reject `..` and prefixes.
    let mut normalized = PathBuf::new();
    for component in path.components() {
        match component {
            Component::CurDir => {}
            Component::Normal(component) => normalized.push(component),
            Component::ParentDir => {
                tracing::warn!("ignoring {field}: icon path must not contain '..'");
                return None;
            }
            _ => {
                tracing::warn!("ignoring {field}: icon path must be under assets/");
                return None;
            }
        }
    }
    // The first surviving component must literally be `assets`.
    let mut components = normalized.components();
    match components.next() {
        Some(Component::Normal(component)) if component == "assets" => {}
        _ => {
            tracing::warn!("ignoring {field}: icon path must be under assets/");
            return None;
        }
    }
    Some(skill_dir.join(normalized))
}
/// Collapses all runs of whitespace (including newlines and tabs) into single
/// spaces and trims both ends, producing a single-line string.
fn sanitize_single_line(raw: &str) -> String {
    let mut out = String::with_capacity(raw.len());
    for word in raw.split_whitespace() {
        if !out.is_empty() {
            out.push(' ');
        }
        out.push_str(word);
    }
    out
}
/// Validates that a required skill field is non-empty and within `max_len`
/// characters (counted as Unicode scalar values).
///
/// Returns `SkillParseError::MissingField` for an empty value, and
/// `SkillParseError::InvalidField` — with the actual limit in the message —
/// when the value is too long.
fn validate_len(
    value: &str,
    max_len: usize,
    field_name: &'static str,
) -> Result<(), SkillParseError> {
    if value.is_empty() {
        return Err(SkillParseError::MissingField(field_name));
    }
    if value.chars().count() > max_len {
        return Err(SkillParseError::InvalidField {
            field: field_name,
            // Interpolate the real limit; the previous message hard-coded a
            // bogus figure ("72,166") that matched no field's maximum.
            reason: format!("exceeds maximum length of {max_len} characters"),
        });
    }
    Ok(())
}
/// Sanitizes an optional free-form string field to a single line, returning
/// `None` (with a warning) when the value is empty after sanitization or
/// exceeds `max_len` characters.
fn resolve_str(value: Option<String>, max_len: usize, field: &'static str) -> Option<String> {
    let value = value?;
    let value = sanitize_single_line(&value);
    if value.is_empty() {
        tracing::warn!("ignoring {field}: value is empty");
        return None;
    }
    if value.chars().count() > max_len {
        // Report the actual limit; the previous message hard-coded a bogus
        // figure ("72,166") that matched no field's maximum.
        tracing::warn!("ignoring {field}: exceeds maximum length of {max_len} characters");
        return None;
    }
    Some(value)
}
/// Like [`resolve_str`], but warns when the value is absent entirely — used
/// for fields that must be present for the entry to be kept.
fn resolve_required_str(
    value: Option<String>,
    max_len: usize,
    field: &'static str,
) -> Option<String> {
    match value {
        Some(present) => resolve_str(Some(present), max_len, field),
        None => {
            tracing::warn!("ignoring {field}: value is missing");
            None
        }
    }
}
/// Validates a `#RRGGBB` hex color string (exactly 7 bytes, `#` plus six hex
/// digits, after trimming); warns and returns `None` for anything else.
fn resolve_color_str(value: Option<String>, field: &'static str) -> Option<String> {
    let value = value?;
    let value = value.trim();
    if value.is_empty() {
        tracing::warn!("ignoring {field}: value is empty");
        return None;
    }
    let looks_like_hex_color = value.len() == 7
        && value.starts_with('#')
        && value[1..].chars().all(|c| c.is_ascii_hexdigit());
    if looks_like_hex_color {
        return Some(value.to_string());
    }
    tracing::warn!("ignoring {field}: expected #RRGGBB, got {value}");
    None
}
/// Extracts the YAML frontmatter delimited by `---` lines at the very top of
/// `contents`. Returns `None` when the opening delimiter is not the first
/// line, the closing delimiter is never found, or the body is empty.
fn extract_frontmatter(contents: &str) -> Option<String> {
    let mut lines = contents.lines();
    let first = lines.next()?;
    if first.trim() != "---" {
        return None;
    }
    let mut body: Vec<&str> = Vec::new();
    let mut closed = false;
    for line in lines {
        if line.trim() == "---" {
            closed = true;
            break;
        }
        body.push(line);
    }
    // Both conditions must hold: a closing fence and a non-empty body.
    (closed && !body.is_empty()).then(|| body.join("\n"))
}
/// Test-only helper: computes skill roots for a layer stack with a fixed cwd
/// (`.`) and no plugin-contributed roots.
#[cfg(test)]
pub(crate) fn skill_roots_from_layer_stack(
    config_layer_stack: &ConfigLayerStack,
    home_dir: Option<&Path>,
) -> Vec<SkillRoot> {
    skill_roots_with_home_dir(config_layer_stack, Path::new("."), home_dir, Vec::new())
}
#[cfg(test)]
#[path = "loader_tests.rs"]
mod tests;
pub use codex_capabilities::skills::loader::SkillRoot;

View File

@@ -1,39 +1,22 @@
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::RwLock;
use codex_app_server_protocol::ConfigLayerSource;
use codex_protocol::protocol::Product;
use codex_protocol::protocol::SkillScope;
use codex_utils_absolute_path::AbsolutePathBuf;
use toml::Value as TomlValue;
use tracing::info;
use tracing::warn;
use crate::config::Config;
use crate::config::types::SkillsConfig;
use crate::config_loader::CloudRequirementsLoader;
use crate::config_loader::ConfigLayerStackOrdering;
use crate::config_loader::LoaderOverrides;
use crate::config_loader::load_config_layers_state;
use crate::plugins::PluginsManager;
use crate::skills::SkillError;
use crate::skills::SkillLoadOutcome;
use crate::skills::build_implicit_skill_path_indexes;
use crate::skills::loader::SkillRoot;
use crate::skills::loader::load_skills_from_roots;
use crate::skills::loader::skill_roots;
use crate::skills::system::install_system_skills;
use crate::skills::system::uninstall_system_skills;
use codex_capabilities::skills::SkillLoadRequest;
use codex_protocol::protocol::Product;
use codex_utils_absolute_path::AbsolutePathBuf;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use toml::Value as TomlValue;
pub struct SkillsManager {
codex_home: PathBuf,
plugins_manager: Arc<PluginsManager>,
restriction_product: Option<Product>,
cache_by_cwd: RwLock<HashMap<PathBuf, SkillLoadOutcome>>,
cache_by_config: RwLock<HashMap<ConfigSkillsCacheKey, SkillLoadOutcome>>,
inner: codex_capabilities::skills::SkillsManager,
}
impl SkillsManager {
@@ -56,59 +39,40 @@ impl SkillsManager {
bundled_skills_enabled: bool,
restriction_product: Option<Product>,
) -> Self {
let manager = Self {
codex_home,
Self {
plugins_manager,
restriction_product,
cache_by_cwd: RwLock::new(HashMap::new()),
cache_by_config: RwLock::new(HashMap::new()),
};
if !bundled_skills_enabled {
// The loader caches bundled skills under `skills/.system`. Clearing that directory is
// best-effort cleanup; root selection still enforces the config even if removal fails.
uninstall_system_skills(&manager.codex_home);
} else if let Err(err) = install_system_skills(&manager.codex_home) {
tracing::error!("failed to install system skills: {err}");
inner: codex_capabilities::skills::SkillsManager::new_with_restriction_product(
codex_home,
bundled_skills_enabled,
restriction_product,
),
}
manager
}
/// Load skills for an already-constructed [`Config`], avoiding any additional config-layer
/// loading.
///
/// This path uses a cache keyed by the effective skill-relevant config state rather than just
/// cwd so role-local and session-local skill overrides cannot bleed across sessions that happen
/// to share a directory.
pub fn skills_for_config(&self, config: &Config) -> SkillLoadOutcome {
let roots = self.skill_roots_for_config(config);
let cache_key = config_skills_cache_key(&roots, &config.config_layer_stack);
if let Some(outcome) = self.cached_outcome_for_config(&cache_key) {
return outcome;
}
let outcome = crate::skills::filter_skill_load_outcome_for_product(
finalize_skill_outcome(load_skills_from_roots(roots), &config.config_layer_stack),
self.restriction_product,
);
let mut cache = self
.cache_by_config
.write()
.unwrap_or_else(std::sync::PoisonError::into_inner);
cache.insert(cache_key, outcome.clone());
outcome
self.inner.skills_for_request(
&self.skill_load_request(
config.cwd.clone(),
config.config_layer_stack.clone(),
self.plugins_manager
.plugins_for_config(config)
.effective_skill_roots(),
Vec::new(),
),
)
}
pub(crate) fn skill_roots_for_config(&self, config: &Config) -> Vec<SkillRoot> {
let loaded_plugins = self.plugins_manager.plugins_for_config(config);
let mut roots = skill_roots(
&config.config_layer_stack,
&config.cwd,
loaded_plugins.effective_skill_roots(),
);
if !config.bundled_skills_enabled() {
roots.retain(|root| root.scope != SkillScope::System);
}
roots
self.inner.skill_roots_for_request(
&self.skill_load_request(
config.cwd.clone(),
config.config_layer_stack.clone(),
self.plugins_manager
.plugins_for_config(config)
.effective_skill_roots(),
Vec::new(),
),
)
}
pub async fn skills_for_cwd(
@@ -117,10 +81,6 @@ impl SkillsManager {
config: &Config,
force_reload: bool,
) -> SkillLoadOutcome {
if !force_reload && let Some(outcome) = self.cached_outcome_for_cwd(cwd) {
return outcome;
}
self.skills_for_cwd_with_extra_user_roots(cwd, config, force_reload, &[])
.await
}
@@ -132,16 +92,11 @@ impl SkillsManager {
force_reload: bool,
extra_user_roots: &[PathBuf],
) -> SkillLoadOutcome {
if !force_reload && let Some(outcome) = self.cached_outcome_for_cwd(cwd) {
return outcome;
}
let normalized_extra_user_roots = normalize_extra_user_roots(extra_user_roots);
let cwd_abs = match AbsolutePathBuf::try_from(cwd) {
Ok(cwd_abs) => cwd_abs,
Err(err) => {
return SkillLoadOutcome {
errors: vec![crate::skills::model::SkillError {
errors: vec![SkillError {
path: cwd.to_path_buf(),
message: err.to_string(),
}],
@@ -152,7 +107,7 @@ impl SkillsManager {
let cli_overrides: Vec<(String, TomlValue)> = Vec::new();
let config_layer_stack = match load_config_layers_state(
&self.codex_home,
&config.codex_home,
Some(cwd_abs),
&cli_overrides,
LoaderOverrides::default(),
@@ -163,7 +118,7 @@ impl SkillsManager {
Ok(config_layer_stack) => config_layer_stack,
Err(err) => {
return SkillLoadOutcome {
errors: vec![crate::skills::model::SkillError {
errors: vec![SkillError {
path: cwd.to_path_buf(),
message: err.to_string(),
}],
@@ -172,203 +127,48 @@ impl SkillsManager {
}
};
let loaded_plugins = self
let plugin_skill_roots = self
.plugins_manager
.plugins_for_config_with_force_reload(config, force_reload);
let mut roots = skill_roots(
&config_layer_stack,
cwd,
loaded_plugins.effective_skill_roots(),
);
if !bundled_skills_enabled_from_stack(&config_layer_stack) {
roots.retain(|root| root.scope != SkillScope::System);
}
roots.extend(
normalized_extra_user_roots
.iter()
.cloned()
.map(|path| SkillRoot {
path,
scope: SkillScope::User,
}),
);
let outcome = self.build_skill_outcome(roots, &config_layer_stack);
let mut cache = self
.cache_by_cwd
.write()
.unwrap_or_else(std::sync::PoisonError::into_inner);
cache.insert(cwd.to_path_buf(), outcome.clone());
outcome
}
fn build_skill_outcome(
&self,
roots: Vec<SkillRoot>,
config_layer_stack: &crate::config_loader::ConfigLayerStack,
) -> SkillLoadOutcome {
crate::skills::filter_skill_load_outcome_for_product(
finalize_skill_outcome(load_skills_from_roots(roots), config_layer_stack),
self.restriction_product,
.plugins_for_config_with_force_reload(config, force_reload)
.effective_skill_roots();
self.inner.skills_for_cwd_request(
&self.skill_load_request(
cwd.to_path_buf(),
config_layer_stack,
plugin_skill_roots,
extra_user_roots.to_vec(),
),
force_reload,
)
}
pub fn clear_cache(&self) {
let cleared_cwd = {
let mut cache = self
.cache_by_cwd
.write()
.unwrap_or_else(std::sync::PoisonError::into_inner);
let cleared = cache.len();
cache.clear();
cleared
};
let cleared_config = {
let mut cache = self
.cache_by_config
.write()
.unwrap_or_else(std::sync::PoisonError::into_inner);
let cleared = cache.len();
cache.clear();
cleared
};
let cleared = cleared_cwd + cleared_config;
info!("skills cache cleared ({cleared} entries)");
self.inner.clear_cache();
}
fn cached_outcome_for_cwd(&self, cwd: &Path) -> Option<SkillLoadOutcome> {
match self.cache_by_cwd.read() {
Ok(cache) => cache.get(cwd).cloned(),
Err(err) => err.into_inner().get(cwd).cloned(),
}
}
fn cached_outcome_for_config(
fn skill_load_request(
&self,
cache_key: &ConfigSkillsCacheKey,
) -> Option<SkillLoadOutcome> {
match self.cache_by_config.read() {
Ok(cache) => cache.get(cache_key).cloned(),
Err(err) => err.into_inner().get(cache_key).cloned(),
cwd: PathBuf,
config_layer_stack: crate::config_loader::ConfigLayerStack,
plugin_skill_roots: Vec<PathBuf>,
extra_user_roots: Vec<PathBuf>,
) -> SkillLoadRequest {
let bundled_skills_enabled =
codex_capabilities::skills::bundled_skills_enabled_from_stack(&config_layer_stack);
SkillLoadRequest {
cwd,
config_layer_stack,
bundled_skills_enabled,
plugin_skill_roots,
extra_user_roots,
}
}
}
// Cache key capturing the skill-relevant config state for a session, so
// per-config skill outcomes do not bleed across sessions that share a cwd.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct ConfigSkillsCacheKey {
    // Skill root paths paired with a stable numeric rank derived from scope.
    roots: Vec<(PathBuf, u8)>,
    // Sorted list of skill paths disabled via user/session config overrides.
    disabled_paths: Vec<PathBuf>,
}
// Reads the effective merged config from the layer stack and reports whether
// bundled (system) skills are enabled. Defaults to `true` when the `skills`
// table is absent or fails to deserialize (the error is logged, not fatal).
pub(crate) fn bundled_skills_enabled_from_stack(
    config_layer_stack: &crate::config_loader::ConfigLayerStack,
) -> bool {
    let effective_config = config_layer_stack.effective_config();
    // No `skills` table at all: bundled skills stay enabled.
    let Some(skills_value) = effective_config
        .as_table()
        .and_then(|table| table.get("skills"))
    else {
        return true;
    };
    let skills: SkillsConfig = match skills_value.clone().try_into() {
        Ok(skills) => skills,
        Err(err) => {
            // Malformed config is ignored with a warning rather than surfaced.
            warn!("invalid skills config: {err}");
            return true;
        }
    };
    skills.bundled.unwrap_or_default().enabled
}
// Collects the skill paths that are explicitly disabled via `skills.config`
// entries in user or session-flag config layers.
//
// Layers are walked lowest-precedence first, so a higher-precedence layer's
// `enabled` value for the same (canonicalized) path wins. Other layer kinds
// are intentionally ignored here.
fn disabled_paths_from_stack(
    config_layer_stack: &crate::config_loader::ConfigLayerStack,
) -> HashSet<PathBuf> {
    // path -> last-seen enabled flag; later (higher-precedence) layers overwrite.
    let mut configs = HashMap::new();
    for layer in config_layer_stack.get_layers(
        ConfigLayerStackOrdering::LowestPrecedenceFirst,
        /*include_disabled*/ true,
    ) {
        // Only user-level and session-flag layers may toggle individual skills.
        if !matches!(
            layer.name,
            ConfigLayerSource::User { .. } | ConfigLayerSource::SessionFlags
        ) {
            continue;
        }
        let Some(skills_value) = layer.config.get("skills") else {
            continue;
        };
        let skills: SkillsConfig = match skills_value.clone().try_into() {
            Ok(skills) => skills,
            Err(err) => {
                // A malformed layer is skipped with a warning, not fatal.
                warn!("invalid skills config: {err}");
                continue;
            }
        };
        for entry in skills.config {
            let path = normalize_override_path(entry.path.as_path());
            configs.insert(path, entry.enabled);
        }
    }
    // Keep only paths whose final (highest-precedence) state is disabled.
    configs
        .into_iter()
        .filter_map(|(path, enabled)| (!enabled).then_some(path))
        .collect()
}
// Builds the cache key for config-based skill loading: the ordered roots
// (path + scope rank) plus the sorted set of disabled skill paths.
fn config_skills_cache_key(
    roots: &[SkillRoot],
    config_layer_stack: &crate::config_loader::ConfigLayerStack,
) -> ConfigSkillsCacheKey {
    let mut disabled_paths: Vec<PathBuf> = disabled_paths_from_stack(config_layer_stack)
        .into_iter()
        .collect();
    // Sort for a deterministic key; the source HashSet has no stable order.
    disabled_paths.sort_unstable();
    ConfigSkillsCacheKey {
        roots: roots
            .iter()
            .map(|root| {
                // Stable numeric rank per scope so the key hashes and
                // compares consistently across runs.
                let scope_rank = match root.scope {
                    SkillScope::Repo => 0,
                    SkillScope::User => 1,
                    SkillScope::System => 2,
                    SkillScope::Admin => 3,
                };
                (root.path.clone(), scope_rank)
            })
            .collect(),
        disabled_paths,
    }
}
// Post-processes a freshly loaded outcome: records which paths the config
// disables and rebuilds the implicit-invocation lookup indexes from the
// skills that remain eligible.
fn finalize_skill_outcome(
    mut outcome: SkillLoadOutcome,
    config_layer_stack: &crate::config_loader::ConfigLayerStack,
) -> SkillLoadOutcome {
    outcome.disabled_paths = disabled_paths_from_stack(config_layer_stack);
    // Indexes must be derived after `disabled_paths` is assigned, since
    // implicit-invocation eligibility depends on it.
    let (by_scripts_dir, by_doc_path) =
        build_implicit_skill_path_indexes(outcome.allowed_skills_for_implicit_invocation());
    outcome.implicit_skills_by_scripts_dir = Arc::new(by_scripts_dir);
    outcome.implicit_skills_by_doc_path = Arc::new(by_doc_path);
    outcome
}
// Canonicalizes an override path via `dunce` (avoids Windows verbatim `\\?\`
// prefixes); falls back to the path as written when canonicalization fails,
// e.g. because the path does not exist.
fn normalize_override_path(path: &Path) -> PathBuf {
    dunce::canonicalize(path).unwrap_or_else(|_| path.to_path_buf())
}
fn normalize_extra_user_roots(extra_user_roots: &[PathBuf]) -> Vec<PathBuf> {
let mut normalized: Vec<PathBuf> = extra_user_roots
.iter()
.map(|path| dunce::canonicalize(path).unwrap_or_else(|_| path.clone()))
.collect();
normalized.sort_unstable();
normalized.dedup();
normalized
codex_capabilities::skills::bundled_skills_enabled_from_stack(config_layer_stack)
}
#[cfg(test)]

View File

@@ -5,9 +5,11 @@ use crate::config_loader::ConfigLayerEntry;
use crate::config_loader::ConfigLayerStack;
use crate::config_loader::ConfigRequirementsToml;
use crate::plugins::PluginsManager;
use codex_protocol::protocol::SkillScope;
use pretty_assertions::assert_eq;
use std::fs;
use std::path::PathBuf;
use std::sync::Arc;
use tempfile::TempDir;
fn write_user_skill(codex_home: &TempDir, dir: &str, name: &str, description: &str) {

View File

@@ -13,7 +13,6 @@ pub(crate) use env_var_dependencies::resolve_skill_dependencies_for_turn;
pub(crate) use injection::SkillInjections;
pub(crate) use injection::build_skill_injections;
pub(crate) use injection::collect_explicit_skill_mentions;
pub(crate) use invocation_utils::build_implicit_skill_path_indexes;
pub(crate) use invocation_utils::maybe_emit_implicit_skill_invocation;
pub use manager::SkillsManager;
pub use model::SkillError;

View File

@@ -1,158 +1,9 @@
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::PathBuf;
use std::sync::Arc;
use codex_protocol::models::PermissionProfile;
use codex_protocol::protocol::Product;
use codex_protocol::protocol::SkillScope;
use serde::Deserialize;
/// Per-skill override of managed-network domain lists, deserialized from
/// skill metadata. Enforcement semantics live with the consumers of this
/// type, not here.
#[derive(Debug, Clone, Default, Deserialize, PartialEq, Eq)]
pub struct SkillManagedNetworkOverride {
    /// Optional allow-list of domains; `None` means no override requested.
    pub allowed_domains: Option<Vec<String>>,
    /// Optional deny-list of domains; `None` means no override requested.
    pub denied_domains: Option<Vec<String>>,
}
impl SkillManagedNetworkOverride {
    /// True when either domain list is present, i.e. the skill actually
    /// carries a network-domain override.
    pub fn has_domain_overrides(&self) -> bool {
        self.allowed_domains.is_some() || self.denied_domains.is_some()
    }
}
/// Parsed metadata describing a single loaded skill.
#[derive(Debug, Clone, PartialEq)]
pub struct SkillMetadata {
    /// Skill name (used for mentions and rendering in the skills section).
    pub name: String,
    /// Long-form description rendered alongside the skill.
    pub description: String,
    // NOTE(review): presumably a compact variant of `description` for UI
    // surfaces — confirm against consumers.
    pub short_description: Option<String>,
    /// Optional display/branding metadata.
    pub interface: Option<SkillInterface>,
    /// Optional tool dependencies declared by the skill.
    pub dependencies: Option<SkillDependencies>,
    /// Optional policy (implicit-invocation flag, product gating).
    pub policy: Option<SkillPolicy>,
    pub permission_profile: Option<PermissionProfile>,
    pub managed_network_override: Option<SkillManagedNetworkOverride>,
    /// Path to the SKILLS.md file that declares this skill.
    pub path_to_skills_md: PathBuf,
    /// Where the skill was discovered (repo/user/system/admin).
    pub scope: SkillScope,
}
impl SkillMetadata {
    /// Whether this skill may be invoked implicitly. Defaults to `true` when
    /// there is no policy or the policy leaves the flag unset.
    fn allow_implicit_invocation(&self) -> bool {
        let explicit = self
            .policy
            .as_ref()
            .and_then(|policy| policy.allow_implicit_invocation);
        match explicit {
            Some(allowed) => allowed,
            None => true,
        }
    }

    /// Returns `true` when the skill is not product-gated, or when
    /// `restriction_product` satisfies the policy's product list.
    pub fn matches_product_restriction_for_product(
        &self,
        restriction_product: Option<Product>,
    ) -> bool {
        let Some(policy) = &self.policy else {
            // No policy at all: never restricted.
            return true;
        };
        policy.products.is_empty()
            || restriction_product
                .is_some_and(|product| product.matches_product_restriction(&policy.products))
    }
}
/// Policy knobs a skill can declare about how and where it may be used.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct SkillPolicy {
    /// When `Some(false)`, the skill must not be invoked implicitly; `None`
    /// leaves the default (allowed) in effect.
    pub allow_implicit_invocation: Option<bool>,
    // TODO: Enforce product gating in Codex skill selection/injection instead of only parsing and
    // storing this metadata.
    pub products: Vec<Product>,
}
/// Optional presentation metadata for a skill (display name, icons, color).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillInterface {
    pub display_name: Option<String>,
    pub short_description: Option<String>,
    // NOTE(review): icon paths are presumably relative to the skill
    // directory — confirm against the loader.
    pub icon_small: Option<PathBuf>,
    pub icon_large: Option<PathBuf>,
    /// Brand color, validated elsewhere as a `#RRGGBB` string.
    pub brand_color: Option<String>,
    pub default_prompt: Option<String>,
}
/// Set of external tools a skill declares it depends on.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillDependencies {
    pub tools: Vec<SkillToolDependency>,
}
/// A single tool dependency declared by a skill. Fields mirror the raw
/// declaration; interpretation is left to the consumer.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillToolDependency {
    /// Raw dependency-kind discriminator string.
    pub r#type: String,
    /// Kind-specific identifier or value.
    pub value: String,
    pub description: Option<String>,
    // NOTE(review): transport/command/url presumably apply only to
    // server-style tool dependencies — confirm against the parser.
    pub transport: Option<String>,
    pub command: Option<String>,
    pub url: Option<String>,
}
/// A non-fatal error produced while loading skills, tied to the offending
/// filesystem path.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SkillError {
    pub path: PathBuf,
    pub message: String,
}
/// Result of loading skills from a set of roots: the parsed skills, any
/// per-path errors, and derived lookup state for implicit invocation.
#[derive(Debug, Clone, Default)]
pub struct SkillLoadOutcome {
    /// All parsed skills, including ones that config later marks disabled.
    pub skills: Vec<SkillMetadata>,
    /// Non-fatal load errors keyed by path.
    pub errors: Vec<SkillError>,
    /// `SKILL.md` paths disabled via config overrides.
    pub disabled_paths: HashSet<PathBuf>,
    /// Index of implicitly-invocable skills by scripts directory; wrapped in
    /// `Arc` so clones of the outcome share one index.
    pub(crate) implicit_skills_by_scripts_dir: Arc<HashMap<PathBuf, SkillMetadata>>,
    /// Index of implicitly-invocable skills by doc path; `Arc`-shared too.
    pub(crate) implicit_skills_by_doc_path: Arc<HashMap<PathBuf, SkillMetadata>>,
}
impl SkillLoadOutcome {
    /// A skill is enabled unless its `SKILL.md` path appears in
    /// `disabled_paths` (populated from config overrides).
    pub fn is_skill_enabled(&self, skill: &SkillMetadata) -> bool {
        let disabled = self.disabled_paths.contains(&skill.path_to_skills_md);
        !disabled
    }

    /// Implicit invocation requires the skill to be enabled AND its policy
    /// to permit implicit invocation.
    pub fn is_skill_allowed_for_implicit_invocation(&self, skill: &SkillMetadata) -> bool {
        if !self.is_skill_enabled(skill) {
            return false;
        }
        skill.allow_implicit_invocation()
    }

    /// Clones every loaded skill that may currently be invoked implicitly.
    pub fn allowed_skills_for_implicit_invocation(&self) -> Vec<SkillMetadata> {
        let mut allowed = Vec::new();
        for skill in &self.skills {
            if self.is_skill_allowed_for_implicit_invocation(skill) {
                allowed.push(skill.clone());
            }
        }
        allowed
    }

    /// Pairs each loaded skill with its current enabled flag.
    pub fn skills_with_enabled(&self) -> impl Iterator<Item = (&SkillMetadata, bool)> {
        self.skills
            .iter()
            .map(|skill| (skill, self.is_skill_enabled(skill)))
    }
}
pub fn filter_skill_load_outcome_for_product(
mut outcome: SkillLoadOutcome,
restriction_product: Option<Product>,
) -> SkillLoadOutcome {
outcome
.skills
.retain(|skill| skill.matches_product_restriction_for_product(restriction_product));
outcome.implicit_skills_by_scripts_dir = Arc::new(
outcome
.implicit_skills_by_scripts_dir
.iter()
.filter(|(_, skill)| skill.matches_product_restriction_for_product(restriction_product))
.map(|(path, skill)| (path.clone(), skill.clone()))
.collect(),
);
outcome.implicit_skills_by_doc_path = Arc::new(
outcome
.implicit_skills_by_doc_path
.iter()
.filter(|(_, skill)| skill.matches_product_restriction_for_product(restriction_product))
.map(|(path, skill)| (path.clone(), skill.clone()))
.collect(),
);
outcome
}
pub use codex_capabilities::skills::model::SkillDependencies;
pub use codex_capabilities::skills::model::SkillError;
pub use codex_capabilities::skills::model::SkillInterface;
pub use codex_capabilities::skills::model::SkillLoadOutcome;
pub use codex_capabilities::skills::model::SkillManagedNetworkOverride;
pub use codex_capabilities::skills::model::SkillMetadata;
pub use codex_capabilities::skills::model::SkillPolicy;
pub use codex_capabilities::skills::model::SkillToolDependency;
pub use codex_capabilities::skills::model::filter_skill_load_outcome_for_product;

View File

@@ -1,91 +1,10 @@
use anyhow::Context;
use anyhow::Result;
use serde::Deserialize;
use std::path::Component;
use std::path::Path;
use std::path::PathBuf;
use std::time::Duration;
use crate::auth::CodexAuth;
use crate::CodexAuth;
use crate::config::Config;
use crate::default_client::build_reqwest_client;
const REMOTE_SKILLS_API_TIMEOUT: Duration = Duration::from_secs(30);
// Low-level client for the remote skill API. This is intentionally kept around for
// future wiring, but it is not used yet by any active product surface.
/// Scope filter for listing remote skills; mapped to the API's `scope`
/// query parameter by `as_query_scope`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RemoteSkillScope {
    WorkspaceShared,
    AllShared,
    Personal,
    Example,
}
/// Product surface requesting remote skills; mapped to the API's
/// `product_surface` query parameter by `as_query_product_surface`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RemoteSkillProductSurface {
    Chatgpt,
    Codex,
    Api,
    Atlas,
}
/// Maps a remote skill scope to its query-string value.
///
/// Every current scope has a representation; the `Option` return is kept so
/// callers that conditionally append the parameter keep working.
fn as_query_scope(scope: RemoteSkillScope) -> Option<&'static str> {
    let value = match scope {
        RemoteSkillScope::WorkspaceShared => "workspace-shared",
        RemoteSkillScope::AllShared => "all-shared",
        RemoteSkillScope::Personal => "personal",
        RemoteSkillScope::Example => "example",
    };
    Some(value)
}
fn as_query_product_surface(product_surface: RemoteSkillProductSurface) -> &'static str {
match product_surface {
RemoteSkillProductSurface::Chatgpt => "chatgpt",
RemoteSkillProductSurface::Codex => "codex",
RemoteSkillProductSurface::Api => "api",
RemoteSkillProductSurface::Atlas => "atlas",
}
}
/// Ensures `auth` is present and is ChatGPT authentication.
///
/// Remote skill endpoints only accept ChatGPT auth, so both a missing auth
/// and API-key auth are rejected with an error.
fn ensure_chatgpt_auth(auth: Option<&CodexAuth>) -> Result<&CodexAuth> {
    match auth {
        None => anyhow::bail!("chatgpt authentication required for remote skill scopes"),
        Some(auth) if !auth.is_chatgpt_auth() => anyhow::bail!(
            "chatgpt authentication required for remote skill scopes; api key auth is not supported"
        ),
        Some(auth) => Ok(auth),
    }
}
/// Public summary of a remote skill returned by `list_remote_skills`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RemoteSkillSummary {
    pub id: String,
    pub name: String,
    pub description: String,
}
/// Result of downloading and extracting a remote skill: its id and the local
/// directory it was unpacked into.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RemoteSkillDownloadResult {
    pub id: String,
    pub path: PathBuf,
}
/// Wire format of the remote skills list response. The API names the array
/// field `hazelnuts`; we expose it as `skills`.
#[derive(Debug, Deserialize)]
struct RemoteSkillsResponse {
    #[serde(rename = "hazelnuts")]
    skills: Vec<RemoteSkill>,
}
/// Wire format of a single remote skill entry in the list response.
#[derive(Debug, Deserialize)]
struct RemoteSkill {
    id: String,
    name: String,
    description: String,
}
use codex_capabilities::skills::RemoteSkillRequest;
use codex_capabilities::skills::remote::RemoteSkillDownloadResult;
use codex_capabilities::skills::remote::RemoteSkillProductSurface;
use codex_capabilities::skills::remote::RemoteSkillScope;
use codex_capabilities::skills::remote::RemoteSkillSummary;
pub async fn list_remote_skills(
config: &Config,
@@ -93,178 +12,32 @@ pub async fn list_remote_skills(
scope: RemoteSkillScope,
product_surface: RemoteSkillProductSurface,
enabled: Option<bool>,
) -> Result<Vec<RemoteSkillSummary>> {
let base_url = config.chatgpt_base_url.trim_end_matches('/');
let auth = ensure_chatgpt_auth(auth)?;
let url = format!("{base_url}/hazelnuts");
let product_surface = as_query_product_surface(product_surface);
let mut query_params = vec![("product_surface", product_surface)];
if let Some(scope) = as_query_scope(scope) {
query_params.push(("scope", scope));
}
if let Some(enabled) = enabled {
let enabled = if enabled { "true" } else { "false" };
query_params.push(("enabled", enabled));
}
let client = build_reqwest_client();
let mut request = client
.get(&url)
.timeout(REMOTE_SKILLS_API_TIMEOUT)
.query(&query_params);
let token = auth
.get_token()
.context("Failed to read auth token for remote skills")?;
request = request.bearer_auth(token);
if let Some(account_id) = auth.get_account_id() {
request = request.header("chatgpt-account-id", account_id);
}
let response = request
.send()
.await
.with_context(|| format!("Failed to send request to {url}"))?;
let status = response.status();
let body = response.text().await.unwrap_or_default();
if !status.is_success() {
anyhow::bail!("Request failed with status {status} from {url}: {body}");
}
let parsed: RemoteSkillsResponse =
serde_json::from_str(&body).context("Failed to parse skills response")?;
Ok(parsed
.skills
.into_iter()
.map(|skill| RemoteSkillSummary {
id: skill.id,
name: skill.name,
description: skill.description,
})
.collect())
) -> anyhow::Result<Vec<RemoteSkillSummary>> {
codex_capabilities::skills::remote::list_remote_skills(
&RemoteSkillRequest {
chatgpt_base_url: config.chatgpt_base_url.clone(),
codex_home: config.codex_home.clone(),
},
auth,
scope,
product_surface,
enabled,
)
.await
}
pub async fn export_remote_skill(
config: &Config,
auth: Option<&CodexAuth>,
skill_id: &str,
) -> Result<RemoteSkillDownloadResult> {
let auth = ensure_chatgpt_auth(auth)?;
let client = build_reqwest_client();
let base_url = config.chatgpt_base_url.trim_end_matches('/');
let url = format!("{base_url}/hazelnuts/{skill_id}/export");
let mut request = client.get(&url).timeout(REMOTE_SKILLS_API_TIMEOUT);
let token = auth
.get_token()
.context("Failed to read auth token for remote skills")?;
request = request.bearer_auth(token);
if let Some(account_id) = auth.get_account_id() {
request = request.header("chatgpt-account-id", account_id);
}
let response = request
.send()
.await
.with_context(|| format!("Failed to send download request to {url}"))?;
let status = response.status();
let body = response.bytes().await.context("Failed to read download")?;
if !status.is_success() {
let body_text = String::from_utf8_lossy(&body);
anyhow::bail!("Download failed with status {status} from {url}: {body_text}");
}
if !is_zip_payload(&body) {
anyhow::bail!("Downloaded remote skill payload is not a zip archive");
}
let output_dir = config.codex_home.join("skills").join(skill_id);
tokio::fs::create_dir_all(&output_dir)
.await
.context("Failed to create downloaded skills directory")?;
let zip_bytes = body.to_vec();
let output_dir_clone = output_dir.clone();
let prefix_candidates = vec![skill_id.to_string()];
tokio::task::spawn_blocking(move || {
extract_zip_to_dir(zip_bytes, &output_dir_clone, &prefix_candidates)
})
) -> anyhow::Result<RemoteSkillDownloadResult> {
codex_capabilities::skills::remote::export_remote_skill(
&RemoteSkillRequest {
chatgpt_base_url: config.chatgpt_base_url.clone(),
codex_home: config.codex_home.clone(),
},
auth,
skill_id,
)
.await
.context("Zip extraction task failed")??;
Ok(RemoteSkillDownloadResult {
id: skill_id.to_string(),
path: output_dir,
})
}
/// Joins an archive entry name onto `base`, rejecting any entry whose path
/// contains a non-normal component (`..`, root, prefix, or `.`), which
/// prevents zip entries from escaping the extraction directory.
fn safe_join(base: &Path, name: &str) -> Result<PathBuf> {
    let relative = Path::new(name);
    let all_components_normal = relative
        .components()
        .all(|component| matches!(component, Component::Normal(_)));
    if !all_components_normal {
        anyhow::bail!("Invalid file path in remote skill payload: {name}");
    }
    Ok(base.join(relative))
}
/// Detects a zip payload by its magic bytes: a regular local-file header,
/// an empty archive's end-of-central-directory record, or a spanned marker.
fn is_zip_payload(bytes: &[u8]) -> bool {
    const ZIP_MAGICS: [&[u8]; 3] = [b"PK\x03\x04", b"PK\x05\x06", b"PK\x07\x08"];
    ZIP_MAGICS.iter().any(|magic| bytes.starts_with(magic))
}
/// Extracts a zip archive (given as raw bytes) into `output_dir`.
///
/// Directory entries are skipped; file names are normalized via
/// `normalize_zip_name` (leading `./` and a matching `prefix_candidates`
/// directory stripped) and validated by `safe_join` so entries cannot escape
/// `output_dir`. Fails on the first unreadable entry or write error.
fn extract_zip_to_dir(
    bytes: Vec<u8>,
    output_dir: &Path,
    prefix_candidates: &[String],
) -> Result<()> {
    let cursor = std::io::Cursor::new(bytes);
    let mut archive = zip::ZipArchive::new(cursor).context("Failed to open zip archive")?;
    for i in 0..archive.len() {
        let mut file = archive.by_index(i).context("Failed to read zip entry")?;
        if file.is_dir() {
            // Directories are created lazily below, when a file needs them.
            continue;
        }
        let raw_name = file.name().to_string();
        let normalized = normalize_zip_name(&raw_name, prefix_candidates);
        let Some(normalized) = normalized else {
            // Entry collapsed to an empty name after prefix stripping; skip.
            continue;
        };
        let file_path = safe_join(output_dir, &normalized)?;
        if let Some(parent) = file_path.parent() {
            std::fs::create_dir_all(parent)
                .with_context(|| format!("Failed to create parent dir for {normalized}"))?;
        }
        let mut out = std::fs::File::create(&file_path)
            .with_context(|| format!("Failed to create file {normalized}"))?;
        std::io::copy(&mut file, &mut out)
            .with_context(|| format!("Failed to write skill file {normalized}"))?;
    }
    Ok(())
}
/// Normalizes a zip entry name: drops a leading `./` and, if the name lives
/// under the first matching non-empty candidate directory (e.g. the skill
/// id), strips that single directory prefix.
///
/// Returns `None` when nothing remains after stripping.
fn normalize_zip_name(name: &str, prefix_candidates: &[String]) -> Option<String> {
    let without_dot_slash = name.trim_start_matches("./");
    let stripped = prefix_candidates
        .iter()
        .filter(|candidate| !candidate.is_empty())
        .find_map(|candidate| {
            without_dot_slash.strip_prefix(format!("{candidate}/").as_str())
        })
        .unwrap_or(without_dot_slash);
    (!stripped.is_empty()).then(|| stripped.to_string())
}

View File

@@ -1,48 +1 @@
use crate::skills::model::SkillMetadata;
use codex_protocol::protocol::SKILLS_INSTRUCTIONS_CLOSE_TAG;
use codex_protocol::protocol::SKILLS_INSTRUCTIONS_OPEN_TAG;
/// Renders the "## Skills" instructions section for the system prompt.
///
/// Returns `None` when no skills are available; otherwise a markdown body
/// listing each skill (name, description, forward-slashed `SKILL.md` path)
/// plus fixed usage guidance, wrapped in the skills-instructions tags.
pub fn render_skills_section(skills: &[SkillMetadata]) -> Option<String> {
    if skills.is_empty() {
        return None;
    }
    let mut lines: Vec<String> = Vec::new();
    lines.push("## Skills".to_string());
    lines.push("A skill is a set of local instructions to follow that is stored in a `SKILL.md` file. Below is the list of skills that can be used. Each entry includes a name, description, and file path so you can open the source for full instructions when using a specific skill.".to_string());
    lines.push("### Available skills".to_string());
    for skill in skills {
        // Normalize Windows separators so paths render consistently.
        let path_str = skill.path_to_skills_md.to_string_lossy().replace('\\', "/");
        let name = skill.name.as_str();
        let description = skill.description.as_str();
        lines.push(format!("- {name}: {description} (file: {path_str})"));
    }
    lines.push("### How to use skills".to_string());
    lines.push(
        r###"- Discovery: The list above is the skills available in this session (name + description + file path). Skill bodies live on disk at the listed paths.
- Trigger rules: If the user names a skill (with `$SkillName` or plain text) OR the task clearly matches a skill's description shown above, you must use that skill for that turn. Multiple mentions mean use them all. Do not carry skills across turns unless re-mentioned.
- Missing/blocked: If a named skill isn't in the list or the path can't be read, say so briefly and continue with the best fallback.
- How to use a skill (progressive disclosure):
1) After deciding to use a skill, open its `SKILL.md`. Read only enough to follow the workflow.
2) When `SKILL.md` references relative paths (e.g., `scripts/foo.py`), resolve them relative to the skill directory listed above first, and only consider other paths if needed.
3) If `SKILL.md` points to extra folders such as `references/`, load only the specific files needed for the request; don't bulk-load everything.
4) If `scripts/` exist, prefer running or patching them instead of retyping large code blocks.
5) If `assets/` or templates exist, reuse them instead of recreating from scratch.
- Coordination and sequencing:
- If multiple skills apply, choose the minimal set that covers the request and state the order you'll use them.
- Announce which skill(s) you're using and why (one short line). If you skip an obvious skill, say why.
- Context hygiene:
- Keep context small: summarize long sections instead of pasting them; only load extra files when needed.
- Avoid deep reference-chasing: prefer opening only files directly linked from `SKILL.md` unless you're blocked.
- When variants exist (frameworks, providers, domains), pick only the relevant reference file(s) and note that choice.
- Safety and fallback: If a skill can't be applied cleanly (missing files, unclear instructions), state the issue, pick the next-best approach, and continue."###
            .to_string(),
    );
    let body = lines.join("\n");
    Some(format!(
        "{SKILLS_INSTRUCTIONS_OPEN_TAG}\n{body}\n{SKILLS_INSTRUCTIONS_CLOSE_TAG}"
    ))
}
pub use codex_capabilities::skills::render::render_skills_section;

View File

@@ -1,9 +1,3 @@
pub(crate) use codex_skills::install_system_skills;
pub(crate) use codex_skills::system_cache_root_dir;
use std::path::Path;
pub(crate) fn uninstall_system_skills(codex_home: &Path) {
let system_skills_dir = system_cache_root_dir(codex_home);
let _ = std::fs::remove_dir_all(&system_skills_dir);
}
pub use codex_capabilities::skills::system::install_system_skills;
pub use codex_capabilities::skills::system::system_cache_root_dir;
pub use codex_capabilities::skills::system::uninstall_system_skills;

View File

@@ -8,6 +8,7 @@ use crate::plugins::test_support::write_plugins_feature_config;
use crate::tools::discoverable::DiscoverablePluginInfo;
use crate::tools::discoverable::filter_tool_suggest_discoverable_tools_for_client;
use codex_app_server_protocol::AppInfo;
use codex_capabilities::plugins::curated_plugins_repo_path;
use codex_utils_absolute_path::AbsolutePathBuf;
use pretty_assertions::assert_eq;
use serde_json::json;
@@ -238,7 +239,7 @@ fn verified_connector_suggestion_completed_requires_accessible_connector() {
#[tokio::test]
async fn verified_plugin_suggestion_completed_requires_installed_plugin() {
let codex_home = tempdir().expect("tempdir should succeed");
let curated_root = crate::plugins::curated_plugins_repo_path(codex_home.path());
let curated_root = curated_plugins_repo_path(codex_home.path());
write_openai_curated_marketplace(&curated_root, &["sample"]);
write_curated_plugin_sha(codex_home.path());
write_plugins_feature_config(codex_home.path());