Compare commits
5 Commits
pr12442
...
dh--sessio
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bb0d04ce68 | ||
|
|
e7b6f38b58 | ||
|
|
f5d7a74568 | ||
|
|
85ce91a5b3 | ||
|
|
2fe4be1aa9 |
14
codex-rs/Cargo.lock
generated
@@ -1398,8 +1398,8 @@ version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"codex-apply-patch",
|
||||
"codex-core",
|
||||
"codex-linux-sandbox",
|
||||
"codex-utils-home-dir",
|
||||
"dotenvy",
|
||||
"tempfile",
|
||||
"tokio",
|
||||
@@ -1644,6 +1644,7 @@ dependencies = [
|
||||
"codex-rmcp-client",
|
||||
"codex-secrets",
|
||||
"codex-shell-command",
|
||||
"codex-skills",
|
||||
"codex-state",
|
||||
"codex-utils-absolute-path",
|
||||
"codex-utils-cargo-bin",
|
||||
@@ -1663,7 +1664,6 @@ dependencies = [
|
||||
"futures",
|
||||
"http 1.4.0",
|
||||
"image",
|
||||
"include_dir",
|
||||
"indexmap 2.13.0",
|
||||
"indoc",
|
||||
"insta",
|
||||
@@ -1735,6 +1735,7 @@ dependencies = [
|
||||
"anyhow",
|
||||
"assert_cmd",
|
||||
"clap",
|
||||
"codex-apply-patch",
|
||||
"codex-arg0",
|
||||
"codex-cloud-requirements",
|
||||
"codex-core",
|
||||
@@ -2188,6 +2189,15 @@ dependencies = [
|
||||
"which",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-skills"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"codex-utils-absolute-path",
|
||||
"include_dir",
|
||||
"thiserror 2.0.18",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codex-state"
|
||||
version = "0.0.0"
|
||||
|
||||
@@ -17,6 +17,7 @@ members = [
|
||||
"cli",
|
||||
"config",
|
||||
"shell-command",
|
||||
"skills",
|
||||
"core",
|
||||
"hooks",
|
||||
"secrets",
|
||||
@@ -112,6 +113,7 @@ codex-responses-api-proxy = { path = "responses-api-proxy" }
|
||||
codex-rmcp-client = { path = "rmcp-client" }
|
||||
codex-secrets = { path = "secrets" }
|
||||
codex-shell-command = { path = "shell-command" }
|
||||
codex-skills = { path = "skills" }
|
||||
codex-state = { path = "state" }
|
||||
codex-stdio-to-uds = { path = "stdio-to-uds" }
|
||||
codex-tui = { path = "tui" }
|
||||
|
||||
@@ -25,6 +25,15 @@ use crate::invocation::ExtractHeredocError;
|
||||
/// Detailed instructions for gpt-4.1 on how to use the `apply_patch` tool.
|
||||
pub const APPLY_PATCH_TOOL_INSTRUCTIONS: &str = include_str!("../apply_patch_tool_instructions.md");
|
||||
|
||||
/// Special argv[1] flag used when the Codex executable self-invokes to run the
|
||||
/// internal `apply_patch` path.
|
||||
///
|
||||
/// Although this constant lives in `codex-apply-patch` (to avoid forcing
|
||||
/// `codex-arg0` to depend on `codex-core`), it is part of the "codex core"
|
||||
/// process-invocation contract between the apply-patch runtime and the arg0
|
||||
/// dispatcher.
|
||||
pub const CODEX_CORE_APPLY_PATCH_ARG1: &str = "--codex-run-as-apply-patch";
|
||||
|
||||
#[derive(Debug, Error, PartialEq)]
|
||||
pub enum ApplyPatchError {
|
||||
#[error(transparent)]
|
||||
|
||||
@@ -14,8 +14,8 @@ workspace = true
|
||||
[dependencies]
|
||||
anyhow = { workspace = true }
|
||||
codex-apply-patch = { workspace = true }
|
||||
codex-core = { workspace = true }
|
||||
codex-linux-sandbox = { workspace = true }
|
||||
codex-utils-home-dir = { workspace = true }
|
||||
dotenvy = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
tokio = { workspace = true, features = ["rt-multi-thread"] }
|
||||
|
||||
@@ -3,7 +3,8 @@ use std::future::Future;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use codex_core::CODEX_APPLY_PATCH_ARG1;
|
||||
use codex_apply_patch::CODEX_CORE_APPLY_PATCH_ARG1;
|
||||
use codex_utils_home_dir::find_codex_home;
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::fs::symlink;
|
||||
use tempfile::TempDir;
|
||||
@@ -46,7 +47,7 @@ pub fn arg0_dispatch() -> Option<Arg0PathEntryGuard> {
|
||||
}
|
||||
|
||||
let argv1 = args.next().unwrap_or_default();
|
||||
if argv1 == CODEX_APPLY_PATCH_ARG1 {
|
||||
if argv1 == CODEX_CORE_APPLY_PATCH_ARG1 {
|
||||
let patch_arg = args.next().and_then(|s| s.to_str().map(str::to_owned));
|
||||
let exit_code = match patch_arg {
|
||||
Some(patch_arg) => {
|
||||
@@ -58,7 +59,7 @@ pub fn arg0_dispatch() -> Option<Arg0PathEntryGuard> {
|
||||
}
|
||||
}
|
||||
None => {
|
||||
eprintln!("Error: {CODEX_APPLY_PATCH_ARG1} requires a UTF-8 PATCH argument.");
|
||||
eprintln!("Error: {CODEX_CORE_APPLY_PATCH_ARG1} requires a UTF-8 PATCH argument.");
|
||||
1
|
||||
}
|
||||
};
|
||||
@@ -139,7 +140,7 @@ const ILLEGAL_ENV_VAR_PREFIX: &str = "CODEX_";
|
||||
/// Security: Do not allow `.env` files to create or modify any variables
|
||||
/// with names starting with `CODEX_`.
|
||||
fn load_dotenv() {
|
||||
if let Ok(codex_home) = codex_core::config::find_codex_home()
|
||||
if let Ok(codex_home) = find_codex_home()
|
||||
&& let Ok(iter) = dotenvy::from_path_iter(codex_home.join(".env"))
|
||||
{
|
||||
set_filtered(iter);
|
||||
@@ -175,7 +176,7 @@ where
|
||||
/// IMPORTANT: This function modifies the PATH environment variable, so it MUST
|
||||
/// be called before multiple threads are spawned.
|
||||
pub fn prepend_path_entry_for_codex_aliases() -> std::io::Result<Arg0PathEntryGuard> {
|
||||
let codex_home = codex_core::config::find_codex_home()?;
|
||||
let codex_home = find_codex_home()?;
|
||||
#[cfg(not(debug_assertions))]
|
||||
{
|
||||
// Guard against placing helpers in system temp directories outside debug builds.
|
||||
@@ -242,7 +243,7 @@ pub fn prepend_path_entry_for_codex_aliases() -> std::io::Result<Arg0PathEntryGu
|
||||
&batch_script,
|
||||
format!(
|
||||
r#"@echo off
|
||||
"{}" {CODEX_APPLY_PATCH_ARG1} %*
|
||||
"{}" {CODEX_CORE_APPLY_PATCH_ARG1} %*
|
||||
"#,
|
||||
exe.display()
|
||||
),
|
||||
|
||||
@@ -1,163 +0,0 @@
|
||||
use crate::common::ResponseEvent;
|
||||
use crate::common::ResponseStream;
|
||||
use crate::error::ApiError;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ReasoningItemContent;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use futures::Stream;
|
||||
use std::collections::VecDeque;
|
||||
use std::pin::Pin;
|
||||
use std::task::Context;
|
||||
use std::task::Poll;
|
||||
|
||||
/// Stream adapter that merges token deltas into a single assistant message per turn.
|
||||
pub struct AggregatedStream {
|
||||
inner: ResponseStream,
|
||||
cumulative: String,
|
||||
cumulative_reasoning: String,
|
||||
pending: VecDeque<ResponseEvent>,
|
||||
}
|
||||
|
||||
impl Stream for AggregatedStream {
|
||||
type Item = Result<ResponseEvent, ApiError>;
|
||||
|
||||
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
|
||||
let this = self.get_mut();
|
||||
|
||||
if let Some(ev) = this.pending.pop_front() {
|
||||
return Poll::Ready(Some(Ok(ev)));
|
||||
}
|
||||
|
||||
loop {
|
||||
match Pin::new(&mut this.inner).poll_next(cx) {
|
||||
Poll::Pending => return Poll::Pending,
|
||||
Poll::Ready(None) => return Poll::Ready(None),
|
||||
Poll::Ready(Some(Err(err))) => return Poll::Ready(Some(Err(err))),
|
||||
Poll::Ready(Some(Ok(ResponseEvent::OutputItemDone(item)))) => {
|
||||
let is_assistant_message = matches!(
|
||||
&item,
|
||||
ResponseItem::Message { role, .. } if role == "assistant"
|
||||
);
|
||||
|
||||
if is_assistant_message {
|
||||
if this.cumulative.is_empty()
|
||||
&& let ResponseItem::Message { content, .. } = &item
|
||||
&& let Some(text) = content.iter().find_map(|c| match c {
|
||||
ContentItem::OutputText { text } => Some(text),
|
||||
_ => None,
|
||||
})
|
||||
{
|
||||
this.cumulative.push_str(text);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
return Poll::Ready(Some(Ok(ResponseEvent::OutputItemDone(item))));
|
||||
}
|
||||
Poll::Ready(Some(Ok(ResponseEvent::ServerReasoningIncluded(included)))) => {
|
||||
return Poll::Ready(Some(Ok(ResponseEvent::ServerReasoningIncluded(included))));
|
||||
}
|
||||
Poll::Ready(Some(Ok(ResponseEvent::RateLimits(snapshot)))) => {
|
||||
return Poll::Ready(Some(Ok(ResponseEvent::RateLimits(snapshot))));
|
||||
}
|
||||
Poll::Ready(Some(Ok(ResponseEvent::ModelsEtag(etag)))) => {
|
||||
return Poll::Ready(Some(Ok(ResponseEvent::ModelsEtag(etag))));
|
||||
}
|
||||
Poll::Ready(Some(Ok(ResponseEvent::ServerModel(model)))) => {
|
||||
return Poll::Ready(Some(Ok(ResponseEvent::ServerModel(model))));
|
||||
}
|
||||
Poll::Ready(Some(Ok(ResponseEvent::Completed {
|
||||
response_id,
|
||||
token_usage,
|
||||
can_append: _can_append,
|
||||
}))) => {
|
||||
let mut emitted_any = false;
|
||||
|
||||
if !this.cumulative_reasoning.is_empty() {
|
||||
let aggregated_reasoning = ResponseItem::Reasoning {
|
||||
id: String::new(),
|
||||
summary: Vec::new(),
|
||||
content: Some(vec![ReasoningItemContent::ReasoningText {
|
||||
text: std::mem::take(&mut this.cumulative_reasoning),
|
||||
}]),
|
||||
encrypted_content: None,
|
||||
};
|
||||
this.pending
|
||||
.push_back(ResponseEvent::OutputItemDone(aggregated_reasoning));
|
||||
emitted_any = true;
|
||||
}
|
||||
|
||||
if !this.cumulative.is_empty() {
|
||||
let aggregated_message = ResponseItem::Message {
|
||||
id: None,
|
||||
role: "assistant".to_string(),
|
||||
content: vec![ContentItem::OutputText {
|
||||
text: std::mem::take(&mut this.cumulative),
|
||||
}],
|
||||
end_turn: None,
|
||||
phase: None,
|
||||
};
|
||||
this.pending
|
||||
.push_back(ResponseEvent::OutputItemDone(aggregated_message));
|
||||
emitted_any = true;
|
||||
}
|
||||
|
||||
if emitted_any {
|
||||
this.pending.push_back(ResponseEvent::Completed {
|
||||
response_id: response_id.clone(),
|
||||
token_usage: token_usage.clone(),
|
||||
can_append: false,
|
||||
});
|
||||
if let Some(ev) = this.pending.pop_front() {
|
||||
return Poll::Ready(Some(Ok(ev)));
|
||||
}
|
||||
}
|
||||
|
||||
return Poll::Ready(Some(Ok(ResponseEvent::Completed {
|
||||
response_id,
|
||||
token_usage,
|
||||
can_append: false,
|
||||
})));
|
||||
}
|
||||
Poll::Ready(Some(Ok(ResponseEvent::Created))) => continue,
|
||||
Poll::Ready(Some(Ok(ResponseEvent::OutputTextDelta(delta)))) => {
|
||||
this.cumulative.push_str(&delta);
|
||||
continue;
|
||||
}
|
||||
Poll::Ready(Some(Ok(ResponseEvent::ReasoningContentDelta {
|
||||
delta,
|
||||
content_index: _,
|
||||
}))) => {
|
||||
this.cumulative_reasoning.push_str(&delta);
|
||||
continue;
|
||||
}
|
||||
Poll::Ready(Some(Ok(ResponseEvent::ReasoningSummaryDelta { .. }))) => continue,
|
||||
Poll::Ready(Some(Ok(ResponseEvent::ReasoningSummaryPartAdded { .. }))) => continue,
|
||||
Poll::Ready(Some(Ok(ResponseEvent::OutputItemAdded(item)))) => {
|
||||
return Poll::Ready(Some(Ok(ResponseEvent::OutputItemAdded(item))));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait AggregateStreamExt {
|
||||
fn aggregate(self) -> AggregatedStream;
|
||||
}
|
||||
|
||||
impl AggregateStreamExt for ResponseStream {
|
||||
fn aggregate(self) -> AggregatedStream {
|
||||
AggregatedStream::new(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl AggregatedStream {
|
||||
fn new(inner: ResponseStream) -> Self {
|
||||
AggregatedStream {
|
||||
inner,
|
||||
cumulative: String::new(),
|
||||
cumulative_reasoning: String::new(),
|
||||
pending: VecDeque::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,3 @@
|
||||
pub mod aggregate;
|
||||
pub mod compact;
|
||||
pub mod memories;
|
||||
pub mod models;
|
||||
|
||||
@@ -25,7 +25,6 @@ pub use crate::common::ResponseEvent;
|
||||
pub use crate::common::ResponseStream;
|
||||
pub use crate::common::ResponsesApiRequest;
|
||||
pub use crate::common::create_text_param_for_request;
|
||||
pub use crate::endpoint::aggregate::AggregateStreamExt;
|
||||
pub use crate::endpoint::compact::CompactClient;
|
||||
pub use crate::endpoint::memories::MemoriesClient;
|
||||
pub use crate::endpoint::models::ModelsClient;
|
||||
|
||||
@@ -3,7 +3,6 @@ use std::time::Duration;
|
||||
use anyhow::Result;
|
||||
use async_trait::async_trait;
|
||||
use bytes::Bytes;
|
||||
use codex_api::AggregateStreamExt;
|
||||
use codex_api::AuthProvider;
|
||||
use codex_api::Provider;
|
||||
use codex_api::ResponseEvent;
|
||||
@@ -14,7 +13,6 @@ use codex_client::Request;
|
||||
use codex_client::Response;
|
||||
use codex_client::StreamResponse;
|
||||
use codex_client::TransportError;
|
||||
use codex_protocol::models::ContentItem;
|
||||
use codex_protocol::models::ResponseItem;
|
||||
use futures::StreamExt;
|
||||
use http::HeaderMap;
|
||||
@@ -172,69 +170,3 @@ async fn responses_stream_parses_items_and_completed_end_to_end() -> Result<()>
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn responses_stream_aggregates_output_text_deltas() -> Result<()> {
|
||||
let delta1 = serde_json::json!({
|
||||
"type": "response.output_text.delta",
|
||||
"delta": "Hello, "
|
||||
});
|
||||
|
||||
let delta2 = serde_json::json!({
|
||||
"type": "response.output_text.delta",
|
||||
"delta": "world"
|
||||
});
|
||||
|
||||
let completed = serde_json::json!({
|
||||
"type": "response.completed",
|
||||
"response": { "id": "resp-agg" }
|
||||
});
|
||||
|
||||
let body = build_responses_body(vec![delta1, delta2, completed]);
|
||||
let transport = FixtureSseTransport::new(body);
|
||||
let client = ResponsesClient::new(transport, provider("openai"), NoAuth);
|
||||
|
||||
let stream = client
|
||||
.stream(
|
||||
serde_json::json!({"echo": true}),
|
||||
HeaderMap::new(),
|
||||
Compression::None,
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut stream = stream.aggregate();
|
||||
let mut events = Vec::new();
|
||||
while let Some(ev) = stream.next().await {
|
||||
events.push(ev?);
|
||||
}
|
||||
|
||||
let events: Vec<ResponseEvent> = events
|
||||
.into_iter()
|
||||
.filter(|ev| !matches!(ev, ResponseEvent::RateLimits(_)))
|
||||
.collect();
|
||||
|
||||
assert_eq!(events.len(), 2);
|
||||
|
||||
match &events[0] {
|
||||
ResponseEvent::OutputItemDone(ResponseItem::Message { content, .. }) => {
|
||||
let mut aggregated = String::new();
|
||||
for item in content {
|
||||
if let ContentItem::OutputText { text } = item {
|
||||
aggregated.push_str(text);
|
||||
}
|
||||
}
|
||||
assert_eq!(aggregated, "Hello, world");
|
||||
}
|
||||
other => panic!("unexpected first event: {other:?}"),
|
||||
}
|
||||
|
||||
match &events[1] {
|
||||
ResponseEvent::Completed { response_id, .. } => {
|
||||
assert_eq!(response_id, "resp-agg");
|
||||
}
|
||||
other => panic!("unexpected second event: {other:?}"),
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -3,7 +3,6 @@ edition.workspace = true
|
||||
license.workspace = true
|
||||
name = "codex-core"
|
||||
version.workspace = true
|
||||
build = "build.rs"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
@@ -34,6 +33,7 @@ codex-async-utils = { workspace = true }
|
||||
codex-client = { workspace = true }
|
||||
codex-config = { workspace = true }
|
||||
codex-shell-command = { workspace = true }
|
||||
codex-skills = { workspace = true }
|
||||
codex-execpolicy = { workspace = true }
|
||||
codex-file-search = { workspace = true }
|
||||
codex-git = { workspace = true }
|
||||
@@ -58,7 +58,6 @@ env-flags = { workspace = true }
|
||||
eventsource-stream = { workspace = true }
|
||||
futures = { workspace = true }
|
||||
http = { workspace = true }
|
||||
include_dir = { workspace = true }
|
||||
indexmap = { workspace = true }
|
||||
indoc = { workspace = true }
|
||||
keyring = { workspace = true, features = ["crypto-rust"] }
|
||||
|
||||
@@ -9,8 +9,6 @@ use codex_apply_patch::ApplyPatchFileChange;
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub const CODEX_APPLY_PATCH_ARG1: &str = "--codex-run-as-apply-patch";
|
||||
|
||||
pub(crate) enum InternalApplyPatchInvocation {
|
||||
/// The `apply_patch` call was handled programmatically, without any sort
|
||||
/// of sandbox, because the user explicitly approved it. This is the
|
||||
@@ -20,7 +18,8 @@ pub(crate) enum InternalApplyPatchInvocation {
|
||||
/// The `apply_patch` call was approved, either automatically because it
|
||||
/// appears that it should be allowed based on the user's sandbox policy
|
||||
/// *or* because the user explicitly approved it. In either case, we use
|
||||
/// exec with [`CODEX_APPLY_PATCH_ARG1`] to realize the `apply_patch` call,
|
||||
/// exec with [`codex_apply_patch::CODEX_CORE_APPLY_PATCH_ARG1`] to realize
|
||||
/// the `apply_patch` call,
|
||||
/// but [`ApplyPatchExec::auto_approved`] is used to determine the sandbox
|
||||
/// used with the `exec()`.
|
||||
DelegateToExec(ApplyPatchExec),
|
||||
|
||||
@@ -144,7 +144,6 @@ pub(crate) use codex_shell_command::is_safe_command;
|
||||
pub(crate) use codex_shell_command::parse_command;
|
||||
pub(crate) use codex_shell_command::powershell;
|
||||
|
||||
pub use apply_patch::CODEX_APPLY_PATCH_ARG1;
|
||||
pub use client::X_CODEX_TURN_METADATA_HEADER;
|
||||
pub use exec_policy::ExecPolicyError;
|
||||
pub use exec_policy::check_execpolicy_for_warnings;
|
||||
|
||||
@@ -1,196 +1,2 @@
|
||||
use codex_utils_absolute_path::AbsolutePathBuf;
|
||||
use include_dir::Dir;
|
||||
use std::collections::hash_map::DefaultHasher;
|
||||
use std::fs;
|
||||
use std::hash::Hash;
|
||||
use std::hash::Hasher;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use thiserror::Error;
|
||||
|
||||
const SYSTEM_SKILLS_DIR: Dir =
|
||||
include_dir::include_dir!("$CARGO_MANIFEST_DIR/src/skills/assets/samples");
|
||||
|
||||
const SYSTEM_SKILLS_DIR_NAME: &str = ".system";
|
||||
const SKILLS_DIR_NAME: &str = "skills";
|
||||
const SYSTEM_SKILLS_MARKER_FILENAME: &str = ".codex-system-skills.marker";
|
||||
const SYSTEM_SKILLS_MARKER_SALT: &str = "v1";
|
||||
|
||||
/// Returns the on-disk cache location for embedded system skills.
|
||||
///
|
||||
/// This is typically located at `CODEX_HOME/skills/.system`.
|
||||
pub(crate) fn system_cache_root_dir(codex_home: &Path) -> PathBuf {
|
||||
AbsolutePathBuf::try_from(codex_home)
|
||||
.and_then(|codex_home| system_cache_root_dir_abs(&codex_home))
|
||||
.map(AbsolutePathBuf::into_path_buf)
|
||||
.unwrap_or_else(|_| {
|
||||
codex_home
|
||||
.join(SKILLS_DIR_NAME)
|
||||
.join(SYSTEM_SKILLS_DIR_NAME)
|
||||
})
|
||||
}
|
||||
|
||||
fn system_cache_root_dir_abs(codex_home: &AbsolutePathBuf) -> std::io::Result<AbsolutePathBuf> {
|
||||
codex_home
|
||||
.join(SKILLS_DIR_NAME)?
|
||||
.join(SYSTEM_SKILLS_DIR_NAME)
|
||||
}
|
||||
|
||||
/// Installs embedded system skills into `CODEX_HOME/skills/.system`.
|
||||
///
|
||||
/// Clears any existing system skills directory first and then writes the embedded
|
||||
/// skills directory into place.
|
||||
///
|
||||
/// To avoid doing unnecessary work on every startup, a marker file is written
|
||||
/// with a fingerprint of the embedded directory. When the marker matches, the
|
||||
/// install is skipped.
|
||||
pub(crate) fn install_system_skills(codex_home: &Path) -> Result<(), SystemSkillsError> {
|
||||
let codex_home = AbsolutePathBuf::try_from(codex_home)
|
||||
.map_err(|source| SystemSkillsError::io("normalize codex home dir", source))?;
|
||||
let skills_root_dir = codex_home
|
||||
.join(SKILLS_DIR_NAME)
|
||||
.map_err(|source| SystemSkillsError::io("resolve skills root dir", source))?;
|
||||
fs::create_dir_all(skills_root_dir.as_path())
|
||||
.map_err(|source| SystemSkillsError::io("create skills root dir", source))?;
|
||||
|
||||
let dest_system = system_cache_root_dir_abs(&codex_home)
|
||||
.map_err(|source| SystemSkillsError::io("resolve system skills cache root dir", source))?;
|
||||
|
||||
let marker_path = dest_system
|
||||
.join(SYSTEM_SKILLS_MARKER_FILENAME)
|
||||
.map_err(|source| SystemSkillsError::io("resolve system skills marker path", source))?;
|
||||
let expected_fingerprint = embedded_system_skills_fingerprint();
|
||||
if dest_system.as_path().is_dir()
|
||||
&& read_marker(&marker_path).is_ok_and(|marker| marker == expected_fingerprint)
|
||||
{
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if dest_system.as_path().exists() {
|
||||
fs::remove_dir_all(dest_system.as_path())
|
||||
.map_err(|source| SystemSkillsError::io("remove existing system skills dir", source))?;
|
||||
}
|
||||
|
||||
write_embedded_dir(&SYSTEM_SKILLS_DIR, &dest_system)?;
|
||||
fs::write(marker_path.as_path(), format!("{expected_fingerprint}\n"))
|
||||
.map_err(|source| SystemSkillsError::io("write system skills marker", source))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn read_marker(path: &AbsolutePathBuf) -> Result<String, SystemSkillsError> {
|
||||
Ok(fs::read_to_string(path.as_path())
|
||||
.map_err(|source| SystemSkillsError::io("read system skills marker", source))?
|
||||
.trim()
|
||||
.to_string())
|
||||
}
|
||||
|
||||
fn embedded_system_skills_fingerprint() -> String {
|
||||
let mut items = Vec::new();
|
||||
collect_fingerprint_items(&SYSTEM_SKILLS_DIR, &mut items);
|
||||
items.sort_unstable_by(|(a, _), (b, _)| a.cmp(b));
|
||||
|
||||
let mut hasher = DefaultHasher::new();
|
||||
SYSTEM_SKILLS_MARKER_SALT.hash(&mut hasher);
|
||||
for (path, contents_hash) in items {
|
||||
path.hash(&mut hasher);
|
||||
contents_hash.hash(&mut hasher);
|
||||
}
|
||||
format!("{:x}", hasher.finish())
|
||||
}
|
||||
|
||||
fn collect_fingerprint_items(dir: &Dir<'_>, items: &mut Vec<(String, Option<u64>)>) {
|
||||
for entry in dir.entries() {
|
||||
match entry {
|
||||
include_dir::DirEntry::Dir(subdir) => {
|
||||
items.push((subdir.path().to_string_lossy().to_string(), None));
|
||||
collect_fingerprint_items(subdir, items);
|
||||
}
|
||||
include_dir::DirEntry::File(file) => {
|
||||
let mut file_hasher = DefaultHasher::new();
|
||||
file.contents().hash(&mut file_hasher);
|
||||
items.push((
|
||||
file.path().to_string_lossy().to_string(),
|
||||
Some(file_hasher.finish()),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Writes the embedded `include_dir::Dir` to disk under `dest`.
|
||||
///
|
||||
/// Preserves the embedded directory structure.
|
||||
fn write_embedded_dir(dir: &Dir<'_>, dest: &AbsolutePathBuf) -> Result<(), SystemSkillsError> {
|
||||
fs::create_dir_all(dest.as_path())
|
||||
.map_err(|source| SystemSkillsError::io("create system skills dir", source))?;
|
||||
|
||||
for entry in dir.entries() {
|
||||
match entry {
|
||||
include_dir::DirEntry::Dir(subdir) => {
|
||||
let subdir_dest = dest.join(subdir.path()).map_err(|source| {
|
||||
SystemSkillsError::io("resolve system skills subdir", source)
|
||||
})?;
|
||||
fs::create_dir_all(subdir_dest.as_path()).map_err(|source| {
|
||||
SystemSkillsError::io("create system skills subdir", source)
|
||||
})?;
|
||||
write_embedded_dir(subdir, dest)?;
|
||||
}
|
||||
include_dir::DirEntry::File(file) => {
|
||||
let path = dest.join(file.path()).map_err(|source| {
|
||||
SystemSkillsError::io("resolve system skills file", source)
|
||||
})?;
|
||||
if let Some(parent) = path.as_path().parent() {
|
||||
fs::create_dir_all(parent).map_err(|source| {
|
||||
SystemSkillsError::io("create system skills file parent", source)
|
||||
})?;
|
||||
}
|
||||
fs::write(path.as_path(), file.contents())
|
||||
.map_err(|source| SystemSkillsError::io("write system skill file", source))?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub(crate) enum SystemSkillsError {
|
||||
#[error("io error while {action}: {source}")]
|
||||
Io {
|
||||
action: &'static str,
|
||||
#[source]
|
||||
source: std::io::Error,
|
||||
},
|
||||
}
|
||||
|
||||
impl SystemSkillsError {
|
||||
fn io(action: &'static str, source: std::io::Error) -> Self {
|
||||
Self::Io { action, source }
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::SYSTEM_SKILLS_DIR;
|
||||
use super::collect_fingerprint_items;
|
||||
|
||||
#[test]
|
||||
fn fingerprint_traverses_nested_entries() {
|
||||
let mut items = Vec::new();
|
||||
collect_fingerprint_items(&SYSTEM_SKILLS_DIR, &mut items);
|
||||
let mut paths: Vec<String> = items.into_iter().map(|(path, _)| path).collect();
|
||||
paths.sort_unstable();
|
||||
|
||||
assert!(
|
||||
paths
|
||||
.binary_search_by(|probe| probe.as_str().cmp("skill-creator/SKILL.md"))
|
||||
.is_ok()
|
||||
);
|
||||
assert!(
|
||||
paths
|
||||
.binary_search_by(|probe| probe.as_str().cmp("skill-creator/scripts/init_skill.py"))
|
||||
.is_ok()
|
||||
);
|
||||
}
|
||||
}
|
||||
pub(crate) use codex_skills::install_system_skills;
|
||||
pub(crate) use codex_skills::system_cache_root_dir;
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
//! decision to avoid re-prompting, builds the self-invocation command for
|
||||
//! `codex --codex-run-as-apply-patch`, and runs under the current
|
||||
//! `SandboxAttempt` with a minimal environment.
|
||||
use crate::CODEX_APPLY_PATCH_ARG1;
|
||||
use crate::exec::ExecToolCallOutput;
|
||||
use crate::sandboxing::CommandSpec;
|
||||
use crate::sandboxing::SandboxPermissions;
|
||||
@@ -20,6 +19,7 @@ use crate::tools::sandboxing::ToolError;
|
||||
use crate::tools::sandboxing::ToolRuntime;
|
||||
use crate::tools::sandboxing::with_cached_approval;
|
||||
use codex_apply_patch::ApplyPatchAction;
|
||||
use codex_apply_patch::CODEX_CORE_APPLY_PATCH_ARG1;
|
||||
use codex_protocol::protocol::AskForApproval;
|
||||
use codex_protocol::protocol::FileChange;
|
||||
use codex_protocol::protocol::ReviewDecision;
|
||||
@@ -57,7 +57,10 @@ impl ApplyPatchRuntime {
|
||||
let program = exe.to_string_lossy().to_string();
|
||||
Ok(CommandSpec {
|
||||
program,
|
||||
args: vec![CODEX_APPLY_PATCH_ARG1.to_string(), req.action.patch.clone()],
|
||||
args: vec![
|
||||
CODEX_CORE_APPLY_PATCH_ARG1.to_string(),
|
||||
req.action.patch.clone(),
|
||||
],
|
||||
cwd: req.action.cwd.clone(),
|
||||
expiration: req.timeout_ms.into(),
|
||||
// Run apply_patch with a minimal environment for determinism and to avoid leaks.
|
||||
|
||||
@@ -51,6 +51,7 @@ uuid = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
assert_cmd = { workspace = true }
|
||||
codex-apply-patch = { workspace = true }
|
||||
codex-utils-cargo-bin = { workspace = true }
|
||||
core_test_support = { workspace = true }
|
||||
libc = { workspace = true }
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
use anyhow::Context;
|
||||
use assert_cmd::prelude::*;
|
||||
use codex_core::CODEX_APPLY_PATCH_ARG1;
|
||||
use codex_apply_patch::CODEX_CORE_APPLY_PATCH_ARG1;
|
||||
use core_test_support::responses::ev_apply_patch_custom_tool_call;
|
||||
use core_test_support::responses::ev_apply_patch_function_call;
|
||||
use core_test_support::responses::ev_completed;
|
||||
@@ -24,7 +24,7 @@ fn test_standalone_exec_cli_can_use_apply_patch() -> anyhow::Result<()> {
|
||||
fs::write(&absolute_path, "original content\n")?;
|
||||
|
||||
Command::new(codex_utils_cargo_bin::cargo_bin("codex-exec")?)
|
||||
.arg(CODEX_APPLY_PATCH_ARG1)
|
||||
.arg(CODEX_CORE_APPLY_PATCH_ARG1)
|
||||
.arg(
|
||||
r#"*** Begin Patch
|
||||
*** Update File: source.txt
|
||||
|
||||
@@ -128,6 +128,9 @@ pub fn parse_shell_lc_single_command_prefix(command: &[String]) -> Option<Vec<St
|
||||
if root.has_error() {
|
||||
return None;
|
||||
}
|
||||
if !has_named_descendant_kind(root, "heredoc_redirect") {
|
||||
return None;
|
||||
}
|
||||
|
||||
let command_node = find_single_command_node(root)?;
|
||||
parse_heredoc_command_words(command_node, script)
|
||||
@@ -265,6 +268,20 @@ fn find_single_command_node(root: Node<'_>) -> Option<Node<'_>> {
|
||||
single_command
|
||||
}
|
||||
|
||||
fn has_named_descendant_kind(node: Node<'_>, kind: &str) -> bool {
|
||||
let mut stack = vec![node];
|
||||
while let Some(current) = stack.pop() {
|
||||
if current.kind() == kind {
|
||||
return true;
|
||||
}
|
||||
let mut cursor = current.walk();
|
||||
for child in current.named_children(&mut cursor) {
|
||||
stack.push(child);
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
fn parse_double_quoted_string(node: Node, src: &str) -> Option<String> {
|
||||
if node.kind() != "string" {
|
||||
return None;
|
||||
|
||||
15
codex-rs/skills/BUILD.bazel
Normal file
@@ -0,0 +1,15 @@
|
||||
load("//:defs.bzl", "codex_rust_crate")
|
||||
|
||||
codex_rust_crate(
|
||||
name = "skills",
|
||||
crate_name = "codex_skills",
|
||||
compile_data = glob(
|
||||
include = ["**"],
|
||||
exclude = [
|
||||
"**/* *",
|
||||
"BUILD.bazel",
|
||||
"Cargo.toml",
|
||||
],
|
||||
allow_empty = True,
|
||||
),
|
||||
)
|
||||
19
codex-rs/skills/Cargo.toml
Normal file
@@ -0,0 +1,19 @@
|
||||
[package]
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
name = "codex-skills"
|
||||
version.workspace = true
|
||||
build = "build.rs"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
name = "codex_skills"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
codex-utils-absolute-path = { workspace = true }
|
||||
include_dir = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
@@ -2,7 +2,7 @@ use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
fn main() {
|
||||
let samples_dir = Path::new("src/skills/assets/samples");
|
||||
let samples_dir = Path::new("src/assets/samples");
|
||||
if !samples_dir.exists() {
|
||||
return;
|
||||
}
|
||||
|
Before Width: | Height: | Size: 1.3 KiB After Width: | Height: | Size: 1.3 KiB |
|
Before Width: | Height: | Size: 1.5 KiB After Width: | Height: | Size: 1.5 KiB |
|
Before Width: | Height: | Size: 923 B After Width: | Height: | Size: 923 B |
|
Before Width: | Height: | Size: 1.1 KiB After Width: | Height: | Size: 1.1 KiB |
195
codex-rs/skills/src/lib.rs
Normal file
@@ -0,0 +1,195 @@
|
||||
use codex_utils_absolute_path::AbsolutePathBuf;
|
||||
use include_dir::Dir;
|
||||
use std::collections::hash_map::DefaultHasher;
|
||||
use std::fs;
|
||||
use std::hash::Hash;
|
||||
use std::hash::Hasher;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use thiserror::Error;
|
||||
|
||||
const SYSTEM_SKILLS_DIR: Dir = include_dir::include_dir!("$CARGO_MANIFEST_DIR/src/assets/samples");
|
||||
|
||||
const SYSTEM_SKILLS_DIR_NAME: &str = ".system";
|
||||
const SKILLS_DIR_NAME: &str = "skills";
|
||||
const SYSTEM_SKILLS_MARKER_FILENAME: &str = ".codex-system-skills.marker";
|
||||
const SYSTEM_SKILLS_MARKER_SALT: &str = "v1";
|
||||
|
||||
/// Returns the on-disk cache location for embedded system skills.
|
||||
///
|
||||
/// This is typically located at `CODEX_HOME/skills/.system`.
|
||||
pub fn system_cache_root_dir(codex_home: &Path) -> PathBuf {
|
||||
AbsolutePathBuf::try_from(codex_home)
|
||||
.and_then(|codex_home| system_cache_root_dir_abs(&codex_home))
|
||||
.map(AbsolutePathBuf::into_path_buf)
|
||||
.unwrap_or_else(|_| {
|
||||
codex_home
|
||||
.join(SKILLS_DIR_NAME)
|
||||
.join(SYSTEM_SKILLS_DIR_NAME)
|
||||
})
|
||||
}
|
||||
|
||||
fn system_cache_root_dir_abs(codex_home: &AbsolutePathBuf) -> std::io::Result<AbsolutePathBuf> {
|
||||
codex_home
|
||||
.join(SKILLS_DIR_NAME)?
|
||||
.join(SYSTEM_SKILLS_DIR_NAME)
|
||||
}
|
||||
|
||||
/// Installs embedded system skills into `CODEX_HOME/skills/.system`.
///
/// Clears any existing system skills directory first and then writes the embedded
/// skills directory into place.
///
/// To avoid doing unnecessary work on every startup, a marker file is written
/// with a fingerprint of the embedded directory. When the marker matches, the
/// install is skipped.
pub fn install_system_skills(codex_home: &Path) -> Result<(), SystemSkillsError> {
    let codex_home = AbsolutePathBuf::try_from(codex_home)
        .map_err(|source| SystemSkillsError::io("normalize codex home dir", source))?;
    let skills_root_dir = codex_home
        .join(SKILLS_DIR_NAME)
        .map_err(|source| SystemSkillsError::io("resolve skills root dir", source))?;
    fs::create_dir_all(skills_root_dir.as_path())
        .map_err(|source| SystemSkillsError::io("create skills root dir", source))?;

    let dest_system = system_cache_root_dir_abs(&codex_home)
        .map_err(|source| SystemSkillsError::io("resolve system skills cache root dir", source))?;

    let marker_path = dest_system
        .join(SYSTEM_SKILLS_MARKER_FILENAME)
        .map_err(|source| SystemSkillsError::io("resolve system skills marker path", source))?;
    let expected_fingerprint = embedded_system_skills_fingerprint();
    // Fast path: the cache dir exists and its marker matches the embedded
    // fingerprint, so the on-disk copy is already current. A missing or
    // unreadable marker simply falls through to reinstall.
    if dest_system.as_path().is_dir()
        && read_marker(&marker_path).is_ok_and(|marker| marker == expected_fingerprint)
    {
        return Ok(());
    }

    // Stale or partial install: wipe the whole cache dir before rewriting so
    // no leftover files from a previous embedded set survive.
    if dest_system.as_path().exists() {
        fs::remove_dir_all(dest_system.as_path())
            .map_err(|source| SystemSkillsError::io("remove existing system skills dir", source))?;
    }

    write_embedded_dir(&SYSTEM_SKILLS_DIR, &dest_system)?;
    // Write the marker last: if the process dies mid-install, no matching
    // marker exists and the next run retries from scratch.
    fs::write(marker_path.as_path(), format!("{expected_fingerprint}\n"))
        .map_err(|source| SystemSkillsError::io("write system skills marker", source))?;
    Ok(())
}
fn read_marker(path: &AbsolutePathBuf) -> Result<String, SystemSkillsError> {
|
||||
Ok(fs::read_to_string(path.as_path())
|
||||
.map_err(|source| SystemSkillsError::io("read system skills marker", source))?
|
||||
.trim()
|
||||
.to_string())
|
||||
}
|
||||
|
||||
fn embedded_system_skills_fingerprint() -> String {
|
||||
let mut items = Vec::new();
|
||||
collect_fingerprint_items(&SYSTEM_SKILLS_DIR, &mut items);
|
||||
items.sort_unstable_by(|(a, _), (b, _)| a.cmp(b));
|
||||
|
||||
let mut hasher = DefaultHasher::new();
|
||||
SYSTEM_SKILLS_MARKER_SALT.hash(&mut hasher);
|
||||
for (path, contents_hash) in items {
|
||||
path.hash(&mut hasher);
|
||||
contents_hash.hash(&mut hasher);
|
||||
}
|
||||
format!("{:x}", hasher.finish())
|
||||
}
|
||||
|
||||
fn collect_fingerprint_items(dir: &Dir<'_>, items: &mut Vec<(String, Option<u64>)>) {
|
||||
for entry in dir.entries() {
|
||||
match entry {
|
||||
include_dir::DirEntry::Dir(subdir) => {
|
||||
items.push((subdir.path().to_string_lossy().to_string(), None));
|
||||
collect_fingerprint_items(subdir, items);
|
||||
}
|
||||
include_dir::DirEntry::File(file) => {
|
||||
let mut file_hasher = DefaultHasher::new();
|
||||
file.contents().hash(&mut file_hasher);
|
||||
items.push((
|
||||
file.path().to_string_lossy().to_string(),
|
||||
Some(file_hasher.finish()),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Writes the embedded `include_dir::Dir` to disk under `dest`.
///
/// Preserves the embedded directory structure.
fn write_embedded_dir(dir: &Dir<'_>, dest: &AbsolutePathBuf) -> Result<(), SystemSkillsError> {
    fs::create_dir_all(dest.as_path())
        .map_err(|source| SystemSkillsError::io("create system skills dir", source))?;

    for entry in dir.entries() {
        match entry {
            include_dir::DirEntry::Dir(subdir) => {
                // `include_dir` entry paths are relative to the embedded root,
                // so joining `dest` with `subdir.path()` yields the full
                // on-disk destination.
                let subdir_dest = dest.join(subdir.path()).map_err(|source| {
                    SystemSkillsError::io("resolve system skills subdir", source)
                })?;
                // Created explicitly so a directory with no files (if any are
                // embedded) still appears on disk.
                fs::create_dir_all(subdir_dest.as_path()).map_err(|source| {
                    SystemSkillsError::io("create system skills subdir", source)
                })?;
                // Recurse with the original `dest`, NOT `subdir_dest`: nested
                // entry paths are also root-relative, so joining them onto
                // `dest` already places them inside the right subdirectory.
                write_embedded_dir(subdir, dest)?;
            }
            include_dir::DirEntry::File(file) => {
                let path = dest.join(file.path()).map_err(|source| {
                    SystemSkillsError::io("resolve system skills file", source)
                })?;
                if let Some(parent) = path.as_path().parent() {
                    fs::create_dir_all(parent).map_err(|source| {
                        SystemSkillsError::io("create system skills file parent", source)
                    })?;
                }
                fs::write(path.as_path(), file.contents())
                    .map_err(|source| SystemSkillsError::io("write system skill file", source))?;
            }
        }
    }

    Ok(())
}
/// Errors produced while installing or validating system skills on disk.
#[derive(Debug, Error)]
pub enum SystemSkillsError {
    /// An I/O failure, tagged with a short description of the attempted action.
    #[error("io error while {action}: {source}")]
    Io {
        // Human-readable description of what was being attempted.
        action: &'static str,
        #[source]
        source: std::io::Error,
    },
}
impl SystemSkillsError {
    /// Convenience constructor pairing an action description with its I/O error.
    fn io(action: &'static str, source: std::io::Error) -> Self {
        Self::Io { action, source }
    }
}
#[cfg(test)]
mod tests {
    use super::SYSTEM_SKILLS_DIR;
    use super::collect_fingerprint_items;

    #[test]
    fn fingerprint_traverses_nested_entries() {
        let mut items = Vec::new();
        collect_fingerprint_items(&SYSTEM_SKILLS_DIR, &mut items);
        let paths: Vec<String> = items.into_iter().map(|(path, _)| path).collect();

        // Both a top-level skill file and a nested script must be visited,
        // proving the traversal descends into subdirectories.
        assert!(paths.iter().any(|p| p == "skill-creator/SKILL.md"));
        assert!(
            paths
                .iter()
                .any(|p| p == "skill-creator/scripts/init_skill.py")
        );
    }
}