Compare commits

..

5 Commits

Author SHA1 Message Date
Ahmed Ibrahim
8cca259b99 codex: fix CI failure on PR #15029
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 17:35:34 +00:00
Ahmed Ibrahim
08dcd3dc19 Update auth tests to use codex-auth
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 05:58:35 +00:00
Ahmed Ibrahim
8caf1ddb00 Remove stale auth env var imports
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 05:36:22 +00:00
Ahmed Ibrahim
21b00f2672 Fix external auth refresh constructor call
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 05:25:51 +00:00
Ahmed Ibrahim
184fb02a9a Extract codex-auth from codex-core
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 05:11:40 +00:00
38 changed files with 1770 additions and 1643 deletions

43
codex-rs/Cargo.lock generated
View File

@@ -1594,6 +1594,28 @@ dependencies = [
"tokio-util",
]
[[package]]
name = "codex-auth"
version = "0.0.0"
dependencies = [
"anyhow",
"base64 0.22.1",
"chrono",
"codex-app-server-protocol",
"codex-keyring-store",
"keyring",
"once_cell",
"pretty_assertions",
"schemars 0.8.22",
"serde",
"serde_json",
"sha2",
"tempfile",
"thiserror 2.0.18",
"tokio",
"tracing",
]
[[package]]
name = "codex-backend-client"
version = "0.0.0"
@@ -1841,6 +1863,7 @@ dependencies = [
"codex-arg0",
"codex-artifacts",
"codex-async-utils",
"codex-auth",
"codex-client",
"codex-config",
"codex-connectors",
@@ -1849,7 +1872,6 @@ dependencies = [
"codex-git",
"codex-hooks",
"codex-keyring-store",
"codex-memories",
"codex-network-proxy",
"codex-otel",
"codex-protocol",
@@ -2149,6 +2171,7 @@ dependencies = [
"base64 0.22.1",
"chrono",
"codex-app-server-protocol",
"codex-auth",
"codex-client",
"codex-core",
"core_test_support",
@@ -2195,23 +2218,6 @@ dependencies = [
"wiremock",
]
[[package]]
name = "codex-memories"
version = "0.0.0"
dependencies = [
"anyhow",
"chrono",
"codex-api",
"codex-protocol",
"codex-state",
"pretty_assertions",
"serde_json",
"tempfile",
"tokio",
"tracing",
"uuid",
]
[[package]]
name = "codex-network-proxy"
version = "0.0.0"
@@ -3067,6 +3073,7 @@ dependencies = [
"anyhow",
"assert_cmd",
"base64 0.22.1",
"codex-auth",
"codex-core",
"codex-protocol",
"codex-utils-absolute-path",

View File

@@ -2,6 +2,7 @@
members = [
"backend-client",
"ansi-escape",
"auth",
"async-utils",
"app-server",
"app-server-client",
@@ -32,7 +33,6 @@ members = [
"linux-sandbox",
"lmstudio",
"login",
"memories",
"mcp-server",
"network-proxy",
"ollama",
@@ -87,6 +87,7 @@ license = "Apache-2.0"
app_test_support = { path = "app-server/tests/common" }
codex-ansi-escape = { path = "ansi-escape" }
codex-api = { path = "codex-api" }
codex-auth = { path = "auth" }
codex-artifacts = { path = "artifacts" }
codex-package-manager = { path = "package-manager" }
codex-app-server = { path = "app-server" }
@@ -115,7 +116,6 @@ codex-keyring-store = { path = "keyring-store" }
codex-linux-sandbox = { path = "linux-sandbox" }
codex-lmstudio = { path = "lmstudio" }
codex-login = { path = "login" }
codex-memories = { path = "memories" }
codex-mcp-server = { path = "mcp-server" }
codex-network-proxy = { path = "network-proxy" }
codex-ollama = { path = "ollama" }

View File

@@ -1,6 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")
codex_rust_crate(
name = "memories",
crate_name = "codex_memories",
name = "auth",
crate_name = "codex_auth",
)

39
codex-rs/auth/Cargo.toml Normal file
View File

@@ -0,0 +1,39 @@
[package]
name = "codex-auth"
version.workspace = true
edition.workspace = true
license.workspace = true
[lints]
workspace = true
[dependencies]
base64 = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
codex-app-server-protocol = { workspace = true }
codex-keyring-store = { workspace = true }
once_cell = { workspace = true }
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
sha2 = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
[target.'cfg(target_os = "linux")'.dependencies]
keyring = { workspace = true, features = ["linux-native-async-persistent"] }
[target.'cfg(target_os = "macos")'.dependencies]
keyring = { workspace = true, features = ["apple-native"] }
[target.'cfg(target_os = "windows")'.dependencies]
keyring = { workspace = true, features = ["windows-native"] }
[target.'cfg(any(target_os = "freebsd", target_os = "openbsd"))'.dependencies]
keyring = { workspace = true, features = ["sync-secret-service"] }
[dev-dependencies]
anyhow = { workspace = true }
pretty_assertions = { workspace = true }
tempfile = { workspace = true }
tokio = { workspace = true, features = ["macros", "rt"] }

View File

@@ -0,0 +1,52 @@
use crate::CODEX_API_KEY_ENV_VAR;
use crate::OPENAI_API_KEY_ENV_VAR;
use crate::REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR;
/// Presence-only snapshot of auth-related environment variables.
///
/// Only booleans and bucketed labels are recorded so no secret values can
/// leak into telemetry.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct AuthEnvTelemetry {
    // True when OPENAI_API_KEY is set to a non-blank value.
    pub openai_api_key_env_present: bool,
    // True when CODEX_API_KEY is set to a non-blank value.
    pub codex_api_key_env_present: bool,
    // Caller-supplied flag: whether CODEX_API_KEY is honored at all.
    pub codex_api_key_env_enabled: bool,
    // Bucketed to "configured"/None — never the actual key name or value.
    pub provider_env_key_name: Option<String>,
    // None when no provider env key was given to inspect.
    pub provider_env_key_present: Option<bool>,
    pub refresh_token_url_override_present: bool,
}
/// Snapshot which auth environment variables are currently set.
///
/// Records only presence/bucketed values, never the variables' contents,
/// so the result is safe to emit as telemetry.
pub fn collect_auth_env_telemetry(
    provider_env_key_configured: bool,
    provider_env_key: Option<&str>,
    codex_api_key_env_enabled: bool,
) -> AuthEnvTelemetry {
    // Bucket the provider key name so the configured name never leaks.
    let provider_env_key_name = if provider_env_key_configured {
        Some("configured".to_string())
    } else {
        None
    };
    AuthEnvTelemetry {
        openai_api_key_env_present: env_var_present(OPENAI_API_KEY_ENV_VAR),
        codex_api_key_env_present: env_var_present(CODEX_API_KEY_ENV_VAR),
        codex_api_key_env_enabled,
        provider_env_key_name,
        provider_env_key_present: provider_env_key.map(env_var_present),
        refresh_token_url_override_present: env_var_present(REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR),
    }
}
/// True when `name` is set to something other than pure whitespace.
/// A value that is set but not valid Unicode still counts as present.
fn env_var_present(name: &str) -> bool {
    match std::env::var(name) {
        Err(std::env::VarError::NotPresent) => false,
        // Set-but-undecodable is still "set" for presence purposes.
        Err(std::env::VarError::NotUnicode(_)) => true,
        Ok(value) => !value.trim().is_empty(),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    // The provider key name must be bucketed to the literal "configured";
    // the raw string passed in (potentially secret) must never be recorded.
    #[test]
    fn collect_auth_env_telemetry_buckets_provider_env_key_name() {
        let telemetry = collect_auth_env_telemetry(true, Some("sk-should-not-leak"), false);
        assert_eq!(
            telemetry.provider_env_key_name,
            Some("configured".to_string())
        );
    }
}

103
codex-rs/auth/src/lib.rs Normal file
View File

@@ -0,0 +1,103 @@
mod env_telemetry;
pub mod storage;
pub mod token_data;
use std::env;
use std::path::Path;
use codex_app_server_protocol::AuthMode;
pub use env_telemetry::AuthEnvTelemetry;
pub use env_telemetry::collect_auth_env_telemetry;
pub use storage::AuthCredentialsStoreMode;
pub use storage::AuthDotJson;
pub use storage::AuthStorageBackend;
pub use storage::create_auth_storage;
pub use token_data::IdTokenInfo;
pub use token_data::IdTokenInfoError;
pub use token_data::KnownPlan;
pub use token_data::PlanType;
pub use token_data::TokenData;
pub use token_data::parse_chatgpt_jwt_claims;
/// Environment variable carrying an OpenAI API key.
pub const OPENAI_API_KEY_ENV_VAR: &str = "OPENAI_API_KEY";
/// Environment variable carrying a Codex API key.
pub const CODEX_API_KEY_ENV_VAR: &str = "CODEX_API_KEY";
/// Overrides the token-refresh endpoint URL when set.
pub const REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR: &str = "CODEX_REFRESH_TOKEN_URL_OVERRIDE";

/// ChatGPT tokens supplied by an external (embedding) application rather
/// than produced by our own login flow.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ExternalAuthTokens {
    pub access_token: String,
    pub chatgpt_account_id: String,
    pub chatgpt_plan_type: Option<String>,
}

/// Trimmed value of `name`, or None when unset, blank, or whitespace-only.
fn non_blank_env(name: &str) -> Option<String> {
    let raw = env::var(name).ok()?;
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}

/// OPENAI_API_KEY from the environment, trimmed; None when unset or blank.
pub fn read_openai_api_key_from_env() -> Option<String> {
    non_blank_env(OPENAI_API_KEY_ENV_VAR)
}

/// CODEX_API_KEY from the environment, trimmed; None when unset or blank.
pub fn read_codex_api_key_from_env() -> Option<String> {
    non_blank_env(CODEX_API_KEY_ENV_VAR)
}
/// Delete any stored credentials for `codex_home`.
/// Returns Ok(true) when something was actually removed.
pub fn logout(
    codex_home: &Path,
    auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<bool> {
    create_auth_storage(codex_home.to_path_buf(), auth_credentials_store_mode).delete()
}

/// Persist an API-key login (no OAuth tokens).
pub fn login_with_api_key(
    codex_home: &Path,
    api_key: &str,
    auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<()> {
    save_auth(
        codex_home,
        &AuthDotJson {
            auth_mode: Some(AuthMode::ApiKey),
            openai_api_key: Some(api_key.to_string()),
            tokens: None,
            last_refresh: None,
        },
        auth_credentials_store_mode,
    )
}

/// Record externally supplied ChatGPT tokens. These are always stored in the
/// ephemeral (in-memory) backend, never written to disk or the keyring.
pub fn login_with_chatgpt_auth_tokens(
    codex_home: &Path,
    access_token: &str,
    chatgpt_account_id: &str,
    chatgpt_plan_type: Option<&str>,
) -> std::io::Result<()> {
    let auth = AuthDotJson::from_external_access_token(
        access_token,
        chatgpt_account_id,
        chatgpt_plan_type,
    )?;
    save_auth(codex_home, &auth, AuthCredentialsStoreMode::Ephemeral)
}

/// Persist `auth` through the backend selected by `auth_credentials_store_mode`.
pub fn save_auth(
    codex_home: &Path,
    auth: &AuthDotJson,
    auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<()> {
    create_auth_storage(codex_home.to_path_buf(), auth_credentials_store_mode).save(auth)
}

/// Load previously stored credentials; Ok(None) when nothing is stored.
pub fn load_auth_dot_json(
    codex_home: &Path,
    auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<Option<AuthDotJson>> {
    create_auth_storage(codex_home.to_path_buf(), auth_credentials_store_mode).load()
}

View File

@@ -0,0 +1,371 @@
use chrono::DateTime;
use chrono::Utc;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use sha2::Digest;
use sha2::Sha256;
use std::collections::HashMap;
use std::fmt::Debug;
use std::fs::File;
use std::fs::OpenOptions;
use std::io::Read;
use std::io::Write;
#[cfg(unix)]
use std::os::unix::fs::OpenOptionsExt;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
use tracing::warn;
use crate::PlanType;
use crate::TokenData;
use crate::parse_chatgpt_jwt_claims;
use codex_app_server_protocol::AuthMode;
use codex_keyring_store::DefaultKeyringStore;
use codex_keyring_store::KeyringStore;
use once_cell::sync::Lazy;
/// Where CLI credentials are persisted.
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "lowercase")]
pub enum AuthCredentialsStoreMode {
    /// Plain `auth.json` file under the codex home directory (default).
    #[default]
    File,
    /// OS keyring only.
    Keyring,
    /// Keyring first, falling back to the file store on failure.
    Auto,
    /// Process-local memory only; nothing written to disk or keyring.
    Ephemeral,
}
/// Serialized shape of the stored credentials (the `auth.json` document).
#[derive(Deserialize, Serialize, Clone, Debug, PartialEq)]
pub struct AuthDotJson {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub auth_mode: Option<AuthMode>,
    // Stored under the uppercase "OPENAI_API_KEY" key in auth.json.
    #[serde(rename = "OPENAI_API_KEY")]
    pub openai_api_key: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tokens: Option<TokenData>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub last_refresh: Option<DateTime<Utc>>,
}
impl AuthDotJson {
    /// Build credentials from an access token handed to us by an embedding
    /// application; no refresh token is available in this flow.
    pub fn from_external_access_token(
        access_token: &str,
        chatgpt_account_id: &str,
        chatgpt_plan_type: Option<&str>,
    ) -> std::io::Result<Self> {
        let mut id_token =
            parse_chatgpt_jwt_claims(access_token).map_err(std::io::Error::other)?;
        id_token.chatgpt_account_id = Some(chatgpt_account_id.to_string());
        // Precedence: caller-supplied plan, then the JWT's claim, then "unknown".
        let plan = match chatgpt_plan_type {
            Some(raw) => PlanType::from_raw_value(raw),
            None => id_token
                .chatgpt_plan_type
                .take()
                .unwrap_or_else(|| PlanType::Unknown("unknown".to_string())),
        };
        id_token.chatgpt_plan_type = Some(plan);
        Ok(Self {
            auth_mode: Some(AuthMode::ChatgptAuthTokens),
            openai_api_key: None,
            tokens: Some(TokenData {
                id_token,
                access_token: access_token.to_string(),
                refresh_token: String::new(),
                account_id: Some(chatgpt_account_id.to_string()),
            }),
            last_refresh: Some(Utc::now()),
        })
    }

    /// Effective auth mode: an explicit mode wins, then API-key presence,
    /// otherwise ChatGPT.
    pub fn resolved_mode(&self) -> AuthMode {
        match (self.auth_mode, self.openai_api_key.as_ref()) {
            (Some(mode), _) => mode,
            (None, Some(_)) => AuthMode::ApiKey,
            (None, None) => AuthMode::Chatgpt,
        }
    }

    /// Externally supplied ChatGPT tokens are always kept ephemeral; every
    /// other mode uses the configured store.
    pub fn storage_mode(
        &self,
        auth_credentials_store_mode: AuthCredentialsStoreMode,
    ) -> AuthCredentialsStoreMode {
        match self.resolved_mode() {
            AuthMode::ChatgptAuthTokens => AuthCredentialsStoreMode::Ephemeral,
            _ => auth_credentials_store_mode,
        }
    }
}
/// Location of the JSON credentials file inside `codex_home`.
fn get_auth_file(codex_home: &Path) -> PathBuf {
    codex_home.join("auth.json")
}

/// Remove `auth.json` if present. Ok(true) when a file was actually deleted;
/// a missing file is not an error.
fn delete_file_if_exists(codex_home: &Path) -> std::io::Result<bool> {
    let path = get_auth_file(codex_home);
    match std::fs::remove_file(&path) {
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(false),
        Err(err) => Err(err),
        Ok(()) => Ok(true),
    }
}
/// Backend-agnostic persistence for CLI auth credentials.
pub trait AuthStorageBackend: Debug + Send + Sync {
    /// Load stored credentials; Ok(None) when nothing is stored.
    fn load(&self) -> std::io::Result<Option<AuthDotJson>>;
    /// Persist `auth`, replacing any previously stored value.
    fn save(&self, auth: &AuthDotJson) -> std::io::Result<()>;
    /// Remove stored credentials; Ok(true) when something was removed.
    fn delete(&self) -> std::io::Result<bool>;
}
#[derive(Clone, Debug)]
struct FileAuthStorage {
codex_home: PathBuf,
}
impl FileAuthStorage {
fn new(codex_home: PathBuf) -> Self {
Self { codex_home }
}
fn try_read_auth_json(&self, auth_file: &Path) -> std::io::Result<AuthDotJson> {
let mut file = File::open(auth_file)?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
let auth_dot_json: AuthDotJson = serde_json::from_str(&contents)?;
Ok(auth_dot_json)
}
}
impl AuthStorageBackend for FileAuthStorage {
    fn load(&self) -> std::io::Result<Option<AuthDotJson>> {
        let auth_file = get_auth_file(&self.codex_home);
        // A missing file means "not logged in", not an error.
        let auth_dot_json = match self.try_read_auth_json(&auth_file) {
            Ok(auth) => auth,
            Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(None),
            Err(err) => return Err(err),
        };
        Ok(Some(auth_dot_json))
    }
    fn save(&self, auth_dot_json: &AuthDotJson) -> std::io::Result<()> {
        let auth_file = get_auth_file(&self.codex_home);
        if let Some(parent) = auth_file.parent() {
            std::fs::create_dir_all(parent)?;
        }
        let json_data = serde_json::to_string_pretty(auth_dot_json)?;
        let mut options = OpenOptions::new();
        options.truncate(true).write(true).create(true);
        #[cfg(unix)]
        {
            // Owner-only permissions for the credentials file; `mode` takes
            // effect when the file is created by this open call.
            options.mode(0o600);
        }
        let mut file = options.open(auth_file)?;
        file.write_all(json_data.as_bytes())?;
        file.flush()?;
        Ok(())
    }
    fn delete(&self) -> std::io::Result<bool> {
        delete_file_if_exists(&self.codex_home)
    }
}
/// Service name under which CLI credentials live in the OS keyring.
const KEYRING_SERVICE: &str = "Codex Auth";

/// Derive a stable keyring entry name from the codex home path:
/// "cli|" + first 16 hex chars of SHA-256 of the canonicalized path.
fn compute_store_key(codex_home: &Path) -> std::io::Result<String> {
    // Canonicalize when possible so aliases of the same directory share a key;
    // fall back to the raw path if canonicalization fails.
    let canonical = codex_home
        .canonicalize()
        .unwrap_or_else(|_| codex_home.to_path_buf());
    let digest = Sha256::digest(canonical.to_string_lossy().as_bytes());
    let hex = format!("{digest:x}");
    let truncated = hex.get(..16).unwrap_or(&hex);
    Ok(format!("cli|{truncated}"))
}
#[derive(Clone, Debug)]
struct KeyringAuthStorage {
codex_home: PathBuf,
keyring_store: Arc<dyn KeyringStore>,
}
impl KeyringAuthStorage {
fn new(codex_home: PathBuf, keyring_store: Arc<dyn KeyringStore>) -> Self {
Self {
codex_home,
keyring_store,
}
}
fn load_from_keyring(&self, key: &str) -> std::io::Result<Option<AuthDotJson>> {
match self.keyring_store.load(KEYRING_SERVICE, key) {
Ok(Some(serialized)) => serde_json::from_str(&serialized).map(Some).map_err(|err| {
std::io::Error::other(format!(
"failed to deserialize CLI auth from keyring: {err}"
))
}),
Ok(None) => Ok(None),
Err(error) => Err(std::io::Error::other(format!(
"failed to load CLI auth from keyring: {}",
error.message()
))),
}
}
fn save_to_keyring(&self, key: &str, value: &str) -> std::io::Result<()> {
match self.keyring_store.save(KEYRING_SERVICE, key, value) {
Ok(()) => Ok(()),
Err(error) => {
let message = format!(
"failed to write OAuth tokens to keyring: {}",
error.message()
);
warn!("{message}");
Err(std::io::Error::other(message))
}
}
}
}
impl AuthStorageBackend for KeyringAuthStorage {
    fn load(&self) -> std::io::Result<Option<AuthDotJson>> {
        self.load_from_keyring(&compute_store_key(&self.codex_home)?)
    }

    fn save(&self, auth: &AuthDotJson) -> std::io::Result<()> {
        let key = compute_store_key(&self.codex_home)?;
        let serialized = serde_json::to_string(auth).map_err(std::io::Error::other)?;
        self.save_to_keyring(&key, &serialized)?;
        // Best-effort cleanup of a stale auth.json so the file copy cannot
        // shadow the keyring entry; failure to remove is only logged.
        if let Err(err) = delete_file_if_exists(&self.codex_home) {
            warn!("failed to remove CLI auth fallback file: {err}");
        }
        Ok(())
    }

    fn delete(&self) -> std::io::Result<bool> {
        let key = compute_store_key(&self.codex_home)?;
        let from_keyring = self
            .keyring_store
            .delete(KEYRING_SERVICE, &key)
            .map_err(|err| {
                std::io::Error::other(format!("failed to delete auth from keyring: {err}"))
            })?;
        // Also clear any fallback file; report removal if either succeeded.
        let from_file = delete_file_if_exists(&self.codex_home)?;
        Ok(from_keyring || from_file)
    }
}
/// Backend that prefers the OS keyring and falls back to the auth.json file.
#[derive(Clone, Debug)]
struct AutoAuthStorage {
    keyring_storage: Arc<KeyringAuthStorage>,
    file_storage: Arc<FileAuthStorage>,
}
impl AutoAuthStorage {
    fn new(codex_home: PathBuf, keyring_store: Arc<dyn KeyringStore>) -> Self {
        Self {
            keyring_storage: Arc::new(KeyringAuthStorage::new(codex_home.clone(), keyring_store)),
            file_storage: Arc::new(FileAuthStorage::new(codex_home)),
        }
    }
}
impl AuthStorageBackend for AutoAuthStorage {
    /// Prefer the keyring; fall back to the file store when the keyring is
    /// empty or unavailable.
    fn load(&self) -> std::io::Result<Option<AuthDotJson>> {
        match self.keyring_storage.load() {
            Ok(Some(auth)) => Ok(Some(auth)),
            Ok(None) => self.file_storage.load(),
            Err(err) => {
                warn!("failed to load CLI auth from keyring, falling back to file storage: {err}");
                self.file_storage.load()
            }
        }
    }

    /// Try the keyring first; on failure, warn and persist to the fallback
    /// file so credentials are never silently dropped.
    fn save(&self, auth: &AuthDotJson) -> std::io::Result<()> {
        match self.keyring_storage.save(auth) {
            Ok(()) => Ok(()),
            Err(err) => {
                warn!("failed to save auth to keyring, falling back to file storage: {err}");
                self.file_storage.save(auth)
            }
        }
    }

    /// Delete via the keyring backend (which also clears the fallback file).
    /// If the keyring backend itself errors, fall back to deleting the file
    /// so a logout still removes on-disk credentials — mirroring the
    /// fallback behavior of `load` and `save`. Previously a keyring error
    /// aborted the whole delete and left auth.json in place.
    fn delete(&self) -> std::io::Result<bool> {
        match self.keyring_storage.delete() {
            Ok(removed) => Ok(removed),
            Err(err) => {
                warn!(
                    "failed to delete CLI auth from keyring, falling back to file storage: {err}"
                );
                self.file_storage.delete()
            }
        }
    }
}
/// Process-wide in-memory credential map, keyed by the per-home store key
/// (see `compute_store_key`). Lives only for the lifetime of this process.
static EPHEMERAL_AUTH_STORE: Lazy<Mutex<HashMap<String, AuthDotJson>>> =
    Lazy::new(|| Mutex::new(HashMap::new()));
/// Storage backend that never touches disk or the keyring.
#[derive(Clone, Debug)]
struct EphemeralAuthStorage {
    codex_home: PathBuf,
}
impl EphemeralAuthStorage {
    fn new(codex_home: PathBuf) -> Self {
        Self { codex_home }
    }
    // Runs `action` with the global map locked and this home's store key.
    fn with_store<F, T>(&self, action: F) -> std::io::Result<T>
    where
        F: FnOnce(&mut HashMap<String, AuthDotJson>, String) -> std::io::Result<T>,
    {
        let key = compute_store_key(&self.codex_home)?;
        let mut store = EPHEMERAL_AUTH_STORE
            .lock()
            .map_err(|_| std::io::Error::other("failed to lock ephemeral auth storage"))?;
        action(&mut store, key)
    }
}
impl AuthStorageBackend for EphemeralAuthStorage {
    fn load(&self) -> std::io::Result<Option<AuthDotJson>> {
        self.with_store(|store, key| Ok(store.get(&key).cloned()))
    }
    fn save(&self, auth: &AuthDotJson) -> std::io::Result<()> {
        self.with_store(|store, key| {
            store.insert(key, auth.clone());
            Ok(())
        })
    }
    fn delete(&self) -> std::io::Result<bool> {
        self.with_store(|store, key| Ok(store.remove(&key).is_some()))
    }
}
/// Construct the auth storage backend for `mode`, backed by the platform's
/// default keyring implementation.
pub fn create_auth_storage(
    codex_home: PathBuf,
    mode: AuthCredentialsStoreMode,
) -> Arc<dyn AuthStorageBackend> {
    create_auth_storage_with_keyring_store(codex_home, mode, Arc::new(DefaultKeyringStore))
}

/// Same as [`create_auth_storage`], but with an injectable keyring store so
/// tests can substitute a mock.
fn create_auth_storage_with_keyring_store(
    codex_home: PathBuf,
    mode: AuthCredentialsStoreMode,
    keyring_store: Arc<dyn KeyringStore>,
) -> Arc<dyn AuthStorageBackend> {
    match mode {
        AuthCredentialsStoreMode::File => Arc::new(FileAuthStorage::new(codex_home)),
        AuthCredentialsStoreMode::Keyring => {
            Arc::new(KeyringAuthStorage::new(codex_home, keyring_store))
        }
        AuthCredentialsStoreMode::Auto => Arc::new(AutoAuthStorage::new(codex_home, keyring_store)),
        AuthCredentialsStoreMode::Ephemeral => Arc::new(EphemeralAuthStorage::new(codex_home)),
    }
}

#[cfg(test)]
#[path = "storage_tests.rs"]
mod tests;

View File

@@ -0,0 +1,289 @@
use super::*;
use crate::token_data::IdTokenInfo;
use anyhow::Context;
use base64::Engine;
use codex_keyring_store::tests::MockKeyringStore;
use keyring::Error as KeyringError;
use pretty_assertions::assert_eq;
use serde::Serialize;
use serde_json::json;
use tempfile::tempdir;
// File backend round-trips an AuthDotJson through save/load.
#[tokio::test]
async fn file_storage_load_returns_auth_dot_json() -> anyhow::Result<()> {
    let codex_home = tempdir()?;
    let storage = FileAuthStorage::new(codex_home.path().to_path_buf());
    let auth_dot_json = AuthDotJson {
        auth_mode: Some(AuthMode::ApiKey),
        openai_api_key: Some("test-key".to_string()),
        tokens: None,
        last_refresh: Some(Utc::now()),
    };
    storage
        .save(&auth_dot_json)
        .context("failed to save auth file")?;
    let loaded = storage.load().context("failed to load auth file")?;
    assert_eq!(Some(auth_dot_json), loaded);
    Ok(())
}

// A saved auth.json can be read back directly via try_read_auth_json.
#[tokio::test]
async fn file_storage_save_persists_auth_dot_json() -> anyhow::Result<()> {
    let codex_home = tempdir()?;
    let storage = FileAuthStorage::new(codex_home.path().to_path_buf());
    let auth_dot_json = AuthDotJson {
        auth_mode: Some(AuthMode::ApiKey),
        openai_api_key: Some("test-key".to_string()),
        tokens: None,
        last_refresh: Some(Utc::now()),
    };
    let file = get_auth_file(codex_home.path());
    storage
        .save(&auth_dot_json)
        .context("failed to save auth file")?;
    let same_auth_dot_json = storage
        .try_read_auth_json(&file)
        .context("failed to read auth file after save")?;
    assert_eq!(auth_dot_json, same_auth_dot_json);
    Ok(())
}

// delete() removes the on-disk auth.json and reports that it did.
#[test]
fn file_storage_delete_removes_auth_file() -> anyhow::Result<()> {
    let dir = tempdir()?;
    let auth_dot_json = AuthDotJson {
        auth_mode: Some(AuthMode::ApiKey),
        openai_api_key: Some("sk-test-key".to_string()),
        tokens: None,
        last_refresh: None,
    };
    let storage = create_auth_storage(dir.path().to_path_buf(), AuthCredentialsStoreMode::File);
    storage.save(&auth_dot_json)?;
    assert!(dir.path().join("auth.json").exists());
    let storage = FileAuthStorage::new(dir.path().to_path_buf());
    let removed = storage.delete()?;
    assert!(removed);
    assert!(!dir.path().join("auth.json").exists());
    Ok(())
}

// Ephemeral backend round-trips in memory and never creates auth.json.
#[test]
fn ephemeral_storage_save_load_delete_is_in_memory_only() -> anyhow::Result<()> {
    let dir = tempdir()?;
    let storage = create_auth_storage(
        dir.path().to_path_buf(),
        AuthCredentialsStoreMode::Ephemeral,
    );
    let auth_dot_json = AuthDotJson {
        auth_mode: Some(AuthMode::ApiKey),
        openai_api_key: Some("sk-ephemeral".to_string()),
        tokens: None,
        last_refresh: Some(Utc::now()),
    };
    storage.save(&auth_dot_json)?;
    let loaded = storage.load()?;
    assert_eq!(Some(auth_dot_json), loaded);
    let removed = storage.delete()?;
    assert!(removed);
    let loaded = storage.load()?;
    assert_eq!(None, loaded);
    assert!(!get_auth_file(dir.path()).exists());
    Ok(())
}
// Seeds the mock keyring and a stale fallback auth.json; used by delete tests.
fn seed_keyring_and_fallback_auth_file_for_delete<F>(
    mock_keyring: &MockKeyringStore,
    codex_home: &Path,
    compute_key: F,
) -> anyhow::Result<(String, PathBuf)>
where
    F: FnOnce() -> std::io::Result<String>,
{
    let key = compute_key()?;
    mock_keyring.save(KEYRING_SERVICE, &key, "{}")?;
    let auth_file = get_auth_file(codex_home);
    std::fs::write(&auth_file, "stale")?;
    Ok((key, auth_file))
}

// Stores a serialized AuthDotJson in the mock keyring under the computed key.
fn seed_keyring_with_auth<F>(
    mock_keyring: &MockKeyringStore,
    compute_key: F,
    auth: &AuthDotJson,
) -> anyhow::Result<()>
where
    F: FnOnce() -> std::io::Result<String>,
{
    let key = compute_key()?;
    let serialized = serde_json::to_string(auth)?;
    mock_keyring.save(KEYRING_SERVICE, &key, &serialized)?;
    Ok(())
}

// Asserts the keyring holds `expected` and the fallback file was cleaned up.
fn assert_keyring_saved_auth_and_removed_fallback(
    mock_keyring: &MockKeyringStore,
    key: &str,
    codex_home: &Path,
    expected: &AuthDotJson,
) {
    let saved_value = mock_keyring
        .saved_value(key)
        .expect("keyring entry should exist");
    let expected_serialized = serde_json::to_string(expected).expect("serialize expected auth");
    assert_eq!(saved_value, expected_serialized);
    let auth_file = get_auth_file(codex_home);
    assert!(
        !auth_file.exists(),
        "fallback auth.json should be removed after keyring save"
    );
}

// Builds an IdTokenInfo by round-tripping a fake unsigned JWT through the parser.
fn id_token_with_prefix(prefix: &str) -> IdTokenInfo {
    #[derive(Serialize)]
    struct Header {
        alg: &'static str,
        typ: &'static str,
    }
    let header = Header {
        alg: "none",
        typ: "JWT",
    };
    let payload = json!({
        "email": format!("{prefix}@example.com"),
        "https://api.openai.com/auth": {
            "chatgpt_account_id": format!("{prefix}-account"),
        },
    });
    let encode = |bytes: &[u8]| base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(bytes);
    let header_b64 = encode(&serde_json::to_vec(&header).expect("serialize header"));
    let payload_b64 = encode(&serde_json::to_vec(&payload).expect("serialize payload"));
    let signature_b64 = encode(b"sig");
    let fake_jwt = format!("{header_b64}.{payload_b64}.{signature_b64}");
    crate::token_data::parse_chatgpt_jwt_claims(&fake_jwt).expect("fake JWT should parse")
}

// Fully populated AuthDotJson whose field values are derived from `prefix`.
fn auth_with_prefix(prefix: &str) -> AuthDotJson {
    AuthDotJson {
        auth_mode: Some(AuthMode::ApiKey),
        openai_api_key: Some(format!("{prefix}-api-key")),
        tokens: Some(TokenData {
            id_token: id_token_with_prefix(prefix),
            access_token: format!("{prefix}-access"),
            refresh_token: format!("{prefix}-refresh"),
            account_id: Some(format!("{prefix}-account-id")),
        }),
        last_refresh: None,
    }
}
// Keyring backend deserializes whatever the keyring holds for this home.
#[test]
fn keyring_auth_storage_load_returns_deserialized_auth() -> anyhow::Result<()> {
    let codex_home = tempdir()?;
    let mock_keyring = MockKeyringStore::default();
    let storage = KeyringAuthStorage::new(
        codex_home.path().to_path_buf(),
        Arc::new(mock_keyring.clone()),
    );
    let expected = AuthDotJson {
        auth_mode: Some(AuthMode::ApiKey),
        openai_api_key: Some("sk-test".to_string()),
        tokens: None,
        last_refresh: None,
    };
    seed_keyring_with_auth(
        &mock_keyring,
        || compute_store_key(codex_home.path()),
        &expected,
    )?;
    let loaded = storage.load()?;
    assert_eq!(Some(expected), loaded);
    Ok(())
}

// Pins the key-derivation output so existing keyring entries keep resolving.
#[test]
fn keyring_auth_storage_compute_store_key_for_home_directory() -> anyhow::Result<()> {
    let codex_home = PathBuf::from("~/.codex");
    let key = compute_store_key(codex_home.as_path())?;
    assert_eq!(key, "cli|940db7b1d0e4eb40");
    Ok(())
}

// Saving via the keyring backend also removes any stale fallback file.
#[test]
fn keyring_auth_storage_save_persists_and_removes_fallback_file() -> anyhow::Result<()> {
    let codex_home = tempdir()?;
    let mock_keyring = MockKeyringStore::default();
    let storage = KeyringAuthStorage::new(
        codex_home.path().to_path_buf(),
        Arc::new(mock_keyring.clone()),
    );
    let auth_file = get_auth_file(codex_home.path());
    std::fs::write(&auth_file, "stale")?;
    let auth = AuthDotJson {
        auth_mode: Some(AuthMode::Chatgpt),
        openai_api_key: None,
        tokens: Some(TokenData {
            id_token: Default::default(),
            access_token: "access".to_string(),
            refresh_token: "refresh".to_string(),
            account_id: Some("account".to_string()),
        }),
        last_refresh: Some(Utc::now()),
    };
    storage.save(&auth)?;
    let key = compute_store_key(codex_home.path())?;
    assert_keyring_saved_auth_and_removed_fallback(&mock_keyring, &key, codex_home.path(), &auth);
    Ok(())
}

// Delete clears both the keyring entry and the fallback file.
#[test]
fn keyring_auth_storage_delete_removes_keyring_and_file() -> anyhow::Result<()> {
    let codex_home = tempdir()?;
    let mock_keyring = MockKeyringStore::default();
    let storage = KeyringAuthStorage::new(
        codex_home.path().to_path_buf(),
        Arc::new(mock_keyring.clone()),
    );
    let (key, auth_file) =
        seed_keyring_and_fallback_auth_file_for_delete(&mock_keyring, codex_home.path(), || {
            compute_store_key(codex_home.path())
        })?;
    let removed = storage.delete()?;
    assert!(removed);
    assert!(!mock_keyring.contains(&key));
    assert!(!auth_file.exists());
    Ok(())
}

// Auto backend serves the file copy when the keyring load errors out.
#[test]
fn auto_auth_storage_falls_back_to_file_on_keyring_load_error() -> anyhow::Result<()> {
    let codex_home = tempdir()?;
    let mock_keyring = MockKeyringStore::default();
    let key = compute_store_key(codex_home.path())?;
    mock_keyring.set_error(&key, KeyringError::NoEntry);
    let storage = AutoAuthStorage::new(
        codex_home.path().to_path_buf(),
        Arc::new(mock_keyring.clone()),
    );
    let expected = auth_with_prefix("fallback");
    FileAuthStorage::new(codex_home.path().to_path_buf()).save(&expected)?;
    let loaded = storage.load()?;
    assert_eq!(Some(expected), loaded);
    Ok(())
}

View File

@@ -0,0 +1,167 @@
use base64::Engine;
use serde::Deserialize;
use serde::Serialize;
use thiserror::Error;
/// OAuth token set persisted in auth.json.
#[derive(Deserialize, Serialize, Clone, Debug, PartialEq, Default)]
pub struct TokenData {
    // Serialized as the raw JWT string; parsed into structured claims on load.
    #[serde(
        deserialize_with = "deserialize_id_token",
        serialize_with = "serialize_id_token"
    )]
    pub id_token: IdTokenInfo,
    pub access_token: String,
    pub refresh_token: String,
    pub account_id: Option<String>,
}
/// Claims extracted from a ChatGPT JWT, plus the raw token itself.
#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize)]
pub struct IdTokenInfo {
    pub email: Option<String>,
    pub chatgpt_plan_type: Option<PlanType>,
    pub chatgpt_user_id: Option<String>,
    pub chatgpt_account_id: Option<String>,
    // Original compact JWT; round-tripped verbatim when serializing.
    pub raw_jwt: String,
}
impl IdTokenInfo {
pub fn get_chatgpt_plan_type(&self) -> Option<String> {
self.chatgpt_plan_type.as_ref().map(|t| match t {
PlanType::Known(plan) => format!("{plan:?}"),
PlanType::Unknown(s) => s.clone(),
})
}
pub fn is_workspace_account(&self) -> bool {
matches!(
self.chatgpt_plan_type,
Some(PlanType::Known(
KnownPlan::Team | KnownPlan::Business | KnownPlan::Enterprise | KnownPlan::Edu
))
)
}
}
/// A subscription plan: either one we recognize, or the raw string as-is.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum PlanType {
    Known(KnownPlan),
    Unknown(String),
}

impl PlanType {
    /// Case-insensitive mapping from a raw plan string; unrecognized values
    /// are preserved verbatim in `Unknown`.
    pub fn from_raw_value(raw: &str) -> Self {
        let known = match raw.to_ascii_lowercase().as_str() {
            "free" => KnownPlan::Free,
            "go" => KnownPlan::Go,
            "plus" => KnownPlan::Plus,
            "pro" => KnownPlan::Pro,
            "team" => KnownPlan::Team,
            "business" => KnownPlan::Business,
            "enterprise" => KnownPlan::Enterprise,
            // Both spellings map to the same plan.
            "education" | "edu" => KnownPlan::Edu,
            _ => return Self::Unknown(raw.to_string()),
        };
        Self::Known(known)
    }
}

/// Plans the CLI explicitly recognizes.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum KnownPlan {
    Free,
    Go,
    Plus,
    Pro,
    Team,
    Business,
    Enterprise,
    Edu,
}
/// Subset of the JWT payload claims read by `parse_chatgpt_jwt_claims`.
#[derive(Deserialize)]
struct IdClaims {
    #[serde(default)]
    email: Option<String>,
    // Nested profile claims; may also carry the email.
    #[serde(rename = "https://api.openai.com/profile", default)]
    profile: Option<ProfileClaims>,
    // Nested auth claims (plan, user id, account id).
    #[serde(rename = "https://api.openai.com/auth", default)]
    auth: Option<AuthClaims>,
}
#[derive(Deserialize)]
struct ProfileClaims {
    #[serde(default)]
    email: Option<String>,
}
#[derive(Deserialize)]
struct AuthClaims {
    #[serde(default)]
    chatgpt_plan_type: Option<PlanType>,
    #[serde(default)]
    chatgpt_user_id: Option<String>,
    // Alternate key; consulted when `chatgpt_user_id` is absent.
    #[serde(default)]
    user_id: Option<String>,
    #[serde(default)]
    chatgpt_account_id: Option<String>,
}
/// Failure modes when extracting claims from a JWT string.
#[derive(Debug, Error)]
pub enum IdTokenInfoError {
    /// Token does not have the expected header.payload.signature shape.
    #[error("invalid ID token format")]
    InvalidFormat,
    /// Payload segment is not valid URL-safe base64.
    #[error(transparent)]
    Base64(#[from] base64::DecodeError),
    /// Payload bytes are not valid JSON for the expected claims.
    #[error(transparent)]
    Json(#[from] serde_json::Error),
}
/// Extract the claims we care about from a compact-serialized JWT.
///
/// The signature is NOT verified here — this only decodes the payload.
///
/// # Errors
/// `InvalidFormat` when the token is not exactly three non-empty
/// dot-separated segments (previously trailing extra segments were silently
/// accepted); `Base64`/`Json` when the payload cannot be decoded.
pub fn parse_chatgpt_jwt_claims(jwt: &str) -> Result<IdTokenInfo, IdTokenInfoError> {
    let mut parts = jwt.split('.');
    // Require exactly header.payload.signature — a fourth segment means this
    // is not a compact JWS and is rejected.
    let payload_b64 = match (parts.next(), parts.next(), parts.next(), parts.next()) {
        (Some(header), Some(payload), Some(sig), None)
            if !header.is_empty() && !payload.is_empty() && !sig.is_empty() =>
        {
            payload
        }
        _ => return Err(IdTokenInfoError::InvalidFormat),
    };
    let payload_bytes = base64::engine::general_purpose::URL_SAFE_NO_PAD.decode(payload_b64)?;
    let claims: IdClaims = serde_json::from_slice(&payload_bytes)?;
    // Top-level email wins; fall back to the nested profile claim.
    let email = claims
        .email
        .or_else(|| claims.profile.and_then(|profile| profile.email));
    // Flatten the optional auth claims once instead of duplicating the
    // IdTokenInfo construction per branch.
    let (chatgpt_plan_type, chatgpt_user_id, chatgpt_account_id) = match claims.auth {
        Some(auth) => (
            auth.chatgpt_plan_type,
            auth.chatgpt_user_id.or(auth.user_id),
            auth.chatgpt_account_id,
        ),
        None => (None, None, None),
    };
    Ok(IdTokenInfo {
        email,
        chatgpt_plan_type,
        chatgpt_user_id,
        chatgpt_account_id,
        raw_jwt: jwt.to_string(),
    })
}
/// Deserialize the `id_token` field: auth.json stores the raw JWT string,
/// which is parsed back into a structured `IdTokenInfo` here.
fn deserialize_id_token<'de, D>(deserializer: D) -> Result<IdTokenInfo, D::Error>
where
    D: serde::Deserializer<'de>,
{
    let s = String::deserialize(deserializer)?;
    parse_chatgpt_jwt_claims(&s).map_err(serde::de::Error::custom)
}
/// Serialize the `id_token` field as the original raw JWT string.
fn serialize_id_token<S>(id_token: &IdTokenInfo, serializer: S) -> Result<S::Ok, S::Error>
where
    S: serde::Serializer,
{
    serializer.serialize_str(&id_token.raw_jwt)
}
#[cfg(test)]
#[path = "token_data_tests.rs"]
mod tests;

View File

@@ -0,0 +1,20 @@
use super::*;
use pretty_assertions::assert_eq;
// The payload segment of this unsigned fixture JWT decodes to:
// {"email":"user@example.com","https://api.openai.com/auth":
//  {"chatgpt_plan_type":"pro","chatgpt_user_id":"user-1","chatgpt_account_id":"ws-1"}}
#[test]
fn parses_id_token_claims() {
    let jwt = "eyJhbGciOiJub25lIn0.eyJlbWFpbCI6InVzZXJAZXhhbXBsZS5jb20iLCJodHRwczovL2FwaS5vcGVuYWkuY29tL2F1dGgiOnsiY2hhdGdwdF9wbGFuX3R5cGUiOiJwcm8iLCJjaGF0Z3B0X3VzZXJfaWQiOiJ1c2VyLTEiLCJjaGF0Z3B0X2FjY291bnRfaWQiOiJ3cy0xIn19.c2ln";
    let claims = parse_chatgpt_jwt_claims(jwt).expect("jwt should parse");
    assert_eq!(
        claims,
        IdTokenInfo {
            email: Some("user@example.com".to_string()),
            chatgpt_plan_type: Some(PlanType::Known(KnownPlan::Pro)),
            chatgpt_user_id: Some("user-1".to_string()),
            chatgpt_account_id: Some("ws-1".to_string()),
            raw_jwt: jwt.to_string(),
        }
    );
}

View File

@@ -28,6 +28,7 @@ chardetng = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
clap = { workspace = true, features = ["derive"] }
codex-api = { workspace = true }
codex-auth = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-apply-patch = { workspace = true }
codex-async-utils = { workspace = true }
@@ -41,7 +42,6 @@ codex-file-search = { workspace = true }
codex-git = { workspace = true }
codex-hooks = { workspace = true }
codex-keyring-store = { workspace = true }
codex-memories = { workspace = true }
codex-network-proxy = { workspace = true }
codex-otel = { workspace = true }
codex-artifacts = { workspace = true }

View File

@@ -7,7 +7,6 @@ use serde::Deserialize;
use serde::Serialize;
#[cfg(test)]
use serial_test::serial;
use std::env;
use std::fmt::Debug;
use std::path::Path;
use std::path::PathBuf;
@@ -31,6 +30,14 @@ use crate::token_data::PlanType as InternalPlanType;
use crate::token_data::TokenData;
use crate::token_data::parse_chatgpt_jwt_claims;
use crate::util::try_parse_error_message;
pub use codex_auth::REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR;
pub use codex_auth::load_auth_dot_json;
pub use codex_auth::login_with_api_key;
pub use codex_auth::login_with_chatgpt_auth_tokens;
pub use codex_auth::logout;
pub use codex_auth::read_codex_api_key_from_env;
pub use codex_auth::read_openai_api_key_from_env;
pub use codex_auth::save_auth;
use codex_client::CodexHttpClient;
use codex_protocol::account::PlanType as AccountPlanType;
use serde_json::Value;
@@ -102,7 +109,6 @@ const REFRESH_TOKEN_UNKNOWN_MESSAGE: &str =
"Your access token could not be refreshed. Please log out and sign in again.";
const REFRESH_TOKEN_ACCOUNT_MISMATCH_MESSAGE: &str = "Your access token could not be refreshed because you have since logged out or signed in to another account. Please sign in again.";
const REFRESH_TOKEN_URL: &str = "https://auth.openai.com/oauth/token";
pub const REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR: &str = "CODEX_REFRESH_TOKEN_URL_OVERRIDE";
#[derive(Debug, Error)]
pub enum RefreshTokenError {
@@ -112,12 +118,7 @@ pub enum RefreshTokenError {
Transient(#[from] std::io::Error),
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ExternalAuthTokens {
pub access_token: String,
pub chatgpt_account_id: String,
pub chatgpt_plan_type: Option<String>,
}
pub use codex_auth::ExternalAuthTokens;
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ExternalAuthRefreshReason {
@@ -374,90 +375,6 @@ impl ChatgptAuth {
}
}
pub const OPENAI_API_KEY_ENV_VAR: &str = "OPENAI_API_KEY";
pub const CODEX_API_KEY_ENV_VAR: &str = "CODEX_API_KEY";
/// Read `OPENAI_API_KEY` from the environment; `None` when unset or blank.
pub fn read_openai_api_key_from_env() -> Option<String> {
    non_empty_trimmed_env(OPENAI_API_KEY_ENV_VAR)
}
/// Read `CODEX_API_KEY` from the environment; `None` when unset or blank.
pub fn read_codex_api_key_from_env() -> Option<String> {
    non_empty_trimmed_env(CODEX_API_KEY_ENV_VAR)
}
/// Shared helper: trim the variable's value and drop empty results.
fn non_empty_trimmed_env(name: &str) -> Option<String> {
    let value = std::env::var(name).ok()?;
    let trimmed = value.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
/// Remove stored CLI credentials for `codex_home` from the configured backend.
///
/// Returns `Ok(true)` when credentials were deleted, `Ok(false)` when none
/// were present.
pub fn logout(
    codex_home: &Path,
    auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<bool> {
    create_auth_storage(codex_home.to_path_buf(), auth_credentials_store_mode).delete()
}
/// Persist an auth payload that carries only the provided API key.
pub fn login_with_api_key(
    codex_home: &Path,
    api_key: &str,
    auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<()> {
    let auth = AuthDotJson {
        auth_mode: Some(ApiAuthMode::ApiKey),
        openai_api_key: Some(api_key.to_owned()),
        last_refresh: None,
        tokens: None,
    };
    save_auth(codex_home, &auth, auth_credentials_store_mode)
}
/// Store externally managed ChatGPT tokens for this process.
pub fn login_with_chatgpt_auth_tokens(
    codex_home: &Path,
    access_token: &str,
    chatgpt_account_id: &str,
    chatgpt_plan_type: Option<&str>,
) -> std::io::Result<()> {
    let auth =
        AuthDotJson::from_external_access_token(access_token, chatgpt_account_id, chatgpt_plan_type)?;
    // External tokens are kept in memory only; they are never written to disk.
    save_auth(codex_home, &auth, AuthCredentialsStoreMode::Ephemeral)
}
/// Persist the provided auth payload using the specified backend.
pub fn save_auth(
    codex_home: &Path,
    auth: &AuthDotJson,
    auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<()> {
    create_auth_storage(codex_home.to_path_buf(), auth_credentials_store_mode).save(auth)
}
/// Load CLI auth data from the configured credential store backend.
///
/// Returns `None` when no credentials are stored. Exposed for tests only;
/// production code should go through the `AuthManager` abstraction instead of
/// reading auth storage directly.
pub fn load_auth_dot_json(
    codex_home: &Path,
    auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> std::io::Result<Option<AuthDotJson>> {
    create_auth_storage(codex_home.to_path_buf(), auth_credentials_store_mode).load()
}
pub fn enforce_login_restrictions(config: &Config) -> std::io::Result<()> {
let Some(auth) = load_auth(
&config.codex_home,
@@ -752,67 +669,6 @@ fn refresh_token_endpoint() -> String {
.unwrap_or_else(|_| REFRESH_TOKEN_URL.to_string())
}
impl AuthDotJson {
    /// Build an auth payload from externally supplied ChatGPT tokens.
    ///
    /// The account id always comes from `external`; the plan type is resolved
    /// by precedence (see inline comment below).
    fn from_external_tokens(external: &ExternalAuthTokens) -> std::io::Result<Self> {
        let mut token_info =
            parse_chatgpt_jwt_claims(&external.access_token).map_err(std::io::Error::other)?;
        token_info.chatgpt_account_id = Some(external.chatgpt_account_id.clone());
        // Precedence: caller-provided plan type > plan type parsed from the
        // JWT claims > "unknown" sentinel, so the field is always Some(_).
        token_info.chatgpt_plan_type = external
            .chatgpt_plan_type
            .as_deref()
            .map(InternalPlanType::from_raw_value)
            .or(token_info.chatgpt_plan_type)
            .or(Some(InternalPlanType::Unknown("unknown".to_string())));
        let tokens = TokenData {
            id_token: token_info,
            access_token: external.access_token.clone(),
            // Externally managed tokens carry no refresh token of our own.
            refresh_token: String::new(),
            account_id: Some(external.chatgpt_account_id.clone()),
        };
        Ok(Self {
            auth_mode: Some(ApiAuthMode::ChatgptAuthTokens),
            openai_api_key: None,
            tokens: Some(tokens),
            last_refresh: Some(Utc::now()),
        })
    }
    /// Convenience wrapper over [`Self::from_external_tokens`] that accepts
    /// borrowed token parts.
    fn from_external_access_token(
        access_token: &str,
        chatgpt_account_id: &str,
        chatgpt_plan_type: Option<&str>,
    ) -> std::io::Result<Self> {
        let external = ExternalAuthTokens {
            access_token: access_token.to_string(),
            chatgpt_account_id: chatgpt_account_id.to_string(),
            chatgpt_plan_type: chatgpt_plan_type.map(str::to_string),
        };
        Self::from_external_tokens(&external)
    }
    /// Effective auth mode: an explicit `auth_mode` wins; otherwise a stored
    /// API key implies `ApiKey`, and the default is `Chatgpt`.
    fn resolved_mode(&self) -> ApiAuthMode {
        if let Some(mode) = self.auth_mode {
            return mode;
        }
        if self.openai_api_key.is_some() {
            return ApiAuthMode::ApiKey;
        }
        ApiAuthMode::Chatgpt
    }
    /// Storage backend to use for this payload: external ChatGPT tokens are
    /// forced into the ephemeral (in-memory) store; everything else uses the
    /// configured mode.
    fn storage_mode(
        &self,
        auth_credentials_store_mode: AuthCredentialsStoreMode,
    ) -> AuthCredentialsStoreMode {
        if self.resolved_mode() == ApiAuthMode::ChatgptAuthTokens {
            AuthCredentialsStoreMode::Ephemeral
        } else {
            auth_credentials_store_mode
        }
    }
}
/// Internal cached auth state.
#[derive(Clone)]
struct CachedAuth {
@@ -1412,8 +1268,12 @@ impl AuthManager {
),
)));
}
let auth_dot_json =
AuthDotJson::from_external_tokens(&refreshed).map_err(RefreshTokenError::Transient)?;
let auth_dot_json = AuthDotJson::from_external_access_token(
&refreshed.access_token,
&refreshed.chatgpt_account_id,
refreshed.chatgpt_plan_type.as_deref(),
)
.map_err(RefreshTokenError::Transient)?;
save_auth(
&self.codex_home,
&auth_dot_json,

View File

@@ -1,336 +1,4 @@
use chrono::DateTime;
use chrono::Utc;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use sha2::Digest;
use sha2::Sha256;
use std::collections::HashMap;
use std::fmt::Debug;
use std::fs::File;
use std::fs::OpenOptions;
use std::io::Read;
use std::io::Write;
#[cfg(unix)]
use std::os::unix::fs::OpenOptionsExt;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
use tracing::warn;
use crate::token_data::TokenData;
use codex_app_server_protocol::AuthMode;
use codex_keyring_store::DefaultKeyringStore;
use codex_keyring_store::KeyringStore;
use once_cell::sync::Lazy;
/// Determine where Codex should store CLI auth credentials.
// Serialized/deserialized as lowercase strings: "file", "keyring", "auto",
// "ephemeral" (see the serde attribute below).
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "lowercase")]
pub enum AuthCredentialsStoreMode {
    #[default]
    /// Persist credentials in CODEX_HOME/auth.json.
    File,
    /// Persist credentials in the keyring. Fail if unavailable.
    Keyring,
    /// Use keyring when available; otherwise, fall back to a file in CODEX_HOME.
    Auto,
    /// Store credentials in memory only for the current process.
    Ephemeral,
}
/// Expected structure for $CODEX_HOME/auth.json.
#[derive(Deserialize, Serialize, Clone, Debug, PartialEq)]
pub struct AuthDotJson {
    /// Explicit auth mode, when recorded; absent in files that predate it.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub auth_mode: Option<AuthMode>,
    /// Stored API key; serialized under the `OPENAI_API_KEY` JSON key.
    #[serde(rename = "OPENAI_API_KEY")]
    pub openai_api_key: Option<String>,
    /// OAuth token bundle for ChatGPT-based auth, when logged in that way.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tokens: Option<TokenData>,
    /// Timestamp of the last token refresh, when known.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub last_refresh: Option<DateTime<Utc>>,
}
/// Location of the auth credentials file inside `codex_home`.
pub(super) fn get_auth_file(codex_home: &Path) -> PathBuf {
    const AUTH_FILE_NAME: &str = "auth.json";
    codex_home.join(AUTH_FILE_NAME)
}
/// Delete the auth file under `codex_home`.
///
/// Returns `Ok(true)` when a file was removed and `Ok(false)` when it did not
/// exist; any other I/O failure is propagated.
pub(super) fn delete_file_if_exists(codex_home: &Path) -> std::io::Result<bool> {
    let auth_file = get_auth_file(codex_home);
    if let Err(err) = std::fs::remove_file(&auth_file) {
        // "Already gone" is a successful no-op, not an error.
        if err.kind() == std::io::ErrorKind::NotFound {
            Ok(false)
        } else {
            Err(err)
        }
    } else {
        Ok(true)
    }
}
/// Abstraction over the places CLI auth credentials can be stored
/// (file, keyring, auto fallback, or in-memory).
pub(super) trait AuthStorageBackend: Debug + Send + Sync {
    /// Load the stored payload; `None` when nothing is stored.
    fn load(&self) -> std::io::Result<Option<AuthDotJson>>;
    /// Persist the payload, replacing any previous value.
    fn save(&self, auth: &AuthDotJson) -> std::io::Result<()>;
    /// Remove stored credentials; `true` when something was removed.
    fn delete(&self) -> std::io::Result<bool>;
}
/// Backend that persists credentials as `auth.json` under `codex_home`.
#[derive(Clone, Debug)]
pub(super) struct FileAuthStorage {
    codex_home: PathBuf,
}
impl FileAuthStorage {
pub(super) fn new(codex_home: PathBuf) -> Self {
Self { codex_home }
}
/// Attempt to read and parse the `auth.json` file in the given `CODEX_HOME` directory.
/// Returns the full AuthDotJson structure.
pub(super) fn try_read_auth_json(&self, auth_file: &Path) -> std::io::Result<AuthDotJson> {
let mut file = File::open(auth_file)?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
let auth_dot_json: AuthDotJson = serde_json::from_str(&contents)?;
Ok(auth_dot_json)
}
}
impl AuthStorageBackend for FileAuthStorage {
    fn load(&self) -> std::io::Result<Option<AuthDotJson>> {
        let auth_file = get_auth_file(&self.codex_home);
        // A missing file means "not logged in", not an error.
        let auth_dot_json = match self.try_read_auth_json(&auth_file) {
            Ok(auth) => auth,
            Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(None),
            Err(err) => return Err(err),
        };
        Ok(Some(auth_dot_json))
    }
    fn save(&self, auth_dot_json: &AuthDotJson) -> std::io::Result<()> {
        let auth_file = get_auth_file(&self.codex_home);
        // Make sure CODEX_HOME exists before writing into it.
        if let Some(parent) = auth_file.parent() {
            std::fs::create_dir_all(parent)?;
        }
        let json_data = serde_json::to_string_pretty(auth_dot_json)?;
        let mut options = OpenOptions::new();
        options.truncate(true).write(true).create(true);
        #[cfg(unix)]
        {
            // auth.json holds secrets: restrict to owner read/write on Unix.
            options.mode(0o600);
        }
        let mut file = options.open(auth_file)?;
        file.write_all(json_data.as_bytes())?;
        file.flush()?;
        Ok(())
    }
    fn delete(&self) -> std::io::Result<bool> {
        delete_file_if_exists(&self.codex_home)
    }
}
const KEYRING_SERVICE: &str = "Codex Auth";
/// Derive a stable, short keyring key string from the `codex_home` path.
fn compute_store_key(codex_home: &Path) -> std::io::Result<String> {
    // Canonicalize when possible so aliases of the same directory share a key;
    // fall back to the raw path if canonicalization fails.
    let resolved = codex_home
        .canonicalize()
        .unwrap_or_else(|_| codex_home.to_path_buf());
    let mut hasher = Sha256::new();
    hasher.update(resolved.to_string_lossy().as_bytes());
    let hex = format!("{:x}", hasher.finalize());
    // 16 hex chars keeps the key short while remaining collision-resistant
    // for this purpose; the fallback branch can only fire if hex is shorter.
    let truncated = hex.get(..16).unwrap_or(&hex);
    Ok(format!("cli|{truncated}"))
}
/// Backend that persists credentials in the OS keyring, keyed per `codex_home`.
#[derive(Clone, Debug)]
struct KeyringAuthStorage {
    codex_home: PathBuf,
    keyring_store: Arc<dyn KeyringStore>,
}
impl KeyringAuthStorage {
    fn new(codex_home: PathBuf, keyring_store: Arc<dyn KeyringStore>) -> Self {
        Self {
            codex_home,
            keyring_store,
        }
    }
    /// Fetch and deserialize the auth payload stored under `key`.
    /// Keyring errors are mapped into `std::io::Error` with context.
    fn load_from_keyring(&self, key: &str) -> std::io::Result<Option<AuthDotJson>> {
        match self.keyring_store.load(KEYRING_SERVICE, key) {
            Ok(Some(serialized)) => serde_json::from_str(&serialized).map(Some).map_err(|err| {
                std::io::Error::other(format!(
                    "failed to deserialize CLI auth from keyring: {err}"
                ))
            }),
            Ok(None) => Ok(None),
            Err(error) => Err(std::io::Error::other(format!(
                "failed to load CLI auth from keyring: {}",
                error.message()
            ))),
        }
    }
    /// Write the serialized payload under `key`, logging a warning on failure
    /// before propagating the error to the caller.
    fn save_to_keyring(&self, key: &str, value: &str) -> std::io::Result<()> {
        match self.keyring_store.save(KEYRING_SERVICE, key, value) {
            Ok(()) => Ok(()),
            Err(error) => {
                let message = format!(
                    "failed to write OAuth tokens to keyring: {}",
                    error.message()
                );
                warn!("{message}");
                Err(std::io::Error::other(message))
            }
        }
    }
}
impl AuthStorageBackend for KeyringAuthStorage {
    fn load(&self) -> std::io::Result<Option<AuthDotJson>> {
        let key = compute_store_key(&self.codex_home)?;
        self.load_from_keyring(&key)
    }
    fn save(&self, auth: &AuthDotJson) -> std::io::Result<()> {
        let key = compute_store_key(&self.codex_home)?;
        // Simpler error mapping per style: prefer method reference over closure
        let serialized = serde_json::to_string(auth).map_err(std::io::Error::other)?;
        self.save_to_keyring(&key, &serialized)?;
        // Best effort: drop any stale on-disk fallback copy so the keyring is
        // the single source of truth; failure only warns.
        if let Err(err) = delete_file_if_exists(&self.codex_home) {
            warn!("failed to remove CLI auth fallback file: {err}");
        }
        Ok(())
    }
    fn delete(&self) -> std::io::Result<bool> {
        let key = compute_store_key(&self.codex_home)?;
        let keyring_removed = self
            .keyring_store
            .delete(KEYRING_SERVICE, &key)
            .map_err(|err| {
                std::io::Error::other(format!("failed to delete auth from keyring: {err}"))
            })?;
        // Also remove any file fallback; report success if either existed.
        let file_removed = delete_file_if_exists(&self.codex_home)?;
        Ok(keyring_removed || file_removed)
    }
}
/// Backend that prefers the keyring and falls back to file storage on failure.
#[derive(Clone, Debug)]
struct AutoAuthStorage {
    keyring_storage: Arc<KeyringAuthStorage>,
    file_storage: Arc<FileAuthStorage>,
}
impl AutoAuthStorage {
    /// Build both inner backends over the same `codex_home`.
    fn new(codex_home: PathBuf, keyring_store: Arc<dyn KeyringStore>) -> Self {
        let file_storage = Arc::new(FileAuthStorage::new(codex_home.clone()));
        let keyring_storage = Arc::new(KeyringAuthStorage::new(codex_home, keyring_store));
        Self {
            keyring_storage,
            file_storage,
        }
    }
}
impl AuthStorageBackend for AutoAuthStorage {
    fn load(&self) -> std::io::Result<Option<AuthDotJson>> {
        // Keyring first; fall back to the file both when the keyring is empty
        // and when it errors (the error is only logged).
        match self.keyring_storage.load() {
            Ok(Some(auth)) => Ok(Some(auth)),
            Ok(None) => self.file_storage.load(),
            Err(err) => {
                warn!("failed to load CLI auth from keyring, falling back to file storage: {err}");
                self.file_storage.load()
            }
        }
    }
    fn save(&self, auth: &AuthDotJson) -> std::io::Result<()> {
        // Try the keyring; on failure, log and persist to the file instead.
        match self.keyring_storage.save(auth) {
            Ok(()) => Ok(()),
            Err(err) => {
                warn!("failed to save auth to keyring, falling back to file storage: {err}");
                self.file_storage.save(auth)
            }
        }
    }
    fn delete(&self) -> std::io::Result<bool> {
        // Keyring storage will delete from disk as well
        self.keyring_storage.delete()
    }
}
// A global in-memory store for mapping codex_home -> AuthDotJson.
// Process-wide so every EphemeralAuthStorage for the same home shares state.
static EPHEMERAL_AUTH_STORE: Lazy<Mutex<HashMap<String, AuthDotJson>>> =
    Lazy::new(|| Mutex::new(HashMap::new()));
/// Backend that keeps credentials only in process memory (never on disk).
#[derive(Clone, Debug)]
struct EphemeralAuthStorage {
    codex_home: PathBuf,
}
impl EphemeralAuthStorage {
    fn new(codex_home: PathBuf) -> Self {
        Self { codex_home }
    }
    /// Run `action` with the global store locked, passing this home's key.
    /// A poisoned lock is reported as an `std::io::Error`.
    fn with_store<F, T>(&self, action: F) -> std::io::Result<T>
    where
        F: FnOnce(&mut HashMap<String, AuthDotJson>, String) -> std::io::Result<T>,
    {
        let key = compute_store_key(&self.codex_home)?;
        let mut store = EPHEMERAL_AUTH_STORE
            .lock()
            .map_err(|_| std::io::Error::other("failed to lock ephemeral auth storage"))?;
        action(&mut store, key)
    }
}
impl AuthStorageBackend for EphemeralAuthStorage {
    fn load(&self) -> std::io::Result<Option<AuthDotJson>> {
        // Clone out of the map so the lock is released immediately.
        self.with_store(|store, key| Ok(store.get(&key).cloned()))
    }
    fn save(&self, auth: &AuthDotJson) -> std::io::Result<()> {
        self.with_store(|store, key| {
            store.insert(key, auth.clone());
            Ok(())
        })
    }
    fn delete(&self) -> std::io::Result<bool> {
        // `remove` returning Some means there was something to delete.
        self.with_store(|store, key| Ok(store.remove(&key).is_some()))
    }
}
/// Build the auth storage backend for `mode` using the default OS keyring.
pub(super) fn create_auth_storage(
    codex_home: PathBuf,
    mode: AuthCredentialsStoreMode,
) -> Arc<dyn AuthStorageBackend> {
    let keyring_store: Arc<dyn KeyringStore> = Arc::new(DefaultKeyringStore);
    create_auth_storage_with_keyring_store(codex_home, mode, keyring_store)
}
/// Backend factory with an injectable keyring store (used for testing).
fn create_auth_storage_with_keyring_store(
    codex_home: PathBuf,
    mode: AuthCredentialsStoreMode,
    keyring_store: Arc<dyn KeyringStore>,
) -> Arc<dyn AuthStorageBackend> {
    match mode {
        AuthCredentialsStoreMode::File => Arc::new(FileAuthStorage::new(codex_home)),
        AuthCredentialsStoreMode::Keyring => {
            Arc::new(KeyringAuthStorage::new(codex_home, keyring_store))
        }
        AuthCredentialsStoreMode::Auto => Arc::new(AutoAuthStorage::new(codex_home, keyring_store)),
        AuthCredentialsStoreMode::Ephemeral => Arc::new(EphemeralAuthStorage::new(codex_home)),
    }
}
#[cfg(test)]
#[path = "storage_tests.rs"]
mod tests;
pub use codex_auth::AuthCredentialsStoreMode;
pub use codex_auth::AuthDotJson;
pub use codex_auth::AuthStorageBackend;
pub use codex_auth::create_auth_storage;

View File

@@ -1,8 +1,5 @@
use codex_otel::AuthEnvTelemetryMetadata;
use crate::auth::CODEX_API_KEY_ENV_VAR;
use crate::auth::OPENAI_API_KEY_ENV_VAR;
use crate::auth::REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR;
use crate::model_provider_info::ModelProviderInfo;
#[derive(Debug, Clone, Default, PartialEq, Eq)]
@@ -32,55 +29,17 @@ pub(crate) fn collect_auth_env_telemetry(
provider: &ModelProviderInfo,
codex_api_key_env_enabled: bool,
) -> AuthEnvTelemetry {
AuthEnvTelemetry {
openai_api_key_env_present: env_var_present(OPENAI_API_KEY_ENV_VAR),
codex_api_key_env_present: env_var_present(CODEX_API_KEY_ENV_VAR),
let telemetry = codex_auth::collect_auth_env_telemetry(
provider.env_key.is_some(),
provider.env_key.as_deref(),
codex_api_key_env_enabled,
// Custom provider `env_key` is arbitrary config text, so emit only a safe bucket.
provider_env_key_name: provider.env_key.as_ref().map(|_| "configured".to_string()),
provider_env_key_present: provider.env_key.as_deref().map(env_var_present),
refresh_token_url_override_present: env_var_present(REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR),
}
}
/// Whether `name` is set to a non-blank value in the process environment.
fn env_var_present(name: &str) -> bool {
    match std::env::var(name) {
        Err(std::env::VarError::NotPresent) => false,
        // A non-unicode value is still "set".
        Err(std::env::VarError::NotUnicode(_)) => true,
        Ok(value) => !value.trim().is_empty(),
    }
}
#[cfg(test)]
mod tests {
use super::*;
use pretty_assertions::assert_eq;
#[test]
fn collect_auth_env_telemetry_buckets_provider_env_key_name() {
let provider = ModelProviderInfo {
name: "Custom".to_string(),
base_url: None,
env_key: Some("sk-should-not-leak".to_string()),
env_key_instructions: None,
experimental_bearer_token: None,
wire_api: crate::model_provider_info::WireApi::Responses,
query_params: None,
http_headers: None,
env_http_headers: None,
request_max_retries: None,
stream_max_retries: None,
stream_idle_timeout_ms: None,
websocket_connect_timeout_ms: None,
requires_openai_auth: false,
supports_websockets: false,
};
let telemetry = collect_auth_env_telemetry(&provider, false);
assert_eq!(
telemetry.provider_env_key_name,
Some("configured".to_string())
);
);
AuthEnvTelemetry {
openai_api_key_env_present: telemetry.openai_api_key_env_present,
codex_api_key_env_present: telemetry.codex_api_key_env_present,
codex_api_key_env_enabled: telemetry.codex_api_key_env_enabled,
provider_env_key_name: telemetry.provider_env_key_name,
provider_env_key_present: telemetry.provider_env_key_present,
refresh_token_url_override_present: telemetry.refresh_token_url_override_present,
}
}

View File

@@ -4798,10 +4798,8 @@ mod handlers {
errors.push("state db unavailable; memory rows were not cleared".to_string());
}
let memory_root = codex_memories::memories::memory_root(&config.codex_home);
if let Err(err) =
codex_memories::memories::control::clear_memory_root_contents(&memory_root).await
{
let memory_root = crate::memories::memory_root(&config.codex_home);
if let Err(err) = crate::memories::clear_memory_root_contents(&memory_root).await {
errors.push(format!(
"failed clearing memory directory {}: {err}",
memory_root.display()

View File

@@ -41,6 +41,7 @@ use crate::features::FeatureOverrides;
use crate::features::Features;
use crate::features::FeaturesToml;
use crate::git_info::resolve_root_git_project_for_trust;
use crate::memories::memory_root;
use crate::model_provider_info::LEGACY_OLLAMA_CHAT_PROVIDER_ID;
use crate::model_provider_info::LMSTUDIO_OSS_PROVIDER_ID;
use crate::model_provider_info::ModelProviderInfo;
@@ -61,7 +62,6 @@ use crate::windows_sandbox::resolve_windows_sandbox_mode;
use crate::windows_sandbox::resolve_windows_sandbox_private_desktop;
use codex_app_server_protocol::Tools;
use codex_app_server_protocol::UserSavedConfig;
use codex_memories::memories::memory_root;
use codex_protocol::config_types::AltScreenMode;
use codex_protocol::config_types::ForcedLoginMethod;
use codex_protocol::config_types::Personality;

View File

@@ -32,31 +32,5 @@ pub fn get_thread_id_from_citations(citations: Vec<String>) -> Vec<ThreadId> {
}
#[cfg(test)]
mod tests {
use super::get_thread_id_from_citations;
use codex_protocol::ThreadId;
use pretty_assertions::assert_eq;
#[test]
fn get_thread_id_from_citations_extracts_thread_ids() {
let first = ThreadId::new();
let second = ThreadId::new();
let citations = vec![format!(
"<memory_citation>\n<citation_entries>\nMEMORY.md:1-2|note=[x]\n</citation_entries>\n<thread_ids>\n{first}\nnot-a-uuid\n{second}\n</thread_ids>\n</memory_citation>"
)];
assert_eq!(get_thread_id_from_citations(citations), vec![first, second]);
}
#[test]
fn get_thread_id_from_citations_supports_legacy_rollout_ids() {
let thread_id = ThreadId::new();
let citations = vec![format!(
"<memory_citation>\n<rollout_ids>\n{thread_id}\n</rollout_ids>\n</memory_citation>"
)];
assert_eq!(get_thread_id_from_citations(citations), vec![thread_id]);
}
}
#[path = "citations_tests.rs"]
mod tests;

View File

@@ -0,0 +1,26 @@
use super::get_thread_id_from_citations;
use codex_protocol::ThreadId;
use pretty_assertions::assert_eq;
#[test]
fn get_thread_id_from_citations_extracts_thread_ids() {
    let first = ThreadId::new();
    let second = ThreadId::new();
    // One citation blob containing two valid ids plus a non-UUID line; the
    // assertion expects the invalid line to be skipped and order preserved.
    let citations = vec![format!(
        "<memory_citation>\n<citation_entries>\nMEMORY.md:1-2|note=[x]\n</citation_entries>\n<thread_ids>\n{first}\nnot-a-uuid\n{second}\n</thread_ids>\n</memory_citation>"
    )];
    assert_eq!(get_thread_id_from_citations(citations), vec![first, second]);
}
#[test]
fn get_thread_id_from_citations_supports_legacy_rollout_ids() {
    let thread_id = ThreadId::new();
    // Older citations used a <rollout_ids> section; it should still be parsed.
    let citations = vec![format!(
        "<memory_citation>\n<rollout_ids>\n{thread_id}\n</rollout_ids>\n</memory_citation>"
    )];
    assert_eq!(get_thread_id_from_citations(citations), vec![thread_id]);
}

View File

@@ -1,6 +1,6 @@
use std::path::Path;
pub async fn clear_memory_root_contents(memory_root: &Path) -> std::io::Result<()> {
pub(crate) async fn clear_memory_root_contents(memory_root: &Path) -> std::io::Result<()> {
match tokio::fs::symlink_metadata(memory_root).await {
Ok(metadata) if metadata.file_type().is_symlink() => {
return Err(std::io::Error::new(

View File

@@ -4,29 +4,29 @@
//! - Phase 1: select rollouts, extract stage-1 raw memories, persist stage-1 outputs, and enqueue consolidation.
//! - Phase 2: claim a global consolidation lock, materialize consolidation inputs, and dispatch one consolidation agent.
pub(crate) mod citations;
mod control;
mod phase1;
mod phase2;
pub(crate) mod prompts;
mod start;
mod storage;
#[cfg(test)]
mod tests;
pub(crate) mod usage;
use codex_protocol::openai_models::ReasoningEffort;
pub(crate) use codex_memories::memories::memory_root;
pub(crate) use control::clear_memory_root_contents;
/// Starts the memory startup pipeline for eligible root sessions.
/// This is the single entrypoint that `codex` uses to trigger memory startup.
///
/// This is the entry point to read and understand this module.
pub(crate) use start::start_memories_startup_task;
pub(crate) mod citations {
pub(crate) use codex_memories::memories::citations::*;
}
pub(crate) mod storage {
pub(crate) use codex_memories::memories::storage::*;
mod artifacts {
pub(super) const ROLLOUT_SUMMARIES_SUBDIR: &str = "rollout_summaries";
pub(super) const RAW_MEMORIES_FILENAME: &str = "raw_memories.md";
}
/// Phase 1 (startup extraction).
@@ -95,27 +95,21 @@ mod metrics {
pub(super) const MEMORY_PHASE_TWO_TOKEN_USAGE: &str = "codex.memory.phase2.token_usage";
}
#[cfg(test)]
use std::path::Path;
#[cfg(test)]
use std::path::PathBuf;
#[cfg(test)]
pub(crate) async fn clear_memory_root_contents(memory_root: &Path) -> std::io::Result<()> {
codex_memories::memories::control::clear_memory_root_contents(memory_root).await
/// Root directory for memory artifacts: `<codex_home>/memories`.
pub fn memory_root(codex_home: &Path) -> PathBuf {
    let mut root = codex_home.to_path_buf();
    root.push("memories");
    root
}
#[cfg(test)]
pub(crate) fn rollout_summaries_dir(root: &Path) -> PathBuf {
codex_memories::memories::rollout_summaries_dir(root)
/// Directory holding per-rollout summary files under the memory root.
fn rollout_summaries_dir(root: &Path) -> PathBuf {
    let mut dir = root.to_path_buf();
    dir.push(artifacts::ROLLOUT_SUMMARIES_SUBDIR);
    dir
}
#[cfg(test)]
pub(crate) fn raw_memories_file(root: &Path) -> PathBuf {
codex_memories::memories::raw_memories_file(root)
/// Path of the merged raw-memories markdown file under the memory root.
fn raw_memories_file(root: &Path) -> PathBuf {
    let mut file = root.to_path_buf();
    file.push(artifacts::RAW_MEMORIES_FILENAME);
    file
}
#[cfg(test)]
pub(crate) async fn ensure_layout(root: &Path) -> std::io::Result<()> {
codex_memories::memories::ensure_layout(root).await
/// Create the on-disk layout for the memory root; safe to call repeatedly.
async fn ensure_layout(root: &Path) -> std::io::Result<()> {
    // create_dir_all on the deepest subdirectory also creates `root` itself.
    tokio::fs::create_dir_all(rollout_summaries_dir(root)).await
}

View File

@@ -0,0 +1,260 @@
use codex_state::Stage1Output;
use std::collections::HashSet;
use std::fmt::Write as _;
use std::path::Path;
use tracing::warn;
use uuid::Uuid;
use crate::memories::ensure_layout;
use crate::memories::raw_memories_file;
use crate::memories::rollout_summaries_dir;
/// Rebuild `raw_memories.md` from DB-backed stage-1 outputs.
///
/// Ensures the memory directory layout exists first, then rewrites the file
/// from the retained (capped) set of memories.
pub(super) async fn rebuild_raw_memories_file_from_memories(
    root: &Path,
    memories: &[Stage1Output],
    max_raw_memories_for_consolidation: usize,
) -> std::io::Result<()> {
    ensure_layout(root).await?;
    rebuild_raw_memories_file(root, memories, max_raw_memories_for_consolidation).await
}
/// Syncs canonical rollout summary files from DB-backed stage-1 output rows.
///
/// Prunes summary files not in the retained set, rewrites one summary file
/// per retained memory, and — when nothing is retained — clears the derived
/// consolidation artifacts as well.
pub(super) async fn sync_rollout_summaries_from_memories(
    root: &Path,
    memories: &[Stage1Output],
    max_raw_memories_for_consolidation: usize,
) -> std::io::Result<()> {
    ensure_layout(root).await?;
    let retained = retained_memories(memories, max_raw_memories_for_consolidation);
    // File stems of summaries that must survive pruning.
    let keep = retained
        .iter()
        .map(rollout_summary_file_stem)
        .collect::<HashSet<_>>();
    prune_rollout_summaries(root, &keep).await?;
    for memory in retained {
        write_rollout_summary_for_thread(root, memory).await?;
    }
    if retained.is_empty() {
        // With no memories left, derived artifacts are stale: remove them,
        // tolerating files/dirs that are already gone.
        for file_name in ["MEMORY.md", "memory_summary.md"] {
            let path = root.join(file_name);
            if let Err(err) = tokio::fs::remove_file(path).await
                && err.kind() != std::io::ErrorKind::NotFound
            {
                return Err(err);
            }
        }
        let skills_dir = root.join("skills");
        if let Err(err) = tokio::fs::remove_dir_all(skills_dir).await
            && err.kind() != std::io::ErrorKind::NotFound
        {
            return Err(err);
        }
    }
    Ok(())
}
/// Rewrite `raw_memories.md` from the retained memories, one markdown section
/// per thread (a placeholder body is written when nothing is retained).
async fn rebuild_raw_memories_file(
    root: &Path,
    memories: &[Stage1Output],
    max_raw_memories_for_consolidation: usize,
) -> std::io::Result<()> {
    let retained = retained_memories(memories, max_raw_memories_for_consolidation);
    let mut body = String::from("# Raw Memories\n\n");
    if retained.is_empty() {
        body.push_str("No raw memories yet.\n");
        return tokio::fs::write(raw_memories_file(root), body).await;
    }
    body.push_str("Merged stage-1 raw memories (latest first):\n\n");
    for memory in retained {
        // Section header plus metadata lines, then the trimmed memory text.
        writeln!(body, "## Thread `{}`", memory.thread_id).map_err(raw_memories_format_error)?;
        writeln!(
            body,
            "updated_at: {}",
            memory.source_updated_at.to_rfc3339()
        )
        .map_err(raw_memories_format_error)?;
        writeln!(body, "cwd: {}", memory.cwd.display()).map_err(raw_memories_format_error)?;
        writeln!(body, "rollout_path: {}", memory.rollout_path.display())
            .map_err(raw_memories_format_error)?;
        let rollout_summary_file = format!("{}.md", rollout_summary_file_stem(memory));
        writeln!(body, "rollout_summary_file: {rollout_summary_file}")
            .map_err(raw_memories_format_error)?;
        writeln!(body).map_err(raw_memories_format_error)?;
        body.push_str(memory.raw_memory.trim());
        body.push_str("\n\n");
    }
    tokio::fs::write(raw_memories_file(root), body).await
}
/// Remove `.md` summaries in the summaries dir whose stem is not in `keep`.
/// Deletion failures are logged, not propagated (best-effort cleanup).
async fn prune_rollout_summaries(root: &Path, keep: &HashSet<String>) -> std::io::Result<()> {
    let dir_path = rollout_summaries_dir(root);
    let mut dir = match tokio::fs::read_dir(&dir_path).await {
        Ok(dir) => dir,
        // No directory means nothing to prune.
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(()),
        Err(err) => return Err(err),
    };
    while let Some(entry) = dir.next_entry().await? {
        let path = entry.path();
        // Skip non-UTF-8 names and anything that is not a `.md` file.
        let Some(file_name) = path.file_name().and_then(|name| name.to_str()) else {
            continue;
        };
        let Some(stem) = file_name.strip_suffix(".md") else {
            continue;
        };
        if !keep.contains(stem)
            && let Err(err) = tokio::fs::remove_file(&path).await
            && err.kind() != std::io::ErrorKind::NotFound
        {
            warn!(
                "failed pruning outdated rollout summary {}: {err}",
                path.display()
            );
        }
    }
    Ok(())
}
/// Write one rollout summary markdown file for `memory`: a metadata header
/// (thread id, timestamps, paths, optional git branch) followed by the body.
async fn write_rollout_summary_for_thread(
    root: &Path,
    memory: &Stage1Output,
) -> std::io::Result<()> {
    let file_stem = rollout_summary_file_stem(memory);
    let path = rollout_summaries_dir(root).join(format!("{file_stem}.md"));
    let mut body = String::new();
    writeln!(body, "thread_id: {}", memory.thread_id).map_err(rollout_summary_format_error)?;
    writeln!(
        body,
        "updated_at: {}",
        memory.source_updated_at.to_rfc3339()
    )
    .map_err(rollout_summary_format_error)?;
    writeln!(body, "rollout_path: {}", memory.rollout_path.display())
        .map_err(rollout_summary_format_error)?;
    writeln!(body, "cwd: {}", memory.cwd.display()).map_err(rollout_summary_format_error)?;
    if let Some(git_branch) = memory.git_branch.as_deref() {
        writeln!(body, "git_branch: {git_branch}").map_err(rollout_summary_format_error)?;
    }
    // Blank line separates the metadata header from the summary body.
    writeln!(body).map_err(rollout_summary_format_error)?;
    body.push_str(&memory.rollout_summary);
    body.push('\n');
    tokio::fs::write(path, body).await
}
/// Prefix of `memories` capped at `max_raw_memories_for_consolidation` items.
fn retained_memories(
    memories: &[Stage1Output],
    max_raw_memories_for_consolidation: usize,
) -> &[Stage1Output] {
    // Clamping the cutoff with min() keeps the slice in bounds for any cap.
    let cutoff = memories.len().min(max_raw_memories_for_consolidation);
    &memories[..cutoff]
}
/// Map a formatting failure while building raw memories into an I/O error.
fn raw_memories_format_error(err: std::fmt::Error) -> std::io::Error {
    let message = format!("format raw memories: {err}");
    std::io::Error::other(message)
}
/// Map a formatting failure while building a rollout summary into an I/O error.
fn rollout_summary_format_error(err: std::fmt::Error) -> std::io::Error {
    let message = format!("format rollout summary: {err}");
    std::io::Error::other(message)
}
/// File stem (no extension) for `memory`'s rollout summary file.
pub(crate) fn rollout_summary_file_stem(memory: &Stage1Output) -> String {
    rollout_summary_file_stem_from_parts(
        memory.thread_id,
        memory.source_updated_at,
        memory.rollout_slug.as_deref(),
    )
}
/// Build the summary file stem: `<timestamp>-<4-char hash>[-<slug>]`.
///
/// The timestamp comes from the thread UUID when it embeds one, otherwise
/// from `source_updated_at`; the 4-char base-62 hash disambiguates threads
/// with identical timestamps; the optional slug is sanitized and truncated.
pub(super) fn rollout_summary_file_stem_from_parts(
    thread_id: codex_protocol::ThreadId,
    source_updated_at: chrono::DateTime<chrono::Utc>,
    rollout_slug: Option<&str>,
) -> String {
    const ROLLOUT_SLUG_MAX_LEN: usize = 60;
    const SHORT_HASH_ALPHABET: &[u8; 62] =
        b"0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
    // 62^4 — the number of distinct 4-character base-62 hashes.
    const SHORT_HASH_SPACE: u32 = 14_776_336;
    let thread_id = thread_id.to_string();
    let (timestamp_fragment, short_hash_seed) = match Uuid::parse_str(&thread_id) {
        Ok(thread_uuid) => {
            // Prefer the timestamp embedded in the UUID (when present and
            // representable); fall back to the row's updated-at time.
            let timestamp = thread_uuid
                .get_timestamp()
                .and_then(|uuid_timestamp| {
                    let (seconds, nanos) = uuid_timestamp.to_unix();
                    i64::try_from(seconds).ok().and_then(|secs| {
                        chrono::DateTime::<chrono::Utc>::from_timestamp(secs, nanos)
                    })
                })
                .unwrap_or(source_updated_at);
            // Seed the short hash from the UUID's low 32 bits.
            let short_hash_seed = (thread_uuid.as_u128() & 0xFFFF_FFFF) as u32;
            (
                timestamp.format("%Y-%m-%dT%H-%M-%S").to_string(),
                short_hash_seed,
            )
        }
        Err(_) => {
            // Non-UUID ids: derive a deterministic seed with a 31-multiplier
            // rolling hash over the id's bytes.
            let mut short_hash_seed = 0u32;
            for byte in thread_id.bytes() {
                short_hash_seed = short_hash_seed
                    .wrapping_mul(31)
                    .wrapping_add(u32::from(byte));
            }
            (
                source_updated_at.format("%Y-%m-%dT%H-%M-%S").to_string(),
                short_hash_seed,
            )
        }
    };
    // Encode the seed as exactly four base-62 digits, most significant first.
    let mut short_hash_value = short_hash_seed % SHORT_HASH_SPACE;
    let mut short_hash_chars = ['0'; 4];
    for idx in (0..short_hash_chars.len()).rev() {
        let alphabet_idx = (short_hash_value % SHORT_HASH_ALPHABET.len() as u32) as usize;
        short_hash_chars[idx] = SHORT_HASH_ALPHABET[alphabet_idx] as char;
        short_hash_value /= SHORT_HASH_ALPHABET.len() as u32;
    }
    let short_hash: String = short_hash_chars.iter().collect();
    let file_prefix = format!("{timestamp_fragment}-{short_hash}");
    let Some(raw_slug) = rollout_slug else {
        return file_prefix;
    };
    // Sanitize the slug: lowercase ASCII alphanumerics pass through, anything
    // else becomes '_'; output stays ASCII, so the byte-length cap is safe.
    let mut slug = String::with_capacity(ROLLOUT_SLUG_MAX_LEN);
    for ch in raw_slug.chars() {
        if slug.len() >= ROLLOUT_SLUG_MAX_LEN {
            break;
        }
        if ch.is_ascii_alphanumeric() {
            slug.push(ch.to_ascii_lowercase());
        } else {
            slug.push('_');
        }
    }
    // Drop trailing separators left by truncation or trailing punctuation.
    while slug.ends_with('_') {
        slug.pop();
    }
    if slug.is_empty() {
        file_prefix
    } else {
        format!("{file_prefix}-{slug}")
    }
}
#[cfg(test)]
#[path = "storage_tests.rs"]
mod tests;

View File

@@ -0,0 +1,70 @@
use super::rollout_summary_file_stem;
use super::rollout_summary_file_stem_from_parts;
use chrono::TimeZone;
use chrono::Utc;
use codex_protocol::ThreadId;
use codex_state::Stage1Output;
use pretty_assertions::assert_eq;
use std::path::PathBuf;
const FIXED_PREFIX: &str = "2025-02-11T15-35-19-jqmb";
/// Builds a minimal `Stage1Output` fixture with fixed timestamps and the
/// given optional rollout slug.
fn stage1_output_with_slug(thread_id: ThreadId, rollout_slug: Option<&str>) -> Stage1Output {
    Stage1Output {
        thread_id,
        source_updated_at: Utc.timestamp_opt(123, 0).single().expect("timestamp"),
        raw_memory: "raw memory".to_string(),
        rollout_summary: "summary".to_string(),
        rollout_slug: rollout_slug.map(ToString::to_string),
        rollout_path: PathBuf::from("/tmp/rollout.jsonl"),
        cwd: PathBuf::from("/tmp/workspace"),
        git_branch: None,
        generated_at: Utc.timestamp_opt(124, 0).single().expect("timestamp"),
    }
}
/// Thread id whose embedded UUID timestamp maps to `FIXED_PREFIX`.
fn fixed_thread_id() -> ThreadId {
    ThreadId::try_from("0194f5a6-89ab-7cde-8123-456789abcdef").expect("valid thread id")
}
// The Stage1Output wrapper and the raw-parts helper must agree on the stem.
#[test]
fn rollout_summary_file_stem_uses_uuid_timestamp_and_hash_when_slug_missing() {
    let thread_id = fixed_thread_id();
    let memory = stage1_output_with_slug(thread_id, None);
    assert_eq!(rollout_summary_file_stem(&memory), FIXED_PREFIX);
    assert_eq!(
        rollout_summary_file_stem_from_parts(
            memory.thread_id,
            memory.source_updated_at,
            memory.rollout_slug.as_deref(),
        ),
        FIXED_PREFIX
    );
}
// Non-alphanumerics become `_`, everything is lowercased, and the slug is
// truncated to 60 characters.
#[test]
fn rollout_summary_file_stem_sanitizes_and_truncates_slug() {
    let thread_id = fixed_thread_id();
    let memory = stage1_output_with_slug(
        thread_id,
        Some("Unsafe Slug/With Spaces & Symbols + EXTRA_LONG_12345_67890_ABCDE_fghij_klmno"),
    );
    let stem = rollout_summary_file_stem(&memory);
    let slug = stem
        .strip_prefix(&format!("{FIXED_PREFIX}-"))
        .expect("slug suffix should be present");
    assert_eq!(slug.len(), 60);
    assert_eq!(
        slug,
        "unsafe_slug_with_spaces___symbols___extra_long_12345_67890_a"
    );
}
// An empty slug is dropped entirely, leaving just the timestamp-hash prefix.
#[test]
fn rollout_summary_file_stem_uses_uuid_timestamp_and_hash_when_slug_is_empty() {
    let thread_id = fixed_thread_id();
    let memory = stage1_output_with_slug(thread_id, Some(""));
    assert_eq!(rollout_summary_file_stem(&memory), FIXED_PREFIX);
}

View File

@@ -1,13 +1,30 @@
use std::io;
use std::path::Path;
use std::path::PathBuf;
use crate::ModelClient;
use crate::error::CodexErr;
use crate::error::Result;
pub use codex_memories::memory_trace::BuiltMemory;
use codex_api::RawMemory as ApiRawMemory;
use codex_api::RawMemoryMetadata as ApiRawMemoryMetadata;
use codex_otel::SessionTelemetry;
use codex_protocol::openai_models::ModelInfo;
use codex_protocol::openai_models::ReasoningEffort as ReasoningEffortConfig;
use serde_json::Map;
use serde_json::Value;
/// A memory built from one trace file plus the model's summarization output.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BuiltMemory {
    /// Stable identifier derived from the trace's position and file stem.
    pub memory_id: String,
    /// Trace file this memory was built from.
    pub source_path: PathBuf,
    /// Raw memory text returned by the summarizer.
    pub raw_memory: String,
    /// Human-readable summary returned by the summarizer.
    pub memory_summary: String,
}
/// A trace file loaded and normalized, ready to be sent for summarization.
struct PreparedTrace {
    /// Identifier also used as the payload id sent with the request.
    memory_id: String,
    /// Original trace file path.
    source_path: PathBuf,
    /// Normalized items wrapped in the API request payload type.
    payload: ApiRawMemory,
}
/// Loads raw trace files, normalizes items, and builds memory summaries.
///
@@ -27,20 +44,187 @@ pub async fn build_memories_from_trace_files(
return Ok(Vec::new());
}
let prepared = codex_memories::memory_trace::load_trace_requests(trace_paths)
.await
.map_err(map_trace_load_error)?;
let mut prepared = Vec::with_capacity(trace_paths.len());
for (index, path) in trace_paths.iter().enumerate() {
prepared.push(prepare_trace(index + 1, path).await?);
}
let raw_memories = prepared.iter().map(|trace| trace.payload.clone()).collect();
let output = client
.summarize_memories(raw_memories, model_info, effort, session_telemetry)
.await?;
codex_memories::memory_trace::build_memories_from_output(prepared, output)
.map_err(|err| CodexErr::InvalidRequest(err.to_string()))
if output.len() != prepared.len() {
return Err(CodexErr::InvalidRequest(format!(
"unexpected memory summarize output length: expected {}, got {}",
prepared.len(),
output.len()
)));
}
Ok(prepared
.into_iter()
.zip(output)
.map(|(trace, summary)| BuiltMemory {
memory_id: trace.memory_id,
source_path: trace.source_path,
raw_memory: summary.raw_memory,
memory_summary: summary.memory_summary,
})
.collect())
}
fn map_trace_load_error(err: anyhow::Error) -> CodexErr {
match err.downcast::<io::Error>() {
Ok(err) => CodexErr::Io(err),
Err(err) => CodexErr::InvalidRequest(err.to_string()),
}
/// Loads and normalizes a single trace file into a `PreparedTrace`.
///
/// `index` is 1-based and feeds the generated memory id. Errors surface from
/// reading the file or from normalizing its items.
async fn prepare_trace(index: usize, path: &Path) -> Result<PreparedTrace> {
    let text = load_trace_text(path).await?;
    let items = load_trace_items(path, &text)?;
    let memory_id = build_memory_id(index, path);
    Ok(PreparedTrace {
        // The id is stored on the trace and echoed as the payload id; the
        // path metadata string is rendered directly, avoiding a redundant
        // PathBuf clone.
        memory_id: memory_id.clone(),
        source_path: path.to_path_buf(),
        payload: ApiRawMemory {
            id: memory_id,
            metadata: ApiRawMemoryMetadata {
                source_path: path.display().to_string(),
            },
            items,
        },
    })
}
/// Reads a trace file and decodes its bytes into text (UTF-8 with optional
/// BOM, with a lossless byte-to-char fallback for non-UTF-8 data).
async fn load_trace_text(path: &Path) -> Result<String> {
    let bytes = tokio::fs::read(path).await?;
    let text = decode_trace_bytes(&bytes);
    Ok(text)
}
/// Decodes raw trace bytes into text.
///
/// Strips a UTF-8 BOM when present, then decodes as UTF-8; when the bytes
/// are not valid UTF-8, falls back to mapping each byte to the Unicode code
/// point of the same value (Latin-1), which never fails.
///
/// Fix: the BOM is now stripped before the Latin-1 fallback as well, so a
/// BOM-prefixed file with invalid UTF-8 no longer leaks `ï»¿` into the
/// output. Validation also happens in place instead of via throwaway `Vec`s.
fn decode_trace_bytes(raw: &[u8]) -> String {
    // Drop the UTF-8 byte-order mark up front so every decode path ignores it.
    let bytes = raw.strip_prefix(&[0xEF, 0xBB, 0xBF]).unwrap_or(raw);
    match std::str::from_utf8(bytes) {
        Ok(text) => text.to_owned(),
        // Latin-1 fallback: each byte maps 1:1 to a code point, so this is lossless.
        Err(_) => bytes.iter().map(|&b| char::from(b)).collect(),
    }
}
/// Parses trace text into JSON object items.
///
/// Accepts either a whole-file JSON array or JSONL-style lines (each line a
/// JSON object or an array of objects); non-object entries are dropped.
/// Returns an invalid-request error when nothing usable is found.
fn load_trace_items(path: &Path, text: &str) -> Result<Vec<Value>> {
    // A whole-file JSON array takes precedence over line-by-line parsing.
    if let Ok(Value::Array(items)) = serde_json::from_str::<Value>(text) {
        let object_items: Vec<Value> = items
            .into_iter()
            .filter(serde_json::Value::is_object)
            .collect();
        if object_items.is_empty() {
            return Err(CodexErr::InvalidRequest(format!(
                "no object items found in trace file: {}",
                path.display()
            )));
        }
        return normalize_trace_items(object_items, path);
    }
    // JSONL fallback: parse each line that plausibly starts a JSON value.
    let mut collected = Vec::new();
    for raw_line in text.lines() {
        let candidate = raw_line.trim();
        if candidate.is_empty() || !(candidate.starts_with('{') || candidate.starts_with('[')) {
            continue;
        }
        match serde_json::from_str::<Value>(candidate) {
            Ok(Value::Object(map)) => collected.push(Value::Object(map)),
            Ok(Value::Array(values)) => {
                collected.extend(values.into_iter().filter(serde_json::Value::is_object));
            }
            // Scalars and unparseable lines are silently skipped.
            _ => {}
        }
    }
    if collected.is_empty() {
        return Err(CodexErr::InvalidRequest(format!(
            "no JSON items parsed from trace file: {}",
            path.display()
        )));
    }
    normalize_trace_items(collected, path)
}
/// Filters raw trace items down to the allowed response items.
///
/// Items with a `payload` field are treated as rollout wrappers: only
/// `response_item` wrappers are unwrapped (object or array payloads), and
/// each unwrapped object is kept when allowed. Bare objects are kept
/// directly when allowed. Errors when nothing survives.
fn normalize_trace_items(items: Vec<Value>, path: &Path) -> Result<Vec<Value>> {
    let mut kept = Vec::new();
    for entry in items {
        let Value::Object(map) = entry else {
            continue;
        };
        if !map.contains_key("payload") {
            // Bare item: keep it when its type/role is allowed.
            if is_allowed_trace_item(&map) {
                kept.push(Value::Object(map));
            }
            continue;
        }
        // Wrapped rollout line: only `response_item` wrappers carry payloads we keep.
        if map.get("type").and_then(Value::as_str) != Some("response_item") {
            continue;
        }
        match map.get("payload") {
            Some(Value::Object(inner)) => {
                if is_allowed_trace_item(inner) {
                    kept.push(Value::Object(inner.clone()));
                }
            }
            Some(Value::Array(list)) => {
                for element in list {
                    if let Value::Object(inner) = element
                        && is_allowed_trace_item(inner)
                    {
                        kept.push(Value::Object(inner.clone()));
                    }
                }
            }
            // Scalar/null payloads contribute nothing.
            _ => {}
        }
    }
    if kept.is_empty() {
        return Err(CodexErr::InvalidRequest(format!(
            "no valid trace items after normalization: {}",
            path.display()
        )));
    }
    Ok(kept)
}
/// Returns whether a trace item should be kept: any non-message item with a
/// string `type`, or a message whose role is assistant/system/developer/user.
fn is_allowed_trace_item(item: &Map<String, Value>) -> bool {
    match item.get("type").and_then(Value::as_str) {
        // Items without a string `type` are never kept.
        None => false,
        Some("message") => matches!(
            item.get("role").and_then(Value::as_str),
            Some("assistant" | "system" | "developer" | "user")
        ),
        Some(_) => true,
    }
}
/// Builds a stable memory id from the trace's 1-based index and file stem,
/// falling back to `memory` when the path has no usable stem.
fn build_memory_id(index: usize, path: &Path) -> String {
    let stem = path
        .file_stem()
        .filter(|stem| !stem.is_empty())
        .map_or_else(
            || "memory".to_string(),
            |stem| stem.to_string_lossy().into_owned(),
        );
    format!("memory_{index}_{stem}")
}
#[cfg(test)]
#[path = "memory_trace_tests.rs"]
mod tests;

View File

@@ -0,0 +1,73 @@
use super::*;
use pretty_assertions::assert_eq;
use tempfile::tempdir;
// Payload wrappers must be unwrapped only for `response_item` types, message
// roles filtered, and bare allowed items kept as-is.
#[test]
fn normalize_trace_items_handles_payload_wrapper_and_message_role_filtering() {
    let items = vec![
        serde_json::json!({
            "type": "response_item",
            "payload": {"type": "message", "role": "assistant", "content": []}
        }),
        serde_json::json!({
            "type": "response_item",
            "payload": [
                {"type": "message", "role": "user", "content": []},
                {"type": "message", "role": "tool", "content": []},
                {"type": "function_call", "name": "shell", "arguments": "{}", "call_id": "c1"}
            ]
        }),
        serde_json::json!({
            "type": "not_response_item",
            "payload": {"type": "message", "role": "assistant", "content": []}
        }),
        serde_json::json!({
            "type": "message",
            "role": "developer",
            "content": []
        }),
    ];
    let normalized = normalize_trace_items(items, Path::new("trace.json")).expect("normalize");
    // The "tool" role message and the non-response_item wrapper are dropped.
    let expected = vec![
        serde_json::json!({"type": "message", "role": "assistant", "content": []}),
        serde_json::json!({"type": "message", "role": "user", "content": []}),
        serde_json::json!({"type": "function_call", "name": "shell", "arguments": "{}", "call_id": "c1"}),
        serde_json::json!({"type": "message", "role": "developer", "content": []}),
    ];
    assert_eq!(normalized, expected);
}
// JSONL input: object lines are kept, array lines flattened, and non-object
// entries dropped during normalization.
#[test]
fn load_trace_items_supports_jsonl_arrays_and_objects() {
    let text = r#"
{"type":"response_item","payload":{"type":"message","role":"assistant","content":[]}}
[{"type":"message","role":"user","content":[]},{"type":"message","role":"tool","content":[]}]
"#;
    let loaded = load_trace_items(Path::new("trace.jsonl"), text).expect("load");
    let expected = vec![
        serde_json::json!({"type":"message","role":"assistant","content":[]}),
        serde_json::json!({"type":"message","role":"user","content":[]}),
    ];
    assert_eq!(loaded, expected);
}
// A UTF-8 BOM prefix must be stripped before the JSON text is returned.
#[tokio::test]
async fn load_trace_text_decodes_utf8_sig() {
    let dir = tempdir().expect("tempdir");
    let path = dir.path().join("trace.json");
    tokio::fs::write(
        &path,
        [
            0xEF, 0xBB, 0xBF, b'[', b'{', b'"', b't', b'y', b'p', b'e', b'"', b':', b'"', b'm',
            b'e', b's', b's', b'a', b'g', b'e', b'"', b',', b'"', b'r', b'o', b'l', b'e', b'"',
            b':', b'"', b'u', b's', b'e', b'r', b'"', b',', b'"', b'c', b'o', b'n', b't', b'e',
            b'n', b't', b'"', b':', b'[', b']', b'}', b']',
        ],
    )
    .await
    .expect("write");
    let text = load_trace_text(&path).await.expect("decode");
    assert!(text.starts_with('['));
}

View File

@@ -4,7 +4,6 @@ use std::sync::Arc;
use base64::Engine;
use base64::engine::general_purpose::STANDARD as BASE64_STANDARD;
use codex_memories::memories::citations::get_thread_id_from_citations;
use codex_protocol::config_types::ModeKind;
use codex_protocol::items::TurnItem;
use codex_utils_stream_parser::strip_citations;
@@ -15,6 +14,7 @@ use crate::codex::TurnContext;
use crate::error::CodexErr;
use crate::error::Result;
use crate::function_tool::FunctionCallError;
use crate::memories::citations::get_thread_id_from_citations;
use crate::parse_turn_item;
use crate::state_db;
use crate::tools::parallel::ToolCallRuntime;

View File

@@ -1,178 +1,9 @@
use base64::Engine;
use serde::Deserialize;
use serde::Serialize;
use thiserror::Error;
/// OAuth token bundle persisted in `auth.json`.
#[derive(Deserialize, Serialize, Clone, Debug, PartialEq, Default)]
pub struct TokenData {
    /// Flat info parsed from the JWT in auth.json.
    #[serde(
        deserialize_with = "deserialize_id_token",
        serialize_with = "serialize_id_token"
    )]
    pub id_token: IdTokenInfo,
    /// This is a JWT.
    pub access_token: String,
    /// Refresh token paired with the access token.
    pub refresh_token: String,
    /// Account identifier, when known.
    pub account_id: Option<String>,
}
/// Flat subset of useful claims in id_token from auth.json.
#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize)]
pub struct IdTokenInfo {
    /// Email claim, when present.
    pub email: Option<String>,
    /// The ChatGPT subscription plan type
    /// (e.g., "free", "plus", "pro", "business", "enterprise", "edu").
    /// (Note: values may vary by backend.)
    pub(crate) chatgpt_plan_type: Option<PlanType>,
    /// ChatGPT user identifier associated with the token, if present.
    pub chatgpt_user_id: Option<String>,
    /// Organization/workspace identifier associated with the token, if present.
    pub chatgpt_account_id: Option<String>,
    /// Original JWT string the claims were parsed from.
    pub raw_jwt: String,
}
impl IdTokenInfo {
    /// Returns the plan as a string: the Debug name for known plans, the raw
    /// value for unknown ones.
    pub fn get_chatgpt_plan_type(&self) -> Option<String> {
        self.chatgpt_plan_type.as_ref().map(|t| match t {
            PlanType::Known(plan) => format!("{plan:?}"),
            PlanType::Unknown(s) => s.clone(),
        })
    }
    /// True when the plan is a workspace plan (team/business/enterprise/edu).
    pub fn is_workspace_account(&self) -> bool {
        matches!(
            self.chatgpt_plan_type,
            Some(PlanType::Known(
                KnownPlan::Team | KnownPlan::Business | KnownPlan::Enterprise | KnownPlan::Edu
            ))
        )
    }
}
/// Plan-type claim: either a recognized plan or the raw string as received.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(untagged)]
pub(crate) enum PlanType {
    Known(KnownPlan),
    Unknown(String),
}
impl PlanType {
    /// Maps a raw plan string (case-insensitive) onto a known plan, keeping
    /// unrecognized values verbatim as `Unknown`.
    pub(crate) fn from_raw_value(raw: &str) -> Self {
        match raw.to_ascii_lowercase().as_str() {
            "free" => Self::Known(KnownPlan::Free),
            "go" => Self::Known(KnownPlan::Go),
            "plus" => Self::Known(KnownPlan::Plus),
            "pro" => Self::Known(KnownPlan::Pro),
            "team" => Self::Known(KnownPlan::Team),
            "business" => Self::Known(KnownPlan::Business),
            "enterprise" => Self::Known(KnownPlan::Enterprise),
            // Both spellings appear in the wild.
            "education" | "edu" => Self::Known(KnownPlan::Edu),
            _ => Self::Unknown(raw.to_string()),
        }
    }
}
/// Recognized ChatGPT subscription plans.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub(crate) enum KnownPlan {
    Free,
    Go,
    Plus,
    Pro,
    Team,
    Business,
    Enterprise,
    Edu,
}
/// Raw claims deserialized from the ID-token JWT payload.
#[derive(Deserialize)]
struct IdClaims {
    #[serde(default)]
    email: Option<String>,
    // Nested claims under the OpenAI profile namespace.
    #[serde(rename = "https://api.openai.com/profile", default)]
    profile: Option<ProfileClaims>,
    // Nested claims under the OpenAI auth namespace.
    #[serde(rename = "https://api.openai.com/auth", default)]
    auth: Option<AuthClaims>,
}
/// Profile-namespace claims; used as an email fallback.
#[derive(Deserialize)]
struct ProfileClaims {
    #[serde(default)]
    email: Option<String>,
}
/// Auth-namespace claims carrying plan and account identifiers.
#[derive(Deserialize)]
struct AuthClaims {
    #[serde(default)]
    chatgpt_plan_type: Option<PlanType>,
    #[serde(default)]
    chatgpt_user_id: Option<String>,
    // Legacy fallback for `chatgpt_user_id`.
    #[serde(default)]
    user_id: Option<String>,
    #[serde(default)]
    chatgpt_account_id: Option<String>,
}
/// Errors produced while parsing an ID token JWT.
#[derive(Debug, Error)]
pub enum IdTokenInfoError {
    /// The JWT did not have the expected three-segment form.
    #[error("invalid ID token format")]
    InvalidFormat,
    /// The payload segment was not valid base64url.
    #[error(transparent)]
    Base64(#[from] base64::DecodeError),
    /// The decoded payload was not valid JSON for the expected claims.
    #[error(transparent)]
    Json(#[from] serde_json::Error),
}
/// Parses the claims we care about out of a ChatGPT ID-token JWT.
///
/// Validates the three-segment shape, base64url-decodes the payload, and
/// flattens the nested profile/auth claims into an `IdTokenInfo`. The top-
/// level email claim wins over the profile-namespace one; `user_id` is a
/// fallback for `chatgpt_user_id`.
pub fn parse_chatgpt_jwt_claims(jwt: &str) -> Result<IdTokenInfo, IdTokenInfoError> {
    // JWT format: header.payload.signature
    let mut segments = jwt.split('.');
    let payload_b64 = match (segments.next(), segments.next(), segments.next()) {
        (Some(header), Some(payload), Some(signature))
            if !header.is_empty() && !payload.is_empty() && !signature.is_empty() =>
        {
            payload
        }
        _ => return Err(IdTokenInfoError::InvalidFormat),
    };
    let payload_bytes = base64::engine::general_purpose::URL_SAFE_NO_PAD.decode(payload_b64)?;
    let claims: IdClaims = serde_json::from_slice(&payload_bytes)?;
    let email = claims
        .email
        .or_else(|| claims.profile.and_then(|profile| profile.email));
    let (chatgpt_plan_type, chatgpt_user_id, chatgpt_account_id) = match claims.auth {
        Some(auth) => (
            auth.chatgpt_plan_type,
            auth.chatgpt_user_id.or(auth.user_id),
            auth.chatgpt_account_id,
        ),
        None => (None, None, None),
    };
    Ok(IdTokenInfo {
        email,
        raw_jwt: jwt.to_string(),
        chatgpt_plan_type,
        chatgpt_user_id,
        chatgpt_account_id,
    })
}
/// Serde helper: deserializes the raw JWT string and parses its claims into
/// an `IdTokenInfo`.
fn deserialize_id_token<'de, D>(deserializer: D) -> Result<IdTokenInfo, D::Error>
where
    D: serde::Deserializer<'de>,
{
    let s = String::deserialize(deserializer)?;
    parse_chatgpt_jwt_claims(&s).map_err(serde::de::Error::custom)
}
/// Serde helper: serializes `IdTokenInfo` back to its original raw JWT string.
fn serialize_id_token<S>(id_token: &IdTokenInfo, serializer: S) -> Result<S::Ok, S::Error>
where
    S: serde::Serializer,
{
    serializer.serialize_str(&id_token.raw_jwt)
}
pub use codex_auth::IdTokenInfo;
pub use codex_auth::IdTokenInfoError;
pub use codex_auth::KnownPlan;
pub use codex_auth::PlanType;
pub use codex_auth::TokenData;
pub use codex_auth::parse_chatgpt_jwt_claims;
#[cfg(test)]
#[path = "token_data_tests.rs"]

View File

@@ -11,6 +11,7 @@ path = "lib.rs"
anyhow = { workspace = true }
assert_cmd = { workspace = true }
base64 = { workspace = true }
codex-auth = { workspace = true }
codex-core = { workspace = true }
codex-protocol = { workspace = true }
codex-utils-absolute-path = { workspace = true }

View File

@@ -1,5 +1,5 @@
#![allow(clippy::expect_used)]
use codex_core::auth::CODEX_API_KEY_ENV_VAR;
use codex_auth::CODEX_API_KEY_ENV_VAR;
use std::path::Path;
use tempfile::TempDir;
use wiremock::MockServer;

View File

@@ -1,5 +1,4 @@
use assert_cmd::Command as AssertCommand;
use codex_core::auth::CODEX_API_KEY_ENV_VAR;
use codex_protocol::protocol::GitInfo;
use codex_utils_cargo_bin::find_resource;
use core_test_support::fs_wait;

View File

@@ -1,8 +1,8 @@
use anyhow::Context;
use anyhow::Result;
use chrono::Utc;
use codex_auth::OPENAI_API_KEY_ENV_VAR;
use codex_core::CodexAuth;
use codex_core::auth::OPENAI_API_KEY_ENV_VAR;
use codex_protocol::ThreadId;
use codex_protocol::protocol::CodexErrorInfo;
use codex_protocol::protocol::ConversationAudioParams;

View File

@@ -11,6 +11,7 @@ workspace = true
base64 = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
codex-client = { workspace = true }
codex-auth = { workspace = true }
codex-core = { workspace = true }
codex-app-server-protocol = { workspace = true }
rand = { workspace = true }

View File

@@ -2,6 +2,13 @@ mod device_code_auth;
mod pkce;
mod server;
pub use codex_auth::AuthDotJson;
pub use codex_auth::CODEX_API_KEY_ENV_VAR;
pub use codex_auth::OPENAI_API_KEY_ENV_VAR;
pub use codex_auth::TokenData;
pub use codex_auth::login_with_api_key;
pub use codex_auth::logout;
pub use codex_auth::save_auth;
pub use codex_client::BuildCustomCaTransportError as BuildLoginHttpClientError;
pub use device_code_auth::DeviceCode;
pub use device_code_auth::complete_device_code_login;
@@ -16,11 +23,4 @@ pub use server::run_login_server;
pub use codex_app_server_protocol::AuthMode;
pub use codex_core::AuthManager;
pub use codex_core::CodexAuth;
pub use codex_core::auth::AuthDotJson;
pub use codex_core::auth::CLIENT_ID;
pub use codex_core::auth::CODEX_API_KEY_ENV_VAR;
pub use codex_core::auth::OPENAI_API_KEY_ENV_VAR;
pub use codex_core::auth::login_with_api_key;
pub use codex_core::auth::logout;
pub use codex_core::auth::save_auth;
pub use codex_core::token_data::TokenData;

View File

@@ -28,13 +28,13 @@ use crate::pkce::generate_pkce;
use base64::Engine;
use chrono::Utc;
use codex_app_server_protocol::AuthMode;
use codex_auth::AuthCredentialsStoreMode;
use codex_auth::AuthDotJson;
use codex_auth::TokenData;
use codex_auth::parse_chatgpt_jwt_claims;
use codex_auth::save_auth;
use codex_client::build_reqwest_client_with_custom_ca;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::auth::AuthDotJson;
use codex_core::auth::save_auth;
use codex_core::default_client::originator;
use codex_core::token_data::TokenData;
use codex_core::token_data::parse_chatgpt_jwt_claims;
use rand::RngCore;
use serde_json::Value as JsonValue;
use tiny_http::Header;

View File

@@ -1,27 +0,0 @@
[package]
name = "codex-memories"
version.workspace = true
edition.workspace = true
license.workspace = true
[lib]
name = "codex_memories"
path = "src/lib.rs"
[lints]
workspace = true
[dependencies]
anyhow = { workspace = true }
chrono = { workspace = true }
codex-api = { workspace = true }
codex-protocol = { workspace = true }
codex-state = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["fs", "macros", "rt"] }
tracing = { workspace = true }
uuid = { workspace = true, features = ["serde", "v4", "v5"] }
[dev-dependencies]
pretty_assertions = { workspace = true }
tempfile = { workspace = true }

View File

@@ -1,2 +0,0 @@
pub mod memories;
pub mod memory_trace;

View File

@@ -1,29 +0,0 @@
//! Shared memory filesystem and artifact utilities.
pub mod citations;
pub mod control;
pub mod storage;
use std::path::Path;
use std::path::PathBuf;
// File-layout constants for the memory artifacts directory.
mod artifacts {
    /// Subdirectory holding one summary file per rollout.
    pub(super) const ROLLOUT_SUMMARIES_SUBDIR: &str = "rollout_summaries";
    /// Merged raw-memories markdown file name.
    pub(super) const RAW_MEMORIES_FILENAME: &str = "raw_memories.md";
}
/// Root directory for all memory artifacts under the Codex home directory.
pub fn memory_root(codex_home: &Path) -> PathBuf {
    let mut root = codex_home.to_path_buf();
    root.push("memories");
    root
}
/// Directory containing per-rollout summary files under the memory root.
pub fn rollout_summaries_dir(root: &Path) -> PathBuf {
    let mut dir = root.to_path_buf();
    dir.push(artifacts::ROLLOUT_SUMMARIES_SUBDIR);
    dir
}
/// Path of the merged raw-memories markdown file under the memory root.
pub fn raw_memories_file(root: &Path) -> PathBuf {
    let mut file = root.to_path_buf();
    file.push(artifacts::RAW_MEMORIES_FILENAME);
    file
}
/// Creates the on-disk memory layout; `create_dir_all` on the summaries
/// subdirectory also creates the root itself.
pub async fn ensure_layout(root: &Path) -> std::io::Result<()> {
    tokio::fs::create_dir_all(rollout_summaries_dir(root)).await
}

View File

@@ -1,480 +0,0 @@
use crate::memories::ensure_layout;
use crate::memories::raw_memories_file;
use crate::memories::rollout_summaries_dir;
use codex_state::Stage1Output;
use std::collections::HashSet;
use std::fmt::Write as _;
use std::path::Path;
use tracing::warn;
use uuid::Uuid;
/// Rebuild `raw_memories.md` from DB-backed stage-1 outputs.
///
/// Ensures the on-disk layout exists first, then writes at most the first
/// `max_raw_memories_for_consolidation` entries.
pub async fn rebuild_raw_memories_file_from_memories(
    root: &Path,
    memories: &[Stage1Output],
    max_raw_memories_for_consolidation: usize,
) -> std::io::Result<()> {
    ensure_layout(root).await?;
    rebuild_raw_memories_file(root, memories, max_raw_memories_for_consolidation).await
}
/// Syncs canonical rollout summary files from DB-backed stage-1 output rows.
///
/// Prunes summary files whose stem is no longer retained, rewrites one file
/// per retained memory, and — when nothing is retained — also removes the
/// derived artifacts (`MEMORY.md`, `memory_summary.md`, the `skills/` dir).
pub async fn sync_rollout_summaries_from_memories(
    root: &Path,
    memories: &[Stage1Output],
    max_raw_memories_for_consolidation: usize,
) -> std::io::Result<()> {
    ensure_layout(root).await?;
    let retained = retained_memories(memories, max_raw_memories_for_consolidation);
    // File stems that must survive pruning.
    let keep = retained
        .iter()
        .map(rollout_summary_file_stem)
        .collect::<HashSet<_>>();
    prune_rollout_summaries(root, &keep).await?;
    for memory in retained {
        write_rollout_summary_for_thread(root, memory).await?;
    }
    if retained.is_empty() {
        // Nothing retained: clean up derived artifacts; missing files are fine.
        for file_name in ["MEMORY.md", "memory_summary.md"] {
            let path = root.join(file_name);
            if let Err(err) = tokio::fs::remove_file(path).await
                && err.kind() != std::io::ErrorKind::NotFound
            {
                return Err(err);
            }
        }
        let skills_dir = root.join("skills");
        if let Err(err) = tokio::fs::remove_dir_all(skills_dir).await
            && err.kind() != std::io::ErrorKind::NotFound
        {
            return Err(err);
        }
    }
    Ok(())
}
/// Rewrites `raw_memories.md` from the retained stage-1 memories.
///
/// Writes a placeholder body when nothing is retained; otherwise emits one
/// section per memory with its metadata and trimmed raw-memory text.
async fn rebuild_raw_memories_file(
    root: &Path,
    memories: &[Stage1Output],
    max_raw_memories_for_consolidation: usize,
) -> std::io::Result<()> {
    let retained = retained_memories(memories, max_raw_memories_for_consolidation);
    let mut body = String::from("# Raw Memories\n\n");
    if retained.is_empty() {
        body.push_str("No raw memories yet.\n");
        return tokio::fs::write(raw_memories_file(root), body).await;
    }
    body.push_str("Merged stage-1 raw memories (latest first):\n\n");
    for memory in retained {
        writeln!(body, "## Thread `{}`", memory.thread_id).map_err(raw_memories_format_error)?;
        writeln!(
            body,
            "updated_at: {}",
            memory.source_updated_at.to_rfc3339()
        )
        .map_err(raw_memories_format_error)?;
        writeln!(body, "cwd: {}", memory.cwd.display()).map_err(raw_memories_format_error)?;
        writeln!(body, "rollout_path: {}", memory.rollout_path.display())
            .map_err(raw_memories_format_error)?;
        // Cross-link to the canonical per-rollout summary file.
        let rollout_summary_file = format!("{}.md", rollout_summary_file_stem(memory));
        writeln!(body, "rollout_summary_file: {rollout_summary_file}")
            .map_err(raw_memories_format_error)?;
        writeln!(body).map_err(raw_memories_format_error)?;
        body.push_str(memory.raw_memory.trim());
        body.push_str("\n\n");
    }
    tokio::fs::write(raw_memories_file(root), body).await
}
/// Removes `.md` files in the rollout-summaries directory whose stem is not
/// in `keep`. A missing directory is success; individual remove failures are
/// logged and skipped rather than aborting the prune.
async fn prune_rollout_summaries(root: &Path, keep: &HashSet<String>) -> std::io::Result<()> {
    let dir_path = rollout_summaries_dir(root);
    let mut dir = match tokio::fs::read_dir(&dir_path).await {
        Ok(dir) => dir,
        Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(()),
        Err(err) => return Err(err),
    };
    while let Some(entry) = dir.next_entry().await? {
        let path = entry.path();
        let Some(file_name) = path.file_name().and_then(|name| name.to_str()) else {
            continue;
        };
        // Only `.md` files are managed here.
        let Some(stem) = file_name.strip_suffix(".md") else {
            continue;
        };
        if !keep.contains(stem)
            && let Err(err) = tokio::fs::remove_file(&path).await
            && err.kind() != std::io::ErrorKind::NotFound
        {
            warn!(
                "failed pruning outdated rollout summary {}: {err}",
                path.display()
            );
        }
    }
    Ok(())
}
/// Writes the canonical rollout summary file for one memory: a small
/// metadata header followed by the summary text.
async fn write_rollout_summary_for_thread(
    root: &Path,
    memory: &Stage1Output,
) -> std::io::Result<()> {
    let file_stem = rollout_summary_file_stem(memory);
    let path = rollout_summaries_dir(root).join(format!("{file_stem}.md"));
    let mut body = String::new();
    writeln!(body, "thread_id: {}", memory.thread_id).map_err(rollout_summary_format_error)?;
    writeln!(
        body,
        "updated_at: {}",
        memory.source_updated_at.to_rfc3339()
    )
    .map_err(rollout_summary_format_error)?;
    writeln!(body, "rollout_path: {}", memory.rollout_path.display())
        .map_err(rollout_summary_format_error)?;
    writeln!(body, "cwd: {}", memory.cwd.display()).map_err(rollout_summary_format_error)?;
    // git_branch is optional and omitted from the header when absent.
    if let Some(git_branch) = memory.git_branch.as_deref() {
        writeln!(body, "git_branch: {git_branch}").map_err(rollout_summary_format_error)?;
    }
    writeln!(body).map_err(rollout_summary_format_error)?;
    body.push_str(&memory.rollout_summary);
    body.push('\n');
    tokio::fs::write(path, body).await
}
/// Returns the leading slice of at most `max_raw_memories_for_consolidation`
/// memories.
fn retained_memories(
    memories: &[Stage1Output],
    max_raw_memories_for_consolidation: usize,
) -> &[Stage1Output] {
    let cutoff = memories.len().min(max_raw_memories_for_consolidation);
    &memories[..cutoff]
}
/// Converts a formatting failure while building `raw_memories.md` into an I/O error.
fn raw_memories_format_error(err: std::fmt::Error) -> std::io::Error {
    let message = format!("format raw memories: {err}");
    std::io::Error::other(message)
}
/// Wraps a rollout-summary formatting failure as an I/O error.
fn rollout_summary_format_error(err: std::fmt::Error) -> std::io::Error {
    let reason = format!("format rollout summary: {err}");
    std::io::Error::other(reason)
}
/// Derives the canonical rollout-summary file stem for a stage-1 memory row;
/// see `rollout_summary_file_stem_from_parts` for the format.
pub fn rollout_summary_file_stem(memory: &Stage1Output) -> String {
    rollout_summary_file_stem_from_parts(
        memory.thread_id,
        memory.source_updated_at,
        memory.rollout_slug.as_deref(),
    )
}
/// Builds the rollout-summary file stem `<timestamp>-<hash>[-<slug>]` from
/// the raw parts of a stage-1 memory row.
pub fn rollout_summary_file_stem_from_parts(
    thread_id: codex_protocol::ThreadId,
    source_updated_at: chrono::DateTime<chrono::Utc>,
    rollout_slug: Option<&str>,
) -> String {
    const ROLLOUT_SLUG_MAX_LEN: usize = 60;
    const SHORT_HASH_ALPHABET: &[u8; 62] =
        b"0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
    // 62^4 — the number of distinct four-character base-62 hashes.
    const SHORT_HASH_SPACE: u32 = 14_776_336;
    let thread_id = thread_id.to_string();
    let (timestamp_fragment, short_hash_seed) = match Uuid::parse_str(&thread_id) {
        Ok(thread_uuid) => {
            // Prefer the timestamp embedded in the UUID; fall back to the
            // row's updated-at when the UUID carries none.
            let timestamp = thread_uuid
                .get_timestamp()
                .and_then(|uuid_timestamp| {
                    let (seconds, nanos) = uuid_timestamp.to_unix();
                    i64::try_from(seconds).ok().and_then(|secs| {
                        chrono::DateTime::<chrono::Utc>::from_timestamp(secs, nanos)
                    })
                })
                .unwrap_or(source_updated_at);
            let short_hash_seed = (thread_uuid.as_u128() & 0xFFFF_FFFF) as u32;
            (
                timestamp.format("%Y-%m-%dT%H-%M-%S").to_string(),
                short_hash_seed,
            )
        }
        Err(_) => {
            // Non-UUID ids: 31-based rolling hash over the id bytes.
            let mut short_hash_seed = 0u32;
            for byte in thread_id.bytes() {
                short_hash_seed = short_hash_seed
                    .wrapping_mul(31)
                    .wrapping_add(u32::from(byte));
            }
            (
                source_updated_at.format("%Y-%m-%dT%H-%M-%S").to_string(),
                short_hash_seed,
            )
        }
    };
    // Render the seed as four base-62 digits, most significant first.
    let mut short_hash_value = short_hash_seed % SHORT_HASH_SPACE;
    let mut short_hash = ['0'; 4];
    for digit in short_hash.iter_mut().rev() {
        let idx = usize::try_from(short_hash_value % 62).unwrap_or(0);
        *digit = char::from(SHORT_HASH_ALPHABET[idx]);
        short_hash_value /= 62;
    }
    let mut stem = format!(
        "{timestamp_fragment}-{}",
        short_hash.iter().collect::<String>()
    );
    // Slug: trimmed, lowercased, non-alphanumerics replaced by `_`,
    // truncated to ROLLOUT_SLUG_MAX_LEN chars; dropped entirely when empty.
    let sanitized_slug = rollout_slug
        .map(str::trim)
        .filter(|slug| !slug.is_empty())
        .map(|slug| {
            slug.chars()
                .map(|ch| {
                    if ch.is_ascii_alphanumeric() {
                        ch.to_ascii_lowercase()
                    } else {
                        '_'
                    }
                })
                .collect::<String>()
        })
        .map(|slug| slug.chars().take(ROLLOUT_SLUG_MAX_LEN).collect::<String>())
        .filter(|slug| !slug.is_empty());
    if let Some(slug) = sanitized_slug {
        stem.push('-');
        stem.push_str(&slug);
    }
    stem
}
#[cfg(test)]
mod tests {
use super::rebuild_raw_memories_file_from_memories;
use super::rollout_summary_file_stem;
use super::rollout_summary_file_stem_from_parts;
use super::sync_rollout_summaries_from_memories;
use crate::memories::control::clear_memory_root_contents;
use crate::memories::ensure_layout;
use crate::memories::memory_root;
use crate::memories::raw_memories_file;
use crate::memories::rollout_summaries_dir;
use chrono::TimeZone;
use chrono::Utc;
use codex_protocol::ThreadId;
use codex_state::Stage1Output;
use pretty_assertions::assert_eq;
use std::path::PathBuf;
use tempfile::tempdir;
const FIXED_PREFIX: &str = "2025-02-11T15-35-19-jqmb";
/// Builds a minimal `Stage1Output` fixture with fixed timestamps and the
/// given optional rollout slug.
fn stage1_output_with_slug(thread_id: ThreadId, rollout_slug: Option<&str>) -> Stage1Output {
    Stage1Output {
        thread_id,
        source_updated_at: Utc.timestamp_opt(123, 0).single().expect("timestamp"),
        raw_memory: "raw memory".to_string(),
        rollout_summary: "summary".to_string(),
        rollout_slug: rollout_slug.map(ToString::to_string),
        rollout_path: PathBuf::from("/tmp/rollout.jsonl"),
        cwd: PathBuf::from("/tmp/workspace"),
        git_branch: None,
        generated_at: Utc.timestamp_opt(124, 0).single().expect("timestamp"),
    }
}
/// Thread id whose embedded UUID timestamp maps to `FIXED_PREFIX`.
fn fixed_thread_id() -> ThreadId {
    ThreadId::try_from("0194f5a6-89ab-7cde-8123-456789abcdef").expect("valid thread id")
}
// The memory root is always `<codex_home>/memories`.
#[test]
fn memory_root_uses_shared_global_path() {
    let dir = tempdir().expect("tempdir");
    let codex_home = dir.path().join("codex");
    assert_eq!(memory_root(&codex_home), codex_home.join("memories"));
}
// Clearing must empty the root's contents while keeping the root directory.
#[tokio::test]
async fn clear_memory_root_contents_preserves_root_directory() {
    let dir = tempdir().expect("tempdir");
    let root = dir.path().join("memory");
    let nested_dir = root.join("rollout_summaries");
    tokio::fs::create_dir_all(&nested_dir)
        .await
        .expect("create rollout summaries dir");
    tokio::fs::write(root.join("MEMORY.md"), "stale memory index\n")
        .await
        .expect("write memory index");
    tokio::fs::write(nested_dir.join("rollout.md"), "stale rollout\n")
        .await
        .expect("write rollout summary");
    clear_memory_root_contents(&root)
        .await
        .expect("clear memory root contents");
    assert!(
        tokio::fs::try_exists(&root)
            .await
            .expect("check memory root existence"),
        "memory root should still exist after clearing contents"
    );
    let mut entries = tokio::fs::read_dir(&root)
        .await
        .expect("read memory root after clear");
    assert!(
        entries
            .next_entry()
            .await
            .expect("read next entry")
            .is_none(),
        "memory root should be empty after clearing contents"
    );
}
// A symlinked root is rejected (InvalidInput) and the link target untouched.
#[cfg(unix)]
#[tokio::test]
async fn clear_memory_root_contents_rejects_symlinked_root() {
    let dir = tempdir().expect("tempdir");
    let target = dir.path().join("outside");
    tokio::fs::create_dir_all(&target)
        .await
        .expect("create symlink target dir");
    let target_file = target.join("keep.txt");
    tokio::fs::write(&target_file, "keep\n")
        .await
        .expect("write target file");
    let root = dir.path().join("memory");
    std::os::unix::fs::symlink(&target, &root).expect("create memory root symlink");
    let err = clear_memory_root_contents(&root)
        .await
        .expect_err("symlinked memory root should be rejected");
    assert_eq!(err.kind(), std::io::ErrorKind::InvalidInput);
    assert!(
        tokio::fs::try_exists(&target_file)
            .await
            .expect("check target file existence"),
        "rejecting a symlinked memory root should not delete the symlink target"
    );
}
// End-to-end: sync prunes stale thread-id-named summaries, writes the
// canonical stem-named file, and rebuild cross-links it in raw_memories.md.
#[tokio::test]
async fn sync_rollout_summaries_and_raw_memories_file_keeps_latest_memories_only() {
    let dir = tempdir().expect("tempdir");
    let root = dir.path().join("memory");
    ensure_layout(&root).await.expect("ensure layout");
    let keep_id = ThreadId::default().to_string();
    let drop_id = ThreadId::default().to_string();
    // Pre-seed stale summaries named after thread ids (the legacy scheme).
    let keep_path = rollout_summaries_dir(&root).join(format!("{keep_id}.md"));
    let drop_path = rollout_summaries_dir(&root).join(format!("{drop_id}.md"));
    tokio::fs::write(&keep_path, "keep")
        .await
        .expect("write keep");
    tokio::fs::write(&drop_path, "drop")
        .await
        .expect("write drop");
    let memories = vec![Stage1Output {
        thread_id: ThreadId::try_from(keep_id.clone()).expect("thread id"),
        source_updated_at: Utc.timestamp_opt(100, 0).single().expect("timestamp"),
        raw_memory: "raw memory".to_string(),
        rollout_summary: "short summary".to_string(),
        rollout_slug: None,
        rollout_path: PathBuf::from("/tmp/rollout-100.jsonl"),
        cwd: PathBuf::from("/tmp/workspace"),
        git_branch: None,
        generated_at: Utc.timestamp_opt(101, 0).single().expect("timestamp"),
    }];
    sync_rollout_summaries_from_memories(&root, &memories, 100)
        .await
        .expect("sync rollout summaries");
    rebuild_raw_memories_file_from_memories(&root, &memories, 100)
        .await
        .expect("rebuild raw memories");
    assert!(
        !tokio::fs::try_exists(&keep_path)
            .await
            .expect("check stale keep path"),
        "sync should prune stale filename that used thread id only"
    );
    assert!(
        !tokio::fs::try_exists(&drop_path)
            .await
            .expect("check stale drop path"),
        "sync should prune stale filename for dropped thread"
    );
    let mut dir = tokio::fs::read_dir(rollout_summaries_dir(&root))
        .await
        .expect("open rollout summaries dir");
    let mut files = Vec::new();
    while let Some(entry) = dir.next_entry().await.expect("read dir entry") {
        files.push(entry.file_name().to_string_lossy().to_string());
    }
    files.sort_unstable();
    assert_eq!(files.len(), 1);
    let canonical_rollout_summary_file = &files[0];
    let raw_memories = tokio::fs::read_to_string(raw_memories_file(&root))
        .await
        .expect("read raw memories");
    assert!(raw_memories.contains("raw memory"));
    assert!(raw_memories.contains(&keep_id));
    assert!(raw_memories.contains("cwd: /tmp/workspace"));
    assert!(raw_memories.contains("rollout_path: /tmp/rollout-100.jsonl"));
    assert!(raw_memories.contains(&format!(
        "rollout_summary_file: {canonical_rollout_summary_file}"
    )));
}
/// Without a slug, both stem builders must agree on the fixed fallback form.
#[test]
fn rollout_summary_file_stem_uses_uuid_timestamp_and_hash_when_slug_missing() {
    let thread_id = fixed_thread_id();
    let memory = stage1_output_with_slug(thread_id, None);
    // Struct-based and parts-based builders must produce the same stem.
    let from_memory = rollout_summary_file_stem(&memory);
    let from_parts = rollout_summary_file_stem_from_parts(
        memory.thread_id,
        memory.source_updated_at,
        memory.rollout_slug.as_deref(),
    );
    assert_eq!(from_memory, FIXED_PREFIX);
    assert_eq!(from_parts, FIXED_PREFIX);
}
/// A messy slug is lowercased, non-alphanumerics become underscores, and the
/// sanitized suffix is capped at 60 characters.
#[test]
fn rollout_summary_file_stem_sanitizes_and_truncates_slug() {
    let thread_id = fixed_thread_id();
    let memory = stage1_output_with_slug(
        thread_id,
        Some("Unsafe Slug/With Spaces & Symbols + EXTRA_LONG_12345_67890_ABCDE_fghij_klmno"),
    );
    // Everything after `<prefix>-` is the sanitized slug suffix.
    let stem = rollout_summary_file_stem(&memory);
    let prefix = format!("{FIXED_PREFIX}-");
    let slug = stem
        .strip_prefix(&prefix)
        .expect("slug suffix should be present");
    assert_eq!(slug.len(), 60);
    assert_eq!(
        slug,
        "unsafe_slug_with_spaces___symbols___extra_long_12345_67890_a"
    );
}
/// An empty slug behaves exactly like a missing one: fallback stem only.
#[test]
fn rollout_summary_file_stem_uses_uuid_timestamp_and_hash_when_slug_is_empty() {
    let memory = stage1_output_with_slug(fixed_thread_id(), Some(""));
    assert_eq!(rollout_summary_file_stem(&memory), FIXED_PREFIX);
}
}

View File

@@ -1,281 +0,0 @@
use anyhow::Result;
use codex_api::MemorySummarizeOutput;
use codex_api::RawMemory as ApiRawMemory;
use codex_api::RawMemoryMetadata as ApiRawMemoryMetadata;
use serde_json::Map;
use serde_json::Value;
use std::path::Path;
use std::path::PathBuf;
/// A memory produced by pairing a prepared trace with one summarizer output.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BuiltMemory {
    /// Identifier derived from the trace's batch position and file stem.
    pub memory_id: String,
    /// Path of the trace file this memory was built from.
    pub source_path: PathBuf,
    /// Raw memory text returned by the summarizer.
    pub raw_memory: String,
    /// Condensed summary text returned by the summarizer.
    pub memory_summary: String,
}
/// A trace file loaded and normalized into the request payload for the
/// memory-summarize API.
#[derive(Debug, Clone)]
pub struct PreparedTrace {
    /// Identifier derived from the trace's batch position and file stem.
    pub memory_id: String,
    /// Path of the source trace file.
    pub source_path: PathBuf,
    /// Normalized trace items plus metadata, ready to send to the API.
    pub payload: ApiRawMemory,
}
/// Loads and prepares every trace file in `trace_paths`, preserving order.
///
/// Memory ids embed a 1-based position index. Fails on the first trace that
/// cannot be read or normalized.
pub async fn load_trace_requests(trace_paths: &[PathBuf]) -> Result<Vec<PreparedTrace>> {
    let mut requests = Vec::with_capacity(trace_paths.len());
    for (position, path) in trace_paths.iter().enumerate() {
        let prepared = prepare_trace(position + 1, path).await?;
        requests.push(prepared);
    }
    Ok(requests)
}
/// Pairs each prepared trace with its summarizer output, in order.
///
/// # Errors
/// Fails when `output` does not have exactly one entry per prepared trace.
pub fn build_memories_from_output(
    prepared: Vec<PreparedTrace>,
    output: Vec<MemorySummarizeOutput>,
) -> Result<Vec<BuiltMemory>> {
    if prepared.len() != output.len() {
        anyhow::bail!(
            "unexpected memory summarize output length: expected {}, got {}",
            prepared.len(),
            output.len()
        );
    }
    let mut built = Vec::with_capacity(prepared.len());
    for (trace, summary) in prepared.into_iter().zip(output) {
        built.push(BuiltMemory {
            memory_id: trace.memory_id,
            source_path: trace.source_path,
            raw_memory: summary.raw_memory,
            memory_summary: summary.memory_summary,
        });
    }
    Ok(built)
}
/// Reads one trace file and converts it into a [`PreparedTrace`].
///
/// `index` is the trace's 1-based position in the request batch and is folded
/// into the generated memory id.
async fn prepare_trace(index: usize, path: &Path) -> Result<PreparedTrace> {
    let text = load_trace_text(path).await?;
    let items = load_trace_items(path, &text)?;
    let source_path = path.to_path_buf();
    let memory_id = build_memory_id(index, path);
    let metadata = ApiRawMemoryMetadata {
        source_path: source_path.display().to_string(),
    };
    let payload = ApiRawMemory {
        id: memory_id.clone(),
        metadata,
        items,
    };
    Ok(PreparedTrace {
        memory_id,
        source_path,
        payload,
    })
}
/// Reads a trace file and decodes its bytes into text via
/// `decode_trace_bytes` (BOM-aware, with a lossy fallback).
async fn load_trace_text(path: &Path) -> Result<String> {
    let bytes = tokio::fs::read(path).await?;
    Ok(decode_trace_bytes(&bytes))
}
/// Decodes trace bytes into text.
///
/// Order of attempts: (1) strip a UTF-8 BOM and decode the remainder as
/// UTF-8; (2) decode the whole buffer as UTF-8; (3) fall back to mapping each
/// byte to the Unicode code point of the same value (Latin-1 style), which
/// never fails.
fn decode_trace_bytes(raw: &[u8]) -> String {
    const UTF8_BOM: [u8; 3] = [0xEF, 0xBB, 0xBF];
    if let Some(body) = raw.strip_prefix(&UTF8_BOM) {
        if let Ok(text) = std::str::from_utf8(body) {
            return text.to_string();
        }
    }
    match std::str::from_utf8(raw) {
        Ok(text) => text.to_string(),
        // Lossless per-byte mapping keeps non-UTF-8 traces readable.
        Err(_) => raw.iter().copied().map(char::from).collect(),
    }
}
/// Parses trace text into JSON items.
///
/// A top-level JSON array is used directly (keeping only object elements);
/// otherwise the text is treated as JSON-lines, where each line may hold an
/// object or an array of objects. The collected items are then filtered
/// through `normalize_trace_items`.
fn load_trace_items(path: &Path, text: &str) -> Result<Vec<Value>> {
    // Whole-document form: a single top-level JSON array.
    if let Ok(Value::Array(elements)) = serde_json::from_str::<Value>(text) {
        let objects: Vec<Value> = elements
            .into_iter()
            .filter(serde_json::Value::is_object)
            .collect();
        if objects.is_empty() {
            anyhow::bail!("no object items found in trace file: {}", path.display());
        }
        return normalize_trace_items(objects, path);
    }
    // JSON-lines form: scan line by line, silently ignoring anything that
    // does not parse or does not look like JSON at all.
    let mut collected = Vec::new();
    for raw_line in text.lines() {
        let candidate = raw_line.trim();
        let looks_like_json = candidate.starts_with('{') || candidate.starts_with('[');
        if candidate.is_empty() || !looks_like_json {
            continue;
        }
        match serde_json::from_str::<Value>(candidate) {
            Ok(value @ Value::Object(_)) => collected.push(value),
            Ok(Value::Array(elements)) => {
                collected.extend(elements.into_iter().filter(serde_json::Value::is_object));
            }
            // Scalars and parse failures are skipped, matching best-effort
            // handling of mixed trace logs.
            _ => {}
        }
    }
    if collected.is_empty() {
        anyhow::bail!("no JSON items parsed from trace file: {}", path.display());
    }
    normalize_trace_items(collected, path)
}
/// Filters parsed trace items down to the ones worth summarizing.
///
/// Items carrying a `payload` key are treated as envelopes: only envelopes
/// with `type == "response_item"` contribute, and their payload may be one
/// object or an array of objects. Items without a `payload` key are kept
/// as-is. Every candidate must pass `is_allowed_trace_item`.
///
/// # Errors
/// Fails when nothing survives the filtering.
fn normalize_trace_items(items: Vec<Value>, path: &Path) -> Result<Vec<Value>> {
    let mut kept = Vec::new();
    for item in items {
        let Value::Object(fields) = item else {
            continue;
        };
        if let Some(payload) = fields.get("payload") {
            // Envelope form: only response_item envelopes are unwrapped;
            // everything else is dropped, never treated as a bare item.
            if fields.get("type").and_then(Value::as_str) != Some("response_item") {
                continue;
            }
            match payload {
                Value::Object(inner) => {
                    if is_allowed_trace_item(inner) {
                        kept.push(Value::Object(inner.clone()));
                    }
                }
                Value::Array(inner_items) => {
                    for inner in inner_items {
                        if let Value::Object(inner) = inner {
                            if is_allowed_trace_item(inner) {
                                kept.push(Value::Object(inner.clone()));
                            }
                        }
                    }
                }
                _ => {}
            }
        } else if is_allowed_trace_item(&fields) {
            // Bare form: the object itself is the candidate item.
            kept.push(Value::Object(fields));
        }
    }
    if kept.is_empty() {
        anyhow::bail!(
            "no valid trace items after normalization: {}",
            path.display()
        );
    }
    Ok(kept)
}
/// Decides whether a trace item may contribute to memory building.
///
/// Items with no string `type` field are rejected. `message` items are kept
/// only for the listed roles (notably excluding `tool`); every other typed
/// item is kept.
fn is_allowed_trace_item(item: &Map<String, Value>) -> bool {
    match item.get("type").and_then(Value::as_str) {
        None => false,
        Some("message") => matches!(
            item.get("role").and_then(Value::as_str),
            Some("assistant" | "system" | "developer" | "user")
        ),
        Some(_) => true,
    }
}
/// Builds a memory id from the trace's 1-based batch position and file stem.
///
/// Falls back to the literal stem "memory" when the path has no usable stem
/// (e.g. an empty path).
fn build_memory_id(index: usize, path: &Path) -> String {
    let stem: std::borrow::Cow<'_, str> = match path.file_stem() {
        Some(raw) if !raw.is_empty() => raw.to_string_lossy(),
        _ => std::borrow::Cow::Borrowed("memory"),
    };
    format!("memory_{index}_{stem}")
}
#[cfg(test)]
mod tests {
    use super::load_trace_items;
    use super::load_trace_text;
    use super::normalize_trace_items;
    use pretty_assertions::assert_eq;
    use std::path::Path;
    use tempfile::tempdir;
    // Covers the envelope-unwrapping rules: response_item payloads (single
    // object or array) are kept, non-response_item envelopes are dropped,
    // message items with a disallowed role ("tool") are filtered out, and
    // bare allowed objects pass through unchanged.
    #[test]
    fn normalize_trace_items_handles_payload_wrapper_and_message_role_filtering() {
        let items = vec![
            serde_json::json!({
                "type": "response_item",
                "payload": {"type": "message", "role": "assistant", "content": []}
            }),
            serde_json::json!({
                "type": "response_item",
                "payload": [
                    {"type": "message", "role": "user", "content": []},
                    {"type": "message", "role": "tool", "content": []},
                    {"type": "function_call", "name": "shell", "arguments": "{}", "call_id": "c1"}
                ]
            }),
            serde_json::json!({
                "type": "not_response_item",
                "payload": {"type": "message", "role": "assistant", "content": []}
            }),
            serde_json::json!({
                "type": "message",
                "role": "developer",
                "content": []
            }),
        ];
        let normalized = normalize_trace_items(items, Path::new("trace.json")).expect("normalize");
        let expected = vec![
            serde_json::json!({"type": "message", "role": "assistant", "content": []}),
            serde_json::json!({"type": "message", "role": "user", "content": []}),
            serde_json::json!({"type": "function_call", "name": "shell", "arguments": "{}", "call_id": "c1"}),
            serde_json::json!({"type": "message", "role": "developer", "content": []}),
        ];
        assert_eq!(normalized, expected);
    }
    // Covers the JSON-lines path: a line may hold one envelope object or an
    // array of items, with the "tool" role filtered out of the result.
    #[test]
    fn load_trace_items_supports_jsonl_arrays_and_objects() {
        let text = r#"
{"type":"response_item","payload":{"type":"message","role":"assistant","content":[]}}
[{"type":"message","role":"user","content":[]},{"type":"message","role":"tool","content":[]}]
"#;
        let loaded = load_trace_items(Path::new("trace.jsonl"), text).expect("load");
        let expected = vec![
            serde_json::json!({"type":"message","role":"assistant","content":[]}),
            serde_json::json!({"type":"message","role":"user","content":[]}),
        ];
        assert_eq!(loaded, expected);
    }
    // Writes a file starting with the UTF-8 BOM (0xEF 0xBB 0xBF) and checks
    // the decoder strips it, so the text begins at the real payload ('[').
    #[tokio::test]
    async fn load_trace_text_decodes_utf8_sig() {
        let dir = tempdir().expect("tempdir");
        let path = dir.path().join("trace.json");
        tokio::fs::write(
            &path,
            [
                0xEF, 0xBB, 0xBF, b'[', b'{', b'"', b't', b'y', b'p', b'e', b'"', b':', b'"', b'm',
                b'e', b's', b's', b'a', b'g', b'e', b'"', b',', b'"', b'r', b'o', b'l', b'e', b'"',
                b':', b'"', b'u', b's', b'e', b'r', b'"', b',', b'"', b'c', b'o', b'n', b't', b'e',
                b'n', b't', b'"', b':', b'[', b']', b'}', b']',
            ],
        )
        .await
        .expect("write");
        let text = load_trace_text(&path).await.expect("decode");
        assert!(text.starts_with('['));
    }
}