Compare commits

...

11 Commits

Author SHA1 Message Date
Ahmed Ibrahim
8131c67a89 auth 2026-03-18 14:32:02 -07:00
Ahmed Ibrahim
8287818db4 auth 2026-03-18 14:31:58 -07:00
Ahmed Ibrahim
8adbf6b15b codex: fix PR #15079 token data imports
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 13:44:37 -07:00
Ahmed Ibrahim
2c48ebdbfb codex: remove token_data shim from core (#15079)
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 13:38:11 -07:00
Ahmed Ibrahim
79225930db codex: address PR review feedback (#15079)
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 13:27:31 -07:00
Ahmed Ibrahim
c217015a88 codex: preserve provider schema descriptions
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 13:00:00 -07:00
Ahmed Ibrahim
eaeb041503 codex: fix CI failures for codex-auth extraction
Co-authored-by: Codex <noreply@openai.com>
2026-03-18 12:48:36 -07:00
Ahmed Ibrahim
25b6190ab5 Remove migration plan from codex-auth PR
Keep the PR focused on the crate extraction itself by dropping the planning note from the branch.

Co-authored-by: Codex <noreply@openai.com>
2026-03-18 12:31:19 -07:00
Ahmed Ibrahim
ccfcea8125 Fix codex-auth test import ordering
Apply the rustfmt import ordering expected by CI for the moved token data tests.

Co-authored-by: Codex <noreply@openai.com>
2026-03-18 12:30:01 -07:00
Ahmed Ibrahim
bbcf29c235 Update config schema for codex-auth extraction
Regenerate the config schema after moving provider definitions into codex-auth.

Co-authored-by: Codex <noreply@openai.com>
2026-03-18 12:24:10 -07:00
Ahmed Ibrahim
cf801bad4d Extract provider and token modules into codex-auth
Move the foundational provider and token modules into codex-auth while keeping codex-core as the facade. Also move the corresponding unit tests and record the 3-PR migration checkpoints.

Co-authored-by: Codex <noreply@openai.com>
2026-03-18 12:24:10 -07:00
32 changed files with 591 additions and 176 deletions

41
codex-rs/Cargo.lock generated
View File

@@ -1596,6 +1596,23 @@ dependencies = [
"tokio-util",
]
[[package]]
name = "codex-auth"
version = "0.0.0"
dependencies = [
"base64 0.22.1",
"codex-api",
"codex-app-server-protocol",
"http 1.4.0",
"maplit",
"pretty_assertions",
"schemars 0.8.22",
"serde",
"serde_json",
"thiserror 2.0.18",
"toml 0.9.11+spec-1.1.0",
]
[[package]]
name = "codex-backend-client"
version = "0.0.0"
@@ -1840,9 +1857,11 @@ dependencies = [
"codex-arg0",
"codex-artifacts",
"codex-async-utils",
"codex-auth",
"codex-client",
"codex-config",
"codex-connectors",
"codex-core-auth",
"codex-environment",
"codex-execpolicy",
"codex-file-search",
@@ -1935,6 +1954,28 @@ dependencies = [
"zstd",
]
[[package]]
name = "codex-core-auth"
version = "0.0.0"
dependencies = [
"anyhow",
"base64 0.22.1",
"chrono",
"codex-app-server-protocol",
"codex-auth",
"codex-keyring-store",
"keyring",
"once_cell",
"pretty_assertions",
"schemars 0.8.22",
"serde",
"serde_json",
"sha2",
"tempfile",
"tokio",
"tracing",
]
[[package]]
name = "codex-debug-client"
version = "0.0.0"

View File

@@ -18,10 +18,12 @@ members = [
"cli",
"connectors",
"config",
"codex-auth",
"shell-command",
"shell-escalation",
"skills",
"core",
"core/auth",
"environment",
"hooks",
"secrets",
@@ -87,6 +89,7 @@ license = "Apache-2.0"
app_test_support = { path = "app-server/tests/common" }
codex-ansi-escape = { path = "ansi-escape" }
codex-api = { path = "codex-api" }
codex-auth = { path = "codex-auth" }
codex-artifacts = { path = "artifacts" }
codex-package-manager = { path = "package-manager" }
codex-app-server = { path = "app-server" }
@@ -104,6 +107,7 @@ codex-cloud-requirements = { path = "cloud-requirements" }
codex-connectors = { path = "connectors" }
codex-config = { path = "config" }
codex-core = { path = "core" }
codex-core-auth = { path = "core/auth" }
codex-environment = { path = "environment" }
codex-exec = { path = "exec" }
codex-execpolicy = { path = "execpolicy" }

View File

@@ -7,11 +7,11 @@ use base64::engine::general_purpose::URL_SAFE_NO_PAD;
use chrono::DateTime;
use chrono::Utc;
use codex_app_server_protocol::AuthMode;
use codex_core::TokenData;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::auth::AuthDotJson;
use codex_core::auth::save_auth;
use codex_core::token_data::TokenData;
use codex_core::token_data::parse_chatgpt_jwt_claims;
use codex_core::parse_chatgpt_jwt_claims;
use serde_json::json;
/// Builder for writing a fake ChatGPT auth.json in tests.

View File

@@ -1,10 +1,10 @@
use codex_core::AuthManager;
use codex_core::TokenData;
use std::path::Path;
use std::sync::LazyLock;
use std::sync::RwLock;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::token_data::TokenData;
static CHATGPT_TOKEN: LazyLock<RwLock<Option<TokenData>>> = LazyLock::new(|| RwLock::new(None));

View File

@@ -1,6 +1,6 @@
use codex_core::AuthManager;
use codex_core::TokenData;
use codex_core::config::Config;
use codex_core::token_data::TokenData;
use std::collections::HashSet;
use std::time::Duration;

View File

@@ -0,0 +1,6 @@
load("//:defs.bzl", "codex_rust_crate")
codex_rust_crate(
name = "codex-auth",
crate_name = "codex_auth",
)

View File

@@ -0,0 +1,23 @@
# Manifest for codex-auth: foundational provider and token-data types
# extracted from codex-core. Version/edition/license are inherited from
# the workspace root.
[package]
name = "codex-auth"
version.workspace = true
edition.workspace = true
license.workspace = true

[lints]
workspace = true

[dependencies]
base64 = { workspace = true }
codex-api = { workspace = true }
codex-app-server-protocol = { workspace = true }
http = { workspace = true }
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
thiserror = { workspace = true }

# Used only by the moved unit tests (model_provider_info_tests, token_data_tests).
[dev-dependencies]
maplit = { workspace = true }
pretty_assertions = { workspace = true }
toml = { workspace = true }

View File

@@ -0,0 +1,15 @@
/// Error returned when a provider requires an environment variable (its API
/// key, per `ModelProviderInfo::env_key`) that is missing or empty.
#[derive(Debug)]
pub struct EnvVarError {
    /// Name of the environment variable that is missing.
    pub var: String,
    /// Optional instructions to help the user get a valid value for the
    /// variable and set it.
    pub instructions: Option<String>,
}

impl std::fmt::Display for EnvVarError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Missing environment variable: `{}`.", self.var)?;
        if let Some(instructions) = &self.instructions {
            write!(f, " {instructions}")?;
        }
        Ok(())
    }
}

// Implement the standard error trait so this type can cross `Box<dyn Error>`
// / `anyhow` boundaries; it was only `Debug + Display` after the extraction.
impl std::error::Error for EnvVarError {}

View File

@@ -0,0 +1,19 @@
//! Foundational auth-related types extracted from `codex-core`: model
//! provider definitions (`provider`), ChatGPT token data (`token_data`),
//! and the `EnvVarError` error type.
pub mod error;
pub mod provider;
pub mod token_data;

// Unit tests moved alongside the code during the crate extraction.
#[cfg(test)]
mod model_provider_info_tests;
#[cfg(test)]
mod token_data_tests;

// Flat re-exports so downstream crates can write `codex_auth::X`.
pub use error::EnvVarError;
pub use provider::DEFAULT_LMSTUDIO_PORT;
pub use provider::DEFAULT_OLLAMA_PORT;
pub use provider::LMSTUDIO_OSS_PROVIDER_ID;
pub use provider::ModelProviderInfo;
pub use provider::OLLAMA_OSS_PROVIDER_ID;
pub use provider::OPENAI_PROVIDER_ID;
pub use provider::WireApi;
pub use provider::built_in_model_providers;
pub use provider::create_oss_provider_with_base_url;

View File

@@ -1,4 +1,4 @@
use super::*;
use super::provider::*;
use pretty_assertions::assert_eq;
#[test]

View File

@@ -0,0 +1,291 @@
use crate::error::EnvVarError;
use codex_api::Provider as ApiProvider;
use codex_api::provider::RetryConfig as ApiRetryConfig;
use codex_app_server_protocol::AuthMode as ApiAuthMode;
use http::HeaderMap;
use http::header::HeaderName;
use http::header::HeaderValue;
use schemars::JsonSchema;
use serde::Deserialize;
use serde::Serialize;
use std::collections::HashMap;
use std::fmt;
use std::time::Duration;
// Default retry/timeout knobs. Per-provider overrides from config are capped
// by the MAX_* constants (see `request_max_retries` / `stream_max_retries`).
const DEFAULT_STREAM_IDLE_TIMEOUT_MS: u64 = 300_000;
const DEFAULT_STREAM_MAX_RETRIES: u64 = 5;
const DEFAULT_REQUEST_MAX_RETRIES: u64 = 4;
pub const DEFAULT_WEBSOCKET_CONNECT_TIMEOUT_MS: u64 = 15_000;
const MAX_STREAM_MAX_RETRIES: u64 = 100;
const MAX_REQUEST_MAX_RETRIES: u64 = 100;

// Display name vs. config id for the built-in OpenAI provider; `is_openai`
// matches on the name, `built_in_model_providers` keys on the id.
const OPENAI_PROVIDER_NAME: &str = "OpenAI";
pub const OPENAI_PROVIDER_ID: &str = "openai";

// Migration messages for configuration options that have been removed.
pub const CHAT_WIRE_API_REMOVED_ERROR: &str = "`wire_api = \"chat\"` is no longer supported.\nHow to fix: set `wire_api = \"responses\"` in your provider config.\nMore info: https://github.com/openai/codex/discussions/7782";
pub const LEGACY_OLLAMA_CHAT_PROVIDER_ID: &str = "ollama-chat";
pub const OLLAMA_CHAT_PROVIDER_REMOVED_ERROR: &str = "`ollama-chat` is no longer supported.\nHow to fix: replace `ollama-chat` with `ollama` in `model_provider`, `oss_provider`, or `--local-provider`.\nMore info: https://github.com/openai/codex/discussions/7782";
/// Wire protocol a provider speaks. Only the Responses API remains;
/// the removed `"chat"` value is rejected at deserialization time with a
/// migration message (see `CHAT_WIRE_API_REMOVED_ERROR`).
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, JsonSchema)]
#[serde(rename_all = "lowercase")]
#[schemars(rename = "WireApi")]
pub enum WireApi {
    #[default]
    Responses,
}

impl fmt::Display for WireApi {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let value = match self {
            Self::Responses => "responses",
        };
        f.write_str(value)
    }
}

// Hand-written `Deserialize` (instead of the derive) so that `"chat"` yields
// a helpful removal/migration error rather than a generic unknown-variant one.
impl<'de> Deserialize<'de> for WireApi {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let value = String::deserialize(deserializer)?;
        match value.as_str() {
            "responses" => Ok(Self::Responses),
            "chat" => Err(serde::de::Error::custom(CHAT_WIRE_API_REMOVED_ERROR)),
            _ => Err(serde::de::Error::unknown_variant(&value, &["responses"])),
        }
    }
}
/// Configuration-level description of a model provider: the built-in entries
/// produced by `built_in_model_providers` plus user-defined ones. Converted
/// to the transport-level `codex_api::Provider` via `to_api_provider`.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, JsonSchema)]
#[schemars(deny_unknown_fields)]
#[schemars(rename = "ModelProviderInfo")]
pub struct ModelProviderInfo {
    /// Human-readable provider name (e.g. "OpenAI"); `is_openai` matches on it.
    pub name: String,
    /// Base URL for requests; when `None`, `to_api_provider` picks a default
    /// based on the auth mode.
    pub base_url: Option<String>,
    /// Environment variable holding the provider API key, if one is required;
    /// a missing/empty value makes `api_key()` fail with `EnvVarError`.
    pub env_key: Option<String>,
    /// Extra guidance included in the `EnvVarError` message when `env_key`
    /// is not set.
    pub env_key_instructions: Option<String>,
    pub experimental_bearer_token: Option<String>,
    /// Wire protocol; defaults to `responses` (the only supported value).
    #[serde(default)]
    pub wire_api: WireApi,
    /// Extra query parameters appended to every request.
    pub query_params: Option<HashMap<String, String>>,
    /// Static HTTP headers added to every request.
    pub http_headers: Option<HashMap<String, String>>,
    /// HTTP headers whose values are read from the named environment
    /// variables at request-build time (empty/unset vars are skipped).
    pub env_http_headers: Option<HashMap<String, String>>,
    /// Per-provider override for request retry attempts (capped at 100).
    pub request_max_retries: Option<u64>,
    /// Per-provider override for stream retry attempts (capped at 100).
    pub stream_max_retries: Option<u64>,
    /// Idle timeout (ms) for streaming responses; defaults to 5 minutes.
    pub stream_idle_timeout_ms: Option<u64>,
    #[schemars(
        description = "Maximum time (in milliseconds) to wait for a websocket connection attempt before treating it as failed."
    )]
    pub websocket_connect_timeout_ms: Option<u64>,
    /// Whether this provider uses OpenAI auth (true for the built-in
    /// OpenAI provider; see `create_openai_provider`).
    #[serde(default)]
    pub requires_openai_auth: bool,
    #[serde(default)]
    pub supports_websockets: bool,
}
impl ModelProviderInfo {
    /// Merge the static `http_headers` and the environment-derived
    /// `env_http_headers` into one `HeaderMap`. Invalid header names/values
    /// and unset or blank environment variables are silently skipped.
    fn build_header_map(&self) -> HeaderMap {
        let capacity = self.http_headers.as_ref().map_or(0, HashMap::len)
            + self.env_http_headers.as_ref().map_or(0, HashMap::len);
        let mut headers = HeaderMap::with_capacity(capacity);
        if let Some(extra) = &self.http_headers {
            for (k, v) in extra {
                if let (Ok(name), Ok(value)) = (HeaderName::try_from(k), HeaderValue::try_from(v)) {
                    headers.insert(name, value);
                }
            }
        }
        if let Some(env_headers) = &self.env_http_headers {
            for (header, env_var) in env_headers {
                // Forward only when the env var is set, non-blank, and both
                // the header name and value are valid HTTP tokens.
                if let Ok(val) = std::env::var(env_var)
                    && !val.trim().is_empty()
                    && let (Ok(name), Ok(value)) =
                        (HeaderName::try_from(header), HeaderValue::try_from(val))
                {
                    headers.insert(name, value);
                }
            }
        }
        headers
    }

    /// Convert this config-level provider into the transport-level
    /// `codex_api::Provider`. When `base_url` is unset, ChatGPT-style auth
    /// modes default to the ChatGPT backend URL, everything else to the
    /// OpenAI API URL.
    ///
    /// NOTE(review): the body never returns `Err`; the `Result` appears to be
    /// kept for interface stability — confirm before relying on failures here.
    pub fn to_api_provider(
        &self,
        auth_mode: Option<ApiAuthMode>,
    ) -> Result<ApiProvider, EnvVarError> {
        let default_base_url = if matches!(
            auth_mode,
            Some(ApiAuthMode::Chatgpt | ApiAuthMode::ChatgptAuthTokens)
        ) {
            "https://chatgpt.com/backend-api/codex"
        } else {
            "https://api.openai.com/v1"
        };
        let base_url = self
            .base_url
            .clone()
            .unwrap_or_else(|| default_base_url.to_string());
        let retry = ApiRetryConfig {
            max_attempts: self.request_max_retries(),
            base_delay: Duration::from_millis(200),
            retry_429: false,
            retry_5xx: true,
            retry_transport: true,
        };
        Ok(ApiProvider {
            name: self.name.clone(),
            base_url,
            query_params: self.query_params.clone(),
            headers: self.build_header_map(),
            retry,
            stream_idle_timeout: self.stream_idle_timeout(),
        })
    }

    /// Read the provider API key from the `env_key` environment variable.
    /// Returns `Ok(None)` when the provider needs no key, and `EnvVarError`
    /// (carrying `env_key_instructions`) when the variable is unset or blank.
    pub fn api_key(&self) -> Result<Option<String>, EnvVarError> {
        match &self.env_key {
            Some(env_key) => {
                let api_key = std::env::var(env_key)
                    .ok()
                    .filter(|v| !v.trim().is_empty())
                    .ok_or_else(|| EnvVarError {
                        var: env_key.clone(),
                        instructions: self.env_key_instructions.clone(),
                    })?;
                Ok(Some(api_key))
            }
            None => Ok(None),
        }
    }

    /// Effective request retry count: configured value or default, capped.
    pub fn request_max_retries(&self) -> u64 {
        self.request_max_retries
            .unwrap_or(DEFAULT_REQUEST_MAX_RETRIES)
            .min(MAX_REQUEST_MAX_RETRIES)
    }

    /// Effective stream retry count: configured value or default, capped.
    pub fn stream_max_retries(&self) -> u64 {
        self.stream_max_retries
            .unwrap_or(DEFAULT_STREAM_MAX_RETRIES)
            .min(MAX_STREAM_MAX_RETRIES)
    }

    /// Idle timeout for streaming responses (default 300s).
    pub fn stream_idle_timeout(&self) -> Duration {
        self.stream_idle_timeout_ms
            .map(Duration::from_millis)
            .unwrap_or(Duration::from_millis(DEFAULT_STREAM_IDLE_TIMEOUT_MS))
    }

    /// Timeout for a single websocket connection attempt (default 15s).
    pub fn websocket_connect_timeout(&self) -> Duration {
        self.websocket_connect_timeout_ms
            .map(Duration::from_millis)
            .unwrap_or(Duration::from_millis(DEFAULT_WEBSOCKET_CONNECT_TIMEOUT_MS))
    }

    /// Construct the built-in OpenAI provider entry. Sends the crate version
    /// as a `version` header and forwards OpenAI org/project identifiers from
    /// the environment; retry/timeout fields use the global defaults.
    pub fn create_openai_provider(base_url: Option<String>) -> ModelProviderInfo {
        ModelProviderInfo {
            name: OPENAI_PROVIDER_NAME.into(),
            base_url,
            env_key: None,
            env_key_instructions: None,
            experimental_bearer_token: None,
            wire_api: WireApi::Responses,
            query_params: None,
            http_headers: Some(
                [("version".to_string(), env!("CARGO_PKG_VERSION").to_string())]
                    .into_iter()
                    .collect(),
            ),
            env_http_headers: Some(
                [
                    (
                        "OpenAI-Organization".to_string(),
                        "OPENAI_ORGANIZATION".to_string(),
                    ),
                    ("OpenAI-Project".to_string(), "OPENAI_PROJECT".to_string()),
                ]
                .into_iter()
                .collect(),
            ),
            request_max_retries: None,
            stream_max_retries: None,
            stream_idle_timeout_ms: None,
            websocket_connect_timeout_ms: None,
            requires_openai_auth: true,
            supports_websockets: true,
        }
    }

    /// True when this provider's display name is "OpenAI" (compares the
    /// `name` field, not the `openai` config id).
    pub fn is_openai(&self) -> bool {
        self.name == OPENAI_PROVIDER_NAME
    }
}
// Default localhost ports and config ids for the local OSS providers;
// the ports can be overridden at runtime via CODEX_OSS_PORT.
pub const DEFAULT_LMSTUDIO_PORT: u16 = 1234;
pub const DEFAULT_OLLAMA_PORT: u16 = 11434;
pub const LMSTUDIO_OSS_PROVIDER_ID: &str = "lmstudio";
pub const OLLAMA_OSS_PROVIDER_ID: &str = "ollama";
pub fn built_in_model_providers(
openai_base_url: Option<String>,
) -> HashMap<String, ModelProviderInfo> {
use ModelProviderInfo as P;
let openai_provider = P::create_openai_provider(openai_base_url);
[
(OPENAI_PROVIDER_ID, openai_provider),
(
OLLAMA_OSS_PROVIDER_ID,
create_oss_provider(DEFAULT_OLLAMA_PORT, WireApi::Responses),
),
(
LMSTUDIO_OSS_PROVIDER_ID,
create_oss_provider(DEFAULT_LMSTUDIO_PORT, WireApi::Responses),
),
]
.into_iter()
.map(|(k, v)| (k.to_string(), v))
.collect()
}
/// Build the local OSS ("gpt-oss") provider. The base URL is, in order of
/// precedence: CODEX_OSS_BASE_URL (if set and non-blank), else
/// `http://localhost:<CODEX_OSS_PORT or default_provider_port>/v1`.
pub fn create_oss_provider(default_provider_port: u16, wire_api: WireApi) -> ModelProviderInfo {
    // Port override: CODEX_OSS_PORT must be non-blank and parse as u16,
    // otherwise fall back to the caller-supplied default.
    let port = std::env::var("CODEX_OSS_PORT")
        .ok()
        .filter(|value| !value.trim().is_empty())
        .and_then(|value| value.parse::<u16>().ok())
        .unwrap_or(default_provider_port);
    let fallback_url = format!("http://localhost:{port}/v1");
    // Full-URL override: CODEX_OSS_BASE_URL wins when set and non-blank.
    let base_url = match std::env::var("CODEX_OSS_BASE_URL") {
        Ok(value) if !value.trim().is_empty() => value,
        _ => fallback_url,
    };
    create_oss_provider_with_base_url(&base_url, wire_api)
}
/// Build a "gpt-oss" provider pinned to `base_url`: no API key, no extra
/// headers or query params, global retry/timeout defaults, no OpenAI auth,
/// and no websocket support.
pub fn create_oss_provider_with_base_url(base_url: &str, wire_api: WireApi) -> ModelProviderInfo {
    ModelProviderInfo {
        name: String::from("gpt-oss"),
        base_url: Some(base_url.to_string()),
        wire_api,
        env_key: None,
        env_key_instructions: None,
        experimental_bearer_token: None,
        query_params: None,
        http_headers: None,
        env_http_headers: None,
        request_max_retries: None,
        stream_max_retries: None,
        stream_idle_timeout_ms: None,
        websocket_connect_timeout_ms: None,
        requires_openai_auth: false,
        supports_websockets: false,
    }
}

View File

@@ -5,32 +5,21 @@ use thiserror::Error;
#[derive(Deserialize, Serialize, Clone, Debug, PartialEq, Default)]
pub struct TokenData {
/// Flat info parsed from the JWT in auth.json.
#[serde(
deserialize_with = "deserialize_id_token",
serialize_with = "serialize_id_token"
)]
pub id_token: IdTokenInfo,
/// This is a JWT.
pub access_token: String,
pub refresh_token: String,
pub account_id: Option<String>,
}
/// Flat subset of useful claims in id_token from auth.json.
#[derive(Debug, Clone, PartialEq, Eq, Default, Serialize, Deserialize)]
pub struct IdTokenInfo {
pub email: Option<String>,
/// The ChatGPT subscription plan type
/// (e.g., "free", "plus", "pro", "business", "enterprise", "edu").
/// (Note: values may vary by backend.)
pub(crate) chatgpt_plan_type: Option<PlanType>,
/// ChatGPT user identifier associated with the token, if present.
pub chatgpt_plan_type: Option<PlanType>,
pub chatgpt_user_id: Option<String>,
/// Organization/workspace identifier associated with the token, if present.
pub chatgpt_account_id: Option<String>,
pub raw_jwt: String,
}
@@ -55,13 +44,13 @@ impl IdTokenInfo {
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(untagged)]
pub(crate) enum PlanType {
pub enum PlanType {
Known(KnownPlan),
Unknown(String),
}
impl PlanType {
pub(crate) fn from_raw_value(raw: &str) -> Self {
pub fn from_raw_value(raw: &str) -> Self {
match raw.to_ascii_lowercase().as_str() {
"free" => Self::Known(KnownPlan::Free),
"go" => Self::Known(KnownPlan::Go),
@@ -78,7 +67,7 @@ impl PlanType {
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub(crate) enum KnownPlan {
pub enum KnownPlan {
Free,
Go,
Plus,
@@ -128,7 +117,6 @@ pub enum IdTokenInfoError {
}
pub fn parse_chatgpt_jwt_claims(jwt: &str) -> Result<IdTokenInfo, IdTokenInfoError> {
// JWT format: header.payload.signature
let mut parts = jwt.split('.');
let (_header_b64, payload_b64, _sig_b64) = match (parts.next(), parts.next(), parts.next()) {
(Some(h), Some(p), Some(s)) if !h.is_empty() && !p.is_empty() && !s.is_empty() => (h, p, s),
@@ -173,7 +161,3 @@ where
{
serializer.serialize_str(&id_token.raw_jwt)
}
#[cfg(test)]
#[path = "token_data_tests.rs"]
mod tests;

View File

@@ -1,4 +1,5 @@
use super::*;
use super::token_data::*;
use base64::Engine;
use pretty_assertions::assert_eq;
use serde::Serialize;

View File

@@ -28,12 +28,14 @@ chardetng = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
clap = { workspace = true, features = ["derive"] }
codex-api = { workspace = true }
codex-auth = { workspace = true }
codex-app-server-protocol = { workspace = true }
codex-apply-patch = { workspace = true }
codex-async-utils = { workspace = true }
codex-client = { workspace = true }
codex-connectors = { workspace = true }
codex-config = { workspace = true }
codex-core-auth = { workspace = true }
codex-environment = { workspace = true }
codex-shell-command = { workspace = true }
codex-skills = { workspace = true }

View File

@@ -0,0 +1,29 @@
# Manifest for codex-core-auth: the auth.json storage backends
# (file / keyring / auto / ephemeral) split out of codex-core.
[package]
name = "codex-core-auth"
version.workspace = true
edition.workspace = true
license.workspace = true

[lints]
workspace = true

[dependencies]
chrono = { workspace = true, features = ["serde"] }
codex-app-server-protocol = { workspace = true }
codex-auth = { workspace = true }
codex-keyring-store = { workspace = true }
once_cell = { workspace = true }
schemars = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
sha2 = { workspace = true }
tracing = { workspace = true }

# Test-only: keyring mocking, async tests, temp CODEX_HOME dirs.
[dev-dependencies]
anyhow = { workspace = true }
base64 = { workspace = true }
keyring = { workspace = true }
pretty_assertions = { workspace = true }
serde = { workspace = true, features = ["derive"] }
tempfile = { workspace = true }
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }

View File

@@ -0,0 +1,14 @@
//! Auth storage backend for Codex CLI credentials.
//!
//! This crate provides the storage layer for auth.json (file, keyring, auto, ephemeral)
//! and the AuthDotJson / AuthCredentialsStoreMode types. The higher-level auth logic
//! (CodexAuth, AuthManager, token refresh) lives in codex-core.
pub mod storage;

// Public surface re-exported at the crate root for downstream convenience.
pub use storage::AuthCredentialsStoreMode;
pub use storage::AuthDotJson;
pub use storage::AuthStorageBackend;
pub use storage::FileAuthStorage;
pub use storage::create_auth_storage;
pub use storage::get_auth_file;

View File

@@ -19,8 +19,10 @@ use std::sync::Arc;
use std::sync::Mutex;
use tracing::warn;
use crate::token_data::TokenData;
use codex_app_server_protocol::AuthMode;
use codex_auth::token_data::PlanType;
use codex_auth::token_data::TokenData;
use codex_auth::token_data::parse_chatgpt_jwt_claims;
use codex_keyring_store::DefaultKeyringStore;
use codex_keyring_store::KeyringStore;
use once_cell::sync::Lazy;
@@ -56,7 +58,57 @@ pub struct AuthDotJson {
pub last_refresh: Option<DateTime<Utc>>,
}
pub(super) fn get_auth_file(codex_home: &Path) -> PathBuf {
impl AuthDotJson {
    /// Build an auth.json payload from an externally supplied ChatGPT access
    /// token (ChatgptAuthTokens mode). JWT claims are parsed from
    /// `access_token`; the caller-provided account id always wins, the
    /// caller-provided plan type overrides the token's claim, and the plan
    /// falls back to `Unknown("unknown")` when neither is present. The
    /// refresh token is intentionally left empty — external tokens are not
    /// refreshable through this path.
    pub fn from_external_access_token(
        access_token: &str,
        chatgpt_account_id: &str,
        chatgpt_plan_type: Option<&str>,
    ) -> std::io::Result<Self> {
        let mut token_info =
            parse_chatgpt_jwt_claims(access_token).map_err(std::io::Error::other)?;
        token_info.chatgpt_account_id = Some(chatgpt_account_id.to_string());
        token_info.chatgpt_plan_type = chatgpt_plan_type
            .map(PlanType::from_raw_value)
            .or(token_info.chatgpt_plan_type)
            .or(Some(PlanType::Unknown("unknown".to_string())));
        let tokens = TokenData {
            id_token: token_info,
            access_token: access_token.to_string(),
            refresh_token: String::new(),
            account_id: Some(chatgpt_account_id.to_string()),
        };
        Ok(Self {
            auth_mode: Some(AuthMode::ChatgptAuthTokens),
            openai_api_key: None,
            tokens: Some(tokens),
            last_refresh: Some(Utc::now()),
        })
    }

    /// Effective auth mode: an explicit `auth_mode` wins; otherwise a stored
    /// API key implies `ApiKey`, and anything else defaults to `Chatgpt`.
    pub fn resolved_mode(&self) -> AuthMode {
        if let Some(mode) = self.auth_mode {
            return mode;
        }
        if self.openai_api_key.is_some() {
            return AuthMode::ApiKey;
        }
        AuthMode::Chatgpt
    }

    /// Storage backend to use for these credentials. Externally injected
    /// tokens (ChatgptAuthTokens) are never persisted, so they force
    /// `Ephemeral`; otherwise the configured mode is honored.
    pub fn storage_mode(
        &self,
        auth_credentials_store_mode: AuthCredentialsStoreMode,
    ) -> AuthCredentialsStoreMode {
        if self.resolved_mode() == AuthMode::ChatgptAuthTokens {
            AuthCredentialsStoreMode::Ephemeral
        } else {
            auth_credentials_store_mode
        }
    }
}
/// Path of the credentials file inside the given `CODEX_HOME` directory.
pub fn get_auth_file(codex_home: &Path) -> PathBuf {
    codex_home.join("auth.json")
}
@@ -69,25 +121,25 @@ pub(super) fn delete_file_if_exists(codex_home: &Path) -> std::io::Result<bool>
}
}
pub(super) trait AuthStorageBackend: Debug + Send + Sync {
pub trait AuthStorageBackend: Debug + Send + Sync {
fn load(&self) -> std::io::Result<Option<AuthDotJson>>;
fn save(&self, auth: &AuthDotJson) -> std::io::Result<()>;
fn delete(&self) -> std::io::Result<bool>;
}
#[derive(Clone, Debug)]
pub(super) struct FileAuthStorage {
pub struct FileAuthStorage {
codex_home: PathBuf,
}
impl FileAuthStorage {
pub(super) fn new(codex_home: PathBuf) -> Self {
pub fn new(codex_home: PathBuf) -> Self {
Self { codex_home }
}
/// Attempt to read and parse the `auth.json` file in the given `CODEX_HOME` directory.
/// Returns the full AuthDotJson structure.
pub(super) fn try_read_auth_json(&self, auth_file: &Path) -> std::io::Result<AuthDotJson> {
pub fn try_read_auth_json(&self, auth_file: &Path) -> std::io::Result<AuthDotJson> {
let mut file = File::open(auth_file)?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
@@ -135,7 +187,7 @@ impl AuthStorageBackend for FileAuthStorage {
const KEYRING_SERVICE: &str = "Codex Auth";
// turns codex_home path into a stable, short key string
fn compute_store_key(codex_home: &Path) -> std::io::Result<String> {
pub(crate) fn compute_store_key(codex_home: &Path) -> std::io::Result<String> {
let canonical = codex_home
.canonicalize()
.unwrap_or_else(|_| codex_home.to_path_buf());
@@ -149,13 +201,13 @@ fn compute_store_key(codex_home: &Path) -> std::io::Result<String> {
}
#[derive(Clone, Debug)]
struct KeyringAuthStorage {
codex_home: PathBuf,
keyring_store: Arc<dyn KeyringStore>,
pub(crate) struct KeyringAuthStorage {
pub(crate) codex_home: PathBuf,
pub(crate) keyring_store: Arc<dyn KeyringStore>,
}
impl KeyringAuthStorage {
fn new(codex_home: PathBuf, keyring_store: Arc<dyn KeyringStore>) -> Self {
pub(crate) fn new(codex_home: PathBuf, keyring_store: Arc<dyn KeyringStore>) -> Self {
Self {
codex_home,
keyring_store,
@@ -223,13 +275,13 @@ impl AuthStorageBackend for KeyringAuthStorage {
}
#[derive(Clone, Debug)]
struct AutoAuthStorage {
keyring_storage: Arc<KeyringAuthStorage>,
file_storage: Arc<FileAuthStorage>,
pub(crate) struct AutoAuthStorage {
pub(crate) keyring_storage: Arc<KeyringAuthStorage>,
pub(crate) file_storage: Arc<FileAuthStorage>,
}
impl AutoAuthStorage {
fn new(codex_home: PathBuf, keyring_store: Arc<dyn KeyringStore>) -> Self {
pub(crate) fn new(codex_home: PathBuf, keyring_store: Arc<dyn KeyringStore>) -> Self {
Self {
keyring_storage: Arc::new(KeyringAuthStorage::new(codex_home.clone(), keyring_store)),
file_storage: Arc::new(FileAuthStorage::new(codex_home)),
@@ -308,7 +360,7 @@ impl AuthStorageBackend for EphemeralAuthStorage {
}
}
pub(super) fn create_auth_storage(
pub fn create_auth_storage(
codex_home: PathBuf,
mode: AuthCredentialsStoreMode,
) -> Arc<dyn AuthStorageBackend> {

View File

@@ -1,13 +1,15 @@
use super::*;
use crate::token_data::IdTokenInfo;
use anyhow::Context;
use base64::Engine;
use pretty_assertions::assert_eq;
use serde_json::json;
use tempfile::tempdir;
use codex_auth::token_data::IdTokenInfo;
use codex_auth::token_data::TokenData;
use codex_auth::token_data::parse_chatgpt_jwt_claims;
use codex_keyring_store::tests::MockKeyringStore;
use keyring::Error as KeyringError;
use pretty_assertions::assert_eq;
use serde::Serialize;
use serde_json::json;
use tempfile::tempdir;
#[tokio::test]
async fn file_storage_load_returns_auth_dot_json() -> anyhow::Result<()> {
@@ -167,7 +169,7 @@ fn id_token_with_prefix(prefix: &str) -> IdTokenInfo {
let signature_b64 = encode(b"sig");
let fake_jwt = format!("{header_b64}.{payload_b64}.{signature_b64}");
crate::token_data::parse_chatgpt_jwt_claims(&fake_jwt).expect("fake JWT should parse")
parse_chatgpt_jwt_claims(&fake_jwt).expect("fake JWT should parse")
}
fn auth_with_prefix(prefix: &str) -> AuthDotJson {

View File

@@ -10,13 +10,13 @@ use http::HeaderMap;
use serde::Deserialize;
use serde_json::Value;
use crate::PlanType;
use crate::auth::CodexAuth;
use crate::error::CodexErr;
use crate::error::RetryLimitReachedError;
use crate::error::UnexpectedResponseError;
use crate::error::UsageLimitReachedError;
use crate::model_provider_info::ModelProviderInfo;
use crate::token_data::PlanType;
pub(crate) fn map_api_error(err: ApiError) -> CodexErr {
match err {

View File

@@ -1,5 +1,3 @@
mod storage;
use async_trait::async_trait;
use chrono::Utc;
use reqwest::StatusCode;
@@ -19,17 +17,19 @@ use codex_app_server_protocol::AuthMode as ApiAuthMode;
use codex_otel::TelemetryAuthMode;
use codex_protocol::config_types::ForcedLoginMethod;
pub use crate::auth::storage::AuthCredentialsStoreMode;
pub use crate::auth::storage::AuthDotJson;
use crate::auth::storage::AuthStorageBackend;
use crate::auth::storage::create_auth_storage;
use codex_core_auth::AuthStorageBackend;
use codex_core_auth::create_auth_storage;
pub use codex_core_auth::AuthCredentialsStoreMode;
pub use codex_core_auth::AuthDotJson;
use crate::KnownPlan as InternalKnownPlan;
use crate::PlanType as InternalPlanType;
use crate::TokenData;
use crate::config::Config;
use crate::error::RefreshTokenFailedError;
use crate::error::RefreshTokenFailedReason;
use crate::token_data::KnownPlan as InternalKnownPlan;
use crate::token_data::PlanType as InternalPlanType;
use crate::token_data::TokenData;
use crate::token_data::parse_chatgpt_jwt_claims;
use crate::parse_chatgpt_jwt_claims;
use crate::util::try_parse_error_message;
use codex_client::CodexHttpClient;
use codex_protocol::account::PlanType as AccountPlanType;
@@ -752,67 +752,6 @@ fn refresh_token_endpoint() -> String {
.unwrap_or_else(|_| REFRESH_TOKEN_URL.to_string())
}
impl AuthDotJson {
fn from_external_tokens(external: &ExternalAuthTokens) -> std::io::Result<Self> {
let mut token_info =
parse_chatgpt_jwt_claims(&external.access_token).map_err(std::io::Error::other)?;
token_info.chatgpt_account_id = Some(external.chatgpt_account_id.clone());
token_info.chatgpt_plan_type = external
.chatgpt_plan_type
.as_deref()
.map(InternalPlanType::from_raw_value)
.or(token_info.chatgpt_plan_type)
.or(Some(InternalPlanType::Unknown("unknown".to_string())));
let tokens = TokenData {
id_token: token_info,
access_token: external.access_token.clone(),
refresh_token: String::new(),
account_id: Some(external.chatgpt_account_id.clone()),
};
Ok(Self {
auth_mode: Some(ApiAuthMode::ChatgptAuthTokens),
openai_api_key: None,
tokens: Some(tokens),
last_refresh: Some(Utc::now()),
})
}
fn from_external_access_token(
access_token: &str,
chatgpt_account_id: &str,
chatgpt_plan_type: Option<&str>,
) -> std::io::Result<Self> {
let external = ExternalAuthTokens {
access_token: access_token.to_string(),
chatgpt_account_id: chatgpt_account_id.to_string(),
chatgpt_plan_type: chatgpt_plan_type.map(str::to_string),
};
Self::from_external_tokens(&external)
}
fn resolved_mode(&self) -> ApiAuthMode {
if let Some(mode) = self.auth_mode {
return mode;
}
if self.openai_api_key.is_some() {
return ApiAuthMode::ApiKey;
}
ApiAuthMode::Chatgpt
}
fn storage_mode(
&self,
auth_credentials_store_mode: AuthCredentialsStoreMode,
) -> AuthCredentialsStoreMode {
if self.resolved_mode() == ApiAuthMode::ChatgptAuthTokens {
AuthCredentialsStoreMode::Ephemeral
} else {
auth_credentials_store_mode
}
}
}
/// Internal cached auth state.
#[derive(Clone)]
struct CachedAuth {
@@ -1412,8 +1351,12 @@ impl AuthManager {
),
)));
}
let auth_dot_json =
AuthDotJson::from_external_tokens(&refreshed).map_err(RefreshTokenError::Transient)?;
let auth_dot_json = AuthDotJson::from_external_access_token(
&refreshed.access_token,
&refreshed.chatgpt_account_id,
refreshed.chatgpt_plan_type.as_deref(),
)
.map_err(RefreshTokenError::Transient)?;
save_auth(
&self.codex_home,
&auth_dot_json,

View File

@@ -1,11 +1,12 @@
use super::*;
use crate::auth::storage::FileAuthStorage;
use crate::auth::storage::get_auth_file;
use crate::IdTokenInfo;
use crate::KnownPlan as InternalKnownPlan;
use crate::PlanType as InternalPlanType;
use crate::TokenData;
use crate::config::Config;
use crate::config::ConfigBuilder;
use crate::token_data::IdTokenInfo;
use crate::token_data::KnownPlan as InternalKnownPlan;
use crate::token_data::PlanType as InternalPlanType;
use codex_core_auth::FileAuthStorage;
use codex_core_auth::get_auth_file;
use codex_protocol::account::PlanType as AccountPlanType;
use base64::Engine;

View File

@@ -528,7 +528,7 @@ impl ModelClient {
let api_provider = self
.state
.provider
.to_api_provider(auth.as_ref().map(CodexAuth::auth_mode))?;
.to_api_provider(auth.as_ref().map(CodexAuth::api_auth_mode))?;
let api_auth = auth_provider_from_auth(auth.clone(), &self.state.provider)?;
Ok(CurrentClientSetup {
auth,

View File

@@ -25,6 +25,7 @@ use tracing::warn;
use crate::AuthManager;
use crate::CodexAuth;
use crate::SandboxState;
use crate::TokenData;
use crate::config::Config;
use crate::config::types::AppToolApproval;
use crate::config::types::AppsConfigToml;
@@ -44,7 +45,6 @@ use crate::mcp_connection_manager::codex_apps_tools_cache_key;
use crate::plugins::AppConnectorId;
use crate::plugins::PluginsManager;
use crate::plugins::list_tool_suggest_discoverable_plugins;
use crate::token_data::TokenData;
use crate::tools::discoverable::DiscoverablePluginInfo;
use crate::tools::discoverable::DiscoverableTool;

View File

@@ -1,7 +1,7 @@
use crate::KnownPlan;
use crate::PlanType;
use crate::exec::ExecToolCallOutput;
use crate::network_policy_decision::NetworkPolicyDecisionPayload;
use crate::token_data::KnownPlan;
use crate::token_data::PlanType;
use crate::truncate::TruncationPolicy;
use crate::truncate::truncate_text;
use chrono::DateTime;
@@ -9,6 +9,7 @@ use chrono::Datelike;
use chrono::Local;
use chrono::Utc;
use codex_async_utils::CancelErr;
pub use codex_auth::EnvVarError;
use codex_protocol::ThreadId;
use codex_protocol::protocol::CodexErrorInfo;
use codex_protocol::protocol::ErrorEvent;
@@ -191,6 +192,12 @@ impl From<CancelErr> for CodexErr {
}
}
// Lets `?` convert the codex-auth `EnvVarError` (now re-exported from this
// module) into `CodexErr` at call sites that previously used the local type.
impl From<EnvVarError> for CodexErr {
    fn from(error: EnvVarError) -> Self {
        Self::EnvVar(error)
    }
}
impl CodexErr {
pub fn is_retryable(&self) -> bool {
match self {
@@ -551,26 +558,6 @@ fn now_for_retry() -> DateTime<Utc> {
Utc::now()
}
#[derive(Debug)]
pub struct EnvVarError {
/// Name of the environment variable that is missing.
pub var: String,
/// Optional instructions to help the user get a valid value for the
/// variable and set it.
pub instructions: Option<String>,
}
impl std::fmt::Display for EnvVarError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "Missing environment variable: `{}`.", self.var)?;
if let Some(instructions) = &self.instructions {
write!(f, " {instructions}")?;
}
Ok(())
}
}
impl CodexErr {
/// Minimal shim so that existing `e.downcast_ref::<CodexErr>()` checks continue to compile
/// after replacing `anyhow::Error` in the return signature. This mirrors the behavior of

View File

@@ -76,11 +76,16 @@ mod shell_detect;
mod stream_events_utils;
pub mod test_support;
mod text_encoding;
pub mod token_data;
mod truncate;
mod unified_exec;
pub mod windows_sandbox;
pub use client::X_RESPONSESAPI_INCLUDE_TIMING_METRICS_HEADER;
pub use codex_auth::token_data::IdTokenInfo;
pub use codex_auth::token_data::IdTokenInfoError;
pub use codex_auth::token_data::KnownPlan;
pub use codex_auth::token_data::PlanType;
pub use codex_auth::token_data::TokenData;
pub use codex_auth::token_data::parse_chatgpt_jwt_claims;
pub use model_provider_info::DEFAULT_LMSTUDIO_PORT;
pub use model_provider_info::DEFAULT_OLLAMA_PORT;
pub use model_provider_info::LMSTUDIO_OSS_PROVIDER_ID;

View File

@@ -5,10 +5,10 @@
//! 2. User-defined entries inside `~/.codex/config.toml` under the `model_providers`
//! key. These override or extend the defaults at runtime.
use crate::auth::AuthMode;
use crate::error::EnvVarError;
use codex_api::Provider as ApiProvider;
use codex_api::provider::RetryConfig as ApiRetryConfig;
use codex_app_server_protocol::AuthMode as ApiAuthMode;
use http::HeaderMap;
use http::header::HeaderName;
use http::header::HeaderValue;
@@ -159,9 +159,12 @@ impl ModelProviderInfo {
pub(crate) fn to_api_provider(
&self,
auth_mode: Option<AuthMode>,
auth_mode: Option<ApiAuthMode>,
) -> crate::error::Result<ApiProvider> {
let default_base_url = if matches!(auth_mode, Some(AuthMode::Chatgpt)) {
let default_base_url = if matches!(
auth_mode,
Some(ApiAuthMode::Chatgpt | ApiAuthMode::ChatgptAuthTokens)
) {
"https://chatgpt.com/backend-api/codex"
} else {
"https://api.openai.com/v1"
@@ -264,7 +267,6 @@ impl ModelProviderInfo {
.into_iter()
.collect(),
),
// Use global defaults for retry/timeout unless overridden in config.toml.
request_max_retries: None,
stream_max_retries: None,
stream_idle_timeout_ms: None,
@@ -292,10 +294,6 @@ pub fn built_in_model_providers(
use ModelProviderInfo as P;
let openai_provider = P::create_openai_provider(openai_base_url);
// We do not want to be in the business of adjucating which third-party
// providers are bundled with Codex CLI, so we only include the OpenAI and
// open source ("oss") providers by default. Users are encouraged to add to
// `model_providers` in config.toml to add their own providers.
[
(OPENAI_PROVIDER_ID, openai_provider),
(
@@ -313,8 +311,6 @@ pub fn built_in_model_providers(
}
pub fn create_oss_provider(default_provider_port: u16, wire_api: WireApi) -> ModelProviderInfo {
// These CODEX_OSS_ environment variables are experimental: we may
// switch to reading values from config.toml instead.
let default_codex_oss_base_url = format!(
"http://localhost:{codex_oss_port}/v1",
codex_oss_port = std::env::var("CODEX_OSS_PORT")
@@ -350,7 +346,3 @@ pub fn create_oss_provider_with_base_url(base_url: &str, wire_api: WireApi) -> M
supports_websockets: false,
}
}
#[cfg(test)]
#[path = "model_provider_info_tests.rs"]
mod tests;

View File

@@ -433,7 +433,9 @@ impl ModelsManager {
codex_otel::start_global_timer("codex.remote_models.fetch_update.duration_ms", &[]);
let auth = self.auth_manager.auth().await;
let auth_mode = auth.as_ref().map(CodexAuth::auth_mode);
let api_provider = self.provider.to_api_provider(auth_mode)?;
let api_provider = self
.provider
.to_api_provider(auth.as_ref().map(CodexAuth::api_auth_mode))?;
let api_auth = auth_provider_from_auth(auth.clone(), &self.provider)?;
let auth_env = collect_auth_env_telemetry(
&self.provider,

View File

@@ -22,6 +22,7 @@ use codex_api::RealtimeSessionMode;
use codex_api::RealtimeWebsocketClient;
use codex_api::endpoint::realtime_websocket::RealtimeWebsocketEvents;
use codex_api::endpoint::realtime_websocket::RealtimeWebsocketWriter;
use codex_app_server_protocol::AuthMode as ApiAuthMode;
use codex_protocol::protocol::CodexErrorInfo;
use codex_protocol::protocol::ConversationAudioParams;
use codex_protocol::protocol::ConversationStartParams;
@@ -454,7 +455,7 @@ async fn prepare_realtime_start(
let provider = sess.provider().await;
let auth = sess.services.auth_manager.auth().await;
let realtime_api_key = realtime_api_key(auth.as_ref(), &provider)?;
let mut api_provider = provider.to_api_provider(Some(crate::auth::AuthMode::ApiKey))?;
let mut api_provider = provider.to_api_provider(Some(ApiAuthMode::ApiKey))?;
let config = sess.get_config().await;
if let Some(realtime_ws_base_url) = &config.experimental_realtime_ws_base_url {
api_provider.base_url = realtime_ws_base_url.clone();

View File

@@ -5,6 +5,8 @@ use chrono::Duration;
use chrono::Utc;
use codex_app_server_protocol::AuthMode;
use codex_core::AuthManager;
use codex_core::IdTokenInfo;
use codex_core::TokenData;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::auth::AuthDotJson;
use codex_core::auth::REFRESH_TOKEN_URL_OVERRIDE_ENV_VAR;
@@ -12,8 +14,6 @@ use codex_core::auth::RefreshTokenError;
use codex_core::auth::load_auth_dot_json;
use codex_core::auth::save_auth;
use codex_core::error::RefreshTokenFailedReason;
use codex_core::token_data::IdTokenInfo;
use codex_core::token_data::TokenData;
use core_test_support::skip_if_no_network;
use pretty_assertions::assert_eq;
use serde::Serialize;

View File

@@ -16,6 +16,7 @@ pub use server::run_login_server;
pub use codex_app_server_protocol::AuthMode;
pub use codex_core::AuthManager;
pub use codex_core::CodexAuth;
pub use codex_core::TokenData;
pub use codex_core::auth::AuthDotJson;
pub use codex_core::auth::CLIENT_ID;
pub use codex_core::auth::CODEX_API_KEY_ENV_VAR;
@@ -23,4 +24,3 @@ pub use codex_core::auth::OPENAI_API_KEY_ENV_VAR;
pub use codex_core::auth::login_with_api_key;
pub use codex_core::auth::logout;
pub use codex_core::auth::save_auth;
pub use codex_core::token_data::TokenData;

View File

@@ -29,12 +29,12 @@ use base64::Engine;
use chrono::Utc;
use codex_app_server_protocol::AuthMode;
use codex_client::build_reqwest_client_with_custom_ca;
use codex_core::TokenData;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::auth::AuthDotJson;
use codex_core::auth::save_auth;
use codex_core::default_client::originator;
use codex_core::token_data::TokenData;
use codex_core::token_data::parse_chatgpt_jwt_claims;
use codex_core::parse_chatgpt_jwt_claims;
use rand::RngCore;
use serde_json::Value as JsonValue;
use tiny_http::Header;

View File

@@ -2,8 +2,10 @@ use std::path::Path;
use codex_app_server_protocol::AuthMode;
use codex_app_server_protocol::ChatgptAuthTokensRefreshResponse;
use codex_core::TokenData;
use codex_core::auth::AuthCredentialsStoreMode;
use codex_core::auth::load_auth_dot_json;
use codex_core::parse_chatgpt_jwt_claims;
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct LocalChatgptAuth {
@@ -69,10 +71,10 @@ mod tests {
use base64::Engine;
use chrono::Utc;
use codex_app_server_protocol::AuthMode;
use codex_core::TokenData;
use codex_core::auth::AuthDotJson;
use codex_core::auth::login_with_chatgpt_auth_tokens;
use codex_core::auth::save_auth;
use codex_core::token_data::TokenData;
use pretty_assertions::assert_eq;
use serde::Serialize;
use serde_json::json;
@@ -110,8 +112,7 @@ mod tests {
auth_mode: Some(AuthMode::Chatgpt),
openai_api_key: None,
tokens: Some(TokenData {
id_token: codex_core::token_data::parse_chatgpt_jwt_claims(&id_token)
.expect("id token should parse"),
id_token: parse_chatgpt_jwt_claims(&id_token).expect("id token should parse"),
access_token,
refresh_token: "refresh-token".to_string(),
account_id: Some("workspace-1".to_string()),