Compare commits

..

3 Commits

Author      SHA1        Message                              Date
Celia Chen  49710533fe  Merge branch 'main' into dev/cc/exp  2026-03-23 16:11:59 -07:00
Celia Chen  2e30bbbe02  Merge branch 'main' into dev/cc/exp  2026-03-23 14:18:10 -07:00
celia-oai   8e5aeddda4  changes                              2026-03-23 12:35:02 -07:00
16 changed files with 204 additions and 722 deletions

View File

@@ -6,56 +6,11 @@ on:
pull_request: {}
jobs:
changed:
name: Detect changed areas
runs-on: ubuntu-24.04
outputs:
python: ${{ steps.detect.outputs.python }}
typescript: ${{ steps.detect.outputs.typescript }}
workflows: ${{ steps.detect.outputs.workflows }}
steps:
- uses: actions/checkout@v6
with:
fetch-depth: 0
- name: Detect changed paths (no external action)
id: detect
shell: bash
run: |
set -euo pipefail
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
BASE_SHA='${{ github.event.pull_request.base.sha }}'
HEAD_SHA='${{ github.event.pull_request.head.sha }}'
echo "Base SHA: $BASE_SHA"
echo "Head SHA: $HEAD_SHA"
mapfile -t files < <(git diff --name-only --no-renames "$BASE_SHA" "$HEAD_SHA")
else
files=("sdk/force" "codex-rs/force" ".github/force" "package.json")
fi
python=false
typescript=false
workflows=false
for f in "${files[@]}"; do
[[ $f == sdk/python/* || $f == sdk/python-runtime/* ]] && python=true
[[ $f == sdk/typescript/* || $f == codex-rs/* || $f == package.json || $f == pnpm-lock.yaml || $f == pnpm-workspace.yaml ]] && typescript=true
[[ $f == .github/* ]] && workflows=true
done
echo "python=$python" >> "$GITHUB_OUTPUT"
echo "typescript=$typescript" >> "$GITHUB_OUTPUT"
echo "workflows=$workflows" >> "$GITHUB_OUTPUT"
typescript_sdk:
name: TypeScript SDK
sdks:
runs-on:
group: codex-runners
labels: codex-linux-x64
timeout-minutes: 10
needs: changed
if: ${{ needs.changed.outputs.typescript == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
steps:
- name: Checkout repository
uses: actions/checkout@v6
@@ -95,190 +50,3 @@ jobs:
- name: Test SDK packages
run: pnpm -r --filter ./sdk/typescript run test
python_generated:
name: Python Pinned Generated
runs-on:
group: codex-runners
labels: codex-linux-x64
timeout-minutes: 10
needs: changed
if: ${{ needs.changed.outputs.python == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
steps:
- uses: actions/checkout@v6
- name: Setup Python
uses: actions/setup-python@v6
with:
python-version: "3.13"
cache: pip
cache-dependency-path: sdk/python/pyproject.toml
- name: Install Python SDK dependencies
run: python -m pip install -e "sdk/python[dev]"
- name: Regenerate Python SDK artifacts from pinned runtime
run: python sdk/python/scripts/update_sdk_artifacts.py generate-types-for-pinned-runtime
- name: Check for generated drift
run: git diff --exit-code -- sdk/python
python_quality:
name: Python Quality (${{ matrix.python-version }})
runs-on:
group: codex-runners
labels: codex-linux-x64
timeout-minutes: 15
needs: changed
if: ${{ needs.changed.outputs.python == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
strategy:
fail-fast: false
matrix:
python-version: ["3.10", "3.13"]
steps:
- uses: actions/checkout@v6
- name: Setup Python
uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
cache: pip
cache-dependency-path: sdk/python/pyproject.toml
- name: Install Python SDK dependencies
run: python -m pip install -e "sdk/python[dev]" build twine mypy
- name: Ruff lint
run: >
python -m ruff check
sdk/python/src
sdk/python/tests
sdk/python/scripts
--exclude sdk/python/tests/test_real_app_server_integration.py
- name: Mypy
run: python -m mypy --config-file sdk/python/mypy.ini sdk/python/src/codex_app_server
- name: Pytest
run: >
python -m pytest
sdk/python/tests/test_artifact_workflow_and_binaries.py
sdk/python/tests/test_async_client_behavior.py
sdk/python/tests/test_client_rpc_methods.py
sdk/python/tests/test_public_api_runtime_behavior.py
sdk/python/tests/test_public_api_signatures.py
- name: Build Python SDK
run: python -m build sdk/python --outdir sdk/python/dist-ci
- name: Twine check
run: python -m twine check sdk/python/dist-ci/*
python_platform_smoke:
name: Python Platform Smoke (${{ matrix.name }})
runs-on: ${{ matrix.runs_on || matrix.runner }}
timeout-minutes: 15
needs: changed
if: ${{ needs.changed.outputs.python == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
strategy:
fail-fast: false
matrix:
include:
- name: macOS
runner: macos-15-xlarge
- name: Windows
runner: windows-x64
runs_on:
group: codex-runners
labels: codex-windows-x64
steps:
- uses: actions/checkout@v6
- name: Setup Python
uses: actions/setup-python@v6
with:
python-version: "3.13"
cache: pip
cache-dependency-path: sdk/python/pyproject.toml
- name: Install Python SDK dependencies
run: python -m pip install -e "sdk/python[dev]" build
- name: Pytest
run: >
python -m pytest
sdk/python/tests/test_async_client_behavior.py
sdk/python/tests/test_client_rpc_methods.py
sdk/python/tests/test_public_api_runtime_behavior.py
sdk/python/tests/test_public_api_signatures.py
- name: Build Python SDK wheel
run: python -m build sdk/python --outdir sdk/python/dist-ci
- name: Install built wheel and smoke imports
shell: bash
run: |
set -euo pipefail
python -m venv .venv-wheel-smoke
if [[ "${{ runner.os }}" == "Windows" ]]; then
VENV_PYTHON=".venv-wheel-smoke/Scripts/python.exe"
else
VENV_PYTHON=".venv-wheel-smoke/bin/python"
fi
"$VENV_PYTHON" -m pip install --upgrade pip
"$VENV_PYTHON" -m pip install sdk/python/dist-ci/*.whl
"$VENV_PYTHON" -c "import codex_app_server; from codex_app_server import AsyncCodex, AsyncThread, Codex, RunResult, Thread; assert codex_app_server.__name__ == 'codex_app_server'; assert Codex.__name__ == 'Codex'; assert AsyncCodex.__name__ == 'AsyncCodex'; assert Thread.__name__ == 'Thread'; assert AsyncThread.__name__ == 'AsyncThread'; assert RunResult.__name__ == 'RunResult'"
sdks:
name: sdks
runs-on: ubuntu-24.04
if: ${{ always() }}
needs:
- changed
- typescript_sdk
- python_generated
- python_quality
- python_platform_smoke
steps:
- name: Check SDK results
shell: bash
run: |
set -euo pipefail
should_run_typescript=false
should_run_python=false
if [[ "${{ github.event_name }}" == "push" || "${{ needs.changed.outputs.workflows }}" == "true" || "${{ needs.changed.outputs.typescript }}" == "true" ]]; then
should_run_typescript=true
fi
if [[ "${{ github.event_name }}" == "push" || "${{ needs.changed.outputs.workflows }}" == "true" || "${{ needs.changed.outputs.python }}" == "true" ]]; then
should_run_python=true
fi
if [[ "$should_run_typescript" == "false" && "$should_run_python" == "false" ]]; then
echo "No SDK-relevant changes detected."
exit 0
fi
if [[ "$should_run_typescript" == "true" && "${{ needs.typescript_sdk.result }}" != "success" ]]; then
echo "TypeScript job result: ${{ needs.typescript_sdk.result }}"
exit 1
fi
if [[ "$should_run_python" == "true" && "${{ needs.python_generated.result }}" != "success" ]]; then
echo "Python generated job result: ${{ needs.python_generated.result }}"
exit 1
fi
if [[ "$should_run_python" == "true" && "${{ needs.python_quality.result }}" != "success" ]]; then
echo "Python quality job result: ${{ needs.python_quality.result }}"
exit 1
fi
if [[ "$should_run_python" == "true" && "${{ needs.python_platform_smoke.result }}" != "success" ]]; then
echo "Python platform smoke job result: ${{ needs.python_platform_smoke.result }}"
exit 1
fi
echo "SDK checks passed."

View File

@@ -9,14 +9,12 @@ use crate::config::types::ShellEnvironmentPolicy;
use crate::function_tool::FunctionCallError;
use crate::protocol::AgentStatus;
use crate::protocol::AskForApproval;
use crate::protocol::EventMsg;
use crate::protocol::FileSystemSandboxPolicy;
use crate::protocol::NetworkSandboxPolicy;
use crate::protocol::Op;
use crate::protocol::SandboxPolicy;
use crate::protocol::SessionSource;
use crate::protocol::SubAgentSource;
use crate::protocol::TurnCompleteEvent;
use crate::state::TaskKind;
use crate::tasks::SessionTask;
use crate::tasks::SessionTaskContext;
@@ -1416,7 +1414,7 @@ async fn multi_agent_v2_wait_agent_accepts_targets_argument() {
assert_eq!(
result,
crate::tools::handlers::multi_agents_v2::wait::WaitAgentResult {
message: "Wait completed.".to_string(),
status: HashMap::from([(target, AgentStatus::NotFound)]),
timed_out: false,
}
);
@@ -1584,7 +1582,12 @@ async fn wait_agent_returns_final_status_without_timeout() {
}
#[tokio::test]
async fn multi_agent_v2_wait_agent_returns_summary_for_named_targets() {
async fn multi_agent_v2_wait_agent_returns_statuses_keyed_by_path() {
#[derive(Debug, Deserialize)]
struct SpawnAgentResult {
task_name: String,
}
let (mut session, mut turn) = make_session_and_context().await;
let manager = thread_manager();
let root = manager
@@ -1614,7 +1617,9 @@ async fn multi_agent_v2_wait_agent_returns_summary_for_named_targets() {
))
.await
.expect("spawn_agent should succeed");
let _ = expect_text_output(spawn_output);
let (content, _) = expect_text_output(spawn_output);
let spawn_result: SpawnAgentResult =
serde_json::from_str(&content).expect("spawn result should parse");
let agent_id = session
.services
@@ -1662,67 +1667,13 @@ async fn multi_agent_v2_wait_agent_returns_summary_for_named_targets() {
assert_eq!(
result,
crate::tools::handlers::multi_agents_v2::wait::WaitAgentResult {
message: "Wait completed.".to_string(),
status: HashMap::from([(spawn_result.task_name, AgentStatus::Shutdown)]),
timed_out: false,
}
);
assert_eq!(success, None);
}
#[tokio::test]
async fn multi_agent_v2_wait_agent_does_not_return_completed_content() {
let (mut session, mut turn) = make_session_and_context().await;
let manager = thread_manager();
session.services.agent_control = manager.agent_control();
let mut config = (*turn.config).clone();
config
.features
.enable(Feature::MultiAgentV2)
.expect("test config should allow feature update");
turn.config = Arc::new(config.clone());
let thread = manager.start_thread(config).await.expect("start thread");
let agent_id = thread.thread_id;
let child_turn = thread.thread.codex.session.new_default_turn().await;
thread
.thread
.codex
.session
.send_event(
child_turn.as_ref(),
EventMsg::TurnComplete(TurnCompleteEvent {
turn_id: child_turn.sub_id.clone(),
last_agent_message: Some("sensitive child output".to_string()),
}),
)
.await;
let output = WaitAgentHandlerV2
.handle(invocation(
Arc::new(session),
Arc::new(turn),
"wait_agent",
function_payload(json!({
"targets": [agent_id.to_string()],
"timeout_ms": 1000
})),
))
.await
.expect("wait_agent should succeed");
let (content, success) = expect_text_output(output);
let result: crate::tools::handlers::multi_agents_v2::wait::WaitAgentResult =
serde_json::from_str(&content).expect("wait_agent result should be json");
assert_eq!(
result,
crate::tools::handlers::multi_agents_v2::wait::WaitAgentResult {
message: "Wait completed.".to_string(),
timed_out: false,
}
);
assert!(!content.contains("sensitive child output"));
assert_eq!(success, None);
}
#[tokio::test]
async fn close_agent_submits_shutdown_and_returns_previous_status() {
let (mut session, turn) = make_session_and_context().await;

View File

@@ -35,12 +35,21 @@ impl ToolHandler for Handler {
let args: WaitArgs = parse_arguments(&arguments)?;
let receiver_thread_ids = resolve_agent_targets(&session, &turn, args.targets).await?;
let mut receiver_agents = Vec::with_capacity(receiver_thread_ids.len());
let mut target_by_thread_id = HashMap::with_capacity(receiver_thread_ids.len());
for receiver_thread_id in &receiver_thread_ids {
let agent_metadata = session
.services
.agent_control
.get_agent_metadata(*receiver_thread_id)
.unwrap_or_default();
target_by_thread_id.insert(
*receiver_thread_id,
agent_metadata
.agent_path
.as_ref()
.map(ToString::to_string)
.unwrap_or_else(|| receiver_thread_id.to_string()),
);
receiver_agents.push(CollabAgentRef {
thread_id: *receiver_thread_id,
agent_nickname: agent_metadata.agent_nickname,
@@ -143,7 +152,18 @@ impl ToolHandler for Handler {
let timed_out = statuses.is_empty();
let statuses_by_id = statuses.clone().into_iter().collect::<HashMap<_, _>>();
let agent_statuses = build_wait_agent_statuses(&statuses_by_id, &receiver_agents);
let result = WaitAgentResult::from_timed_out(timed_out);
let result = WaitAgentResult {
status: statuses
.into_iter()
.filter_map(|(thread_id, status)| {
target_by_thread_id
.get(&thread_id)
.cloned()
.map(|target| (target, status))
})
.collect(),
timed_out,
};
session
.send_event(
@@ -171,24 +191,10 @@ struct WaitArgs {
#[derive(Debug, Deserialize, Serialize, PartialEq, Eq)]
pub(crate) struct WaitAgentResult {
pub(crate) message: String,
pub(crate) status: HashMap<String, AgentStatus>,
pub(crate) timed_out: bool,
}
impl WaitAgentResult {
fn from_timed_out(timed_out: bool) -> Self {
let message = if timed_out {
"Wait timed out."
} else {
"Wait completed."
};
Self {
message: message.to_string(),
timed_out,
}
}
}
impl ToolOutput for WaitAgentResult {
fn log_preview(&self) -> String {
tool_output_json_text(self, "wait_agent")

View File

@@ -178,41 +178,23 @@ fn resume_agent_output_schema() -> JsonValue {
})
}
fn wait_output_schema(multi_agent_v2: bool) -> JsonValue {
if multi_agent_v2 {
json!({
"type": "object",
"properties": {
"message": {
"type": "string",
"description": "Brief wait summary without the agent's final content."
},
"timed_out": {
"type": "boolean",
"description": "Whether the wait call returned due to timeout before any agent reached a final status."
}
fn wait_output_schema() -> JsonValue {
json!({
"type": "object",
"properties": {
"status": {
"type": "object",
"description": "Final statuses keyed by canonical task name when available, otherwise by agent id.",
"additionalProperties": agent_status_output_schema()
},
"required": ["message", "timed_out"],
"additionalProperties": false
})
} else {
json!({
"type": "object",
"properties": {
"status": {
"type": "object",
"description": "Final statuses keyed by canonical task name when available, otherwise by agent id.",
"additionalProperties": agent_status_output_schema()
},
"timed_out": {
"type": "boolean",
"description": "Whether the wait call returned due to timeout before any agent reached a final status."
}
},
"required": ["status", "timed_out"],
"additionalProperties": false
})
}
"timed_out": {
"type": "boolean",
"description": "Whether the wait call returned due to timeout before any agent reached a final status."
}
},
"required": ["status", "timed_out"],
"additionalProperties": false
})
}
fn close_agent_output_schema() -> JsonValue {
@@ -1440,7 +1422,7 @@ fn create_resume_agent_tool() -> ToolSpec {
})
}
fn create_wait_agent_tool(multi_agent_v2: bool) -> ToolSpec {
fn create_wait_agent_tool() -> ToolSpec {
let mut properties = BTreeMap::new();
properties.insert(
"targets".to_string(),
@@ -1463,13 +1445,8 @@ fn create_wait_agent_tool(multi_agent_v2: bool) -> ToolSpec {
ToolSpec::Function(ResponsesApiTool {
name: "wait_agent".to_string(),
description: if multi_agent_v2 {
"Wait for agents to reach a final status. Returns a brief wait summary instead of the agent's final content. Returns a timeout summary when no agent reaches a final status before the deadline."
.to_string()
} else {
"Wait for agents to reach a final status. Completed statuses may include the agent's final message. Returns empty status when timed out. Once the agent reaches a final status, a notification message will be received containing the same completed status."
.to_string()
},
description: "Wait for agents to reach a final status. Completed statuses may include the agent's final message. Returns empty status when timed out. Once the agent reaches a final status, a notification message will be received containing the same completed status."
.to_string(),
strict: false,
defer_loading: None,
parameters: JsonSchema::Object {
@@ -1477,7 +1454,7 @@ fn create_wait_agent_tool(multi_agent_v2: bool) -> ToolSpec {
required: Some(vec!["targets".to_string()]),
additional_properties: Some(false.into()),
},
output_schema: Some(wait_output_schema(multi_agent_v2)),
output_schema: Some(wait_output_schema()),
})
}
@@ -3029,7 +3006,7 @@ pub(crate) fn build_specs_with_discoverable_tools(
}
push_tool_spec(
&mut builder,
create_wait_agent_tool(config.multi_agent_v2),
create_wait_agent_tool(),
/*supports_parallel_tool_calls*/ false,
config.code_mode_enabled,
);
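
With the `multi_agent_v2` branch removed, every `wait_agent` call now advertises the single consolidated output schema above. As a rough illustration only (not taken from this diff; the task name and status value are hypothetical, and the exact status encoding is whatever `agent_status_output_schema()` defines), the tool output now has this shape:

```rust
use serde_json::json;

fn main() {
    // Hypothetical wait_agent output matching the unified schema:
    // "status" maps canonical task name (or agent id when no task name
    // is available) to that agent's final status; "timed_out" reports
    // whether the deadline elapsed before any agent finished.
    let example = json!({
        "status": { "explore-codebase": "shutdown" },
        "timed_out": false
    });
    println!("{}", serde_json::to_string_pretty(&example).unwrap());
}
```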

View File

@@ -469,7 +469,7 @@ fn test_full_toolset_specs_for_gpt5_codex_unified_exec_web_search() {
create_view_image_tool(config.can_request_original_image_detail),
create_spawn_agent_tool(&config),
create_send_input_tool(),
create_wait_agent_tool(config.multi_agent_v2),
create_wait_agent_tool(),
create_close_agent_tool(),
] {
expected.insert(tool_name(&spec).to_string(), spec);
@@ -607,8 +607,8 @@ fn test_build_specs_multi_agent_v2_uses_task_names_and_hides_resume() {
.as_ref()
.expect("wait_agent should define output schema");
assert_eq!(
output_schema["properties"]["message"]["description"],
json!("Brief wait summary without the agent's final content.")
output_schema["properties"]["status"]["description"],
json!("Final statuses keyed by canonical task name when available, otherwise by agent id.")
);
assert_lacks_tool_name(&tools, "resume_agent");
}

View File

@@ -294,13 +294,14 @@ async fn returns_fresh_tokens_as_is() -> Result<()> {
.await;
let ctx = RefreshTokenTestContext::new(&server)?;
let initial_last_refresh = Utc::now() - Duration::days(1);
let initial_tokens = build_tokens(INITIAL_ACCESS_TOKEN, INITIAL_REFRESH_TOKEN);
let stale_refresh = Utc::now() - Duration::days(9);
let fresh_access_token = access_token_with_expiration(Utc::now() + Duration::hours(1));
let initial_tokens = build_tokens(&fresh_access_token, INITIAL_REFRESH_TOKEN);
let initial_auth = AuthDotJson {
auth_mode: Some(AuthMode::Chatgpt),
openai_api_key: None,
tokens: Some(initial_tokens.clone()),
last_refresh: Some(initial_last_refresh),
last_refresh: Some(stale_refresh),
};
ctx.write_auth(&initial_auth)?;
@@ -325,7 +326,7 @@ async fn returns_fresh_tokens_as_is() -> Result<()> {
#[serial_test::serial(auth_refresh)]
#[tokio::test]
async fn refreshes_token_when_last_refresh_is_stale() -> Result<()> {
async fn refreshes_token_when_access_token_is_expired() -> Result<()> {
skip_if_no_network!(Ok(()));
let server = MockServer::start().await;
@@ -340,13 +341,14 @@ async fn refreshes_token_when_last_refresh_is_stale() -> Result<()> {
.await;
let ctx = RefreshTokenTestContext::new(&server)?;
let stale_refresh = Utc::now() - Duration::days(9);
let initial_tokens = build_tokens(INITIAL_ACCESS_TOKEN, INITIAL_REFRESH_TOKEN);
let fresh_refresh = Utc::now() - Duration::days(1);
let expired_access_token = access_token_with_expiration(Utc::now() - Duration::hours(1));
let initial_tokens = build_tokens(&expired_access_token, INITIAL_REFRESH_TOKEN);
let initial_auth = AuthDotJson {
auth_mode: Some(AuthMode::Chatgpt),
openai_api_key: None,
tokens: Some(initial_tokens.clone()),
last_refresh: Some(stale_refresh),
last_refresh: Some(fresh_refresh),
};
ctx.write_auth(&initial_auth)?;
@@ -373,7 +375,7 @@ async fn refreshes_token_when_last_refresh_is_stale() -> Result<()> {
.as_ref()
.context("last_refresh should be recorded")?;
assert!(
*refreshed_at >= stale_refresh,
*refreshed_at >= fresh_refresh,
"last_refresh should advance"
);
@@ -867,7 +869,7 @@ impl Drop for EnvGuard {
}
}
fn minimal_jwt() -> String {
fn jwt_with_payload(payload: serde_json::Value) -> String {
#[derive(Serialize)]
struct Header {
alg: &'static str,
@@ -878,7 +880,6 @@ fn minimal_jwt() -> String {
alg: "none",
typ: "JWT",
};
let payload = json!({ "sub": "user-123" });
fn b64(data: &[u8]) -> String {
base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(data)
@@ -898,6 +899,14 @@ fn minimal_jwt() -> String {
format!("{header_b64}.{payload_b64}.{signature_b64}")
}
fn minimal_jwt() -> String {
jwt_with_payload(json!({ "sub": "user-123" }))
}
fn access_token_with_expiration(expires_at: chrono::DateTime<Utc>) -> String {
jwt_with_payload(json!({ "sub": "user-123", "exp": expires_at.timestamp() }))
}
fn build_tokens(access_token: &str, refresh_token: &str) -> TokenData {
let id_token = IdTokenInfo {
raw_jwt: minimal_jwt(),

View File

@@ -28,6 +28,7 @@ use crate::token_data::KnownPlan as InternalKnownPlan;
use crate::token_data::PlanType as InternalPlanType;
use crate::token_data::TokenData;
use crate::token_data::parse_chatgpt_jwt_claims;
use crate::token_data::parse_jwt_expiration;
use codex_client::CodexHttpClient;
use codex_protocol::account::PlanType as AccountPlanType;
use serde_json::Value;
@@ -69,7 +70,6 @@ impl PartialEq for CodexAuth {
}
}
// TODO(pakrym): use token exp field to check for expiration instead
const TOKEN_REFRESH_INTERVAL: i64 = 8;
const REFRESH_TOKEN_EXPIRED_MESSAGE: &str = "Your access token could not be refreshed because your refresh token has expired. Please log out and sign in again.";
@@ -1333,6 +1333,11 @@ impl AuthManager {
Some(auth_dot_json) => auth_dot_json,
None => return false,
};
if let Some(tokens) = auth_dot_json.tokens.as_ref()
&& let Ok(Some(expires_at)) = parse_jwt_expiration(&tokens.access_token)
{
return expires_at <= Utc::now();
}
let last_refresh = match auth_dot_json.last_refresh {
Some(last_refresh) => last_refresh,
None => return false,

View File

@@ -1,6 +1,9 @@
use base64::Engine;
use chrono::DateTime;
use chrono::Utc;
use serde::Deserialize;
use serde::Serialize;
use serde::de::DeserializeOwned;
use thiserror::Error;
#[derive(Deserialize, Serialize, Clone, Debug, PartialEq, Default)]
@@ -117,6 +120,12 @@ struct AuthClaims {
chatgpt_account_id: Option<String>,
}
#[derive(Deserialize)]
struct StandardJwtClaims {
#[serde(default)]
exp: Option<i64>,
}
#[derive(Debug, Error)]
pub enum IdTokenInfoError {
#[error("invalid ID token format")]
@@ -127,7 +136,7 @@ pub enum IdTokenInfoError {
Json(#[from] serde_json::Error),
}
pub fn parse_chatgpt_jwt_claims(jwt: &str) -> Result<IdTokenInfo, IdTokenInfoError> {
fn decode_jwt_payload<T: DeserializeOwned>(jwt: &str) -> Result<T, IdTokenInfoError> {
// JWT format: header.payload.signature
let mut parts = jwt.split('.');
let (_header_b64, payload_b64, _sig_b64) = match (parts.next(), parts.next(), parts.next()) {
@@ -136,7 +145,19 @@ pub fn parse_chatgpt_jwt_claims(jwt: &str) -> Result<IdTokenInfo, IdTokenInfoErr
};
let payload_bytes = base64::engine::general_purpose::URL_SAFE_NO_PAD.decode(payload_b64)?;
let claims: IdClaims = serde_json::from_slice(&payload_bytes)?;
let claims = serde_json::from_slice(&payload_bytes)?;
Ok(claims)
}
pub fn parse_jwt_expiration(jwt: &str) -> Result<Option<DateTime<Utc>>, IdTokenInfoError> {
let claims: StandardJwtClaims = decode_jwt_payload(jwt)?;
Ok(claims
.exp
.and_then(|exp| DateTime::<Utc>::from_timestamp(exp, 0)))
}
pub fn parse_chatgpt_jwt_claims(jwt: &str) -> Result<IdTokenInfo, IdTokenInfoError> {
let claims: IdClaims = decode_jwt_payload(jwt)?;
let email = claims
.email
.or_else(|| claims.profile.and_then(|profile| profile.email));
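
A minimal sketch of how the new `parse_jwt_expiration` helper is consumed, mirroring the `auth.rs` hunk earlier in this diff; the wrapper function and its fallback behaviour are assumptions for illustration, not code from this change:

```rust
use chrono::Utc;

use crate::token_data::parse_jwt_expiration;

// Sketch only: decide whether the stored access token needs a refresh.
// When the JWT carries an `exp` claim, that claim is authoritative; when
// the claim is absent or the token cannot be decoded, the caller falls
// back to the existing last_refresh heuristic (kept elsewhere in this diff).
fn access_token_is_expired(access_token: &str) -> bool {
    matches!(
        parse_jwt_expiration(access_token),
        Ok(Some(expires_at)) if expires_at <= Utc::now()
    )
}
```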

View File

@@ -1,9 +1,10 @@
use super::*;
use chrono::TimeZone;
use chrono::Utc;
use pretty_assertions::assert_eq;
use serde::Serialize;
#[test]
fn id_token_info_parses_email_and_plan() {
fn fake_jwt(payload: serde_json::Value) -> String {
#[derive(Serialize)]
struct Header {
alg: &'static str,
@@ -13,12 +14,6 @@ fn id_token_info_parses_email_and_plan() {
alg: "none",
typ: "JWT",
};
let payload = serde_json::json!({
"email": "user@example.com",
"https://api.openai.com/auth": {
"chatgpt_plan_type": "pro"
}
});
fn b64url_no_pad(bytes: &[u8]) -> String {
base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(bytes)
@@ -27,7 +22,17 @@ fn id_token_info_parses_email_and_plan() {
let header_b64 = b64url_no_pad(&serde_json::to_vec(&header).unwrap());
let payload_b64 = b64url_no_pad(&serde_json::to_vec(&payload).unwrap());
let signature_b64 = b64url_no_pad(b"sig");
let fake_jwt = format!("{header_b64}.{payload_b64}.{signature_b64}");
format!("{header_b64}.{payload_b64}.{signature_b64}")
}
#[test]
fn id_token_info_parses_email_and_plan() {
let fake_jwt = fake_jwt(serde_json::json!({
"email": "user@example.com",
"https://api.openai.com/auth": {
"chatgpt_plan_type": "pro"
}
}));
let info = parse_chatgpt_jwt_claims(&fake_jwt).expect("should parse");
assert_eq!(info.email.as_deref(), Some("user@example.com"));
@@ -36,30 +41,12 @@ fn id_token_info_parses_email_and_plan() {
#[test]
fn id_token_info_parses_go_plan() {
#[derive(Serialize)]
struct Header {
alg: &'static str,
typ: &'static str,
}
let header = Header {
alg: "none",
typ: "JWT",
};
let payload = serde_json::json!({
let fake_jwt = fake_jwt(serde_json::json!({
"email": "user@example.com",
"https://api.openai.com/auth": {
"chatgpt_plan_type": "go"
}
});
fn b64url_no_pad(bytes: &[u8]) -> String {
base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(bytes)
}
let header_b64 = b64url_no_pad(&serde_json::to_vec(&header).unwrap());
let payload_b64 = b64url_no_pad(&serde_json::to_vec(&payload).unwrap());
let signature_b64 = b64url_no_pad(b"sig");
let fake_jwt = format!("{header_b64}.{payload_b64}.{signature_b64}");
}));
let info = parse_chatgpt_jwt_claims(&fake_jwt).expect("should parse");
assert_eq!(info.email.as_deref(), Some("user@example.com"));
@@ -68,31 +55,37 @@ fn id_token_info_parses_go_plan() {
#[test]
fn id_token_info_handles_missing_fields() {
#[derive(Serialize)]
struct Header {
alg: &'static str,
typ: &'static str,
}
let header = Header {
alg: "none",
typ: "JWT",
};
let payload = serde_json::json!({ "sub": "123" });
fn b64url_no_pad(bytes: &[u8]) -> String {
base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(bytes)
}
let header_b64 = b64url_no_pad(&serde_json::to_vec(&header).unwrap());
let payload_b64 = b64url_no_pad(&serde_json::to_vec(&payload).unwrap());
let signature_b64 = b64url_no_pad(b"sig");
let fake_jwt = format!("{header_b64}.{payload_b64}.{signature_b64}");
let fake_jwt = fake_jwt(serde_json::json!({ "sub": "123" }));
let info = parse_chatgpt_jwt_claims(&fake_jwt).expect("should parse");
assert!(info.email.is_none());
assert!(info.get_chatgpt_plan_type().is_none());
}
#[test]
fn jwt_expiration_parses_exp_claim() {
let fake_jwt = fake_jwt(serde_json::json!({
"exp": 1_700_000_000_i64,
}));
let expires_at = parse_jwt_expiration(&fake_jwt).expect("should parse");
assert_eq!(expires_at, Utc.timestamp_opt(1_700_000_000, 0).single());
}
#[test]
fn jwt_expiration_handles_missing_exp() {
let fake_jwt = fake_jwt(serde_json::json!({ "sub": "123" }));
let expires_at = parse_jwt_expiration(&fake_jwt).expect("should parse");
assert_eq!(expires_at, None);
}
#[test]
fn jwt_expiration_rejects_malformed_jwt() {
let err = parse_jwt_expiration("not-a-jwt").expect_err("should fail");
assert_eq!(err.to_string(), "invalid ID token format");
}
#[test]
fn workspace_account_detection_matches_workspace_plans() {
let workspace = IdTokenInfo {

View File

@@ -66,7 +66,7 @@ notebook bootstrap the pinned runtime package automatically.
```bash
cd sdk/python
python scripts/update_sdk_artifacts.py generate-types-for-pinned-runtime
python scripts/update_sdk_artifacts.py generate-types
python scripts/update_sdk_artifacts.py \
stage-sdk \
/tmp/codex-python-release/codex-app-server-sdk \
@@ -80,7 +80,7 @@ python scripts/update_sdk_artifacts.py \
This supports the CI release flow:
- run `generate-types-for-pinned-runtime` before packaging
- run `generate-types` before packaging
- stage `codex-app-server-sdk` once with an exact `codex-cli-bin==...` dependency
- stage `codex-cli-bin` on each supported platform runner with the same pinned runtime version
- build and publish `codex-cli-bin` as platform wheels only; do not publish an sdist

View File

@@ -65,7 +65,7 @@ platform wheels only; do not publish an sdist:
```bash
cd sdk/python
python scripts/update_sdk_artifacts.py generate-types-for-pinned-runtime
python scripts/update_sdk_artifacts.py generate-types
python scripts/update_sdk_artifacts.py \
stage-sdk \
/tmp/codex-python-release/codex-app-server-sdk \

View File

@@ -1,19 +0,0 @@
[mypy]
python_version = 3.10
mypy_path = sdk/python/src
check_untyped_defs = True
warn_unused_ignores = True
no_implicit_optional = True
exclude = ^sdk/python/src/codex_app_server/generated/
[mypy-codex_app_server.api]
ignore_errors = True
[mypy-codex_app_server.async_client]
ignore_errors = True
[mypy-codex_app_server.client]
ignore_errors = True
[mypy-codex_app_server.generated.*]
ignore_errors = True

View File

@@ -3,7 +3,6 @@ from __future__ import annotations
import argparse
import importlib
import importlib.util
import json
import platform
import re
@@ -66,85 +65,6 @@ def run_python_module(module: str, args: list[str], cwd: Path) -> None:
run([sys.executable, "-m", module, *args], cwd)
def run_capture(cmd: list[str], cwd: Path) -> str:
result = subprocess.run(
cmd,
cwd=str(cwd),
text=True,
capture_output=True,
check=False,
)
if result.returncode != 0:
raise RuntimeError(
f"Command failed ({result.returncode}): {' '.join(cmd)}\n"
f"STDOUT:\n{result.stdout}\nSTDERR:\n{result.stderr}"
)
return result.stdout
def runtime_setup_path() -> Path:
return sdk_root() / "_runtime_setup.py"
def pinned_runtime_version() -> str:
spec = importlib.util.spec_from_file_location(
"_runtime_setup", runtime_setup_path()
)
if spec is None or spec.loader is None:
raise RuntimeError(f"Failed to load runtime setup module: {runtime_setup_path()}")
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return module.pinned_runtime_version() # type: ignore[no-any-return]
def runtime_git_ref(version: str) -> str:
return f"rust-v{version}"
def pinned_runtime_git_ref() -> str:
return runtime_git_ref(pinned_runtime_version())
def ensure_git_ref_available(git_ref: str) -> None:
result = subprocess.run(
["git", "rev-parse", "--verify", git_ref],
cwd=str(repo_root()),
text=True,
capture_output=True,
check=False,
)
if result.returncode == 0:
return
run(["git", "fetch", "origin", "tag", git_ref, "--depth=1"], repo_root())
def read_git_file(git_ref: str, repo_path: str) -> str:
return run_capture(["git", "show", f"{git_ref}:{repo_path}"], repo_root())
def materialize_schema_files_from_git_ref(git_ref: str, out_dir: Path) -> tuple[Path, Path]:
out_dir.mkdir(parents=True, exist_ok=True)
schema_bundle = out_dir / "codex_app_server_protocol.v2.schemas.json"
schema_bundle.write_text(
read_git_file(
git_ref,
"codex-rs/app-server-protocol/schema/json/"
"codex_app_server_protocol.v2.schemas.json",
)
)
server_notification = out_dir / "ServerNotification.json"
server_notification.write_text(
read_git_file(
git_ref,
"codex-rs/app-server-protocol/schema/json/ServerNotification.json",
)
)
return schema_bundle, server_notification
def current_sdk_version() -> str:
match = re.search(
r'^version = "([^"]+)"$',
@@ -476,9 +396,8 @@ def _annotate_schema(value: Any, base: str | None = None) -> None:
_annotate_schema(child, base)
def _normalized_schema_bundle_text(schema_bundle: Path | None = None) -> str:
bundle = schema_bundle or schema_bundle_path()
schema = json.loads(bundle.read_text())
def _normalized_schema_bundle_text() -> str:
schema = json.loads(schema_bundle_path().read_text())
definitions = schema.get("definitions", {})
if isinstance(definitions, dict):
for definition in definitions.values():
@@ -490,17 +409,16 @@ def _normalized_schema_bundle_text(schema_bundle: Path | None = None) -> str:
return json.dumps(schema, indent=2, sort_keys=True) + "\n"
def generate_v2_all(schema_bundle: Path | None = None) -> None:
def generate_v2_all() -> None:
out_path = sdk_root() / "src" / "codex_app_server" / "generated" / "v2_all.py"
out_dir = out_path.parent
old_package_dir = out_dir / "v2_all"
if old_package_dir.exists():
shutil.rmtree(old_package_dir)
out_dir.mkdir(parents=True, exist_ok=True)
bundle = schema_bundle or schema_bundle_path()
with tempfile.TemporaryDirectory() as td:
normalized_bundle = Path(td) / bundle.name
normalized_bundle.write_text(_normalized_schema_bundle_text(bundle))
normalized_bundle = Path(td) / schema_bundle_path().name
normalized_bundle.write_text(_normalized_schema_bundle_text())
run_python_module(
"datamodel_code_generator",
[
@@ -537,14 +455,9 @@ def generate_v2_all(schema_bundle: Path | None = None) -> None:
_normalize_generated_timestamps(out_path)
def _notification_specs(
server_notification_schema: Path | None = None,
) -> list[tuple[str, str]]:
server_notification_path = server_notification_schema or (
schema_root_dir() / "ServerNotification.json"
)
def _notification_specs() -> list[tuple[str, str]]:
server_notifications = json.loads(
server_notification_path.read_text()
(schema_root_dir() / "ServerNotification.json").read_text()
)
one_of = server_notifications.get("oneOf", [])
generated_source = (
@@ -581,9 +494,7 @@ def _notification_specs(
return specs
def generate_notification_registry(
server_notification_schema: Path | None = None,
) -> None:
def generate_notification_registry() -> None:
out = (
sdk_root()
/ "src"
@@ -591,7 +502,7 @@ def generate_notification_registry(
/ "generated"
/ "notification_registry.py"
)
specs = _notification_specs(server_notification_schema)
specs = _notification_specs()
class_names = sorted({class_name for _, class_name in specs})
lines = [
@@ -647,7 +558,6 @@ class PublicFieldSpec:
@dataclass(frozen=True)
class CliOps:
generate_types: Callable[[], None]
generate_types_for_pinned_runtime: Callable[[str | None], None]
stage_python_sdk_package: Callable[[Path, str, str], Path]
stage_python_runtime_package: Callable[[Path, str, Path], Path]
current_sdk_version: Callable[[], str]
@@ -957,9 +867,9 @@ def generate_public_api_flat_methods() -> None:
exclude={"thread_id", "input"},
)
original_source = public_api_path.read_text()
source = public_api_path.read_text()
source = _replace_generated_block(
original_source,
source,
"Codex.flat_methods",
_render_codex_block(
thread_start_fields,
@@ -988,35 +898,16 @@ def generate_public_api_flat_methods() -> None:
"AsyncThread.flat_methods",
_render_async_thread_block(turn_start_fields),
)
if source == original_source:
return
public_api_path.write_text(source)
def generate_types(
schema_bundle: Path | None = None,
server_notification_schema: Path | None = None,
) -> None:
def generate_types() -> None:
# v2_all is the authoritative generated surface.
generate_v2_all(schema_bundle)
generate_notification_registry(server_notification_schema)
generate_v2_all()
generate_notification_registry()
generate_public_api_flat_methods()
def generate_types_for_pinned_runtime(git_ref: str | None = None) -> None:
pinned_ref = git_ref or pinned_runtime_git_ref()
ensure_git_ref_available(pinned_ref)
with tempfile.TemporaryDirectory(prefix="codex-python-pinned-schema-") as temp_root:
schema_bundle, server_notification_schema = materialize_schema_files_from_git_ref(
pinned_ref,
Path(temp_root),
)
generate_types(
schema_bundle=schema_bundle,
server_notification_schema=server_notification_schema,
)
def build_parser() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser(description="Single SDK maintenance entrypoint")
subparsers = parser.add_subparsers(dest="command", required=True)
@@ -1024,14 +915,6 @@ def build_parser() -> argparse.ArgumentParser:
subparsers.add_parser(
"generate-types", help="Regenerate Python protocol-derived types"
)
pinned_types_parser = subparsers.add_parser(
"generate-types-for-pinned-runtime",
help="Regenerate Python protocol-derived types from the pinned runtime version",
)
pinned_types_parser.add_argument(
"--git-ref",
help="Optional git ref to source vendored schema files from",
)
stage_sdk_parser = subparsers.add_parser(
"stage-sdk",
@@ -1081,7 +964,6 @@ def parse_args(argv: Sequence[str] | None = None) -> argparse.Namespace:
def default_cli_ops() -> CliOps:
return CliOps(
generate_types=generate_types,
generate_types_for_pinned_runtime=generate_types_for_pinned_runtime,
stage_python_sdk_package=stage_python_sdk_package,
stage_python_runtime_package=stage_python_runtime_package,
current_sdk_version=current_sdk_version,
@@ -1091,10 +973,8 @@ def default_cli_ops() -> CliOps:
def run_command(args: argparse.Namespace, ops: CliOps) -> None:
if args.command == "generate-types":
ops.generate_types()
elif args.command == "generate-types-for-pinned-runtime":
ops.generate_types_for_pinned_runtime(args.git_ref)
elif args.command == "stage-sdk":
ops.generate_types_for_pinned_runtime(runtime_git_ref(args.runtime_version))
ops.generate_types()
ops.stage_python_sdk_package(
args.staging_dir,
args.sdk_version or ops.current_sdk_version(),

View File

@@ -52,23 +52,6 @@ from ._run import (
_collect_run_result,
)
__all__ = [
"AsyncCodex",
"AsyncThread",
"AsyncTurnHandle",
"Codex",
"ImageInput",
"Input",
"InputItem",
"LocalImageInput",
"MentionInput",
"RunResult",
"SkillInput",
"TextInput",
"Thread",
"TurnHandle",
]
def _split_user_agent(user_agent: str) -> tuple[str | None, str | None]:
raw = user_agent.strip()

View File

@@ -1133,13 +1133,6 @@ class GuardianRiskLevel(Enum):
high = "high"
class HazelnutScope(Enum):
example = "example"
workspace_shared = "workspace-shared"
all_shared = "all-shared"
personal = "personal"
class HookEventName(Enum):
session_start = "sessionStart"
stop = "stop"
@@ -1385,6 +1378,13 @@ class LogoutAccountResponse(BaseModel):
)
class MarketplaceInterface(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
display_name: Annotated[str | None, Field(alias="displayName")] = None
class McpAuthStatus(Enum):
unsupported = "unsupported"
not_logged_in = "notLoggedIn"
@@ -1633,6 +1633,13 @@ class PluginInstallParams(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
force_remote_sync: Annotated[
bool | None,
Field(
alias="forceRemoteSync",
description="When true, apply the remote plugin change before the local install flow.",
),
] = None
marketplace_path: Annotated[AbsolutePathBuf, Field(alias="marketplacePath")]
plugin_name: Annotated[str, Field(alias="pluginName")]
@@ -1737,6 +1744,13 @@ class PluginUninstallParams(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
force_remote_sync: Annotated[
bool | None,
Field(
alias="forceRemoteSync",
description="When true, apply the remote plugin change before the local uninstall flow.",
),
] = None
plugin_id: Annotated[str, Field(alias="pluginId")]
@@ -1747,13 +1761,6 @@ class PluginUninstallResponse(BaseModel):
)
class ProductSurface(Enum):
chatgpt = "chatgpt"
codex = "codex"
api = "api"
atlas = "atlas"
class RateLimitWindow(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
@@ -1906,15 +1913,6 @@ class ReasoningTextDeltaNotification(BaseModel):
turn_id: Annotated[str, Field(alias="turnId")]
class RemoteSkillSummary(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
description: str
id: str
name: str
class RequestId(RootModel[str | int]):
model_config = ConfigDict(
populate_by_name=True,
@@ -1974,7 +1972,6 @@ class ReasoningResponseItem(BaseModel):
)
content: list[ReasoningItemContent] | None = None
encrypted_content: str | None = None
id: str
summary: list[ReasoningItemReasoningSummary]
type: Annotated[Literal["reasoning"], Field(title="ReasoningResponseItemType")]
@@ -2599,41 +2596,6 @@ class SkillsListParams(BaseModel):
] = None
class SkillsRemoteReadParams(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
enabled: bool | None = False
hazelnut_scope: Annotated[HazelnutScope | None, Field(alias="hazelnutScope")] = (
"example"
)
product_surface: Annotated[ProductSurface | None, Field(alias="productSurface")] = (
"codex"
)
class SkillsRemoteReadResponse(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
data: list[RemoteSkillSummary]
class SkillsRemoteWriteParams(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
hazelnut_id: Annotated[str, Field(alias="hazelnutId")]
class SkillsRemoteWriteResponse(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
id: str
path: str
class SubAgentSourceValue(Enum):
review = "review"
compact = "compact"
@@ -3050,6 +3012,7 @@ class ThreadRealtimeAudioChunk(BaseModel):
populate_by_name=True,
)
data: str
item_id: Annotated[str | None, Field(alias="itemId")] = None
num_channels: Annotated[int, Field(alias="numChannels", ge=0)]
sample_rate: Annotated[int, Field(alias="sampleRate", ge=0)]
samples_per_channel: Annotated[
@@ -3798,29 +3761,6 @@ class PluginReadRequest(BaseModel):
params: PluginReadParams
class SkillsRemoteListRequest(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
id: RequestId
method: Annotated[
Literal["skills/remote/list"], Field(title="Skills/remote/listRequestMethod")
]
params: SkillsRemoteReadParams
class SkillsRemoteExportRequest(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
id: RequestId
method: Annotated[
Literal["skills/remote/export"],
Field(title="Skills/remote/exportRequestMethod"),
]
params: SkillsRemoteWriteParams
class AppListRequest(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
@@ -4679,6 +4619,7 @@ class PluginMarketplaceEntry(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
interface: MarketplaceInterface | None = None
name: str
path: AbsolutePathBuf
plugins: list[PluginSummary]
@@ -5589,14 +5530,6 @@ class FunctionCallOutputBody(RootModel[str | list[FunctionCallOutputContentItem]
root: str | list[FunctionCallOutputContentItem]
class FunctionCallOutputPayload(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
body: FunctionCallOutputBody
success: bool | None = None
class GetAccountRateLimitsResponse(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
@@ -5694,7 +5627,7 @@ class FunctionCallOutputResponseItem(BaseModel):
populate_by_name=True,
)
call_id: str
output: FunctionCallOutputPayload
output: FunctionCallOutputBody
type: Annotated[
Literal["function_call_output"],
Field(title="FunctionCallOutputResponseItemType"),
@@ -5706,7 +5639,7 @@ class CustomToolCallOutputResponseItem(BaseModel):
populate_by_name=True,
)
call_id: str
output: FunctionCallOutputPayload
output: FunctionCallOutputBody
type: Annotated[
Literal["custom_tool_call_output"],
Field(title="CustomToolCallOutputResponseItemType"),
@@ -6139,8 +6072,6 @@ class ClientRequest(
| SkillsListRequest
| PluginListRequest
| PluginReadRequest
| SkillsRemoteListRequest
| SkillsRemoteExportRequest
| AppListRequest
| FsReadFileRequest
| FsWriteFileRequest
@@ -6202,8 +6133,6 @@ class ClientRequest(
| SkillsListRequest
| PluginListRequest
| PluginReadRequest
| SkillsRemoteListRequest
| SkillsRemoteExportRequest
| AppListRequest
| FsReadFileRequest
| FsWriteFileRequest

View File

@@ -5,6 +5,7 @@ import importlib.util
import io
import json
import sys
import tomllib
import urllib.error
from pathlib import Path
@@ -167,24 +168,6 @@ def test_examples_readme_matches_pinned_runtime_version() -> None:
)
def test_pinned_runtime_git_ref_matches_runtime_setup_pin() -> None:
script = _load_update_script_module()
runtime_setup = _load_runtime_setup_module()
assert script.pinned_runtime_git_ref() == (
f"rust-v{runtime_setup.pinned_runtime_version()}"
)
def test_parser_supports_generate_types_for_pinned_runtime() -> None:
script = _load_update_script_module()
args = script.parse_args(["generate-types-for-pinned-runtime"])
assert args.command == "generate-types-for-pinned-runtime"
assert args.git_ref is None
def test_release_metadata_retries_without_invalid_auth(monkeypatch: pytest.MonkeyPatch) -> None:
runtime_setup = _load_runtime_setup_module()
authorizations: list[str | None] = []
@@ -210,7 +193,9 @@ def test_release_metadata_retries_without_invalid_auth(monkeypatch: pytest.Monke
def test_runtime_package_is_wheel_only_and_builds_platform_specific_wheels() -> None:
pyproject_text = (ROOT.parent / "python-runtime" / "pyproject.toml").read_text()
pyproject = tomllib.loads(
(ROOT.parent / "python-runtime" / "pyproject.toml").read_text()
)
hook_source = (ROOT.parent / "python-runtime" / "hatch_build.py").read_text()
hook_tree = ast.parse(hook_source)
initialize_fn = next(
@@ -250,12 +235,14 @@ def test_runtime_package_is_wheel_only_and_builds_platform_specific_wheels() ->
and isinstance(node.value, ast.Constant)
}
assert "[tool.hatch.build.targets.wheel]" in pyproject_text
assert 'packages = ["src/codex_cli_bin"]' in pyproject_text
assert 'include = ["src/codex_cli_bin/bin/**"]' in pyproject_text
assert "[tool.hatch.build.targets.wheel.hooks.custom]" in pyproject_text
assert "[tool.hatch.build.targets.sdist]" in pyproject_text
assert "[tool.hatch.build.targets.sdist.hooks.custom]" in pyproject_text
assert pyproject["tool"]["hatch"]["build"]["targets"]["wheel"] == {
"packages": ["src/codex_cli_bin"],
"include": ["src/codex_cli_bin/bin/**"],
"hooks": {"custom": {}},
}
assert pyproject["tool"]["hatch"]["build"]["targets"]["sdist"] == {
"hooks": {"custom": {}},
}
assert sdist_guard is not None
assert build_data_assignments == {"pure_python": False, "infer_tag": True}
@@ -333,10 +320,7 @@ def test_stage_sdk_runs_type_generation_before_staging(tmp_path: Path) -> None:
)
def fake_generate_types() -> None:
raise AssertionError("stage-sdk should use pinned-runtime generation")
def fake_generate_types_for_pinned_runtime(git_ref: str | None = None) -> None:
calls.append(f"generate_types_for_pinned_runtime:{git_ref}")
calls.append("generate_types")
def fake_stage_sdk_package(
_staging_dir: Path, _sdk_version: str, _runtime_version: str
@@ -354,7 +338,6 @@ def test_stage_sdk_runs_type_generation_before_staging(tmp_path: Path) -> None:
ops = script.CliOps(
generate_types=fake_generate_types,
generate_types_for_pinned_runtime=fake_generate_types_for_pinned_runtime,
stage_python_sdk_package=fake_stage_sdk_package,
stage_python_runtime_package=fake_stage_runtime_package,
current_sdk_version=fake_current_sdk_version,
@@ -362,7 +345,7 @@ def test_stage_sdk_runs_type_generation_before_staging(tmp_path: Path) -> None:
script.run_command(args, ops)
assert calls == ["generate_types_for_pinned_runtime:rust-v1.2.3", "stage_sdk"]
assert calls == ["generate_types", "stage_sdk"]
def test_stage_runtime_stages_binary_without_type_generation(tmp_path: Path) -> None:
@@ -383,9 +366,6 @@ def test_stage_runtime_stages_binary_without_type_generation(tmp_path: Path) ->
def fake_generate_types() -> None:
calls.append("generate_types")
def fake_generate_types_for_pinned_runtime(_git_ref: str | None = None) -> None:
calls.append("generate_types_for_pinned_runtime")
def fake_stage_sdk_package(
_staging_dir: Path, _sdk_version: str, _runtime_version: str
) -> Path:
@@ -402,7 +382,6 @@ def test_stage_runtime_stages_binary_without_type_generation(tmp_path: Path) ->
ops = script.CliOps(
generate_types=fake_generate_types,
generate_types_for_pinned_runtime=fake_generate_types_for_pinned_runtime,
stage_python_sdk_package=fake_stage_sdk_package,
stage_python_runtime_package=fake_stage_runtime_package,
current_sdk_version=fake_current_sdk_version,