Compare commits

...

3 Commits

Author SHA1 Message Date
Shaqayeq
1765737b0c Add PR CI for the Python SDK 2026-03-19 17:13:10 -07:00
Shaqayeq
c270299b28 Make Python artifact tests work on 3.10 2026-03-19 17:12:59 -07:00
Shaqayeq
3d853de33f Maintain Python SDK artifacts for the pinned runtime 2026-03-19 17:00:43 -07:00
6 changed files with 528 additions and 52 deletions

View File

@@ -6,11 +6,56 @@ on:
pull_request: {}
jobs:
sdks:
changed:
name: Detect changed areas
runs-on: ubuntu-24.04
outputs:
python: ${{ steps.detect.outputs.python }}
typescript: ${{ steps.detect.outputs.typescript }}
workflows: ${{ steps.detect.outputs.workflows }}
steps:
- uses: actions/checkout@v6
with:
fetch-depth: 0
- name: Detect changed paths (no external action)
id: detect
shell: bash
run: |
set -euo pipefail
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
BASE_SHA='${{ github.event.pull_request.base.sha }}'
HEAD_SHA='${{ github.event.pull_request.head.sha }}'
echo "Base SHA: $BASE_SHA"
echo "Head SHA: $HEAD_SHA"
mapfile -t files < <(git diff --name-only --no-renames "$BASE_SHA" "$HEAD_SHA")
else
files=("sdk/force" "codex-rs/force" ".github/force" "package.json")
fi
python=false
typescript=false
workflows=false
for f in "${files[@]}"; do
[[ $f == sdk/python/* || $f == sdk/python-runtime/* ]] && python=true
[[ $f == sdk/typescript/* || $f == codex-rs/* || $f == package.json || $f == pnpm-lock.yaml || $f == pnpm-workspace.yaml ]] && typescript=true
[[ $f == .github/* ]] && workflows=true
done
echo "python=$python" >> "$GITHUB_OUTPUT"
echo "typescript=$typescript" >> "$GITHUB_OUTPUT"
echo "workflows=$workflows" >> "$GITHUB_OUTPUT"
typescript_sdk:
name: TypeScript SDK
runs-on:
group: codex-runners
labels: codex-linux-x64
timeout-minutes: 10
needs: changed
if: ${{ needs.changed.outputs.typescript == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
steps:
- name: Checkout repository
uses: actions/checkout@v6
@@ -50,3 +95,190 @@ jobs:
- name: Test SDK packages
run: pnpm -r --filter ./sdk/typescript run test
python_generated:
name: Python Pinned Generated
runs-on:
group: codex-runners
labels: codex-linux-x64
timeout-minutes: 10
needs: changed
if: ${{ needs.changed.outputs.python == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
steps:
- uses: actions/checkout@v6
- name: Setup Python
uses: actions/setup-python@v6
with:
python-version: "3.13"
cache: pip
cache-dependency-path: sdk/python/pyproject.toml
- name: Install Python SDK dependencies
run: python -m pip install -e "sdk/python[dev]"
- name: Regenerate Python SDK artifacts from pinned runtime
run: python sdk/python/scripts/update_sdk_artifacts.py generate-types-for-pinned-runtime
- name: Check for generated drift
run: git diff --exit-code -- sdk/python
python_quality:
name: Python Quality (${{ matrix.python-version }})
runs-on:
group: codex-runners
labels: codex-linux-x64
timeout-minutes: 15
needs: changed
if: ${{ needs.changed.outputs.python == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
strategy:
fail-fast: false
matrix:
python-version: ["3.10", "3.13"]
steps:
- uses: actions/checkout@v6
- name: Setup Python
uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
cache: pip
cache-dependency-path: sdk/python/pyproject.toml
- name: Install Python SDK dependencies
run: python -m pip install -e "sdk/python[dev]" build twine mypy
- name: Ruff lint
run: >
python -m ruff check
sdk/python/src
sdk/python/tests
sdk/python/scripts
--exclude sdk/python/tests/test_real_app_server_integration.py
- name: Mypy
run: python -m mypy --config-file sdk/python/mypy.ini sdk/python/src/codex_app_server
- name: Pytest
run: >
python -m pytest
sdk/python/tests/test_artifact_workflow_and_binaries.py
sdk/python/tests/test_async_client_behavior.py
sdk/python/tests/test_client_rpc_methods.py
sdk/python/tests/test_public_api_runtime_behavior.py
sdk/python/tests/test_public_api_signatures.py
- name: Build Python SDK
run: python -m build sdk/python --outdir sdk/python/dist-ci
- name: Twine check
run: python -m twine check sdk/python/dist-ci/*
python_platform_smoke:
name: Python Platform Smoke (${{ matrix.name }})
runs-on: ${{ matrix.runs_on || matrix.runner }}
timeout-minutes: 15
needs: changed
if: ${{ needs.changed.outputs.python == 'true' || needs.changed.outputs.workflows == 'true' || github.event_name == 'push' }}
strategy:
fail-fast: false
matrix:
include:
- name: macOS
runner: macos-15-xlarge
- name: Windows
runner: windows-x64
runs_on:
group: codex-runners
labels: codex-windows-x64
steps:
- uses: actions/checkout@v6
- name: Setup Python
uses: actions/setup-python@v6
with:
python-version: "3.13"
cache: pip
cache-dependency-path: sdk/python/pyproject.toml
- name: Install Python SDK dependencies
run: python -m pip install -e "sdk/python[dev]" build
- name: Pytest
run: >
python -m pytest
sdk/python/tests/test_async_client_behavior.py
sdk/python/tests/test_client_rpc_methods.py
sdk/python/tests/test_public_api_runtime_behavior.py
sdk/python/tests/test_public_api_signatures.py
- name: Build Python SDK wheel
run: python -m build sdk/python --outdir sdk/python/dist-ci
- name: Install built wheel and smoke imports
shell: bash
run: |
set -euo pipefail
python -m venv .venv-wheel-smoke
if [[ "${{ runner.os }}" == "Windows" ]]; then
VENV_PYTHON=".venv-wheel-smoke/Scripts/python.exe"
else
VENV_PYTHON=".venv-wheel-smoke/bin/python"
fi
"$VENV_PYTHON" -m pip install --upgrade pip
"$VENV_PYTHON" -m pip install sdk/python/dist-ci/*.whl
"$VENV_PYTHON" -c "import codex_app_server; from codex_app_server import AsyncCodex, AsyncThread, Codex, RunResult, Thread; assert codex_app_server.__name__ == 'codex_app_server'; assert Codex.__name__ == 'Codex'; assert AsyncCodex.__name__ == 'AsyncCodex'; assert Thread.__name__ == 'Thread'; assert AsyncThread.__name__ == 'AsyncThread'; assert RunResult.__name__ == 'RunResult'"
sdks:
name: sdks
runs-on: ubuntu-24.04
if: ${{ always() }}
needs:
- changed
- typescript_sdk
- python_generated
- python_quality
- python_platform_smoke
steps:
- name: Check SDK results
shell: bash
run: |
set -euo pipefail
should_run_typescript=false
should_run_python=false
if [[ "${{ github.event_name }}" == "push" || "${{ needs.changed.outputs.workflows }}" == "true" || "${{ needs.changed.outputs.typescript }}" == "true" ]]; then
should_run_typescript=true
fi
if [[ "${{ github.event_name }}" == "push" || "${{ needs.changed.outputs.workflows }}" == "true" || "${{ needs.changed.outputs.python }}" == "true" ]]; then
should_run_python=true
fi
if [[ "$should_run_typescript" == "false" && "$should_run_python" == "false" ]]; then
echo "No SDK-relevant changes detected."
exit 0
fi
if [[ "$should_run_typescript" == "true" && "${{ needs.typescript_sdk.result }}" != "success" ]]; then
echo "TypeScript job result: ${{ needs.typescript_sdk.result }}"
exit 1
fi
if [[ "$should_run_python" == "true" && "${{ needs.python_generated.result }}" != "success" ]]; then
echo "Python generated job result: ${{ needs.python_generated.result }}"
exit 1
fi
if [[ "$should_run_python" == "true" && "${{ needs.python_quality.result }}" != "success" ]]; then
echo "Python quality job result: ${{ needs.python_quality.result }}"
exit 1
fi
if [[ "$should_run_python" == "true" && "${{ needs.python_platform_smoke.result }}" != "success" ]]; then
echo "Python platform smoke job result: ${{ needs.python_platform_smoke.result }}"
exit 1
fi
echo "SDK checks passed."

19
sdk/python/mypy.ini Normal file
View File

@@ -0,0 +1,19 @@
# Mypy configuration for the Python SDK (sdk/python), targeting the oldest
# supported interpreter so 3.10-incompatible syntax is caught in CI.
[mypy]
python_version = 3.10
mypy_path = sdk/python/src
check_untyped_defs = True
warn_unused_ignores = True
no_implicit_optional = True
# Generated code is excluded from file discovery here and additionally
# silenced per-module below (belt and braces for import-followed modules).
exclude = ^sdk/python/src/codex_app_server/generated/
# Hand-written modules not yet clean under strict settings; errors are
# suppressed per-module rather than loosening the global flags.
[mypy-codex_app_server.api]
ignore_errors = True
[mypy-codex_app_server.async_client]
ignore_errors = True
[mypy-codex_app_server.client]
ignore_errors = True
[mypy-codex_app_server.generated.*]
ignore_errors = True

View File

@@ -3,6 +3,7 @@ from __future__ import annotations
import argparse
import importlib
import importlib.util
import json
import platform
import re
@@ -65,6 +66,81 @@ def run_python_module(module: str, args: list[str], cwd: Path) -> None:
run([sys.executable, "-m", module, *args], cwd)
def run_capture(cmd: list[str], cwd: Path) -> str:
    """Run *cmd* in *cwd* and return its captured stdout.

    Raises:
        RuntimeError: if the command exits non-zero; the message includes
            the exit code, the command line, and both captured streams.
    """
    completed = subprocess.run(
        cmd,
        cwd=str(cwd),
        text=True,
        capture_output=True,
        check=False,
    )
    if completed.returncode == 0:
        return completed.stdout
    raise RuntimeError(
        f"Command failed ({completed.returncode}): {' '.join(cmd)}\n"
        f"STDOUT:\n{completed.stdout}\nSTDERR:\n{completed.stderr}"
    )
def runtime_setup_path() -> Path:
    """Absolute path of the SDK's ``_runtime_setup.py`` helper module."""
    return sdk_root().joinpath("_runtime_setup.py")
def pinned_runtime_version() -> str:
    """Load ``_runtime_setup.py`` by file path and return its pinned runtime version.

    Raises:
        RuntimeError: if an import spec (or its loader) cannot be created
            for the runtime setup module.
    """
    setup_path = runtime_setup_path()
    spec = importlib.util.spec_from_file_location("_runtime_setup", setup_path)
    if spec is None or spec.loader is None:
        raise RuntimeError(f"Failed to load runtime setup module: {setup_path}")
    loaded = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(loaded)
    return loaded.pinned_runtime_version()  # type: ignore[no-any-return]
def pinned_runtime_git_ref() -> str:
    """Git tag name for the pinned runtime, e.g. ``rust-v1.2.3``."""
    version = pinned_runtime_version()
    return f"rust-v{version}"
def ensure_git_ref_available(git_ref: str) -> None:
    """Make *git_ref* resolvable locally, shallow-fetching the tag if needed.

    Probes with ``git rev-parse --verify``; only when that fails does it
    fetch the ref from ``origin`` as a depth-1 tag.
    """
    probe = subprocess.run(
        ["git", "rev-parse", "--verify", git_ref],
        cwd=str(repo_root()),
        text=True,
        capture_output=True,
        check=False,
    )
    if probe.returncode != 0:
        run(["git", "fetch", "origin", "tag", git_ref, "--depth=1"], repo_root())
def read_git_file(git_ref: str, repo_path: str) -> str:
    """Return the contents of *repo_path* as stored at *git_ref* (via ``git show``)."""
    object_spec = f"{git_ref}:{repo_path}"
    return run_capture(["git", "show", object_spec], repo_root())
def materialize_schema_files_from_git_ref(git_ref: str, out_dir: Path) -> tuple[Path, Path]:
    """Write the vendored schema files from *git_ref* into *out_dir*.

    Returns:
        The schema bundle path and the ``ServerNotification.json`` path,
        in that order.
    """
    out_dir.mkdir(parents=True, exist_ok=True)
    schema_dir = "codex-rs/app-server-protocol/schema/json"
    written: list[Path] = []
    for file_name in (
        "codex_app_server_protocol.v2.schemas.json",
        "ServerNotification.json",
    ):
        destination = out_dir / file_name
        destination.write_text(read_git_file(git_ref, f"{schema_dir}/{file_name}"))
        written.append(destination)
    return written[0], written[1]
def current_sdk_version() -> str:
match = re.search(
r'^version = "([^"]+)"$',
@@ -396,8 +472,9 @@ def _annotate_schema(value: Any, base: str | None = None) -> None:
_annotate_schema(child, base)
def _normalized_schema_bundle_text() -> str:
schema = json.loads(schema_bundle_path().read_text())
def _normalized_schema_bundle_text(schema_bundle: Path | None = None) -> str:
bundle = schema_bundle or schema_bundle_path()
schema = json.loads(bundle.read_text())
definitions = schema.get("definitions", {})
if isinstance(definitions, dict):
for definition in definitions.values():
@@ -409,16 +486,17 @@ def _normalized_schema_bundle_text() -> str:
return json.dumps(schema, indent=2, sort_keys=True) + "\n"
def generate_v2_all() -> None:
def generate_v2_all(schema_bundle: Path | None = None) -> None:
out_path = sdk_root() / "src" / "codex_app_server" / "generated" / "v2_all.py"
out_dir = out_path.parent
old_package_dir = out_dir / "v2_all"
if old_package_dir.exists():
shutil.rmtree(old_package_dir)
out_dir.mkdir(parents=True, exist_ok=True)
bundle = schema_bundle or schema_bundle_path()
with tempfile.TemporaryDirectory() as td:
normalized_bundle = Path(td) / schema_bundle_path().name
normalized_bundle.write_text(_normalized_schema_bundle_text())
normalized_bundle = Path(td) / bundle.name
normalized_bundle.write_text(_normalized_schema_bundle_text(bundle))
run_python_module(
"datamodel_code_generator",
[
@@ -455,9 +533,14 @@ def generate_v2_all() -> None:
_normalize_generated_timestamps(out_path)
def _notification_specs() -> list[tuple[str, str]]:
def _notification_specs(
server_notification_schema: Path | None = None,
) -> list[tuple[str, str]]:
server_notification_path = server_notification_schema or (
schema_root_dir() / "ServerNotification.json"
)
server_notifications = json.loads(
(schema_root_dir() / "ServerNotification.json").read_text()
server_notification_path.read_text()
)
one_of = server_notifications.get("oneOf", [])
generated_source = (
@@ -494,7 +577,9 @@ def _notification_specs() -> list[tuple[str, str]]:
return specs
def generate_notification_registry() -> None:
def generate_notification_registry(
server_notification_schema: Path | None = None,
) -> None:
out = (
sdk_root()
/ "src"
@@ -502,7 +587,7 @@ def generate_notification_registry() -> None:
/ "generated"
/ "notification_registry.py"
)
specs = _notification_specs()
specs = _notification_specs(server_notification_schema)
class_names = sorted({class_name for _, class_name in specs})
lines = [
@@ -558,6 +643,7 @@ class PublicFieldSpec:
@dataclass(frozen=True)
class CliOps:
generate_types: Callable[[], None]
generate_types_for_pinned_runtime: Callable[[str | None], None]
stage_python_sdk_package: Callable[[Path, str, str], Path]
stage_python_runtime_package: Callable[[Path, str, Path], Path]
current_sdk_version: Callable[[], str]
@@ -867,9 +953,9 @@ def generate_public_api_flat_methods() -> None:
exclude={"thread_id", "input"},
)
source = public_api_path.read_text()
original_source = public_api_path.read_text()
source = _replace_generated_block(
source,
original_source,
"Codex.flat_methods",
_render_codex_block(
thread_start_fields,
@@ -898,16 +984,35 @@ def generate_public_api_flat_methods() -> None:
"AsyncThread.flat_methods",
_render_async_thread_block(turn_start_fields),
)
if source == original_source:
return
public_api_path.write_text(source)
def generate_types() -> None:
def generate_types(
schema_bundle: Path | None = None,
server_notification_schema: Path | None = None,
) -> None:
# v2_all is the authoritative generated surface.
generate_v2_all()
generate_notification_registry()
generate_v2_all(schema_bundle)
generate_notification_registry(server_notification_schema)
generate_public_api_flat_methods()
def generate_types_for_pinned_runtime(git_ref: str | None = None) -> None:
    """Regenerate SDK artifacts from the schemas vendored at the pinned runtime.

    Args:
        git_ref: Optional git ref to read the schema files from; falls back
            to the pinned runtime tag derived from ``_runtime_setup.py``.
    """
    ref = git_ref or pinned_runtime_git_ref()
    ensure_git_ref_available(ref)
    # Schemas are materialized into a throwaway directory that is cleaned
    # up once generation completes.
    with tempfile.TemporaryDirectory(prefix="codex-python-pinned-schema-") as temp_root:
        bundle, notifications = materialize_schema_files_from_git_ref(ref, Path(temp_root))
        generate_types(
            schema_bundle=bundle,
            server_notification_schema=notifications,
        )
def build_parser() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser(description="Single SDK maintenance entrypoint")
subparsers = parser.add_subparsers(dest="command", required=True)
@@ -915,6 +1020,14 @@ def build_parser() -> argparse.ArgumentParser:
subparsers.add_parser(
"generate-types", help="Regenerate Python protocol-derived types"
)
pinned_types_parser = subparsers.add_parser(
"generate-types-for-pinned-runtime",
help="Regenerate Python protocol-derived types from the pinned runtime version",
)
pinned_types_parser.add_argument(
"--git-ref",
help="Optional git ref to source vendored schema files from",
)
stage_sdk_parser = subparsers.add_parser(
"stage-sdk",
@@ -964,6 +1077,7 @@ def parse_args(argv: Sequence[str] | None = None) -> argparse.Namespace:
def default_cli_ops() -> CliOps:
return CliOps(
generate_types=generate_types,
generate_types_for_pinned_runtime=generate_types_for_pinned_runtime,
stage_python_sdk_package=stage_python_sdk_package,
stage_python_runtime_package=stage_python_runtime_package,
current_sdk_version=current_sdk_version,
@@ -973,6 +1087,8 @@ def default_cli_ops() -> CliOps:
def run_command(args: argparse.Namespace, ops: CliOps) -> None:
if args.command == "generate-types":
ops.generate_types()
elif args.command == "generate-types-for-pinned-runtime":
ops.generate_types_for_pinned_runtime(args.git_ref)
elif args.command == "stage-sdk":
ops.generate_types()
ops.stage_python_sdk_package(

View File

@@ -52,6 +52,23 @@ from ._run import (
_collect_run_result,
)
__all__ = [
"AsyncCodex",
"AsyncThread",
"AsyncTurnHandle",
"Codex",
"ImageInput",
"Input",
"InputItem",
"LocalImageInput",
"MentionInput",
"RunResult",
"SkillInput",
"TextInput",
"Thread",
"TurnHandle",
]
def _split_user_agent(user_agent: str) -> tuple[str | None, str | None]:
raw = user_agent.strip()

View File

@@ -1133,6 +1133,13 @@ class GuardianRiskLevel(Enum):
high = "high"
class HazelnutScope(Enum):
example = "example"
workspace_shared = "workspace-shared"
all_shared = "all-shared"
personal = "personal"
class HookEventName(Enum):
session_start = "sessionStart"
stop = "stop"
@@ -1378,13 +1385,6 @@ class LogoutAccountResponse(BaseModel):
)
class MarketplaceInterface(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
display_name: Annotated[str | None, Field(alias="displayName")] = None
class McpAuthStatus(Enum):
unsupported = "unsupported"
not_logged_in = "notLoggedIn"
@@ -1633,13 +1633,6 @@ class PluginInstallParams(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
force_remote_sync: Annotated[
bool | None,
Field(
alias="forceRemoteSync",
description="When true, apply the remote plugin change before the local install flow.",
),
] = None
marketplace_path: Annotated[AbsolutePathBuf, Field(alias="marketplacePath")]
plugin_name: Annotated[str, Field(alias="pluginName")]
@@ -1744,13 +1737,6 @@ class PluginUninstallParams(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
force_remote_sync: Annotated[
bool | None,
Field(
alias="forceRemoteSync",
description="When true, apply the remote plugin change before the local uninstall flow.",
),
] = None
plugin_id: Annotated[str, Field(alias="pluginId")]
@@ -1761,6 +1747,13 @@ class PluginUninstallResponse(BaseModel):
)
class ProductSurface(Enum):
chatgpt = "chatgpt"
codex = "codex"
api = "api"
atlas = "atlas"
class RateLimitWindow(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
@@ -1913,6 +1906,15 @@ class ReasoningTextDeltaNotification(BaseModel):
turn_id: Annotated[str, Field(alias="turnId")]
class RemoteSkillSummary(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
description: str
id: str
name: str
class RequestId(RootModel[str | int]):
model_config = ConfigDict(
populate_by_name=True,
@@ -1972,6 +1974,7 @@ class ReasoningResponseItem(BaseModel):
)
content: list[ReasoningItemContent] | None = None
encrypted_content: str | None = None
id: str
summary: list[ReasoningItemReasoningSummary]
type: Annotated[Literal["reasoning"], Field(title="ReasoningResponseItemType")]
@@ -2596,6 +2599,41 @@ class SkillsListParams(BaseModel):
] = None
class SkillsRemoteReadParams(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
enabled: bool | None = False
hazelnut_scope: Annotated[HazelnutScope | None, Field(alias="hazelnutScope")] = (
"example"
)
product_surface: Annotated[ProductSurface | None, Field(alias="productSurface")] = (
"codex"
)
class SkillsRemoteReadResponse(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
data: list[RemoteSkillSummary]
class SkillsRemoteWriteParams(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
hazelnut_id: Annotated[str, Field(alias="hazelnutId")]
class SkillsRemoteWriteResponse(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
id: str
path: str
class SubAgentSourceValue(Enum):
review = "review"
compact = "compact"
@@ -3012,7 +3050,6 @@ class ThreadRealtimeAudioChunk(BaseModel):
populate_by_name=True,
)
data: str
item_id: Annotated[str | None, Field(alias="itemId")] = None
num_channels: Annotated[int, Field(alias="numChannels", ge=0)]
sample_rate: Annotated[int, Field(alias="sampleRate", ge=0)]
samples_per_channel: Annotated[
@@ -3761,6 +3798,29 @@ class PluginReadRequest(BaseModel):
params: PluginReadParams
class SkillsRemoteListRequest(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
id: RequestId
method: Annotated[
Literal["skills/remote/list"], Field(title="Skills/remote/listRequestMethod")
]
params: SkillsRemoteReadParams
class SkillsRemoteExportRequest(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
id: RequestId
method: Annotated[
Literal["skills/remote/export"],
Field(title="Skills/remote/exportRequestMethod"),
]
params: SkillsRemoteWriteParams
class AppListRequest(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
@@ -4619,7 +4679,6 @@ class PluginMarketplaceEntry(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
interface: MarketplaceInterface | None = None
name: str
path: AbsolutePathBuf
plugins: list[PluginSummary]
@@ -5530,6 +5589,14 @@ class FunctionCallOutputBody(RootModel[str | list[FunctionCallOutputContentItem]
root: str | list[FunctionCallOutputContentItem]
class FunctionCallOutputPayload(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
)
body: FunctionCallOutputBody
success: bool | None = None
class GetAccountRateLimitsResponse(BaseModel):
model_config = ConfigDict(
populate_by_name=True,
@@ -5627,7 +5694,7 @@ class FunctionCallOutputResponseItem(BaseModel):
populate_by_name=True,
)
call_id: str
output: FunctionCallOutputBody
output: FunctionCallOutputPayload
type: Annotated[
Literal["function_call_output"],
Field(title="FunctionCallOutputResponseItemType"),
@@ -5639,7 +5706,7 @@ class CustomToolCallOutputResponseItem(BaseModel):
populate_by_name=True,
)
call_id: str
output: FunctionCallOutputBody
output: FunctionCallOutputPayload
type: Annotated[
Literal["custom_tool_call_output"],
Field(title="CustomToolCallOutputResponseItemType"),
@@ -6072,6 +6139,8 @@ class ClientRequest(
| SkillsListRequest
| PluginListRequest
| PluginReadRequest
| SkillsRemoteListRequest
| SkillsRemoteExportRequest
| AppListRequest
| FsReadFileRequest
| FsWriteFileRequest
@@ -6133,6 +6202,8 @@ class ClientRequest(
| SkillsListRequest
| PluginListRequest
| PluginReadRequest
| SkillsRemoteListRequest
| SkillsRemoteExportRequest
| AppListRequest
| FsReadFileRequest
| FsWriteFileRequest

View File

@@ -5,7 +5,6 @@ import importlib.util
import io
import json
import sys
import tomllib
import urllib.error
from pathlib import Path
@@ -168,6 +167,24 @@ def test_examples_readme_matches_pinned_runtime_version() -> None:
)
def test_pinned_runtime_git_ref_matches_runtime_setup_pin() -> None:
    """The script's git ref must be ``rust-v`` plus the runtime setup pin."""
    script = _load_update_script_module()
    runtime_setup = _load_runtime_setup_module()
    expected = f"rust-v{runtime_setup.pinned_runtime_version()}"
    assert script.pinned_runtime_git_ref() == expected
def test_parser_supports_generate_types_for_pinned_runtime() -> None:
    """The CLI accepts the pinned-runtime subcommand; ``--git-ref`` defaults to None."""
    module = _load_update_script_module()
    parsed = module.parse_args(["generate-types-for-pinned-runtime"])
    assert parsed.command == "generate-types-for-pinned-runtime"
    assert parsed.git_ref is None
def test_release_metadata_retries_without_invalid_auth(monkeypatch: pytest.MonkeyPatch) -> None:
runtime_setup = _load_runtime_setup_module()
authorizations: list[str | None] = []
@@ -193,9 +210,7 @@ def test_release_metadata_retries_without_invalid_auth(monkeypatch: pytest.Monke
def test_runtime_package_is_wheel_only_and_builds_platform_specific_wheels() -> None:
pyproject = tomllib.loads(
(ROOT.parent / "python-runtime" / "pyproject.toml").read_text()
)
pyproject_text = (ROOT.parent / "python-runtime" / "pyproject.toml").read_text()
hook_source = (ROOT.parent / "python-runtime" / "hatch_build.py").read_text()
hook_tree = ast.parse(hook_source)
initialize_fn = next(
@@ -235,14 +250,12 @@ def test_runtime_package_is_wheel_only_and_builds_platform_specific_wheels() ->
and isinstance(node.value, ast.Constant)
}
assert pyproject["tool"]["hatch"]["build"]["targets"]["wheel"] == {
"packages": ["src/codex_cli_bin"],
"include": ["src/codex_cli_bin/bin/**"],
"hooks": {"custom": {}},
}
assert pyproject["tool"]["hatch"]["build"]["targets"]["sdist"] == {
"hooks": {"custom": {}},
}
assert "[tool.hatch.build.targets.wheel]" in pyproject_text
assert 'packages = ["src/codex_cli_bin"]' in pyproject_text
assert 'include = ["src/codex_cli_bin/bin/**"]' in pyproject_text
assert "[tool.hatch.build.targets.wheel.hooks.custom]" in pyproject_text
assert "[tool.hatch.build.targets.sdist]" in pyproject_text
assert "[tool.hatch.build.targets.sdist.hooks.custom]" in pyproject_text
assert sdist_guard is not None
assert build_data_assignments == {"pure_python": False, "infer_tag": True}
@@ -322,6 +335,9 @@ def test_stage_sdk_runs_type_generation_before_staging(tmp_path: Path) -> None:
def fake_generate_types() -> None:
calls.append("generate_types")
def fake_generate_types_for_pinned_runtime(_git_ref: str | None = None) -> None:
calls.append("generate_types_for_pinned_runtime")
def fake_stage_sdk_package(
_staging_dir: Path, _sdk_version: str, _runtime_version: str
) -> Path:
@@ -338,6 +354,7 @@ def test_stage_sdk_runs_type_generation_before_staging(tmp_path: Path) -> None:
ops = script.CliOps(
generate_types=fake_generate_types,
generate_types_for_pinned_runtime=fake_generate_types_for_pinned_runtime,
stage_python_sdk_package=fake_stage_sdk_package,
stage_python_runtime_package=fake_stage_runtime_package,
current_sdk_version=fake_current_sdk_version,
@@ -366,6 +383,9 @@ def test_stage_runtime_stages_binary_without_type_generation(tmp_path: Path) ->
def fake_generate_types() -> None:
calls.append("generate_types")
def fake_generate_types_for_pinned_runtime(_git_ref: str | None = None) -> None:
calls.append("generate_types_for_pinned_runtime")
def fake_stage_sdk_package(
_staging_dir: Path, _sdk_version: str, _runtime_version: str
) -> Path:
@@ -382,6 +402,7 @@ def test_stage_runtime_stages_binary_without_type_generation(tmp_path: Path) ->
ops = script.CliOps(
generate_types=fake_generate_types,
generate_types_for_pinned_runtime=fake_generate_types_for_pinned_runtime,
stage_python_sdk_package=fake_stage_sdk_package,
stage_python_runtime_package=fake_stage_runtime_package,
current_sdk_version=fake_current_sdk_version,