Mirror of https://github.com/openai/codex.git (synced 2026-03-15 10:26:29 +03:00)

Compare commits: rate-limit...pr4158 (12 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 98df710750 | |
| | 639a6fd2f3 | |
| | db4aa6f916 | |
| | cb96f4f596 | |
| | 5b910f1f05 | |
| | af6304c641 | |
| | b90eeabd74 | |
| | f7d2f3e54d | |
| | 3fe3b6328b | |
| | 8144ddb3da | |
| | 9336f2b84b | |
| | af37785bca | |
20 .github/workflows/ci.yml vendored
@@ -27,12 +27,26 @@ jobs:
- name: Install dependencies
run: pnpm install --frozen-lockfile

# Run all tasks using workspace filters
# build_npm_package.py requires DotSlash when staging releases.
- uses: facebook/install-dotslash@v2

- name: Ensure staging a release works.
- name: Stage npm package
env:
GH_TOKEN: ${{ github.token }}
run: ./codex-cli/scripts/stage_release.sh
run: |
set -euo pipefail
CODEX_VERSION=0.40.0
PACK_OUTPUT="${RUNNER_TEMP}/codex-npm.tgz"
python3 ./codex-cli/scripts/build_npm_package.py \
--release-version "$CODEX_VERSION" \
--pack-output "$PACK_OUTPUT"
echo "PACK_OUTPUT=$PACK_OUTPUT" >> "$GITHUB_ENV"

- name: Upload staged npm package artifact
uses: actions/upload-artifact@v4
with:
name: codex-npm-staging
path: ${{ env.PACK_OUTPUT }}

- name: Ensure root README.md contains only ASCII and certain Unicode code points
run: ./scripts/asciicheck.py README.md
6 .github/workflows/rust-ci.yml vendored
@@ -57,7 +57,7 @@ jobs:
working-directory: codex-rs
steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.90
with:
components: rustfmt
- name: cargo fmt
@@ -75,7 +75,7 @@ jobs:
working-directory: codex-rs
steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.90
- uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2
with:
tool: cargo-shear
@@ -143,7 +143,7 @@ jobs:

steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.90
with:
targets: ${{ matrix.target }}
components: clippy
106 .github/workflows/rust-release.yml vendored
@@ -77,7 +77,7 @@ jobs:

steps:
- uses: actions/checkout@v5
- uses: dtolnay/rust-toolchain@1.89
- uses: dtolnay/rust-toolchain@1.90
with:
targets: ${{ matrix.target }}

@@ -173,6 +173,9 @@ jobs:
outputs:
version: ${{ steps.release_name.outputs.name }}
tag: ${{ github.ref_name }}
should_publish_npm: ${{ steps.npm_publish_settings.outputs.should_publish }}
npm_tag: ${{ steps.npm_publish_settings.outputs.npm_tag }}
slice_tags: ${{ steps.slice_tags.outputs.value }}

steps:
- name: Checkout repository
@@ -193,21 +196,59 @@ jobs:
version="${GITHUB_REF_NAME#rust-v}"
echo "name=${version}" >> $GITHUB_OUTPUT

- name: Determine npm publish settings
id: npm_publish_settings
env:
VERSION: ${{ steps.release_name.outputs.name }}
run: |
set -euo pipefail
version="${VERSION}"

if [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
echo "should_publish=true" >> "$GITHUB_OUTPUT"
echo "npm_tag=" >> "$GITHUB_OUTPUT"
elif [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$ ]]; then
echo "should_publish=true" >> "$GITHUB_OUTPUT"
echo "npm_tag=alpha" >> "$GITHUB_OUTPUT"
else
echo "should_publish=false" >> "$GITHUB_OUTPUT"
echo "npm_tag=" >> "$GITHUB_OUTPUT"
fi

- name: Determine slice tags
id: slice_tags
run: |
set -euo pipefail
slice_tags=$(python - <<'PY'
import importlib.util
import sys
from pathlib import Path

module_path = Path("codex-cli/scripts/build_npm_package.py").resolve()
sys.path.insert(0, str(module_path.parent))
spec = importlib.util.spec_from_file_location("build_npm_package", module_path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
print(" ".join(module.DEFAULT_SLICE_TAGS), end="")
PY
)
echo "value=${slice_tags}" >> "$GITHUB_OUTPUT"

# build_npm_package.py requires DotSlash when staging releases.
- uses: facebook/install-dotslash@v2
- name: Stage npm package
env:
GH_TOKEN: ${{ github.token }}
run: |
set -euo pipefail
TMP_DIR="${RUNNER_TEMP}/npm-stage"
python3 codex-cli/scripts/stage_rust_release.py \
OUTPUT_DIR="${GITHUB_WORKSPACE}/dist/npm"
mkdir -p "${OUTPUT_DIR}"
./codex-cli/scripts/build_npm_package.py \
--release-version "${{ steps.release_name.outputs.name }}" \
--tmp "${TMP_DIR}"
mkdir -p dist/npm
# Produce an npm-ready tarball using `npm pack` and store it in dist/npm.
# We then rename it to a stable name used by our publishing script.
(cd "$TMP_DIR" && npm pack --pack-destination "${GITHUB_WORKSPACE}/dist/npm")
mv "${GITHUB_WORKSPACE}"/dist/npm/*.tgz \
"${GITHUB_WORKSPACE}/dist/npm/codex-npm-${{ steps.release_name.outputs.name }}.tgz"
--staging-dir "${TMP_DIR}" \
--pack-output "${OUTPUT_DIR}/codex-npm-${{ steps.release_name.outputs.name }}.tgz" \
--slice-pack-dir "${OUTPUT_DIR}"

- name: Create GitHub Release
uses: softprops/action-gh-release@v2
@@ -230,8 +271,8 @@ jobs:
# July 31, 2025: https://github.blog/changelog/2025-07-31-npm-trusted-publishing-with-oidc-is-generally-available/
# npm docs: https://docs.npmjs.com/trusted-publishers
publish-npm:
# Skip this step for pre-releases (alpha/beta).
if: ${{ !contains(needs.release.outputs.version, '-') }}
# Publish to npm for stable releases and alpha pre-releases with numeric suffixes.
if: ${{ needs.release.outputs.should_publish_npm == 'true' }}
name: publish-npm
needs: release
runs-on: ubuntu-latest
@@ -261,12 +302,51 @@ jobs:
mkdir -p dist/npm
gh release download "$tag" \
--repo "${GITHUB_REPOSITORY}" \
--pattern "codex-npm-${version}.tgz" \
--pattern "codex-npm-${version}*.tgz" \
--dir dist/npm

# No NODE_AUTH_TOKEN needed because we use OIDC.
- name: Publish to npm
run: npm publish "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${{ needs.release.outputs.version }}.tgz"
env:
VERSION: ${{ needs.release.outputs.version }}
NPM_TAG: ${{ needs.release.outputs.npm_tag }}
run: |
set -euo pipefail
tag_args=()
if [[ -n "${NPM_TAG}" ]]; then
tag_args+=(--tag "${NPM_TAG}")
fi

npm publish "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${VERSION}.tgz" "${tag_args[@]}"

- name: Publish slice npm packages
env:
VERSION: ${{ needs.release.outputs.version }}
NPM_TAG: ${{ needs.release.outputs.npm_tag }}
SLICE_TAGS: ${{ needs.release.outputs.slice_tags }}
run: |
set -euo pipefail
if [[ -z "${SLICE_TAGS}" ]]; then
echo "No slice tags defined; skipping slice publish." >&2
exit 0
fi

IFS=' ' read -r -a slice_tags <<<"${SLICE_TAGS}"
for slice_tag in "${slice_tags[@]}"; do
tarball="${GITHUB_WORKSPACE}/dist/npm/codex-npm-${VERSION}-${slice_tag}.tgz"
if [[ ! -f "${tarball}" ]]; then
echo "Missing slice tarball ${tarball}" >&2
exit 1
fi

publish_tag="${slice_tag}"
if [[ -n "${NPM_TAG}" ]]; then
publish_tag="${NPM_TAG}-${slice_tag}"
fi

echo "Publishing ${tarball} with npm tag '${publish_tag}'"
npm publish "${tarball}" --tag "${publish_tag}"
done

update-branch:
name: Update latest-alpha-cli branch
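For reference, a minimal sketch (not part of the diff) of how the regexes in the "Determine npm publish settings" step above classify release versions; when `npm_tag` is empty the workflow runs `npm publish` without `--tag`, which npm treats as the default `latest` dist-tag. The sample version strings are illustrative.

```bash
# Same regexes as the workflow step; versions passed to classify are examples only.
classify() {
  local version="$1"
  if [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
    echo "${version}: published (default 'latest' dist-tag)"
  elif [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$ ]]; then
    echo "${version}: published under the 'alpha' dist-tag"
  else
    echo "${version}: not published to npm"
  fi
}
classify 0.40.0          # stable release
classify 0.41.0-alpha.3  # alpha pre-release with numeric suffix
classify 0.41.0-beta.1   # any other suffix is skipped
```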
8 codex-cli/.gitignore vendored
@@ -1,7 +1 @@
# Added by ./scripts/install_native_deps.sh
/bin/codex-aarch64-apple-darwin
/bin/codex-aarch64-unknown-linux-musl
/bin/codex-linux-sandbox-arm64
/bin/codex-linux-sandbox-x64
/bin/codex-x86_64-apple-darwin
/bin/codex-x86_64-unknown-linux-musl
/vendor/
codex-cli/bin/codex.js
@@ -1,6 +1,7 @@
#!/usr/bin/env node
// Unified entry point for the Codex CLI.

import { existsSync } from "fs";
import path from "path";
import { fileURLToPath } from "url";

@@ -40,10 +41,10 @@ switch (platform) {
case "win32":
switch (arch) {
case "x64":
targetTriple = "x86_64-pc-windows-msvc.exe";
targetTriple = "x86_64-pc-windows-msvc";
break;
case "arm64":
targetTriple = "aarch64-pc-windows-msvc.exe";
targetTriple = "aarch64-pc-windows-msvc";
break;
default:
break;
@@ -57,7 +58,10 @@ if (!targetTriple) {
throw new Error(`Unsupported platform: ${platform} (${arch})`);
}

const binaryPath = path.join(__dirname, "..", "bin", `codex-${targetTriple}`);
const vendorRoot = path.join(__dirname, "..", "vendor");
const archRoot = path.join(vendorRoot, targetTriple);
const codexBinaryName = process.platform === "win32" ? "codex.exe" : "codex";
const binaryPath = path.join(archRoot, "codex", codexBinaryName);

// Use an asynchronous spawn instead of spawnSync so that Node is able to
// respond to signals (e.g. Ctrl-C / SIGINT) while the native binary is
@@ -66,23 +70,6 @@ const binaryPath = path.join(__dirname, "..", "bin", `codex-${targetTriple}`);
// receives a fatal signal, both processes exit in a predictable manner.
const { spawn } = await import("child_process");

async function tryImport(moduleName) {
try {
// eslint-disable-next-line node/no-unsupported-features/es-syntax
return await import(moduleName);
} catch (err) {
return null;
}
}

async function resolveRgDir() {
const ripgrep = await tryImport("@vscode/ripgrep");
if (!ripgrep?.rgPath) {
return null;
}
return path.dirname(ripgrep.rgPath);
}

function getUpdatedPath(newDirs) {
const pathSep = process.platform === "win32" ? ";" : ":";
const existingPath = process.env.PATH || "";
@@ -94,9 +81,9 @@ function getUpdatedPath(newDirs) {
}

const additionalDirs = [];
const rgDir = await resolveRgDir();
if (rgDir) {
additionalDirs.push(rgDir);
const pathDir = path.join(archRoot, "path");
if (existsSync(pathDir)) {
additionalDirs.push(pathDir);
}
const updatedPath = getUpdatedPath(additionalDirs);
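The updated launcher no longer resolves per-target binaries out of `bin/`; a quick sketch (assumed layout, based on the `vendor/` tree that `install_native_deps.py` later in this diff produces) of where it now expects the native CLI and ripgrep. The example triple is illustrative.

```bash
# Layout check only; the triple and repo-relative paths are assumptions.
triple="aarch64-apple-darwin"
ls codex-cli/vendor/"$triple"/codex/codex   # native binary spawned by bin/codex.js
ls codex-cli/vendor/"$triple"/path/rg       # directory prepended to PATH when present
```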
79 codex-cli/bin/rg Executable file
@@ -0,0 +1,79 @@
|
||||
#!/usr/bin/env dotslash
|
||||
|
||||
{
|
||||
"name": "rg",
|
||||
"platforms": {
|
||||
"macos-aarch64": {
|
||||
"size": 1787248,
|
||||
"hash": "blake3",
|
||||
"digest": "8d9942032585ea8ee805937634238d9aee7b210069f4703c88fbe568e26fb78a",
|
||||
"format": "tar.gz",
|
||||
"path": "ripgrep-14.1.1-aarch64-apple-darwin/rg",
|
||||
"providers": [
|
||||
{
|
||||
"url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-aarch64-apple-darwin.tar.gz"
|
||||
}
|
||||
]
|
||||
},
|
||||
"linux-aarch64": {
|
||||
"size": 2047405,
|
||||
"hash": "blake3",
|
||||
"digest": "0b670b8fa0a3df2762af2fc82cc4932f684ca4c02dbd1260d4f3133fd4b2a515",
|
||||
"format": "tar.gz",
|
||||
"path": "ripgrep-14.1.1-aarch64-unknown-linux-gnu/rg",
|
||||
"providers": [
|
||||
{
|
||||
"url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-aarch64-unknown-linux-gnu.tar.gz"
|
||||
}
|
||||
]
|
||||
},
|
||||
"macos-x86_64": {
|
||||
"size": 2082672,
|
||||
"hash": "blake3",
|
||||
"digest": "e9b862fc8da3127f92791f0ff6a799504154ca9d36c98bf3e60a81c6b1f7289e",
|
||||
"format": "tar.gz",
|
||||
"path": "ripgrep-14.1.1-x86_64-apple-darwin/rg",
|
||||
"providers": [
|
||||
{
|
||||
"url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-apple-darwin.tar.gz"
|
||||
}
|
||||
]
|
||||
},
|
||||
"linux-x86_64": {
|
||||
"size": 2566310,
|
||||
"hash": "blake3",
|
||||
"digest": "f73cca4e54d78c31f832c7f6e2c0b4db8b04fa3eaa747915727d570893dbee76",
|
||||
"format": "tar.gz",
|
||||
"path": "ripgrep-14.1.1-x86_64-unknown-linux-musl/rg",
|
||||
"providers": [
|
||||
{
|
||||
"url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-unknown-linux-musl.tar.gz"
|
||||
}
|
||||
]
|
||||
},
|
||||
"windows-x86_64": {
|
||||
"size": 2058893,
|
||||
"hash": "blake3",
|
||||
"digest": "a8ce1a6fed4f8093ee997e57f33254e94b2cd18e26358b09db599c89882eadbd",
|
||||
"format": "zip",
|
||||
"path": "ripgrep-14.1.1-x86_64-pc-windows-msvc/rg.exe",
|
||||
"providers": [
|
||||
{
|
||||
"url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-pc-windows-msvc.zip"
|
||||
}
|
||||
]
|
||||
},
|
||||
"windows-aarch64": {
|
||||
"size": 1667740,
|
||||
"hash": "blake3",
|
||||
"digest": "47b971a8c4fca1d23a4e7c19bd4d88465ebc395598458133139406d3bf85f3fa",
|
||||
"format": "zip",
|
||||
"path": "rg.exe",
|
||||
"providers": [
|
||||
{
|
||||
"url": "https://github.com/microsoft/ripgrep-prebuilt/releases/download/v13.0.0-13/ripgrep-v13.0.0-13-aarch64-pc-windows-msvc.zip"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
101 codex-cli/package-lock.json generated
@@ -2,118 +2,17 @@
|
||||
"name": "@openai/codex",
|
||||
"version": "0.0.0-dev",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@openai/codex",
|
||||
"version": "0.0.0-dev",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@vscode/ripgrep": "^1.15.14"
|
||||
},
|
||||
"bin": {
|
||||
"codex": "bin/codex.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=20"
|
||||
}
|
||||
},
|
||||
"node_modules/@vscode/ripgrep": {
|
||||
"version": "1.15.14",
|
||||
"resolved": "https://registry.npmjs.org/@vscode/ripgrep/-/ripgrep-1.15.14.tgz",
|
||||
"integrity": "sha512-/G1UJPYlm+trBWQ6cMO3sv6b8D1+G16WaJH1/DSqw32JOVlzgZbLkDxRyzIpTpv30AcYGMkCf5tUqGlW6HbDWw==",
|
||||
"hasInstallScript": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"https-proxy-agent": "^7.0.2",
|
||||
"proxy-from-env": "^1.1.0",
|
||||
"yauzl": "^2.9.2"
|
||||
}
|
||||
},
|
||||
"node_modules/agent-base": {
|
||||
"version": "7.1.4",
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz",
|
||||
"integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/buffer-crc32": {
|
||||
"version": "0.2.13",
|
||||
"resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz",
|
||||
"integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/debug": {
|
||||
"version": "4.4.1",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
|
||||
"integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ms": "^2.1.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"supports-color": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/fd-slicer": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz",
|
||||
"integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"pend": "~1.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/https-proxy-agent": {
|
||||
"version": "7.0.6",
|
||||
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz",
|
||||
"integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"agent-base": "^7.1.2",
|
||||
"debug": "4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/ms": {
|
||||
"version": "2.1.3",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
|
||||
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/pend": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz",
|
||||
"integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/proxy-from-env": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
|
||||
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/yauzl": {
|
||||
"version": "2.10.0",
|
||||
"resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz",
|
||||
"integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"buffer-crc32": "~0.2.3",
|
||||
"fd-slicer": "~1.1.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
codex-cli/package.json
@@ -11,17 +11,11 @@
},
"files": [
"bin",
"dist"
"vendor"
],
"repository": {
"type": "git",
"url": "git+https://github.com/openai/codex.git",
"directory": "codex-cli"
},
"dependencies": {
"@vscode/ripgrep": "^1.15.14"
},
"devDependencies": {
"prettier": "^3.3.3"
}
}
@@ -5,5 +5,16 @@ Run the following:
To build the 0.2.x or later version of the npm module, which runs the Rust version of the CLI, build it as follows:

```bash
./codex-cli/scripts/stage_rust_release.py --release-version 0.6.0
./codex-cli/scripts/build_npm_package.py --release-version 0.6.0
```

To produce per-platform "slice" tarballs in addition to the fat package, supply the `--slice-pack-dir` flag to write the outputs. For example:

```bash
./codex-cli/scripts/build_npm_package.py --release-version 0.6.0 --slice-pack-dir dist/npm
```

The command above writes the full tarball plus the per-platform archives named with the VS Code-style identifiers (for example, `codex-npm-0.6.0-darwin-arm64.tgz`). Note this will create `./codex-cli/vendor/` as a side-effect.
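The release workflow pairs `--pack-output` with `--slice-pack-dir` so a single invocation yields both the fat tarball and the slices; a local sketch of the same call follows (the output paths and the listed file names are illustrative).

```bash
./codex-cli/scripts/build_npm_package.py \
  --release-version 0.6.0 \
  --pack-output dist/npm/codex-npm-0.6.0.tgz \
  --slice-pack-dir dist/npm
ls dist/npm
# codex-npm-0.6.0.tgz  codex-npm-0.6.0-darwin-arm64.tgz  codex-npm-0.6.0-linux-x64.tgz  ...
```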
404 codex-cli/scripts/build_npm_package.py Executable file
@@ -0,0 +1,404 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Stage and optionally package the @openai/codex npm module."""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from typing import Sequence
|
||||
|
||||
from install_native_deps import CODEX_TARGETS, VENDOR_DIR_NAME
|
||||
|
||||
SCRIPT_DIR = Path(__file__).resolve().parent
|
||||
CODEX_CLI_ROOT = SCRIPT_DIR.parent
|
||||
REPO_ROOT = CODEX_CLI_ROOT.parent
|
||||
GITHUB_REPO = "openai/codex"
|
||||
|
||||
TARGET_TO_SLICE_TAG = {
|
||||
"x86_64-unknown-linux-musl": "linux-x64",
|
||||
"aarch64-unknown-linux-musl": "linux-arm64",
|
||||
"x86_64-apple-darwin": "darwin-x64",
|
||||
"aarch64-apple-darwin": "darwin-arm64",
|
||||
"x86_64-pc-windows-msvc": "win32-x64",
|
||||
"aarch64-pc-windows-msvc": "win32-arm64",
|
||||
}
|
||||
|
||||
_SLICE_ACCUMULATOR: dict[str, list[str]] = {}
|
||||
for target in CODEX_TARGETS:
|
||||
slice_tag = TARGET_TO_SLICE_TAG.get(target)
|
||||
if slice_tag is None:
|
||||
raise RuntimeError(f"Missing slice tag mapping for target '{target}'.")
|
||||
_SLICE_ACCUMULATOR.setdefault(slice_tag, []).append(target)
|
||||
|
||||
SLICE_TAG_TO_TARGETS = {tag: tuple(targets) for tag, targets in _SLICE_ACCUMULATOR.items()}
|
||||
|
||||
DEFAULT_SLICE_TAGS = tuple(SLICE_TAG_TO_TARGETS)
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
|
||||
parser = argparse.ArgumentParser(description="Build or stage the Codex CLI npm package.")
|
||||
parser.add_argument(
|
||||
"--version",
|
||||
help="Version number to write to package.json inside the staged package.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--release-version",
|
||||
help=(
|
||||
"Version to stage for npm release. When provided, the script also resolves the "
|
||||
"matching rust-release workflow unless --workflow-url is supplied."
|
||||
),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--workflow-url",
|
||||
help="Optional GitHub Actions workflow run URL used to download native binaries.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--staging-dir",
|
||||
type=Path,
|
||||
help=(
|
||||
"Directory to stage the package contents. Defaults to a new temporary directory "
|
||||
"if omitted. The directory must be empty when provided."
|
||||
),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--tmp",
|
||||
dest="staging_dir",
|
||||
type=Path,
|
||||
help=argparse.SUPPRESS,
|
||||
)
|
||||
parser.add_argument(
|
||||
"--pack-output",
|
||||
type=Path,
|
||||
help="Path where the generated npm tarball should be written.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--slice-pack-dir",
|
||||
type=Path,
|
||||
help=(
|
||||
"Directory where per-platform slice npm tarballs should be written. "
|
||||
"When provided, all known slices are packed unless --slices is given."
|
||||
),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--slices",
|
||||
nargs="+",
|
||||
choices=sorted(DEFAULT_SLICE_TAGS),
|
||||
help="Optional subset of slice tags to pack.",
|
||||
)
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
def main() -> int:
|
||||
args = parse_args()
|
||||
|
||||
if args.slices and args.slice_pack_dir is None:
|
||||
raise RuntimeError("--slice-pack-dir is required when specifying --slices.")
|
||||
|
||||
version = args.version
|
||||
release_version = args.release_version
|
||||
if release_version:
|
||||
if version and version != release_version:
|
||||
raise RuntimeError("--version and --release-version must match when both are provided.")
|
||||
version = release_version
|
||||
|
||||
if not version:
|
||||
raise RuntimeError("Must specify --version or --release-version.")
|
||||
|
||||
staging_dir, created_temp = prepare_staging_dir(args.staging_dir)
|
||||
|
||||
try:
|
||||
stage_sources(staging_dir, version)
|
||||
|
||||
workflow_url = args.workflow_url
|
||||
resolved_head_sha: str | None = None
|
||||
if not workflow_url:
|
||||
if release_version:
|
||||
workflow = resolve_release_workflow(version)
|
||||
workflow_url = workflow["url"]
|
||||
resolved_head_sha = workflow.get("headSha")
|
||||
else:
|
||||
workflow_url = resolve_latest_alpha_workflow_url()
|
||||
elif release_version:
|
||||
try:
|
||||
workflow = resolve_release_workflow(version)
|
||||
resolved_head_sha = workflow.get("headSha")
|
||||
except Exception:
|
||||
resolved_head_sha = None
|
||||
|
||||
if release_version and resolved_head_sha:
|
||||
print(f"should `git checkout {resolved_head_sha}`")
|
||||
|
||||
if not workflow_url:
|
||||
raise RuntimeError("Unable to determine workflow URL for native binaries.")
|
||||
|
||||
install_native_binaries(staging_dir, workflow_url)
|
||||
|
||||
slice_outputs: list[tuple[str, Path]] = []
|
||||
if args.slice_pack_dir is not None:
|
||||
slice_tags = tuple(args.slices or DEFAULT_SLICE_TAGS)
|
||||
slice_outputs = build_slice_packages(
|
||||
staging_dir,
|
||||
version,
|
||||
args.slice_pack_dir,
|
||||
slice_tags,
|
||||
)
|
||||
|
||||
if release_version:
|
||||
staging_dir_str = str(staging_dir)
|
||||
print(
|
||||
f"Staged version {version} for release in {staging_dir_str}\n\n"
|
||||
"Verify the CLI:\n"
|
||||
f" node {staging_dir_str}/bin/codex.js --version\n"
|
||||
f" node {staging_dir_str}/bin/codex.js --help\n\n"
|
||||
)
|
||||
else:
|
||||
print(f"Staged package in {staging_dir}")
|
||||
|
||||
if args.pack_output is not None:
|
||||
output_path = run_npm_pack(staging_dir, args.pack_output)
|
||||
print(f"npm pack output written to {output_path}")
|
||||
|
||||
for slice_tag, output_path in slice_outputs:
|
||||
print(f"built slice {slice_tag} tarball at {output_path}")
|
||||
finally:
|
||||
if created_temp:
|
||||
# Preserve the staging directory for further inspection.
|
||||
pass
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def prepare_staging_dir(staging_dir: Path | None) -> tuple[Path, bool]:
|
||||
if staging_dir is not None:
|
||||
staging_dir = staging_dir.resolve()
|
||||
staging_dir.mkdir(parents=True, exist_ok=True)
|
||||
if any(staging_dir.iterdir()):
|
||||
raise RuntimeError(f"Staging directory {staging_dir} is not empty.")
|
||||
return staging_dir, False
|
||||
|
||||
temp_dir = Path(tempfile.mkdtemp(prefix="codex-npm-stage-"))
|
||||
return temp_dir, True
|
||||
|
||||
|
||||
def stage_sources(staging_dir: Path, version: str) -> None:
|
||||
bin_dir = staging_dir / "bin"
|
||||
bin_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
shutil.copy2(CODEX_CLI_ROOT / "bin" / "codex.js", bin_dir / "codex.js")
|
||||
rg_manifest = CODEX_CLI_ROOT / "bin" / "rg"
|
||||
if rg_manifest.exists():
|
||||
shutil.copy2(rg_manifest, bin_dir / "rg")
|
||||
|
||||
readme_src = REPO_ROOT / "README.md"
|
||||
if readme_src.exists():
|
||||
shutil.copy2(readme_src, staging_dir / "README.md")
|
||||
|
||||
with open(CODEX_CLI_ROOT / "package.json", "r", encoding="utf-8") as fh:
|
||||
package_json = json.load(fh)
|
||||
package_json["version"] = version
|
||||
|
||||
with open(staging_dir / "package.json", "w", encoding="utf-8") as out:
|
||||
json.dump(package_json, out, indent=2)
|
||||
out.write("\n")
|
||||
|
||||
|
||||
def install_native_binaries(staging_dir: Path, workflow_url: str | None) -> None:
|
||||
cmd = ["./scripts/install_native_deps.py"]
|
||||
if workflow_url:
|
||||
cmd.extend(["--workflow-url", workflow_url])
|
||||
cmd.append(str(staging_dir))
|
||||
subprocess.check_call(cmd, cwd=CODEX_CLI_ROOT)
|
||||
|
||||
|
||||
def build_slice_packages(
|
||||
base_staging_dir: Path,
|
||||
version: str,
|
||||
output_dir: Path,
|
||||
slice_tags: Sequence[str],
|
||||
) -> list[tuple[str, Path]]:
|
||||
if not slice_tags:
|
||||
return []
|
||||
|
||||
base_vendor = base_staging_dir / VENDOR_DIR_NAME
|
||||
if not base_vendor.exists():
|
||||
raise RuntimeError(
|
||||
f"Base staging directory {base_staging_dir} does not include native vendor binaries."
|
||||
)
|
||||
|
||||
output_dir = output_dir.resolve()
|
||||
output_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
results: list[tuple[str, Path]] = []
|
||||
for slice_tag in slice_tags:
|
||||
targets = SLICE_TAG_TO_TARGETS.get(slice_tag)
|
||||
if not targets:
|
||||
raise RuntimeError(f"Unknown slice tag '{slice_tag}'.")
|
||||
|
||||
missing = [target for target in targets if not (base_vendor / target).exists()]
|
||||
if missing:
|
||||
missing_label = ", ".join(missing)
|
||||
raise RuntimeError(
|
||||
f"Missing native binaries for slice '{slice_tag}': {missing_label} is absent in vendor."
|
||||
)
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix=f"codex-npm-slice-{slice_tag}-") as slice_dir_str:
|
||||
slice_dir = Path(slice_dir_str)
|
||||
stage_sources(slice_dir, version)
|
||||
slice_vendor = slice_dir / VENDOR_DIR_NAME
|
||||
copy_vendor_slice(base_vendor, slice_vendor, targets)
|
||||
output_path = output_dir / f"codex-npm-{version}-{slice_tag}.tgz"
|
||||
run_npm_pack(slice_dir, output_path)
|
||||
results.append((slice_tag, output_path))
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def copy_vendor_slice(base_vendor: Path, dest_vendor: Path, targets: Sequence[str]) -> None:
|
||||
dest_vendor.parent.mkdir(parents=True, exist_ok=True)
|
||||
dest_vendor.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
for entry in base_vendor.iterdir():
|
||||
if entry.is_file():
|
||||
shutil.copy2(entry, dest_vendor / entry.name)
|
||||
|
||||
for target in targets:
|
||||
src = base_vendor / target
|
||||
dest = dest_vendor / target
|
||||
shutil.copytree(src, dest)
|
||||
|
||||
|
||||
def resolve_latest_alpha_workflow_url() -> str:
|
||||
version = determine_latest_alpha_version()
|
||||
workflow_url = fetch_workflow_url_for_version(version)
|
||||
if not workflow_url:
|
||||
raise RuntimeError(f"Unable to locate workflow for version {version}.")
|
||||
return workflow_url
|
||||
|
||||
|
||||
def determine_latest_alpha_version() -> str:
|
||||
releases = list_releases()
|
||||
best_key: tuple[int, int, int, int] | None = None
|
||||
best_version: str | None = None
|
||||
pattern = re.compile(r"^rust-v(\d+)\.(\d+)\.(\d+)-alpha\.(\d+)$")
|
||||
for release in releases:
|
||||
tag = release.get("tag_name", "")
|
||||
match = pattern.match(tag)
|
||||
if not match:
|
||||
continue
|
||||
key = tuple(int(match.group(i)) for i in range(1, 5))
|
||||
if best_key is None or key > best_key:
|
||||
best_key = key
|
||||
best_version = (
|
||||
f"{match.group(1)}.{match.group(2)}.{match.group(3)}-alpha.{match.group(4)}"
|
||||
)
|
||||
|
||||
if best_version is None:
|
||||
raise RuntimeError("No alpha releases found when resolving workflow URL.")
|
||||
return best_version
|
||||
|
||||
|
||||
def list_releases() -> list[dict]:
|
||||
stdout = subprocess.check_output(
|
||||
["gh", "api", f"/repos/{GITHUB_REPO}/releases?per_page=100"],
|
||||
text=True,
|
||||
)
|
||||
try:
|
||||
releases = json.loads(stdout or "[]")
|
||||
except json.JSONDecodeError as exc:
|
||||
raise RuntimeError("Unable to parse releases JSON.") from exc
|
||||
if not isinstance(releases, list):
|
||||
raise RuntimeError("Unexpected response when listing releases.")
|
||||
return releases
|
||||
|
||||
|
||||
def fetch_workflow_url_for_version(version: str) -> str | None:
|
||||
ref = f"rust-v{version}"
|
||||
stdout = subprocess.check_output(
|
||||
[
|
||||
"gh",
|
||||
"run",
|
||||
"list",
|
||||
"--branch",
|
||||
ref,
|
||||
"--limit",
|
||||
"20",
|
||||
"--json",
|
||||
"workflowName,url",
|
||||
],
|
||||
text=True,
|
||||
)
|
||||
|
||||
try:
|
||||
runs = json.loads(stdout or "[]")
|
||||
except json.JSONDecodeError as exc:
|
||||
raise RuntimeError("Unable to parse workflow run listing.") from exc
|
||||
|
||||
for run in runs:
|
||||
if run.get("workflowName") == "rust-release":
|
||||
url = run.get("url")
|
||||
if url:
|
||||
return url
|
||||
return None
|
||||
|
||||
|
||||
def resolve_release_workflow(version: str) -> dict:
|
||||
stdout = subprocess.check_output(
|
||||
[
|
||||
"gh",
|
||||
"run",
|
||||
"list",
|
||||
"--branch",
|
||||
f"rust-v{version}",
|
||||
"--json",
|
||||
"workflowName,url,headSha",
|
||||
"--jq",
|
||||
'first(.[] | select(.workflowName == "rust-release"))',
|
||||
],
|
||||
text=True,
|
||||
)
|
||||
workflow = json.loads(stdout)
|
||||
if not workflow:
|
||||
raise RuntimeError(f"Unable to find rust-release workflow for version {version}.")
|
||||
return workflow
|
||||
|
||||
|
||||
def run_npm_pack(staging_dir: Path, output_path: Path) -> Path:
|
||||
output_path = output_path.resolve()
|
||||
output_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix="codex-npm-pack-") as pack_dir_str:
|
||||
pack_dir = Path(pack_dir_str)
|
||||
stdout = subprocess.check_output(
|
||||
["npm", "pack", "--json", "--pack-destination", str(pack_dir)],
|
||||
cwd=staging_dir,
|
||||
text=True,
|
||||
)
|
||||
try:
|
||||
pack_output = json.loads(stdout)
|
||||
except json.JSONDecodeError as exc:
|
||||
raise RuntimeError("Failed to parse npm pack output.") from exc
|
||||
|
||||
if not pack_output:
|
||||
raise RuntimeError("npm pack did not produce an output tarball.")
|
||||
|
||||
tarball_name = pack_output[0].get("filename") or pack_output[0].get("name")
|
||||
if not tarball_name:
|
||||
raise RuntimeError("Unable to determine npm pack output filename.")
|
||||
|
||||
tarball_path = pack_dir / tarball_name
|
||||
if not tarball_path.exists():
|
||||
raise RuntimeError(f"Expected npm pack output not found: {tarball_path}")
|
||||
|
||||
shutil.move(str(tarball_path), output_path)
|
||||
|
||||
return output_path
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import sys
|
||||
|
||||
sys.exit(main())
|
||||
318 codex-cli/scripts/install_native_deps.py Executable file
@@ -0,0 +1,318 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Install Codex native binaries (Rust CLI plus ripgrep helpers)."""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import tarfile
|
||||
import tempfile
|
||||
import zipfile
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
from pathlib import Path
|
||||
from typing import Iterable, Sequence
|
||||
from urllib.parse import urlparse
|
||||
from urllib.request import urlopen
|
||||
|
||||
SCRIPT_DIR = Path(__file__).resolve().parent
|
||||
CODEX_CLI_ROOT = SCRIPT_DIR.parent
|
||||
DEFAULT_WORKFLOW_URL = "https://github.com/openai/codex/actions/runs/17952349351" # rust-v0.40.0
|
||||
VENDOR_DIR_NAME = "vendor"
|
||||
RG_MANIFEST = CODEX_CLI_ROOT / "bin" / "rg"
|
||||
CODEX_TARGETS = (
|
||||
"x86_64-unknown-linux-musl",
|
||||
"aarch64-unknown-linux-musl",
|
||||
"x86_64-apple-darwin",
|
||||
"aarch64-apple-darwin",
|
||||
"x86_64-pc-windows-msvc",
|
||||
"aarch64-pc-windows-msvc",
|
||||
)
|
||||
|
||||
RG_TARGET_PLATFORM_PAIRS: list[tuple[str, str]] = [
|
||||
("x86_64-unknown-linux-musl", "linux-x86_64"),
|
||||
("aarch64-unknown-linux-musl", "linux-aarch64"),
|
||||
("x86_64-apple-darwin", "macos-x86_64"),
|
||||
("aarch64-apple-darwin", "macos-aarch64"),
|
||||
("x86_64-pc-windows-msvc", "windows-x86_64"),
|
||||
("aarch64-pc-windows-msvc", "windows-aarch64"),
|
||||
]
|
||||
RG_TARGET_TO_PLATFORM = {target: platform for target, platform in RG_TARGET_PLATFORM_PAIRS}
|
||||
DEFAULT_RG_TARGETS = [target for target, _ in RG_TARGET_PLATFORM_PAIRS]
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
|
||||
parser = argparse.ArgumentParser(description="Install native Codex binaries.")
|
||||
parser.add_argument(
|
||||
"--workflow-url",
|
||||
help=(
|
||||
"GitHub Actions workflow URL that produced the artifacts. Defaults to a "
|
||||
"known good run when omitted."
|
||||
),
|
||||
)
|
||||
parser.add_argument(
|
||||
"root",
|
||||
nargs="?",
|
||||
type=Path,
|
||||
help=(
|
||||
"Directory containing package.json for the staged package. If omitted, the "
|
||||
"repository checkout is used."
|
||||
),
|
||||
)
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
def main() -> int:
|
||||
args = parse_args()
|
||||
|
||||
codex_cli_root = (args.root or CODEX_CLI_ROOT).resolve()
|
||||
vendor_dir = codex_cli_root / VENDOR_DIR_NAME
|
||||
vendor_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
workflow_url = (args.workflow_url or DEFAULT_WORKFLOW_URL).strip()
|
||||
if not workflow_url:
|
||||
workflow_url = DEFAULT_WORKFLOW_URL
|
||||
|
||||
workflow_id = workflow_url.rstrip("/").split("/")[-1]
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str:
|
||||
artifacts_dir = Path(artifacts_dir_str)
|
||||
_download_artifacts(workflow_id, artifacts_dir)
|
||||
install_codex_binaries(artifacts_dir, vendor_dir, CODEX_TARGETS)
|
||||
|
||||
fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST)
|
||||
|
||||
print(f"Installed native dependencies into {vendor_dir}")
|
||||
return 0
|
||||
|
||||
|
||||
def fetch_rg(
|
||||
vendor_dir: Path,
|
||||
targets: Sequence[str] | None = None,
|
||||
*,
|
||||
manifest_path: Path,
|
||||
) -> list[Path]:
|
||||
"""Download ripgrep binaries described by the DotSlash manifest."""
|
||||
|
||||
if targets is None:
|
||||
targets = DEFAULT_RG_TARGETS
|
||||
|
||||
if not manifest_path.exists():
|
||||
raise FileNotFoundError(f"DotSlash manifest not found: {manifest_path}")
|
||||
|
||||
manifest = _load_manifest(manifest_path)
|
||||
platforms = manifest.get("platforms", {})
|
||||
|
||||
vendor_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
targets = list(targets)
|
||||
if not targets:
|
||||
return []
|
||||
|
||||
task_configs: list[tuple[str, str, dict]] = []
|
||||
for target in targets:
|
||||
platform_key = RG_TARGET_TO_PLATFORM.get(target)
|
||||
if platform_key is None:
|
||||
raise ValueError(f"Unsupported ripgrep target '{target}'.")
|
||||
|
||||
platform_info = platforms.get(platform_key)
|
||||
if platform_info is None:
|
||||
raise RuntimeError(f"Platform '{platform_key}' not found in manifest {manifest_path}.")
|
||||
|
||||
task_configs.append((target, platform_key, platform_info))
|
||||
|
||||
results: dict[str, Path] = {}
|
||||
max_workers = min(len(task_configs), max(1, (os.cpu_count() or 1)))
|
||||
|
||||
with ThreadPoolExecutor(max_workers=max_workers) as executor:
|
||||
future_map = {
|
||||
executor.submit(
|
||||
_fetch_single_rg,
|
||||
vendor_dir,
|
||||
target,
|
||||
platform_key,
|
||||
platform_info,
|
||||
manifest_path,
|
||||
): target
|
||||
for target, platform_key, platform_info in task_configs
|
||||
}
|
||||
|
||||
for future in as_completed(future_map):
|
||||
target = future_map[future]
|
||||
results[target] = future.result()
|
||||
|
||||
return [results[target] for target in targets]
|
||||
|
||||
|
||||
def _download_artifacts(workflow_id: str, dest_dir: Path) -> None:
|
||||
cmd = [
|
||||
"gh",
|
||||
"run",
|
||||
"download",
|
||||
"--dir",
|
||||
str(dest_dir),
|
||||
"--repo",
|
||||
"openai/codex",
|
||||
workflow_id,
|
||||
]
|
||||
subprocess.check_call(cmd)
|
||||
|
||||
|
||||
def install_codex_binaries(
|
||||
artifacts_dir: Path, vendor_dir: Path, targets: Iterable[str]
|
||||
) -> list[Path]:
|
||||
targets = list(targets)
|
||||
if not targets:
|
||||
return []
|
||||
|
||||
results: dict[str, Path] = {}
|
||||
max_workers = min(len(targets), max(1, (os.cpu_count() or 1)))
|
||||
|
||||
with ThreadPoolExecutor(max_workers=max_workers) as executor:
|
||||
future_map = {
|
||||
executor.submit(_install_single_codex_binary, artifacts_dir, vendor_dir, target): target
|
||||
for target in targets
|
||||
}
|
||||
|
||||
for future in as_completed(future_map):
|
||||
target = future_map[future]
|
||||
results[target] = future.result()
|
||||
|
||||
return [results[target] for target in targets]
|
||||
|
||||
|
||||
def _install_single_codex_binary(artifacts_dir: Path, vendor_dir: Path, target: str) -> Path:
|
||||
artifact_subdir = artifacts_dir / target
|
||||
archive_name = _archive_name_for_target(target)
|
||||
archive_path = artifact_subdir / archive_name
|
||||
if not archive_path.exists():
|
||||
raise FileNotFoundError(f"Expected artifact not found: {archive_path}")
|
||||
|
||||
dest_dir = vendor_dir / target / "codex"
|
||||
dest_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
binary_name = "codex.exe" if "windows" in target else "codex"
|
||||
dest = dest_dir / binary_name
|
||||
dest.unlink(missing_ok=True)
|
||||
extract_archive(archive_path, "zst", None, dest)
|
||||
if "windows" not in target:
|
||||
dest.chmod(0o755)
|
||||
return dest
|
||||
|
||||
|
||||
def _archive_name_for_target(target: str) -> str:
|
||||
if "windows" in target:
|
||||
return f"codex-{target}.exe.zst"
|
||||
return f"codex-{target}.zst"
|
||||
|
||||
|
||||
def _fetch_single_rg(
|
||||
vendor_dir: Path,
|
||||
target: str,
|
||||
platform_key: str,
|
||||
platform_info: dict,
|
||||
manifest_path: Path,
|
||||
) -> Path:
|
||||
providers = platform_info.get("providers", [])
|
||||
if not providers:
|
||||
raise RuntimeError(f"No providers listed for platform '{platform_key}' in {manifest_path}.")
|
||||
|
||||
url = providers[0]["url"]
|
||||
archive_format = platform_info.get("format", "zst")
|
||||
archive_member = platform_info.get("path")
|
||||
|
||||
dest_dir = vendor_dir / target / "path"
|
||||
dest_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
is_windows = platform_key.startswith("win")
|
||||
binary_name = "rg.exe" if is_windows else "rg"
|
||||
dest = dest_dir / binary_name
|
||||
|
||||
with tempfile.TemporaryDirectory() as tmp_dir_str:
|
||||
tmp_dir = Path(tmp_dir_str)
|
||||
archive_filename = os.path.basename(urlparse(url).path)
|
||||
download_path = tmp_dir / archive_filename
|
||||
_download_file(url, download_path)
|
||||
|
||||
dest.unlink(missing_ok=True)
|
||||
extract_archive(download_path, archive_format, archive_member, dest)
|
||||
|
||||
if not is_windows:
|
||||
dest.chmod(0o755)
|
||||
|
||||
return dest
|
||||
|
||||
|
||||
def _download_file(url: str, dest: Path) -> None:
|
||||
dest.parent.mkdir(parents=True, exist_ok=True)
|
||||
with urlopen(url) as response, open(dest, "wb") as out:
|
||||
shutil.copyfileobj(response, out)
|
||||
|
||||
|
||||
def extract_archive(
|
||||
archive_path: Path,
|
||||
archive_format: str,
|
||||
archive_member: str | None,
|
||||
dest: Path,
|
||||
) -> None:
|
||||
dest.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
if archive_format == "zst":
|
||||
output_path = archive_path.parent / dest.name
|
||||
subprocess.check_call(
|
||||
["zstd", "-f", "-d", str(archive_path), "-o", str(output_path)]
|
||||
)
|
||||
shutil.move(str(output_path), dest)
|
||||
return
|
||||
|
||||
if archive_format == "tar.gz":
|
||||
if not archive_member:
|
||||
raise RuntimeError("Missing 'path' for tar.gz archive in DotSlash manifest.")
|
||||
with tarfile.open(archive_path, "r:gz") as tar:
|
||||
try:
|
||||
member = tar.getmember(archive_member)
|
||||
except KeyError as exc:
|
||||
raise RuntimeError(
|
||||
f"Entry '{archive_member}' not found in archive {archive_path}."
|
||||
) from exc
|
||||
tar.extract(member, path=archive_path.parent, filter="data")
|
||||
extracted = archive_path.parent / archive_member
|
||||
shutil.move(str(extracted), dest)
|
||||
return
|
||||
|
||||
if archive_format == "zip":
|
||||
if not archive_member:
|
||||
raise RuntimeError("Missing 'path' for zip archive in DotSlash manifest.")
|
||||
with zipfile.ZipFile(archive_path) as archive:
|
||||
try:
|
||||
with archive.open(archive_member) as src, open(dest, "wb") as out:
|
||||
shutil.copyfileobj(src, out)
|
||||
except KeyError as exc:
|
||||
raise RuntimeError(
|
||||
f"Entry '{archive_member}' not found in archive {archive_path}."
|
||||
) from exc
|
||||
return
|
||||
|
||||
raise RuntimeError(f"Unsupported archive format '{archive_format}'.")
|
||||
|
||||
|
||||
def _load_manifest(manifest_path: Path) -> dict:
|
||||
cmd = ["dotslash", "--", "parse", str(manifest_path)]
|
||||
stdout = subprocess.check_output(cmd, text=True)
|
||||
try:
|
||||
manifest = json.loads(stdout)
|
||||
except json.JSONDecodeError as exc:
|
||||
raise RuntimeError(f"Invalid DotSlash manifest output from {manifest_path}.") from exc
|
||||
|
||||
if not isinstance(manifest, dict):
|
||||
raise RuntimeError(
|
||||
f"Unexpected DotSlash manifest structure for {manifest_path}: {type(manifest)!r}"
|
||||
)
|
||||
|
||||
return manifest
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import sys
|
||||
|
||||
sys.exit(main())
|
||||
@@ -1,94 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Install native runtime dependencies for codex-cli.
|
||||
#
|
||||
# Usage
|
||||
# install_native_deps.sh [--workflow-url URL] [CODEX_CLI_ROOT]
|
||||
#
|
||||
# The optional RELEASE_ROOT is the path that contains package.json. Omitting
|
||||
# it installs the binaries into the repository's own bin/ folder to support
|
||||
# local development.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# ------------------
|
||||
# Parse arguments
|
||||
# ------------------
|
||||
|
||||
CODEX_CLI_ROOT=""
|
||||
|
||||
# Until we start publishing stable GitHub releases, we have to grab the binaries
|
||||
# from the GitHub Action that created them. Update the URL below to point to the
|
||||
# appropriate workflow run:
|
||||
WORKFLOW_URL="https://github.com/openai/codex/actions/runs/17417194663" # rust-v0.28.0
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--workflow-url)
|
||||
shift || { echo "--workflow-url requires an argument"; exit 1; }
|
||||
if [ -n "$1" ]; then
|
||||
WORKFLOW_URL="$1"
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
if [[ -z "$CODEX_CLI_ROOT" ]]; then
|
||||
CODEX_CLI_ROOT="$1"
|
||||
else
|
||||
echo "Unexpected argument: $1" >&2
|
||||
exit 1
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Determine where the binaries should be installed.
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
if [ -n "$CODEX_CLI_ROOT" ]; then
|
||||
# The caller supplied a release root directory.
|
||||
BIN_DIR="$CODEX_CLI_ROOT/bin"
|
||||
else
|
||||
# No argument; fall back to the repo’s own bin directory.
|
||||
# Resolve the path of this script, then walk up to the repo root.
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
CODEX_CLI_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
|
||||
BIN_DIR="$CODEX_CLI_ROOT/bin"
|
||||
fi
|
||||
|
||||
# Make sure the destination directory exists.
|
||||
mkdir -p "$BIN_DIR"
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Download and decompress the artifacts from the GitHub Actions workflow.
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
WORKFLOW_ID="${WORKFLOW_URL##*/}"
|
||||
|
||||
ARTIFACTS_DIR="$(mktemp -d)"
|
||||
trap 'rm -rf "$ARTIFACTS_DIR"' EXIT
|
||||
|
||||
# NB: The GitHub CLI `gh` must be installed and authenticated.
|
||||
gh run download --dir "$ARTIFACTS_DIR" --repo openai/codex "$WORKFLOW_ID"
|
||||
|
||||
# x64 Linux
|
||||
zstd -d "$ARTIFACTS_DIR/x86_64-unknown-linux-musl/codex-x86_64-unknown-linux-musl.zst" \
|
||||
-o "$BIN_DIR/codex-x86_64-unknown-linux-musl"
|
||||
# ARM64 Linux
|
||||
zstd -d "$ARTIFACTS_DIR/aarch64-unknown-linux-musl/codex-aarch64-unknown-linux-musl.zst" \
|
||||
-o "$BIN_DIR/codex-aarch64-unknown-linux-musl"
|
||||
# x64 macOS
|
||||
zstd -d "$ARTIFACTS_DIR/x86_64-apple-darwin/codex-x86_64-apple-darwin.zst" \
|
||||
-o "$BIN_DIR/codex-x86_64-apple-darwin"
|
||||
# ARM64 macOS
|
||||
zstd -d "$ARTIFACTS_DIR/aarch64-apple-darwin/codex-aarch64-apple-darwin.zst" \
|
||||
-o "$BIN_DIR/codex-aarch64-apple-darwin"
|
||||
# x64 Windows
|
||||
zstd -d "$ARTIFACTS_DIR/x86_64-pc-windows-msvc/codex-x86_64-pc-windows-msvc.exe.zst" \
|
||||
-o "$BIN_DIR/codex-x86_64-pc-windows-msvc.exe"
|
||||
# ARM64 Windows
|
||||
zstd -d "$ARTIFACTS_DIR/aarch64-pc-windows-msvc/codex-aarch64-pc-windows-msvc.exe.zst" \
|
||||
-o "$BIN_DIR/codex-aarch64-pc-windows-msvc.exe"
|
||||
|
||||
echo "Installed native dependencies into $BIN_DIR"
|
||||
@@ -1,120 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# -----------------------------------------------------------------------------
|
||||
# stage_release.sh
|
||||
# -----------------------------------------------------------------------------
|
||||
# Stages an npm release for @openai/codex.
|
||||
#
|
||||
# Usage:
|
||||
#
|
||||
# --tmp <dir> : Use <dir> instead of a freshly created temp directory.
|
||||
# -h|--help : Print usage.
|
||||
#
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Helper - usage / flag parsing
|
||||
|
||||
usage() {
|
||||
cat <<EOF
|
||||
Usage: $(basename "$0") [--tmp DIR] [--version VERSION]
|
||||
|
||||
Options
|
||||
--tmp DIR Use DIR to stage the release (defaults to a fresh mktemp dir)
|
||||
--version Specify the version to release (defaults to a timestamp-based version)
|
||||
-h, --help Show this help
|
||||
|
||||
Legacy positional argument: the first non-flag argument is still interpreted
|
||||
as the temporary directory (for backwards compatibility) but is deprecated.
|
||||
EOF
|
||||
exit "${1:-0}"
|
||||
}
|
||||
|
||||
TMPDIR=""
|
||||
# Default to a timestamp-based version (keep same scheme as before)
|
||||
VERSION="$(printf '0.1.%d' "$(date +%y%m%d%H%M)")"
|
||||
WORKFLOW_URL=""
|
||||
|
||||
# Manual flag parser - Bash getopts does not handle GNU long options well.
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--tmp)
|
||||
shift || { echo "--tmp requires an argument"; usage 1; }
|
||||
TMPDIR="$1"
|
||||
;;
|
||||
--tmp=*)
|
||||
TMPDIR="${1#*=}"
|
||||
;;
|
||||
--version)
|
||||
shift || { echo "--version requires an argument"; usage 1; }
|
||||
VERSION="$1"
|
||||
;;
|
||||
--workflow-url)
|
||||
shift || { echo "--workflow-url requires an argument"; exit 1; }
|
||||
WORKFLOW_URL="$1"
|
||||
;;
|
||||
-h|--help)
|
||||
usage 0
|
||||
;;
|
||||
--*)
|
||||
echo "Unknown option: $1" >&2
|
||||
usage 1
|
||||
;;
|
||||
*)
|
||||
echo "Unexpected extra argument: $1" >&2
|
||||
usage 1
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
# Fallback when the caller did not specify a directory.
|
||||
# If no directory was specified create a fresh temporary one.
|
||||
if [[ -z "$TMPDIR" ]]; then
|
||||
TMPDIR="$(mktemp -d)"
|
||||
fi
|
||||
|
||||
# Ensure the directory exists, then resolve to an absolute path.
|
||||
mkdir -p "$TMPDIR"
|
||||
TMPDIR="$(cd "$TMPDIR" && pwd)"
|
||||
|
||||
# Main build logic
|
||||
|
||||
echo "Staging release in $TMPDIR"
|
||||
|
||||
# The script lives in codex-cli/scripts/ - change into codex-cli root so that
|
||||
# relative paths keep working.
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
CODEX_CLI_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
|
||||
|
||||
pushd "$CODEX_CLI_ROOT" >/dev/null
|
||||
|
||||
# 1. Build the JS artifacts ---------------------------------------------------
|
||||
|
||||
# Paths inside the staged package
|
||||
mkdir -p "$TMPDIR/bin"
|
||||
|
||||
cp -r bin/codex.js "$TMPDIR/bin/codex.js"
|
||||
cp ../README.md "$TMPDIR" || true # README is one level up - ignore if missing
|
||||
|
||||
# Modify package.json - bump version and optionally add the native directory to
|
||||
# the files array so that the binaries are published to npm.
|
||||
|
||||
jq --arg version "$VERSION" \
|
||||
'.version = $version' \
|
||||
package.json > "$TMPDIR/package.json"
|
||||
|
||||
# 2. Native runtime deps (sandbox plus optional Rust binaries)
|
||||
|
||||
./scripts/install_native_deps.sh --workflow-url "$WORKFLOW_URL" "$TMPDIR"
|
||||
|
||||
popd >/dev/null
|
||||
|
||||
echo "Staged version $VERSION for release in $TMPDIR"
|
||||
|
||||
echo "Verify the CLI:"
|
||||
echo " node ${TMPDIR}/bin/codex.js --version"
|
||||
echo " node ${TMPDIR}/bin/codex.js --help"
|
||||
|
||||
# Print final hint for convenience
|
||||
echo "Next: cd \"$TMPDIR\" && npm publish"
|
||||
@@ -1,70 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import json
|
||||
import subprocess
|
||||
import sys
|
||||
import argparse
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def main() -> int:
|
||||
parser = argparse.ArgumentParser(
|
||||
description="""Stage a release for the npm module.
|
||||
|
||||
Run this after the GitHub Release has been created and use
|
||||
`--release-version` to specify the version to release.
|
||||
|
||||
Optionally pass `--tmp` to control the temporary staging directory that will be
|
||||
forwarded to stage_release.sh.
|
||||
"""
|
||||
)
|
||||
parser.add_argument(
|
||||
"--release-version", required=True, help="Version to release, e.g., 0.3.0"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--tmp",
|
||||
help="Optional path to stage the npm package; forwarded to stage_release.sh",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
version = args.release_version
|
||||
|
||||
gh_run = subprocess.run(
|
||||
[
|
||||
"gh",
|
||||
"run",
|
||||
"list",
|
||||
"--branch",
|
||||
f"rust-v{version}",
|
||||
"--json",
|
||||
"workflowName,url,headSha",
|
||||
"--jq",
|
||||
'first(.[] | select(.workflowName == "rust-release"))',
|
||||
],
|
||||
stdout=subprocess.PIPE,
|
||||
check=True,
|
||||
)
|
||||
gh_run.check_returncode()
|
||||
workflow = json.loads(gh_run.stdout)
|
||||
sha = workflow["headSha"]
|
||||
|
||||
print(f"should `git checkout {sha}`")
|
||||
|
||||
current_dir = Path(__file__).parent.resolve()
|
||||
cmd = [
|
||||
str(current_dir / "stage_release.sh"),
|
||||
"--version",
|
||||
version,
|
||||
"--workflow-url",
|
||||
workflow["url"],
|
||||
]
|
||||
if args.tmp:
|
||||
cmd.extend(["--tmp", args.tmp])
|
||||
|
||||
stage_release = subprocess.run(cmd)
|
||||
stage_release.check_returncode()
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
53 codex-rs/Cargo.lock generated
@@ -664,13 +664,10 @@ dependencies = [
name = "codex-common"
version = "0.0.0"
dependencies = [
 "async-trait",
 "clap",
 "codex-core",
 "codex-protocol",
 "serde",
 "thiserror 2.0.16",
 "tokio",
 "toml",
]

@@ -986,6 +983,16 @@ dependencies = [
 "vt100",
]

[[package]]
name = "codex-utils-readiness"
version = "0.0.0"
dependencies = [
 "async-trait",
 "thiserror 2.0.16",
 "time",
 "tokio",
]

[[package]]
name = "color-eyre"
version = "0.6.5"
@@ -1281,12 +1288,12 @@ dependencies = [

[[package]]
name = "deranged"
version = "0.4.0"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e"
checksum = "a41953f86f8a05768a6cda24def994fd2f424b04ec5c719cf89989779f199071"
dependencies = [
 "powerfmt",
 "serde",
 "serde_core",
]

[[package]]
@@ -2686,9 +2693,9 @@ dependencies = [

[[package]]
name = "log"
version = "0.4.27"
version = "0.4.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432"

[[package]]
name = "logos"
@@ -3929,9 +3936,9 @@ dependencies = [

[[package]]
name = "serde"
version = "1.0.224"
version = "1.0.226"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6aaeb1e94f53b16384af593c71e20b095e958dab1d26939c1b70645c5cfbcc0b"
checksum = "0dca6411025b24b60bfa7ec1fe1f8e710ac09782dca409ee8237ba74b51295fd"
dependencies = [
 "serde_core",
 "serde_derive",
@@ -3939,18 +3946,18 @@ dependencies = [

[[package]]
name = "serde_core"
version = "1.0.224"
version = "1.0.226"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32f39390fa6346e24defbcdd3d9544ba8a19985d0af74df8501fbfe9a64341ab"
checksum = "ba2ba63999edb9dac981fb34b3e5c0d111a69b0924e253ed29d83f7c99e966a4"
dependencies = [
 "serde_derive",
]

[[package]]
name = "serde_derive"
version = "1.0.224"
version = "1.0.226"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87ff78ab5e8561c9a675bfc1785cb07ae721f0ee53329a595cefd8c04c2ac4e0"
checksum = "8db53ae22f34573731bafa1db20f04027b2d25e02d8205921b569171699cdb33"
dependencies = [
 "proc-macro2",
 "quote",
@@ -4453,15 +4460,15 @@ dependencies = [

[[package]]
name = "tempfile"
version = "3.20.0"
version = "3.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1"
checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16"
dependencies = [
 "fastrand",
 "getrandom 0.3.3",
 "once_cell",
 "rustix 1.0.8",
 "windows-sys 0.59.0",
 "windows-sys 0.60.2",
]

[[package]]
@@ -4585,9 +4592,9 @@ dependencies = [

[[package]]
name = "time"
version = "0.3.41"
version = "0.3.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40"
checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d"
dependencies = [
 "deranged",
 "itoa",
@@ -4602,15 +4609,15 @@ dependencies = [

[[package]]
name = "time-core"
version = "0.1.4"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c"
checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b"

[[package]]
name = "time-macros"
version = "0.2.22"
version = "0.2.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49"
checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3"
dependencies = [
 "num-conv",
 "time-core",

@@ -19,6 +19,7 @@ members = [
    "protocol",
    "protocol-ts",
    "tui",
    "utils/readiness",
]
resolver = "2"

@@ -49,6 +50,7 @@ codex-ollama = { path = "ollama" }
codex-protocol = { path = "protocol" }
codex-protocol-ts = { path = "protocol-ts" }
codex-tui = { path = "tui" }
codex-utils-readiness = { path = "utils/readiness" }
core_test_support = { path = "core/tests/common" }
mcp-types = { path = "mcp-types" }
mcp_test_support = { path = "mcp-server/tests/common" }
@@ -121,7 +123,7 @@ strum = "0.27.2"
strum_macros = "0.27.2"
supports-color = "3.0.2"
sys-locale = "0.3.2"
tempfile = "3.13.0"
tempfile = "3.23.0"
textwrap = "0.16.2"
thiserror = "2.0.16"
time = "0.3"
@@ -191,7 +193,7 @@ unwrap_used = "deny"
# cargo-shear cannot see the platform-specific openssl-sys usage, so we
# silence the false positive here instead of deleting a real dependency.
[workspace.metadata.cargo-shear]
ignored = ["openssl-sys"]
ignored = ["openssl-sys", "codex-utils-readiness"]

[profile.release]
lto = "fat"

@@ -7,14 +7,11 @@ version = { workspace = true }
workspace = true

[dependencies]
async-trait = { workspace = true }
clap = { workspace = true, features = ["derive", "wrap_help"], optional = true }
codex-core = { workspace = true }
codex-protocol = { workspace = true }
serde = { workspace = true, optional = true }
toml = { workspace = true, optional = true }
thiserror = { workspace = true }
tokio = { workspace = true }

[features]
# Separate feature so that `clap` is not a mandatory dependency.

@@ -34,5 +34,3 @@ pub mod model_presets;
// Shared approval presets (AskForApproval + Sandbox) used by TUI and MCP server
// Not to be confused with AskForApproval, which we should probably rename to EscalationPolicy.
pub mod approval_presets;
// Readiness flag with token-based authorization and async waiting (Tokio).
pub mod readiness;

@@ -43,6 +43,7 @@ use crate::model_provider_info::WireApi;
use crate::openai_model_info::get_model_info;
use crate::openai_tools::create_tools_json_for_responses_api;
use crate::protocol::RateLimitSnapshot;
use crate::protocol::RateLimitWindow;
use crate::protocol::TokenUsage;
use crate::token_data::PlanType;
use crate::util::backoff;
@@ -414,9 +415,6 @@ struct SseEvent {
    delta: Option<String>,
}

#[derive(Debug, Deserialize)]
struct ResponseCreated {}

#[derive(Debug, Deserialize)]
struct ResponseCompleted {
    id: String,
@@ -488,19 +486,39 @@ fn attach_item_ids(payload_json: &mut Value, original_items: &[ResponseItem]) {
}

fn parse_rate_limit_snapshot(headers: &HeaderMap) -> Option<RateLimitSnapshot> {
    let primary_used_percent = parse_header_f64(headers, "x-codex-primary-used-percent")?;
    let secondary_used_percent = parse_header_f64(headers, "x-codex-secondary-used-percent")?;
    let primary_to_secondary_ratio_percent =
        parse_header_f64(headers, "x-codex-primary-over-secondary-limit-percent")?;
    let primary_window_minutes = parse_header_u64(headers, "x-codex-primary-window-minutes")?;
    let secondary_window_minutes = parse_header_u64(headers, "x-codex-secondary-window-minutes")?;
    let primary = parse_rate_limit_window(
        headers,
        "x-codex-primary-used-percent",
        "x-codex-primary-window-minutes",
        "x-codex-primary-reset-after-seconds",
    );

    Some(RateLimitSnapshot {
        primary_used_percent,
        secondary_used_percent,
        primary_to_secondary_ratio_percent,
        primary_window_minutes,
        secondary_window_minutes,
    let secondary = parse_rate_limit_window(
        headers,
        "x-codex-secondary-used-percent",
        "x-codex-secondary-window-minutes",
        "x-codex-secondary-reset-after-seconds",
    );

    if primary.is_none() && secondary.is_none() {
        return None;
    }

    Some(RateLimitSnapshot { primary, secondary })
}

fn parse_rate_limit_window(
    headers: &HeaderMap,
    used_percent_header: &str,
    window_minutes_header: &str,
    resets_header: &str,
) -> Option<RateLimitWindow> {
    let used_percent = parse_header_f64(headers, used_percent_header)?;

    Some(RateLimitWindow {
        used_percent,
        window_minutes: parse_header_u64(headers, window_minutes_header),
        resets_in_seconds: parse_header_u64(headers, resets_header),
    })
}

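The rewritten parsing above is deliberately lenient: each window is built only from its own trio of headers, the window and reset durations are optional, and the snapshot is dropped only when neither window could be parsed. Below is a minimal, self-contained sketch of that behavior; a plain `HashMap` stands in for the real response `HeaderMap`, and the helper names are illustrative rather than the crate's actual private API.

```rust
use std::collections::HashMap;

#[derive(Debug)]
struct RateLimitWindow {
    used_percent: f64,
    window_minutes: Option<u64>,
    resets_in_seconds: Option<u64>,
}

#[derive(Debug)]
struct RateLimitSnapshot {
    primary: Option<RateLimitWindow>,
    secondary: Option<RateLimitWindow>,
}

// A window exists as soon as its used-percent header parses; the other two
// fields stay None when absent or malformed.
fn parse_window(
    headers: &HashMap<&str, &str>,
    used: &str,
    minutes: &str,
    resets: &str,
) -> Option<RateLimitWindow> {
    Some(RateLimitWindow {
        used_percent: headers.get(used)?.parse().ok()?,
        window_minutes: headers.get(minutes).and_then(|v| v.parse().ok()),
        resets_in_seconds: headers.get(resets).and_then(|v| v.parse().ok()),
    })
}

// The snapshot survives if either window parsed; only a fully absent pair yields None.
fn parse_snapshot(headers: &HashMap<&str, &str>) -> Option<RateLimitSnapshot> {
    let primary = parse_window(
        headers,
        "x-codex-primary-used-percent",
        "x-codex-primary-window-minutes",
        "x-codex-primary-reset-after-seconds",
    );
    let secondary = parse_window(
        headers,
        "x-codex-secondary-used-percent",
        "x-codex-secondary-window-minutes",
        "x-codex-secondary-reset-after-seconds",
    );
    if primary.is_none() && secondary.is_none() {
        return None;
    }
    Some(RateLimitSnapshot { primary, secondary })
}

fn main() {
    // Only the primary headers are present; the snapshot is still produced.
    let headers: HashMap<&str, &str> = [
        ("x-codex-primary-used-percent", "12.5"),
        ("x-codex-primary-reset-after-seconds", "1800"),
    ]
    .into_iter()
    .collect();
    let snapshot = parse_snapshot(&headers).expect("primary window alone is enough");
    assert!(snapshot.secondary.is_none());
    println!("{snapshot:?}");
}
```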
@@ -55,7 +55,7 @@ pub(super) async fn run_inline_auto_compact_task(
    let input = vec![InputItem::Text {
        text: SUMMARIZATION_PROMPT.to_string(),
    }];
    run_compact_task_inner(sess, turn_context, sub_id, input, None, false).await;
    run_compact_task_inner(sess, turn_context, sub_id, input, false).await;
}

pub(super) async fn run_compact_task(
@@ -71,15 +71,7 @@ pub(super) async fn run_compact_task(
        }),
    };
    sess.send_event(start_event).await;
    run_compact_task_inner(
        sess.clone(),
        turn_context,
        sub_id.clone(),
        input,
        None,
        true,
    )
    .await;
    run_compact_task_inner(sess.clone(), turn_context, sub_id.clone(), input, true).await;
    let event = Event {
        id: sub_id,
        msg: EventMsg::TaskComplete(TaskCompleteEvent {
@@ -94,7 +86,6 @@ async fn run_compact_task_inner(
    turn_context: Arc<TurnContext>,
    sub_id: String,
    input: Vec<InputItem>,
    instructions_override: Option<String>,
    remove_task_on_completion: bool,
) {
    let initial_input_for_turn: ResponseInputItem = ResponseInputItem::from(input);
@@ -104,9 +95,7 @@ async fn run_compact_task_inner(

    let prompt = Prompt {
        input: turn_input,
        tools: Vec::new(),
        base_instructions_override: instructions_override,
        output_schema: None,
        ..Default::default()
    };

    let max_retries = turn_context.client.get_provider().stream_max_retries();

@@ -267,14 +267,20 @@ pub fn get_error_message_ui(e: &CodexErr) -> String {
#[cfg(test)]
mod tests {
    use super::*;
    use codex_protocol::protocol::RateLimitWindow;

    fn rate_limit_snapshot() -> RateLimitSnapshot {
        RateLimitSnapshot {
            primary_used_percent: 0.5,
            secondary_used_percent: 0.3,
            primary_to_secondary_ratio_percent: 0.7,
            primary_window_minutes: 60,
            secondary_window_minutes: 120,
            primary: Some(RateLimitWindow {
                used_percent: 50.0,
                window_minutes: Some(60),
                resets_in_seconds: Some(3600),
            }),
            secondary: Some(RateLimitWindow {
                used_percent: 30.0,
                window_minutes: Some(120),
                resets_in_seconds: Some(7200),
            }),
        }
    }

@@ -7,13 +7,14 @@ use crate::model_family::ModelFamily;
/// Though this would help present more accurate pricing information in the UI.
#[derive(Debug)]
pub(crate) struct ModelInfo {
    /// Size of the context window in tokens.
    /// Size of the context window in tokens. This is the maximum size of the input context.
    pub(crate) context_window: u64,

    /// Maximum number of output tokens that can be generated for the model.
    pub(crate) max_output_tokens: u64,

    /// Token threshold where we should automatically compact conversation history.
    /// Token threshold where we should automatically compact conversation history. This considers
    /// input tokens + output tokens of this turn.
    pub(crate) auto_compact_token_limit: Option<i64>,
}

@@ -64,7 +65,7 @@ pub(crate) fn get_model_info(model_family: &ModelFamily) -> Option<ModelInfo> {
        _ if slug.starts_with("gpt-5-codex") => Some(ModelInfo {
            context_window: 272_000,
            max_output_tokens: 128_000,
            auto_compact_token_limit: Some(220_000),
            auto_compact_token_limit: Some(350_000),
        }),

        _ if slug.starts_with("gpt-5") => Some(ModelInfo::new(272_000, 128_000)),

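The raised `auto_compact_token_limit` for `gpt-5-codex` (350,000, above the 272,000-token context window) is consistent with the updated doc comment: the threshold counts this turn's input plus output tokens, not just the input context. A small sketch of that check under that reading of the comment; the function name and exact accounting are assumptions, not the crate's implementation.

```rust
/// Hypothetical helper: compact once this turn's input plus output tokens
/// reach the model's auto-compact threshold.
fn should_auto_compact(input_tokens: u64, output_tokens: u64, auto_compact_token_limit: i64) -> bool {
    (input_tokens + output_tokens) as i64 >= auto_compact_token_limit
}

fn main() {
    let limit = 350_000; // new gpt-5-codex threshold from this diff
    assert!(!should_auto_compact(200_000, 60_000, limit)); // 260k total: keep going
    assert!(should_auto_compact(250_000, 120_000, limit)); // 370k total: compact
    println!("threshold checks pass");
}
```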
@@ -7,8 +7,6 @@ use std::path::Path;
use std::path::PathBuf;

use codex_protocol::mcp_protocol::ConversationId;
use serde::Deserialize;
use serde::Serialize;
use serde_json::Value;
use time::OffsetDateTime;
use time::format_description::FormatItem;
@@ -28,7 +26,6 @@ use super::policy::is_persisted_response_item;
use crate::config::Config;
use crate::default_client::ORIGINATOR;
use crate::git_info::collect_git_info;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::InitialHistory;
use codex_protocol::protocol::ResumedHistory;
use codex_protocol::protocol::RolloutItem;
@@ -36,19 +33,6 @@ use codex_protocol::protocol::RolloutLine;
use codex_protocol::protocol::SessionMeta;
use codex_protocol::protocol::SessionMetaLine;

#[derive(Serialize, Deserialize, Default, Clone)]
pub struct SessionStateSnapshot {}

#[derive(Serialize, Deserialize, Default, Clone)]
pub struct SavedSession {
    pub session: SessionMeta,
    #[serde(default)]
    pub items: Vec<ResponseItem>,
    #[serde(default)]
    pub state: SessionStateSnapshot,
    pub session_id: ConversationId,
}

/// Records all [`ResponseItem`]s for a session and flushes them to disk after
/// every update.
///

@@ -10,11 +10,6 @@ use crate::openai_tools::ResponsesApiTool;

const APPLY_PATCH_LARK_GRAMMAR: &str = include_str!("tool_apply_patch.lark");

#[derive(Serialize, Deserialize)]
pub(crate) struct ApplyPatchToolArgs {
    pub(crate) input: String,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[serde(rename_all = "snake_case")]
pub enum ApplyPatchToolType {

@@ -763,9 +763,10 @@ async fn token_count_includes_rate_limits_snapshot() {
        .insert_header("content-type", "text/event-stream")
        .insert_header("x-codex-primary-used-percent", "12.5")
        .insert_header("x-codex-secondary-used-percent", "40.0")
        .insert_header("x-codex-primary-over-secondary-limit-percent", "75.0")
        .insert_header("x-codex-primary-window-minutes", "10")
        .insert_header("x-codex-secondary-window-minutes", "60")
        .insert_header("x-codex-primary-reset-after-seconds", "1800")
        .insert_header("x-codex-secondary-reset-after-seconds", "7200")
        .set_body_raw(sse_body, "text/event-stream");

    Mock::given(method("POST"))
@@ -811,11 +812,16 @@ async fn token_count_includes_rate_limits_snapshot() {
        json!({
            "info": null,
            "rate_limits": {
                "primary_used_percent": 12.5,
                "secondary_used_percent": 40.0,
                "primary_to_secondary_ratio_percent": 75.0,
                "primary_window_minutes": 10,
                "secondary_window_minutes": 60
                "primary": {
                    "used_percent": 12.5,
                    "window_minutes": 10,
                    "resets_in_seconds": 1800
                },
                "secondary": {
                    "used_percent": 40.0,
                    "window_minutes": 60,
                    "resets_in_seconds": 7200
                }
            }
        })
    );
@@ -853,11 +859,16 @@ async fn token_count_includes_rate_limits_snapshot() {
            "model_context_window": 272000
        },
        "rate_limits": {
            "primary_used_percent": 12.5,
            "secondary_used_percent": 40.0,
            "primary_to_secondary_ratio_percent": 75.0,
            "primary_window_minutes": 10,
            "secondary_window_minutes": 60
            "primary": {
                "used_percent": 12.5,
                "window_minutes": 10,
                "resets_in_seconds": 1800
            },
            "secondary": {
                "used_percent": 40.0,
                "window_minutes": 60,
                "resets_in_seconds": 7200
            }
        }
    })
);
@@ -868,7 +879,20 @@ async fn token_count_includes_rate_limits_snapshot() {
    let final_snapshot = final_payload
        .rate_limits
        .expect("latest rate limit snapshot should be retained");
    assert_eq!(final_snapshot.primary_used_percent, 12.5);
    assert_eq!(
        final_snapshot
            .primary
            .as_ref()
            .map(|window| window.used_percent),
        Some(12.5)
    );
    assert_eq!(
        final_snapshot
            .primary
            .as_ref()
            .and_then(|window| window.resets_in_seconds),
        Some(1800)
    );

    wait_for_event(&codex, |msg| matches!(msg, EventMsg::TaskComplete(_))).await;
}
@@ -904,11 +928,16 @@ async fn usage_limit_error_emits_rate_limit_event() -> anyhow::Result<()> {
    let codex = codex_fixture.codex.clone();

    let expected_limits = json!({
        "primary_used_percent": 100.0,
        "secondary_used_percent": 87.5,
        "primary_to_secondary_ratio_percent": 95.0,
        "primary_window_minutes": 15,
        "secondary_window_minutes": 60
        "primary": {
            "used_percent": 100.0,
            "window_minutes": 15,
            "resets_in_seconds": null
        },
        "secondary": {
            "used_percent": 87.5,
            "window_minutes": 60,
            "resets_in_seconds": null
        }
    });

    let submission_id = codex

@@ -597,16 +597,18 @@ pub struct TokenCountEvent {

#[derive(Debug, Clone, Deserialize, Serialize, TS)]
pub struct RateLimitSnapshot {
    /// Percentage (0-100) of the primary window that has been consumed.
    pub primary_used_percent: f64,
    /// Percentage (0-100) of the secondary window that has been consumed.
    pub secondary_used_percent: f64,
    /// Size of the primary window relative to secondary (0-100).
    pub primary_to_secondary_ratio_percent: f64,
    /// Rolling window duration for the primary limit, in minutes.
    pub primary_window_minutes: u64,
    /// Rolling window duration for the secondary limit, in minutes.
    pub secondary_window_minutes: u64,
    pub primary: Option<RateLimitWindow>,
    pub secondary: Option<RateLimitWindow>,
}

#[derive(Debug, Clone, Deserialize, Serialize, TS)]
pub struct RateLimitWindow {
    /// Percentage (0-100) of the window that has been consumed.
    pub used_percent: f64,
    /// Rolling window duration, in minutes.
    pub window_minutes: Option<u64>,
    /// Seconds until the window resets.
    pub resets_in_seconds: Option<u64>,
}

// Includes prompts, tools and space to call compact.

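With `RateLimitSnapshot` now nesting two optional `RateLimitWindow` values instead of five flat scalars, the serialized payload changes shape accordingly. A standalone serde round-trip sketch of the new form, using local copies of the structs without the crate's `TS` derive:

```rust
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct RateLimitWindow {
    used_percent: f64,
    window_minutes: Option<u64>,
    resets_in_seconds: Option<u64>,
}

#[derive(Debug, Serialize, Deserialize)]
struct RateLimitSnapshot {
    primary: Option<RateLimitWindow>,
    secondary: Option<RateLimitWindow>,
}

fn main() -> serde_json::Result<()> {
    // Primary window known, secondary missing entirely.
    let snapshot = RateLimitSnapshot {
        primary: Some(RateLimitWindow {
            used_percent: 12.5,
            window_minutes: Some(10),
            resets_in_seconds: Some(1800),
        }),
        secondary: None,
    };
    // Serializes to:
    // {"primary":{"used_percent":12.5,"window_minutes":10,"resets_in_seconds":1800},"secondary":null}
    println!("{}", serde_json::to_string(&snapshot)?);

    // Missing optional fields deserialize to None rather than failing.
    let parsed: RateLimitSnapshot =
        serde_json::from_str(r#"{"primary":{"used_percent":50.0},"secondary":null}"#)?;
    assert!(parsed.primary.is_some());
    Ok(())
}
```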
@@ -1,3 +1,3 @@
[toolchain]
channel = "1.89.0"
components = [ "clippy", "rustfmt", "rust-src"]
channel = "1.90.0"
components = ["clippy", "rustfmt", "rust-src"]

@@ -77,6 +77,7 @@ use crate::history_cell::CommandOutput;
use crate::history_cell::ExecCell;
use crate::history_cell::HistoryCell;
use crate::history_cell::PatchEventType;
use crate::history_cell::RateLimitSnapshotDisplay;
use crate::markdown::append_markdown;
use crate::slash_command::SlashCommand;
use crate::text_formatting::truncate_text;
@@ -94,6 +95,7 @@ use crate::streaming::controller::AppEventHistorySink;
use crate::streaming::controller::StreamController;
use std::path::Path;

use chrono::Local;
use codex_common::approval_presets::ApprovalPreset;
use codex_common::approval_presets::builtin_approval_presets;
use codex_common::model_presets::ModelPreset;
@@ -129,39 +131,46 @@ struct RateLimitWarningState {
impl RateLimitWarningState {
    fn take_warnings(
        &mut self,
        secondary_used_percent: f64,
        primary_used_percent: f64,
        secondary_used_percent: Option<f64>,
        primary_used_percent: Option<f64>,
    ) -> Vec<String> {
        if secondary_used_percent == 100.0 || primary_used_percent == 100.0 {
        let reached_secondary_cap =
            matches!(secondary_used_percent, Some(percent) if percent == 100.0);
        let reached_primary_cap = matches!(primary_used_percent, Some(percent) if percent == 100.0);
        if reached_secondary_cap || reached_primary_cap {
            return Vec::new();
        }

        let mut warnings = Vec::new();

        let mut highest_secondary: Option<f64> = None;
        while self.secondary_index < RATE_LIMIT_WARNING_THRESHOLDS.len()
            && secondary_used_percent >= RATE_LIMIT_WARNING_THRESHOLDS[self.secondary_index]
        {
            highest_secondary = Some(RATE_LIMIT_WARNING_THRESHOLDS[self.secondary_index]);
            self.secondary_index += 1;
        }
        if let Some(threshold) = highest_secondary {
            warnings.push(format!(
                "Heads up, you've used over {threshold:.0}% of your weekly limit. Run /status for a breakdown."
            ));
        if let Some(secondary_used_percent) = secondary_used_percent {
            let mut highest_secondary: Option<f64> = None;
            while self.secondary_index < RATE_LIMIT_WARNING_THRESHOLDS.len()
                && secondary_used_percent >= RATE_LIMIT_WARNING_THRESHOLDS[self.secondary_index]
            {
                highest_secondary = Some(RATE_LIMIT_WARNING_THRESHOLDS[self.secondary_index]);
                self.secondary_index += 1;
            }
            if let Some(threshold) = highest_secondary {
                warnings.push(format!(
                    "Heads up, you've used over {threshold:.0}% of your weekly limit. Run /status for a breakdown."
                ));
            }
        }

        let mut highest_primary: Option<f64> = None;
        while self.primary_index < RATE_LIMIT_WARNING_THRESHOLDS.len()
            && primary_used_percent >= RATE_LIMIT_WARNING_THRESHOLDS[self.primary_index]
        {
            highest_primary = Some(RATE_LIMIT_WARNING_THRESHOLDS[self.primary_index]);
            self.primary_index += 1;
        }
        if let Some(threshold) = highest_primary {
            warnings.push(format!(
                "Heads up, you've used over {threshold:.0}% of your 5h limit. Run /status for a breakdown."
            ));
        if let Some(primary_used_percent) = primary_used_percent {
            let mut highest_primary: Option<f64> = None;
            while self.primary_index < RATE_LIMIT_WARNING_THRESHOLDS.len()
                && primary_used_percent >= RATE_LIMIT_WARNING_THRESHOLDS[self.primary_index]
            {
                highest_primary = Some(RATE_LIMIT_WARNING_THRESHOLDS[self.primary_index]);
                self.primary_index += 1;
            }
            if let Some(threshold) = highest_primary {
                warnings.push(format!(
                    "Heads up, you've used over {threshold:.0}% of your 5h limit. Run /status for a breakdown."
                ));
            }
        }

        warnings
@@ -189,7 +198,7 @@ pub(crate) struct ChatWidget {
    session_header: SessionHeader,
    initial_user_message: Option<UserMessage>,
    token_info: Option<TokenUsageInfo>,
    rate_limit_snapshot: Option<RateLimitSnapshot>,
    rate_limit_snapshot: Option<RateLimitSnapshotDisplay>,
    rate_limit_warnings: RateLimitWarningState,
    // Stream lifecycle controller
    stream_controller: Option<StreamController>,
@@ -366,16 +375,24 @@ impl ChatWidget {
    fn on_rate_limit_snapshot(&mut self, snapshot: Option<RateLimitSnapshot>) {
        if let Some(snapshot) = snapshot {
            let warnings = self.rate_limit_warnings.take_warnings(
                snapshot.secondary_used_percent,
                snapshot.primary_used_percent,
                snapshot
                    .secondary
                    .as_ref()
                    .map(|window| window.used_percent),
                snapshot.primary.as_ref().map(|window| window.used_percent),
            );
            self.rate_limit_snapshot = Some(snapshot);

            let display = history_cell::rate_limit_snapshot_display(&snapshot, Local::now());
            self.rate_limit_snapshot = Some(display);

            if !warnings.is_empty() {
                for warning in warnings {
                    self.add_to_history(history_cell::new_warning_event(warning));
                }
                self.request_redraw();
            }
        } else {
            self.rate_limit_snapshot = None;
        }
    }
    /// Finalize any active exec as failed and stop/clear running UI state.

@@ -384,12 +384,12 @@ fn rate_limit_warnings_emit_thresholds() {
    let mut state = RateLimitWarningState::default();
    let mut warnings: Vec<String> = Vec::new();

    warnings.extend(state.take_warnings(10.0, 55.0));
    warnings.extend(state.take_warnings(55.0, 10.0));
    warnings.extend(state.take_warnings(10.0, 80.0));
    warnings.extend(state.take_warnings(80.0, 10.0));
    warnings.extend(state.take_warnings(10.0, 95.0));
    warnings.extend(state.take_warnings(95.0, 10.0));
    warnings.extend(state.take_warnings(Some(10.0), Some(55.0)));
    warnings.extend(state.take_warnings(Some(55.0), Some(10.0)));
    warnings.extend(state.take_warnings(Some(10.0), Some(80.0)));
    warnings.extend(state.take_warnings(Some(80.0), Some(10.0)));
    warnings.extend(state.take_warnings(Some(10.0), Some(95.0)));
    warnings.extend(state.take_warnings(Some(95.0), Some(10.0)));

    assert_eq!(
        warnings,

@@ -11,6 +11,9 @@ use crate::wrapping::RtOptions;
use crate::wrapping::word_wrap_line;
use crate::wrapping::word_wrap_lines;
use base64::Engine;
use chrono::DateTime;
use chrono::Duration as ChronoDuration;
use chrono::Local;
use codex_ansi_escape::ansi_escape_line;
use codex_common::create_config_summary_entries;
use codex_common::elapsed::format_duration;
@@ -25,6 +28,7 @@ use codex_core::project_doc::discover_project_doc_paths;
use codex_core::protocol::FileChange;
use codex_core::protocol::McpInvocation;
use codex_core::protocol::RateLimitSnapshot;
use codex_core::protocol::RateLimitWindow;
use codex_core::protocol::SandboxPolicy;
use codex_core::protocol::SessionConfiguredEvent;
use codex_core::protocol::TokenUsage;
@@ -47,6 +51,7 @@ use ratatui::widgets::WidgetRef;
use ratatui::widgets::Wrap;
use std::any::Any;
use std::collections::HashMap;
use std::convert::TryFrom;
use std::io::Cursor;
use std::path::Path;
use std::path::PathBuf;
@@ -1078,11 +1083,54 @@ pub(crate) fn new_warning_event(message: String) -> PlainHistoryCell {
    }
}

#[derive(Debug, Clone)]
pub(crate) struct RateLimitWindowDisplay {
    pub used_percent: f64,
    pub resets_at: Option<String>,
}

impl RateLimitWindowDisplay {
    fn from_window(window: &RateLimitWindow, captured_at: DateTime<Local>) -> Self {
        let resets_at = window
            .resets_in_seconds
            .and_then(|seconds| i64::try_from(seconds).ok())
            .and_then(|secs| captured_at.checked_add_signed(ChronoDuration::seconds(secs)))
            .map(|dt| dt.format("%b %-d, %Y %-I:%M %p").to_string());

        Self {
            used_percent: window.used_percent,
            resets_at,
        }
    }
}

#[derive(Debug, Clone)]
pub(crate) struct RateLimitSnapshotDisplay {
    pub primary: Option<RateLimitWindowDisplay>,
    pub secondary: Option<RateLimitWindowDisplay>,
}

pub(crate) fn rate_limit_snapshot_display(
    snapshot: &RateLimitSnapshot,
    captured_at: DateTime<Local>,
) -> RateLimitSnapshotDisplay {
    RateLimitSnapshotDisplay {
        primary: snapshot
            .primary
            .as_ref()
            .map(|window| RateLimitWindowDisplay::from_window(window, captured_at)),
        secondary: snapshot
            .secondary
            .as_ref()
            .map(|window| RateLimitWindowDisplay::from_window(window, captured_at)),
    }
}

pub(crate) fn new_status_output(
    config: &Config,
    usage: &TokenUsage,
    session_id: &Option<ConversationId>,
    rate_limits: Option<&RateLimitSnapshot>,
    rate_limits: Option<&RateLimitSnapshotDisplay>,
) -> PlainHistoryCell {
    let mut lines: Vec<Line<'static>> = Vec::new();
    lines.push("/status".magenta().into());
@@ -1611,23 +1659,39 @@ fn format_mcp_invocation<'a>(invocation: McpInvocation) -> Line<'a> {
    invocation_spans.into()
}

fn build_status_limit_lines(snapshot: Option<&RateLimitSnapshot>) -> Vec<Line<'static>> {
fn build_status_limit_lines(snapshot: Option<&RateLimitSnapshotDisplay>) -> Vec<Line<'static>> {
    let mut lines: Vec<Line<'static>> =
        vec![vec![padded_emoji("⏱️").into(), "Usage Limits".bold()].into()];

    match snapshot {
        Some(snapshot) => {
            let rows = [
                ("5h limit".to_string(), snapshot.primary_used_percent),
                ("Weekly limit".to_string(), snapshot.secondary_used_percent),
            ];
            let label_width = rows
                .iter()
                .map(|(label, _)| UnicodeWidthStr::width(label.as_str()))
                .max()
                .unwrap_or(0);
            for (label, percent) in rows {
                lines.push(build_status_limit_line(&label, percent, label_width));
            let mut windows: Vec<(&str, &RateLimitWindowDisplay)> = Vec::new();
            if let Some(primary) = snapshot.primary.as_ref() {
                windows.push(("5h limit", primary));
            }
            if let Some(secondary) = snapshot.secondary.as_ref() {
                windows.push(("Weekly limit", secondary));
            }

            if windows.is_empty() {
                lines.push(" • No rate limit data available.".into());
            } else {
                let label_width = windows
                    .iter()
                    .map(|(label, _)| UnicodeWidthStr::width(*label))
                    .max()
                    .unwrap_or(0);

                for (label, window) in windows {
                    lines.push(build_status_limit_line(
                        label,
                        window.used_percent,
                        label_width,
                    ));
                    if let Some(resets_at) = window.resets_at.as_deref() {
                        lines.push(build_status_reset_line(resets_at));
                    }
                }
            }
        }
        None => lines.push(" • Send a message to load usage data.".into()),
@@ -1651,6 +1715,10 @@ fn build_status_limit_line(label: &str, percent_used: f64, label_width: usize) -
    Line::from(spans)
}

fn build_status_reset_line(resets_at: &str) -> Line<'static> {
    vec![" ".into(), format!("Resets at: {resets_at}").dim()].into()
}

fn render_status_limit_progress_bar(percent_used: f64) -> String {
    let ratio = (percent_used / 100.0).clamp(0.0, 1.0);
    let filled = (ratio * STATUS_LIMIT_BAR_SEGMENTS as f64).round() as usize;

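The display conversion above turns the relative `resets_in_seconds` into an absolute local timestamp at the moment the snapshot is captured. A self-contained chrono sketch of that arithmetic, using the same format string as the diff; the function here is local to the example, not the crate's API.

```rust
use chrono::{DateTime, Duration, Local};

/// Converts "resets in N seconds" into an absolute local time string,
/// mirroring the formatting used for the /status "Resets at:" line.
fn resets_at(captured_at: DateTime<Local>, resets_in_seconds: Option<u64>) -> Option<String> {
    resets_in_seconds
        .and_then(|seconds| i64::try_from(seconds).ok())
        .and_then(|secs| captured_at.checked_add_signed(Duration::seconds(secs)))
        .map(|dt| dt.format("%b %-d, %Y %-I:%M %p").to_string())
}

fn main() {
    let now = Local::now();
    // A window that resets in 30 minutes, e.g. something like "Sep 30, 2025 4:45 PM".
    println!("{:?}", resets_at(now, Some(1800)));
    // No reset header was present, so no "Resets at:" line would be rendered.
    assert_eq!(resets_at(now, None), None);
}
```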
16
codex-rs/utils/readiness/Cargo.toml
Normal file
@@ -0,0 +1,16 @@
[package]
name = "codex-utils-readiness"
version.workspace = true
edition.workspace = true

[dependencies]
async-trait = { workspace = true }
thiserror = { workspace = true }
time = { workspace = true }
tokio = { workspace = true, features = ["sync", "time"] }

[dev-dependencies]
tokio = { workspace = true, features = ["macros", "rt", "rt-multi-thread"] }

[lints]
workspace = true
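The new crate's source is not included in this compare view, so the following is only a rough illustration of the concept described by the module comment that moved here, a readiness flag with token-based authorization and async waiting on Tokio. All names and behavior below are assumptions, not the actual `codex-utils-readiness` API.

```rust
use tokio::sync::watch;

/// Illustrative only: a flag that starts "not ready", can be flipped exactly
/// once by whoever holds the token, and can be awaited by any task.
#[derive(Clone)]
struct ReadinessFlag {
    rx: watch::Receiver<bool>,
}

struct ReadinessToken {
    tx: watch::Sender<bool>,
}

fn readiness_pair() -> (ReadinessToken, ReadinessFlag) {
    let (tx, rx) = watch::channel(false);
    (ReadinessToken { tx }, ReadinessFlag { rx })
}

impl ReadinessToken {
    fn mark_ready(self) {
        // Consuming `self` means only the token holder can flip the flag, once.
        let _ = self.tx.send(true);
    }
}

impl ReadinessFlag {
    async fn wait_ready(mut self) {
        // wait_for resolves immediately if the value is already true.
        let _ = self.rx.wait_for(|ready| *ready).await;
    }
}

#[tokio::main]
async fn main() {
    let (token, flag) = readiness_pair();
    let waiter = tokio::spawn(flag.wait_ready());
    token.mark_ready();
    waiter.await.unwrap();
    println!("became ready");
}
```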