This commit is contained in:
Eason Goodale
2025-04-20 03:47:48 -07:00
parent b1cef74d8c
commit 379b023a7f
64 changed files with 6753 additions and 8117 deletions

View File

@@ -496,7 +496,6 @@ export class AgentLoop {
if (this.model.startsWith("o")) {
reasoning = { effort: "high" };
if (this.model === "o3" || this.model === "o4-mini") {
// @ts-expect-error waiting for API type update
reasoning.summary = "auto";
}
}
@@ -517,6 +516,7 @@ export class AgentLoop {
stream: true,
parallel_tool_calls: false,
reasoning,
...(this.config.flexMode ? { service_tier: "flex" } : {}),
tools: [
{
type: "function",
@@ -791,6 +791,41 @@ export class AgentLoop {
this.onLoading(false);
return;
}
// Suppress internal stack on JSON parse failures
if (err instanceof SyntaxError) {
this.onItem({
id: `error-${Date.now()}`,
type: "message",
role: "system",
content: [
{
type: "input_text",
text: "⚠️ Failed to parse streaming response (invalid JSON). Please `/clear` to reset.",
},
],
});
this.onLoading(false);
return;
}
// Handle OpenAI API quota errors
if (
err instanceof Error &&
(err as { code?: string }).code === "insufficient_quota"
) {
this.onItem({
id: `error-${Date.now()}`,
type: "message",
role: "system",
content: [
{
type: "input_text",
text: "⚠️ Insufficient quota. Please check your billing details and retry.",
},
],
});
this.onLoading(false);
return;
}
throw err;
} finally {
this.currentStream = null;

View File

@@ -1,5 +1,6 @@
import type { ExecInput, ExecResult } from "./sandbox/interface.js";
import type { SpawnOptions } from "child_process";
import type { ParseEntry } from "shell-quote";
import { process_patch } from "./apply-patch.js";
import { SandboxType } from "./sandbox/interface.js";
@@ -8,9 +9,17 @@ import { exec as rawExec } from "./sandbox/raw-exec.js";
import { formatCommandForDisplay } from "../../format-command.js";
import fs from "fs";
import os from "os";
import { parse } from "shell-quote";
const DEFAULT_TIMEOUT_MS = 10_000; // 10 seconds
/**
 * Decide whether `cmd` must be executed through a shell: true when any
 * argument tokenizes to a shell operator (e.g. `&&`, `|`, `>`), which a
 * direct `spawn` without `shell: true` would not interpret.
 */
function requiresShell(cmd: Array<string>): boolean {
  for (const arg of cmd) {
    const tokens = parse(arg) as Array<ParseEntry>;
    // Operator tokens come back from shell-quote as objects with an `op` key;
    // plain words come back as strings.
    const hasOperator = tokens.some(
      (token) => typeof token === "object" && "op" in token,
    );
    if (hasOperator) {
      return true;
    }
  }
  return false;
}
/**
* This function should never return a rejected promise: errors should be
* mapped to a non-zero exit code and the error message should be in stderr.
@@ -33,6 +42,7 @@ export function exec(
const opts: SpawnOptions = {
timeout: timeoutInMillis || DEFAULT_TIMEOUT_MS,
...(requiresShell(cmd) ? { shell: true } : {}),
...(workdir ? { cwd: workdir } : {}),
};
// Merge default writable roots with any user-specified ones.

View File

@@ -3,7 +3,6 @@ import type { SpawnOptions } from "child_process";
import { exec } from "./raw-exec.js";
import { log } from "../log.js";
import { realpathSync } from "fs";
import { CONFIG_DIR } from "src/utils/config.js";
function getCommonRoots() {
@@ -30,9 +29,7 @@ export function execWithSeatbelt(
const { policies, params } = writableRoots
.map((root, index) => ({
policy: `(subpath (param "WRITABLE_ROOT_${index}"))`,
// the kernel resolves symlinks before handing them to seatbelt for checking
// so store the canonicalized form in the policy to be compared against
param: `-DWRITABLE_ROOT_${index}=${realpathSync(root)}`,
param: `-DWRITABLE_ROOT_${index}=${root}`,
}))
.reduce(
(

View File

@@ -0,0 +1,82 @@
import type {
ResponseItem,
ResponseOutputItem,
} from "openai/resources/responses/responses.mjs";
/**
 * Build a GitHub "issues/new" URL that prefills the Codex `2-bug-report.yml`
 * issue template with whatever structured data we can infer from the current
 * session (CLI version, model, platform, and a bullet list of user steps).
 */
export function buildBugReportUrl({
  items,
  cliVersion,
  model,
  platform,
}: {
  /** Chat history so we can summarise user steps */
  items: Array<ResponseItem | ResponseOutputItem>;
  /** CLI revision string (e.g. output of `codex --revision`) */
  cliVersion: string;
  /** Active model name */
  model: string;
  /** Platform string e.g. `darwin arm64 23.0.0` */
  platform: string;
}): string {
  const params = new URLSearchParams({
    template: "2-bug-report.yml",
    labels: "bug",
  });
  params.set("version", cliVersion);
  params.set("model", model);
  params.set("platform", platform);

  const bullets: Array<string> = [];

  let index = 0;
  while (index < items.length) {
    const entry = items[index];
    if (!(entry?.type === "message" && entry.role === "user")) {
      index += 1;
      continue;
    }

    // Join every text part of the user message into a single string.
    const contentArray = entry.content as
      | Array<{ text?: string }>
      | undefined;
    const messageText = contentArray
      ?.map((c) => c.text ?? "")
      .join(" ")
      .trim();

    // Count reasoning/assistant items and tool calls that follow this user
    // message, up to (but not including) the next user message.
    let reasoningCount = 0;
    let toolCallCount = 0;
    let cursor = index + 1;
    for (; cursor < items.length; cursor++) {
      const next = items[cursor];
      if (next?.type === "message" && next?.role === "user") {
        break;
      }
      if (
        next?.type === "reasoning" ||
        (next?.type === "message" && next?.role === "assistant")
      ) {
        reasoningCount += 1;
      } else if (next?.type === "function_call") {
        toolCallCount += 1;
      }
    }

    const codeBlock = `\`\`\`\n ${messageText}\n \`\`\``;
    bullets.push(
      `- ${codeBlock}\n - \`${reasoningCount} reasoning\` | \`${toolCallCount} tool\``,
    );

    // Resume scanning at the next user message (or end of history).
    index = cursor;
  }

  if (bullets.length) {
    params.set("steps", bullets.join("\n"));
  }

  return `https://github.com/openai/codex/issues/new?${params.toString()}`;
}

View File

@@ -0,0 +1,143 @@
import { CONFIG_DIR } from "./config";
import boxen from "boxen";
import chalk from "chalk";
import * as cp from "node:child_process";
import { readFile, writeFile } from "node:fs/promises";
import { join } from "node:path";
import which from "which";
/** Shape of the persisted `update-check.json` state file. */
interface UpdateCheckState {
  // Timestamp string (UTC) of the most recent update check, as written by
  // `writeState`; absent before the first check.
  lastUpdateCheck?: string;
}

/** One entry of `npm outdated --json` output, keyed by package name. */
interface PackageInfo {
  current: string;
  wanted: string;
  latest: string;
  dependent: string;
  location: string;
}

/** Result of comparing the installed version against the npm registry. */
interface UpdateCheckInfo {
  currentVersion: string;
  latestVersion: string;
}

// Minimum interval between npm registry queries.
const UPDATE_CHECK_FREQUENCY = 1000 * 60 * 60 * 24; // 1 day
/**
 * Locate the npm executable on the PATH, returning its absolute path or
 * `undefined` when npm cannot be found.
 */
export async function getNPMCommandPath(): Promise<string | undefined> {
  // Windows ships npm as a batch wrapper, so look for "npm.cmd" there.
  const executable = process.platform === "win32" ? "npm.cmd" : "npm";
  try {
    return await which(executable);
  } catch {
    // `which` rejects when the executable is not on the PATH.
    return undefined;
  }
}
/**
 * Query npm for the installed vs. latest published version of
 * `@openai/codex`.
 *
 * Resolves to `undefined` when the package is not installed globally, is not
 * outdated, or when the npm output cannot be parsed; this promise never
 * rejects.
 *
 * @param npmCommandPath Absolute path to the npm executable (see
 *   `getNPMCommandPath`).
 */
export async function checkOutdated(
  npmCommandPath: string,
): Promise<UpdateCheckInfo | undefined> {
  return new Promise((resolve, _reject) => {
    // TODO: support local installation
    // Right now we're using "--global", which only checks global packages.
    // But codex might be installed locally — we should check the local version first,
    // and only fall back to the global one if needed.
    const args = ["outdated", "--global", "--json", "--", "@openai/codex"];
    // corepack npm wrapper would automatically update package.json. disable that behavior.
    // COREPACK_ENABLE_AUTO_PIN disables the package.json overwrite, and
    // COREPACK_ENABLE_PROJECT_SPEC makes the npm view command succeed
    // even if packageManager specified a package manager other than npm.
    const env = {
      ...process.env,
      COREPACK_ENABLE_AUTO_PIN: "0",
      COREPACK_ENABLE_PROJECT_SPEC: "0",
    };
    let options: cp.ExecFileOptions = { env };
    let commandPath = npmCommandPath;
    if (process.platform === "win32") {
      // npm on Windows is a .cmd batch file, which must run through a shell;
      // quote the path in case it contains spaces.
      options = { ...options, shell: true };
      commandPath = `"${npmCommandPath}"`;
    }
    cp.execFile(commandPath, args, options, async (_error, stdout) => {
      try {
        const { name: packageName } = await import("../../package.json");
        const content: Record<string, PackageInfo> = JSON.parse(stdout);
        if (!content[packageName]) {
          // package not installed or not outdated
          resolve(undefined);
          return;
        }
        const currentVersion = content[packageName].current;
        const latestVersion = content[packageName].latest;
        resolve({ currentVersion, latestVersion });
        return;
      } catch {
        // JSON parse or package.json import failed — fall through and
        // resolve with `undefined` below.
      }
      resolve(undefined);
    });
  });
}
/**
 * Check npm (at most once per `UPDATE_CHECK_FREQUENCY`) for a newer published
 * version of `@openai/codex` and, when one is available, print an update
 * banner to the console.
 *
 * The timestamp of the last check is persisted in
 * `$CONFIG_DIR/update-check.json` so repeated CLI invocations inside the
 * frequency window skip the (slow) npm query entirely.
 */
export async function checkForUpdates(): Promise<void> {
  const stateFile = join(CONFIG_DIR, "update-check.json");

  let state: UpdateCheckState | undefined;
  try {
    state = JSON.parse(await readFile(stateFile, "utf8"));
  } catch {
    // Missing or corrupt state file — treat as "never checked".
  }

  if (
    state?.lastUpdateCheck &&
    Date.now() - new Date(state.lastUpdateCheck).valueOf() <
      UPDATE_CHECK_FREQUENCY
  ) {
    return;
  }

  const npmCommandPath = await getNPMCommandPath();
  if (!npmCommandPath) {
    return;
  }

  const packageInfo = await checkOutdated(npmCommandPath);

  // Record the check time even when no update is available so we don't
  // re-run the npm query on every invocation.
  await writeState(stateFile, {
    ...state,
    lastUpdateCheck: new Date().toUTCString(),
  });

  if (!packageInfo) {
    return;
  }

  // Previously the message read "To update, run: <cmd> to update." and the
  // two versions were concatenated with no separator between them.
  const updateMessage = `To update, run: ${chalk.cyan(
    "npm install -g @openai/codex",
  )}`;
  const box = boxen(
    `\
Update available! ${chalk.red(packageInfo.currentVersion)} → ${chalk.green(
      packageInfo.latestVersion,
    )}.
${updateMessage}`,
    {
      padding: 1,
      margin: 1,
      align: "center",
      borderColor: "yellow",
      borderStyle: "round",
    },
  );

  // eslint-disable-next-line no-console
  console.log(box);
}
/** Persist the update-check state file as pretty-printed UTF-8 JSON. */
async function writeState(stateFilePath: string, state: UpdateCheckState) {
  const serialized = JSON.stringify(state, null, 2);
  await writeFile(stateFilePath, serialized, { encoding: "utf8" });
}

View File

@@ -9,9 +9,17 @@ import OpenAI from "openai";
* @param model The model to use for generating the summary
* @returns A concise structured summary string
*/
/**
* Generate a condensed summary of the conversation items.
* @param items The list of conversation items to summarize
* @param model The model to use for generating the summary
* @param flexMode Whether to use the flex-mode service tier
* @returns A concise structured summary string
*/
export async function generateCompactSummary(
items: Array<ResponseItem>,
model: string,
flexMode = false,
): Promise<string> {
const oai = new OpenAI({
apiKey: process.env["OPENAI_API_KEY"],
@@ -44,6 +52,7 @@ export async function generateCompactSummary(
const response = await oai.chat.completions.create({
model,
...(flexMode ? { service_tier: "flex" } : {}),
messages: [
{
role: "assistant",

View File

@@ -56,6 +56,8 @@ export type StoredConfig = {
saveHistory?: boolean;
sensitivePatterns?: Array<string>;
};
/** User-defined safe commands */
safeCommands?: Array<string>;
};
// Minimal config written on first run. An *empty* model string ensures that
@@ -75,15 +77,21 @@ export type AppConfig = {
apiKey?: string;
model: string;
instructions: string;
approvalMode?: AutoApprovalMode;
fullAutoErrorMode?: FullAutoErrorMode;
memory?: MemoryConfig;
/** Whether to enable desktop notifications for responses */
notify: boolean;
/** Enable the "flex-mode" processing mode for supported models (o3, o4-mini) */
flexMode?: boolean;
history?: {
maxSize: number;
saveHistory: boolean;
sensitivePatterns: Array<string>;
};
/** User-defined safe commands */
safeCommands?: Array<string>;
};
// ---------------------------------------------------------------------------
@@ -268,6 +276,8 @@ export const loadConfig = (
: DEFAULT_AGENTIC_MODEL),
instructions: combinedInstructions,
notify: storedConfig.notify === true,
approvalMode: storedConfig.approvalMode,
safeCommands: storedConfig.safeCommands ?? [],
};
// -----------------------------------------------------------------------
@@ -345,6 +355,13 @@ export const loadConfig = (
};
}
// Load user-defined safe commands
if (Array.isArray(storedConfig.safeCommands)) {
config.safeCommands = storedConfig.safeCommands.map(String);
} else {
config.safeCommands = [];
}
return config;
};
@@ -376,6 +393,7 @@ export const saveConfig = (
// Create the config object to save
const configToSave: StoredConfig = {
model: config.model,
approvalMode: config.approvalMode,
};
// Add history settings if they exist
@@ -386,6 +404,10 @@ export const saveConfig = (
sensitivePatterns: config.history.sensitivePatterns,
};
}
// Save: User-defined safe commands
if (config.safeCommands && config.safeCommands.length > 0) {
configToSave.safeCommands = config.safeCommands;
}
if (ext === ".yaml" || ext === ".yml") {
writeFileSync(targetPath, dumpYaml(configToSave), "utf-8");

View File

@@ -0,0 +1,36 @@
import type { ResponseItem } from "openai/resources/responses/responses.mjs";
/**
 * Extracts the patch texts of all `apply_patch` tool calls from the given
 * message history. Returns an empty string when none are found.
 */
export function extractAppliedPatches(items: Array<ResponseItem>): string {
  const collected: Array<string> = [];

  for (const item of items) {
    if (item.type !== "function_call") {
      continue;
    }

    // The SDK type doesn't expose these fields uniformly, so go through an
    // untyped view of the call record.
    const call = item as unknown as { name: unknown; arguments: unknown };
    if (call.name !== "apply_patch" || typeof call.arguments !== "string") {
      continue;
    }

    let parsed: { patch?: string };
    try {
      parsed = JSON.parse(call.arguments) as { patch?: string };
    } catch {
      // Ignore malformed JSON — we never want to crash the overlay.
      continue;
    }

    if (typeof parsed.patch === "string" && parsed.patch.length > 0) {
      collected.push(parsed.patch.trim());
    }
  }

  return collected.join("\n\n");
}

View File

@@ -0,0 +1,29 @@
import { execSync } from "node:child_process";
/**
 * Returns the current Git diff for the working directory. If the current
 * working directory is not inside a Git repository, `isGitRepo` will be
 * false and `diff` will be an empty string.
 */
export function getGitDiff(): {
  isGitRepo: boolean;
  diff: string;
} {
  try {
    // First check whether we are inside a git repository. `git rev-parse`
    // exits with a non-zero status code if not, which makes execSync throw.
    execSync("git rev-parse --is-inside-work-tree", { stdio: "ignore" });
    // If the above call didn't throw, we are inside a git repo. Retrieve the
    // diff including color codes so that the overlay can render them.
    const output = execSync("git diff --color", {
      encoding: "utf8",
      maxBuffer: 10 * 1024 * 1024, // 10 MB ought to be enough for now
    });
    return { isGitRepo: true, diff: output };
  } catch {
    // Either git is not installed or we're not inside a repository.
    return { isGitRepo: false, diff: "" };
  }
}

View File

@@ -4,8 +4,8 @@ import { fileTypeFromBuffer } from "file-type";
import fs from "fs/promises";
import path from "node:path";
// Map dataurls → original filenames so TUI can render friendly labels.
// Populated during createInputItem.
// Map data URLs → original filenames so the TUI can render friendly labels.
// This map is populated during `createInputItem` execution.
export const imageFilenameByDataUrl = new Map<string, string>();
export async function createInputItem(
@@ -20,30 +20,41 @@ export async function createInputItem(
for (const filePath of images) {
/* eslint-disable no-await-in-loop */
let binary: Buffer;
let binary: Buffer | undefined;
try {
binary = await fs.readFile(filePath);
} catch (err: unknown) {
// Gracefully skip files that no longer exist on disk. This can happen
// when an image was attached earlier but has since been moved or
// deleted before the user submitted the prompt. For any other error
// codes rethrow so callers are still notified of unexpected issues
// (e.g. permission errors).
// Gracefully handle files that no longer exist on disk. This can happen
// when an image was attached earlier but has since been moved or deleted
// before the user submitted the prompt.
const e = err as NodeJS.ErrnoException;
if (e?.code === "ENOENT") {
// Skip silently user will simply not include the missing image.
continue;
// Insert a placeholder message so the user is aware a file was missing.
inputItem.content.push({
type: "input_text",
text: `[missing image: ${path.basename(filePath)}]`,
});
continue; // skip to next image
}
// For any other error (e.g. permission issues) bubble up so callers can
// react accordingly.
throw err as Error;
}
if (!binary) {
// Should not happen, but satisfies TypeScript.
continue;
}
const kind = await fileTypeFromBuffer(binary);
/* eslint-enable no-await-in-loop */
const encoded = binary.toString("base64");
const mime = kind?.mime ?? "application/octet-stream";
const dataUrl = `data:${mime};base64,${encoded}`;
// Store pretty label (relative path when possible)
// Store a pretty label (make path relative when possible) so the TUI can
// display something friendlier than a long data URL.
const label = path.isAbsolute(filePath)
? path.relative(process.cwd(), filePath)
: filePath;

View File

@@ -1,4 +1,4 @@
export const CLI_VERSION = "0.1.2504172351"; // Must be in sync with package.json.
export const CLI_VERSION = "0.1.2504181820"; // Must be in sync with package.json.
export const ORIGIN = "codex_cli_ts";
export type TerminalChatSession = {

View File

@@ -0,0 +1,32 @@
// Defines the available slash commands and their descriptions.
// Used for autocompletion in the chat input.

/** A single slash command entry shown in the autocomplete menu. */
export interface SlashCommand {
  // The literal command the user types, including the leading "/".
  command: string;
  // Human-readable description rendered next to the command.
  description: string;
}

/** All supported slash commands, in the order they are presented. */
export const SLASH_COMMANDS: Array<SlashCommand> = [
  {
    command: "/clear",
    description: "Clear conversation history and free up context",
  },
  {
    command: "/clearhistory",
    description: "Clear command history",
  },
  {
    command: "/compact",
    description:
      "Clear conversation history but keep a summary in context. Optional: /compact [instructions for summarization]",
  },
  { command: "/history", description: "Open command history" },
  { command: "/help", description: "Show list of commands" },
  { command: "/model", description: "Open model selection panel" },
  { command: "/approval", description: "Open approval mode selection panel" },
  { command: "/bug", description: "Generate a prefilled GitHub bug report" },
  {
    command: "/diff",
    description:
      "Show git diff of the working directory (or applied patches if not in git)",
  },
];