## Summary

- store a pre-rendered `feedback_log_body` in SQLite so `/feedback` exports keep span prefixes and structured event fields
- render SQLite feedback exports with timestamps and level prefixes to match the old in-memory feedback formatter, while preserving existing trailing newlines
- count `feedback_log_body` in the SQLite retention budget so structured or span-prefixed rows still prune correctly
- bound `/feedback` row loading in SQL with the retention estimate, then apply exact whole-line truncation in Rust so uploads stay capped without splitting lines (a sketch of this step appears after the Testing notes below)

## Details

- add a `feedback_log_body` column to `logs` and backfill it from `message` for existing rows
- capture span names plus formatted span and event fields at write time, since SQLite does not retain enough structure to reconstruct the old formatter later
- keep SQLite feedback queries scoped to the requested thread plus same-process threadless rows
- restore a SQL-side cumulative `estimated_bytes` cap for feedback export queries so over-retained partitions do not load every matching row before truncation
- add focused formatting coverage for exported feedback lines and parity coverage against `tracing_subscriber`

## Testing

- `cargo test -p codex-state`
- `just fix -p codex-state`
- `just fmt`

codex author: `codex resume 019ca1b0-0ecc-78b1-85eb-6befdd7e4f1f`

Co-authored-by: Codex <noreply@openai.com>
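The last Summary bullet is the subtle part: SQL bounds how many rows get loaded via the retention estimate, and a second pass in Rust enforces the exact byte cap by dropping whole lines only. A minimal sketch of that second pass, assuming the newest lines are the ones kept (the function name and signature are hypothetical, not the actual `codex-state` API):

```rust
/// Keep the newest whole lines that fit within `max_bytes`.
/// Hypothetical sketch; `codex-state` does not expose this function.
fn truncate_whole_lines(rendered: &[String], max_bytes: usize) -> Vec<String> {
    let mut kept = Vec::new();
    let mut total = 0usize;
    // Walk newest-first so the most recent lines survive the cap.
    for line in rendered.iter().rev() {
        let cost = line.len() + 1; // account for the trailing newline
        if total + cost > max_bytes {
            break; // drop a whole line, never split one
        }
        total += cost;
        kept.push(line.clone());
    }
    kept.reverse(); // restore chronological order
    kept
}
```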
use std::path::PathBuf;
use std::time::Duration;

use anyhow::Context;
use chrono::DateTime;
use clap::Parser;
use codex_state::LogQuery;
use codex_state::LogRow;
use codex_state::StateRuntime;
use dirs::home_dir;
use owo_colors::OwoColorize;

#[derive(Debug, Parser)]
#[command(name = "codex-state-logs")]
#[command(about = "Tail Codex logs from the dedicated logs SQLite DB with simple filters")]
struct Args {
    /// Path to CODEX_HOME. Defaults to $CODEX_HOME or ~/.codex.
    #[arg(long, env = "CODEX_HOME")]
    codex_home: Option<PathBuf>,

    /// Direct path to the logs SQLite database. Overrides --codex-home.
    #[arg(long)]
    db: Option<PathBuf>,

    /// Log level to match exactly (case-insensitive).
    #[arg(long)]
    level: Option<String>,

    /// Start timestamp (RFC3339 or unix seconds).
    #[arg(long, value_name = "RFC3339|UNIX")]
    from: Option<String>,

    /// End timestamp (RFC3339 or unix seconds).
    #[arg(long, value_name = "RFC3339|UNIX")]
    to: Option<String>,

    /// Substring match on module_path. Repeat to include multiple substrings.
    #[arg(long = "module")]
    module: Vec<String>,

    /// Substring match on file path. Repeat to include multiple substrings.
    #[arg(long = "file")]
    file: Vec<String>,

    /// Match one or more thread ids. Repeat to include multiple threads.
    #[arg(long = "thread-id")]
    thread_id: Vec<String>,

    /// Substring match against the rendered log body.
    #[arg(long)]
    search: Option<String>,

    /// Include logs that do not have a thread id.
    #[arg(long)]
    threadless: bool,

    /// Number of matching rows to show before tailing.
    #[arg(long, default_value_t = 200)]
    backfill: usize,

    /// Poll interval in milliseconds.
    #[arg(long, default_value_t = 500)]
    poll_ms: u64,

    /// Show compact output with only time, level, and rendered log body.
    #[arg(long)]
    compact: bool,
}

#[derive(Debug, Clone)]
struct LogFilter {
    level_upper: Option<String>,
    from_ts: Option<i64>,
    to_ts: Option<i64>,
    module_like: Vec<String>,
    file_like: Vec<String>,
    thread_ids: Vec<String>,
    search: Option<String>,
    include_threadless: bool,
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let args = Args::parse();
    let db_path = resolve_db_path(&args)?;
    let filter = build_filter(&args)?;
    let codex_home = db_path
        .parent()
        .map(ToOwned::to_owned)
        .unwrap_or_else(|| PathBuf::from("."));
    let runtime = StateRuntime::init(codex_home, "logs-client".to_string()).await?;

    // Print recent matching rows, then remember the highest row id so the
    // tail loop only fetches rows newer than what has already been shown.
    let mut last_id =
        print_backfill(runtime.as_ref(), &filter, args.backfill, args.compact).await?;
    if last_id == 0 {
        last_id = fetch_max_id(runtime.as_ref(), &filter).await?;
    }

    let poll_interval = Duration::from_millis(args.poll_ms);
    loop {
        let rows = fetch_new_rows(runtime.as_ref(), &filter, last_id).await?;
        for row in rows {
            last_id = last_id.max(row.id);
            println!("{}", format_row(&row, args.compact));
        }
        tokio::time::sleep(poll_interval).await;
    }
}

fn resolve_db_path(args: &Args) -> anyhow::Result<PathBuf> {
    if let Some(db) = args.db.as_ref() {
        return Ok(db.clone());
    }

    let codex_home = args.codex_home.clone().unwrap_or_else(default_codex_home);
    Ok(codex_state::logs_db_path(codex_home.as_path()))
}

fn default_codex_home() -> PathBuf {
    if let Some(home) = home_dir() {
        return home.join(".codex");
    }
    PathBuf::from(".codex")
}

fn build_filter(args: &Args) -> anyhow::Result<LogFilter> {
    let from_ts = args
        .from
        .as_deref()
        .map(parse_timestamp)
        .transpose()
        .context("failed to parse --from")?;
    let to_ts = args
        .to
        .as_deref()
        .map(parse_timestamp)
        .transpose()
        .context("failed to parse --to")?;

    let level_upper = args.level.as_ref().map(|level| level.to_ascii_uppercase());
    let module_like = args
        .module
        .iter()
        .filter(|module| !module.is_empty())
        .cloned()
        .collect::<Vec<_>>();
    let file_like = args
        .file
        .iter()
        .filter(|file| !file.is_empty())
        .cloned()
        .collect::<Vec<_>>();
    let thread_ids = args
        .thread_id
        .iter()
        .filter(|thread_id| !thread_id.is_empty())
        .cloned()
        .collect::<Vec<_>>();

    Ok(LogFilter {
        level_upper,
        from_ts,
        to_ts,
        module_like,
        file_like,
        thread_ids,
        search: args.search.clone(),
        include_threadless: args.threadless,
    })
}

fn parse_timestamp(value: &str) -> anyhow::Result<i64> {
    if let Ok(secs) = value.parse::<i64>() {
        return Ok(secs);
    }

    let dt = DateTime::parse_from_rfc3339(value)
        .with_context(|| format!("expected RFC3339 or unix seconds, got {value}"))?;
    Ok(dt.timestamp())
}

async fn print_backfill(
    runtime: &StateRuntime,
    filter: &LogFilter,
    backfill: usize,
    compact: bool,
) -> anyhow::Result<i64> {
    if backfill == 0 {
        return Ok(0);
    }

    // The backfill query returns newest-first; reverse so rows print in
    // chronological order.
    let mut rows = fetch_backfill(runtime, filter, backfill).await?;
    rows.reverse();

    let mut last_id = 0;
    for row in rows {
        last_id = last_id.max(row.id);
        println!("{}", format_row(&row, compact));
    }
    Ok(last_id)
}

async fn fetch_backfill(
    runtime: &StateRuntime,
    filter: &LogFilter,
    backfill: usize,
) -> anyhow::Result<Vec<LogRow>> {
    let query = to_log_query(
        filter,
        Some(backfill),
        /*after_id*/ None,
        /*descending*/ true,
    );
    runtime
        .query_logs(&query)
        .await
        .context("failed to fetch backfill logs")
}

async fn fetch_new_rows(
    runtime: &StateRuntime,
    filter: &LogFilter,
    last_id: i64,
) -> anyhow::Result<Vec<LogRow>> {
    let query = to_log_query(
        filter,
        /*limit*/ None,
        Some(last_id),
        /*descending*/ false,
    );
    runtime
        .query_logs(&query)
        .await
        .context("failed to fetch new logs")
}

async fn fetch_max_id(runtime: &StateRuntime, filter: &LogFilter) -> anyhow::Result<i64> {
    let query = to_log_query(
        filter, /*limit*/ None, /*after_id*/ None, /*descending*/ false,
    );
    runtime
        .max_log_id(&query)
        .await
        .context("failed to fetch max log id")
}

fn to_log_query(
    filter: &LogFilter,
    limit: Option<usize>,
    after_id: Option<i64>,
    descending: bool,
) -> LogQuery {
    LogQuery {
        level_upper: filter.level_upper.clone(),
        from_ts: filter.from_ts,
        to_ts: filter.to_ts,
        module_like: filter.module_like.clone(),
        file_like: filter.file_like.clone(),
        thread_ids: filter.thread_ids.clone(),
        search: filter.search.clone(),
        include_threadless: filter.include_threadless,
        after_id,
        limit,
        descending,
    }
}

fn format_row(row: &LogRow, compact: bool) -> String {
    let timestamp = formatter::ts(row.ts, row.ts_nanos, compact);
    let level = row.level.as_str();
    let target = row.target.as_str();
    let message = row.message.as_deref().unwrap_or("");
    let level_colored = formatter::level(level);
    let timestamp_colored = timestamp.dimmed().to_string();
    let thread_id = row.thread_id.as_deref().unwrap_or("-");
    let thread_id_colored = thread_id.blue().dimmed().to_string();
    let target_colored = target.dimmed().to_string();
    let message_colored = heuristic_formatting(message);
    if compact {
        format!("{timestamp_colored} {level_colored} {message_colored}")
    } else {
        format!(
            "{timestamp_colored} {level_colored} [{thread_id_colored}] {target_colored} - {message_colored}"
        )
    }
}

// Apply special-case rendering for messages that look like apply_patch tool
// calls; everything else is just bolded.
fn heuristic_formatting(message: &str) -> String {
    if matcher::apply_patch(message) {
        formatter::apply_patch(message)
    } else {
        message.bold().to_string()
    }
}

mod matcher {
    pub(super) fn apply_patch(message: &str) -> bool {
        message.contains("ToolCall: apply_patch")
    }
}

mod formatter {
    use chrono::DateTime;
    use chrono::SecondsFormat;
    use chrono::Utc;
    use owo_colors::OwoColorize;

    pub(super) fn apply_patch(message: &str) -> String {
        message
            .lines()
            .map(|line| {
                if line.starts_with('+') {
                    line.green().bold().to_string()
                } else if line.starts_with('-') {
                    line.red().bold().to_string()
                } else {
                    line.bold().to_string()
                }
            })
            .collect::<Vec<_>>()
            .join("\n")
    }

    pub(super) fn ts(ts: i64, ts_nanos: i64, compact: bool) -> String {
        let nanos = u32::try_from(ts_nanos).unwrap_or(0);
        match DateTime::<Utc>::from_timestamp(ts, nanos) {
            Some(dt) if compact => dt.format("%H:%M:%S").to_string(),
            Some(dt) => dt.to_rfc3339_opts(SecondsFormat::Millis, true),
            None => format!("{ts}.{ts_nanos:09}Z"),
        }
    }

    pub(super) fn level(level: &str) -> String {
        let padded = format!("{level:<5}");
        if level.eq_ignore_ascii_case("error") {
            return padded.red().bold().to_string();
        }
        if level.eq_ignore_ascii_case("warn") {
            return padded.yellow().bold().to_string();
        }
        if level.eq_ignore_ascii_case("info") {
            return padded.green().bold().to_string();
        }
        if level.eq_ignore_ascii_case("debug") {
            return padded.blue().bold().to_string();
        }
        if level.eq_ignore_ascii_case("trace") {
            return padded.magenta().bold().to_string();
        }
        padded.bold().to_string()
    }
}
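
In the default (non-compact) mode, `format_row` renders each row as `{timestamp} {level} [{thread_id}] {target} - {message}`, with RFC3339 millisecond timestamps and `-` for threadless rows. A tail therefore looks roughly like this (values illustrative, colors omitted):

```
2026-05-03T01:42:20.123Z INFO  [0199a1b0] codex_core::session - starting turn
2026-05-03T01:42:20.841Z DEBUG [-] codex_state::logs - pruned 12 rows
```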