feat: log client use min log level (#18661)

In the log client, use the log level filter as a minimum severity
instead of an exact match.

---------

Co-authored-by: Codex <noreply@openai.com>
This commit is contained in:
jif-oai
2026-04-20 14:40:39 +01:00
committed by GitHub
parent 7e5588699d
commit e1c289e11b
4 changed files with 175 additions and 13 deletions

View File

@@ -4,6 +4,7 @@ use std::time::Duration;
use anyhow::Context;
use chrono::DateTime;
use clap::Parser;
use clap::ValueEnum;
use codex_state::LogQuery;
use codex_state::LogRow;
use codex_state::StateRuntime;
@@ -22,9 +23,9 @@ struct Args {
#[arg(long)]
db: Option<PathBuf>,
/// Log level to match exactly (case-insensitive).
#[arg(long)]
level: Option<String>,
/// Minimum log level to include.
#[arg(long, value_enum, ignore_case = true)]
level: Option<LogLevelThreshold>,
/// Start timestamp (RFC3339 or unix seconds).
#[arg(long, value_name = "RFC3339|UNIX")]
@@ -69,7 +70,7 @@ struct Args {
#[derive(Debug, Clone)]
struct LogFilter {
level_upper: Option<String>,
levels_upper: Vec<String>,
from_ts: Option<i64>,
to_ts: Option<i64>,
module_like: Vec<String>,
@@ -79,6 +80,28 @@ struct LogFilter {
include_threadless: bool,
}
/// Minimum severity accepted by the `--level` CLI flag.
///
/// Variants are declared from least to most severe; choosing one admits that
/// level and everything more severe (expanded by `levels_upper`). Parsed by
/// clap via `ValueEnum`, so only these five canonical names are accepted.
#[derive(Debug, Clone, Copy, PartialEq, Eq, ValueEnum)]
enum LogLevelThreshold {
    Trace,
    Debug,
    Info,
    Warn,
    Error,
}
impl LogLevelThreshold {
fn levels_upper(self) -> Vec<String> {
let levels = match self {
LogLevelThreshold::Trace => &["TRACE", "DEBUG", "INFO", "WARN", "ERROR"][..],
LogLevelThreshold::Debug => &["DEBUG", "INFO", "WARN", "ERROR"],
LogLevelThreshold::Info => &["INFO", "WARN", "ERROR"],
LogLevelThreshold::Warn => &["WARN", "ERROR"],
LogLevelThreshold::Error => &["ERROR"],
};
levels.iter().map(ToString::to_string).collect()
}
}
#[tokio::main]
async fn main() -> anyhow::Result<()> {
let args = Args::parse();
@@ -137,7 +160,9 @@ fn build_filter(args: &Args) -> anyhow::Result<LogFilter> {
.transpose()
.context("failed to parse --to")?;
let level_upper = args.level.as_ref().map(|level| level.to_ascii_uppercase());
let levels_upper = args
.level
.map_or_else(Vec::new, LogLevelThreshold::levels_upper);
let module_like = args
.module
.iter()
@@ -158,7 +183,7 @@ fn build_filter(args: &Args) -> anyhow::Result<LogFilter> {
.collect::<Vec<_>>();
Ok(LogFilter {
level_upper,
levels_upper,
from_ts,
to_ts,
module_like,
@@ -251,7 +276,7 @@ fn to_log_query(
descending: bool,
) -> LogQuery {
LogQuery {
level_upper: filter.level_upper.clone(),
levels_upper: filter.levels_upper.clone(),
from_ts: filter.from_ts,
to_ts: filter.to_ts,
module_like: filter.module_like.clone(),
@@ -350,3 +375,42 @@ mod formatter {
padded.bold().to_string()
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    #[test]
    fn log_level_threshold_includes_more_severe_levels() {
        // A mid-range threshold admits itself plus everything more severe.
        assert_eq!(
            LogLevelThreshold::Warn.levels_upper(),
            vec!["WARN".to_string(), "ERROR".to_string()]
        );
        // The lowest threshold admits every level.
        let all: Vec<String> = ["TRACE", "DEBUG", "INFO", "WARN", "ERROR"]
            .iter()
            .map(|level| level.to_string())
            .collect();
        assert_eq!(LogLevelThreshold::Trace.levels_upper(), all);
    }

    #[test]
    fn log_level_rejects_aliases_and_unknown_values() {
        // Only the five canonical names parse; aliases and comma lists fail.
        for bad in ["warning", "err", "warn,error"] {
            assert!(Args::try_parse_from(["codex-state-logs", "--level", bad]).is_err());
        }
    }

    #[test]
    fn log_level_accepts_canonical_values_case_insensitively() {
        // `ignore_case = true` on the clap arg makes "WARN" parse as Warn.
        let parsed = Args::try_parse_from(["codex-state-logs", "--level", "WARN"])
            .expect("parse uppercase log level");
        assert_eq!(parsed.level, Some(LogLevelThreshold::Warn));
    }
}

View File

@@ -32,7 +32,7 @@ pub struct LogRow {
#[derive(Clone, Debug, Default)]
pub struct LogQuery {
pub level_upper: Option<String>,
pub levels_upper: Vec<String>,
pub from_ts: Option<i64>,
pub to_ts: Option<i64>,
pub module_like: Vec<String>,

View File

@@ -258,7 +258,15 @@ async fn remove_legacy_db_files(
// sidecar-style paths first so the main file is attempted last.
legacy_paths.sort_by_key(|path| std::cmp::Reverse(path.as_os_str().len()));
for legacy_path in legacy_paths {
if let Err(err) = tokio::fs::remove_file(&legacy_path).await {
let mut result = tokio::fs::remove_file(&legacy_path).await;
for _ in 0..3 {
if result.is_ok() {
break;
}
tokio::time::sleep(Duration::from_millis(25)).await;
result = tokio::fs::remove_file(&legacy_path).await;
}
if let Err(err) = result {
warn!(
"failed to remove legacy {db_label} db file {}: {err}",
legacy_path.display(),

View File

@@ -464,10 +464,15 @@ fn format_feedback_log_line(
}
fn push_log_filters<'a>(builder: &mut QueryBuilder<'a, Sqlite>, query: &'a LogQuery) {
if let Some(level_upper) = query.level_upper.as_ref() {
builder
.push(" AND UPPER(level) = ")
.push_bind(level_upper.as_str());
if !query.levels_upper.is_empty() {
builder.push(" AND UPPER(level) IN (");
{
let mut separated = builder.separated(", ");
for level_upper in &query.levels_upper {
separated.push_bind(level_upper.as_str());
}
}
builder.push(")");
}
if let Some(from_ts) = query.from_ts {
builder.push(" AND ts >= ").push_bind(from_ts);
@@ -851,6 +856,91 @@ mod tests {
let _ = tokio::fs::remove_dir_all(codex_home).await;
}
#[tokio::test]
async fn query_logs_filters_level_set_without_rewriting_stored_level() {
    let codex_home = unique_temp_dir();
    let runtime = StateRuntime::init(codex_home.clone(), "test-provider".to_string())
        .await
        .expect("initialize runtime");

    // Build a minimal CLI log entry; only the fields under test vary.
    let entry = |ts, level: &str, message: &str, line| LogEntry {
        ts,
        ts_nanos: 0,
        level: level.to_string(),
        target: "cli".to_string(),
        message: Some(message.to_string()),
        feedback_log_body: Some(message.to_string()),
        thread_id: None,
        process_uuid: None,
        file: Some("main.rs".to_string()),
        line: Some(line),
        module_path: None,
    };

    // One row per severity tier; "warn" is stored lowercase on purpose so we
    // can check the query matches case-insensitively without rewriting it.
    runtime
        .insert_logs(&[
            entry(1, "TRACE", "trace-row", 1),
            entry(2, "INFO", "info-row", 2),
            entry(3, "warn", "warn-row", 3),
            entry(4, "ERROR", "error-row", 4),
        ])
        .await
        .expect("insert test logs");

    // Filter for WARN-and-above.
    let rows = runtime
        .query_logs(&LogQuery {
            levels_upper: vec!["WARN".to_string(), "ERROR".to_string()],
            ..Default::default()
        })
        .await
        .expect("query matching logs");

    // The stored (mixed-case) level text must come back untouched.
    let observed: Vec<_> = rows
        .iter()
        .map(|row| (row.level.as_str(), row.message.as_deref()))
        .collect();
    assert_eq!(
        observed,
        vec![("warn", Some("warn-row")), ("ERROR", Some("error-row"))]
    );

    let _ = tokio::fs::remove_dir_all(codex_home).await;
}
#[tokio::test]
async fn insert_logs_prunes_old_rows_when_thread_exceeds_size_limit() {
let codex_home = unique_temp_dir();