mirror of
https://github.com/openai/codex.git
synced 2026-05-02 12:21:26 +03:00
## Why Follow-up to #16345, the Bazel clippy rollout in #15955, and the cleanup pass in #16353. `cargo clippy` was enforcing the workspace deny-list from `codex-rs/Cargo.toml` because the member crates opt into `[lints] workspace = true`, but Bazel clippy was only using `rules_rust` plus `clippy.toml`. That left the Bazel lane vulnerable to drift: `clippy.toml` can tune lint behavior, but it cannot set allow/warn/deny/forbid levels. This PR now closes both sides of the follow-up. It keeps `.bazelrc` in sync with `[workspace.lints.clippy]`, and it fixes the real clippy violations that the newly-synced Windows Bazel lane surfaced once that deny-list started matching Cargo. ## What Changed - added `.github/scripts/verify_bazel_clippy_lints.py`, a Python check that parses `codex-rs/Cargo.toml` with `tomllib`, reads the Bazel `build:clippy` `clippy_flag` entries from `.bazelrc`, and reports missing, extra, or mismatched lint levels - ran that verifier from the lightweight `ci.yml` workflow so the sync check does not depend on a Rust toolchain being installed first - expanded the `.bazelrc` comment to explain the Cargo `workspace = true` linkage and why Bazel needs the deny-list duplicated explicitly - fixed the Windows-only `codex-windows-sandbox` violations that Bazel clippy reported after the sync, using the same style as #16353: inline `format!` args, method references instead of trivial closures, removed redundant clones, and replaced SID conversion `unwrap` and `expect` calls with proper errors - cleaned up the remaining cross-platform violations the Bazel lane exposed in `codex-backend-client` and `core_test_support` ## Testing Key new test introduced by this PR: `python3 .github/scripts/verify_bazel_clippy_lints.py`
175 lines
5.4 KiB
Rust
175 lines
5.4 KiB
Rust
use anyhow::{anyhow, Result};
|
|
use dirs_next::home_dir;
|
|
use std::collections::HashMap;
|
|
use std::env;
|
|
use std::fs::{self, File};
|
|
use std::io::Write;
|
|
use std::path::{Path, PathBuf};
|
|
|
|
/// Rewrites any env value that names the Unix null device (`/dev/null`, or the
/// backslashed `\\dev\\null` form) to the Windows null device `NUL`.
///
/// Matching is case-insensitive and ignores surrounding whitespace; all other
/// values are left untouched. Keys are never changed, only values.
pub fn normalize_null_device_env(env_map: &mut HashMap<String, String>) {
    // Mutate values in place: no key snapshot, no value clones, no re-insert.
    for value in env_map.values_mut() {
        let normalized = value.trim().to_ascii_lowercase();
        if normalized == "/dev/null" || normalized == "\\\\dev\\\\null" {
            *value = "NUL".to_string();
        }
    }
}
|
|
|
|
/// Forces non-interactive pagers so sandboxed commands never block waiting for
/// keyboard input: `more.com` for git/generic pagers, and an empty `LESS`.
///
/// Existing caller-provided values win; only missing keys get defaults.
pub fn ensure_non_interactive_pager(env_map: &mut HashMap<String, String>) {
    let pager_defaults = [("GIT_PAGER", "more.com"), ("PAGER", "more.com"), ("LESS", "")];
    for (key, value) in pager_defaults {
        env_map
            .entry(key.to_string())
            .or_insert_with(|| value.to_string());
    }
}
|
|
|
|
// Keep PATH and PATHEXT stable for callers that rely on inheriting the parent process env.
|
|
/// Copies `PATH` and `PATHEXT` from the parent process environment into the
/// map, but only when the map does not already define them.
///
/// Keeps command resolution stable for callers that rely on inheriting the
/// parent environment while still letting explicit overrides win.
pub fn inherit_path_env(env_map: &mut HashMap<String, String>) {
    for key in ["PATH", "PATHEXT"] {
        if env_map.contains_key(key) {
            continue;
        }
        if let Ok(inherited) = env::var(key) {
            env_map.insert(key.to_string(), inherited);
        }
    }
}
|
|
|
|
/// Prepends `prefix` to the map's `PATH` (falling back to the process `PATH`
/// when the map has none), using the Windows `;` separator.
///
/// No-op when `prefix` is already the first entry (compared ASCII
/// case-insensitively, matching Windows path semantics).
fn prepend_path(env_map: &mut HashMap<String, String>, prefix: &str) {
    let existing = env_map
        .get("PATH")
        .cloned()
        .or_else(|| env::var("PATH").ok())
        .unwrap_or_default();
    // Inspect only the first segment lazily — no need to materialize every
    // PATH component just to check the head.
    if existing
        .split(';')
        .next()
        .is_some_and(|head| head.eq_ignore_ascii_case(prefix))
    {
        return;
    }
    let new_path = if existing.is_empty() {
        prefix.to_string()
    } else {
        format!("{prefix};{existing}")
    };
    env_map.insert("PATH".into(), new_path);
}
|
|
|
|
/// Moves `.BAT` then `.CMD` to the front of `PATHEXT` so the deny-bin batch
/// stubs win command resolution over real `.EXE` binaries later on `PATH`.
///
/// Reads `PATHEXT` from the map, then the process env, then a standard
/// default. Extension comparison is case-insensitive; the original casing of
/// each kept entry is preserved, empty segments are dropped, and (as before)
/// duplicate `.BAT`/`.CMD` entries collapse to the single fronted one.
fn reorder_pathext_for_stubs(env_map: &mut HashMap<String, String>) {
    let default = env_map
        .get("PATHEXT")
        .cloned()
        .or_else(|| env::var("PATHEXT").ok())
        // Lazy fallback: avoid allocating the default when a value exists.
        .unwrap_or_else(|| ".COM;.EXE;.BAT;.CMD".to_string());
    // Borrow segments from `default` instead of allocating owned Strings.
    let exts: Vec<&str> = default.split(';').filter(|e| !e.is_empty()).collect();
    let is_stub_ext =
        |e: &str| e.eq_ignore_ascii_case(".BAT") || e.eq_ignore_ascii_case(".CMD");
    let mut ordered: Vec<&str> = Vec::with_capacity(exts.len());
    // Front the stub extensions in fixed .BAT-then-.CMD order, keeping the
    // first occurrence's original casing.
    for want in [".BAT", ".CMD"] {
        if let Some(found) = exts.iter().copied().find(|e| e.eq_ignore_ascii_case(want)) {
            ordered.push(found);
        }
    }
    // Everything else follows in its original relative order.
    ordered.extend(exts.iter().copied().filter(|e| !is_stub_ext(e)));
    env_map.insert("PATHEXT".into(), ordered.join(";"));
}
|
|
|
|
fn ensure_denybin(tools: &[&str], denybin_dir: Option<&Path>) -> Result<PathBuf> {
|
|
let base = match denybin_dir {
|
|
Some(p) => p.to_path_buf(),
|
|
None => {
|
|
let home = home_dir().ok_or_else(|| anyhow!("no home dir"))?;
|
|
home.join(".sbx-denybin")
|
|
}
|
|
};
|
|
fs::create_dir_all(&base)?;
|
|
for tool in tools {
|
|
for ext in [".bat", ".cmd"] {
|
|
let path = base.join(format!("{tool}{ext}"));
|
|
if !path.exists() {
|
|
let mut f = File::create(&path)?;
|
|
f.write_all(b"@echo off\\r\\nexit /b 1\\r\\n")?;
|
|
}
|
|
}
|
|
}
|
|
Ok(base)
|
|
}
|
|
|
|
pub fn apply_no_network_to_env(env_map: &mut HashMap<String, String>) -> Result<()> {
|
|
env_map.insert("SBX_NONET_ACTIVE".into(), "1".into());
|
|
env_map
|
|
.entry("HTTP_PROXY".into())
|
|
.or_insert_with(|| "http://127.0.0.1:9".into());
|
|
env_map
|
|
.entry("HTTPS_PROXY".into())
|
|
.or_insert_with(|| "http://127.0.0.1:9".into());
|
|
env_map
|
|
.entry("ALL_PROXY".into())
|
|
.or_insert_with(|| "http://127.0.0.1:9".into());
|
|
env_map
|
|
.entry("NO_PROXY".into())
|
|
.or_insert_with(|| "localhost,127.0.0.1,::1".into());
|
|
env_map
|
|
.entry("PIP_NO_INDEX".into())
|
|
.or_insert_with(|| "1".into());
|
|
env_map
|
|
.entry("PIP_DISABLE_PIP_VERSION_CHECK".into())
|
|
.or_insert_with(|| "1".into());
|
|
env_map
|
|
.entry("NPM_CONFIG_OFFLINE".into())
|
|
.or_insert_with(|| "true".into());
|
|
env_map
|
|
.entry("CARGO_NET_OFFLINE".into())
|
|
.or_insert_with(|| "true".into());
|
|
env_map
|
|
.entry("GIT_HTTP_PROXY".into())
|
|
.or_insert_with(|| "http://127.0.0.1:9".into());
|
|
env_map
|
|
.entry("GIT_HTTPS_PROXY".into())
|
|
.or_insert_with(|| "http://127.0.0.1:9".into());
|
|
env_map
|
|
.entry("GIT_SSH_COMMAND".into())
|
|
.or_insert_with(|| "cmd /c exit 1".into());
|
|
env_map
|
|
.entry("GIT_ALLOW_PROTOCOLS".into())
|
|
.or_insert_with(|| "".into());
|
|
|
|
let base = ensure_denybin(&["ssh", "scp"], /*denybin_dir*/ None)?;
|
|
for tool in ["curl", "wget"] {
|
|
for ext in [".bat", ".cmd"] {
|
|
let p = base.join(format!("{tool}{ext}"));
|
|
if p.exists() {
|
|
let _ = fs::remove_file(&p);
|
|
}
|
|
}
|
|
}
|
|
prepend_path(env_map, &base.to_string_lossy());
|
|
reorder_pathext_for_stubs(env_map);
|
|
Ok(())
|
|
}
|