Compare commits

...

3 Commits

Author SHA1 Message Date
Michael Bolin
c042246bf5 cloud-tasks: remove cloud-tasks-client crate features
## Why

`codex-cloud-tasks-client` was still using Cargo features to select its HTTP and mock implementations. That leaks feature permutations into both Cargo and Bazel for a crate that does not need them.

## What changed

- delete the `online` and `mock` crate features from `codex-cloud-tasks-client`
- make `codex-backend-client` unconditional and always export both `HttpClient` and `MockClient`
- remove the internal `features = ["mock", "online"]` activation from `codex-cloud-tasks`
- remove the matching Bazel `crate_features` override
- shrink the manifest verifier allowlist to match the remaining exceptions
2026-04-01 11:06:41 -07:00
Michael Bolin
dc263f5926 ci: block new workspace crate features (#16455)
## Why

We already enforce workspace metadata and lint inheritance for
`codex-rs` manifests, but we still allow new crate features to slip into
the workspace. That makes it too easy to add more Cargo-only feature
permutations while we are trying to eliminate them.

## What changed

- extend `verify_cargo_workspace_manifests.py` to reject new
`[features]` tables in workspace crates
- reject new optional dependencies that create implicit crate features
- reject new workspace-to-workspace `features = [...]` activations and
`default-features = false`
- add a narrow temporary allowlist for the existing feature-bearing
manifests and internal feature activations
- make the allowlist self-shrinking so a follow-up removal has to delete
its corresponding exception


---
[//]: # (BEGIN SAPLING FOOTER)
Stack created with [Sapling](https://sapling-scm.com). Best reviewed
with [ReviewStack](https://reviewstack.dev/openai/codex/pull/16455).
* #16457
* #16456
* __->__ #16455
2026-04-01 11:06:36 -07:00
Peter Meyers
e8d5c6b446 Make fuzzy file search case insensitive (#15772)
Makes fuzzy file search use case-insensitive matching instead of
smart-case in `codex-file-search`. I find smart-case to be a poor user
experience — using the wrong case for a letter lowers its match score so
significantly that the match often drops off the results list, effectively making a
search case-sensitive.
2026-04-01 14:04:33 -04:00
7 changed files with 337 additions and 52 deletions

View File

@@ -1,11 +1,13 @@
#!/usr/bin/env python3
"""Verify that codex-rs crates inherit workspace metadata, lints, and names.
"""Verify that codex-rs Cargo manifests follow workspace manifest policy.
This keeps `cargo clippy` aligned with the workspace lint policy by ensuring
each crate opts into `[lints] workspace = true`, and it also checks the crate
name conventions for top-level `codex-rs/*` crates and `codex-rs/utils/*`
crates.
Checks:
- Crates inherit `[workspace.package]` metadata.
- Crates opt into `[lints] workspace = true`.
- Crate names follow the codex-rs directory naming conventions.
- Workspace manifests do not introduce new workspace crate feature toggles
while the remaining exceptions are being removed.
"""
from __future__ import annotations
@@ -24,20 +26,64 @@ TOP_LEVEL_NAME_EXCEPTIONS = {
# Utility crates whose package name deliberately deviates from the
# `codex-utils-<dir>` naming convention applied to `codex-rs/utils/*` crates.
UTILITY_NAME_EXCEPTIONS = {
    "path-utils": "codex-utils-path",
}
# Temporary allowlist of manifests that may still declare a `[features]`
# table, mapping manifest path -> {feature name: expected feature deps}.
# Entries must be deleted together with the features they describe; stale
# entries are reported by `add_unused_exception_errors`.
MANIFEST_FEATURE_EXCEPTIONS = {
    "codex-rs/otel/Cargo.toml": {
        "disable-default-metrics-exporter": (),
    },
    "codex-rs/tui/Cargo.toml": {
        "default": ("voice-input",),
        "vt100-tests": (),
        "debug-logs": (),
        "voice-input": ("dep:cpal",),
    },
}
# Allowlisted `optional = true` dependencies, keyed by
# (manifest path, dependency section label, dependency name).
OPTIONAL_DEPENDENCY_EXCEPTIONS = {
    (
        "codex-rs/tui/Cargo.toml",
        'target.cfg(not(target_os = "linux")).dependencies',
        "cpal",
    ),
}
# Allowlisted `features = [...]` activations on workspace-internal
# dependencies, keyed the same way and mapped to the expected feature tuple.
INTERNAL_DEPENDENCY_FEATURE_EXCEPTIONS = {
    (
        "codex-rs/core/Cargo.toml",
        "dev-dependencies",
        "codex-otel",
    ): ("disable-default-metrics-exporter",),
}
def main() -> int:
failures = [
(path.relative_to(ROOT), errors)
for path in cargo_manifests()
if (errors := manifest_errors(path))
]
if not failures:
internal_package_names = workspace_package_names()
used_manifest_feature_exceptions: set[str] = set()
used_optional_dependency_exceptions: set[tuple[str, str, str]] = set()
used_internal_dependency_feature_exceptions: set[tuple[str, str, str]] = set()
failures_by_path: dict[str, list[str]] = {}
for path in manifests_to_verify():
if errors := manifest_errors(
path,
internal_package_names,
used_manifest_feature_exceptions,
used_optional_dependency_exceptions,
used_internal_dependency_feature_exceptions,
):
failures_by_path[manifest_key(path)] = errors
add_unused_exception_errors(
failures_by_path,
used_manifest_feature_exceptions,
used_optional_dependency_exceptions,
used_internal_dependency_feature_exceptions,
)
if not failures_by_path:
return 0
print(
"Cargo manifests under codex-rs must inherit workspace package metadata and "
"opt into workspace lints."
"Cargo manifests under codex-rs must inherit workspace package metadata, "
"opt into workspace lints, and avoid introducing new workspace crate "
"features."
)
print(
"Cargo only applies `codex-rs/Cargo.toml` `[workspace.lints.clippy]` "
@@ -56,8 +102,13 @@ def main() -> int:
"Package-name checks apply to `codex-rs/<crate>/Cargo.toml` and "
"`codex-rs/utils/<crate>/Cargo.toml`."
)
print(
"Existing workspace crate features are temporarily allowlisted in this "
"script and must shrink as they are removed."
)
print()
for path, errors in failures:
for path in sorted(failures_by_path):
errors = failures_by_path[path]
print(f"{path}:")
for error in errors:
print(f" - {error}")
@@ -65,28 +116,106 @@ def main() -> int:
return 1
def manifest_errors(path: Path) -> list[str]:
def manifest_errors(
path: Path,
internal_package_names: set[str],
used_manifest_feature_exceptions: set[str],
used_optional_dependency_exceptions: set[tuple[str, str, str]],
used_internal_dependency_feature_exceptions: set[tuple[str, str, str]],
) -> list[str]:
manifest = load_manifest(path)
package = manifest.get("package")
if not isinstance(package, dict):
if not isinstance(package, dict) and path != CARGO_RS_ROOT / "Cargo.toml":
return []
errors = []
for field in WORKSPACE_PACKAGE_FIELDS:
if not is_workspace_reference(package.get(field)):
errors.append(f"set `{field}.workspace = true` in `[package]`")
if isinstance(package, dict):
for field in WORKSPACE_PACKAGE_FIELDS:
if not is_workspace_reference(package.get(field)):
errors.append(f"set `{field}.workspace = true` in `[package]`")
lints = manifest.get("lints")
if not (isinstance(lints, dict) and lints.get("workspace") is True):
errors.append("add `[lints]` with `workspace = true`")
lints = manifest.get("lints")
if not (isinstance(lints, dict) and lints.get("workspace") is True):
errors.append("add `[lints]` with `workspace = true`")
expected_name = expected_package_name(path)
if expected_name is not None:
actual_name = package.get("name")
if actual_name != expected_name:
expected_name = expected_package_name(path)
if expected_name is not None:
actual_name = package.get("name")
if actual_name != expected_name:
errors.append(
f"set `[package].name` to `{expected_name}` (found `{actual_name}`)"
)
path_key = manifest_key(path)
features = manifest.get("features")
if features is not None:
normalized_features = normalize_feature_mapping(features)
expected_features = MANIFEST_FEATURE_EXCEPTIONS.get(path_key)
if expected_features is None:
errors.append(
f"set `[package].name` to `{expected_name}` (found `{actual_name}`)"
"remove `[features]`; new workspace crate features are not allowed"
)
else:
used_manifest_feature_exceptions.add(path_key)
if normalized_features != expected_features:
errors.append(
"limit `[features]` to the existing exception list while "
"workspace crate features are being removed "
f"(expected {render_feature_mapping(expected_features)})"
)
for section_name, dependencies in dependency_sections(manifest):
for dependency_name, dependency in dependencies.items():
if not isinstance(dependency, dict):
continue
if dependency.get("optional") is True:
exception_key = (path_key, section_name, dependency_name)
if exception_key in OPTIONAL_DEPENDENCY_EXCEPTIONS:
used_optional_dependency_exceptions.add(exception_key)
else:
errors.append(
"remove `optional = true` from "
f"`{dependency_entry_label(section_name, dependency_name)}`; "
"new optional dependencies are not allowed because they "
"create crate features"
)
if not is_internal_dependency(path, dependency_name, dependency, internal_package_names):
continue
dependency_features = dependency.get("features")
if dependency_features is not None:
normalized_dependency_features = normalize_string_list(
dependency_features
)
exception_key = (path_key, section_name, dependency_name)
expected_dependency_features = (
INTERNAL_DEPENDENCY_FEATURE_EXCEPTIONS.get(exception_key)
)
if expected_dependency_features is None:
errors.append(
"remove `features = [...]` from workspace dependency "
f"`{dependency_entry_label(section_name, dependency_name)}`; "
"new workspace crate feature activations are not allowed"
)
else:
used_internal_dependency_feature_exceptions.add(exception_key)
if normalized_dependency_features != expected_dependency_features:
errors.append(
"limit workspace dependency features on "
f"`{dependency_entry_label(section_name, dependency_name)}` "
"to the existing exception list while workspace crate "
"features are being removed "
f"(expected {render_string_list(expected_dependency_features)})"
)
if dependency.get("default-features") is False:
errors.append(
"remove `default-features = false` from workspace dependency "
f"`{dependency_entry_label(section_name, dependency_name)}`; "
"new workspace crate feature toggles are not allowed"
)
return errors
@@ -109,6 +238,153 @@ def is_workspace_reference(value: object) -> bool:
return isinstance(value, dict) and value.get("workspace") is True
def manifest_key(path: Path) -> str:
    """Return the manifest path relative to the repo root, as a string key."""
    relative = path.relative_to(ROOT)
    return str(relative)
def normalize_feature_mapping(value: object) -> dict[str, tuple[str, ...]] | None:
    """Canonicalize a `[features]` table for comparison against the allowlist.

    Returns a mapping of feature name -> tuple of feature dependency strings,
    or None when the value is not a string-keyed mapping of string lists.
    """
    if not isinstance(value, dict):
        return None
    canonical: dict[str, tuple[str, ...]] = {}
    for name, deps in value.items():
        if not isinstance(name, str):
            return None
        items = normalize_string_list(deps)
        if items is None:
            return None
        canonical[name] = items
    return canonical
def normalize_string_list(value: object) -> tuple[str, ...] | None:
if not isinstance(value, list) or not all(isinstance(item, str) for item in value):
return None
return tuple(value)
def render_feature_mapping(features: dict[str, tuple[str, ...]]) -> str:
    """Format a feature mapping as comma-joined `name = ["dep", ...]` entries."""
    rendered = (
        f"{name} = {render_string_list(items)}" for name, items in features.items()
    )
    return ", ".join(rendered)
def render_string_list(items: tuple[str, ...]) -> str:
    """Render a tuple of strings as a TOML-style quoted list, e.g. `["a", "b"]`."""
    quoted = ", ".join(f'"{item}"' for item in items)
    return f"[{quoted}]"
def dependency_sections(manifest: dict) -> list[tuple[str, dict]]:
    """Collect every dependency table in the manifest as (label, table) pairs.

    Covers the three top-level dependency sections, `[workspace.dependencies]`,
    and each per-target section under `[target.<cfg>]`.
    """
    section_names = ("dependencies", "dev-dependencies", "build-dependencies")
    found: list[tuple[str, dict]] = []
    for name in section_names:
        table = manifest.get(name)
        if isinstance(table, dict):
            found.append((name, table))
    workspace = manifest.get("workspace")
    if isinstance(workspace, dict):
        workspace_table = workspace.get("dependencies")
        if isinstance(workspace_table, dict):
            found.append(("workspace.dependencies", workspace_table))
    target = manifest.get("target")
    if isinstance(target, dict):
        for target_name, tables in target.items():
            if not isinstance(tables, dict):
                continue
            for name in section_names:
                table = tables.get(name)
                if isinstance(table, dict):
                    found.append((f"target.{target_name}.{name}", table))
    return found
def dependency_entry_label(section_name: str, dependency_name: str) -> str:
    """Human-readable label for a dependency entry, e.g. `[dependencies].serde`."""
    return "[" + section_name + "]." + dependency_name
def is_internal_dependency(
    manifest_path: Path,
    dependency_name: str,
    dependency: dict,
    internal_package_names: set[str],
) -> bool:
    """Return True when the dependency resolves to a crate inside the workspace.

    A dependency is internal if its effective package name (the `package` key,
    falling back to the dependency key) matches a known workspace crate, or if
    its `path` resolves to somewhere under the codex-rs workspace root.
    """
    effective_name = dependency.get("package", dependency_name)
    if isinstance(effective_name, str) and effective_name in internal_package_names:
        return True
    relative_path = dependency.get("path")
    if not isinstance(relative_path, str):
        return False
    resolved = (manifest_path.parent / relative_path).resolve()
    try:
        resolved.relative_to(CARGO_RS_ROOT)
    except ValueError:
        # Path escapes the workspace root: external path dependency.
        return False
    return True
def add_unused_exception_errors(
    failures_by_path: dict[str, list[str]],
    used_manifest_feature_exceptions: set[str],
    used_optional_dependency_exceptions: set[tuple[str, str, str]],
    used_internal_dependency_feature_exceptions: set[tuple[str, str, str]],
) -> None:
    """Report allowlist entries that no manifest used, so stale exceptions get deleted.

    This makes the allowlists self-shrinking: removing a feature from a
    manifest without deleting its exception entry turns into a failure.
    """
    stale_features = (
        set(MANIFEST_FEATURE_EXCEPTIONS) - used_manifest_feature_exceptions
    )
    for path_key in sorted(stale_features):
        add_failure(
            failures_by_path,
            path_key,
            "remove the stale `[features]` exception from "
            "`MANIFEST_FEATURE_EXCEPTIONS`",
        )
    stale_optional = (
        OPTIONAL_DEPENDENCY_EXCEPTIONS - used_optional_dependency_exceptions
    )
    for path_key, section_name, dependency_name in sorted(stale_optional):
        add_failure(
            failures_by_path,
            path_key,
            "remove the stale optional-dependency exception for "
            f"`{dependency_entry_label(section_name, dependency_name)}` from "
            "`OPTIONAL_DEPENDENCY_EXCEPTIONS`",
        )
    stale_internal = (
        set(INTERNAL_DEPENDENCY_FEATURE_EXCEPTIONS)
        - used_internal_dependency_feature_exceptions
    )
    for path_key, section_name, dependency_name in sorted(stale_internal):
        add_failure(
            failures_by_path,
            path_key,
            "remove the stale internal dependency feature exception for "
            f"`{dependency_entry_label(section_name, dependency_name)}` from "
            "`INTERNAL_DEPENDENCY_FEATURE_EXCEPTIONS`",
        )
def add_failure(failures_by_path: dict[str, list[str]], path_key: str, error: str) -> None:
    """Append an error message to the list recorded for the given manifest path."""
    if path_key not in failures_by_path:
        failures_by_path[path_key] = []
    failures_by_path[path_key].append(error)
def workspace_package_names() -> set[str]:
    """Collect every `[package].name` declared by a workspace crate manifest."""
    names: set[str] = set()
    for manifest_path in cargo_manifests():
        package = load_manifest(manifest_path).get("package")
        if not isinstance(package, dict):
            # Virtual manifests have no [package] table.
            continue
        name = package.get("name")
        if isinstance(name, str):
            names.add(name)
    return names
def load_manifest(path: Path) -> dict:
return tomllib.loads(path.read_text())
@@ -121,5 +397,9 @@ def cargo_manifests() -> list[Path]:
)
def manifests_to_verify() -> list[Path]:
    """All manifests subject to policy checks: the workspace root plus every crate."""
    manifests: list[Path] = [CARGO_RS_ROOT / "Cargo.toml"]
    manifests.extend(cargo_manifests())
    return manifests
if __name__ == "__main__":
    # Run the verifier and propagate its exit code to the shell.
    raise SystemExit(main())

View File

@@ -366,6 +366,30 @@ async fn test_fuzzy_file_search_session_streams_updates() -> Result<()> {
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_fuzzy_file_search_session_update_is_case_insensitive() -> Result<()> {
    // An uppercased query must still match a lowercase file name.
    let home = TempDir::new()?;
    let workspace = TempDir::new()?;
    std::fs::write(workspace.path().join("alpha.txt"), "contents")?;

    let mut mcp = initialized_mcp(&home).await?;
    let root = workspace.path().to_string_lossy().to_string();
    let session = "session-case-insensitive";
    mcp.start_fuzzy_file_search_session(session, vec![root.clone()])
        .await?;
    mcp.update_fuzzy_file_search_session(session, "ALP").await?;

    let payload =
        wait_for_session_updated(&mut mcp, session, "ALP", FileExpectation::NonEmpty).await?;
    // Exactly the one lowercase file should come back for the uppercase query.
    assert_eq!(payload.files.len(), 1);
    assert_eq!(payload.files[0].root, root);
    assert_eq!(payload.files[0].path, "alpha.txt");
    Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_fuzzy_file_search_session_no_updates_after_complete_until_query_edited() -> Result<()>
{

View File

@@ -3,8 +3,4 @@ load("//:defs.bzl", "codex_rust_crate")
codex_rust_crate(
name = "cloud-tasks-client",
crate_name = "codex_cloud_tasks_client",
crate_features = [
"mock",
"online",
],
)

View File

@@ -11,11 +11,6 @@ path = "src/lib.rs"
[lints]
workspace = true
[features]
default = ["online"]
online = ["dep:codex-backend-client"]
mock = []
[dependencies]
anyhow = "1"
async-trait = "0.1"
@@ -24,5 +19,5 @@ diffy = "0.4.2"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
thiserror = "2.0.17"
codex-backend-client = { path = "../backend-client", optional = true }
codex-backend-client = { path = "../backend-client" }
codex-git-utils = { workspace = true }

View File

@@ -15,16 +15,9 @@ pub use api::TaskSummary;
pub use api::TaskText;
pub use api::TurnAttempt;
#[cfg(feature = "mock")]
mod mock;
#[cfg(feature = "online")]
mod http;
#[cfg(feature = "mock")]
mod mock;
pub use http::HttpClient;
pub use mock::MockClient;
#[cfg(feature = "online")]
pub use http::HttpClient;
// Reusable apply engine now lives in the shared crate `codex-git-utils`.

View File

@@ -16,10 +16,7 @@ anyhow = { workspace = true }
base64 = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
clap = { workspace = true, features = ["derive"] }
codex-cloud-tasks-client = { path = "../cloud-tasks-client", features = [
"mock",
"online",
] }
codex-cloud-tasks-client = { path = "../cloud-tasks-client" }
codex-client = { workspace = true }
codex-core = { path = "../core" }
codex-git-utils = { workspace = true }

View File

@@ -336,7 +336,7 @@ where
fn create_pattern(pattern: &str) -> Pattern {
Pattern::new(
pattern,
CaseMatching::Smart,
CaseMatching::Ignore,
Normalization::Smart,
AtomKind::Fuzzy,
)
@@ -508,7 +508,7 @@ fn matcher_worker(
nucleo.pattern.reparse(
0,
&query,
CaseMatching::Smart,
CaseMatching::Ignore,
Normalization::Smart,
append,
);