mirror of
https://github.com/openai/codex.git
synced 2026-05-03 04:42:20 +03:00
chore: migrate from Config::load_from_base_config_with_overrides to ConfigBuilder (#8276)
https://github.com/openai/codex/pull/8235 introduced `ConfigBuilder` and this PR updates all non-test call sites to use it instead of `Config::load_from_base_config_with_overrides()`. This is important because `load_from_base_config_with_overrides()` uses an empty `ConfigRequirements`, which is a reasonable default for testing so that tests are not influenced by the settings on the host. This method is now guarded by `#[cfg(test)]` so it cannot be used by business logic. Because `ConfigBuilder::build()` is `async`, many of the test methods had to be migrated to be `async` as well. On the bright side, this made it possible to eliminate a bunch of `block_on_future()` calls.
This commit is contained in:
@@ -1682,8 +1682,7 @@ mod tests {
     use crate::exec_cell::ExecCall;
     use crate::exec_cell::ExecCell;
     use codex_core::config::Config;
-    use codex_core::config::ConfigOverrides;
-    use codex_core::config::ConfigToml;
+    use codex_core::config::ConfigBuilder;
     use codex_core::config::types::McpServerConfig;
     use codex_core::config::types::McpServerTransportConfig;
     use codex_core::openai_models::models_manager::ModelsManager;
@@ -1700,14 +1699,13 @@ mod tests {
     use mcp_types::TextContent;
     use mcp_types::Tool;
     use mcp_types::ToolInputSchema;

-    fn test_config() -> Config {
-        Config::load_from_base_config_with_overrides(
-            ConfigToml::default(),
-            ConfigOverrides::default(),
-            std::env::temp_dir(),
-        )
-        .expect("config")
+    async fn test_config() -> Config {
+        let codex_home = std::env::temp_dir();
+        ConfigBuilder::default()
+            .codex_home(codex_home.clone())
+            .build()
+            .await
+            .expect("config")
     }

     fn render_lines(lines: &[Line<'static>]) -> Vec<String> {
@@ -1785,9 +1783,9 @@ mod tests {
         insta::assert_snapshot!(rendered);
     }

-    #[test]
-    fn mcp_tools_output_masks_sensitive_values() {
-        let mut config = test_config();
+    #[tokio::test]
+    async fn mcp_tools_output_masks_sensitive_values() {
+        let mut config = test_config().await;
         let mut env = HashMap::new();
         env.insert("TOKEN".to_string(), "secret".to_string());
         let stdio_config = McpServerConfig {
@@ -2618,9 +2616,9 @@ mod tests {
         assert_eq!(rendered, vec!["• Detailed reasoning goes here."]);
     }

-    #[test]
-    fn reasoning_summary_block_respects_config_overrides() {
-        let mut config = test_config();
+    #[tokio::test]
+    async fn reasoning_summary_block_respects_config_overrides() {
+        let mut config = test_config().await;
         config.model = Some("gpt-3.5-turbo".to_string());
         config.model_supports_reasoning_summaries = Some(true);
         config.model_reasoning_summary_format = Some(ReasoningSummaryFormat::Experimental);
Reference in New Issue
Block a user