[codex-analytics] feature plumbing and emission

This commit is contained in:
rhan-oai
2026-04-06 12:26:33 -07:00
parent 6d273dd348
commit 031eb880e7
21 changed files with 1264 additions and 44 deletions

View File

@@ -78,3 +78,31 @@ model_provider = "{model_provider_id}"
),
)
}
/// Writes a test `config.toml` into `codex_home` that routes model traffic
/// to a mock provider speaking the Responses wire API at `server_uri`, and
/// additionally overrides `chatgpt_base_url`.
///
/// Retries are disabled (`request_max_retries = 0`, `stream_max_retries = 0`)
/// so tests fail fast instead of re-hitting the mock server.
///
/// # Errors
/// Returns any I/O error produced while writing the file.
pub fn write_mock_responses_config_toml_with_chatgpt_base_url(
    codex_home: &Path,
    server_uri: &str,
    chatgpt_base_url: &str,
) -> std::io::Result<()> {
    // Render the full TOML body first, then persist it in one write.
    let contents = format!(
        r#"
model = "mock-model"
approval_policy = "never"
sandbox_mode = "read-only"
chatgpt_base_url = "{chatgpt_base_url}"
model_provider = "mock_provider"
[model_providers.mock_provider]
name = "Mock provider for test"
base_url = "{server_uri}/v1"
wire_api = "responses"
request_max_retries = 0
stream_max_retries = 0
"#
    );
    let destination = codex_home.join("config.toml");
    std::fs::write(destination, contents)
}

View File

@@ -14,6 +14,7 @@ pub use auth_fixtures::encode_id_token;
pub use auth_fixtures::write_chatgpt_auth;
use codex_app_server_protocol::JSONRPCResponse;
pub use config::write_mock_responses_config_toml;
pub use config::write_mock_responses_config_toml_with_chatgpt_base_url;
pub use core_test_support::format_with_current_shell;
pub use core_test_support::format_with_current_shell_display;
pub use core_test_support::format_with_current_shell_display_non_login;