Mirror of https://github.com/openai/codex.git
This is the first step in supporting other model providers in the Rust CLI. Specifically, this PR adds new entries to `Config` and `ConfigOverrides` for specifying a `ModelProviderInfo`, the basic configuration needed for an LLM provider. This PR does not get us all the way there yet because `client.rs` still unconditionally appends `/responses` to the URL and expects the endpoint to support the OpenAI Responses API. Will fix that next!
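For context, here is a minimal sketch of what that plumbing could look like. The field names (`name`, `base_url`, `env_key`), the `model_provider` entries on `Config`/`ConfigOverrides`, and the `apply_overrides` helper are assumptions for illustration only, not the exact shape introduced by this PR:

```rust
// Hypothetical sketch of the provider config plumbing described above.
// Names and fields are illustrative assumptions, not the PR's actual API.

/// Basic information needed to talk to an LLM provider.
#[derive(Debug, Clone)]
pub struct ModelProviderInfo {
    /// Human-readable provider name, e.g. "openai".
    pub name: String,
    /// Base URL the client should send requests to.
    pub base_url: String,
    /// Environment variable holding the API key, if any.
    pub env_key: Option<String>,
}

/// Resolved configuration used by the rest of the crate.
pub struct Config {
    pub model_provider: ModelProviderInfo,
    // ...other existing fields elided...
}

/// Optional overrides applied on top of the persisted config.
#[derive(Default)]
pub struct ConfigOverrides {
    pub model_provider: Option<ModelProviderInfo>,
    // ...other existing fields elided...
}

impl Config {
    /// Apply overrides, falling back to the configured provider.
    pub fn apply_overrides(mut self, overrides: ConfigOverrides) -> Self {
        if let Some(provider) = overrides.model_provider {
            self.model_provider = provider;
        }
        self
    }
}
```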
31 lines · 728 B · Rust
```rust
//! Root of the `codex-core` library.

// Prevent accidental direct writes to stdout/stderr in library code. All
// user‑visible output must go through the appropriate abstraction (e.g.,
// the TUI or the tracing stack).
#![deny(clippy::print_stdout, clippy::print_stderr)]

mod client;
pub mod codex;
pub use codex::Codex;
pub mod codex_wrapper;
pub mod config;
pub mod error;
pub mod exec;
mod flags;
mod is_safe_command;
#[cfg(target_os = "linux")]
pub mod linux;
mod mcp_connection_manager;
pub mod mcp_server_config;
mod mcp_tool_call;
mod model_provider_info;
pub use model_provider_info::ModelProviderInfo;
mod models;
pub mod protocol;
mod rollout;
mod safety;
mod user_notification;
pub mod util;
mod zdr_transcript;
```
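As the description notes, `client.rs` still hardcodes the `/responses` suffix today. A hedged sketch of the follow-up direction: build the request URL from the provider's base URL instead of assuming the OpenAI Responses API. The `request_url` helper and the URL shapes below are assumptions for illustration, not the actual follow-up implementation:

```rust
// Hypothetical follow-up sketch: derive the endpoint from provider config
// rather than hardcoding the OpenAI Responses API path.
fn request_url(base_url: &str, path: &str) -> String {
    format!(
        "{}/{}",
        base_url.trim_end_matches('/'),
        path.trim_start_matches('/')
    )
}

fn main() {
    // Today the client effectively does the equivalent of this:
    let url = request_url("https://api.openai.com/v1", "responses");
    assert_eq!(url, "https://api.openai.com/v1/responses");
    // The follow-up would take the base URL (and possibly the path) from
    // `ModelProviderInfo`, so non-OpenAI providers can use their own endpoints.
}
```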