Add helper to prevent caching

This commit is contained in:
Ahmed Ibrahim
2025-07-14 13:47:11 -07:00
parent e4f6b76eca
commit 75a1e4b768
3 changed files with 28 additions and 6 deletions

View File

@@ -21,8 +21,8 @@ use crate::client_common::ResponseEvent;
use crate::client_common::ResponseStream;
use crate::error::CodexErr;
use crate::error::Result;
use crate::flags::OPENAI_REQUEST_MAX_RETRIES;
use crate::flags::OPENAI_STREAM_IDLE_TIMEOUT_MS;
use crate::flags::openai_request_max_retries;
use crate::models::ContentItem;
use crate::models::ResponseItem;
use crate::openai_tools::create_tools_json_for_chat_completions_api;
@@ -146,7 +146,7 @@ pub(crate) async fn stream_chat_completions(
return Err(CodexErr::UnexpectedStatus(status, body));
}
if attempt > *OPENAI_REQUEST_MAX_RETRIES {
if attempt > openai_request_max_retries() {
return Err(CodexErr::RetryLimit(status));
}
@@ -162,7 +162,7 @@ pub(crate) async fn stream_chat_completions(
tokio::time::sleep(delay).await;
}
Err(e) => {
if attempt > *OPENAI_REQUEST_MAX_RETRIES {
if attempt > openai_request_max_retries() {
return Err(e.into());
}
let delay = backoff(attempt);

View File

@@ -29,8 +29,8 @@ use crate::config_types::ReasoningSummary as ReasoningSummaryConfig;
use crate::error::CodexErr;
use crate::error::Result;
use crate::flags::CODEX_RS_SSE_FIXTURE;
use crate::flags::OPENAI_REQUEST_MAX_RETRIES;
use crate::flags::OPENAI_STREAM_IDLE_TIMEOUT_MS;
use crate::flags::openai_request_max_retries;
use crate::model_provider_info::ModelProviderInfo;
use crate::model_provider_info::WireApi;
use crate::models::ResponseItem;
@@ -171,7 +171,7 @@ impl ModelClient {
return Err(CodexErr::UnexpectedStatus(status, body));
}
if attempt > *OPENAI_REQUEST_MAX_RETRIES {
if attempt > openai_request_max_retries() {
return Err(CodexErr::RetryLimit(status));
}
@@ -188,7 +188,7 @@ impl ModelClient {
tokio::time::sleep(delay).await;
}
Err(e) => {
if attempt > *OPENAI_REQUEST_MAX_RETRIES {
if attempt > openai_request_max_retries() {
return Err(e.into());
}
let delay = backoff(attempt);

View File

@@ -23,3 +23,25 @@ env_flags! {
/// Fixture path for offline tests (see client.rs).
pub CODEX_RS_SSE_FIXTURE: Option<&str> = None;
}
// -----------------------------------------------------------------------------
// Test-friendly runtime override helpers
// -----------------------------------------------------------------------------
/// Return the effective retry budget for outbound OpenAI requests.
///
/// The `env_flags!` macro above initialises its values lazily and caches
/// them for the remainder of the process. A number of our unit tests tweak
/// `OPENAI_REQUEST_MAX_RETRIES` *at runtime* (e.g. set to 0/1) to exercise the
/// retry/backoff logic deterministically. When another test touches the flag
/// first, the cached value "sticks" and later tests silently inherit it,
/// leading to surprising flakes (see #???).
///
/// To make the behaviour deterministic we reread the raw environment variable
/// on every call and fall back to the cached default when unset or invalid.
#[inline]
pub fn openai_request_max_retries() -> u64 {
match std::env::var("OPENAI_REQUEST_MAX_RETRIES") {
Ok(s) => s.parse::<u64>().unwrap_or(*OPENAI_REQUEST_MAX_RETRIES),
Err(_) => *OPENAI_REQUEST_MAX_RETRIES,
}
}