Compare commits

...

1 Commit

Author SHA1 Message Date
Charles Cunningham
b8f4945040 Drop orphan reasoning items before replay
Trim trailing reasoning items from prompt history before the next user message or prompt end so interrupted turns do not replay invalid Responses API input.

Co-authored-by: Codex <noreply@openai.com>
2026-03-23 12:10:21 -07:00
3 changed files with 85 additions and 1 deletions

View File

@@ -333,7 +333,8 @@ impl ContextManager {
/// This function enforces a couple of invariants on the in-memory history:
/// 1. every call (function/custom) has a corresponding output entry
/// 2. every output has a corresponding call entry
/// 3. when images are unsupported, image content is stripped from messages and tool outputs
/// 3. trailing reasoning items are dropped before the next user message and at prompt end
/// 4. when images are unsupported, image content is stripped from messages and tool outputs
fn normalize_history(&mut self, input_modalities: &[InputModality]) {
// all function/tool calls must have a corresponding output
normalize::ensure_call_outputs_present(&mut self.items);
@@ -341,6 +342,9 @@ impl ContextManager {
// all outputs must have a corresponding function/tool call
normalize::remove_orphan_outputs(&mut self.items);
// interrupted turns can leave replay-unsafe trailing reasoning before the next user item
normalize::remove_trailing_reasoning_before_user_messages(&mut self.items);
// strip images when model does not support them
normalize::strip_images_when_unsupported(input_modalities, &mut self.items);
}

View File

@@ -442,6 +442,59 @@ fn for_prompt_preserves_image_generation_calls_when_images_are_supported() {
);
}
#[test]
fn for_prompt_drops_trailing_reasoning_before_next_user_message() {
    // Reasoning left dangling before the next user turn must not be replayed.
    let recorded = vec![
        user_input_text_msg("first"),
        assistant_msg("done"),
        reasoning_msg("orphaned"),
        user_input_text_msg("second"),
    ];
    let history = create_history_with_items(recorded);

    let expected = vec![
        user_input_text_msg("first"),
        assistant_msg("done"),
        user_input_text_msg("second"),
    ];
    assert_eq!(history.for_prompt(&default_input_modalities()), expected);
}
#[test]
fn for_prompt_drops_trailing_reasoning_at_prompt_end() {
    // A reasoning item at the very end of history is also replay-unsafe.
    let recorded = vec![
        user_input_text_msg("first"),
        assistant_msg("done"),
        reasoning_msg("orphaned"),
    ];
    let history = create_history_with_items(recorded);

    let expected = vec![user_input_text_msg("first"), assistant_msg("done")];
    assert_eq!(history.for_prompt(&default_input_modalities()), expected);
}
#[test]
fn for_prompt_keeps_reasoning_when_followed_by_assistant_message() {
    // Reasoning that is answered by an assistant message belongs to a
    // completed turn and must survive normalization untouched.
    let recorded = vec![
        user_input_text_msg("first"),
        reasoning_msg("kept"),
        assistant_msg("done"),
        user_input_text_msg("second"),
    ];
    let history = create_history_with_items(recorded.clone());

    assert_eq!(history.for_prompt(&default_input_modalities()), recorded);
}
#[test]
fn for_prompt_clears_image_generation_result_when_images_are_unsupported() {
let history = create_history_with_items(vec![

View File

@@ -194,6 +194,33 @@ pub(crate) fn remove_orphan_outputs(items: &mut Vec<ResponseItem>) {
});
}
/// Drops runs of reasoning items that sit immediately before a user message
/// or at the very end of the history. Interrupted turns can leave such
/// trailing reasoning behind, and replaying it is invalid Responses API input.
pub(crate) fn remove_trailing_reasoning_before_user_messages(items: &mut Vec<ResponseItem>) {
    // Pop every reasoning item currently at the tail of `kept`, tallying removals.
    fn pop_trailing_reasoning(kept: &mut Vec<ResponseItem>, removed: &mut usize) {
        while matches!(kept.last(), Some(ResponseItem::Reasoning { .. })) {
            kept.pop();
            *removed += 1;
        }
    }

    let mut kept: Vec<ResponseItem> = Vec::with_capacity(items.len());
    let mut removed_count = 0usize;
    for item in items.drain(..) {
        // A user message opens a new turn, so any reasoning directly
        // preceding it is orphaned and must be discarded.
        if matches!(&item, ResponseItem::Message { role, .. } if role == "user") {
            pop_trailing_reasoning(&mut kept, &mut removed_count);
        }
        kept.push(item);
    }
    // The end of the prompt acts as the same kind of boundary.
    pop_trailing_reasoning(&mut kept, &mut removed_count);

    if removed_count > 0 {
        info!("Removed {removed_count} trailing reasoning item(s) before user input or prompt end");
    }
    *items = kept;
}
pub(crate) fn remove_corresponding_for(items: &mut Vec<ResponseItem>, item: &ResponseItem) {
match item {
ResponseItem::FunctionCall { call_id, .. } => {