jif-oai
2025-10-30 21:05:46 +00:00
parent 7a9c344bd5
commit 3315336366
4 changed files with 37 additions and 21 deletions

View File

@@ -335,7 +335,7 @@ pub(crate) struct SessionConfiguration {
     cwd: PathBuf,
     /// Set of feature flags for this session
-    pub features: Features,
+    features: Features,
     // TODO(pakrym): Remove config from here
     original_config_do_not_use: Arc<Config>,
@@ -1004,10 +1004,15 @@ impl Session {
         let mut history = state.clone_history();
         let prompt_items = history.get_history_for_prompt();
+        let last_message_id = prompt_items
+            .iter()
+            .rev()
+            .find_map(crate::state::response_item_id)
+            .map(ToString::to_string);
         state.set_responses_api_chain(ResponsesApiChainState {
             last_response_id: Some(response_id),
-            last_prompt_items: prompt_items,
+            last_message_id,
         });
     }
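For context, this hunk records the id of the last prompt item that actually carries an id, scanning the rendered history from the back. Below is a minimal, self-contained sketch of that scan; `Item` and `item_id` are simplified stand-ins for this illustration only, not the real `ResponseItem` type or codex API.

// Simplified stand-in for ResponseItem; only the ids matter for this sketch.
enum Item {
    Message { id: Option<String> },
    Reasoning { id: String },
}

// Same shape as the response_item_id helper added by this commit, over the stand-in enum.
fn item_id(item: &Item) -> Option<&str> {
    match item {
        Item::Message { id: Some(id) } | Item::Reasoning { id } => Some(id.as_str()),
        _ => None,
    }
}

fn main() {
    let prompt_items = vec![
        Item::Message { id: Some("msg_1".into()) },
        Item::Reasoning { id: "rs_2".into() },
        Item::Message { id: None }, // items without an id are skipped
    ];

    // Walk the history backwards and keep the first id found, mirroring
    // `.iter().rev().find_map(response_item_id).map(ToString::to_string)` above.
    let last_message_id: Option<String> = prompt_items
        .iter()
        .rev()
        .find_map(item_id)
        .map(ToString::to_string);

    assert_eq!(last_message_id.as_deref(), Some("rs_2"));
}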

View File

@@ -29,7 +29,7 @@ pub(crate) struct ConversationHistory {
 #[derive(Debug, Clone, Default)]
 pub(crate) struct ResponsesApiChainState {
     pub last_response_id: Option<String>,
-    pub last_prompt_items: Vec<ResponseItem>,
+    pub last_message_id: Option<String>,
 }
 impl ConversationHistory {
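The chain state thus keeps two markers instead of a copy of the previous prompt: the response id is echoed back to the API as `previous_response_id`, while the message id locates the cut point in the locally rendered history. A rough sketch of the intended lifecycle; `ChainState` and the literal ids are illustrative stand-ins, not the codex types.

// Simplified mirror of the struct above; field names match the hunk, the rest is illustrative.
#[derive(Debug, Clone, Default)]
struct ChainState {
    last_response_id: Option<String>,
    last_message_id: Option<String>,
}

fn main() {
    // A fresh conversation has no chain markers, so the first turn sends the
    // full history and no previous_response_id.
    let mut chain = ChainState::default();
    assert!(chain.last_response_id.is_none() && chain.last_message_id.is_none());

    // After a response is stored, both markers are recorded: the response id is
    // sent back to the API as previous_response_id, and the message id marks
    // where the locally rendered history can be trimmed on the next turn.
    chain = ChainState {
        last_response_id: Some("resp_123".to_string()),
        last_message_id: Some("msg_9".to_string()),
    };
    assert_eq!(chain.last_message_id.as_deref(), Some("msg_9"));
}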

View File

@@ -4,6 +4,7 @@ mod turn;
 pub(crate) use service::SessionServices;
 pub(crate) use session::SessionState;
+pub(crate) use session::response_item_id;
 pub(crate) use turn::ActiveTurn;
 pub(crate) use turn::RunningTask;
 pub(crate) use turn::TaskKind;

View File

@@ -83,39 +83,49 @@ impl SessionState {
         let mut prompt = Prompt::default();
         prompt.store_response = supports_responses_api_chaining;
-        let mut prompt_items = self.history.get_history_for_prompt();
+        let prompt_items = self.history.get_history_for_prompt();
         if !supports_responses_api_chaining {
             self.reset_responses_api_chain();
             prompt.input = prompt_items;
             return prompt;
         }
-        let mut previous_response_id = None;
         if let Some(chain_state) = self.history.responses_api_chain() {
-            if let Some(prev_id) = chain_state.last_response_id {
-                let prefix = common_prefix_len(&chain_state.last_prompt_items, &prompt_items);
-                let matches_previous_prompt = prefix == chain_state.last_prompt_items.len();
-                if matches_previous_prompt {
-                    previous_response_id = Some(prev_id);
-                    if prefix > 0 {
-                        prompt_items.drain(..prefix);
-                    }
-                } else if !chain_state.last_prompt_items.is_empty() {
-                    self.reset_responses_api_chain();
-                }
-            }
+            let previous_response_id = chain_state.last_response_id.clone();
+            if let Some(last_message_id) = chain_state.last_message_id.as_ref() {
+                if let Some(position) = prompt_items
+                    .iter()
+                    .position(|item| response_item_id(item) == Some(last_message_id))
+                {
+                    prompt.previous_response_id = previous_response_id;
+                    prompt.input = prompt_items.iter().skip(position + 1).cloned().collect();
+                    return prompt;
+                }
+                // Cache marker no longer present; fall back to full prompt and clear chain info.
+                self.reset_responses_api_chain();
+                prompt.input = prompt_items;
+                return prompt;
+            }
+            prompt.previous_response_id = previous_response_id;
+            prompt.input = prompt_items;
+            return prompt;
         }
-        prompt.previous_response_id = previous_response_id;
         prompt.input = prompt_items;
         prompt
     }
 }
-fn common_prefix_len(lhs: &[ResponseItem], rhs: &[ResponseItem]) -> usize {
-    lhs.iter()
-        .zip(rhs.iter())
-        .take_while(|(left, right)| left == right)
-        .count()
-}
+pub(crate) fn response_item_id(item: &ResponseItem) -> Option<&str> {
+    match item {
+        ResponseItem::Message { id: Some(id), .. }
+        | ResponseItem::Reasoning { id, .. }
+        | ResponseItem::LocalShellCall { id: Some(id), .. }
+        | ResponseItem::FunctionCall { id: Some(id), .. }
+        | ResponseItem::CustomToolCall { id: Some(id), .. }
+        | ResponseItem::WebSearchCall { id: Some(id), .. } => Some(id.as_str()),
+        _ => None,
+    }
+}
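Read as a whole, the new branch reduces to: look up the recorded last_message_id in the freshly rendered history; if it is found, send only the items after it together with previous_response_id, otherwise reset the chain and send everything. A minimal sketch of that rule over plain id strings; items_after_marker is a made-up helper name for this sketch, not part of the codex code.

/// Given the ids of the freshly rendered history and the id recorded after the
/// last stored response, return the suffix that still needs to be sent.
/// `None` means the marker is gone and the caller should fall back to the full
/// history and reset the chain state.
fn items_after_marker<'a>(ids: &'a [&'a str], last_message_id: &str) -> Option<&'a [&'a str]> {
    ids.iter()
        .position(|id| *id == last_message_id)
        .map(|position| &ids[position + 1..])
}

fn main() {
    let history = ["msg_1", "rs_2", "msg_3", "msg_4"];

    // Marker found: only the items after it are sent, alongside
    // previous_response_id, so the server-side state is reused.
    assert_eq!(items_after_marker(&history, "rs_2"), Some(&history[2..]));

    // Marker missing (history rewritten or truncated): send everything and
    // clear the chain, matching the fallback branch in the diff.
    assert_eq!(items_after_marker(&history, "msg_99"), None);
}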