mirror of
https://github.com/openai/codex.git
synced 2026-05-05 22:01:37 +03:00
## Why To support the new bring-your-own search tool in the Responses API (https://developers.openai.com/api/docs/guides/tools-tool-search#client-executed-tool-search), we are migrating our bm25 search tool to the official mechanism for executing search on the client and communicating additional tools to the model. ## What - replace the legacy `search_tool_bm25` flow with client-executed `tool_search` - add protocol, SSE, history, and normalization support for `tool_search_call` and `tool_search_output` - return namespaced Codex Apps search results and wire namespaced follow-up tool calls back into MCP dispatch
38 lines
1.1 KiB
Rust
38 lines
1.1 KiB
Rust
use codex_protocol::models::ResponseItem;
|
|
use serde_json::Value;
|
|
|
|
/// Selects a compression scheme; defaults to [`Compression::None`].
// NOTE(review): presumably applied to an outgoing payload — confirm at the
// call sites that choose between variants.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub enum Compression {
    /// No compression (the default).
    #[default]
    None,
    /// Zstandard (`zstd`) compression.
    Zstd,
}
|
|
|
|
pub(crate) fn attach_item_ids(payload_json: &mut Value, original_items: &[ResponseItem]) {
|
|
let Some(input_value) = payload_json.get_mut("input") else {
|
|
return;
|
|
};
|
|
let Value::Array(items) = input_value else {
|
|
return;
|
|
};
|
|
|
|
for (value, item) in items.iter_mut().zip(original_items.iter()) {
|
|
if let ResponseItem::Reasoning { id, .. }
|
|
| ResponseItem::Message { id: Some(id), .. }
|
|
| ResponseItem::WebSearchCall { id: Some(id), .. }
|
|
| ResponseItem::FunctionCall { id: Some(id), .. }
|
|
| ResponseItem::ToolSearchCall { id: Some(id), .. }
|
|
| ResponseItem::LocalShellCall { id: Some(id), .. }
|
|
| ResponseItem::CustomToolCall { id: Some(id), .. } = item
|
|
{
|
|
if id.is_empty() {
|
|
continue;
|
|
}
|
|
|
|
if let Some(obj) = value.as_object_mut() {
|
|
obj.insert("id".to_string(), Value::String(id.clone()));
|
|
}
|
|
}
|
|
}
|
|
}
|