mirror of
https://github.com/openai/codex.git
synced 2026-04-28 02:11:08 +03:00
## Summary Additional clarifications to our prompt. Still very concise, but we'll continue to add more here.
162 lines
5.0 KiB
Rust
use serde::Serialize;
|
||
use serde_json::json;
|
||
use std::collections::BTreeMap;
|
||
|
||
use crate::client_common::Prompt;
|
||
use crate::model_family::ModelFamily;
|
||
use crate::plan_tool::PLAN_TOOL;
|
||
|
||
/// A single function tool definition for the OpenAI Responses API "tools"
/// array. Serialized together with `"type": "function"` via [`OpenAiTool`].
#[derive(Debug, Clone, Serialize)]
pub(crate) struct ResponsesApiTool {
    // Name the model uses to invoke the tool (e.g. "shell").
    pub(crate) name: &'static str,
    // Description shown to the model explaining what the tool does.
    pub(crate) description: &'static str,
    // Whether strict argument-schema adherence is requested.
    pub(crate) strict: bool,
    // JSON Schema describing the tool's arguments.
    pub(crate) parameters: JsonSchema,
}
/// When serialized as JSON, this produces a valid "Tool" in the OpenAI
/// Responses API.
#[derive(Debug, Clone, Serialize)]
#[serde(tag = "type")]
pub(crate) enum OpenAiTool {
    // Custom function tool; serializes as `{"type": "function", ...fields}`.
    #[serde(rename = "function")]
    Function(ResponsesApiTool),
    // Built-in local shell tool; serializes as `{"type": "local_shell"}`.
    #[serde(rename = "local_shell")]
    LocalShell {},
}
/// Generic JSON‑Schema subset needed for our tool definitions
#[derive(Debug, Clone, Serialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub(crate) enum JsonSchema {
    // Serializes as `{"type": "string"}`.
    String,
    // Serializes as `{"type": "number"}`.
    Number,
    // Serializes as `{"type": "array", "items": ...}`.
    Array {
        items: Box<JsonSchema>,
    },
    // Serializes as `{"type": "object", "properties": ..., "required": ...,
    // "additionalProperties": ...}`.
    Object {
        properties: BTreeMap<String, JsonSchema>,
        required: &'static [&'static str],
        #[serde(rename = "additionalProperties")]
        additional_properties: bool,
    },
}
fn create_shell_tool() -> OpenAiTool {
|
||
let mut properties = BTreeMap::new();
|
||
properties.insert(
|
||
"command".to_string(),
|
||
JsonSchema::Array {
|
||
items: Box::new(JsonSchema::String),
|
||
},
|
||
);
|
||
properties.insert("workdir".to_string(), JsonSchema::String);
|
||
properties.insert("timeout".to_string(), JsonSchema::Number);
|
||
|
||
OpenAiTool::Function(ResponsesApiTool {
|
||
name: "shell",
|
||
description: "Runs a shell command and returns its output",
|
||
strict: false,
|
||
parameters: JsonSchema::Object {
|
||
properties,
|
||
required: &["command"],
|
||
additional_properties: false,
|
||
},
|
||
})
|
||
}
|
||
|
||
/// Returns JSON values that are compatible with Function Calling in the
|
||
/// Responses API:
|
||
/// https://platform.openai.com/docs/guides/function-calling?api-mode=responses
|
||
pub(crate) fn create_tools_json_for_responses_api(
|
||
prompt: &Prompt,
|
||
model_family: &ModelFamily,
|
||
include_plan_tool: bool,
|
||
) -> crate::error::Result<Vec<serde_json::Value>> {
|
||
// Assemble tool list: built-in tools + any extra tools from the prompt.
|
||
let mut openai_tools = vec![create_shell_tool()];
|
||
if model_family.uses_local_shell_tool {
|
||
openai_tools.push(OpenAiTool::LocalShell {});
|
||
}
|
||
|
||
let mut tools_json = Vec::with_capacity(openai_tools.len() + prompt.extra_tools.len() + 1);
|
||
for tool in openai_tools.iter() {
|
||
tools_json.push(serde_json::to_value(tool)?);
|
||
}
|
||
tools_json.extend(
|
||
prompt
|
||
.extra_tools
|
||
.clone()
|
||
.into_iter()
|
||
.map(|(name, tool)| mcp_tool_to_openai_tool(name, tool)),
|
||
);
|
||
|
||
if include_plan_tool {
|
||
tools_json.push(serde_json::to_value(PLAN_TOOL.clone())?);
|
||
}
|
||
|
||
Ok(tools_json)
|
||
}
|
||
|
||
/// Returns JSON values that are compatible with Function Calling in the
|
||
/// Chat Completions API:
|
||
/// https://platform.openai.com/docs/guides/function-calling?api-mode=chat
|
||
pub(crate) fn create_tools_json_for_chat_completions_api(
|
||
prompt: &Prompt,
|
||
model_family: &ModelFamily,
|
||
include_plan_tool: bool,
|
||
) -> crate::error::Result<Vec<serde_json::Value>> {
|
||
// We start with the JSON for the Responses API and than rewrite it to match
|
||
// the chat completions tool call format.
|
||
let responses_api_tools_json =
|
||
create_tools_json_for_responses_api(prompt, model_family, include_plan_tool)?;
|
||
let tools_json = responses_api_tools_json
|
||
.into_iter()
|
||
.filter_map(|mut tool| {
|
||
if tool.get("type") != Some(&serde_json::Value::String("function".to_string())) {
|
||
return None;
|
||
}
|
||
|
||
if let Some(map) = tool.as_object_mut() {
|
||
// Remove "type" field as it is not needed in chat completions.
|
||
map.remove("type");
|
||
Some(json!({
|
||
"type": "function",
|
||
"function": map,
|
||
}))
|
||
} else {
|
||
None
|
||
}
|
||
})
|
||
.collect::<Vec<serde_json::Value>>();
|
||
Ok(tools_json)
|
||
}
|
||
|
||
fn mcp_tool_to_openai_tool(
|
||
fully_qualified_name: String,
|
||
tool: mcp_types::Tool,
|
||
) -> serde_json::Value {
|
||
let mcp_types::Tool {
|
||
description,
|
||
mut input_schema,
|
||
..
|
||
} = tool;
|
||
|
||
// OpenAI models mandate the "properties" field in the schema. The Agents
|
||
// SDK fixed this by inserting an empty object for "properties" if it is not
|
||
// already present https://github.com/openai/openai-agents-python/issues/449
|
||
// so here we do the same.
|
||
if input_schema.properties.is_none() {
|
||
input_schema.properties = Some(serde_json::Value::Object(serde_json::Map::new()));
|
||
}
|
||
|
||
// TODO(mbolin): Change the contract of this function to return
|
||
// ResponsesApiTool.
|
||
json!({
|
||
"name": fully_qualified_name,
|
||
"description": description,
|
||
"parameters": input_schema,
|
||
"type": "function",
|
||
})
|
||
}
|