Compare commits

..

2 Commits

Author SHA1 Message Date
pakrym-oai
4156ba1227 codex: make MCP history cell tests robust 2026-03-26 18:21:07 -10:00
pakrym-oai
7a98b41b19 Normalize /mcp tool grouping for hyphenated server names 2026-03-26 18:19:31 -10:00
15 changed files with 346 additions and 423 deletions

View File

@@ -20,6 +20,9 @@ common:windows --host_platform=//:local_windows
common --@rules_cc//cc/toolchains/args/archiver_flags:use_libtool_on_macos=False
common --@llvm//config:experimental_stub_libgcc_s
# We need to use the sh toolchain on windows so we don't send host bash paths to the linux executor.
common:windows --@rules_rust//rust/settings:experimental_use_sh_toolchain_for_bootstrap_process_wrapper
# TODO(zbarsky): rules_rust doesn't implement this flag properly with remote exec...
# common --@rules_rust//rust/settings:pipelined_compilation

View File

@@ -17,7 +17,6 @@ concurrency:
cancel-in-progress: ${{ github.ref_name != 'main' }}
jobs:
test:
timeout-minutes: 120
strategy:
fail-fast: false
matrix:
@@ -40,9 +39,9 @@ jobs:
# - os: ubuntu-24.04-arm
# target: aarch64-unknown-linux-gnu
# Windows
- os: windows-latest
target: x86_64-pc-windows-gnullvm
# TODO: Enable Windows once we fix the toolchain issues there.
#- os: windows-latest
# target: x86_64-pc-windows-gnullvm
runs-on: ${{ matrix.os }}
# Configure a human readable name for each job
@@ -95,14 +94,8 @@ jobs:
if: runner.os == 'Windows'
shell: pwsh
run: |
# Use a very short path to reduce argv/path length issues, but avoid the
# drive root because some Windows test launchers mis-handle MANIFEST paths there.
"BAZEL_STARTUP_ARGS=--output_user_root=D:\b" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
- name: Enable Git long paths (Windows)
if: runner.os == 'Windows'
shell: pwsh
run: git config --global core.longpaths true
# Use a very short path to reduce argv/path length issues.
"BAZEL_STARTUP_ARGS=--output_user_root=C:\" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
- name: bazel test //...
env:
@@ -160,77 +153,6 @@ jobs:
# canary and release workflows should build `third_party/v8`.
-//third_party/v8:all
)
if [[ "${RUNNER_OS:-}" == "Windows" ]]; then
# This is intentionally a foothold rather than the final Windows
# Bazel suite. Keep a stable set of targets that are known not to
# pull in the unresolved V8 dependency so we can land meaningful CI
# coverage now and expand it once the V8 situation is understood.
bazel_targets=(
//codex-rs/analytics:analytics-unit-tests
//codex-rs/ansi-escape:ansi-escape-unit-tests
//codex-rs/app-server-protocol:app-server-protocol-schema_fixtures-test
//codex-rs/app-server-protocol:app-server-protocol-unit-tests
//codex-rs/apply-patch:apply-patch-all-test
//codex-rs/apply-patch:apply-patch-unit-tests
//codex-rs/async-utils:async-utils-unit-tests
//codex-rs/codex-api:codex-api-unit-tests
//codex-rs/codex-backend-openapi-models:codex-backend-openapi-models-unit-tests
//codex-rs/codex-client:codex-client-unit-tests
//codex-rs/codex-experimental-api-macros:codex-experimental-api-macros-unit-tests
//codex-rs/config:config-unit-tests
//codex-rs/connectors:connectors-unit-tests
//codex-rs/core-skills:core-skills-unit-tests
//codex-rs/exec-server:exec-server-unit-tests
//codex-rs/execpolicy-legacy:execpolicy-legacy-all-test
//codex-rs/execpolicy-legacy:execpolicy-legacy-unit-tests
//codex-rs/execpolicy:execpolicy-unit-tests
//codex-rs/features:features-unit-tests
//codex-rs/feedback:feedback-unit-tests
//codex-rs/file-search:file-search-unit-tests
//codex-rs/git-utils:git-utils-unit-tests
//codex-rs/hooks:hooks-unit-tests
//codex-rs/instructions:instructions-unit-tests
//codex-rs/keyring-store:keyring-store-unit-tests
//codex-rs/network-proxy:network-proxy-unit-tests
//codex-rs/otel:otel-unit-tests
//codex-rs/plugin:plugin-unit-tests
//codex-rs/process-hardening:process-hardening-unit-tests
//codex-rs/protocol:protocol-unit-tests
//codex-rs/responses-api-proxy:responses-api-proxy-unit-tests
//codex-rs/rmcp-client:rmcp-client-unit-tests
//codex-rs/rollout:rollout-unit-tests
//codex-rs/sandboxing:sandboxing-unit-tests
//codex-rs/secrets:secrets-unit-tests
//codex-rs/shell-escalation:shell-escalation-unit-tests
//codex-rs/skills:skills-unit-tests
//codex-rs/state:state-unit-tests
//codex-rs/stdio-to-uds:stdio-to-uds-unit-tests
//codex-rs/terminal-detection:terminal-detection-unit-tests
//codex-rs/tools:tools-unit-tests
//codex-rs/utils/absolute-path:absolute-path-unit-tests
//codex-rs/utils/approval-presets:approval-presets-unit-tests
//codex-rs/utils/cache:cache-unit-tests
//codex-rs/utils/cargo-bin:cargo-bin-unit-tests
//codex-rs/utils/cli:cli-unit-tests
//codex-rs/utils/elapsed:elapsed-unit-tests
//codex-rs/utils/fuzzy-match:fuzzy-match-unit-tests
//codex-rs/utils/home-dir:home-dir-unit-tests
//codex-rs/utils/image:image-unit-tests
//codex-rs/utils/json-to-toml:json-to-toml-unit-tests
//codex-rs/utils/output-truncation:output-truncation-unit-tests
//codex-rs/utils/path-utils:path-utils-unit-tests
//codex-rs/utils/plugins:plugins-unit-tests
//codex-rs/utils/pty:pty-unit-tests
//codex-rs/utils/readiness:readiness-unit-tests
//codex-rs/utils/rustls-provider:rustls-provider-unit-tests
//codex-rs/utils/sleep-inhibitor:sleep-inhibitor-unit-tests
//codex-rs/utils/stream-parser:stream-parser-unit-tests
//codex-rs/utils/string:string-unit-tests
//codex-rs/utils/template:template-unit-tests
//codex-rs/windows-sandbox-rs:windows-sandbox-rs-unit-tests
)
echo "Windows smoke target count: ${#bazel_targets[@]}"
fi
if [[ "${RUNNER_OS:-}" != "Windows" ]]; then
# Bazel test sandboxes on macOS may resolve an older Homebrew `node`

View File

@@ -44,27 +44,8 @@ bazel_dep(name = "apple_support", version = "2.1.0")
bazel_dep(name = "rules_cc", version = "0.2.16")
bazel_dep(name = "rules_platform", version = "0.1.0")
bazel_dep(name = "rules_rs", version = "0.0.43")
# `rules_rs` 0.0.43 does not model `x86_64-pc-windows-gnullvm` as a distinct
# Windows exec platform, so patch it until upstream grows that support.
single_version_override(
module_name = "rules_rs",
patch_strip = 1,
patches = [
"//patches:rules_rs_windows_gnullvm_exec.patch",
],
version = "0.0.43",
)
rules_rust = use_extension("@rules_rs//rs/experimental:rules_rust.bzl", "rules_rust")
# Build-script probe binaries inherit CFLAGS/CXXFLAGS from Bazel's C++
# toolchain. On `windows-gnullvm`, llvm-mingw does not ship
# `libssp_nonshared`, so strip the forwarded stack-protector flags there.
rules_rust.patch(
patches = [
"//patches:rules_rust_windows_gnullvm_build_script.patch",
],
strip = 1,
)
use_repo(rules_rust, "rules_rust")
toolchains = use_extension("@rules_rs//rs/experimental/toolchains:module_extension.bzl", "toolchains")

4
MODULE.bazel.lock generated
View File

@@ -1582,14 +1582,12 @@
"cargo-1.93.0-aarch64-pc-windows-msvc.tar.xz": "155bff7a16aa7054e7ed7c3a82e362d4b302b3882d751b823e06ff63ae3f103d",
"cargo-1.93.0-aarch64-unknown-linux-gnu.tar.xz": "5998940b8b97286bb67facb1a85535eeb3d4d7a61e36a85e386e5c0c5cfe5266",
"cargo-1.93.0-x86_64-apple-darwin.tar.xz": "95a47c5ed797c35419908f04188d8b7de09946e71073c4b72632b16f5b10dfae",
"cargo-1.93.0-x86_64-pc-windows-gnullvm.tar.xz": "f19766837559f90476508140cb95cc708220012ec00a854fa9f99187b1f246b6",
"cargo-1.93.0-x86_64-pc-windows-msvc.tar.xz": "e59c5e2baa9ec17261f2cda6676ebf7b68b21a860e3f7451c4d964728951da75",
"cargo-1.93.0-x86_64-unknown-linux-gnu.tar.xz": "c23de3ae709ff33eed5e4ae59d1f9bcd75fa4dbaa9fb92f7b06bfb534b8db880",
"clippy-1.93.0-aarch64-apple-darwin.tar.xz": "0b6e943a8d12be0e68575acf59c9ea102daf795055fcbbf862b0bfd35ec40039",
"clippy-1.93.0-aarch64-pc-windows-msvc.tar.xz": "07bcf2edb88cdf5ead2f02e4a8493e9b0ef935a31253fac6f9f3378d8023f113",
"clippy-1.93.0-aarch64-unknown-linux-gnu.tar.xz": "872ae6d68d625946d281b91d928332e6b74f6ab269b6af842338df4338805a60",
"clippy-1.93.0-x86_64-apple-darwin.tar.xz": "e6d0b1afb9607c14a1172d09ee194a032bbb3e48af913d55c5a473e0559eddde",
"clippy-1.93.0-x86_64-pc-windows-gnullvm.tar.xz": "b6f1f7264ed6943c59dedfb9531fbadcc3c0fcf273c940a63d58898b14a1060f",
"clippy-1.93.0-x86_64-pc-windows-msvc.tar.xz": "25fb103390bf392980b4689ac09b2ec2ab4beefb7022a983215b613ad05eab57",
"clippy-1.93.0-x86_64-unknown-linux-gnu.tar.xz": "793108977514b15c0f45ade28ae35c58b05370cb0f22e89bd98fdfa61eabf55d",
"rust-std-1.93.0-aarch64-apple-darwin.tar.xz": "8603c63715349636ed85b4fe716c4e827a727918c840e54aff5b243cedadf19b",
@@ -1661,14 +1659,12 @@
"rustc-1.93.0-aarch64-pc-windows-msvc.tar.xz": "a3ac1a8e411de8470f71b366f89d187718c431526912b181692ed0a18c56c7ad",
"rustc-1.93.0-aarch64-unknown-linux-gnu.tar.xz": "1a9045695892ec08d8e9751bf7cf7db71fe27a6202dd12ce13aca48d0602dbde",
"rustc-1.93.0-x86_64-apple-darwin.tar.xz": "594bb293f0a4f444656cf8dec2149fcb979c606260efee9e09bcf8c9c6ed6ae7",
"rustc-1.93.0-x86_64-pc-windows-gnullvm.tar.xz": "0cdaa8de66f5ce21d1ea73917efc5c64f408bda49f678ddde19465ced9d5ec63",
"rustc-1.93.0-x86_64-pc-windows-msvc.tar.xz": "fa17677eee0d83eb055b309953184bf87ba634923d8897f860cda65d55c6e350",
"rustc-1.93.0-x86_64-unknown-linux-gnu.tar.xz": "00c6e6740ea6a795e33568cd7514855d58408a1180cd820284a7bbf7c46af715",
"rustfmt-1.93.0-aarch64-apple-darwin.tar.xz": "0dd1faedf0768ef362f4aae4424b34e8266f2b9cf5e76ea4fcaf780220b363a0",
"rustfmt-1.93.0-aarch64-pc-windows-msvc.tar.xz": "24eed108489567133bbfe40c8eacda1567be55fae4c526911b39eb33eb27a6cb",
"rustfmt-1.93.0-aarch64-unknown-linux-gnu.tar.xz": "92e1acb45ae642136258b4dabb39302af2d53c83e56ebd5858bc969f9e5c141a",
"rustfmt-1.93.0-x86_64-apple-darwin.tar.xz": "c8453b4c5758eb39423042ffa9c23ed6128cbed2b15b581e5e1192c9cc0b1d4e",
"rustfmt-1.93.0-x86_64-pc-windows-gnullvm.tar.xz": "47167e9e78db9be4503a060dee02f4df2cda252da32175dbf44331f965a747b9",
"rustfmt-1.93.0-x86_64-pc-windows-msvc.tar.xz": "5becc7c2dba4b9ab5199012cad30829235a7f7fb5d85a238697e8f0e44cbd9af",
"rustfmt-1.93.0-x86_64-unknown-linux-gnu.tar.xz": "7f81f6c17d11a7fda5b4e1b111942fb3b23d30dcec767e13e340ebfb762a5e33"
}

View File

@@ -33,6 +33,32 @@ const MCP_TOOL_NAME_DELIMITER: &str = "__";
pub(crate) const CODEX_APPS_MCP_SERVER_NAME: &str = "codex_apps";
const CODEX_CONNECTORS_TOKEN_ENV_VAR: &str = "CODEX_CONNECTORS_TOKEN";
/// The Responses API requires tool names to match `^[a-zA-Z0-9_-]+$`.
/// MCP server/tool names are user-controlled, so sanitize the fully-qualified
/// name we expose to the model by replacing any disallowed character with `_`.
pub(crate) fn sanitize_responses_api_tool_name(name: &str) -> String {
let mut sanitized = String::with_capacity(name.len());
for c in name.chars() {
if c.is_ascii_alphanumeric() || c == '_' {
sanitized.push(c);
} else {
sanitized.push('_');
}
}
if sanitized.is_empty() {
"_".to_string()
} else {
sanitized
}
}
pub fn qualified_mcp_tool_name_prefix(server_name: &str) -> String {
sanitize_responses_api_tool_name(&format!(
"{MCP_TOOL_NAME_PREFIX}{MCP_TOOL_NAME_DELIMITER}{server_name}{MCP_TOOL_NAME_DELIMITER}"
))
}
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct ToolPluginProvenance {
plugin_display_names_by_connector_id: HashMap<String, Vec<String>>,

View File

@@ -52,6 +52,14 @@ fn split_qualified_tool_name_returns_server_and_tool() {
);
}
#[test]
fn qualified_mcp_tool_name_prefix_sanitizes_server_names_without_lowercasing() {
assert_eq!(
qualified_mcp_tool_name_prefix("Some-Server"),
"mcp__Some_Server__".to_string()
);
}
#[test]
fn split_qualified_tool_name_rejects_invalid_names() {
assert_eq!(split_qualified_tool_name("other__alpha__do_thing"), None);

View File

@@ -22,6 +22,7 @@ use std::time::Instant;
use crate::mcp::CODEX_APPS_MCP_SERVER_NAME;
use crate::mcp::ToolPluginProvenance;
use crate::mcp::auth::McpAuthStatusEntry;
use crate::mcp::sanitize_responses_api_tool_name;
use anyhow::Context;
use anyhow::Result;
use anyhow::anyhow;
@@ -104,26 +105,6 @@ const MCP_TOOLS_LIST_DURATION_METRIC: &str = "codex.mcp.tools.list.duration_ms";
const MCP_TOOLS_FETCH_UNCACHED_DURATION_METRIC: &str = "codex.mcp.tools.fetch_uncached.duration_ms";
const MCP_TOOLS_CACHE_WRITE_DURATION_METRIC: &str = "codex.mcp.tools.cache_write.duration_ms";
/// The Responses API requires tool names to match `^[a-zA-Z0-9_-]+$`.
/// MCP server/tool names are user-controlled, so sanitize the fully-qualified
/// name we expose to the model by replacing any disallowed character with `_`.
fn sanitize_responses_api_tool_name(name: &str) -> String {
let mut sanitized = String::with_capacity(name.len());
for c in name.chars() {
if c.is_ascii_alphanumeric() || c == '_' {
sanitized.push(c);
} else {
sanitized.push('_');
}
}
if sanitized.is_empty() {
"_".to_string()
} else {
sanitized
}
}
fn sha1_hex(s: &str) -> String {
let mut hasher = Sha1::new();
hasher.update(s.as_bytes());

View File

@@ -42,6 +42,7 @@ use base64::Engine;
use codex_core::config::Config;
use codex_core::config::types::McpServerTransportConfig;
use codex_core::mcp::McpManager;
use codex_core::mcp::qualified_mcp_tool_name_prefix;
use codex_core::plugins::PluginsManager;
use codex_core::web_search::web_search_detail;
use codex_otel::RuntimeMetricsSummary;
@@ -1824,7 +1825,7 @@ pub(crate) fn new_mcp_tools_output(
servers.sort_by(|(a, _), (b, _)| a.cmp(b));
for (server, cfg) in servers {
let prefix = format!("mcp__{server}__");
let prefix = qualified_mcp_tool_name_prefix(server);
let mut names: Vec<String> = tools
.keys()
.filter(|k| k.starts_with(&prefix))
@@ -2544,7 +2545,6 @@ mod tests {
use codex_core::config::Config;
use codex_core::config::ConfigBuilder;
use codex_core::config::types::McpServerConfig;
use codex_core::config::types::McpServerTransportConfig;
use codex_otel::RuntimeMetricTotals;
use codex_otel::RuntimeMetricsSummary;
use codex_protocol::ThreadId;
@@ -2582,6 +2582,88 @@ mod tests {
std::env::temp_dir()
}
fn stdio_server_config(
command: &str,
args: Vec<&str>,
env: Option<HashMap<String, String>>,
env_vars: Vec<&str>,
) -> McpServerConfig {
let mut table = toml::Table::new();
table.insert(
"command".to_string(),
toml::Value::String(command.to_string()),
);
if !args.is_empty() {
table.insert(
"args".to_string(),
toml::Value::Array(
args.into_iter()
.map(|arg| toml::Value::String(arg.to_string()))
.collect(),
),
);
}
if let Some(env) = env {
table.insert("env".to_string(), string_map_to_toml_value(env));
}
if !env_vars.is_empty() {
table.insert(
"env_vars".to_string(),
toml::Value::Array(
env_vars
.into_iter()
.map(|name| toml::Value::String(name.to_string()))
.collect(),
),
);
}
toml::Value::Table(table)
.try_into()
.expect("test stdio MCP config should deserialize")
}
fn streamable_http_server_config(
url: &str,
bearer_token_env_var: Option<&str>,
http_headers: Option<HashMap<String, String>>,
env_http_headers: Option<HashMap<String, String>>,
) -> McpServerConfig {
let mut table = toml::Table::new();
table.insert("url".to_string(), toml::Value::String(url.to_string()));
if let Some(bearer_token_env_var) = bearer_token_env_var {
table.insert(
"bearer_token_env_var".to_string(),
toml::Value::String(bearer_token_env_var.to_string()),
);
}
if let Some(http_headers) = http_headers {
table.insert(
"http_headers".to_string(),
string_map_to_toml_value(http_headers),
);
}
if let Some(env_http_headers) = env_http_headers {
table.insert(
"env_http_headers".to_string(),
string_map_to_toml_value(env_http_headers),
);
}
toml::Value::Table(table)
.try_into()
.expect("test streamable_http MCP config should deserialize")
}
fn string_map_to_toml_value(entries: HashMap<String, String>) -> toml::Value {
toml::Value::Table(
entries
.into_iter()
.map(|(key, value)| (key, toml::Value::String(value)))
.collect(),
)
}
fn render_lines(lines: &[Line<'static>]) -> Vec<String> {
lines
.iter()
@@ -2897,25 +2979,7 @@ mod tests {
let mut config = test_config().await;
let mut env = HashMap::new();
env.insert("TOKEN".to_string(), "secret".to_string());
let stdio_config = McpServerConfig {
transport: McpServerTransportConfig::Stdio {
command: "docs-server".to_string(),
args: vec![],
env: Some(env),
env_vars: vec!["APP_TOKEN".to_string()],
cwd: None,
},
enabled: true,
required: false,
disabled_reason: None,
startup_timeout_sec: None,
tool_timeout_sec: None,
enabled_tools: None,
disabled_tools: None,
scopes: None,
oauth_resource: None,
tools: HashMap::new(),
};
let stdio_config = stdio_server_config("docs-server", vec![], Some(env), vec!["APP_TOKEN"]);
let mut servers = config.mcp_servers.get().clone();
servers.insert("docs".to_string(), stdio_config);
@@ -2923,24 +2987,12 @@ mod tests {
headers.insert("Authorization".to_string(), "Bearer secret".to_string());
let mut env_headers = HashMap::new();
env_headers.insert("X-API-Key".to_string(), "API_KEY_ENV".to_string());
let http_config = McpServerConfig {
transport: McpServerTransportConfig::StreamableHttp {
url: "https://example.com/mcp".to_string(),
bearer_token_env_var: Some("MCP_TOKEN".to_string()),
http_headers: Some(headers),
env_http_headers: Some(env_headers),
},
enabled: true,
required: false,
disabled_reason: None,
startup_timeout_sec: None,
tool_timeout_sec: None,
enabled_tools: None,
disabled_tools: None,
scopes: None,
oauth_resource: None,
tools: HashMap::new(),
};
let http_config = streamable_http_server_config(
"https://example.com/mcp",
Some("MCP_TOKEN"),
Some(headers),
Some(env_headers),
);
servers.insert("http".to_string(), http_config);
config
.mcp_servers
@@ -2988,6 +3040,46 @@ mod tests {
insta::assert_snapshot!(rendered);
}
#[tokio::test]
async fn mcp_tools_output_lists_tools_for_hyphenated_server_names() {
let mut config = test_config().await;
let mut servers = config.mcp_servers.get().clone();
servers.insert(
"some-server".to_string(),
stdio_server_config("docs-server", vec!["--stdio"], None, vec![]),
);
config
.mcp_servers
.set(servers)
.expect("test mcp servers should accept any configuration");
let tools = HashMap::from([(
"mcp__some_server__lookup".to_string(),
Tool {
description: None,
name: "lookup".to_string(),
title: None,
input_schema: serde_json::json!({"type": "object", "properties": {}}),
output_schema: None,
annotations: None,
icons: None,
meta: None,
},
)]);
let auth_statuses: HashMap<String, McpAuthStatus> = HashMap::new();
let cell = new_mcp_tools_output(
&config,
tools,
HashMap::new(),
HashMap::new(),
&auth_statuses,
);
let rendered = render_lines(&cell.display_lines(120)).join("\n");
insta::assert_snapshot!(rendered);
}
#[test]
fn empty_agent_message_cell_transcript() {
let cell = AgentMessageCell::new(vec![Line::default()], false);

View File

@@ -0,0 +1,16 @@
---
source: tui/src/history_cell.rs
assertion_line: 3080
expression: rendered
---
/mcp
🔌 MCP Tools
• some-server
• Status: enabled
• Auth: Unsupported
• Command: docs-server --stdio
• Tools: lookup
• Resources: (none)
• Resource templates: (none)

View File

@@ -45,6 +45,8 @@ use codex_core::config::types::McpServerTransportConfig;
#[cfg(test)]
use codex_core::mcp::McpManager;
#[cfg(test)]
use codex_core::mcp::qualified_mcp_tool_name_prefix;
#[cfg(test)]
use codex_core::plugins::PluginsManager;
use codex_core::web_search::web_search_detail;
use codex_otel::RuntimeMetricsSummary;
@@ -1831,7 +1833,7 @@ pub(crate) fn new_mcp_tools_output(
servers.sort_by(|(a, _), (b, _)| a.cmp(b));
for (server, cfg) in servers {
let prefix = format!("mcp__{server}__");
let prefix = qualified_mcp_tool_name_prefix(server);
let mut names: Vec<String> = tools
.keys()
.filter(|k| k.starts_with(&prefix))
@@ -2773,7 +2775,6 @@ mod tests {
use codex_core::config::ConfigBuilder;
use codex_core::config::types::McpServerConfig;
use codex_core::config::types::McpServerDisabledReason;
use codex_core::config::types::McpServerTransportConfig;
use codex_otel::RuntimeMetricTotals;
use codex_otel::RuntimeMetricsSummary;
use codex_protocol::ThreadId;
@@ -2811,6 +2812,88 @@ mod tests {
std::env::temp_dir()
}
fn stdio_server_config(
command: &str,
args: Vec<&str>,
env: Option<HashMap<String, String>>,
env_vars: Vec<&str>,
) -> McpServerConfig {
let mut table = toml::Table::new();
table.insert(
"command".to_string(),
toml::Value::String(command.to_string()),
);
if !args.is_empty() {
table.insert(
"args".to_string(),
toml::Value::Array(
args.into_iter()
.map(|arg| toml::Value::String(arg.to_string()))
.collect(),
),
);
}
if let Some(env) = env {
table.insert("env".to_string(), string_map_to_toml_value(env));
}
if !env_vars.is_empty() {
table.insert(
"env_vars".to_string(),
toml::Value::Array(
env_vars
.into_iter()
.map(|name| toml::Value::String(name.to_string()))
.collect(),
),
);
}
toml::Value::Table(table)
.try_into()
.expect("test stdio MCP config should deserialize")
}
fn streamable_http_server_config(
url: &str,
bearer_token_env_var: Option<&str>,
http_headers: Option<HashMap<String, String>>,
env_http_headers: Option<HashMap<String, String>>,
) -> McpServerConfig {
let mut table = toml::Table::new();
table.insert("url".to_string(), toml::Value::String(url.to_string()));
if let Some(bearer_token_env_var) = bearer_token_env_var {
table.insert(
"bearer_token_env_var".to_string(),
toml::Value::String(bearer_token_env_var.to_string()),
);
}
if let Some(http_headers) = http_headers {
table.insert(
"http_headers".to_string(),
string_map_to_toml_value(http_headers),
);
}
if let Some(env_http_headers) = env_http_headers {
table.insert(
"env_http_headers".to_string(),
string_map_to_toml_value(env_http_headers),
);
}
toml::Value::Table(table)
.try_into()
.expect("test streamable_http MCP config should deserialize")
}
fn string_map_to_toml_value(entries: HashMap<String, String>) -> toml::Value {
toml::Value::Table(
entries
.into_iter()
.map(|(key, value)| (key, toml::Value::String(value)))
.collect(),
)
}
fn render_lines(lines: &[Line<'static>]) -> Vec<String> {
lines
.iter()
@@ -3126,25 +3209,7 @@ mod tests {
let mut config = test_config().await;
let mut env = HashMap::new();
env.insert("TOKEN".to_string(), "secret".to_string());
let stdio_config = McpServerConfig {
transport: McpServerTransportConfig::Stdio {
command: "docs-server".to_string(),
args: vec![],
env: Some(env),
env_vars: vec!["APP_TOKEN".to_string()],
cwd: None,
},
enabled: true,
required: false,
disabled_reason: None,
startup_timeout_sec: None,
tool_timeout_sec: None,
enabled_tools: None,
disabled_tools: None,
scopes: None,
oauth_resource: None,
tools: HashMap::new(),
};
let stdio_config = stdio_server_config("docs-server", vec![], Some(env), vec!["APP_TOKEN"]);
let mut servers = config.mcp_servers.get().clone();
servers.insert("docs".to_string(), stdio_config);
@@ -3152,24 +3217,12 @@ mod tests {
headers.insert("Authorization".to_string(), "Bearer secret".to_string());
let mut env_headers = HashMap::new();
env_headers.insert("X-API-Key".to_string(), "API_KEY_ENV".to_string());
let http_config = McpServerConfig {
transport: McpServerTransportConfig::StreamableHttp {
url: "https://example.com/mcp".to_string(),
bearer_token_env_var: Some("MCP_TOKEN".to_string()),
http_headers: Some(headers),
env_http_headers: Some(env_headers),
},
enabled: true,
required: false,
disabled_reason: None,
startup_timeout_sec: None,
tool_timeout_sec: None,
enabled_tools: None,
disabled_tools: None,
scopes: None,
oauth_resource: None,
tools: HashMap::new(),
};
let http_config = streamable_http_server_config(
"https://example.com/mcp",
Some("MCP_TOKEN"),
Some(headers),
Some(env_headers),
);
servers.insert("http".to_string(), http_config);
config
.mcp_servers
@@ -3218,30 +3271,52 @@ mod tests {
}
#[tokio::test]
async fn mcp_tools_output_from_statuses_renders_status_only_servers() {
async fn mcp_tools_output_lists_tools_for_hyphenated_server_names() {
let mut config = test_config().await;
let servers = HashMap::from([(
"plugin_docs".to_string(),
McpServerConfig {
transport: McpServerTransportConfig::Stdio {
command: "docs-server".to_string(),
args: vec!["--stdio".to_string()],
env: None,
env_vars: vec![],
cwd: None,
},
enabled: false,
required: false,
disabled_reason: Some(McpServerDisabledReason::Unknown),
startup_timeout_sec: None,
tool_timeout_sec: None,
enabled_tools: None,
disabled_tools: None,
scopes: None,
oauth_resource: None,
tools: HashMap::new(),
let mut servers = config.mcp_servers.get().clone();
servers.insert(
"some-server".to_string(),
stdio_server_config("docs-server", vec!["--stdio"], None, vec![]),
);
config
.mcp_servers
.set(servers)
.expect("test mcp servers should accept any configuration");
let tools = HashMap::from([(
"mcp__some_server__lookup".to_string(),
Tool {
description: None,
name: "lookup".to_string(),
title: None,
input_schema: serde_json::json!({"type": "object", "properties": {}}),
output_schema: None,
annotations: None,
icons: None,
meta: None,
},
)]);
let auth_statuses: HashMap<String, McpAuthStatus> = HashMap::new();
let cell = new_mcp_tools_output(
&config,
tools,
HashMap::new(),
HashMap::new(),
&auth_statuses,
);
let rendered = render_lines(&cell.display_lines(120)).join("\n");
insta::assert_snapshot!(rendered);
}
#[tokio::test]
async fn mcp_tools_output_from_statuses_renders_status_only_servers() {
let mut config = test_config().await;
let mut plugin_docs = stdio_server_config("docs-server", vec!["--stdio"], None, vec![]);
plugin_docs.enabled = false;
plugin_docs.disabled_reason = Some(McpServerDisabledReason::Unknown);
let servers = HashMap::from([("plugin_docs".to_string(), plugin_docs)]);
config
.mcp_servers
.set(servers)

View File

@@ -0,0 +1,16 @@
---
source: tui_app_server/src/history_cell.rs
assertion_line: 3310
expression: rendered
---
/mcp
🔌 MCP Tools
• some-server
• Status: enabled
• Auth: Unsupported
• Command: docs-server --stdio
• Tools: lookup
• Resources: (none)
• Resource templates: (none)

View File

@@ -1,7 +1,5 @@
exports_files([
"aws-lc-sys_memcmp_check.patch",
"rules_rust_windows_gnullvm_build_script.patch",
"rules_rs_windows_gnullvm_exec.patch",
"rusty_v8_prebuilt_out_dir.patch",
"v8_bazel_rules.patch",
"v8_module_deps.patch",

View File

@@ -1,147 +0,0 @@
diff --git a/rs/experimental/platforms/triples.bzl b/rs/experimental/platforms/triples.bzl
--- a/rs/experimental/platforms/triples.bzl
+++ b/rs/experimental/platforms/triples.bzl
@@ -30,6 +30,7 @@
"x86_64-unknown-linux-gnu",
"aarch64-unknown-linux-gnu",
"x86_64-pc-windows-msvc",
+ "x86_64-pc-windows-gnullvm",
"aarch64-pc-windows-msvc",
"x86_64-apple-darwin",
"aarch64-apple-darwin",
diff --git a/rs/experimental/toolchains/declare_rustc_toolchains.bzl b/rs/experimental/toolchains/declare_rustc_toolchains.bzl
--- a/rs/experimental/toolchains/declare_rustc_toolchains.bzl
+++ b/rs/experimental/toolchains/declare_rustc_toolchains.bzl
@@ -10,6 +10,11 @@
return "beta"
return "stable"
+def _exec_triple_suffix(exec_triple):
+ if exec_triple.system == "windows":
+ return "{}_{}_{}".format(exec_triple.system, exec_triple.arch, exec_triple.abi)
+ return "{}_{}".format(exec_triple.system, exec_triple.arch)
+
def declare_rustc_toolchains(
*,
version,
@@ -23,15 +28,14 @@
for triple in execs:
exec_triple = _parse_triple(triple)
- triple_suffix = exec_triple.system + "_" + exec_triple.arch
+ triple_suffix = _exec_triple_suffix(exec_triple)
rustc_repo_label = "@rustc_{}_{}//:".format(triple_suffix, version_key)
cargo_repo_label = "@cargo_{}_{}//:".format(triple_suffix, version_key)
clippy_repo_label = "@clippy_{}_{}//:".format(triple_suffix, version_key)
- rust_toolchain_name = "{}_{}_{}_rust_toolchain".format(
- exec_triple.system,
- exec_triple.arch,
+ rust_toolchain_name = "{}_{}_rust_toolchain".format(
+ triple_suffix,
version_key,
)
@@ -90,11 +94,8 @@
target_key = sanitize_triple(target_triple)
native.toolchain(
- name = "{}_{}_to_{}_{}".format(exec_triple.system, exec_triple.arch, target_key, version_key),
- exec_compatible_with = [
- "@platforms//os:" + exec_triple.system,
- "@platforms//cpu:" + exec_triple.arch,
- ],
+ name = "{}_to_{}_{}".format(triple_suffix, target_key, version_key),
+ exec_compatible_with = triple_to_constraint_set(triple),
target_compatible_with = triple_to_constraint_set(target_triple),
target_settings = [
"@rules_rust//rust/toolchain/channel:" + channel,
diff --git a/rs/experimental/toolchains/declare_rustfmt_toolchains.bzl b/rs/experimental/toolchains/declare_rustfmt_toolchains.bzl
--- a/rs/experimental/toolchains/declare_rustfmt_toolchains.bzl
+++ b/rs/experimental/toolchains/declare_rustfmt_toolchains.bzl
@@ -1,6 +1,6 @@
load("@rules_rust//rust:toolchain.bzl", "rustfmt_toolchain")
load("@rules_rust//rust/platform:triple.bzl", _parse_triple = "triple")
-load("//rs/experimental/platforms:triples.bzl", "SUPPORTED_EXEC_TRIPLES")
+load("//rs/experimental/platforms:triples.bzl", "SUPPORTED_EXEC_TRIPLES", "triple_to_constraint_set")
load("//rs/experimental/toolchains:toolchain_utils.bzl", "sanitize_version")
def _channel(version):
@@ -10,6 +10,11 @@
return "beta"
return "stable"
+def _exec_triple_suffix(exec_triple):
+ if exec_triple.system == "windows":
+ return "{}_{}_{}".format(exec_triple.system, exec_triple.arch, exec_triple.abi)
+ return "{}_{}".format(exec_triple.system, exec_triple.arch)
+
def declare_rustfmt_toolchains(
*,
version,
@@ -22,14 +27,13 @@
for triple in execs:
exec_triple = _parse_triple(triple)
- triple_suffix = exec_triple.system + "_" + exec_triple.arch
+ triple_suffix = _exec_triple_suffix(exec_triple)
rustc_repo_label = "@rustc_{}_{}//:".format(triple_suffix, version_key)
rustfmt_repo_label = "@rustfmt_{}_{}//:".format(triple_suffix, rustfmt_version_key)
- rustfmt_toolchain_name = "{}_{}_{}_rustfmt_toolchain".format(
- exec_triple.system,
- exec_triple.arch,
+ rustfmt_toolchain_name = "{}_{}_rustfmt_toolchain".format(
+ triple_suffix,
version_key,
)
@@ -43,11 +47,8 @@
)
native.toolchain(
- name = "{}_{}_rustfmt_{}".format(exec_triple.system, exec_triple.arch, version_key),
- exec_compatible_with = [
- "@platforms//os:" + exec_triple.system,
- "@platforms//cpu:" + exec_triple.arch,
- ],
+ name = "{}_rustfmt_{}".format(triple_suffix, version_key),
+ exec_compatible_with = triple_to_constraint_set(triple),
target_compatible_with = [],
target_settings = [
"@rules_rust//rust/toolchain/channel:" + channel,
diff --git a/rs/experimental/toolchains/module_extension.bzl b/rs/experimental/toolchains/module_extension.bzl
--- a/rs/experimental/toolchains/module_extension.bzl
+++ b/rs/experimental/toolchains/module_extension.bzl
@@ -37,6 +37,11 @@
return "aarch64"
return arch
+def _exec_triple_suffix(exec_triple):
+ if exec_triple.system == "windows":
+ return "{}_{}_{}".format(exec_triple.system, exec_triple.arch, exec_triple.abi)
+ return "{}_{}".format(exec_triple.system, exec_triple.arch)
+
def _sanitize_path_fragment(path):
return path.replace("/", "_").replace(":", "_")
@@ -181,7 +186,7 @@
for triple in SUPPORTED_EXEC_TRIPLES:
exec_triple = _parse_triple(triple)
- triple_suffix = exec_triple.system + "_" + exec_triple.arch
+ triple_suffix = _exec_triple_suffix(exec_triple)
rustc_name = "rustc_{}_{}".format(triple_suffix, version_key)
rustc_repository(
@@ -230,7 +235,7 @@
for triple in SUPPORTED_EXEC_TRIPLES:
exec_triple = _parse_triple(triple)
- triple_suffix = exec_triple.system + "_" + exec_triple.arch
+ triple_suffix = _exec_triple_suffix(exec_triple)
rustfmt_repository(
name = "rustfmt_{}_{}".format(triple_suffix, version_key),

View File

@@ -1,38 +0,0 @@
diff --git a/cargo/private/cargo_build_script.bzl b/cargo/private/cargo_build_script.bzl
--- a/cargo/private/cargo_build_script.bzl
+++ b/cargo/private/cargo_build_script.bzl
@@ -120,6 +120,25 @@
executable = True,
)
+def _strip_stack_protector_for_windows_llvm_mingw(toolchain, args):
+ """Drop stack protector flags unsupported by llvm-mingw build-script probes."""
+ if "windows-gnullvm" not in toolchain.target_flag_value:
+ return args
+
+ uses_llvm_mingw = False
+ for arg in args:
+ if "mingw-w64-" in arg:
+ uses_llvm_mingw = True
+ break
+
+ if not uses_llvm_mingw:
+ return args
+
+ # llvm-mingw does not ship libssp_nonshared, so forwarding stack-protector
+ # flags through CFLAGS/CXXFLAGS breaks build.rs probe binaries compiled via
+ # cc-rs.
+ return [arg for arg in args if not arg.startswith("-fstack-protector")]
+
def get_cc_compile_args_and_env(cc_toolchain, feature_configuration):
"""Gather cc environment variables from the given `cc_toolchain`
@@ -503,6 +522,8 @@
if not env["AR"]:
env["AR"] = cc_toolchain.ar_executable
+ cc_c_args = _strip_stack_protector_for_windows_llvm_mingw(toolchain, cc_c_args)
+ cc_cxx_args = _strip_stack_protector_for_windows_llvm_mingw(toolchain, cc_cxx_args)
# Populate CFLAGS and CXXFLAGS that cc-rs relies on when building from source, in particular
# to determine the deployment target when building for apple platforms (`macosx-version-min`
# for example, itself derived from the `macos_minimum_os` Bazel argument).

View File

@@ -41,17 +41,11 @@ if not defined manifest if exist "%~dpn0.runfiles_manifest" set "manifest=%~dpn0
if not defined manifest if exist "%~f0.exe.runfiles_manifest" set "manifest=%~f0.exe.runfiles_manifest"
if defined manifest if exist "%manifest%" (
rem Read the manifest directly instead of shelling out to findstr. In the
rem GitHub Windows runner, the nested `findstr` path produced
rem `FINDSTR: Cannot open D:MANIFEST`, which then broke runfile resolution for
rem Bazel tests even though the manifest file was present.
for /f "usebackq tokens=1,* delims= " %%A in ("%manifest%") do (
if "%%A"=="%logical_path%" (
endlocal & set "%~1=%%B" & exit /b 0
)
if "%%A"=="%workspace_logical_path%" (
endlocal & set "%~1=%%B" & exit /b 0
)
for /f "usebackq tokens=1,* delims= " %%A in (`findstr /b /c:"%logical_path% " "%manifest%"`) do (
endlocal & set "%~1=%%B" & exit /b 0
)
for /f "usebackq tokens=1,* delims= " %%A in (`findstr /b /c:"%workspace_logical_path% " "%manifest%"`) do (
endlocal & set "%~1=%%B" & exit /b 0
)
)