Add max context window model metadata (#18382)

Adds max_context_window to model metadata and routes core context-window
reads through resolved model info. Config model_context_window overrides
are clamped to max_context_window when present; without an override, the
model's context_window is used.
This commit is contained in:
Ahmed Ibrahim
2026-04-17 21:48:14 -07:00
committed by GitHub
parent e9c70fff3f
commit 5bb193aa88
17 changed files with 330 additions and 6 deletions

View File

@@ -15,6 +15,7 @@
},
"supports_parallel_tool_calls": true,
"context_window": 272000,
"max_context_window": 272000,
"reasoning_summary_format": "experimental",
"default_reasoning_summary": "none",
"slug": "gpt-5.3-codex",
@@ -88,6 +89,7 @@
},
"supports_parallel_tool_calls": true,
"context_window": 272000,
"max_context_window": 1000000,
"reasoning_summary_format": "experimental",
"default_reasoning_summary": "none",
"slug": "gpt-5.4",
@@ -161,6 +163,7 @@
},
"supports_parallel_tool_calls": true,
"context_window": 272000,
"max_context_window": 272000,
"reasoning_summary_format": "experimental",
"default_reasoning_summary": "auto",
"slug": "gpt-5.2-codex",
@@ -235,6 +238,7 @@
},
"supports_parallel_tool_calls": false,
"context_window": 272000,
"max_context_window": 272000,
"reasoning_summary_format": "experimental",
"default_reasoning_summary": "auto",
"slug": "gpt-5.1-codex-max",
@@ -302,6 +306,7 @@
},
"supports_parallel_tool_calls": false,
"context_window": 272000,
"max_context_window": 272000,
"reasoning_summary_format": "experimental",
"default_reasoning_summary": "auto",
"slug": "gpt-5.1-codex",
@@ -365,6 +370,7 @@
},
"supports_parallel_tool_calls": true,
"context_window": 272000,
"max_context_window": 272000,
"reasoning_summary_format": "none",
"default_reasoning_summary": "auto",
"slug": "gpt-5.2",
@@ -432,6 +438,7 @@
},
"supports_parallel_tool_calls": true,
"context_window": 272000,
"max_context_window": 272000,
"reasoning_summary_format": "none",
"default_reasoning_summary": "auto",
"slug": "gpt-5.1",
@@ -495,6 +502,7 @@
},
"supports_parallel_tool_calls": false,
"context_window": 272000,
"max_context_window": 272000,
"reasoning_summary_format": "experimental",
"default_reasoning_summary": "auto",
"slug": "gpt-5-codex",
@@ -558,6 +566,7 @@
},
"supports_parallel_tool_calls": false,
"context_window": 272000,
"max_context_window": 272000,
"reasoning_summary_format": "none",
"default_reasoning_summary": "auto",
"slug": "gpt-5",
@@ -624,6 +633,7 @@
},
"supports_parallel_tool_calls": false,
"context_window": 128000,
"max_context_window": 128000,
"reasoning_summary_format": "none",
"default_reasoning_summary": "auto",
"slug": "gpt-oss-120b",
@@ -683,6 +693,7 @@
},
"supports_parallel_tool_calls": false,
"context_window": 128000,
"max_context_window": 128000,
"reasoning_summary_format": "none",
"default_reasoning_summary": "auto",
"slug": "gpt-oss-20b",
@@ -743,6 +754,7 @@
},
"supports_parallel_tool_calls": false,
"context_window": 272000,
"max_context_window": 272000,
"reasoning_summary_format": "experimental",
"default_reasoning_summary": "auto",
"slug": "gpt-5.1-codex-mini",
@@ -802,6 +814,7 @@
},
"supports_parallel_tool_calls": false,
"context_window": 272000,
"max_context_window": 272000,
"reasoning_summary_format": "experimental",
"default_reasoning_summary": "auto",
"slug": "gpt-5-codex-mini",