mirror of
https://github.com/openai/codex.git
synced 2026-04-04 22:41:48 +03:00
Compare commits
1 Commits
pr16640
...
pakrym/pyt
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e05f333e55 |
13
sdk/python/.gitignore
vendored
Normal file
13
sdk/python/.gitignore
vendored
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
*.so
|
||||||
|
*.dylib
|
||||||
|
*.egg-info/
|
||||||
|
build/
|
||||||
|
dist/
|
||||||
|
.venv/
|
||||||
|
.pytest_cache/
|
||||||
|
.ruff_cache/
|
||||||
|
.mypy_cache/
|
||||||
|
.coverage
|
||||||
|
htmlcov/
|
||||||
43
sdk/python/README.md
Normal file
43
sdk/python/README.md
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
# openai-codex-sdk
|
||||||
|
|
||||||
|
A modern, minimalistic Python library project scaffold.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- PEP 621 `pyproject.toml` with `hatchling` build backend
|
||||||
|
- `src/` layout for package code
|
||||||
|
- Preconfigured tooling: Ruff, MyPy, and Pytest
|
||||||
|
- Ready for publishing to PyPI and local development
|
||||||
|
|
||||||
|
## Getting Started
|
||||||
|
|
||||||
|
```bash
|
||||||
|
python -m venv .venv
|
||||||
|
source .venv/bin/activate
|
||||||
|
pip install -U pip
|
||||||
|
pip install -e .[dev]
|
||||||
|
```
|
||||||
|
|
||||||
|
## Running Tests
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pytest
|
||||||
|
```
|
||||||
|
|
||||||
|
## Linting & Formatting
|
||||||
|
|
||||||
|
```bash
|
||||||
|
ruff check src tests
|
||||||
|
ruff format src tests
|
||||||
|
mypy src
|
||||||
|
```
|
||||||
|
|
||||||
|
## Releasing
|
||||||
|
|
||||||
|
Update the version in `src/openai_codex_sdk/__about__.py` and `pyproject.toml`, then build and publish:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
rm -rf dist
|
||||||
|
python -m build
|
||||||
|
python -m twine upload dist/*
|
||||||
|
```
|
||||||
64
sdk/python/pyproject.toml
Normal file
64
sdk/python/pyproject.toml
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
[build-system]
|
||||||
|
requires = ["hatchling>=1.25"]
|
||||||
|
build-backend = "hatchling.build"
|
||||||
|
|
||||||
|
[project]
|
||||||
|
name = "openai-codex-sdk"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "Modern minimalistic Python SDK scaffold."
|
||||||
|
readme = "README.md"
|
||||||
|
requires-python = ">=3.11"
|
||||||
|
license = {text = "MIT"}
|
||||||
|
authors = [{name = "Codex Team"}]
|
||||||
|
keywords = ["codex", "sdk", "template"]
|
||||||
|
classifiers = [
|
||||||
|
"Programming Language :: Python",
|
||||||
|
"Programming Language :: Python :: 3",
|
||||||
|
"Programming Language :: Python :: 3.11",
|
||||||
|
"Programming Language :: Python :: 3.12",
|
||||||
|
"License :: OSI Approved :: MIT License",
|
||||||
|
"Operating System :: OS Independent",
|
||||||
|
]
|
||||||
|
dependencies = []
|
||||||
|
|
||||||
|
[project.urls]
|
||||||
|
Homepage = "https://example.com/openai-codex-sdk"
|
||||||
|
Repository = "https://example.com/openai-codex-sdk.git"
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
dev = [
|
||||||
|
"mypy>=1.12",
|
||||||
|
"pytest>=8.3",
|
||||||
|
"pytest-cov>=5.0",
|
||||||
|
"pytest-asyncio>=0.24",
|
||||||
|
"ruff>=0.7",
|
||||||
|
"pyright>=1.1.379",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.hatch.metadata]
|
||||||
|
allow-direct-references = true
|
||||||
|
|
||||||
|
[tool.hatch.build.targets.wheel]
|
||||||
|
packages = ["src/openai_codex_sdk"]
|
||||||
|
|
||||||
|
[tool.ruff]
|
||||||
|
line-length = 88
|
||||||
|
target-version = "py311"
|
||||||
|
|
||||||
|
[tool.ruff.lint]
|
||||||
|
select = ["E", "F", "I", "UP", "B", "A"]
|
||||||
|
|
||||||
|
[tool.ruff.format]
|
||||||
|
docstring-code-format = true
|
||||||
|
indent-style = "space"
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
minversion = "8.0"
|
||||||
|
addopts = "-ra --strict-markers"
|
||||||
|
testpaths = ["tests"]
|
||||||
|
|
||||||
|
[tool.mypy]
|
||||||
|
python_version = "3.11"
|
||||||
|
packages = ["openai_codex_sdk"]
|
||||||
|
strict = true
|
||||||
|
warn_unused_configs = true
|
||||||
7
sdk/python/pyrightconfig.json
Normal file
7
sdk/python/pyrightconfig.json
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
{
|
||||||
|
"$schema": "https://json.schemastore.org/pyrightconfig.json",
|
||||||
|
"include": ["src", "tests"],
|
||||||
|
"typeCheckingMode": "strict",
|
||||||
|
"venvPath": ".",
|
||||||
|
"venv": ".venv"
|
||||||
|
}
|
||||||
5
sdk/python/src/openai_codex_sdk/__about__.py
Normal file
5
sdk/python/src/openai_codex_sdk/__about__.py
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
"""Package metadata for ``openai_codex_sdk``.

The version is duplicated in ``pyproject.toml``; keep the two in sync when
cutting a release.
"""

__version__ = "0.1.0"

__all__ = ["__version__"]
|
||||||
66
sdk/python/src/openai_codex_sdk/__init__.py
Normal file
66
sdk/python/src/openai_codex_sdk/__init__.py
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
"""openai-codex-sdk public API.

Everything re-exported here is the supported surface of the package; prefer
importing from this module rather than from the individual submodules.
"""

from .__about__ import __version__
from .codex import Codex
from .codex_options import CodexOptions
from .events import (
    ItemCompletedEvent,
    ItemStartedEvent,
    ItemUpdatedEvent,
    ThreadError,
    ThreadErrorEvent,
    ThreadEvent,
    ThreadStartedEvent,
    TurnCompletedEvent,
    TurnFailedEvent,
    TurnStartedEvent,
    Usage,
)
from .items import (
    AssistantMessageItem,
    CommandExecutionItem,
    ErrorItem,
    FileChangeItem,
    McpToolCallItem,
    ReasoningItem,
    ThreadItem,
    TodoItem,
    TodoListItem,
    WebSearchItem,
)
from .thread import Input, RunResult, RunStreamedResult, Thread
from .turn_options import ApprovalMode, SandboxMode, TurnOptions

# Grouped roughly by concern: client, turn configuration, events, items.
__all__ = [
    "__version__",
    "Codex",
    "CodexOptions",
    "Thread",
    "RunResult",
    "RunStreamedResult",
    "Input",
    "TurnOptions",
    "ApprovalMode",
    "SandboxMode",
    "ThreadEvent",
    "ThreadStartedEvent",
    "TurnStartedEvent",
    "TurnCompletedEvent",
    "TurnFailedEvent",
    "ItemStartedEvent",
    "ItemUpdatedEvent",
    "ItemCompletedEvent",
    "ThreadError",
    "ThreadErrorEvent",
    "Usage",
    "ThreadItem",
    "AssistantMessageItem",
    "ReasoningItem",
    "CommandExecutionItem",
    "FileChangeItem",
    "McpToolCallItem",
    "WebSearchItem",
    "TodoListItem",
    "TodoItem",
    "ErrorItem",
]
|
||||||
20
sdk/python/src/openai_codex_sdk/codex.py
Normal file
20
sdk/python/src/openai_codex_sdk/codex.py
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
from __future__ import annotations

from .codex_options import CodexOptions
from .exec import CodexExec
from .thread import Thread


class Codex:
    """Entry point for driving the ``codex`` CLI executable."""

    def __init__(self, options: CodexOptions) -> None:
        """Create a client from *options*.

        Raises ``ValueError`` when ``options.executable_path`` is empty.
        """
        executable = options.executable_path
        if not executable:
            raise ValueError("executable_path is required")

        self._options = options
        self._exec = CodexExec(executable)

    def start_thread(self) -> Thread:
        """Open a brand-new conversation thread."""
        return Thread(self._exec, self._options)

    def resume_thread(self, thread_id: str) -> Thread:
        """Reattach to an existing conversation identified by *thread_id*."""
        return Thread(self._exec, self._options, thread_id)
|
||||||
12
sdk/python/src/openai_codex_sdk/codex_options.py
Normal file
12
sdk/python/src/openai_codex_sdk/codex_options.py
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
from __future__ import annotations

from dataclasses import dataclass


@dataclass(slots=True)
class CodexOptions:
    """Configuration for creating a ``Codex`` client."""

    # Filesystem path to the codex CLI binary; validated by ``Codex.__init__``.
    executable_path: str
    # When set, exported to the subprocess as OPENAI_BASE_URL.
    base_url: str | None = None
    # When set, exported to the subprocess as OPENAI_API_KEY.
    api_key: str | None = None
|
||||||
66
sdk/python/src/openai_codex_sdk/events.py
Normal file
66
sdk/python/src/openai_codex_sdk/events.py
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
from __future__ import annotations

from typing import Literal, TypedDict

from .items import ThreadItem


class ThreadStartedEvent(TypedDict):
    """Emitted when the CLI reports the thread id for a run."""

    type: Literal["thread.started"]
    thread_id: str


class TurnStartedEvent(TypedDict):
    """Marks the beginning of a turn; carries no payload."""

    type: Literal["turn.started"]


class Usage(TypedDict):
    """Token accounting attached to a completed turn."""

    input_tokens: int
    cached_input_tokens: int
    output_tokens: int


class TurnCompletedEvent(TypedDict):
    """Successful end of a turn, with its token usage."""

    type: Literal["turn.completed"]
    usage: Usage


class ThreadError(TypedDict):
    """Error payload carried by a failed turn."""

    message: str


class TurnFailedEvent(TypedDict):
    """Unsuccessful end of a turn."""

    type: Literal["turn.failed"]
    error: ThreadError


class ItemStartedEvent(TypedDict):
    """An item has appeared in the stream."""

    type: Literal["item.started"]
    item: ThreadItem


class ItemUpdatedEvent(TypedDict):
    """An already-started item changed."""

    type: Literal["item.updated"]
    item: ThreadItem


class ItemCompletedEvent(TypedDict):
    """An item reached its final state."""

    type: Literal["item.completed"]
    item: ThreadItem


class ThreadErrorEvent(TypedDict):
    """A thread-level error event."""

    type: Literal["error"]
    message: str


# Union discriminated by the "type" key; consumers switch on event["type"].
ThreadEvent = (
    ThreadStartedEvent
    | TurnStartedEvent
    | TurnCompletedEvent
    | TurnFailedEvent
    | ItemStartedEvent
    | ItemUpdatedEvent
    | ItemCompletedEvent
    | ThreadErrorEvent
)
|
||||||
80
sdk/python/src/openai_codex_sdk/exec.py
Normal file
80
sdk/python/src/openai_codex_sdk/exec.py
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import os
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import AsyncGenerator
|
||||||
|
|
||||||
|
from .turn_options import SandboxMode
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(slots=True)
|
||||||
|
class CodexExecArgs:
|
||||||
|
input: str
|
||||||
|
base_url: str | None = None
|
||||||
|
api_key: str | None = None
|
||||||
|
thread_id: str | None = None
|
||||||
|
model: str | None = None
|
||||||
|
sandbox_mode: SandboxMode | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class CodexExec:
|
||||||
|
def __init__(self, executable_path: str) -> None:
|
||||||
|
self._executable_path = executable_path
|
||||||
|
|
||||||
|
async def run(self, args: CodexExecArgs) -> AsyncGenerator[str, None]:
|
||||||
|
command_args: list[str] = ["exec", "--experimental-json"]
|
||||||
|
|
||||||
|
if args.model:
|
||||||
|
command_args.extend(["--model", args.model])
|
||||||
|
|
||||||
|
if args.sandbox_mode:
|
||||||
|
command_args.extend(["--sandbox", args.sandbox_mode])
|
||||||
|
|
||||||
|
if args.thread_id:
|
||||||
|
command_args.extend(["resume", args.thread_id, args.input])
|
||||||
|
else:
|
||||||
|
command_args.append(args.input)
|
||||||
|
|
||||||
|
env = dict(os.environ)
|
||||||
|
if args.base_url:
|
||||||
|
env["OPENAI_BASE_URL"] = args.base_url
|
||||||
|
if args.api_key:
|
||||||
|
env["OPENAI_API_KEY"] = args.api_key
|
||||||
|
|
||||||
|
try:
|
||||||
|
process = await asyncio.create_subprocess_exec(
|
||||||
|
self._executable_path,
|
||||||
|
*command_args,
|
||||||
|
stdout=asyncio.subprocess.PIPE,
|
||||||
|
stderr=asyncio.subprocess.PIPE,
|
||||||
|
env=env,
|
||||||
|
)
|
||||||
|
except Exception as exc: # pragma: no cover - passthrough for caller
|
||||||
|
raise RuntimeError("Failed to start codex executable") from exc
|
||||||
|
|
||||||
|
if not process.stdout:
|
||||||
|
process.kill()
|
||||||
|
await process.wait()
|
||||||
|
raise RuntimeError("Child process has no stdout")
|
||||||
|
|
||||||
|
try:
|
||||||
|
while True:
|
||||||
|
line = await process.stdout.readline()
|
||||||
|
if not line:
|
||||||
|
break
|
||||||
|
yield line.decode("utf-8").rstrip("\n")
|
||||||
|
|
||||||
|
return_code = await process.wait()
|
||||||
|
if return_code != 0:
|
||||||
|
stderr_output = b""
|
||||||
|
if process.stderr:
|
||||||
|
stderr_output = await process.stderr.read()
|
||||||
|
message = stderr_output.decode("utf-8", errors="ignore").strip()
|
||||||
|
raise RuntimeError(
|
||||||
|
f"Codex Exec exited with code {return_code}" + (f": {message}" if message else "")
|
||||||
|
)
|
||||||
|
finally:
|
||||||
|
if process.returncode is None:
|
||||||
|
process.kill()
|
||||||
|
await process.wait()
|
||||||
85
sdk/python/src/openai_codex_sdk/items.py
Normal file
85
sdk/python/src/openai_codex_sdk/items.py
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
from __future__ import annotations

from typing import Literal, NotRequired, TypedDict


class CommandExecutionItem(TypedDict):
    """A command executed during the turn."""

    id: str
    item_type: Literal["command_execution"]
    command: str
    # Aggregated output text of the command.
    aggregated_output: str
    status: Literal["in_progress", "completed", "failed"]
    # Only present once the command has finished.
    exit_code: NotRequired[int]


class FileUpdateChange(TypedDict):
    """One file touched by a file-change item."""

    path: str
    kind: Literal["add", "delete", "update"]


class FileChangeItem(TypedDict):
    """A batch of file modifications."""

    id: str
    item_type: Literal["file_change"]
    changes: list[FileUpdateChange]
    status: Literal["completed", "failed"]


class McpToolCallItem(TypedDict):
    """An MCP tool invocation against a named server."""

    id: str
    item_type: Literal["mcp_tool_call"]
    server: str
    tool: str
    status: Literal["in_progress", "completed", "failed"]


class AssistantMessageItem(TypedDict):
    """A message produced by the assistant."""

    id: str
    item_type: Literal["assistant_message"]
    text: str


class ReasoningItem(TypedDict):
    """Reasoning text emitted alongside the turn."""

    id: str
    item_type: Literal["reasoning"]
    text: str


class WebSearchItem(TypedDict):
    """A web search issued during the turn."""

    id: str
    item_type: Literal["web_search"]
    query: str


class ErrorItem(TypedDict):
    """An error surfaced as a stream item."""

    id: str
    item_type: Literal["error"]
    message: str


class TodoItem(TypedDict):
    """One entry of a todo list (has no id of its own)."""

    text: str
    completed: bool


class TodoListItem(TypedDict):
    """The current todo list."""

    id: str
    item_type: Literal["todo_list"]
    items: list[TodoItem]


class SessionItem(TypedDict):
    """Session bookkeeping item; note it is not part of ``ThreadItem``."""

    id: str
    item_type: Literal["session"]
    session_id: str


# Union discriminated by "item_type". SessionItem is not included here.
ThreadItem = (
    AssistantMessageItem
    | ReasoningItem
    | CommandExecutionItem
    | FileChangeItem
    | McpToolCallItem
    | WebSearchItem
    | TodoListItem
    | ErrorItem
)
|
||||||
70
sdk/python/src/openai_codex_sdk/thread.py
Normal file
70
sdk/python/src/openai_codex_sdk/thread.py
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import AsyncGenerator, cast
|
||||||
|
|
||||||
|
from .codex_options import CodexOptions
|
||||||
|
from .exec import CodexExec, CodexExecArgs
|
||||||
|
from .events import ItemCompletedEvent, ThreadEvent, ThreadStartedEvent
|
||||||
|
from .items import AssistantMessageItem, ThreadItem
|
||||||
|
from .turn_options import TurnOptions
|
||||||
|
|
||||||
|
Input = str
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(slots=True)
|
||||||
|
class RunResult:
|
||||||
|
items: list[ThreadItem]
|
||||||
|
final_response: str
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(slots=True)
|
||||||
|
class RunStreamedResult:
|
||||||
|
events: AsyncGenerator[ThreadEvent, None]
|
||||||
|
|
||||||
|
|
||||||
|
class Thread:
|
||||||
|
def __init__(self, codex_exec: CodexExec, options: CodexOptions, thread_id: str | None = None) -> None:
|
||||||
|
self._exec = codex_exec
|
||||||
|
self._options = options
|
||||||
|
self.id = thread_id
|
||||||
|
|
||||||
|
async def run_streamed(self, input: Input, options: TurnOptions | None = None) -> RunStreamedResult:
|
||||||
|
return RunStreamedResult(events=self._run_streamed_internal(input, options))
|
||||||
|
|
||||||
|
async def run(self, input: Input, options: TurnOptions | None = None) -> RunResult:
|
||||||
|
generator = self._run_streamed_internal(input, options)
|
||||||
|
items: list[ThreadItem] = []
|
||||||
|
final_response = ""
|
||||||
|
|
||||||
|
async for event in generator:
|
||||||
|
if event["type"] != "item.completed":
|
||||||
|
continue
|
||||||
|
completed = cast(ItemCompletedEvent, event)
|
||||||
|
item = completed["item"]
|
||||||
|
items.append(item)
|
||||||
|
if item["item_type"] == "assistant_message":
|
||||||
|
assistant_item = cast(AssistantMessageItem, item)
|
||||||
|
final_response = assistant_item["text"]
|
||||||
|
|
||||||
|
return RunResult(items=items, final_response=final_response)
|
||||||
|
|
||||||
|
async def _run_streamed_internal(
|
||||||
|
self, input: Input, options: TurnOptions | None
|
||||||
|
) -> AsyncGenerator[ThreadEvent, None]:
|
||||||
|
exec_args = CodexExecArgs(
|
||||||
|
input=input,
|
||||||
|
base_url=self._options.base_url,
|
||||||
|
api_key=self._options.api_key,
|
||||||
|
thread_id=self.id,
|
||||||
|
model=options.model if options else None,
|
||||||
|
sandbox_mode=options.sandbox_mode if options else None,
|
||||||
|
)
|
||||||
|
|
||||||
|
async for raw_event in self._exec.run(exec_args):
|
||||||
|
parsed = cast(ThreadEvent, json.loads(raw_event))
|
||||||
|
if parsed["type"] == "thread.started":
|
||||||
|
started = cast(ThreadStartedEvent, parsed)
|
||||||
|
self.id = started["thread_id"]
|
||||||
|
yield parsed
|
||||||
13
sdk/python/src/openai_codex_sdk/turn_options.py
Normal file
13
sdk/python/src/openai_codex_sdk/turn_options.py
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
from __future__ import annotations

from dataclasses import dataclass
from typing import Literal

# Approval policy values. NOTE(review): ApprovalMode is exported but not yet
# consumed anywhere visible in this SDK — confirm intended use.
ApprovalMode = Literal["never", "on-request", "on-failure", "untrusted"]
# Sandbox levels forwarded to ``codex exec --sandbox``.
SandboxMode = Literal["read-only", "workspace-write", "danger-full-access"]


@dataclass(slots=True)
class TurnOptions:
    """Per-turn overrides for a single ``Thread.run``/``run_streamed`` call."""

    # Forwarded as ``--model`` when set.
    model: str | None = None
    # Forwarded as ``--sandbox`` when set.
    sandbox_mode: SandboxMode | None = None
|
||||||
0
sdk/python/tests/__init__.py
Normal file
0
sdk/python/tests/__init__.py
Normal file
27
sdk/python/tests/codex_exec_spy.py
Normal file
27
sdk/python/tests/codex_exec_spy.py
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Callable
|
||||||
|
|
||||||
|
from pytest import MonkeyPatch
|
||||||
|
|
||||||
|
from openai_codex_sdk.exec import CodexExecArgs
|
||||||
|
|
||||||
|
from .responses_proxy import FakeExec, ResponsesProxy
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(slots=True)
|
||||||
|
class CodexExecSpyResult:
|
||||||
|
args: list[CodexExecArgs]
|
||||||
|
restore: Callable[[], None]
|
||||||
|
|
||||||
|
|
||||||
|
def install_codex_exec_spy(monkeypatch: MonkeyPatch, proxy: ResponsesProxy) -> CodexExecSpyResult:
|
||||||
|
calls: list[CodexExecArgs] = []
|
||||||
|
|
||||||
|
def factory(path: str) -> FakeExec:
|
||||||
|
return FakeExec(path, proxy, calls)
|
||||||
|
|
||||||
|
monkeypatch.setattr("openai_codex_sdk.codex.CodexExec", factory)
|
||||||
|
|
||||||
|
return CodexExecSpyResult(args=calls, restore=monkeypatch.undo)
|
||||||
37
sdk/python/tests/conftest.py
Normal file
37
sdk/python/tests/conftest.py
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from collections.abc import AsyncIterator, Awaitable, Callable
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import pytest_asyncio
|
||||||
|
from pytest import MonkeyPatch
|
||||||
|
|
||||||
|
from .codex_exec_spy import CodexExecSpyResult, install_codex_exec_spy
|
||||||
|
from .responses_proxy import ResponsesProxy, ResponsesProxyOptions, start_responses_test_proxy
|
||||||
|
|
||||||
|
ProxyFactory = Callable[[ResponsesProxyOptions], Awaitable[ResponsesProxy]]
|
||||||
|
SpyFactory = Callable[[ResponsesProxy], CodexExecSpyResult]
|
||||||
|
|
||||||
|
|
||||||
|
@pytest_asyncio.fixture
|
||||||
|
async def make_responses_proxy() -> AsyncIterator[ProxyFactory]:
|
||||||
|
proxies: list[ResponsesProxy] = []
|
||||||
|
|
||||||
|
async def _make(options: ResponsesProxyOptions) -> ResponsesProxy:
|
||||||
|
proxy = await start_responses_test_proxy(options)
|
||||||
|
proxies.append(proxy)
|
||||||
|
return proxy
|
||||||
|
|
||||||
|
try:
|
||||||
|
yield _make
|
||||||
|
finally:
|
||||||
|
for proxy in proxies:
|
||||||
|
await proxy.close()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def codex_exec_spy(monkeypatch: MonkeyPatch) -> SpyFactory:
|
||||||
|
def _install(proxy: ResponsesProxy) -> CodexExecSpyResult:
|
||||||
|
return install_codex_exec_spy(monkeypatch, proxy)
|
||||||
|
|
||||||
|
return _install
|
||||||
210
sdk/python/tests/responses_proxy.py
Normal file
210
sdk/python/tests/responses_proxy.py
Normal file
@@ -0,0 +1,210 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import itertools
|
||||||
|
import json
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from typing import Any, AsyncGenerator, TypedDict
|
||||||
|
|
||||||
|
from openai_codex_sdk.exec import CodexExecArgs
|
||||||
|
|
||||||
|
DEFAULT_RESPONSE_ID = "resp_mock"
|
||||||
|
DEFAULT_MESSAGE_ID = "msg_mock"
|
||||||
|
|
||||||
|
|
||||||
|
class SseEvent(TypedDict, total=False):
|
||||||
|
type: str
|
||||||
|
item: dict[str, Any]
|
||||||
|
response: dict[str, Any]
|
||||||
|
|
||||||
|
|
||||||
|
class SseResponseBody(TypedDict):
|
||||||
|
kind: str
|
||||||
|
events: list[SseEvent]
|
||||||
|
|
||||||
|
|
||||||
|
class ResponsesProxyOptions(TypedDict, total=False):
|
||||||
|
response_bodies: list[SseResponseBody]
|
||||||
|
status_code: int
|
||||||
|
|
||||||
|
|
||||||
|
class RecordedRequest(TypedDict):
|
||||||
|
body: str
|
||||||
|
json: dict[str, Any]
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(slots=True)
class ResponsesProxy:
    """In-memory stand-in for a Responses API server used by the tests.

    Scripted ``response_bodies`` are consumed one per run (the final body is
    reused once the list runs out), and each run's synthesized request is
    recorded in ``requests`` for assertions.
    """

    response_bodies: list[SseResponseBody]
    # Recorded from options but not otherwise used by this fake transport.
    status_code: int
    requests: list[RecordedRequest]
    # Index of the next scripted response body to serve.
    _response_index: int = field(init=False, default=0)
    # Produces thread_1, thread_2, ... for newly-started conversations.
    _thread_counter: itertools.count = field(init=False, default_factory=lambda: itertools.count(1))
    # Per-thread assistant outputs, replayed as history on later runs.
    _thread_histories: dict[str, list[str]] = field(init=False, default_factory=dict)

    def __post_init__(self) -> None:
        # Fail fast: every run must have at least one scripted response.
        if not self.response_bodies:
            raise ValueError("response_bodies is required")

    async def close(self) -> None:
        """No real resources to release; kept async to mirror a live server."""
        await asyncio.sleep(0)

    def _next_thread_id(self) -> str:
        """Allocate a fresh synthetic thread id."""
        return f"thread_{next(self._thread_counter)}"

    def _next_response(self) -> SseResponseBody:
        """Return the next scripted body, clamping to the last one."""
        index = min(self._response_index, len(self.response_bodies) - 1)
        self._response_index += 1
        return self.response_bodies[index]

    def _build_request(self, args: CodexExecArgs, thread_id: str) -> RecordedRequest:
        """Synthesize and record the Responses-API request for one run."""
        history = self._thread_histories.get(thread_id, [])
        input_entries: list[dict[str, Any]] = []
        # Prior assistant outputs come first, oldest to newest.
        for text in history:
            input_entries.append(
                {
                    "role": "assistant",
                    "content": [
                        {
                            "type": "output_text",
                            "text": text,
                        }
                    ],
                }
            )
        # The current user prompt is always the final entry.
        input_entries.append(
            {
                "role": "user",
                "content": [
                    {
                        "type": "input_text",
                        "text": args.input,
                    }
                ],
            }
        )

        request_json: dict[str, Any] = {"input": input_entries}
        if args.model is not None:
            request_json["model"] = args.model

        recorded = RecordedRequest(body=json.dumps(request_json), json=request_json)
        self.requests.append(recorded)
        return recorded

    def record_run(self, args: CodexExecArgs) -> tuple[str, RecordedRequest, bool]:
        """Register one run; returns ``(thread_id, request, is_new_thread)``."""
        if args.thread_id:
            thread_id = args.thread_id
            new_thread = False
        else:
            thread_id = self._next_thread_id()
            new_thread = True
        request = self._build_request(args, thread_id)
        return thread_id, request, new_thread

    def add_history(self, thread_id: str, text: str) -> None:
        """Remember assistant output *text* for future runs on *thread_id*."""
        self._thread_histories.setdefault(thread_id, []).append(text)

    def _convert_events(
        self, response_body: SseResponseBody, thread_id: str, new_thread: bool
    ) -> list[dict[str, Any]]:
        """Translate raw SSE events into the CLI's thread-event schema."""
        events: list[dict[str, Any]] = []
        if new_thread:
            events.append({"type": "thread.started", "thread_id": thread_id})

        for event in response_body["events"]:
            if event["type"] == "response.created":
                events.append({"type": "turn.started"})
            elif event["type"] == "response.output_item.done":
                item = event["item"]
                # Assumes the item has a content list whose first entry holds
                # the text, as built by assistant_message() below.
                text = item["content"][0]["text"]
                events.append(
                    {
                        "type": "item.completed",
                        "item": {
                            "id": item["id"],
                            "item_type": "assistant_message",
                            "text": text,
                        },
                    }
                )
            elif event["type"] == "response.completed":
                # Usage numbers are not simulated; report zeros.
                events.append(
                    {
                        "type": "turn.completed",
                        "usage": {
                            "input_tokens": 0,
                            "cached_input_tokens": 0,
                            "output_tokens": 0,
                        },
                    }
                )
        return events

    def next_events(self, thread_id: str, new_thread: bool) -> list[dict[str, Any]]:
        """Consume the next scripted body and return its converted events."""
        response_body = self._next_response()
        return self._convert_events(response_body, thread_id, new_thread)
|
||||||
|
|
||||||
|
|
||||||
|
class FakeExec:
    """Drop-in stand-in for ``CodexExec`` that replays scripted proxy events."""

    def __init__(self, _path: str, proxy: ResponsesProxy, calls: list[CodexExecArgs]) -> None:
        # The executable path is accepted (to match the real signature) but unused.
        self._proxy = proxy
        self.calls = calls

    async def run(self, args: CodexExecArgs) -> AsyncGenerator[str, None]:
        """Record *args*, then yield the proxy's scripted events as JSON lines."""
        self.calls.append(args)
        thread_id, _request, new_thread = self._proxy.record_run(args)

        for scripted in self._proxy.next_events(thread_id, new_thread):
            if scripted["type"] == "item.completed":
                completed_text = scripted["item"].get("text")
                if completed_text:
                    # Track assistant output so later turns replay it as history.
                    self._proxy.add_history(thread_id, completed_text)
            await asyncio.sleep(0)
            yield json.dumps(scripted)
|
||||||
|
|
||||||
|
|
||||||
|
async def start_responses_test_proxy(options: ResponsesProxyOptions) -> ResponsesProxy:
    """Build a :class:`ResponsesProxy` from *options*.

    Raises ``ValueError`` when ``response_bodies`` is missing; ``status_code``
    defaults to 200.
    """
    bodies = options.get("response_bodies")
    if bodies is None:
        raise ValueError("response_bodies is required")
    return ResponsesProxy(bodies, options.get("status_code", 200), requests=[])
|
||||||
|
|
||||||
|
|
||||||
|
def sse(*events: SseEvent) -> SseResponseBody:
    """Bundle *events* into an SSE-style response-body record."""
    return {"kind": "sse", "events": [*events]}
|
||||||
|
|
||||||
|
|
||||||
|
def response_started(response_id: str = DEFAULT_RESPONSE_ID) -> SseEvent:
    """Build a ``response.created`` SSE event for *response_id*."""
    return {"type": "response.created", "response": {"id": response_id}}
|
||||||
|
|
||||||
|
|
||||||
|
def assistant_message(text: str, item_id: str = DEFAULT_MESSAGE_ID) -> SseEvent:
    """Build a completed assistant-message output item holding *text*."""
    item: dict[str, object] = {
        "type": "message",
        "role": "assistant",
        "id": item_id,
        "content": [{"type": "output_text", "text": text}],
    }
    return {"type": "response.output_item.done", "item": item}
|
||||||
|
|
||||||
|
|
||||||
|
def response_completed(response_id: str = DEFAULT_RESPONSE_ID) -> SseEvent:
    """Build a ``response.completed`` SSE event for *response_id*."""
    return {"type": "response.completed", "response": {"id": response_id}}
|
||||||
172
sdk/python/tests/run_streamed_test.py
Normal file
172
sdk/python/tests/run_streamed_test.py
Normal file
@@ -0,0 +1,172 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import AsyncGenerator, Callable
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from openai_codex_sdk import Codex, CodexOptions
|
||||||
|
from openai_codex_sdk.events import ThreadEvent
|
||||||
|
|
||||||
|
from .codex_exec_spy import CodexExecSpyResult
|
||||||
|
from .responses_proxy import (
|
||||||
|
ResponsesProxy,
|
||||||
|
assistant_message,
|
||||||
|
response_completed,
|
||||||
|
response_started,
|
||||||
|
sse,
|
||||||
|
)
|
||||||
|
|
||||||
|
CODEX_EXEC_PATH = Path(__file__).resolve().parents[2] / "codex-rs" / "target" / "debug" / "codex"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_returns_thread_events(
    make_responses_proxy, codex_exec_spy: Callable[[ResponsesProxy], CodexExecSpyResult]
) -> None:
    """A single streamed turn yields the full event sequence and sets thread.id."""
    proxy = await make_responses_proxy(
        {
            "status_code": 200,
            "response_bodies": [
                sse(
                    response_started(),
                    assistant_message("Hi!"),
                    response_completed(),
                )
            ],
        }
    )

    codex_exec_spy(proxy)

    client = Codex(CodexOptions(executable_path=str(CODEX_EXEC_PATH), base_url="http://proxy", api_key="test"))

    thread = client.start_thread()
    result = await thread.run_streamed("Hello, world!")

    events: list[ThreadEvent] = []
    async for event in result.events:
        events.append(event)

    # Expected order: thread.started, turn.started, item.completed, turn.completed.
    assert events == [
        {
            "type": "thread.started",
            "thread_id": "thread_1",
        },
        {"type": "turn.started"},
        {
            "type": "item.completed",
            "item": {
                "id": "msg_mock",
                "item_type": "assistant_message",
                "text": "Hi!",
            },
        },
        {
            "type": "turn.completed",
            "usage": {
                "input_tokens": 0,
                "cached_input_tokens": 0,
                "output_tokens": 0,
            },
        },
    ]
    assert thread.id == "thread_1"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_sends_previous_items_when_run_streamed_called_twice(
    make_responses_proxy, codex_exec_spy: Callable[[ResponsesProxy], CodexExecSpyResult]
) -> None:
    """The second turn's synthesized request replays the first turn's assistant output."""
    proxy = await make_responses_proxy(
        {
            "status_code": 200,
            "response_bodies": [
                sse(
                    response_started("response_1"),
                    assistant_message("First response", "item_1"),
                    response_completed("response_1"),
                ),
                sse(
                    response_started("response_2"),
                    assistant_message("Second response", "item_2"),
                    response_completed("response_2"),
                ),
            ],
        }
    )

    codex_exec_spy(proxy)

    client = Codex(CodexOptions(executable_path=str(CODEX_EXEC_PATH), base_url="http://proxy", api_key="test"))

    thread = client.start_thread()
    first = await thread.run_streamed("first input")
    await _drain_events(first.events)

    second = await thread.run_streamed("second input")
    await _drain_events(second.events)

    assert len(proxy.requests) >= 2
    second_request = proxy.requests[1]
    payload = second_request["json"]
    # The proxy should have injected the prior assistant message as history.
    assistant_entry = next((entry for entry in payload["input"] if entry["role"] == "assistant"), None)
    assert assistant_entry is not None
    assistant_text = next(
        (item["text"] for item in assistant_entry.get("content", []) if item.get("type") == "output_text"),
        None,
    )
    assert assistant_text == "First response"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_resumes_thread_by_id_when_streaming(
    make_responses_proxy, codex_exec_spy: Callable[[ResponsesProxy], CodexExecSpyResult]
) -> None:
    """Resuming a thread by id carries the prior assistant output into the next streamed turn."""
    bodies = [
        sse(
            response_started("response_1"),
            assistant_message("First response", "item_1"),
            response_completed("response_1"),
        ),
        sse(
            response_started("response_2"),
            assistant_message("Second response", "item_2"),
            response_completed("response_2"),
        ),
    ]
    proxy = await make_responses_proxy({"status_code": 200, "response_bodies": bodies})
    codex_exec_spy(proxy)

    client = Codex(CodexOptions(executable_path=str(CODEX_EXEC_PATH), base_url="http://proxy", api_key="test"))

    original_thread = client.start_thread()
    first_turn = await original_thread.run_streamed("first input")
    await _drain_events(first_turn.events)

    resumed_thread = client.resume_thread(original_thread.id or "")
    second_turn = await resumed_thread.run_streamed("second input")
    await _drain_events(second_turn.events)

    assert resumed_thread.id == original_thread.id

    assert len(proxy.requests) >= 2
    payload = proxy.requests[1]["json"]
    assistant_entry = next((entry for entry in payload["input"] if entry["role"] == "assistant"), None)
    assert assistant_entry is not None
    # The first turn's assistant text should be present as replayed context.
    assistant_text = next(
        (piece["text"] for piece in assistant_entry.get("content", []) if piece.get("type") == "output_text"),
        None,
    )
    assert assistant_text == "First response"
|
||||||
|
|
||||||
|
|
||||||
|
async def _drain_events(events: AsyncGenerator[ThreadEvent, None]) -> None:
    """Exhaust *events*, discarding everything it yields."""
    iterator = events.__aiter__()
    while True:
        try:
            await iterator.__anext__()
        except StopAsyncIteration:
            return
|
||||||
223
sdk/python/tests/run_test.py
Normal file
223
sdk/python/tests/run_test.py
Normal file
@@ -0,0 +1,223 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Callable
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from openai_codex_sdk import Codex, CodexOptions
|
||||||
|
from openai_codex_sdk.turn_options import TurnOptions
|
||||||
|
|
||||||
|
from .codex_exec_spy import CodexExecSpyResult
|
||||||
|
from .responses_proxy import (
|
||||||
|
ResponsesProxy,
|
||||||
|
assistant_message,
|
||||||
|
response_completed,
|
||||||
|
response_started,
|
||||||
|
sse,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Absolute path to the locally built codex binary: <repo>/codex-rs/target/debug/codex,
# resolved relative to this test file (two directories up from sdk/python/tests/).
CODEX_EXEC_PATH = Path(__file__).resolve().parents[2] / "codex-rs" / "target" / "debug" / "codex"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_returns_thread_events(
    make_responses_proxy, codex_exec_spy: Callable[[ResponsesProxy], CodexExecSpyResult]
) -> None:
    """thread.run() returns the assistant items parsed from the proxied SSE stream."""
    proxy = await make_responses_proxy(
        {
            "status_code": 200,
            "response_bodies": [
                sse(
                    response_started(),
                    assistant_message("Hi!"),
                    response_completed(),
                )
            ],
        }
    )

    # Route codex exec traffic through the proxy; the spy handle itself is
    # not needed in this test, so don't bind it (the original assigned an
    # unused `spy` local).
    codex_exec_spy(proxy)

    client = Codex(CodexOptions(executable_path=str(CODEX_EXEC_PATH), base_url="http://proxy", api_key="test"))
    thread = client.start_thread()

    result = await thread.run("Hello, world!")

    expected_items = [
        {
            "id": "msg_mock",
            "item_type": "assistant_message",
            "text": "Hi!",
        }
    ]
    assert result.items == expected_items
    assert thread.id is not None
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_sends_previous_items_when_run_called_twice(
    make_responses_proxy, codex_exec_spy: Callable[[ResponsesProxy], CodexExecSpyResult]
) -> None:
    """A second thread.run() call includes the first assistant reply in its input."""
    proxy = await make_responses_proxy(
        {
            "status_code": 200,
            "response_bodies": [
                sse(
                    response_started("response_1"),
                    assistant_message("First response", "item_1"),
                    response_completed("response_1"),
                ),
                sse(
                    response_started("response_2"),
                    assistant_message("Second response", "item_2"),
                    response_completed("response_2"),
                ),
            ],
        }
    )
    codex_exec_spy(proxy)

    client = Codex(CodexOptions(executable_path=str(CODEX_EXEC_PATH), base_url="http://proxy", api_key="test"))
    thread = client.start_thread()

    # Two sequential turns on the same thread.
    for prompt in ("first input", "second input"):
        await thread.run(prompt)

    assert len(proxy.requests) >= 2
    payload = proxy.requests[1]["json"]
    assistant_entry = next((entry for entry in payload["input"] if entry["role"] == "assistant"), None)
    assert assistant_entry is not None
    assistant_text = next(
        (chunk["text"] for chunk in assistant_entry.get("content", []) if chunk.get("type") == "output_text"),
        None,
    )
    assert assistant_text == "First response"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_continues_thread_with_options(
    make_responses_proxy, codex_exec_spy: Callable[[ResponsesProxy], CodexExecSpyResult]
) -> None:
    """Per-turn options (model override) are honored on a follow-up run."""
    proxy = await make_responses_proxy(
        {
            "status_code": 200,
            "response_bodies": [
                sse(
                    response_started("response_1"),
                    assistant_message("First response", "item_1"),
                    response_completed("response_1"),
                ),
                sse(
                    response_started("response_2"),
                    assistant_message("Second response", "item_2"),
                    response_completed("response_2"),
                ),
            ],
        }
    )
    codex_exec_spy(proxy)

    client = Codex(CodexOptions(executable_path=str(CODEX_EXEC_PATH), base_url="http://proxy", api_key="test"))
    thread = client.start_thread()

    await thread.run("first input")
    await thread.run("second input", TurnOptions(model="gpt-test-1"))

    assert len(proxy.requests) >= 2
    payload = proxy.requests[1]["json"]
    # The per-turn model override should surface in the proxied request body.
    assert payload.get("model") == "gpt-test-1"
    assistant_entry = next((entry for entry in payload["input"] if entry["role"] == "assistant"), None)
    assert assistant_entry is not None
    assistant_text = next(
        (part["text"] for part in assistant_entry.get("content", []) if part.get("type") == "output_text"),
        None,
    )
    assert assistant_text == "First response"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_resumes_thread_by_id(
    make_responses_proxy, codex_exec_spy: Callable[[ResponsesProxy], CodexExecSpyResult]
) -> None:
    """client.resume_thread(id) continues the conversation with the prior context."""
    responses = [
        sse(
            response_started("response_1"),
            assistant_message("First response", "item_1"),
            response_completed("response_1"),
        ),
        sse(
            response_started("response_2"),
            assistant_message("Second response", "item_2"),
            response_completed("response_2"),
        ),
    ]
    proxy = await make_responses_proxy({"status_code": 200, "response_bodies": responses})
    codex_exec_spy(proxy)

    client = Codex(CodexOptions(executable_path=str(CODEX_EXEC_PATH), base_url="http://proxy", api_key="test"))

    original_thread = client.start_thread()
    await original_thread.run("first input")

    resumed_thread = client.resume_thread(original_thread.id or "")
    result = await resumed_thread.run("second input")

    assert resumed_thread.id == original_thread.id
    assert result.final_response == "Second response"
    assert len(proxy.requests) >= 2
    payload = proxy.requests[1]["json"]
    # Prior assistant output must be replayed as input on the resumed turn.
    assistant_entry = next((entry for entry in payload["input"] if entry["role"] == "assistant"), None)
    assert assistant_entry is not None
    assistant_text = next(
        (piece["text"] for piece in assistant_entry.get("content", []) if piece.get("type") == "output_text"),
        None,
    )
    assert assistant_text == "First response"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_passes_turn_options_to_exec(
    make_responses_proxy, codex_exec_spy: Callable[[ResponsesProxy], CodexExecSpyResult]
) -> None:
    """TurnOptions flow through both the request payload and the codex exec invocation."""
    proxy = await make_responses_proxy(
        {
            "status_code": 200,
            "response_bodies": [
                sse(
                    response_started("response_1"),
                    assistant_message("Turn options applied", "item_1"),
                    response_completed("response_1"),
                )
            ],
        }
    )
    spy = codex_exec_spy(proxy)

    client = Codex(CodexOptions(executable_path=str(CODEX_EXEC_PATH), base_url="http://proxy", api_key="test"))
    thread = client.start_thread()

    await thread.run(
        "apply options",
        TurnOptions(model="gpt-test-1", sandbox_mode="workspace-write"),
    )

    # The model override should be visible in the proxied request body...
    assert proxy.requests
    assert proxy.requests[0]["json"].get("model") == "gpt-test-1"

    # ...and both options should reach the spawned codex exec process.
    assert spy.args
    invocation = spy.args[0]
    assert invocation.sandbox_mode == "workspace-write"
    assert invocation.model == "gpt-test-1"
|
||||||
Reference in New Issue
Block a user