feat: expand api snapshots and schema tooling

This commit is contained in:
Nathan Flurry 2026-01-26 00:13:17 -08:00
parent ee014b0838
commit 011ca27287
72 changed files with 29480 additions and 1081 deletions

View file

@ -1,14 +1,17 @@
name: release
on:
release:
types: [published]
workflow_dispatch:
inputs:
version:
description: "Version (e.g. 0.1.0 or v0.1.0)"
required: true
type: string
latest:
description: "Latest"
required: true
type: boolean
default: true
defaults:
run:
@ -18,44 +21,33 @@ env:
CARGO_INCREMENTAL: 0
jobs:
checks:
uses: ./.github/workflows/ci.yaml
setup:
name: "Setup"
needs: [checks]
runs-on: ubuntu-24.04
outputs:
version: ${{ steps.vars.outputs.version }}
latest: ${{ steps.latest.outputs.latest }}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: dtolnay/rust-toolchain@stable
- uses: pnpm/action-setup@v4
- uses: actions/setup-node@v4
with:
node-version: 20
cache: pnpm
- name: Install tsx
run: npm install -g tsx
- name: Resolve version
id: vars
- name: Run setup phase
run: |
if [ "${{ github.event_name }}" = "release" ]; then
VERSION="${{ github.event.release.tag_name }}"
else
VERSION="${{ inputs.version }}"
CMD="./scripts/release/main.ts --version '${{ inputs.version }}' --phase setup-ci"
if [ "${{ inputs.latest }}" != "true" ]; then
CMD="$CMD --no-latest"
fi
VERSION="${VERSION#v}"
echo "version=$VERSION" >> "$GITHUB_OUTPUT"
- name: Determine latest
id: latest
run: |
./scripts/release/main.ts --version "${{ steps.vars.outputs.version }}" --print-latest --output "$GITHUB_OUTPUT"
eval "$CMD"
binaries:
name: "Build Binaries"
@ -99,8 +91,8 @@ jobs:
name: binary-${{ matrix.target }}
path: dist/sandbox-agent-${{ matrix.target }}${{ matrix.binary_ext }}
publish:
name: "Publish Packages"
complete:
name: "Complete"
needs: [setup, binaries]
runs-on: ubuntu-24.04
steps:
@ -115,49 +107,12 @@ jobs:
- uses: actions/setup-node@v4
with:
node-version: 20
registry-url: 'https://registry.npmjs.org'
registry-url: "https://registry.npmjs.org"
cache: pnpm
- name: Install tsx
run: npm install -g tsx
- name: Download binaries
uses: actions/download-artifact@v4
with:
path: dist/
pattern: binary-*
merge-multiple: true
- name: List downloaded binaries
run: ls -la dist/
- name: Publish all
env:
CARGO_REGISTRY_TOKEN: ${{ secrets.CRATES_IO_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
run: |
VERSION="${{ needs.setup.outputs.version }}"
./scripts/release/main.ts --version "$VERSION" \
--publish-crates \
--publish-npm-sdk \
--publish-npm-cli
artifacts:
name: "Upload Artifacts"
needs: [setup, binaries]
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: actions/setup-node@v4
with:
node-version: 20
- name: Install tsx
run: npm install -g tsx
- name: Install AWS CLI
run: |
sudo apt-get update
@ -177,19 +132,15 @@ jobs:
- name: List downloaded binaries
run: ls -la dist/
- name: Upload artifacts
- name: Publish & upload artifacts
env:
CARGO_REGISTRY_TOKEN: ${{ secrets.CRATES_IO_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
R2_RELEASES_ACCESS_KEY_ID: ${{ secrets.R2_RELEASES_ACCESS_KEY_ID }}
R2_RELEASES_SECRET_ACCESS_KEY: ${{ secrets.R2_RELEASES_SECRET_ACCESS_KEY }}
run: |
VERSION="${{ needs.setup.outputs.version }}"
if [ "${{ needs.setup.outputs.latest }}" = "true" ]; then
LATEST_FLAG="--latest"
else
LATEST_FLAG="--no-latest"
CMD="./scripts/release/main.ts --version '${{ inputs.version }}' --phase complete-ci --no-validate-git"
if [ "${{ inputs.latest }}" != "true" ]; then
CMD="$CMD --no-latest"
fi
./scripts/release/main.ts --version "$VERSION" $LATEST_FLAG \
--upload-typescript \
--upload-install \
--upload-binaries
eval "$CMD"

174
ARCHITECTURE.md Normal file
View file

@ -0,0 +1,174 @@
# Architecture
This document covers three key architectural areas of the sandbox-daemon system.
## Agent Schema Pipeline
The schema pipeline extracts type definitions from AI coding agents and converts them to a universal format.
### Schema Extraction
TypeScript extractors in `resources/agent-schemas/src/` pull schemas from each agent:
| Agent | Source | Extractor |
|-------|--------|-----------|
| Claude | `claude --output-format json --json-schema` | `claude.ts` |
| Codex | `codex app-server generate-json-schema` | `codex.ts` |
| OpenCode | GitHub OpenAPI spec | `opencode.ts` |
| Amp | Scrapes ampcode.com docs | `amp.ts` |
All extractors include fallback schemas for when CLIs or URLs are unavailable.
**Output:** JSON schemas written to `resources/agent-schemas/artifacts/json-schema/`
### Rust Type Generation
The `server/packages/extracted-agent-schemas/` package generates Rust types at build time:
- `build.rs` reads JSON schemas and uses the `typify` crate to generate Rust structs
- Generated code is written to `$OUT_DIR/{agent}.rs`
- Types are exposed via `include!()` macros in `src/lib.rs`
```
resources/agent-schemas/artifacts/json-schema/*.json
↓ (build.rs + typify)
$OUT_DIR/{claude,codex,opencode,amp}.rs
↓ (include!)
extracted_agent_schemas::{claude,codex,opencode,amp}::*
```
### Universal Schema
The `server/packages/universal-agent-schema/` package defines agent-agnostic types:
**Core types** (`src/lib.rs`):
- `UniversalEvent` - Wrapper with id, timestamp, session_id, agent, data
- `UniversalEventData` - Enum: Message, Started, Error, QuestionAsked, PermissionAsked, Unknown
- `UniversalMessage` - Parsed (role, parts, metadata) or Unparsed (raw JSON)
- `UniversalMessagePart` - Text, ToolCall, ToolResult, FunctionCall, FunctionResult, File, Image, Error, Unknown
**Converters** (`src/agents/{claude,codex,opencode,amp}.rs`):
- Each agent has a converter module that transforms native events to universal format
- Conversions are best-effort; unparseable data preserved in `Unparsed` or `Unknown` variants
## Session Management
Sessions track agent conversations with in-memory state.
### Storage
Sessions are stored in an in-memory `HashMap<String, SessionState>` inside `SessionManager`:
```rust
struct SessionManager {
sessions: Mutex<HashMap<String, SessionState>>,
// ...
}
```
There is no disk persistence. Sessions are ephemeral and lost on server restart.
### SessionState
Each session tracks:
| Field | Purpose |
|-------|---------|
| `session_id` | Client-provided identifier |
| `agent` | Agent type (Claude, Codex, OpenCode, Amp) |
| `agent_mode` | Operating mode (build, plan, custom) |
| `permission_mode` | Permission handling (default, plan, bypass) |
| `model` | Optional model override |
| `events: Vec<UniversalEvent>` | Full event history |
| `pending_questions` | Question IDs awaiting reply |
| `pending_permissions` | Permission IDs awaiting reply |
| `broadcaster` | Tokio broadcast channel for SSE streaming |
| `ended` | Whether agent process has terminated |
### Lifecycle
```
POST /v1/sessions/{sessionId} Create session, auto-install agent
POST /v1/sessions/{id}/messages Spawn agent subprocess, stream output
GET /v1/sessions/{id}/events Poll for new events (offset-based)
GET /v1/sessions/{id}/events/sse Subscribe to SSE stream
POST .../questions/{id}/reply Answer agent question
POST .../permissions/{id}/reply Grant/deny permission request
(agent process terminates) Session marked as ended
```
### Event Flow
When a message is sent:
1. `send_message()` spawns the agent CLI as a subprocess
2. `consume_spawn()` reads stdout/stderr line by line
3. Each JSON line is parsed and converted via `parse_agent_line()`
4. Events are recorded via `record_event()` which:
- Assigns incrementing event ID
- Appends to `events` vector
- Broadcasts to SSE subscribers
## SDK Modes
The TypeScript SDK supports two connection modes.
### Embedded Mode
Defined in `sdks/typescript/src/spawn.ts`:
1. **Binary resolution**: Checks `SANDBOX_AGENT_BIN` env, then platform-specific npm package, then `PATH`
2. **Port selection**: Uses provided port or finds a free one via `net.createServer()`
3. **Token generation**: Uses provided token or generates random 24-byte hex string
4. **Spawn**: Launches `sandbox-agent --host <host> --port <port> --token <token>`
5. **Health wait**: Polls `GET /v1/health` until server is ready (up to 15s timeout)
6. **Cleanup**: On dispose, sends SIGTERM then SIGKILL if needed; also registers process exit handlers
```typescript
const handle = await spawnSandboxDaemon({ log: "inherit" });
// handle.baseUrl = "http://127.0.0.1:<port>"
// handle.token = "<generated>"
// handle.dispose() to cleanup
```
### Server Mode
Defined in `sdks/typescript/src/client.ts`:
- Direct HTTP client to a remote `sandbox-agent` server
- Uses provided `baseUrl` and optional `token`
- No subprocess management
```typescript
const client = new SandboxDaemonClient({
baseUrl: "http://remote-server:8080",
token: "secret",
});
```
### Auto-Detection
`SandboxDaemonClient.connect()` chooses the mode automatically:
```typescript
// If baseUrl provided → server mode
const client = await SandboxDaemonClient.connect({
baseUrl: "http://remote:8080",
});
// If no baseUrl → embedded mode (spawns subprocess)
const client = await SandboxDaemonClient.connect({});
// Explicit control
const client = await SandboxDaemonClient.connect({
spawn: { enabled: true, port: 9000 },
});
```
The `spawn` option can be:
- `true` / `false` - Enable/disable embedded mode
- `SandboxDaemonSpawnOptions` - Fine-grained control over host, port, token, binary path, timeout, logging

View file

@ -9,7 +9,15 @@ There are two ways to work with the SDKs:
## Agent Schemas
Agent schemas (Claude Code, Codex, OpenCode, Amp) are available for reference in `resources/agent-schemas/dist/`.
Agent schemas (Claude Code, Codex, OpenCode, Amp) are available for reference in `resources/agent-schemas/artifacts/json-schema/`.
Extraction methods:
- **Claude**: Uses `claude --output-format json --json-schema` CLI command
- **Codex**: Uses `codex app-server generate-json-schema` CLI command
- **OpenCode**: Fetches from GitHub OpenAPI spec
- **Amp**: Scrapes from `https://ampcode.com/manual/appendix?preview#message-schema`
All extractors have fallback schemas for when CLI/URL is unavailable.
Research on how different agents operate (CLI flags, streaming formats, HITL patterns, etc.) is in `research/agents/`. When adding or making changes to agent docs, follow the same structure as existing files.
@ -24,6 +32,8 @@ Universal schema guidance:
- Update `CLAUDE.md` to keep CLI endpoints in sync with HTTP API changes.
- When changing the HTTP API, update the TypeScript SDK and CLI together.
- Do not make breaking changes to API endpoints.
- When changing API routes, ensure the HTTP/SSE test suite has full coverage of every route.
- When agent schema changes, ensure API tests cover the new schema and event shapes end-to-end.
### CLI ⇄ HTTP endpoint map (keep in sync)

View file

@ -9,3 +9,70 @@ authors = ["Sandbox Agent Contributors"]
license = "Apache-2.0"
repository = "https://github.com/rivet-dev/sandbox-agent"
description = "Universal agent API for AI coding assistants"
[workspace.dependencies]
# Internal crates
sandbox-agent-core = { path = "server/packages/sandbox-agent" }
sandbox-agent-error = { path = "server/packages/error" }
sandbox-agent-agent-management = { path = "server/packages/agent-management" }
sandbox-agent-agent-credentials = { path = "server/packages/agent-credentials" }
sandbox-agent-universal-agent-schema = { path = "server/packages/universal-agent-schema" }
sandbox-agent-extracted-agent-schemas = { path = "server/packages/extracted-agent-schemas" }
# Serialization
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
# Error handling
thiserror = "1.0"
# Schema generation
schemars = "0.8"
utoipa = { version = "4.2", features = ["axum_extras"] }
# Web framework
axum = "0.7"
tower = { version = "0.5", features = ["util"] }
tower-http = { version = "0.5", features = ["cors", "trace"] }
# Async runtime
tokio = { version = "1.36", features = ["macros", "rt-multi-thread", "signal", "time"] }
tokio-stream = { version = "0.1", features = ["sync"] }
futures = "0.3"
# HTTP client
reqwest = { version = "0.11", features = ["blocking", "json", "rustls-tls", "stream"] }
# CLI
clap = { version = "4.5", features = ["derive"] }
# Logging
tracing = "0.1"
tracing-logfmt = "0.3"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
# Time/date
time = { version = "0.3", features = ["parsing", "formatting"] }
chrono = { version = "0.4", features = ["serde"] }
# Filesystem/paths
dirs = "5.0"
tempfile = "3.10"
# Archive handling
flate2 = "1.0"
tar = "0.4"
zip = { version = "0.6", default-features = false, features = ["deflate"] }
# Misc
url = "2.5"
regress = "0.10"
# Code generation (build deps)
typify = "0.4"
prettyplease = "0.2"
syn = "2.0"
# Testing
http-body-util = "0.1"
insta = { version = "1.41", features = ["yaml"] }

View file

@ -7,6 +7,29 @@ Universal API for running Claude Code, Codex, OpenCode, and Amp inside sandboxes
- **Universal session schema**: Universal schema to store agent transcripts
- **Supports your sandbox provider**: Daytona, E2B, Vercel Sandboxes, and more
- **Lightweight, portable Rust binary**: Install anywhere with 1 curl command
- **OpenAPI spec**: Versioned API schema tracked in `sdks/openapi/openapi.json`
## Agent Support
| Feature | [Claude Code](https://docs.anthropic.com/en/docs/agents-and-tools/claude-code/overview) | [Codex](https://github.com/openai/codex) | [OpenCode](https://github.com/opencode-ai/opencode) | [Amp](https://ampcode.com) |
|---------|:-----------:|:-----:|:--------:|:---:|
| Stability | Stable | Stable | Experimental | Experimental |
| Text Messages | ✓ | ✓ | ✓ | ✓ |
| Tool Calls | ✓ | ✓ | ✓ | ✓ |
| Tool Results | ✓ | ✓ | ✓ | ✓ |
| Questions (HITL) | ✓ | | ✓ | |
| Permissions (HITL) | | | ✓ | |
| Images | | ✓ | ✓ | |
| File Attachments | | ✓ | ✓ | |
| Session Lifecycle | | ✓ | ✓ | |
| Error Events | | ✓ | ✓ | ✓ |
| Reasoning/Thinking | | ✓ | | |
| Command Execution | | ✓ | | |
| File Changes | | ✓ | | |
| MCP Tools | | ✓ | | |
| Streaming Deltas | | ✓ | ✓ | |
Want support for another agent? [Open an issue](https://github.com/rivet-dev/sandbox-agent/issues/new) to request it.
## Architecture

2
bugs.md Normal file
View file

@ -0,0 +1,2 @@
- openai extracted credentials do not work

View file

@ -14,106 +14,19 @@ import {
Zap
} from "lucide-react";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
const API_PREFIX = "/v1";
type AgentInfo = {
id: string;
installed: boolean;
version?: string;
path?: string;
};
type SessionInfo = {
sessionId: string;
agent: string;
agentMode: string;
permissionMode: string;
model?: string;
variant?: string;
agentSessionId?: string;
ended: boolean;
eventCount: number;
};
type AgentMode = {
id: string;
name: string;
description?: string;
};
type UniversalEvent = {
id: number;
timestamp: string;
sessionId: string;
agent: string;
agentSessionId?: string;
data: UniversalEventData;
};
type UniversalEventData =
| { message: UniversalMessage }
| { started: StartedInfo }
| { error: CrashInfo }
| { questionAsked: QuestionRequest }
| { permissionAsked: PermissionRequest };
type UniversalMessagePart = {
type: string;
text?: string;
name?: string;
input?: unknown;
output?: unknown;
};
type UniversalMessage = {
role?: string;
parts?: UniversalMessagePart[];
raw?: unknown;
error?: string;
};
type StartedInfo = {
message?: string;
pid?: number;
[key: string]: unknown;
};
type CrashInfo = {
message?: string;
code?: string;
detail?: string;
[key: string]: unknown;
};
type QuestionOption = {
label: string;
description?: string;
};
type QuestionItem = {
header?: string;
question: string;
options: QuestionOption[];
multiSelect?: boolean;
};
type QuestionRequest = {
id: string;
sessionID?: string;
questions: QuestionItem[];
tool?: { messageID?: string; callID?: string };
};
type PermissionRequest = {
id: string;
sessionID?: string;
permission: string;
patterns?: string[];
metadata?: Record<string, unknown>;
always?: string[];
tool?: { messageID?: string; callID?: string };
};
import {
SandboxDaemonError,
createSandboxDaemonClient,
type SandboxDaemonClient,
type AgentInfo,
type AgentModeInfo,
type PermissionRequest,
type QuestionRequest,
type SessionInfo,
type UniversalEvent,
type UniversalMessage,
type UniversalMessagePart
} from "sandbox-agent";
type RequestLog = {
id: number;
@ -130,29 +43,6 @@ type DebugTab = "log" | "events" | "approvals" | "agents";
const defaultAgents = ["claude", "codex", "opencode", "amp"];
const buildUrl = (endpoint: string, path: string, query?: Record<string, string>) => {
const base = endpoint.replace(/\/$/, "");
const fullPath = path.startsWith("/") ? path : `/${path}`;
const url = new URL(`${base}${fullPath}`);
if (query) {
Object.entries(query).forEach(([key, value]) => {
if (value !== "") {
url.searchParams.set(key, value);
}
});
}
return url.toString();
};
const safeJson = (text: string) => {
if (!text) return null;
try {
return JSON.parse(text);
} catch {
return text;
}
};
const formatJson = (value: unknown) => {
if (value === null || value === undefined) return "";
if (typeof value === "string") return value;
@ -203,7 +93,7 @@ export default function App() {
const [connectError, setConnectError] = useState<string | null>(null);
const [agents, setAgents] = useState<AgentInfo[]>([]);
const [modesByAgent, setModesByAgent] = useState<Record<string, AgentMode[]>>({});
const [modesByAgent, setModesByAgent] = useState<Record<string, AgentModeInfo[]>>({});
const [sessions, setSessions] = useState<SessionInfo[]>([]);
const [agentId, setAgentId] = useState("claude");
@ -222,7 +112,6 @@ export default function App() {
const [polling, setPolling] = useState(false);
const pollTimerRef = useRef<number | null>(null);
const [streamMode, setStreamMode] = useState<"poll" | "sse">("poll");
const eventSourceRef = useRef<EventSource | null>(null);
const [eventError, setEventError] = useState<string | null>(null);
const [questionSelections, setQuestionSelections] = useState<Record<string, string[][]>>({});
@ -237,6 +126,9 @@ export default function App() {
const messagesEndRef = useRef<HTMLDivElement>(null);
const clientRef = useRef<SandboxDaemonClient | null>(null);
const sseAbortRef = useRef<AbortController | null>(null);
const logRequest = useCallback((entry: RequestLog) => {
setRequestLog((prev) => {
const next = [entry, ...prev];
@ -244,25 +136,16 @@ export default function App() {
});
}, []);
const apiFetch = useCallback(
async (
path: string,
options?: {
method?: string;
body?: unknown;
query?: Record<string, string>;
}
) => {
const method = options?.method ?? "GET";
const url = buildUrl(endpoint, path, options?.query);
const bodyText = options?.body ? JSON.stringify(options.body) : undefined;
const headers: Record<string, string> = {};
if (bodyText) {
headers["Content-Type"] = "application/json";
}
if (token) {
headers.Authorization = `Bearer ${token}`;
}
const createClient = useCallback(() => {
const fetchWithLog: typeof fetch = async (input, init) => {
const method = init?.method ?? "GET";
const url =
typeof input === "string"
? input
: input instanceof URL
? input.toString()
: input.url;
const bodyText = typeof init?.body === "string" ? init.body : undefined;
const curl = buildCurl(method, url, bodyText, token);
const logId = logIdRef.current++;
const entry: RequestLog = {
@ -276,23 +159,10 @@ export default function App() {
let logged = false;
try {
const response = await fetch(url, {
method,
headers,
body: bodyText
});
const text = await response.text();
const data = safeJson(text);
const response = await fetch(input, init);
logRequest({ ...entry, status: response.status });
logged = true;
if (!response.ok) {
const errorMessage =
(typeof data === "object" && data && "detail" in data && data.detail) ||
(typeof data === "object" && data && "title" in data && data.title) ||
(typeof data === "string" ? data : `Request failed with ${response.status}`);
throw new Error(String(errorMessage));
}
return data;
return response;
} catch (error) {
const message = error instanceof Error ? error.message : "Request failed";
if (!logged) {
@ -300,22 +170,45 @@ export default function App() {
}
throw error;
}
},
[endpoint, token, logRequest]
);
};
const client = createSandboxDaemonClient({
baseUrl: endpoint,
token: token || undefined,
fetch: fetchWithLog
});
clientRef.current = client;
return client;
}, [endpoint, token, logRequest]);
const getClient = useCallback((): SandboxDaemonClient => {
if (!clientRef.current) {
throw new Error("Not connected");
}
return clientRef.current;
}, []);
const getErrorMessage = (error: unknown, fallback: string) => {
if (error instanceof SandboxDaemonError) {
return error.problem?.detail ?? error.problem?.title ?? error.message;
}
return error instanceof Error ? error.message : fallback;
};
const connect = async () => {
setConnecting(true);
setConnectError(null);
try {
await apiFetch(`${API_PREFIX}/health`);
const client = createClient();
await client.getHealth();
setConnected(true);
await refreshAgents();
await fetchSessions();
} catch (error) {
const message = error instanceof Error ? error.message : "Unable to connect";
const message = getErrorMessage(error, "Unable to connect");
setConnectError(message);
setConnected(false);
clientRef.current = null;
} finally {
setConnecting(false);
}
@ -323,6 +216,7 @@ export default function App() {
const disconnect = () => {
setConnected(false);
clientRef.current = null;
setSessionError(null);
setEvents([]);
setOffset(0);
@ -334,8 +228,8 @@ export default function App() {
const refreshAgents = async () => {
try {
const data = await apiFetch(`${API_PREFIX}/agents`);
const agentList = (data as { agents?: AgentInfo[] })?.agents ?? [];
const data = await getClient().listAgents();
const agentList = data.agents ?? [];
setAgents(agentList);
// Auto-load modes for installed agents
for (const agent of agentList) {
@ -344,14 +238,14 @@ export default function App() {
}
}
} catch (error) {
setConnectError(error instanceof Error ? error.message : "Unable to refresh agents");
setConnectError(getErrorMessage(error, "Unable to refresh agents"));
}
};
const fetchSessions = async () => {
try {
const data = await apiFetch(`${API_PREFIX}/sessions`);
const sessionList = (data as { sessions?: SessionInfo[] })?.sessions ?? [];
const data = await getClient().listSessions();
const sessionList = data.sessions ?? [];
setSessions(sessionList);
} catch {
// Silently fail - sessions list is supplementary
@ -360,20 +254,17 @@ export default function App() {
const installAgent = async (targetId: string, reinstall: boolean) => {
try {
await apiFetch(`${API_PREFIX}/agents/${targetId}/install`, {
method: "POST",
body: { reinstall }
});
await getClient().installAgent(targetId, { reinstall });
await refreshAgents();
} catch (error) {
setConnectError(error instanceof Error ? error.message : "Install failed");
setConnectError(getErrorMessage(error, "Install failed"));
}
};
const loadModes = async (targetId: string) => {
try {
const data = await apiFetch(`${API_PREFIX}/agents/${targetId}/modes`);
const modes = (data as { modes?: AgentMode[] })?.modes ?? [];
const data = await getClient().getAgentModes(targetId);
const modes = data.modes ?? [];
setModesByAgent((prev) => ({ ...prev, [targetId]: modes }));
} catch {
// Silently fail - modes are optional
@ -384,37 +275,41 @@ export default function App() {
if (!message.trim()) return;
setSessionError(null);
try {
await apiFetch(`${API_PREFIX}/sessions/${sessionId}/messages`, {
method: "POST",
body: { message }
});
await getClient().postMessage(sessionId, { message });
setMessage("");
// Auto-start polling if not already
if (!polling && streamMode === "poll") {
if (!polling) {
if (streamMode === "poll") {
startPolling();
} else {
startSse();
}
}
} catch (error) {
setSessionError(error instanceof Error ? error.message : "Unable to send message");
setSessionError(getErrorMessage(error, "Unable to send message"));
}
};
const createSession = async () => {
setSessionError(null);
try {
const body: Record<string, string> = { agent: agentId };
const body: {
agent: string;
agentMode?: string;
permissionMode?: string;
model?: string;
variant?: string;
} = { agent: agentId };
if (agentMode) body.agentMode = agentMode;
if (permissionMode) body.permissionMode = permissionMode;
if (model) body.model = model;
if (variant) body.variant = variant;
await apiFetch(`${API_PREFIX}/sessions/${sessionId}`, {
method: "POST",
body
});
await getClient().createSession(sessionId, body);
await fetchSessions();
} catch (error) {
setSessionError(error instanceof Error ? error.message : "Unable to create session");
setSessionError(getErrorMessage(error, "Unable to create session"));
}
};
@ -446,19 +341,22 @@ export default function App() {
// Create the session
try {
const body: Record<string, string> = { agent: agentId };
const body: {
agent: string;
agentMode?: string;
permissionMode?: string;
model?: string;
variant?: string;
} = { agent: agentId };
if (agentMode) body.agentMode = agentMode;
if (permissionMode) body.permissionMode = permissionMode;
if (model) body.model = model;
if (variant) body.variant = variant;
await apiFetch(`${API_PREFIX}/sessions/${id}`, {
method: "POST",
body
});
await getClient().createSession(id, body);
await fetchSessions();
} catch (error) {
setSessionError(error instanceof Error ? error.message : "Unable to create session");
setSessionError(getErrorMessage(error, "Unable to create session"));
}
};
@ -473,20 +371,17 @@ export default function App() {
const fetchEvents = useCallback(async () => {
if (!sessionId) return;
try {
const data = await apiFetch(`${API_PREFIX}/sessions/${sessionId}/events`, {
query: {
offset: String(offsetRef.current),
limit: "200"
}
const response = await getClient().getEvents(sessionId, {
offset: offsetRef.current,
limit: 200
});
const response = data as { events?: UniversalEvent[]; hasMore?: boolean };
const newEvents = response.events ?? [];
appendEvents(newEvents);
setEventError(null);
} catch (error) {
setEventError(error instanceof Error ? error.message : "Unable to fetch events");
setEventError(getErrorMessage(error, "Unable to fetch events"));
}
}, [apiFetch, appendEvents, sessionId]);
}, [appendEvents, getClient, sessionId]);
const startPolling = () => {
stopSse();
@ -506,39 +401,47 @@ export default function App() {
const startSse = () => {
stopPolling();
if (eventSourceRef.current) return;
if (token) {
setEventError("SSE streams cannot send auth headers. Use polling or run daemon with --no-token.");
if (sseAbortRef.current) return;
if (!sessionId) {
setEventError("Select or create a session first.");
return;
}
const url = buildUrl(endpoint, `${API_PREFIX}/sessions/${sessionId}/events/sse`, {
offset: String(offsetRef.current)
});
const source = new EventSource(url);
eventSourceRef.current = source;
source.onmessage = (event) => {
setEventError(null);
setPolling(true);
const controller = new AbortController();
sseAbortRef.current = controller;
const start = async () => {
try {
const parsed = safeJson(event.data);
if (Array.isArray(parsed)) {
appendEvents(parsed as UniversalEvent[]);
} else if (parsed && typeof parsed === "object") {
appendEvents([parsed as UniversalEvent]);
for await (const event of getClient().streamEvents(
sessionId,
{ offset: offsetRef.current },
controller.signal
)) {
appendEvents([event]);
}
} catch (error) {
setEventError(error instanceof Error ? error.message : "SSE parse error");
if (controller.signal.aborted) {
return;
}
setEventError(getErrorMessage(error, "SSE connection error. Falling back to polling."));
stopSse();
startPolling();
} finally {
if (sseAbortRef.current === controller) {
sseAbortRef.current = null;
setPolling(false);
}
}
};
source.onerror = () => {
setEventError("SSE connection error. Falling back to polling.");
stopSse();
};
void start();
};
const stopSse = () => {
if (eventSourceRef.current) {
eventSourceRef.current.close();
eventSourceRef.current = null;
if (sseAbortRef.current) {
sseAbortRef.current.abort();
sseAbortRef.current = null;
}
setPolling(false);
};
const resetEvents = () => {
@ -584,37 +487,28 @@ export default function App() {
const answerQuestion = async (request: QuestionRequest) => {
const answers = questionSelections[request.id] ?? [];
try {
await apiFetch(`${API_PREFIX}/sessions/${sessionId}/questions/${request.id}/reply`, {
method: "POST",
body: { answers }
});
await getClient().replyQuestion(sessionId, request.id, { answers });
setQuestionStatus((prev) => ({ ...prev, [request.id]: "replied" }));
} catch (error) {
setEventError(error instanceof Error ? error.message : "Unable to reply");
setEventError(getErrorMessage(error, "Unable to reply"));
}
};
const rejectQuestion = async (requestId: string) => {
try {
await apiFetch(`${API_PREFIX}/sessions/${sessionId}/questions/${requestId}/reject`, {
method: "POST",
body: {}
});
await getClient().rejectQuestion(sessionId, requestId);
setQuestionStatus((prev) => ({ ...prev, [requestId]: "rejected" }));
} catch (error) {
setEventError(error instanceof Error ? error.message : "Unable to reject");
setEventError(getErrorMessage(error, "Unable to reject"));
}
};
const replyPermission = async (requestId: string, reply: "once" | "always" | "reject") => {
try {
await apiFetch(`${API_PREFIX}/sessions/${sessionId}/permissions/${requestId}/reply`, {
method: "POST",
body: { reply }
});
await getClient().replyPermission(sessionId, requestId, { reply });
setPermissionStatus((prev) => ({ ...prev, [requestId]: "replied" }));
} catch (error) {
setEventError(error instanceof Error ? error.message : "Unable to reply");
setEventError(getErrorMessage(error, "Unable to reply"));
}
};
@ -637,14 +531,14 @@ export default function App() {
.filter((event): event is UniversalEvent & { data: { message: UniversalMessage } } => "message" in event.data)
.map((event) => {
const msg = event.data.message;
// Extract text from parts array
const content = msg?.parts
?.filter((part) => part.type === "text" && part.text)
.map((part) => part.text)
.join("\n") ?? "";
const parts = "parts" in msg ? msg.parts : [];
const content = parts
.filter((part: UniversalMessagePart) => part.type === "text" && part.text)
.map((part: UniversalMessagePart) => part.text)
.join("\n");
return {
id: event.id,
role: msg?.role ?? "assistant",
role: "role" in msg ? msg.role : "assistant",
content,
timestamp: event.timestamp
};
@ -697,7 +591,11 @@ export default function App() {
const toggleStream = () => {
if (polling) {
if (streamMode === "poll") {
stopPolling();
} else {
stopSse();
}
} else if (streamMode === "poll") {
startPolling();
} else {

View file

@ -0,0 +1,153 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "https://sandbox-agent/schemas/amp.json",
"title": "AMP Code SDK Schema",
"definitions": {
"StreamJSONMessage": {
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"message",
"tool_call",
"tool_result",
"error",
"done"
]
},
"id": {
"type": "string"
},
"content": {
"type": "string"
},
"tool_call": {
"$ref": "#/definitions/ToolCall"
},
"error": {
"type": "string"
}
},
"required": [
"type"
]
},
"AmpOptions": {
"type": "object",
"properties": {
"model": {
"type": "string"
},
"apiKey": {
"type": "string"
},
"baseURL": {
"type": "string"
},
"maxTokens": {
"type": "number"
},
"temperature": {
"type": "number"
},
"systemPrompt": {
"type": "string"
},
"tools": {
"type": "array",
"items": {
"type": "object"
}
},
"workingDirectory": {
"type": "string"
},
"permissionRules": {
"type": "array",
"items": {
"$ref": "#/definitions/PermissionRule"
}
}
}
},
"PermissionRule": {
"type": "object",
"properties": {
"tool": {
"type": "string"
},
"action": {
"type": "string",
"enum": [
"allow",
"deny",
"ask"
]
},
"pattern": {
"type": "string"
},
"description": {
"type": "string"
}
},
"required": [
"tool",
"action"
]
},
"Message": {
"type": "object",
"properties": {
"role": {
"type": "string",
"enum": [
"user",
"assistant",
"system"
]
},
"content": {
"type": "string"
},
"tool_calls": {
"type": "array",
"items": {
"$ref": "#/definitions/ToolCall"
}
}
},
"required": [
"role",
"content"
]
},
"ToolCall": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"name": {
"type": "string"
},
"arguments": {
"oneOf": [
{
"type": "string"
},
{
"type": "object"
}
]
}
},
"required": [
"id",
"name",
"arguments"
]
}
}
}

View file

@ -0,0 +1,182 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "https://sandbox-agent/schemas/claude.json",
"title": "Claude Code SDK Schema",
"definitions": {
"SDKMessage": {
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"user",
"assistant",
"result"
]
},
"content": {
"type": "string"
},
"timestamp": {
"type": "string",
"format": "date-time"
}
},
"required": [
"type"
]
},
"SDKResultMessage": {
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "result"
},
"result": {
"type": "object"
},
"error": {
"type": "string"
},
"duration_ms": {
"type": "number"
}
},
"required": [
"type"
]
},
"Options": {
"type": "object",
"properties": {
"model": {
"type": "string"
},
"maxTokens": {
"type": "number"
},
"temperature": {
"type": "number"
},
"systemPrompt": {
"type": "string"
},
"tools": {
"type": "array",
"items": {
"type": "string"
}
},
"allowedTools": {
"type": "array",
"items": {
"type": "string"
}
},
"workingDirectory": {
"type": "string"
}
}
},
"BashInput": {
"type": "object",
"properties": {
"command": {
"type": "string"
},
"timeout": {
"type": "number"
},
"workingDirectory": {
"type": "string"
}
},
"required": [
"command"
]
},
"FileEditInput": {
"type": "object",
"properties": {
"path": {
"type": "string"
},
"oldText": {
"type": "string"
},
"newText": {
"type": "string"
}
},
"required": [
"path",
"oldText",
"newText"
]
},
"FileReadInput": {
"type": "object",
"properties": {
"path": {
"type": "string"
},
"startLine": {
"type": "number"
},
"endLine": {
"type": "number"
}
},
"required": [
"path"
]
},
"FileWriteInput": {
"type": "object",
"properties": {
"path": {
"type": "string"
},
"content": {
"type": "string"
}
},
"required": [
"path",
"content"
]
},
"GlobInput": {
"type": "object",
"properties": {
"pattern": {
"type": "string"
},
"path": {
"type": "string"
}
},
"required": [
"pattern"
]
},
"GrepInput": {
"type": "object",
"properties": {
"pattern": {
"type": "string"
},
"path": {
"type": "string"
},
"include": {
"type": "string"
}
},
"required": [
"pattern"
]
}
}
}

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

695
resources/agent-schemas/deno.lock generated Normal file
View file

@ -0,0 +1,695 @@
{
"version": "5",
"specifiers": {
"npm:@anthropic-ai/claude-code@latest": "2.1.19",
"npm:@openai/codex@latest": "0.91.0",
"npm:@types/node@22": "22.19.7",
"npm:cheerio@1": "1.2.0",
"npm:ts-json-schema-generator@^2.4.0": "2.4.0",
"npm:tsx@^4.19.0": "4.21.0",
"npm:typescript@^5.7.0": "5.9.3"
},
"npm": {
"@anthropic-ai/claude-code@2.1.19": {
"integrity": "sha512-/bUlQuX/6nKr1Zqfi/9Q6xf7WonUBk72ZfKKENU4WVrIFWqTv/0JJsoW/dHol9QBNHvyfKIeBbYu4avHNRAnuQ==",
"optionalDependencies": [
"@img/sharp-darwin-arm64",
"@img/sharp-darwin-x64",
"@img/sharp-linux-arm",
"@img/sharp-linux-arm64",
"@img/sharp-linux-x64",
"@img/sharp-linuxmusl-arm64",
"@img/sharp-linuxmusl-x64",
"@img/sharp-win32-x64"
],
"bin": true
},
"@esbuild/aix-ppc64@0.27.2": {
"integrity": "sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==",
"os": ["aix"],
"cpu": ["ppc64"]
},
"@esbuild/android-arm64@0.27.2": {
"integrity": "sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA==",
"os": ["android"],
"cpu": ["arm64"]
},
"@esbuild/android-arm@0.27.2": {
"integrity": "sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA==",
"os": ["android"],
"cpu": ["arm"]
},
"@esbuild/android-x64@0.27.2": {
"integrity": "sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A==",
"os": ["android"],
"cpu": ["x64"]
},
"@esbuild/darwin-arm64@0.27.2": {
"integrity": "sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==",
"os": ["darwin"],
"cpu": ["arm64"]
},
"@esbuild/darwin-x64@0.27.2": {
"integrity": "sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==",
"os": ["darwin"],
"cpu": ["x64"]
},
"@esbuild/freebsd-arm64@0.27.2": {
"integrity": "sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g==",
"os": ["freebsd"],
"cpu": ["arm64"]
},
"@esbuild/freebsd-x64@0.27.2": {
"integrity": "sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA==",
"os": ["freebsd"],
"cpu": ["x64"]
},
"@esbuild/linux-arm64@0.27.2": {
"integrity": "sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw==",
"os": ["linux"],
"cpu": ["arm64"]
},
"@esbuild/linux-arm@0.27.2": {
"integrity": "sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==",
"os": ["linux"],
"cpu": ["arm"]
},
"@esbuild/linux-ia32@0.27.2": {
"integrity": "sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w==",
"os": ["linux"],
"cpu": ["ia32"]
},
"@esbuild/linux-loong64@0.27.2": {
"integrity": "sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg==",
"os": ["linux"],
"cpu": ["loong64"]
},
"@esbuild/linux-mips64el@0.27.2": {
"integrity": "sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw==",
"os": ["linux"],
"cpu": ["mips64el"]
},
"@esbuild/linux-ppc64@0.27.2": {
"integrity": "sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ==",
"os": ["linux"],
"cpu": ["ppc64"]
},
"@esbuild/linux-riscv64@0.27.2": {
"integrity": "sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA==",
"os": ["linux"],
"cpu": ["riscv64"]
},
"@esbuild/linux-s390x@0.27.2": {
"integrity": "sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w==",
"os": ["linux"],
"cpu": ["s390x"]
},
"@esbuild/linux-x64@0.27.2": {
"integrity": "sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==",
"os": ["linux"],
"cpu": ["x64"]
},
"@esbuild/netbsd-arm64@0.27.2": {
"integrity": "sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw==",
"os": ["netbsd"],
"cpu": ["arm64"]
},
"@esbuild/netbsd-x64@0.27.2": {
"integrity": "sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA==",
"os": ["netbsd"],
"cpu": ["x64"]
},
"@esbuild/openbsd-arm64@0.27.2": {
"integrity": "sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA==",
"os": ["openbsd"],
"cpu": ["arm64"]
},
"@esbuild/openbsd-x64@0.27.2": {
"integrity": "sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg==",
"os": ["openbsd"],
"cpu": ["x64"]
},
"@esbuild/openharmony-arm64@0.27.2": {
"integrity": "sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag==",
"os": ["openharmony"],
"cpu": ["arm64"]
},
"@esbuild/sunos-x64@0.27.2": {
"integrity": "sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg==",
"os": ["sunos"],
"cpu": ["x64"]
},
"@esbuild/win32-arm64@0.27.2": {
"integrity": "sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg==",
"os": ["win32"],
"cpu": ["arm64"]
},
"@esbuild/win32-ia32@0.27.2": {
"integrity": "sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ==",
"os": ["win32"],
"cpu": ["ia32"]
},
"@esbuild/win32-x64@0.27.2": {
"integrity": "sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ==",
"os": ["win32"],
"cpu": ["x64"]
},
"@img/sharp-darwin-arm64@0.33.5": {
"integrity": "sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ==",
"optionalDependencies": [
"@img/sharp-libvips-darwin-arm64"
],
"os": ["darwin"],
"cpu": ["arm64"]
},
"@img/sharp-darwin-x64@0.33.5": {
"integrity": "sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q==",
"optionalDependencies": [
"@img/sharp-libvips-darwin-x64"
],
"os": ["darwin"],
"cpu": ["x64"]
},
"@img/sharp-libvips-darwin-arm64@1.0.4": {
"integrity": "sha512-XblONe153h0O2zuFfTAbQYAX2JhYmDHeWikp1LM9Hul9gVPjFY427k6dFEcOL72O01QxQsWi761svJ/ev9xEDg==",
"os": ["darwin"],
"cpu": ["arm64"]
},
"@img/sharp-libvips-darwin-x64@1.0.4": {
"integrity": "sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ==",
"os": ["darwin"],
"cpu": ["x64"]
},
"@img/sharp-libvips-linux-arm64@1.0.4": {
"integrity": "sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA==",
"os": ["linux"],
"cpu": ["arm64"]
},
"@img/sharp-libvips-linux-arm@1.0.5": {
"integrity": "sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g==",
"os": ["linux"],
"cpu": ["arm"]
},
"@img/sharp-libvips-linux-x64@1.0.4": {
"integrity": "sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw==",
"os": ["linux"],
"cpu": ["x64"]
},
"@img/sharp-libvips-linuxmusl-arm64@1.0.4": {
"integrity": "sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA==",
"os": ["linux"],
"cpu": ["arm64"]
},
"@img/sharp-libvips-linuxmusl-x64@1.0.4": {
"integrity": "sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw==",
"os": ["linux"],
"cpu": ["x64"]
},
"@img/sharp-linux-arm64@0.33.5": {
"integrity": "sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA==",
"optionalDependencies": [
"@img/sharp-libvips-linux-arm64"
],
"os": ["linux"],
"cpu": ["arm64"]
},
"@img/sharp-linux-arm@0.33.5": {
"integrity": "sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ==",
"optionalDependencies": [
"@img/sharp-libvips-linux-arm"
],
"os": ["linux"],
"cpu": ["arm"]
},
"@img/sharp-linux-x64@0.33.5": {
"integrity": "sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA==",
"optionalDependencies": [
"@img/sharp-libvips-linux-x64"
],
"os": ["linux"],
"cpu": ["x64"]
},
"@img/sharp-linuxmusl-arm64@0.33.5": {
"integrity": "sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g==",
"optionalDependencies": [
"@img/sharp-libvips-linuxmusl-arm64"
],
"os": ["linux"],
"cpu": ["arm64"]
},
"@img/sharp-linuxmusl-x64@0.33.5": {
"integrity": "sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw==",
"optionalDependencies": [
"@img/sharp-libvips-linuxmusl-x64"
],
"os": ["linux"],
"cpu": ["x64"]
},
"@img/sharp-win32-x64@0.33.5": {
"integrity": "sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg==",
"os": ["win32"],
"cpu": ["x64"]
},
"@isaacs/balanced-match@4.0.1": {
"integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ=="
},
"@isaacs/brace-expansion@5.0.0": {
"integrity": "sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==",
"dependencies": [
"@isaacs/balanced-match"
]
},
"@isaacs/cliui@8.0.2": {
"integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==",
"dependencies": [
"string-width@5.1.2",
"string-width-cjs@npm:string-width@4.2.3",
"strip-ansi@7.1.2",
"strip-ansi-cjs@npm:strip-ansi@6.0.1",
"wrap-ansi@8.1.0",
"wrap-ansi-cjs@npm:wrap-ansi@7.0.0"
]
},
"@openai/codex@0.91.0": {
"integrity": "sha512-eRLRg0+uM0g0iW+Ca5VedBk+laslLcq93Hf6rbFtv+gLb4+aMib2UPdvlDlvvCVkBMbvE8ckY/cju+iOOuKCNA==",
"bin": true
},
"@types/json-schema@7.0.15": {
"integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="
},
"@types/node@22.19.7": {
"integrity": "sha512-MciR4AKGHWl7xwxkBa6xUGxQJ4VBOmPTF7sL+iGzuahOFaO0jHCsuEfS80pan1ef4gWId1oWOweIhrDEYLuaOw==",
"dependencies": [
"undici-types"
]
},
"ansi-regex@5.0.1": {
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="
},
"ansi-regex@6.2.2": {
"integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg=="
},
"ansi-styles@4.3.0": {
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
"dependencies": [
"color-convert"
]
},
"ansi-styles@6.2.3": {
"integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg=="
},
"boolbase@1.0.0": {
"integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww=="
},
"cheerio-select@2.1.0": {
"integrity": "sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==",
"dependencies": [
"boolbase",
"css-select",
"css-what",
"domelementtype",
"domhandler",
"domutils"
]
},
"cheerio@1.2.0": {
"integrity": "sha512-WDrybc/gKFpTYQutKIK6UvfcuxijIZfMfXaYm8NMsPQxSYvf+13fXUJ4rztGGbJcBQ/GF55gvrZ0Bc0bj/mqvg==",
"dependencies": [
"cheerio-select",
"dom-serializer",
"domhandler",
"domutils",
"encoding-sniffer",
"htmlparser2",
"parse5",
"parse5-htmlparser2-tree-adapter",
"parse5-parser-stream",
"undici",
"whatwg-mimetype"
]
},
"color-convert@2.0.1": {
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
"dependencies": [
"color-name"
]
},
"color-name@1.1.4": {
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
},
"commander@13.1.0": {
"integrity": "sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw=="
},
"cross-spawn@7.0.6": {
"integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
"dependencies": [
"path-key",
"shebang-command",
"which"
]
},
"css-select@5.2.2": {
"integrity": "sha512-TizTzUddG/xYLA3NXodFM0fSbNizXjOKhqiQQwvhlspadZokn1KDy0NZFS0wuEubIYAV5/c1/lAr0TaaFXEXzw==",
"dependencies": [
"boolbase",
"css-what",
"domhandler",
"domutils",
"nth-check"
]
},
"css-what@6.2.2": {
"integrity": "sha512-u/O3vwbptzhMs3L1fQE82ZSLHQQfto5gyZzwteVIEyeaY5Fc7R4dapF/BvRoSYFeqfBk4m0V1Vafq5Pjv25wvA=="
},
"dom-serializer@2.0.0": {
"integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==",
"dependencies": [
"domelementtype",
"domhandler",
"entities@4.5.0"
]
},
"domelementtype@2.3.0": {
"integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw=="
},
"domhandler@5.0.3": {
"integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==",
"dependencies": [
"domelementtype"
]
},
"domutils@3.2.2": {
"integrity": "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==",
"dependencies": [
"dom-serializer",
"domelementtype",
"domhandler"
]
},
"eastasianwidth@0.2.0": {
"integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA=="
},
"emoji-regex@8.0.0": {
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
},
"emoji-regex@9.2.2": {
"integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg=="
},
"encoding-sniffer@0.2.1": {
"integrity": "sha512-5gvq20T6vfpekVtqrYQsSCFZ1wEg5+wW0/QaZMWkFr6BqD3NfKs0rLCx4rrVlSWJeZb5NBJgVLswK/w2MWU+Gw==",
"dependencies": [
"iconv-lite",
"whatwg-encoding"
]
},
"entities@4.5.0": {
"integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="
},
"entities@6.0.1": {
"integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g=="
},
"entities@7.0.1": {
"integrity": "sha512-TWrgLOFUQTH994YUyl1yT4uyavY5nNB5muff+RtWaqNVCAK408b5ZnnbNAUEWLTCpum9w6arT70i1XdQ4UeOPA=="
},
"esbuild@0.27.2": {
"integrity": "sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw==",
"optionalDependencies": [
"@esbuild/aix-ppc64",
"@esbuild/android-arm",
"@esbuild/android-arm64",
"@esbuild/android-x64",
"@esbuild/darwin-arm64",
"@esbuild/darwin-x64",
"@esbuild/freebsd-arm64",
"@esbuild/freebsd-x64",
"@esbuild/linux-arm",
"@esbuild/linux-arm64",
"@esbuild/linux-ia32",
"@esbuild/linux-loong64",
"@esbuild/linux-mips64el",
"@esbuild/linux-ppc64",
"@esbuild/linux-riscv64",
"@esbuild/linux-s390x",
"@esbuild/linux-x64",
"@esbuild/netbsd-arm64",
"@esbuild/netbsd-x64",
"@esbuild/openbsd-arm64",
"@esbuild/openbsd-x64",
"@esbuild/openharmony-arm64",
"@esbuild/sunos-x64",
"@esbuild/win32-arm64",
"@esbuild/win32-ia32",
"@esbuild/win32-x64"
],
"scripts": true,
"bin": true
},
"foreground-child@3.3.1": {
"integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==",
"dependencies": [
"cross-spawn",
"signal-exit"
]
},
"fsevents@2.3.3": {
"integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
"os": ["darwin"],
"scripts": true
},
"get-tsconfig@4.13.0": {
"integrity": "sha512-1VKTZJCwBrvbd+Wn3AOgQP/2Av+TfTCOlE4AcRJE72W1ksZXbAx8PPBR9RzgTeSPzlPMHrbANMH3LbltH73wxQ==",
"dependencies": [
"resolve-pkg-maps"
]
},
"glob@11.1.0": {
"integrity": "sha512-vuNwKSaKiqm7g0THUBu2x7ckSs3XJLXE+2ssL7/MfTGPLLcrJQ/4Uq1CjPTtO5cCIiRxqvN6Twy1qOwhL0Xjcw==",
"dependencies": [
"foreground-child",
"jackspeak",
"minimatch",
"minipass",
"package-json-from-dist",
"path-scurry"
],
"bin": true
},
"htmlparser2@10.1.0": {
"integrity": "sha512-VTZkM9GWRAtEpveh7MSF6SjjrpNVNNVJfFup7xTY3UpFtm67foy9HDVXneLtFVt4pMz5kZtgNcvCniNFb1hlEQ==",
"dependencies": [
"domelementtype",
"domhandler",
"domutils",
"entities@7.0.1"
]
},
"iconv-lite@0.6.3": {
"integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==",
"dependencies": [
"safer-buffer"
]
},
"is-fullwidth-code-point@3.0.0": {
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="
},
"isexe@2.0.0": {
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="
},
"jackspeak@4.1.1": {
"integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==",
"dependencies": [
"@isaacs/cliui"
]
},
"json5@2.2.3": {
"integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==",
"bin": true
},
"lru-cache@11.2.5": {
"integrity": "sha512-vFrFJkWtJvJnD5hg+hJvVE8Lh/TcMzKnTgCWmtBipwI5yLX/iX+5UB2tfuyODF5E7k9xEzMdYgGqaSb1c0c5Yw=="
},
"minimatch@10.1.1": {
"integrity": "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==",
"dependencies": [
"@isaacs/brace-expansion"
]
},
"minipass@7.1.2": {
"integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw=="
},
"normalize-path@3.0.0": {
"integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA=="
},
"nth-check@2.1.1": {
"integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==",
"dependencies": [
"boolbase"
]
},
"package-json-from-dist@1.0.1": {
"integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw=="
},
"parse5-htmlparser2-tree-adapter@7.1.0": {
"integrity": "sha512-ruw5xyKs6lrpo9x9rCZqZZnIUntICjQAd0Wsmp396Ul9lN/h+ifgVV1x1gZHi8euej6wTfpqX8j+BFQxF0NS/g==",
"dependencies": [
"domhandler",
"parse5"
]
},
"parse5-parser-stream@7.1.2": {
"integrity": "sha512-JyeQc9iwFLn5TbvvqACIF/VXG6abODeB3Fwmv/TGdLk2LfbWkaySGY72at4+Ty7EkPZj854u4CrICqNk2qIbow==",
"dependencies": [
"parse5"
]
},
"parse5@7.3.0": {
"integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==",
"dependencies": [
"entities@6.0.1"
]
},
"path-key@3.1.1": {
"integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="
},
"path-scurry@2.0.1": {
"integrity": "sha512-oWyT4gICAu+kaA7QWk/jvCHWarMKNs6pXOGWKDTr7cw4IGcUbW+PeTfbaQiLGheFRpjo6O9J0PmyMfQPjH71oA==",
"dependencies": [
"lru-cache",
"minipass"
]
},
"resolve-pkg-maps@1.0.0": {
"integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw=="
},
"safe-stable-stringify@2.5.0": {
"integrity": "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA=="
},
"safer-buffer@2.1.2": {
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"shebang-command@2.0.0": {
"integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
"dependencies": [
"shebang-regex"
]
},
"shebang-regex@3.0.0": {
"integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="
},
"signal-exit@4.1.0": {
"integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw=="
},
"string-width@4.2.3": {
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
"dependencies": [
"emoji-regex@8.0.0",
"is-fullwidth-code-point",
"strip-ansi@6.0.1"
]
},
"string-width@5.1.2": {
"integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==",
"dependencies": [
"eastasianwidth",
"emoji-regex@9.2.2",
"strip-ansi@7.1.2"
]
},
"strip-ansi@6.0.1": {
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"dependencies": [
"ansi-regex@5.0.1"
]
},
"strip-ansi@7.1.2": {
"integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
"dependencies": [
"ansi-regex@6.2.2"
]
},
"ts-json-schema-generator@2.4.0": {
"integrity": "sha512-HbmNsgs58CfdJq0gpteRTxPXG26zumezOs+SB9tgky6MpqiFgQwieCn2MW70+sxpHouZ/w9LW0V6L4ZQO4y1Ug==",
"dependencies": [
"@types/json-schema",
"commander",
"glob",
"json5",
"normalize-path",
"safe-stable-stringify",
"tslib",
"typescript"
],
"bin": true
},
"tslib@2.8.1": {
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="
},
"tsx@4.21.0": {
"integrity": "sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==",
"dependencies": [
"esbuild",
"get-tsconfig"
],
"optionalDependencies": [
"fsevents"
],
"bin": true
},
"typescript@5.9.3": {
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
"bin": true
},
"undici-types@6.21.0": {
"integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="
},
"undici@7.19.1": {
"integrity": "sha512-Gpq0iNm5M6cQWlyHQv9MV+uOj1jWk7LpkoE5vSp/7zjb4zMdAcUD+VL5y0nH4p9EbUklq00eVIIX/XcDHzu5xg=="
},
"whatwg-encoding@3.1.1": {
"integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==",
"dependencies": [
"iconv-lite"
],
"deprecated": true
},
"whatwg-mimetype@4.0.0": {
"integrity": "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg=="
},
"which@2.0.2": {
"integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
"dependencies": [
"isexe"
],
"bin": true
},
"wrap-ansi@7.0.0": {
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
"dependencies": [
"ansi-styles@4.3.0",
"string-width@4.2.3",
"strip-ansi@6.0.1"
]
},
"wrap-ansi@8.1.0": {
"integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==",
"dependencies": [
"ansi-styles@6.2.3",
"string-width@5.1.2",
"strip-ansi@7.1.2"
]
}
},
"workspace": {
"packageJson": {
"dependencies": [
"npm:@anthropic-ai/claude-code@latest",
"npm:@openai/codex@latest",
"npm:@types/node@22",
"npm:cheerio@1",
"npm:ts-json-schema-generator@^2.4.0",
"npm:tsx@^4.19.0",
"npm:typescript@^5.7.0"
]
}
}
}

View file

@ -8,7 +8,11 @@
"extract:opencode": "tsx src/index.ts --agent=opencode",
"extract:claude": "tsx src/index.ts --agent=claude",
"extract:codex": "tsx src/index.ts --agent=codex",
"extract:amp": "tsx src/index.ts --agent=amp"
"extract:amp": "tsx src/index.ts --agent=amp",
"extract:claude-events": "tsx src/claude-event-types.ts",
"extract:claude-events:sdk": "tsx src/claude-event-types-sdk.ts",
"extract:claude-events:cli": "tsx src/claude-event-types-cli.ts",
"extract:claude-events:docs": "tsx src/claude-event-types-docs.ts"
},
"dependencies": {
"ts-json-schema-generator": "^2.4.0",

View file

@ -3,7 +3,7 @@ import { fetchWithCache } from "./cache.js";
import { createNormalizedSchema, type NormalizedSchema } from "./normalize.js";
import type { JSONSchema7 } from "json-schema";
const AMP_DOCS_URL = "https://ampcode.com/manual/appendix";
const AMP_DOCS_URL = "https://ampcode.com/manual/appendix?preview#message-schema";
// Key types we want to extract
const TARGET_TYPES = ["StreamJSONMessage", "AmpOptions", "PermissionRule", "Message", "ToolCall"];

View file

@ -0,0 +1,11 @@
// CLI entry: collect Claude stream-json event types by invoking the `claude` binary.
// Usage: tsx claude-event-types-cli.ts [--prompt=...] [--timeoutMs=...]
import { collectFromCli } from "./claude-event-types.js";

const args = process.argv.slice(2);

// Read the value of a `--flag=value` argument. Slices off the prefix instead of
// using split("="), so values that themselves contain "=" are preserved intact.
function flagValue(prefix: string): string | undefined {
  const match = args.find((arg) => arg.startsWith(prefix));
  return match?.slice(prefix.length);
}

const prompt = flagValue("--prompt=") ?? "Reply with exactly OK.";
// Fall back to 20s when the flag is absent or not a positive number
// (Number(undefined) is NaN, Number("") is 0 — both rejected here).
const parsedTimeout = Number(flagValue("--timeoutMs="));
const timeoutMs = Number.isFinite(parsedTimeout) && parsedTimeout > 0 ? parsedTimeout : 20000;

collectFromCli(prompt, timeoutMs).then((result) => {
  console.log(JSON.stringify(result, null, 2));
});

View file

@ -0,0 +1,8 @@
// CLI entry: collect Claude stream event types from public documentation pages.
// Usage: tsx claude-event-types-docs.ts [--urls=url1,url2,...]
import { collectFromDocs } from "./claude-event-types.js";

const urlsArg = process.argv.slice(2).find((arg) => arg.startsWith("--urls="));
// Take everything after "--urls=" so URLs containing "=" (query strings) survive;
// the previous split("=")[1] truncated such URLs at their first "=".
// Empty entries from stray commas are dropped.
const urls = urlsArg
  ? urlsArg.slice("--urls=".length).split(",").filter((url) => url.length > 0)
  : undefined;

collectFromDocs(urls ?? []).then((result) => {
  console.log(JSON.stringify(result, null, 2));
});

View file

@ -0,0 +1,4 @@
// CLI entry: dump the Claude stream event types declared in the bundled SDK typings.
import { collectFromSdkTypes } from "./claude-event-types.js";

console.log(JSON.stringify(collectFromSdkTypes(), null, 2));

View file

@ -0,0 +1,338 @@
import { readFileSync, existsSync } from "node:fs";
import { join, dirname } from "node:path";
import { fileURLToPath } from "node:url";
import { spawn } from "node:child_process";
import ts from "typescript";
import { load } from "cheerio";
// Result of one extraction source (SDK typings, live CLI run, or docs scrape).
type SourceResult = {
  source: string; // which collector produced this result ("sdk", "cli", or "docs")
  types: string[]; // sorted, de-duplicated event/message type names
  details?: Record<string, string[]>; // per-container breakdown (SDK source only)
  error?: string; // set when the source could not be read (fully)
};
// Candidate locations of the Claude Code SDK's bundled .d.ts file, tried in order.
const SDK_POSSIBLE_PATHS = [
  "node_modules/@anthropic-ai/claude-code/sdk-tools.d.ts",
  "node_modules/@anthropic-ai/claude-code/dist/index.d.ts",
  "node_modules/@anthropic-ai/claude-code/dist/types.d.ts",
  "node_modules/@anthropic-ai/claude-code/index.d.ts",
];
// Documentation pages to scrape when the caller supplies no URLs. Several
// current and historical URL layouts are listed; unreachable ones are skipped
// at fetch time.
const DEFAULT_DOC_URLS = [
  "https://platform.claude.com/docs/en/messages-streaming",
  "https://platform.claude.com/docs/en/api/messages-streaming",
  "https://docs.anthropic.com/claude/reference/messages-streaming",
  "https://docs.anthropic.com/claude/reference/messages-streaming#events",
  "https://docs.anthropic.com/claude/docs/messages-streaming",
];
// Directory containing this module. Prefers `import.meta.dirname` (available in
// recent Node/Deno releases) and falls back to deriving it from `import.meta.url`.
function moduleDir(): string {
  const { dirname: metaDirname } = import.meta as { dirname?: string };
  return typeof metaDirname === "string"
    ? metaDirname
    : dirname(fileURLToPath(import.meta.url));
}
// Locate the Claude Code SDK's type-declaration file, checking this resource
// directory first and then the repository root. Returns null when the SDK is
// not installed under either root.
function findSdkTypesPath(): string | null {
  const base = moduleDir();
  const searchRoots = [join(base, ".."), join(base, "..", "..", "..")];
  for (const root of searchRoots) {
    const hit = SDK_POSSIBLE_PATHS.map((relativePath) => join(root, relativePath)).find(
      (candidate) => existsSync(candidate),
    );
    if (hit) {
      return hit;
    }
  }
  return null;
}
// Flatten a type node into the string literals it contains: a string-literal
// type yields its single value, a union yields the literals of every member,
// and any other node yields nothing.
function extractStringLiterals(node: ts.TypeNode): string[] {
  if (ts.isUnionTypeNode(node)) {
    const collected: string[] = [];
    for (const member of node.types) {
      collected.push(...extractStringLiterals(member));
    }
    return collected;
  }
  if (ts.isLiteralTypeNode(node) && ts.isStringLiteral(node.literal)) {
    return [node.literal.text];
  }
  return [];
}
// Walk up the AST from `node` to the nearest enclosing named interface or type
// alias and return its name; null when the node sits inside neither.
function containerName(node: ts.Node): string | null {
  for (let ancestor: ts.Node | undefined = node; ancestor; ancestor = ancestor.parent) {
    if (ts.isInterfaceDeclaration(ancestor) && ancestor.name) {
      return ancestor.name.text;
    }
    if (ts.isTypeAliasDeclaration(ancestor) && ancestor.name) {
      return ancestor.name.text;
    }
  }
  return null;
}
// Collect Claude event/message type names from the SDK's bundled .d.ts file.
// Scans every `type` property signature whose declared type is a string
// literal (or union of them), keeping only those whose enclosing declaration
// name mentions Event, Stream, or Message. Returns an error result when the
// SDK typings cannot be found on disk.
function collectFromSdkTypes(): SourceResult {
  const typesPath = findSdkTypesPath();
  if (!typesPath) {
    return { source: "sdk", types: [], error: "Claude SDK types not found" };
  }
  const text = readFileSync(typesPath, "utf8");
  const file = ts.createSourceFile(typesPath, text, ts.ScriptTarget.Latest, true);
  const found = new Set<string>();
  const byContainer: Record<string, string[]> = {};
  const walk = (node: ts.Node): void => {
    if (
      ts.isPropertySignature(node) &&
      node.name &&
      ts.isIdentifier(node.name) &&
      node.name.text === "type" &&
      node.type
    ) {
      const literals = extractStringLiterals(node.type);
      if (literals.length > 0) {
        const owner = containerName(node) ?? "anonymous";
        if (/Event|Stream|Message/i.test(owner)) {
          for (const literal of literals) {
            found.add(literal);
          }
          byContainer[owner] = (byContainer[owner] ?? []).concat(literals);
        }
      }
    }
    ts.forEachChild(node, walk);
  };
  walk(file);
  return { source: "sdk", types: Array.from(found).sort(), details: byContainer };
}
// Collect Claude stream-json event types by invoking the `claude` CLI with a
// short prompt and recording every distinct top-level `type` field emitted on
// stdout. Supports both Deno (via Deno.Command) and Node (via child_process
// spawn). The returned promise always resolves; failures are reported through
// `result.error` rather than rejection.
function collectFromCli(prompt: string, timeoutMs: number): Promise<SourceResult> {
  return new Promise((resolve) => {
    const result: SourceResult = { source: "cli", types: [] };
    const types = new Set<string>();
    // Structural probe for a Deno runtime: read `Deno` off globalThis through a
    // narrow cast so this file still type-checks and runs under Node, where
    // `Deno` is undefined.
    const denoGlobal = (globalThis as {
      Deno?: {
        which?: (cmd: string) => string | null;
        Command?: new (
          cmd: string,
          options: { args: string[]; stdout: "piped"; stderr: "piped" },
        ) => { output: () => Promise<{ stdout: Uint8Array; stderr: Uint8Array; code: number }> };
      };
    }).Deno;
    // NOTE(review): this assumes the runtime exposes a `Deno.which` API; stock
    // Deno ships `Deno.Command` but no built-in `which` — confirm which
    // runtime/polyfill provides it, otherwise this check is simply skipped.
    if (denoGlobal?.which && !denoGlobal.which("claude")) {
      result.error = "claude binary not found in PATH";
      resolve(result);
      return;
    }
    // --- Deno path: run the CLI and parse its fully buffered stdout. ---
    if (denoGlobal?.Command) {
      const command = new denoGlobal.Command("claude", {
        args: ["--print", "--output-format", "stream-json", "--verbose", prompt],
        stdout: "piped",
        stderr: "piped",
      });
      try {
        command
          .output()
          .then(({ stdout, stderr, code }) => {
            const text = new TextDecoder().decode(stdout);
            // stream-json prints one JSON object per line; record each `type`.
            for (const line of text.split("\n")) {
              const trimmed = line.trim();
              if (!trimmed) continue;
              try {
                const value = JSON.parse(trimmed);
                if (value && typeof value.type === "string") {
                  types.add(value.type);
                }
              } catch {
                // ignore non-json
              }
            }
            result.types = Array.from(types).sort();
            if (code !== 0) {
              // Prefer the CLI's own stderr text; fall back to the exit code.
              result.error =
                new TextDecoder().decode(stderr).trim() ||
                `claude exited with code ${code}`;
            }
            resolve(result);
          })
          .catch((error) => {
            result.error = error instanceof Error ? error.message : String(error);
            resolve(result);
          });
      } catch (error) {
        // Synchronous failure while constructing or launching the command.
        result.error = error instanceof Error ? error.message : String(error);
        resolve(result);
      }
      return;
    }
    // --- Node path: spawn the CLI and parse stdout incrementally. ---
    let child;
    try {
      child = spawn(
        "claude",
        ["--print", "--output-format", "stream-json", "--verbose", prompt],
        { stdio: ["ignore", "pipe", "pipe"] },
      );
    } catch (error) {
      result.error = error instanceof Error ? error.message : String(error);
      resolve(result);
      return;
    }
    if (!child.stdout || !child.stderr) {
      result.error = "claude stdout/stderr not available";
      resolve(result);
      return;
    }
    let stderr = "";
    // Hard kill after timeoutMs; the `close` handler below still fires and
    // resolves with whatever types were collected before the kill.
    const timer = setTimeout(() => {
      child.kill("SIGKILL");
    }, timeoutMs);
    child.stdout.on("data", (chunk) => {
      // NOTE(review): each data chunk is parsed line-by-line independently, so
      // a JSON object split across chunk boundaries would be dropped as
      // non-JSON — confirm this is acceptable for short runs.
      const text = chunk.toString("utf8");
      for (const line of text.split("\n")) {
        const trimmed = line.trim();
        if (!trimmed) continue;
        try {
          const value = JSON.parse(trimmed);
          if (value && typeof value.type === "string") {
            types.add(value.type);
          }
        } catch {
          // ignore non-json
        }
      }
    });
    child.stderr.on("data", (chunk) => {
      stderr += chunk.toString("utf8");
    });
    child.on("close", (code) => {
      clearTimeout(timer);
      result.types = Array.from(types).sort();
      if (code !== 0) {
        result.error = stderr.trim() || `claude exited with code ${code}`;
      }
      resolve(result);
    });
  });
}
async function collectFromDocs(urls: string[]): Promise<SourceResult> {
if (typeof fetch !== "function") {
return { source: "docs", types: [], error: "fetch is not available in this runtime" };
}
const effectiveUrls = urls.length > 0 ? urls : DEFAULT_DOC_URLS;
const types = new Set<string>();
const extractFromText = (text: string) => {
const typeMatches = text.match(/\"type\"\\s*:\\s*\"([^\"]+)\"/g) ?? [];
for (const match of typeMatches) {
const value = match.split(":")[1]?.trim().replace(/^\"|\"$/g, "");
if (value) types.add(value);
}
const eventMatches = text.match(/event\\s*:\\s*([a-z_]+)/gi) ?? [];
for (const match of eventMatches) {
const value = match.split(":")[1]?.trim();
if (value) types.add(value);
}
};
for (const url of effectiveUrls) {
try {
const res = await fetch(url);
if (!res.ok) {
continue;
}
const html = await res.text();
const $ = load(html);
const blocks = $("pre, code")
.map((_, el) => $(el).text())
.get();
for (const block of blocks) {
extractFromText(block);
}
const nextData = $("#__NEXT_DATA__").text();
if (nextData) {
extractFromText(nextData);
}
extractFromText(html);
} catch {
// ignore per-url errors
}
}
return { source: "docs", types: Array.from(types).sort() };
}
type Args = {
source: "all" | "sdk" | "cli" | "docs";
prompt: string;
timeoutMs: number;
urls: string[];
json: boolean;
};
function parseArgs(): Args {
const args = process.argv.slice(2);
const sourceArg = args.find((arg) => arg.startsWith("--source="));
const promptArg = args.find((arg) => arg.startsWith("--prompt="));
const timeoutArg = args.find((arg) => arg.startsWith("--timeoutMs="));
const urlsArg = args.find((arg) => arg.startsWith("--urls="));
const json = args.includes("--json");
return {
source: (sourceArg?.split("=")[1] as Args["source"]) ?? "all",
prompt: promptArg?.split("=")[1] ?? "Reply with exactly OK.",
timeoutMs: timeoutArg ? Number(timeoutArg.split("=")[1]) : 20000,
urls: urlsArg ? urlsArg.split("=")[1]!.split(",") : DEFAULT_DOC_URLS,
json,
};
}
function summarize(results: SourceResult[]): void {
const counts = results.map((r) => ({ source: r.source, count: r.types.length }));
const max = Math.max(...counts.map((c) => c.count), 0);
const best = counts.filter((c) => c.count === max).map((c) => c.source);
const union = Array.from(
new Set(results.flatMap((r) => r.types))
).sort();
console.log("Claude event type extraction");
console.log("============================");
for (const result of results) {
console.log(`- ${result.source}: ${result.types.length} types${result.error ? " (error)" : ""}`);
}
console.log(`\nMost comprehensive: ${best.join(", ") || "none"}`);
console.log(`Union (${union.length}): ${union.join(", ")}`);
}
async function main(): Promise<void> {
const args = parseArgs();
const results: SourceResult[] = [];
if (args.source === "all" || args.source === "sdk") {
results.push(collectFromSdkTypes());
}
if (args.source === "all" || args.source === "cli") {
results.push(await collectFromCli(args.prompt, args.timeoutMs));
}
if (args.source === "all" || args.source === "docs") {
results.push(await collectFromDocs(args.urls));
}
if (args.json) {
console.log(JSON.stringify({ results }, null, 2));
return;
}
summarize(results);
}
main().catch((error) => {
console.error("Fatal error:", error);
process.exit(1);
});
export { collectFromCli, collectFromDocs, collectFromSdkTypes };

View file

@ -1,92 +1,43 @@
import { createGenerator, type Config } from "ts-json-schema-generator";
import { existsSync, readFileSync } from "fs";
import { join, dirname } from "path";
import { execSync } from "child_process";
import { createNormalizedSchema, type NormalizedSchema } from "./normalize.js";
import type { JSONSchema7 } from "json-schema";
// Try multiple possible paths for the SDK types
const POSSIBLE_PATHS = [
"node_modules/@anthropic-ai/claude-code/sdk-tools.d.ts",
"node_modules/@anthropic-ai/claude-code/dist/index.d.ts",
"node_modules/@anthropic-ai/claude-code/dist/types.d.ts",
"node_modules/@anthropic-ai/claude-code/index.d.ts",
];
// Key types we want to extract
const TARGET_TYPES = [
"ToolInputSchemas",
"AgentInput",
"BashInput",
"FileEditInput",
"FileReadInput",
"FileWriteInput",
"GlobInput",
"GrepInput",
"WebFetchInput",
"WebSearchInput",
"AskUserQuestionInput",
];
function findTypesPath(): string | null {
const baseDir = join(import.meta.dirname, "..", "..", "resources", "agent-schemas");
for (const relativePath of POSSIBLE_PATHS) {
const fullPath = join(baseDir, relativePath);
if (existsSync(fullPath)) {
return fullPath;
}
}
return null;
}
export async function extractClaudeSchema(): Promise<NormalizedSchema> {
console.log("Extracting Claude Code SDK schema...");
const typesPath = findTypesPath();
if (!typesPath) {
console.log(" [warn] Claude Code SDK types not found, using fallback schema");
return createFallbackSchema();
}
console.log(` [found] ${typesPath}`);
const config: Config = {
path: typesPath,
tsconfig: join(import.meta.dirname, "..", "..", "resources", "agent-schemas", "tsconfig.json"),
type: "*",
skipTypeCheck: true,
topRef: false,
expose: "export",
jsDoc: "extended",
};
console.log("Extracting Claude Code schema via CLI...");
try {
const generator = createGenerator(config);
const schema = generator.createSchema(config.type);
// Run claude CLI with --json-schema flag to get the schema
const output = execSync("claude --output-format json --json-schema", {
encoding: "utf-8",
timeout: 30000,
stdio: ["pipe", "pipe", "pipe"],
});
// Parse the JSON output
const parsed = JSON.parse(output);
// Extract definitions from the schema
const definitions: Record<string, JSONSchema7> = {};
if (schema.definitions) {
for (const [name, def] of Object.entries(schema.definitions)) {
if (parsed.definitions) {
for (const [name, def] of Object.entries(parsed.definitions)) {
definitions[name] = def as JSONSchema7;
}
} else if (parsed.$defs) {
for (const [name, def] of Object.entries(parsed.$defs)) {
definitions[name] = def as JSONSchema7;
}
} else {
// The output might be a single schema, use it as the root
definitions["Schema"] = parsed as JSONSchema7;
}
// Verify target types exist
const found = TARGET_TYPES.filter((name) => definitions[name]);
const missing = TARGET_TYPES.filter((name) => !definitions[name]);
if (missing.length > 0) {
console.log(` [warn] Missing expected types: ${missing.join(", ")}`);
}
console.log(` [ok] Extracted ${Object.keys(definitions).length} types (${found.length} target types)`);
console.log(` [ok] Extracted ${Object.keys(definitions).length} types from CLI`);
return createNormalizedSchema("claude", "Claude Code SDK Schema", definitions);
} catch (error) {
console.log(` [error] Schema generation failed: ${error}`);
const errorMessage = error instanceof Error ? error.message : String(error);
console.log(` [warn] CLI extraction failed: ${errorMessage}`);
console.log(" [fallback] Using embedded schema definitions");
return createFallbackSchema();
}

View file

@ -1,88 +1,69 @@
import { createGenerator, type Config } from "ts-json-schema-generator";
import { existsSync } from "fs";
import { execSync } from "child_process";
import { existsSync, readFileSync, rmSync, readdirSync } from "fs";
import { join } from "path";
import { createNormalizedSchema, type NormalizedSchema } from "./normalize.js";
import type { JSONSchema7 } from "json-schema";
// Try multiple possible paths for the SDK types
const POSSIBLE_PATHS = [
"node_modules/@openai/codex/dist/index.d.ts",
"node_modules/@openai/codex/dist/types.d.ts",
"node_modules/@openai/codex/index.d.ts",
];
// Key types we want to extract
const TARGET_TYPES = [
"ThreadEvent",
"ThreadItem",
"CodexOptions",
"ThreadOptions",
"Input",
"ResponseItem",
"FunctionCall",
"Message",
];
function findTypesPath(): string | null {
const baseDir = join(import.meta.dirname, "..", "..", "resources", "agent-schemas");
for (const relativePath of POSSIBLE_PATHS) {
const fullPath = join(baseDir, relativePath);
if (existsSync(fullPath)) {
return fullPath;
}
}
return null;
}
export async function extractCodexSchema(): Promise<NormalizedSchema> {
console.log("Extracting Codex SDK schema...");
console.log("Extracting Codex schema via CLI...");
const typesPath = findTypesPath();
const tempDir = join(import.meta.dirname, "..", ".temp-codex-schemas");
if (!typesPath) {
console.log(" [warn] Codex SDK types not found, using fallback schema");
try {
// Run codex CLI to generate JSON schema
execSync(`codex app-server generate-json-schema --out "${tempDir}"`, {
encoding: "utf-8",
timeout: 30000,
stdio: ["pipe", "pipe", "pipe"],
});
// Read generated schema files from temp directory
const definitions: Record<string, JSONSchema7> = {};
if (existsSync(tempDir)) {
const files = readdirSync(tempDir).filter((f) => f.endsWith(".json"));
for (const file of files) {
const filePath = join(tempDir, file);
const content = readFileSync(filePath, "utf-8");
const schema = JSON.parse(content);
// Extract the name from the file (e.g., "ThreadEvent.json" -> "ThreadEvent")
const name = file.replace(".json", "");
if (schema.definitions) {
for (const [defName, def] of Object.entries(schema.definitions)) {
definitions[defName] = def as JSONSchema7;
}
} else if (schema.$defs) {
for (const [defName, def] of Object.entries(schema.$defs)) {
definitions[defName] = def as JSONSchema7;
}
} else {
definitions[name] = schema as JSONSchema7;
}
}
// Clean up temp directory
rmSync(tempDir, { recursive: true, force: true });
}
if (Object.keys(definitions).length === 0) {
console.log(" [warn] No schemas extracted from CLI, using fallback");
return createFallbackSchema();
}
console.log(` [found] ${typesPath}`);
const config: Config = {
path: typesPath,
tsconfig: join(import.meta.dirname, "..", "..", "resources", "agent-schemas", "tsconfig.json"),
type: "*",
skipTypeCheck: true,
topRef: false,
expose: "export",
jsDoc: "extended",
};
try {
const generator = createGenerator(config);
const schema = generator.createSchema(config.type);
const definitions: Record<string, JSONSchema7> = {};
if (schema.definitions) {
for (const [name, def] of Object.entries(schema.definitions)) {
definitions[name] = def as JSONSchema7;
}
}
// Verify target types exist
const found = TARGET_TYPES.filter((name) => definitions[name]);
const missing = TARGET_TYPES.filter((name) => !definitions[name]);
if (missing.length > 0) {
console.log(` [warn] Missing expected types: ${missing.join(", ")}`);
}
console.log(` [ok] Extracted ${Object.keys(definitions).length} types (${found.length} target types)`);
console.log(` [ok] Extracted ${Object.keys(definitions).length} types from CLI`);
return createNormalizedSchema("codex", "Codex SDK Schema", definitions);
} catch (error) {
console.log(` [error] Schema generation failed: ${error}`);
// Clean up temp directory on error
if (existsSync(tempDir)) {
rmSync(tempDir, { recursive: true, force: true });
}
const errorMessage = error instanceof Error ? error.message : String(error);
console.log(` [warn] CLI extraction failed: ${errorMessage}`);
console.log(" [fallback] Using embedded schema definitions");
return createFallbackSchema();
}

View file

@ -6,8 +6,8 @@ import { extractCodexSchema } from "./codex.js";
import { extractAmpSchema } from "./amp.js";
import { validateSchema, type NormalizedSchema } from "./normalize.js";
const RESOURCE_DIR = join(import.meta.dirname, "..", "..", "resources", "agent-schemas");
const DIST_DIR = join(RESOURCE_DIR, "dist");
const RESOURCE_DIR = join(import.meta.dirname, "..");
const DIST_DIR = join(RESOURCE_DIR, "artifacts", "json-schema");
type AgentName = "opencode" | "claude" | "codex" | "amp";

View file

@ -2,7 +2,8 @@
import fs from "node:fs";
import path from "node:path";
import { spawnSync, execFileSync } from "node:child_process";
import { execFileSync, spawnSync } from "node:child_process";
import readline from "node:readline";
const ENDPOINT_URL =
"https://2a94c6a0ced8d35ea63cddc86c2681e7.r2.cloudflarestorage.com";
@ -32,6 +33,47 @@ const PLATFORM_MAP: Record<string, { pkg: string; os: string; cpu: string; ext:
"aarch64-apple-darwin": { pkg: "darwin-arm64", os: "darwin", cpu: "arm64", ext: "" },
};
const STEPS = [
"confirm-release",
"update-version",
"generate-artifacts",
"git-commit",
"git-push",
"trigger-workflow",
"run-checks",
"publish-crates",
"publish-npm-sdk",
"publish-npm-cli",
"upload-typescript",
"upload-install",
"upload-binaries",
] as const;
const PHASES = ["setup-local", "setup-ci", "complete-ci"] as const;
type Step = (typeof STEPS)[number];
type Phase = (typeof PHASES)[number];
const PHASE_MAP: Record<Phase, Step[]> = {
"setup-local": [
"confirm-release",
"update-version",
"generate-artifacts",
"git-commit",
"git-push",
"trigger-workflow",
],
"setup-ci": ["run-checks"],
"complete-ci": [
"publish-crates",
"publish-npm-sdk",
"publish-npm-cli",
"upload-typescript",
"upload-install",
"upload-binaries",
],
};
function parseArgs(argv: string[]) {
const args = new Map<string, string>();
const flags = new Set<string>();
@ -61,11 +103,7 @@ function run(cmd: string, cmdArgs: string[], options: Record<string, any> = {})
}
}
function runCapture(
cmd: string,
cmdArgs: string[],
options: Record<string, any> = {},
) {
function runCapture(cmd: string, cmdArgs: string[], options: Record<string, any> = {}) {
const result = spawnSync(cmd, cmdArgs, {
stdio: ["ignore", "pipe", "pipe"],
encoding: "utf8",
@ -234,16 +272,55 @@ function uploadContent(content: string, remotePath: string) {
}
}
function updatePackageJson(filePath: string, version: string, updateOptionalDeps = false) {
const pkg = JSON.parse(fs.readFileSync(filePath, "utf8"));
pkg.version = version;
if (updateOptionalDeps && pkg.optionalDependencies) {
for (const dep of Object.keys(pkg.optionalDependencies)) {
pkg.optionalDependencies[dep] = version;
}
}
fs.writeFileSync(filePath, JSON.stringify(pkg, null, 2) + "\n");
}
function updateVersion(rootDir: string, version: string) {
const cargoPath = path.join(rootDir, "Cargo.toml");
let cargoContent = fs.readFileSync(cargoPath, "utf8");
cargoContent = cargoContent.replace(/^version = ".*"/m, `version = "${version}"`);
fs.writeFileSync(cargoPath, cargoContent);
updatePackageJson(path.join(rootDir, "sdks", "typescript", "package.json"), version, true);
updatePackageJson(path.join(rootDir, "sdks", "cli", "package.json"), version, true);
const platformsDir = path.join(rootDir, "sdks", "cli", "platforms");
for (const entry of fs.readdirSync(platformsDir, { withFileTypes: true })) {
if (!entry.isDirectory()) continue;
const pkgPath = path.join(platformsDir, entry.name, "package.json");
if (fs.existsSync(pkgPath)) {
updatePackageJson(pkgPath, version, false);
}
}
}
function buildTypescript(rootDir: string) {
const sdkDir = path.join(rootDir, "sdks", "typescript");
if (!fs.existsSync(sdkDir)) {
throw new Error(`TypeScript SDK not found at ${sdkDir}`);
}
run("npm", ["install"], { cwd: sdkDir });
run("npm", ["run", "build"], { cwd: sdkDir });
run("pnpm", ["install"], { cwd: sdkDir });
run("pnpm", ["run", "build"], { cwd: sdkDir });
return path.join(sdkDir, "dist");
}
function generateArtifacts(rootDir: string) {
const sdkDir = path.join(rootDir, "sdks", "typescript");
run("pnpm", ["run", "generate"], { cwd: sdkDir });
run("cargo", ["check", "-p", "sandbox-agent-universal-schema-gen"], { cwd: rootDir });
run("cargo", ["run", "-p", "sandbox-agent-openapi-gen", "--", "--out", "sdks/openapi/openapi.json"], {
cwd: rootDir,
});
}
function uploadTypescriptArtifacts(rootDir: string, version: string, latest: boolean) {
console.log("==> Building TypeScript SDK");
const distPath = buildTypescript(rootDir);
@ -256,13 +333,7 @@ function uploadTypescriptArtifacts(rootDir: string, version: string, latest: boo
}
function uploadInstallScript(rootDir: string, version: string, latest: boolean) {
const installPath = path.join(
rootDir,
"scripts",
"release",
"static",
"install.sh",
);
const installPath = path.join(rootDir, "scripts", "release", "static", "install.sh");
let installContent = fs.readFileSync(installPath, "utf8");
const uploadForVersion = (versionValue: string, remoteVersion: string) => {
@ -295,7 +366,6 @@ function uploadBinaries(rootDir: string, version: string, latest: boolean) {
}
}
// Pre-release checks
function runChecks(rootDir: string) {
console.log("==> Running Rust checks");
run("cargo", ["fmt", "--all", "--", "--check"], { cwd: rootDir });
@ -307,58 +377,46 @@ function runChecks(rootDir: string) {
run("pnpm", ["run", "build"], { cwd: rootDir });
}
// Crates.io publishing
function publishCrates(rootDir: string, version: string) {
// Update workspace version
const cargoPath = path.join(rootDir, "Cargo.toml");
let cargoContent = fs.readFileSync(cargoPath, "utf8");
cargoContent = cargoContent.replace(/^version = ".*"/m, `version = "${version}"`);
fs.writeFileSync(cargoPath, cargoContent);
updateVersion(rootDir, version);
for (const crate of CRATE_ORDER) {
console.log(`==> Publishing sandbox-agent-${crate}`);
const crateDir = path.join(rootDir, "server", "packages", crate);
run("cargo", ["publish", "--allow-dirty"], { cwd: crateDir });
// Wait for crates.io index propagation
console.log("Waiting 30s for index...");
Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, 30000);
}
}
// npm SDK publishing
function publishNpmSdk(rootDir: string, version: string) {
const sdkDir = path.join(rootDir, "sdks", "typescript");
console.log("==> Publishing TypeScript SDK to npm");
run("npm", ["version", version, "--no-git-tag-version"], { cwd: sdkDir });
run("npm", ["version", version, "--no-git-tag-version", "--allow-same-version"], { cwd: sdkDir });
run("pnpm", ["install"], { cwd: sdkDir });
run("pnpm", ["run", "build"], { cwd: sdkDir });
run("npm", ["publish", "--access", "public"], { cwd: sdkDir });
}
// npm CLI publishing
function publishNpmCli(rootDir: string, version: string) {
const cliDir = path.join(rootDir, "sdks", "cli");
const distDir = path.join(rootDir, "dist");
// Publish platform packages first
for (const [target, info] of Object.entries(PLATFORM_MAP)) {
const platformDir = path.join(cliDir, "platforms", info.pkg);
const binDir = path.join(platformDir, "bin");
fs.mkdirSync(binDir, { recursive: true });
// Copy binary
const srcBinary = path.join(distDir, `sandbox-agent-${target}${info.ext}`);
const dstBinary = path.join(binDir, `sandbox-agent${info.ext}`);
fs.copyFileSync(srcBinary, dstBinary);
if (info.ext !== ".exe") fs.chmodSync(dstBinary, 0o755);
// Update version and publish
console.log(`==> Publishing @sandbox-agent/cli-${info.pkg}`);
run("npm", ["version", version, "--no-git-tag-version"], { cwd: platformDir });
run("npm", ["version", version, "--no-git-tag-version", "--allow-same-version"], { cwd: platformDir });
run("npm", ["publish", "--access", "public"], { cwd: platformDir });
}
// Publish main package (update optionalDeps versions)
console.log("==> Publishing @sandbox-agent/cli");
const pkgPath = path.join(cliDir, "package.json");
const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf8"));
@ -370,7 +428,26 @@ function publishNpmCli(rootDir: string, version: string) {
run("npm", ["publish", "--access", "public"], { cwd: cliDir });
}
function main() {
function validateGit(rootDir: string) {
const status = runCapture("git", ["status", "--porcelain"], { cwd: rootDir });
if (status.trim()) {
throw new Error("Working tree is dirty; commit or stash changes before release.");
}
}
async function confirmRelease(version: string, latest: boolean) {
const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
const answer = await new Promise<string>((resolve) => {
rl.question(`Release ${version} (latest=${latest})? (yes/no): `, resolve);
});
rl.close();
if (answer.toLowerCase() !== "yes" && answer.toLowerCase() !== "y") {
console.log("Release cancelled");
process.exit(0);
}
}
async function main() {
const { args, flags } = parseArgs(process.argv.slice(2));
const versionArg = args.get("--version");
if (!versionArg) {
@ -399,33 +476,155 @@ function main() {
}
}
const phaseArg = args.get("--phase");
const stepsArg = args.get("--only-steps");
const requestedSteps = new Set<Step>();
if (phaseArg || stepsArg) {
if (phaseArg && stepsArg) {
throw new Error("Cannot use both --phase and --only-steps");
}
if (phaseArg) {
const phases = phaseArg.split(",").map((value) => value.trim());
for (const phase of phases) {
if (!PHASES.includes(phase as Phase)) {
throw new Error(`Invalid phase: ${phase}`);
}
for (const step of PHASE_MAP[phase as Phase]) {
requestedSteps.add(step);
}
}
}
if (stepsArg) {
const steps = stepsArg.split(",").map((value) => value.trim());
for (const step of steps) {
if (!STEPS.includes(step as Step)) {
throw new Error(`Invalid step: ${step}`);
}
requestedSteps.add(step as Step);
}
}
}
const rootDir = process.cwd();
const shouldRun = (step: Step) => requestedSteps.has(step);
const hasPhases = requestedSteps.size > 0;
if (!hasPhases) {
if (flags.has("--check")) {
runChecks(process.cwd());
runChecks(rootDir);
}
if (flags.has("--publish-crates")) {
publishCrates(process.cwd(), version);
publishCrates(rootDir, version);
}
if (flags.has("--publish-npm-sdk")) {
publishNpmSdk(process.cwd(), version);
publishNpmSdk(rootDir, version);
}
if (flags.has("--publish-npm-cli")) {
publishNpmCli(process.cwd(), version);
publishNpmCli(rootDir, version);
}
if (flags.has("--upload-typescript")) {
uploadTypescriptArtifacts(process.cwd(), version, latest);
uploadTypescriptArtifacts(rootDir, version, latest);
}
if (flags.has("--upload-install")) {
uploadInstallScript(process.cwd(), version, latest);
uploadInstallScript(rootDir, version, latest);
}
if (flags.has("--upload-binaries")) {
uploadBinaries(rootDir, version, latest);
}
return;
}
if (flags.has("--upload-binaries")) {
uploadBinaries(process.cwd(), version, latest);
if (shouldRun("confirm-release") && !flags.has("--no-confirm")) {
await confirmRelease(version, latest);
}
const validateGitEnabled = !flags.has("--no-validate-git");
if ((shouldRun("git-commit") || shouldRun("git-push")) && validateGitEnabled) {
validateGit(rootDir);
}
if (shouldRun("update-version")) {
console.log("==> Updating versions");
updateVersion(rootDir, version);
}
if (shouldRun("generate-artifacts")) {
console.log("==> Generating OpenAPI and universal schemas");
generateArtifacts(rootDir);
}
if (shouldRun("git-commit")) {
console.log("==> Committing changes");
run("git", ["add", "."], { cwd: rootDir });
run("git", ["commit", "--allow-empty", "-m", `chore(release): update version to ${version}`], {
cwd: rootDir,
});
}
if (shouldRun("git-push")) {
console.log("==> Pushing changes");
const branch = runCapture("git", ["rev-parse", "--abbrev-ref", "HEAD"], { cwd: rootDir });
if (branch === "main") {
run("git", ["push"], { cwd: rootDir });
} else {
run("git", ["push", "-u", "origin", "HEAD"], { cwd: rootDir });
}
}
if (shouldRun("trigger-workflow")) {
console.log("==> Triggering release workflow");
const branch = runCapture("git", ["rev-parse", "--abbrev-ref", "HEAD"], { cwd: rootDir });
const latestFlag = latest ? "true" : "false";
run(
"gh",
[
"workflow",
"run",
".github/workflows/release.yaml",
"-f",
`version=${version}`,
"-f",
`latest=${latestFlag}`,
"--ref",
branch,
],
{ cwd: rootDir },
);
}
if (shouldRun("run-checks")) {
runChecks(rootDir);
}
if (shouldRun("publish-crates")) {
publishCrates(rootDir, version);
}
if (shouldRun("publish-npm-sdk")) {
publishNpmSdk(rootDir, version);
}
if (shouldRun("publish-npm-cli")) {
publishNpmCli(rootDir, version);
}
if (shouldRun("upload-typescript")) {
uploadTypescriptArtifacts(rootDir, version, latest);
}
if (shouldRun("upload-install")) {
uploadInstallScript(rootDir, version, latest);
}
if (shouldRun("upload-binaries")) {
uploadBinaries(rootDir, version, latest);
}
}
main();
main().catch((err) => {
console.error(err);
process.exit(1);
});

View file

@ -157,6 +157,26 @@
}
}
},
"/v1/sessions": {
"get": {
"tags": [
"sessions"
],
"operationId": "list_sessions",
"responses": {
"200": {
"description": "",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/SessionListResponse"
}
}
}
}
}
}
},
"/v1/sessions/{session_id}": {
"post": {
"tags": [
@ -1047,6 +1067,65 @@
}
}
},
"SessionInfo": {
"type": "object",
"required": [
"sessionId",
"agent",
"agentMode",
"permissionMode",
"ended",
"eventCount"
],
"properties": {
"agent": {
"type": "string"
},
"agentMode": {
"type": "string"
},
"agentSessionId": {
"type": "string",
"nullable": true
},
"ended": {
"type": "boolean"
},
"eventCount": {
"type": "integer",
"format": "int64",
"minimum": 0
},
"model": {
"type": "string",
"nullable": true
},
"permissionMode": {
"type": "string"
},
"sessionId": {
"type": "string"
},
"variant": {
"type": "string",
"nullable": true
}
}
},
"SessionListResponse": {
"type": "object",
"required": [
"sessions"
],
"properties": {
"sessions": {
"type": "array",
"items": {
"$ref": "#/components/schemas/SessionInfo"
}
}
}
},
"Started": {
"type": "object",
"properties": {

View file

@ -20,8 +20,8 @@
"dist"
],
"scripts": {
"generate:openapi": "cargo check -p sandbox-agent-openapi-gen && cargo run -p sandbox-agent-openapi-gen -- --out src/generated/openapi.json",
"generate:types": "openapi-typescript src/generated/openapi.json -o src/generated/openapi.ts",
"generate:openapi": "cargo check -p sandbox-agent-openapi-gen && cargo run -p sandbox-agent-openapi-gen -- --out ../openapi/openapi.json",
"generate:types": "openapi-typescript ../openapi/openapi.json -o src/generated/openapi.ts",
"generate": "pnpm run generate:openapi && pnpm run generate:types",
"build": "pnpm run generate && tsc -p tsconfig.json"
},

View file

@ -11,14 +11,21 @@ export type AgentInfo = components["schemas"]["AgentInfo"];
export type AgentListResponse = components["schemas"]["AgentListResponse"];
export type CreateSessionRequest = components["schemas"]["CreateSessionRequest"];
export type CreateSessionResponse = components["schemas"]["CreateSessionResponse"];
export type HealthResponse = components["schemas"]["HealthResponse"];
export type MessageRequest = components["schemas"]["MessageRequest"];
export type EventsQuery = components["schemas"]["EventsQuery"];
export type EventsResponse = components["schemas"]["EventsResponse"];
export type PermissionRequest = components["schemas"]["PermissionRequest"];
export type QuestionReplyRequest = components["schemas"]["QuestionReplyRequest"];
export type QuestionRequest = components["schemas"]["QuestionRequest"];
export type PermissionReplyRequest = components["schemas"]["PermissionReplyRequest"];
export type PermissionReply = components["schemas"]["PermissionReply"];
export type ProblemDetails = components["schemas"]["ProblemDetails"];
export type SessionInfo = components["schemas"]["SessionInfo"];
export type SessionListResponse = components["schemas"]["SessionListResponse"];
export type UniversalEvent = components["schemas"]["UniversalEvent"];
export type UniversalMessage = components["schemas"]["UniversalMessage"];
export type UniversalMessagePart = components["schemas"]["UniversalMessagePart"];
const API_PREFIX = "/v1";
@ -58,6 +65,7 @@ type RequestOptions = {
body?: unknown;
headers?: HeadersInit;
accept?: string;
signal?: AbortSignal;
};
export class SandboxDaemonClient {
@ -108,6 +116,10 @@ export class SandboxDaemonClient {
return this.requestJson("GET", `${API_PREFIX}/agents`);
}
async getHealth(): Promise<HealthResponse> {
return this.requestJson("GET", `${API_PREFIX}/health`);
}
async installAgent(agent: string, request: AgentInstallRequest = {}): Promise<void> {
await this.requestJson("POST", `${API_PREFIX}/agents/${encodeURIComponent(agent)}/install`, {
body: request,
@ -124,6 +136,10 @@ export class SandboxDaemonClient {
});
}
async listSessions(): Promise<SessionListResponse> {
return this.requestJson("GET", `${API_PREFIX}/sessions`);
}
async postMessage(sessionId: string, request: MessageRequest): Promise<void> {
await this.requestJson("POST", `${API_PREFIX}/sessions/${encodeURIComponent(sessionId)}/messages`, {
body: request,
@ -136,15 +152,20 @@ export class SandboxDaemonClient {
});
}
async getEventsSse(sessionId: string, query?: EventsQuery): Promise<Response> {
async getEventsSse(sessionId: string, query?: EventsQuery, signal?: AbortSignal): Promise<Response> {
return this.requestRaw("GET", `${API_PREFIX}/sessions/${encodeURIComponent(sessionId)}/events/sse`, {
query,
accept: "text/event-stream",
signal,
});
}
async *streamEvents(sessionId: string, query?: EventsQuery): AsyncGenerator<UniversalEvent, void, void> {
const response = await this.getEventsSse(sessionId, query);
async *streamEvents(
sessionId: string,
query?: EventsQuery,
signal?: AbortSignal,
): AsyncGenerator<UniversalEvent, void, void> {
const response = await this.getEventsSse(sessionId, query, signal);
if (!response.body) {
throw new Error("SSE stream is not readable in this environment.");
}
@ -249,7 +270,7 @@ export class SandboxDaemonClient {
headers.set("Accept", options.accept);
}
const init: RequestInit = { method, headers };
const init: RequestInit = { method, headers, signal: options.signal };
if (options.body !== undefined) {
headers.set("Content-Type", "application/json");
init.body = JSON.stringify(options.body);

View file

@ -14,12 +14,19 @@ export type {
CreateSessionResponse,
EventsQuery,
EventsResponse,
HealthResponse,
MessageRequest,
PermissionRequest,
PermissionReply,
PermissionReplyRequest,
ProblemDetails,
QuestionRequest,
QuestionReplyRequest,
SessionInfo,
SessionListResponse,
UniversalEvent,
UniversalMessage,
UniversalMessagePart,
SandboxDaemonClientOptions,
SandboxDaemonConnectOptions,
} from "./client.js";

View file

@ -50,8 +50,9 @@ export async function spawnSandboxDaemon(
const net = await import("node:net");
const { createRequire } = await import("node:module");
const host = options.host ?? "127.0.0.1";
const port = options.port ?? (await getFreePort(net, host));
const bindHost = options.host ?? "127.0.0.1";
const port = options.port ?? (await getFreePort(net, bindHost));
const connectHost = bindHost === "0.0.0.0" || bindHost === "::" ? "127.0.0.1" : bindHost;
const token = options.token ?? crypto.randomBytes(24).toString("hex");
const timeoutMs = options.timeoutMs ?? 15_000;
const logMode: SandboxDaemonSpawnLogMode = options.log ?? "inherit";
@ -67,7 +68,7 @@ export async function spawnSandboxDaemon(
}
const stdio = logMode === "inherit" ? "inherit" : logMode === "silent" ? "ignore" : "pipe";
const args = ["--host", host, "--port", String(port), "--token", token];
const args = ["--host", bindHost, "--port", String(port), "--token", token];
const child = spawn(binaryPath, args, {
stdio,
env: {
@ -77,8 +78,8 @@ export async function spawnSandboxDaemon(
});
const cleanup = registerProcessCleanup(child);
const baseUrl = `http://${host}:${port}`;
const ready = waitForHealth(baseUrl, fetcher ?? globalThis.fetch, timeoutMs, child);
const baseUrl = `http://${connectHost}:${port}`;
const ready = waitForHealth(baseUrl, fetcher ?? globalThis.fetch, timeoutMs, child, token);
await ready;
@ -161,6 +162,7 @@ async function waitForHealth(
fetcher: typeof fetch | undefined,
timeoutMs: number,
child: ChildProcess,
token: string,
): Promise<void> {
if (!fetcher) {
throw new Error("Fetch API is not available; provide a fetch implementation.");
@ -173,7 +175,9 @@ async function waitForHealth(
throw new Error("sandbox-agent exited before becoming healthy.");
}
try {
const response = await fetcher(`${baseUrl}/v1/health`);
const response = await fetcher(`${baseUrl}/v1/health`, {
headers: { Authorization: `Bearer ${token}` },
});
if (response.ok) {
return;
}

1
server/AGENTS.md Symbolic link
View file

@ -0,0 +1 @@
CLAUDE.md

68
server/CLAUDE.md Normal file
View file

@ -0,0 +1,68 @@
# Server Testing
## Snapshot tests
The HTTP/SSE snapshot suite lives in:
- `server/packages/sandbox-agent/tests/http_sse_snapshots.rs`
Snapshots are written to:
- `server/packages/sandbox-agent/tests/snapshots/`
## Agent selection
`SANDBOX_TEST_AGENTS` controls which agents run. It accepts a comma-separated list or `all`.
If it is **not set**, tests will auto-detect installed agents by checking:
- binaries on `PATH`, and
- the default install dir (`$XDG_DATA_HOME/sandbox-agent/bin` or `./.sandbox-agent/bin`)
If no agents are found, tests fail with a clear error.
## Credential handling
Credentials are pulled from the host by default via `extract_all_credentials`:
- environment variables (e.g. `ANTHROPIC_API_KEY`, `OPENAI_API_KEY`)
- local CLI configs (Claude/Codex/Amp/OpenCode)
You can override host credentials for tests with:
- `SANDBOX_TEST_ANTHROPIC_API_KEY`
- `SANDBOX_TEST_OPENAI_API_KEY`
If `SANDBOX_TEST_AGENTS` includes an agent that requires a provider credential and it is missing,
tests fail before starting.
## Credential health checks
Before running agent tests, credentials are validated with minimal API calls:
- Anthropic: `GET https://api.anthropic.com/v1/models`
- `x-api-key` for API keys
- `Authorization: Bearer` for OAuth tokens
- `anthropic-version: 2023-06-01`
- OpenAI: `GET https://api.openai.com/v1/models` with `Authorization: Bearer`
401/403 yields a hard failure (`invalid credentials`). Other non-2xx responses or network
errors fail with a health-check error.
Health checks run in a blocking thread to avoid Tokio runtime drop errors inside async tests.
## Snapshot stability
To keep snapshots deterministic:
- Event streams are truncated after the first assistant or error event.
- Permission flow snapshots are truncated after the permission request (or first assistant) event.
- Unknown events are preserved as `kind: unknown` (raw payload in universal schema).
## Typical commands
Run only Claude snapshots:
```
SANDBOX_TEST_AGENTS=claude cargo test -p sandbox-agent-core --test http_sse_snapshots
```
Run all detected agents:
```
cargo test -p sandbox-agent-core --test http_sse_snapshots
```
## Universal Schema
When you modify the agent conversion code in `server/packages/universal-agent-schema/src/agents/`, or add or change properties on the universal schema, also update the feature matrix in `README.md` so that it reflects which agents support which features.

View file

@ -6,7 +6,7 @@ authors.workspace = true
license.workspace = true
[dependencies]
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
dirs = "5.0"
time = { version = "0.3", features = ["parsing", "formatting"] }
serde.workspace = true
serde_json.workspace = true
dirs.workspace = true
time.workspace = true

View file

@ -6,15 +6,15 @@ authors.workspace = true
license.workspace = true
[dependencies]
thiserror = "1.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
sandbox-agent-agent-credentials = { path = "../agent-credentials" }
reqwest = { version = "0.11", features = ["blocking", "json", "rustls-tls"] }
flate2 = "1.0"
tar = "0.4"
zip = { version = "0.6", default-features = false, features = ["deflate"] }
url = "2.5"
dirs = "5.0"
tempfile = "3.10"
time = { version = "0.3", features = ["parsing", "formatting"] }
sandbox-agent-agent-credentials.workspace = true
thiserror.workspace = true
serde.workspace = true
serde_json.workspace = true
reqwest.workspace = true
flate2.workspace = true
tar.workspace = true
zip.workspace = true
url.workspace = true
dirs.workspace = true
tempfile.workspace = true
time.workspace = true

View file

@ -1,9 +1,17 @@
use std::env;
use std::path::PathBuf;
use std::time::Duration;
use reqwest::blocking::Client;
use reqwest::header::{HeaderMap, HeaderValue, AUTHORIZATION, CONTENT_TYPE};
use reqwest::StatusCode;
use thiserror::Error;
use crate::agents::AgentId;
use crate::credentials::{AuthType, ExtractedCredentials, ProviderCredentials};
use crate::credentials::{
extract_all_credentials, AuthType, CredentialExtractionOptions, ExtractedCredentials,
ProviderCredentials,
};
#[derive(Debug, Clone)]
pub struct TestAgentConfig {
@ -13,20 +21,36 @@ pub struct TestAgentConfig {
#[derive(Debug, Error)]
pub enum TestAgentConfigError {
#[error("no test agents configured (set SANDBOX_TEST_AGENTS)")]
#[error("no test agents detected (install agents or set SANDBOX_TEST_AGENTS)")]
NoAgentsConfigured,
#[error("unknown agent name: {0}")]
UnknownAgent(String),
#[error("missing credentials for {agent}: {missing}")]
MissingCredentials { agent: AgentId, missing: String },
#[error("invalid credentials for {provider} (status {status})")]
InvalidCredentials { provider: String, status: u16 },
#[error("credential health check failed for {provider}: {message}")]
HealthCheckFailed { provider: String, message: String },
}
const AGENTS_ENV: &str = "SANDBOX_TEST_AGENTS";
const ANTHROPIC_ENV: &str = "SANDBOX_TEST_ANTHROPIC_API_KEY";
const OPENAI_ENV: &str = "SANDBOX_TEST_OPENAI_API_KEY";
const ANTHROPIC_MODELS_URL: &str = "https://api.anthropic.com/v1/models";
const OPENAI_MODELS_URL: &str = "https://api.openai.com/v1/models";
const ANTHROPIC_VERSION: &str = "2023-06-01";
/// Per-run memo of which provider credentials have already passed a health
/// check, so each provider is validated at most once even when several
/// agents share the same credential.
#[derive(Default)]
struct HealthCheckCache {
    // true once the Anthropic credential passed its health check
    anthropic_ok: bool,
    // true once the OpenAI credential passed its health check
    openai_ok: bool,
}
pub fn test_agents_from_env() -> Result<Vec<TestAgentConfig>, TestAgentConfigError> {
let raw_agents = env::var(AGENTS_ENV).unwrap_or_default();
let mut agents = if raw_agents.trim().is_empty() {
detect_system_agents()
} else {
let mut agents = Vec::new();
for entry in raw_agents.split(',') {
let trimmed = entry.trim();
@ -46,43 +70,72 @@ pub fn test_agents_from_env() -> Result<Vec<TestAgentConfig>, TestAgentConfigErr
.ok_or_else(|| TestAgentConfigError::UnknownAgent(trimmed.to_string()))?;
agents.push(agent);
}
agents
};
agents.sort_by(|a, b| a.as_str().cmp(b.as_str()));
agents.dedup();
if agents.is_empty() {
return Err(TestAgentConfigError::NoAgentsConfigured);
}
let anthropic_key = read_env_key(ANTHROPIC_ENV);
let openai_key = read_env_key(OPENAI_ENV);
let extracted = extract_all_credentials(&CredentialExtractionOptions::new());
let anthropic_cred = read_env_key(ANTHROPIC_ENV)
.map(|key| ProviderCredentials {
api_key: key,
source: "sandbox-test-env".to_string(),
auth_type: AuthType::ApiKey,
provider: "anthropic".to_string(),
})
.or_else(|| extracted.anthropic.clone());
let openai_cred = read_env_key(OPENAI_ENV)
.map(|key| ProviderCredentials {
api_key: key,
source: "sandbox-test-env".to_string(),
auth_type: AuthType::ApiKey,
provider: "openai".to_string(),
})
.or_else(|| extracted.openai.clone());
let mut health_cache = HealthCheckCache::default();
let mut configs = Vec::new();
for agent in agents {
let credentials = match agent {
AgentId::Claude | AgentId::Amp => {
let anthropic_key = anthropic_key.clone().ok_or_else(|| {
let anthropic_cred = anthropic_cred.clone().ok_or_else(|| {
TestAgentConfigError::MissingCredentials {
agent,
missing: ANTHROPIC_ENV.to_string(),
}
})?;
credentials_with(anthropic_key, None)
ensure_anthropic_ok(&mut health_cache, &anthropic_cred)?;
credentials_with(Some(anthropic_cred), None)
}
AgentId::Codex => {
let openai_key = openai_key.clone().ok_or_else(|| {
let openai_cred = openai_cred.clone().ok_or_else(|| {
TestAgentConfigError::MissingCredentials {
agent,
missing: OPENAI_ENV.to_string(),
}
})?;
credentials_with(None, Some(openai_key))
ensure_openai_ok(&mut health_cache, &openai_cred)?;
credentials_with(None, Some(openai_cred))
}
AgentId::Opencode => {
if anthropic_key.is_none() && openai_key.is_none() {
if anthropic_cred.is_none() && openai_cred.is_none() {
return Err(TestAgentConfigError::MissingCredentials {
agent,
missing: format!("{ANTHROPIC_ENV} or {OPENAI_ENV}"),
});
}
credentials_with(anthropic_key.clone(), openai_key.clone())
if let Some(cred) = anthropic_cred.as_ref() {
ensure_anthropic_ok(&mut health_cache, cred)?;
}
if let Some(cred) = openai_cred.as_ref() {
ensure_openai_ok(&mut health_cache, cred)?;
}
credentials_with(anthropic_cred.clone(), openai_cred.clone())
}
};
configs.push(TestAgentConfig { agent, credentials });
@ -91,6 +144,178 @@ pub fn test_agents_from_env() -> Result<Vec<TestAgentConfig>, TestAgentConfigErr
Ok(configs)
}
/// Validate the Anthropic credential once per run; subsequent calls are
/// no-ops thanks to the cache, avoiding duplicate network round-trips.
fn ensure_anthropic_ok(
    cache: &mut HealthCheckCache,
    credentials: &ProviderCredentials,
) -> Result<(), TestAgentConfigError> {
    if !cache.anthropic_ok {
        health_check_anthropic(credentials)?;
        cache.anthropic_ok = true;
    }
    Ok(())
}
/// Validate the OpenAI credential once per run; the cache suppresses
/// repeat health checks for later agents using the same credential.
fn ensure_openai_ok(
    cache: &mut HealthCheckCache,
    credentials: &ProviderCredentials,
) -> Result<(), TestAgentConfigError> {
    if !cache.openai_ok {
        health_check_openai(credentials)?;
        cache.openai_ok = true;
    }
    Ok(())
}
/// Perform a minimal authenticated request against the Anthropic models
/// endpoint to confirm the credential works before agent tests start.
///
/// Auth header depends on the credential kind: API keys are sent via
/// `x-api-key`, OAuth tokens via `Authorization: Bearer`. The request runs
/// on a dedicated thread (see `run_blocking_check`) so the blocking reqwest
/// client never executes inside a Tokio runtime.
fn health_check_anthropic(credentials: &ProviderCredentials) -> Result<(), TestAgentConfigError> {
    // Clone so the credential can move into the 'static closure.
    let credentials = credentials.clone();
    run_blocking_check("anthropic", move || {
        let client = Client::builder()
            .timeout(Duration::from_secs(10))
            .build()
            .map_err(|err| TestAgentConfigError::HealthCheckFailed {
                provider: "anthropic".to_string(),
                message: err.to_string(),
            })?;
        let mut headers = HeaderMap::new();
        // Choose the auth header based on how this credential authenticates.
        match credentials.auth_type {
            AuthType::ApiKey => {
                headers.insert(
                    "x-api-key",
                    HeaderValue::from_str(&credentials.api_key).map_err(|_| {
                        TestAgentConfigError::HealthCheckFailed {
                            provider: "anthropic".to_string(),
                            message: "invalid anthropic api key header value".to_string(),
                        }
                    })?,
                );
            }
            AuthType::Oauth => {
                let value = format!("Bearer {}", credentials.api_key);
                headers.insert(
                    AUTHORIZATION,
                    HeaderValue::from_str(&value).map_err(|_| {
                        TestAgentConfigError::HealthCheckFailed {
                            provider: "anthropic".to_string(),
                            message: "invalid anthropic oauth header value".to_string(),
                        }
                    })?,
                );
            }
        }
        // Anthropic requires an explicit API version header on every request.
        headers.insert(
            "anthropic-version",
            HeaderValue::from_static(ANTHROPIC_VERSION),
        );
        headers.insert(CONTENT_TYPE, HeaderValue::from_static("application/json"));
        let response = client
            .get(ANTHROPIC_MODELS_URL)
            .headers(headers)
            .send()
            .map_err(|err| TestAgentConfigError::HealthCheckFailed {
                provider: "anthropic".to_string(),
                message: err.to_string(),
            })?;
        handle_health_response("anthropic", response)
    })
}
/// Confirm the OpenAI credential by listing models with a bearer token.
/// Runs on a helper thread (via `run_blocking_check`) so the blocking
/// client stays off any Tokio runtime driving the tests.
fn health_check_openai(credentials: &ProviderCredentials) -> Result<(), TestAgentConfigError> {
    let owned = credentials.clone();
    run_blocking_check("openai", move || {
        // Shared constructor for the failure variant used by both steps.
        let failure = |message: String| TestAgentConfigError::HealthCheckFailed {
            provider: "openai".to_string(),
            message,
        };
        let client = Client::builder()
            .timeout(Duration::from_secs(10))
            .build()
            .map_err(|err| failure(err.to_string()))?;
        let response = client
            .get(OPENAI_MODELS_URL)
            .bearer_auth(&owned.api_key)
            .send()
            .map_err(|err| failure(err.to_string()))?;
        handle_health_response("openai", response)
    })
}
/// Interpret a provider health-check HTTP response.
///
/// - 2xx: credential is healthy.
/// - 401/403: credential was rejected → `InvalidCredentials` (hard failure).
/// - anything else: `HealthCheckFailed` carrying a short body summary.
fn handle_health_response(
    provider: &str,
    response: reqwest::blocking::Response,
) -> Result<(), TestAgentConfigError> {
    let status = response.status();
    if status.is_success() {
        return Ok(());
    }
    if status == StatusCode::UNAUTHORIZED || status == StatusCode::FORBIDDEN {
        return Err(TestAgentConfigError::InvalidCredentials {
            provider: provider.to_string(),
            status: status.as_u16(),
        });
    }
    let body = response.text().unwrap_or_default();
    // Keep only a short prefix of the body for the error message. Truncate by
    // characters, not bytes: `String::truncate(200)` panics when byte 200 is
    // not a UTF-8 char boundary, which a multibyte provider error body can hit.
    let summary: String = body.trim().chars().take(200).collect();
    Err(TestAgentConfigError::HealthCheckFailed {
        provider: provider.to_string(),
        message: format!("status {}: {}", status.as_u16(), summary),
    })
}
/// Run `check` on a freshly spawned OS thread and wait for it to complete.
///
/// Keeps blocking HTTP work off the async test runtime; a panic inside the
/// check is converted into a `HealthCheckFailed` error instead of unwinding
/// into the caller.
fn run_blocking_check<F>(
    provider: &str,
    check: F,
) -> Result<(), TestAgentConfigError>
where
    F: FnOnce() -> Result<(), TestAgentConfigError> + Send + 'static,
{
    match std::thread::spawn(check).join() {
        Ok(outcome) => outcome,
        Err(_) => Err(TestAgentConfigError::HealthCheckFailed {
            provider: provider.to_string(),
            message: "health check panicked".to_string(),
        }),
    }
}
/// Auto-detect usable agents on this machine: an agent counts as installed
/// when its binary is found on `PATH` or in the default install directory.
fn detect_system_agents() -> Vec<AgentId> {
    let install_dir = default_install_dir();
    let mut detected = Vec::new();
    for agent in [AgentId::Claude, AgentId::Codex, AgentId::Opencode, AgentId::Amp] {
        let binary = agent.binary_name();
        if find_in_path(binary) || install_dir.join(binary).exists() {
            detected.push(agent);
        }
    }
    detected
}
/// Default agent-binary install location: `<platform data dir>/sandbox-agent/bin`,
/// falling back to `./.sandbox-agent/bin` when no data dir is available.
fn default_install_dir() -> PathBuf {
    match dirs::data_dir() {
        Some(data_dir) => data_dir.join("sandbox-agent").join("bin"),
        None => PathBuf::from(".").join(".sandbox-agent").join("bin"),
    }
}
/// Return true when `binary_name` exists in any directory listed in the
/// `PATH` environment variable. Only existence is checked, not whether the
/// file is executable.
fn find_in_path(binary_name: &str) -> bool {
    env::var_os("PATH")
        .map(|path_var| env::split_paths(&path_var).any(|dir| dir.join(binary_name).exists()))
        .unwrap_or(false)
}
fn read_env_key(name: &str) -> Option<String> {
env::var(name).ok().and_then(|value| {
let trimmed = value.trim().to_string();
@ -103,25 +328,11 @@ fn read_env_key(name: &str) -> Option<String> {
}
fn credentials_with(
anthropic_key: Option<String>,
openai_key: Option<String>,
anthropic_cred: Option<ProviderCredentials>,
openai_cred: Option<ProviderCredentials>,
) -> ExtractedCredentials {
let mut credentials = ExtractedCredentials::default();
if let Some(key) = anthropic_key {
credentials.anthropic = Some(ProviderCredentials {
api_key: key,
source: "sandbox-test-env".to_string(),
auth_type: AuthType::ApiKey,
provider: "anthropic".to_string(),
});
}
if let Some(key) = openai_key {
credentials.openai = Some(ProviderCredentials {
api_key: key,
source: "sandbox-test-env".to_string(),
auth_type: AuthType::ApiKey,
provider: "openai".to_string(),
});
}
credentials.anthropic = anthropic_cred;
credentials.openai = openai_cred;
credentials
}

View file

@ -1,18 +0,0 @@
[package]
name = "sandbox-agent-agent-schema"
version.workspace = true
edition.workspace = true
authors.workspace = true
license.workspace = true
[dependencies]
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
regress = "0.10"
[build-dependencies]
typify = "0.4"
serde_json = "1.0"
schemars = "0.8"
prettyplease = "0.2"
syn = "2.0"

View file

@ -1,76 +0,0 @@
//! Generated types from AI coding agent JSON schemas.
//!
//! This crate provides Rust types for:
//! - OpenCode SDK
//! - Claude Code SDK
//! - Codex SDK
//! - AMP Code SDK
pub mod opencode {
//! OpenCode SDK types extracted from OpenAPI 3.1.1 spec.
include!(concat!(env!("OUT_DIR"), "/opencode.rs"));
}
pub mod claude {
//! Claude Code SDK types extracted from TypeScript definitions.
include!(concat!(env!("OUT_DIR"), "/claude.rs"));
}
pub mod codex {
//! Codex SDK types.
include!(concat!(env!("OUT_DIR"), "/codex.rs"));
}
pub mod amp {
//! AMP Code SDK types.
include!(concat!(env!("OUT_DIR"), "/amp.rs"));
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_claude_bash_input() {
let input = claude::BashInput {
command: "ls -la".to_string(),
timeout: Some(5000.0),
description: Some("List files".to_string()),
run_in_background: None,
simulated_sed_edit: None,
dangerously_disable_sandbox: None,
};
let json = serde_json::to_string(&input).unwrap();
assert!(json.contains("ls -la"));
let parsed: claude::BashInput = serde_json::from_str(&json).unwrap();
assert_eq!(parsed.command, "ls -la");
}
#[test]
fn test_codex_thread_event() {
let event = codex::ThreadEvent {
type_: codex::ThreadEventType::ThreadCreated,
thread_id: Some("thread-123".to_string()),
item: None,
error: serde_json::Map::new(),
};
let json = serde_json::to_string(&event).unwrap();
assert!(json.contains("thread.created"));
}
#[test]
fn test_amp_message() {
let msg = amp::Message {
role: amp::MessageRole::User,
content: "Hello".to_string(),
tool_calls: vec![],
};
let json = serde_json::to_string(&msg).unwrap();
assert!(json.contains("user"));
assert!(json.contains("Hello"));
}
}

View file

@ -6,8 +6,8 @@ authors.workspace = true
license.workspace = true
[dependencies]
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
thiserror = "1.0"
schemars = "0.8"
utoipa = "4.2"
serde.workspace = true
serde_json.workspace = true
thiserror.workspace = true
schemars.workspace = true
utoipa.workspace = true

View file

@ -0,0 +1,19 @@
[package]
name = "sandbox-agent-extracted-agent-schemas"
version.workspace = true
edition.workspace = true
authors.workspace = true
license.workspace = true
[dependencies]
serde.workspace = true
serde_json.workspace = true
regress.workspace = true
chrono.workspace = true
[build-dependencies]
typify.workspace = true
serde_json.workspace = true
schemars.workspace = true
prettyplease.workspace = true
syn.workspace = true

View file

@ -4,7 +4,7 @@ use std::path::Path;
fn main() {
let out_dir = std::env::var("OUT_DIR").unwrap();
let schema_dir = Path::new("../../../resources/agent-schemas/dist");
let schema_dir = Path::new("../../../resources/agent-schemas/artifacts/json-schema");
let schemas = [
("opencode", "opencode.json"),

View file

@ -0,0 +1,111 @@
//! Generated types from AI coding agent JSON schemas.
//!
//! This crate provides Rust types for:
//! - OpenCode SDK
//! - Claude Code SDK
//! - Codex SDK
//! - AMP Code SDK
pub mod opencode {
//! OpenCode SDK types extracted from OpenAPI 3.1.1 spec.
include!(concat!(env!("OUT_DIR"), "/opencode.rs"));
}
pub mod claude {
//! Claude Code SDK types extracted from TypeScript definitions.
include!(concat!(env!("OUT_DIR"), "/claude.rs"));
}
pub mod codex {
//! Codex SDK types.
include!(concat!(env!("OUT_DIR"), "/codex.rs"));
}
pub mod amp {
//! AMP Code SDK types.
include!(concat!(env!("OUT_DIR"), "/amp.rs"));
}
#[cfg(test)]
mod tests {
    use super::*;

    // Serialize/deserialize round-trip for the generated Claude `BashInput`
    // type: the command must survive a JSON round-trip intact.
    #[test]
    fn test_claude_bash_input() {
        let input = claude::BashInput {
            command: "ls -la".to_string(),
            timeout: Some(5000.0),
            working_directory: None,
        };
        let json = serde_json::to_string(&input).unwrap();
        assert!(json.contains("ls -la"));
        let parsed: claude::BashInput = serde_json::from_str(&json).unwrap();
        assert_eq!(parsed.command, "ls -la");
    }

    // Pins the wire format of Codex server notifications: the
    // "item/completed" method name and camelCase item tags must appear.
    #[test]
    fn test_codex_server_notification() {
        // Test ItemCompletedNotification with AgentMessage
        let notification = codex::ServerNotification::ItemCompleted(
            codex::ItemCompletedNotification {
                item: codex::ThreadItem::AgentMessage {
                    id: "msg-123".to_string(),
                    text: "Hello from Codex".to_string(),
                },
                thread_id: "thread-123".to_string(),
                turn_id: "turn-456".to_string(),
            }
        );
        let json = serde_json::to_string(&notification).unwrap();
        assert!(json.contains("item/completed"));
        assert!(json.contains("Hello from Codex"));
        assert!(json.contains("agentMessage"));
    }

    // Exercises two `ThreadItem` variants to pin their serialized tag names.
    #[test]
    fn test_codex_thread_item_variants() {
        // Test UserMessage variant
        let user_msg = codex::ThreadItem::UserMessage {
            content: vec![codex::UserInput::Text {
                text: "Hello".to_string(),
                text_elements: vec![],
            }],
            id: "user-1".to_string(),
        };
        let json = serde_json::to_string(&user_msg).unwrap();
        assert!(json.contains("userMessage"));
        assert!(json.contains("Hello"));

        // Test CommandExecution variant
        let cmd = codex::ThreadItem::CommandExecution {
            aggregated_output: Some("output".to_string()),
            command: "ls -la".to_string(),
            command_actions: vec![],
            cwd: "/tmp".to_string(),
            duration_ms: Some(100),
            exit_code: Some(0),
            id: "cmd-1".to_string(),
            process_id: None,
            status: codex::CommandExecutionStatus::Completed,
        };
        let json = serde_json::to_string(&cmd).unwrap();
        assert!(json.contains("commandExecution"));
        assert!(json.contains("ls -la"));
    }

    // Basic serialization check for the AMP `Message` type.
    #[test]
    fn test_amp_message() {
        let msg = amp::Message {
            role: amp::MessageRole::User,
            content: "Hello".to_string(),
            tool_calls: vec![],
        };
        let json = serde_json::to_string(&msg).unwrap();
        assert!(json.contains("user"));
        assert!(json.contains("Hello"));
    }
}

View file

@ -7,11 +7,11 @@ license.workspace = true
build = "build.rs"
[dependencies]
tracing = "0.1"
tracing-logfmt = "0.3"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
tracing.workspace = true
tracing-logfmt.workspace = true
tracing-subscriber.workspace = true
[build-dependencies]
sandbox-agent-core = { path = "../sandbox-agent" }
serde_json = "1.0"
utoipa = "4.2"
sandbox-agent-core.workspace = true
serde_json.workspace = true
utoipa.workspace = true

View file

@ -10,30 +10,30 @@ name = "sandbox-agent"
path = "src/main.rs"
[dependencies]
thiserror = "1.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
axum = "0.7"
clap = { version = "4.5", features = ["derive"] }
futures = "0.3"
sandbox-agent-error = { path = "../error" }
sandbox-agent-agent-management = { path = "../agent-management" }
sandbox-agent-agent-credentials = { path = "../agent-credentials" }
sandbox-agent-universal-agent-schema = { path = "../universal-agent-schema" }
reqwest = { version = "0.11", features = ["blocking", "json", "rustls-tls", "stream"] }
dirs = "5.0"
time = { version = "0.3", features = ["parsing", "formatting"] }
tokio = { version = "1.36", features = ["macros", "rt-multi-thread", "signal", "time"] }
tokio-stream = { version = "0.1", features = ["sync"] }
tower-http = { version = "0.5", features = ["cors", "trace"] }
utoipa = { version = "4.2", features = ["axum_extras"] }
schemars = "0.8"
tracing = "0.1"
tracing-logfmt = "0.3"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
sandbox-agent-error.workspace = true
sandbox-agent-agent-management.workspace = true
sandbox-agent-agent-credentials.workspace = true
sandbox-agent-universal-agent-schema.workspace = true
thiserror.workspace = true
serde.workspace = true
serde_json.workspace = true
axum.workspace = true
clap.workspace = true
futures.workspace = true
reqwest.workspace = true
dirs.workspace = true
time.workspace = true
tokio.workspace = true
tokio-stream.workspace = true
tower-http.workspace = true
utoipa.workspace = true
schemars.workspace = true
tracing.workspace = true
tracing-logfmt.workspace = true
tracing-subscriber.workspace = true
[dev-dependencies]
http-body-util = "0.1"
insta = "1.41"
tempfile = "3.10"
tower = "0.4"
http-body-util.workspace = true
insta.workspace = true
tempfile.workspace = true
tower.workspace = true

View file

@ -1202,6 +1202,11 @@ async fn require_token(
req: Request<axum::body::Body>,
next: Next,
) -> Result<Response, ApiError> {
let path = req.uri().path();
if path == "/v1/health" || path == "/health" {
return Ok(next.run(req).await);
}
let expected = match &state.auth.token {
Some(token) => token.as_str(),
None => return Ok(next.run(req).await),
@ -1946,7 +1951,7 @@ fn parse_agent_line(agent: AgentId, line: &str, session_id: &str) -> Option<Even
convert_claude::event_to_universal_with_session(&value, session_id.to_string())
}
AgentId::Codex => match serde_json::from_value(value.clone()) {
Ok(event) => convert_codex::event_to_universal(&event),
Ok(notification) => convert_codex::notification_to_universal(&notification),
Err(err) => EventConversion::new(unparsed_message(
&value.to_string(),
&err.to_string(),

View file

@ -1,8 +1,8 @@
use std::collections::BTreeMap;
use std::time::{Duration, Instant};
use axum::body::Body;
use axum::http::{Method, Request, StatusCode};
use axum::body::{Body, Bytes};
use axum::http::{header, HeaderMap, HeaderValue, Method, Request, StatusCode};
use axum::Router;
use futures::StreamExt;
use http_body_util::BodyExt;
@ -13,9 +13,13 @@ use sandbox_agent_agent_management::agents::{AgentId, AgentManager};
use sandbox_agent_agent_management::testing::{test_agents_from_env, TestAgentConfig};
use sandbox_agent_agent_credentials::ExtractedCredentials;
use sandbox_agent_core::router::{build_router, AppState, AuthConfig};
use tower::ServiceExt;
use tower::util::ServiceExt;
use tower_http::cors::CorsLayer;
const PROMPT: &str = "Reply with exactly the single word OK.";
const PERMISSION_PROMPT: &str = "List files in the current directory using available tools.";
const QUESTION_PROMPT: &str =
"Ask the user a multiple-choice question with options yes/no using any built-in AskUserQuestion tool, then wait.";
struct TestApp {
app: Router,
@ -24,11 +28,22 @@ struct TestApp {
impl TestApp {
fn new() -> Self {
Self::new_with_auth(AuthConfig::disabled())
}
fn new_with_auth(auth: AuthConfig) -> Self {
Self::new_with_auth_and_cors(auth, None)
}
fn new_with_auth_and_cors(auth: AuthConfig, cors: Option<CorsLayer>) -> Self {
let install_dir = tempfile::tempdir().expect("create temp install dir");
let manager = AgentManager::new(install_dir.path())
.expect("create agent manager");
let state = AppState::new(AuthConfig::disabled(), manager);
let app = build_router(state);
let state = AppState::new(auth, manager);
let mut app = build_router(state);
if let Some(cors) = cors {
app = app.layer(cors);
}
Self {
app,
_install_dir: install_dir,
@ -112,6 +127,37 @@ async fn send_json(app: &Router, method: Method, path: &str, body: Option<Value>
(status, value)
}
/// Drive `request` through a clone of the router and collect the full
/// response into `(status, headers, body bytes)`.
async fn send_request(app: &Router, request: Request<Body>) -> (StatusCode, HeaderMap, Bytes) {
    let reply = app
        .clone()
        .oneshot(request)
        .await
        .expect("request handled");
    let status = reply.status();
    let headers = reply.headers().clone();
    let collected = reply.into_body().collect().await.expect("read body");
    (status, headers, collected.to_bytes())
}
/// Like `send_request`, but decode the body as JSON. An empty body maps to
/// `Value::Null`; a body that fails to parse is preserved as a JSON string.
async fn send_json_request(
    app: &Router,
    request: Request<Body>,
) -> (StatusCode, HeaderMap, Value) {
    let (status, headers, bytes) = send_request(app, request).await;
    let value = if bytes.is_empty() {
        Value::Null
    } else {
        match serde_json::from_slice(&bytes) {
            Ok(parsed) => parsed,
            Err(_) => Value::String(String::from_utf8_lossy(&bytes).to_string()),
        }
    };
    (status, headers, value)
}
async fn send_status(app: &Router, method: Method, path: &str, body: Option<Value>) -> StatusCode {
let (status, _) = send_json(app, method, path, body).await;
status
@ -128,14 +174,14 @@ async fn install_agent(app: &Router, agent: AgentId) {
assert_eq!(status, StatusCode::NO_CONTENT, "install {agent}");
}
async fn create_session(app: &Router, agent: AgentId, session_id: &str) {
async fn create_session(app: &Router, agent: AgentId, session_id: &str, permission_mode: &str) {
let status = send_status(
app,
Method::POST,
&format!("/v1/sessions/{session_id}"),
Some(json!({
"agent": agent.as_str(),
"permissionMode": "bypass"
"permissionMode": permission_mode
})),
)
.await;
@ -211,7 +257,7 @@ async fn read_sse_events(
_ => break,
};
let next = tokio::time::timeout(remaining, stream.next()).await;
let chunk = match next {
let chunk: Bytes = match next {
Ok(Some(Ok(chunk))) => chunk,
Ok(Some(Err(_))) => break,
Ok(None) => break,
@ -267,6 +313,23 @@ fn is_error_event(event: &Value) -> bool {
.is_some()
}
/// True when a universal event's `data` carries a `permissionAsked` payload.
fn is_permission_event(event: &Value) -> bool {
    event
        .get("data")
        .map_or(false, |data| data.get("permissionAsked").is_some())
}
/// Cut the event list after the first permission request to keep snapshots
/// deterministic; when no permission event exists, fall back to cutting at
/// the first assistant message. Without either marker the list is returned
/// unchanged.
fn truncate_permission_events(events: &[Value]) -> Vec<Value> {
    let cut = events
        .iter()
        .position(is_permission_event)
        .or_else(|| events.iter().position(is_assistant_message));
    match cut {
        Some(idx) => events[..=idx].to_vec(),
        None => events.to_vec(),
    }
}
fn normalize_events(events: &[Value]) -> Value {
let normalized = events
.iter()
@ -276,6 +339,16 @@ fn normalize_events(events: &[Value]) -> Value {
Value::Array(normalized)
}
/// Keep events up to and including the first assistant message or error
/// event, dropping the nondeterministic tail of the stream.
fn truncate_after_first_stop(events: &[Value]) -> Vec<Value> {
    let stop_idx = events
        .iter()
        .position(|event| is_assistant_message(event) || is_error_event(event));
    match stop_idx {
        Some(idx) => events[..=idx].to_vec(),
        None => events.to_vec(),
    }
}
fn normalize_event(event: &Value, seq: usize) -> Value {
let mut map = Map::new();
map.insert("seq".to_string(), Value::Number(seq.into()));
@ -379,8 +452,239 @@ fn normalize_permission(permission: &Value) -> Value {
Value::Object(map)
}
fn snapshot_name(prefix: &str, agent: AgentId) -> String {
format!("{prefix}_{}", agent.as_str())
/// Normalize a `{"agents": [...]}` payload for snapshotting: keep only each
/// agent's `id` and sort by it so the snapshot is independent of local
/// install state and response ordering.
fn normalize_agent_list(value: &Value) -> Value {
    // Missing or non-array "agents" normalizes to an empty list.
    let agents = value
        .get("agents")
        .and_then(Value::as_array)
        .cloned()
        .unwrap_or_default();
    let mut normalized = Vec::new();
    for agent in agents {
        let mut map = Map::new();
        if let Some(id) = agent.get("id").and_then(Value::as_str) {
            map.insert("id".to_string(), Value::String(id.to_string()));
        }
        // Skip installed/version/path fields - they depend on local environment
        // and make snapshots non-deterministic
        normalized.push(Value::Object(map));
    }
    normalized.sort_by(|a, b| {
        a.get("id")
            .and_then(Value::as_str)
            .cmp(&b.get("id").and_then(Value::as_str))
    });
    json!({ "agents": normalized })
}
/// Normalize a `{"modes": [...]}` payload for snapshotting: retain `id`
/// and `name`, collapse the free-form `description` to a presence flag,
/// and sort by id for stable output.
fn normalize_agent_modes(value: &Value) -> Value {
    let raw_modes = value
        .get("modes")
        .and_then(Value::as_array)
        .cloned()
        .unwrap_or_default();
    let mut normalized: Vec<Value> = raw_modes
        .into_iter()
        .map(|mode| {
            let mut entry = Map::new();
            if let Some(id) = mode.get("id").and_then(Value::as_str) {
                entry.insert("id".to_string(), Value::String(id.to_string()));
            }
            if let Some(name) = mode.get("name").and_then(Value::as_str) {
                entry.insert("name".to_string(), Value::String(name.to_string()));
            }
            if mode.get("description").is_some() {
                // Record only that a description exists, not its text.
                entry.insert("description".to_string(), Value::Bool(true));
            }
            Value::Object(entry)
        })
        .collect();
    normalized.sort_by(|a, b| {
        a.get("id")
            .and_then(Value::as_str)
            .cmp(&b.get("id").and_then(Value::as_str))
    });
    json!({ "modes": normalized })
}
/// Normalize a `{"sessions": [...]}` payload for snapshotting: keep stable
/// identifying fields verbatim, replace environment-dependent values
/// (model, variant, agent session id, event count) with `<redacted>`
/// presence markers, and sort by session id for deterministic ordering.
fn normalize_sessions(value: &Value) -> Value {
    let sessions = value
        .get("sessions")
        .and_then(Value::as_array)
        .cloned()
        .unwrap_or_default();
    let mut normalized = Vec::new();
    for session in sessions {
        let mut map = Map::new();
        if let Some(session_id) = session.get("sessionId").and_then(Value::as_str) {
            map.insert("sessionId".to_string(), Value::String(session_id.to_string()));
        }
        if let Some(agent) = session.get("agent").and_then(Value::as_str) {
            map.insert("agent".to_string(), Value::String(agent.to_string()));
        }
        if let Some(agent_mode) = session.get("agentMode").and_then(Value::as_str) {
            map.insert("agentMode".to_string(), Value::String(agent_mode.to_string()));
        }
        if let Some(permission_mode) = session.get("permissionMode").and_then(Value::as_str) {
            map.insert("permissionMode".to_string(), Value::String(permission_mode.to_string()));
        }
        // The following fields vary per run/environment; record presence only.
        if session.get("model").is_some() {
            map.insert("model".to_string(), Value::String("<redacted>".to_string()));
        }
        if session.get("variant").is_some() {
            map.insert("variant".to_string(), Value::String("<redacted>".to_string()));
        }
        if session.get("agentSessionId").is_some() {
            map.insert("agentSessionId".to_string(), Value::String("<redacted>".to_string()));
        }
        if let Some(ended) = session.get("ended").and_then(Value::as_bool) {
            map.insert("ended".to_string(), Value::Bool(ended));
        }
        if session.get("eventCount").is_some() {
            map.insert("eventCount".to_string(), Value::String("<redacted>".to_string()));
        }
        normalized.push(Value::Object(map));
    }
    normalized.sort_by(|a, b| {
        a.get("sessionId")
            .and_then(Value::as_str)
            .cmp(&b.get("sessionId").and_then(Value::as_str))
    });
    json!({ "sessions": normalized })
}
/// Normalize a session-creation response for snapshotting: keep `healthy`
/// and any `error` payload verbatim; redact the agent-assigned session id,
/// recording only that one was returned.
fn normalize_create_session(value: &Value) -> Value {
    let mut fields = Map::new();
    if let Some(healthy) = value.get("healthy").and_then(Value::as_bool) {
        fields.insert("healthy".to_string(), Value::Bool(healthy));
    }
    if value.get("agentSessionId").is_some() {
        fields.insert(
            "agentSessionId".to_string(),
            Value::String("<redacted>".to_string()),
        );
    }
    if let Some(error) = value.get("error") {
        fields.insert("error".to_string(), error.clone());
    }
    Value::Object(fields)
}
/// Reduce a health payload to just its `status` string for snapshotting.
fn normalize_health(value: &Value) -> Value {
    let mut normalized = Map::new();
    if let Some(status) = value.get("status").and_then(Value::as_str) {
        normalized.insert("status".to_string(), Value::String(status.to_string()));
    }
    Value::Object(normalized)
}
fn snapshot_status(status: StatusCode) -> Value {
json!({ "status": status.as_u16() })
}
/// Capture the CORS-relevant parts of a response for snapshotting: the
/// status code plus a fixed allowlist of CORS/Vary headers, so unrelated
/// headers cannot churn the snapshot.
fn snapshot_cors(status: StatusCode, headers: &HeaderMap) -> Value {
    let mut map = Map::new();
    map.insert("status".to_string(), Value::Number(status.as_u16().into()));
    for name in [
        header::ACCESS_CONTROL_ALLOW_ORIGIN,
        header::ACCESS_CONTROL_ALLOW_METHODS,
        header::ACCESS_CONTROL_ALLOW_HEADERS,
        header::ACCESS_CONTROL_ALLOW_CREDENTIALS,
        header::VARY,
    ] {
        if let Some(value) = headers.get(&name) {
            // Header values that are not valid UTF-8 snapshot as a placeholder.
            map.insert(
                name.as_str().to_string(),
                Value::String(value.to_str().unwrap_or("<invalid>").to_string()),
            );
        }
    }
    Value::Object(map)
}
/// Build a snapshot file name: `<prefix>_<agent>` for per-agent snapshots,
/// `<prefix>_global` for agent-independent ones.
fn snapshot_name(prefix: &str, agent: Option<AgentId>) -> String {
    if let Some(agent) = agent {
        format!("{prefix}_{}", agent.as_str())
    } else {
        format!("{prefix}_global")
    }
}
/// Poll the session events endpoint until `stop` is satisfied or `timeout`
/// elapses, accumulating every event seen.
///
/// Paginates via `offset`, advancing it to the id of the last event
/// received so each poll only fetches new events. Returns whatever was
/// collected — possibly without the stop condition ever matching — and
/// leaves assertions to the caller.
async fn poll_events_until_match<F>(
    app: &Router,
    session_id: &str,
    timeout: Duration,
    stop: F,
) -> Vec<Value>
where
    F: Fn(&[Value]) -> bool,
{
    let start = Instant::now();
    let mut offset = 0u64;
    let mut events = Vec::new();
    while start.elapsed() < timeout {
        let path = format!("/v1/sessions/{session_id}/events?offset={offset}&limit=200");
        let (status, payload) = send_json(app, Method::GET, &path, None).await;
        assert_eq!(status, StatusCode::OK, "poll events");
        let new_events = payload
            .get("events")
            .and_then(Value::as_array)
            .cloned()
            .unwrap_or_default();
        if !new_events.is_empty() {
            // Advance the cursor past the newest event we have seen.
            if let Some(last) = new_events
                .last()
                .and_then(|event| event.get("id"))
                .and_then(Value::as_u64)
            {
                offset = last;
            }
            events.extend(new_events);
            if stop(&events) {
                break;
            }
        }
        // Back off between polls to avoid hammering the server.
        tokio::time::sleep(Duration::from_millis(800)).await;
    }
    events
}
/// Scans events for the first `data.permissionAsked` payload and returns its
/// `id` as an owned string, or `None` when no permission request is present.
fn find_permission_id(events: &[Value]) -> Option<String> {
    for event in events {
        let id = event
            .get("data")
            .and_then(|data| data.get("permissionAsked"))
            .and_then(|request| request.get("id"))
            .and_then(Value::as_str);
        if let Some(id) = id {
            return Some(id.to_string());
        }
    }
    None
}
/// Finds the first `data.questionAsked` event and returns its `id` together
/// with one default answer per question: the label of the first option when
/// one exists, otherwise an empty answer list for that question.
fn find_question_id_and_answers(events: &[Value]) -> Option<(String, Vec<Vec<String>>)> {
    let request = events.iter().find_map(|event| {
        event
            .get("data")
            .and_then(|data| data.get("questionAsked"))
            .cloned()
    })?;
    let id = request.get("id").and_then(Value::as_str)?.to_string();
    let answers: Vec<Vec<String>> = request
        .get("questions")
        .and_then(Value::as_array)
        .cloned()
        .unwrap_or_default()
        .iter()
        .map(|question| {
            // Pick the first option's label as the canned answer, if any.
            question
                .get("options")
                .and_then(Value::as_array)
                .and_then(|options| options.first())
                .and_then(|option| option.get("label"))
                .and_then(Value::as_str)
                .map(|label| vec![label.to_string()])
                .unwrap_or_default()
        })
        .collect();
    Some((id, answers))
}
async fn run_http_events_snapshot(app: &Router, config: &TestAgentConfig) {
@ -388,10 +692,11 @@ async fn run_http_events_snapshot(app: &Router, config: &TestAgentConfig) {
install_agent(app, config.agent).await;
let session_id = format!("session-{}", config.agent.as_str());
create_session(app, config.agent, &session_id).await;
create_session(app, config.agent, &session_id, "bypass").await;
send_message(app, &session_id).await;
let events = poll_events_until(app, &session_id, Duration::from_secs(120)).await;
let events = truncate_after_first_stop(&events);
assert!(
!events.is_empty(),
"no events collected for {}",
@ -404,7 +709,7 @@ async fn run_http_events_snapshot(app: &Router, config: &TestAgentConfig) {
);
let normalized = normalize_events(&events);
insta::with_settings!({
snapshot_suffix => snapshot_name("http_events", config.agent),
snapshot_suffix => snapshot_name("http_events", Some(config.agent)),
}, {
insta::assert_yaml_snapshot!(normalized);
});
@ -415,7 +720,7 @@ async fn run_sse_events_snapshot(app: &Router, config: &TestAgentConfig) {
install_agent(app, config.agent).await;
let session_id = format!("sse-{}", config.agent.as_str());
create_session(app, config.agent, &session_id).await;
create_session(app, config.agent, &session_id, "bypass").await;
let sse_task = {
let app = app.clone();
@ -428,6 +733,7 @@ async fn run_sse_events_snapshot(app: &Router, config: &TestAgentConfig) {
send_message(app, &session_id).await;
let events = sse_task.await.expect("sse task");
let events = truncate_after_first_stop(&events);
assert!(
!events.is_empty(),
"no sse events collected for {}",
@ -440,26 +746,494 @@ async fn run_sse_events_snapshot(app: &Router, config: &TestAgentConfig) {
);
let normalized = normalize_events(&events);
insta::with_settings!({
snapshot_suffix => snapshot_name("sse_events", config.agent),
snapshot_suffix => snapshot_name("sse_events", Some(config.agent)),
}, {
insta::assert_yaml_snapshot!(normalized);
});
}
/// Verifies bearer-token authentication end to end: the health endpoint is
/// public, a missing or wrong token yields 401, and the configured token
/// grants access. Each outcome is pinned with an insta snapshot.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn auth_snapshots() {
    let token = "test-token";
    let app = TestApp::new_with_auth(AuthConfig::with_token(token.to_string()));
    // Health must be reachable without any credentials.
    let (status, payload) = send_json(&app.app, Method::GET, "/v1/health", None).await;
    assert_eq!(status, StatusCode::OK, "health should be public");
    insta::with_settings!({
        snapshot_suffix => snapshot_name("auth_health_public", None),
    }, {
        insta::assert_yaml_snapshot!(json!({
            "status": status.as_u16(),
            "payload": normalize_health(&payload),
        }));
    });
    // No Authorization header at all on a protected route.
    let (status, payload) = send_json(&app.app, Method::GET, "/v1/agents", None).await;
    assert_eq!(status, StatusCode::UNAUTHORIZED, "missing token should 401");
    insta::with_settings!({
        snapshot_suffix => snapshot_name("auth_missing_token", None),
    }, {
        insta::assert_yaml_snapshot!(json!({
            "status": status.as_u16(),
            "payload": payload,
        }));
    });
    // A present but incorrect bearer token must also be rejected.
    let request = Request::builder()
        .method(Method::GET)
        .uri("/v1/agents")
        .header(header::AUTHORIZATION, "Bearer wrong-token")
        .body(Body::empty())
        .expect("auth invalid request");
    let (status, _headers, payload) = send_json_request(&app.app, request).await;
    assert_eq!(status, StatusCode::UNAUTHORIZED, "invalid token should 401");
    insta::with_settings!({
        snapshot_suffix => snapshot_name("auth_invalid_token", None),
    }, {
        insta::assert_yaml_snapshot!(json!({
            "status": status.as_u16(),
            "payload": payload,
        }));
    });
    // The correct token unlocks the protected route.
    let request = Request::builder()
        .method(Method::GET)
        .uri("/v1/agents")
        .header(header::AUTHORIZATION, format!("Bearer {token}"))
        .body(Body::empty())
        .expect("auth valid request");
    let (status, _headers, payload) = send_json_request(&app.app, request).await;
    assert_eq!(status, StatusCode::OK, "valid token should allow request");
    insta::with_settings!({
        snapshot_suffix => snapshot_name("auth_valid_token", None),
    }, {
        insta::assert_yaml_snapshot!(json!({
            "status": status.as_u16(),
            "payload": normalize_agent_list(&payload),
        }));
    });
}
/// Exercises CORS handling with a restrictive layer (single origin, GET/POST,
/// content-type + authorization headers, credentials allowed): snapshots both
/// the OPTIONS preflight response and the headers on an actual GET.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn cors_snapshots() {
    let cors = CorsLayer::new()
        .allow_origin(vec![HeaderValue::from_static("http://example.com")])
        .allow_methods([Method::GET, Method::POST])
        .allow_headers([header::CONTENT_TYPE, header::AUTHORIZATION])
        .allow_credentials(true);
    // Auth disabled so only CORS behavior is under test.
    let app = TestApp::new_with_auth_and_cors(AuthConfig::disabled(), Some(cors));
    // Browser-style preflight request for the health endpoint.
    let preflight = Request::builder()
        .method(Method::OPTIONS)
        .uri("/v1/health")
        .header(header::ORIGIN, "http://example.com")
        .header(header::ACCESS_CONTROL_REQUEST_METHOD, "GET")
        .header(
            header::ACCESS_CONTROL_REQUEST_HEADERS,
            "authorization,content-type",
        )
        .body(Body::empty())
        .expect("cors preflight request")
    let (status, headers, _payload) = send_request(&app.app, preflight).await;
    insta::with_settings!({
        snapshot_suffix => snapshot_name("cors_preflight", None),
    }, {
        insta::assert_yaml_snapshot!(snapshot_cors(status, &headers));
    });
    // The actual cross-origin GET should succeed and echo CORS headers.
    let actual = Request::builder()
        .method(Method::GET)
        .uri("/v1/health")
        .header(header::ORIGIN, "http://example.com")
        .body(Body::empty())
        .expect("cors actual request");
    let (status, headers, payload) = send_json_request(&app.app, actual).await;
    assert_eq!(status, StatusCode::OK, "cors actual request should succeed");
    insta::with_settings!({
        snapshot_suffix => snapshot_name("cors_actual", None),
    }, {
        insta::assert_yaml_snapshot!(json!({
            "cors": snapshot_cors(status, &headers),
            "payload": normalize_health(&payload),
        }));
    });
}
/// Walks the core REST surface in order — health, agent list, per-agent
/// install/modes, session create, session list, message send — snapshotting
/// each normalized response. Requires test agents configured via env.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn api_endpoints_snapshots() {
    let configs = test_agents_from_env().expect("configure SANDBOX_TEST_AGENTS or install agents");
    let app = TestApp::new();
    let (status, health) = send_json(&app.app, Method::GET, "/v1/health", None).await;
    assert_eq!(status, StatusCode::OK, "health status");
    insta::with_settings!({
        snapshot_suffix => snapshot_name("health", None),
    }, {
        insta::assert_yaml_snapshot!(normalize_health(&health));
    });
    // List agents (just verify the API returns correct agent IDs, not install state)
    let (status, agents) = send_json(&app.app, Method::GET, "/v1/agents", None).await;
    assert_eq!(status, StatusCode::OK, "agents list");
    insta::with_settings!({
        snapshot_suffix => snapshot_name("agents_list", None),
    }, {
        insta::assert_yaml_snapshot!(normalize_agent_list(&agents));
    });
    // Install agents (ensure they're available for subsequent tests)
    for config in &configs {
        // Credentials guard applies env vars for this agent's scope.
        let _guard = apply_credentials(&config.credentials);
        let status = send_status(
            &app.app,
            Method::POST,
            &format!("/v1/agents/{}/install", config.agent.as_str()),
            Some(json!({})),
        )
        .await;
        assert_eq!(status, StatusCode::NO_CONTENT, "install agent");
        insta::with_settings!({
            snapshot_suffix => snapshot_name("agent_install", Some(config.agent)),
        }, {
            insta::assert_yaml_snapshot!(snapshot_status(status));
        });
    }
    // Per-agent: list modes, then create a session in bypass mode.
    let mut session_ids = Vec::new();
    for config in &configs {
        let _guard = apply_credentials(&config.credentials);
        let (status, modes) = send_json(
            &app.app,
            Method::GET,
            &format!("/v1/agents/{}/modes", config.agent.as_str()),
            None,
        )
        .await;
        assert_eq!(status, StatusCode::OK, "agent modes");
        insta::with_settings!({
            snapshot_suffix => snapshot_name("agent_modes", Some(config.agent)),
        }, {
            insta::assert_yaml_snapshot!(normalize_agent_modes(&modes));
        });
        let session_id = format!("snapshot-{}", config.agent.as_str());
        let (status, created) = send_json(
            &app.app,
            Method::POST,
            &format!("/v1/sessions/{session_id}"),
            Some(json!({
                "agent": config.agent.as_str(),
                "permissionMode": "bypass"
            })),
        )
        .await;
        assert_eq!(status, StatusCode::OK, "create session");
        insta::with_settings!({
            snapshot_suffix => snapshot_name("create_session", Some(config.agent)),
        }, {
            insta::assert_yaml_snapshot!(normalize_create_session(&created));
        });
        session_ids.push((config.agent, session_id));
    }
    // Session listing should include every session created above.
    let (status, sessions) = send_json(&app.app, Method::GET, "/v1/sessions", None).await;
    assert_eq!(status, StatusCode::OK, "list sessions");
    insta::with_settings!({
        snapshot_suffix => snapshot_name("sessions_list", None),
    }, {
        insta::assert_yaml_snapshot!(normalize_sessions(&sessions));
    });
    // Finally, post a message into each session.
    for (agent, session_id) in &session_ids {
        let status = send_status(
            &app.app,
            Method::POST,
            &format!("/v1/sessions/{session_id}/messages"),
            Some(json!({ "message": PROMPT })),
        )
        .await;
        assert_eq!(status, StatusCode::NO_CONTENT, "send message");
        insta::with_settings!({
            snapshot_suffix => snapshot_name("send_message", Some(*agent)),
        }, {
            insta::assert_yaml_snapshot!(snapshot_status(status));
        });
    }
}
/// Drives the interactive approval flows for each configured agent: a
/// permission request (plan mode), a question that is answered, and a
/// question that is rejected. Each flow snapshots the observed events and
/// the reply outcome; when the agent never asks, the negative path (replying
/// to a missing id) is snapshotted instead so coverage is deterministic.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn approval_flow_snapshots() {
    let configs = test_agents_from_env().expect("configure SANDBOX_TEST_AGENTS or install agents");
    let app = TestApp::new();
    for config in &configs {
        let _guard = apply_credentials(&config.credentials);
        install_agent(&app.app, config.agent).await;
        // --- Permission flow: plan mode should surface a permissionAsked event.
        let permission_session = format!("perm-{}", config.agent.as_str());
        create_session(&app.app, config.agent, &permission_session, "plan").await;
        let status = send_status(
            &app.app,
            Method::POST,
            &format!("/v1/sessions/{permission_session}/messages"),
            Some(json!({ "message": PERMISSION_PROMPT })),
        )
        .await;
        assert_eq!(status, StatusCode::NO_CONTENT, "send permission prompt");
        // Poll until a permission request appears or the session stops.
        let permission_events = poll_events_until_match(
            &app.app,
            &permission_session,
            Duration::from_secs(120),
            |events| find_permission_id(events).is_some() || should_stop(events),
        )
        .await;
        let permission_events = truncate_permission_events(&permission_events);
        insta::with_settings!({
            snapshot_suffix => snapshot_name("permission_events", Some(config.agent)),
        }, {
            insta::assert_yaml_snapshot!(normalize_events(&permission_events));
        });
        if let Some(permission_id) = find_permission_id(&permission_events) {
            // Grant the permission once and snapshot the 204.
            let status = send_status(
                &app.app,
                Method::POST,
                &format!(
                    "/v1/sessions/{permission_session}/permissions/{permission_id}/reply"
                ),
                Some(json!({ "reply": "once" })),
            )
            .await;
            assert_eq!(status, StatusCode::NO_CONTENT, "reply permission");
            insta::with_settings!({
                snapshot_suffix => snapshot_name("permission_reply", Some(config.agent)),
            }, {
                insta::assert_yaml_snapshot!(snapshot_status(status));
            });
        } else {
            // No permission was asked: exercise the unknown-id error path.
            let (status, payload) = send_json(
                &app.app,
                Method::POST,
                &format!(
                    "/v1/sessions/{permission_session}/permissions/missing-permission/reply"
                ),
                Some(json!({ "reply": "once" })),
            )
            .await;
            assert!(!status.is_success(), "missing permission id should error");
            insta::with_settings!({
                snapshot_suffix => snapshot_name("permission_reply_missing", Some(config.agent)),
            }, {
                insta::assert_yaml_snapshot!(json!({
                    "status": status.as_u16(),
                    "payload": payload,
                }));
            });
        }
        // --- Question flow (answer): reply with the first option's label.
        let question_reply_session = format!("question-reply-{}", config.agent.as_str());
        create_session(&app.app, config.agent, &question_reply_session, "bypass").await;
        let status = send_status(
            &app.app,
            Method::POST,
            &format!("/v1/sessions/{question_reply_session}/messages"),
            Some(json!({ "message": QUESTION_PROMPT })),
        )
        .await;
        assert_eq!(status, StatusCode::NO_CONTENT, "send question prompt");
        let question_events = poll_events_until_match(
            &app.app,
            &question_reply_session,
            Duration::from_secs(120),
            |events| find_question_id_and_answers(events).is_some() || should_stop(events),
        )
        .await;
        insta::with_settings!({
            snapshot_suffix => snapshot_name("question_reply_events", Some(config.agent)),
        }, {
            insta::assert_yaml_snapshot!(normalize_events(&question_events));
        });
        if let Some((question_id, answers)) = find_question_id_and_answers(&question_events) {
            let status = send_status(
                &app.app,
                Method::POST,
                &format!(
                    "/v1/sessions/{question_reply_session}/questions/{question_id}/reply"
                ),
                Some(json!({ "answers": answers })),
            )
            .await;
            assert_eq!(status, StatusCode::NO_CONTENT, "reply question");
            insta::with_settings!({
                snapshot_suffix => snapshot_name("question_reply", Some(config.agent)),
            }, {
                insta::assert_yaml_snapshot!(snapshot_status(status));
            });
        } else {
            // No question was asked: exercise the unknown-id error path.
            let (status, payload) = send_json(
                &app.app,
                Method::POST,
                &format!(
                    "/v1/sessions/{question_reply_session}/questions/missing-question/reply"
                ),
                Some(json!({ "answers": [] })),
            )
            .await;
            assert!(!status.is_success(), "missing question id should error");
            insta::with_settings!({
                snapshot_suffix => snapshot_name("question_reply_missing", Some(config.agent)),
            }, {
                insta::assert_yaml_snapshot!(json!({
                    "status": status.as_u16(),
                    "payload": payload,
                }));
            });
        }
        // --- Question flow (reject): same setup, but reject instead of answer.
        let question_reject_session = format!("question-reject-{}", config.agent.as_str());
        create_session(&app.app, config.agent, &question_reject_session, "bypass").await;
        let status = send_status(
            &app.app,
            Method::POST,
            &format!("/v1/sessions/{question_reject_session}/messages"),
            Some(json!({ "message": QUESTION_PROMPT })),
        )
        .await;
        assert_eq!(status, StatusCode::NO_CONTENT, "send question prompt reject");
        let reject_events = poll_events_until_match(
            &app.app,
            &question_reject_session,
            Duration::from_secs(120),
            |events| find_question_id_and_answers(events).is_some() || should_stop(events),
        )
        .await;
        insta::with_settings!({
            snapshot_suffix => snapshot_name("question_reject_events", Some(config.agent)),
        }, {
            insta::assert_yaml_snapshot!(normalize_events(&reject_events));
        });
        if let Some((question_id, _)) = find_question_id_and_answers(&reject_events) {
            let status = send_status(
                &app.app,
                Method::POST,
                &format!(
                    "/v1/sessions/{question_reject_session}/questions/{question_id}/reject"
                ),
                None,
            )
            .await;
            assert_eq!(status, StatusCode::NO_CONTENT, "reject question");
            insta::with_settings!({
                snapshot_suffix => snapshot_name("question_reject", Some(config.agent)),
            }, {
                insta::assert_yaml_snapshot!(snapshot_status(status));
            });
        } else {
            // No question was asked: exercise the unknown-id reject path.
            let (status, payload) = send_json(
                &app.app,
                Method::POST,
                &format!(
                    "/v1/sessions/{question_reject_session}/questions/missing-question/reject"
                ),
                None,
            )
            .await;
            assert!(!status.is_success(), "missing question id reject should error");
            insta::with_settings!({
                snapshot_suffix => snapshot_name("question_reject_missing", Some(config.agent)),
            }, {
                insta::assert_yaml_snapshot!(json!({
                    "status": status.as_u16(),
                    "payload": payload,
                }));
            });
        }
    }
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn http_events_snapshots() {
let configs = test_agents_from_env().expect("configure SANDBOX_TEST_AGENTS");
let configs = test_agents_from_env().expect("configure SANDBOX_TEST_AGENTS or install agents");
let app = TestApp::new();
for config in &configs {
run_http_events_snapshot(&app.app, config).await;
}
}
/// Runs two sessions for the same agent concurrently: sends a message into
/// each in parallel, polls both event streams in parallel, and snapshots the
/// normalized event sequences side by side to verify sessions don't
/// interfere with each other.
async fn run_concurrency_snapshot(app: &Router, config: &TestAgentConfig) {
    let _guard = apply_credentials(&config.credentials);
    install_agent(app, config.agent).await;
    let session_a = format!("concurrent-a-{}", config.agent.as_str());
    let session_b = format!("concurrent-b-{}", config.agent.as_str());
    create_session(app, config.agent, &session_a, "bypass").await;
    create_session(app, config.agent, &session_b, "bypass").await;
    // Send both messages concurrently via cloned routers.
    let app_a = app.clone();
    let app_b = app.clone();
    let send_a = send_message(&app_a, &session_a);
    let send_b = send_message(&app_b, &session_b);
    tokio::join!(send_a, send_b);
    // Poll both event streams concurrently until each session stops.
    let app_a = app.clone();
    let app_b = app.clone();
    let poll_a = poll_events_until(&app_a, &session_a, Duration::from_secs(120));
    let poll_b = poll_events_until(&app_b, &session_b, Duration::from_secs(120));
    let (events_a, events_b) = tokio::join!(poll_a, poll_b);
    // Trim trailing noise after the first stop event for stable snapshots.
    let events_a = truncate_after_first_stop(&events_a);
    let events_b = truncate_after_first_stop(&events_b);
    assert!(
        !events_a.is_empty(),
        "no events collected for concurrent session a {}",
        config.agent
    );
    assert!(
        !events_b.is_empty(),
        "no events collected for concurrent session b {}",
        config.agent
    );
    assert!(
        should_stop(&events_a),
        "timed out waiting for assistant/error event for concurrent session a {}",
        config.agent
    );
    assert!(
        should_stop(&events_b),
        "timed out waiting for assistant/error event for concurrent session b {}",
        config.agent
    );
    let snapshot = json!({
        "session_a": normalize_events(&events_a),
        "session_b": normalize_events(&events_b),
    });
    insta::with_settings!({
        snapshot_suffix => snapshot_name("concurrency_events", Some(config.agent)),
    }, {
        insta::assert_yaml_snapshot!(snapshot);
    });
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn sse_events_snapshots() {
let configs = test_agents_from_env().expect("configure SANDBOX_TEST_AGENTS");
let configs = test_agents_from_env().expect("configure SANDBOX_TEST_AGENTS or install agents");
let app = TestApp::new();
for config in &configs {
run_sse_events_snapshot(&app.app, config).await;
}
}
/// Runs the two-session concurrency scenario once per agent supplied by the
/// environment; panics with guidance when no test agents are configured.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn concurrency_snapshots() {
    let configs = test_agents_from_env().expect("configure SANDBOX_TEST_AGENTS or install agents");
    let app = TestApp::new();
    for config in configs.iter() {
        run_concurrency_snapshot(&app.app, config).await;
    }
}

View file

@ -0,0 +1,6 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 874
expression: snapshot_status(status)
---
status: 204

View file

@ -0,0 +1,12 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 900
expression: normalize_agent_modes(&modes)
---
modes:
- description: true
id: build
name: Build
- description: true
id: plan
name: Plan

View file

@ -0,0 +1,10 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 881
expression: normalize_agent_list(&agents)
---
agents:
- id: amp
- id: claude
- id: codex
- id: opencode

View file

@ -0,0 +1,6 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 918
expression: normalize_create_session(&created)
---
healthy: true

View file

@ -0,0 +1,6 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 850
expression: normalize_health(&health)
---
status: ok

View file

@ -0,0 +1,6 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 943
expression: snapshot_status(status)
---
status: 204

View file

@ -0,0 +1,15 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 928
expression: normalize_sessions(&sessions)
---
sessions:
- agent: claude
agentMode: build
agentSessionId: "<redacted>"
ended: false
eventCount: "<redacted>"
model: "<redacted>"
permissionMode: bypass
sessionId: snapshot-claude
variant: "<redacted>"

View file

@ -0,0 +1,21 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 978
expression: normalize_events(&permission_events)
---
- agent: claude
kind: started
seq: 1
started:
message: session.created
- agent: claude
kind: unknown
seq: 2
- agent: claude
kind: message
message:
parts:
- text: "<redacted>"
type: text
role: assistant
seq: 3

View file

@ -0,0 +1,11 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 1011
expression: "json!({ \"status\": status.as_u16(), \"payload\": payload, })"
---
payload:
detail: "invalid request: unknown permission id: missing-permission"
status: 400
title: Invalid Request
type: "urn:sandbox-agent:error:invalid_request"
status: 400

View file

@ -0,0 +1,21 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 1100
expression: normalize_events(&reject_events)
---
- agent: claude
kind: started
seq: 1
started:
message: session.created
- agent: claude
kind: unknown
seq: 2
- agent: claude
kind: message
message:
parts:
- text: "<redacted>"
type: text
role: assistant
seq: 3

View file

@ -0,0 +1,11 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 1151
expression: "json!({ \"status\": status.as_u16(), \"payload\": payload, })"
---
payload:
detail: "invalid request: unknown question id: missing-question"
status: 400
title: Invalid Request
type: "urn:sandbox-agent:error:invalid_request"
status: 400

View file

@ -0,0 +1,21 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 1039
expression: normalize_events(&question_events)
---
- agent: claude
kind: started
seq: 1
started:
message: session.created
- agent: claude
kind: unknown
seq: 2
- agent: claude
kind: message
message:
parts:
- text: "<redacted>"
type: text
role: assistant
seq: 3

View file

@ -0,0 +1,11 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 1072
expression: "json!({ \"status\": status.as_u16(), \"payload\": payload, })"
---
payload:
detail: "invalid request: unknown question id: missing-question"
status: 400
title: Invalid Request
type: "urn:sandbox-agent:error:invalid_request"
status: 400

View file

@ -0,0 +1,8 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 765
expression: "json!({ \"status\": status.as_u16(), \"payload\": normalize_health(&payload), })"
---
payload:
status: ok
status: 200

View file

@ -0,0 +1,13 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 793
expression: "json!({ \"status\": status.as_u16(), \"payload\": payload, })"
---
payload:
detail: token invalid
details:
message: missing or invalid token
status: 401
title: Token Invalid
type: "urn:sandbox-agent:error:token_invalid"
status: 401

View file

@ -0,0 +1,13 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 776
expression: "json!({ \"status\": status.as_u16(), \"payload\": payload, })"
---
payload:
detail: token invalid
details:
message: missing or invalid token
status: 401
title: Token Invalid
type: "urn:sandbox-agent:error:token_invalid"
status: 401

View file

@ -0,0 +1,12 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 810
expression: "json!({\n \"status\": status.as_u16(), \"payload\": normalize_agent_list(&payload),\n})"
---
payload:
agents:
- id: amp
- id: claude
- id: codex
- id: opencode
status: 200

View file

@ -0,0 +1,12 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 842
expression: "json!({\n \"cors\": snapshot_cors(status, &headers), \"payload\":\n normalize_health(&payload),\n})"
---
cors:
access-control-allow-credentials: "true"
access-control-allow-origin: "http://example.com"
status: 200
vary: "origin, access-control-request-method, access-control-request-headers"
payload:
status: ok

View file

@ -0,0 +1,11 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 818
expression: "snapshot_cors(status, &headers)"
---
access-control-allow-credentials: "true"
access-control-allow-headers: "content-type,authorization"
access-control-allow-methods: "GET,POST"
access-control-allow-origin: "http://example.com"
status: 200
vary: "origin, access-control-request-method, access-control-request-headers"

View file

@ -0,0 +1,39 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 1232
expression: snapshot
---
session_a:
- agent: claude
kind: started
seq: 1
started:
message: session.created
- agent: claude
kind: unknown
seq: 2
- agent: claude
kind: message
message:
parts:
- text: "<redacted>"
type: text
role: assistant
seq: 3
session_b:
- agent: claude
kind: started
seq: 1
started:
message: session.created
- agent: claude
kind: unknown
seq: 2
- agent: claude
kind: message
message:
parts:
- text: "<redacted>"
type: text
role: assistant
seq: 3

View file

@ -0,0 +1,21 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 721
expression: normalized
---
- agent: claude
kind: started
seq: 1
started:
message: session.created
- agent: claude
kind: unknown
seq: 2
- agent: claude
kind: message
message:
parts:
- text: "<redacted>"
type: text
role: assistant
seq: 3

View file

@ -0,0 +1,21 @@
---
source: server/packages/sandbox-agent/tests/http_sse_snapshots.rs
assertion_line: 729
expression: normalized
---
- agent: claude
kind: started
seq: 1
started:
message: session.created
- agent: claude
kind: unknown
seq: 2
- agent: claude
kind: message
message:
parts:
- text: "<redacted>"
type: text
role: assistant
seq: 3

View file

@ -6,9 +6,9 @@ authors.workspace = true
license.workspace = true
[dependencies]
sandbox-agent-agent-schema = { path = "../agent-schema" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
schemars = "0.8"
thiserror = "1.0"
utoipa = { version = "4.2", features = ["axum_extras"] }
sandbox-agent-extracted-agent-schemas.workspace = true
serde.workspace = true
serde_json.workspace = true
schemars.workspace = true
thiserror.workspace = true
utoipa.workspace = true

File diff suppressed because it is too large Load diff

View file

@ -4,7 +4,7 @@ use schemars::JsonSchema;
use thiserror::Error;
use utoipa::ToSchema;
pub use sandbox_agent_agent_schema::{amp, claude, codex, opencode};
pub use sandbox_agent_extracted_agent_schemas::{amp, claude, codex, opencode};
pub mod agents;

View file

@ -0,0 +1,12 @@
[package]
name = "sandbox-agent-universal-schema-gen"
version.workspace = true
edition.workspace = true
authors.workspace = true
license.workspace = true
build = "build.rs"
[build-dependencies]
sandbox-agent-universal-agent-schema.workspace = true
schemars.workspace = true
serde_json.workspace = true

View file

@ -0,0 +1,26 @@
use std::{fs, path::Path};
/// Build script: generates the JSON Schema for `UniversalEvent` and writes it
/// to `<workspace-root>/spec/universal-schema.json` so the spec stays in sync
/// with the Rust types.
fn main() {
    // Re-run when the schema source crate changes.
    println!("cargo:rerun-if-changed=../universal-agent-schema/src/lib.rs");
    let schema = schemars::schema_for!(sandbox_agent_universal_agent_schema::UniversalEvent);
    // CARGO_MANIFEST_DIR is <root>/server/packages/<crate>; walk up three
    // levels to reach the workspace root. `expect` messages give actionable
    // diagnostics instead of the bare unwrap panics this replaced.
    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR")
        .expect("CARGO_MANIFEST_DIR should be set by cargo for build scripts");
    let workspace_root = Path::new(&manifest_dir)
        .ancestors()
        .nth(3)
        .expect("manifest dir should be at least three levels below the workspace root")
        .to_path_buf();
    let out_dir = workspace_root.join("spec");
    fs::create_dir_all(&out_dir).expect("Failed to create spec output directory");
    let json = serde_json::to_string_pretty(&schema).expect("Failed to serialize JSON schema");
    fs::write(out_dir.join("universal-schema.json"), json)
        .expect("Failed to write universal-schema.json");
}

View file

@ -0,0 +1,2 @@
// This crate exists only to trigger the build.rs script
// which generates the universal JSON schema at build time.

655
spec/universal-schema.json Normal file
View file

@ -0,0 +1,655 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "UniversalEvent",
"type": "object",
"required": [
"agent",
"data",
"id",
"sessionId",
"timestamp"
],
"properties": {
"agent": {
"type": "string"
},
"agentSessionId": {
"type": [
"string",
"null"
]
},
"data": {
"$ref": "#/definitions/UniversalEventData"
},
"id": {
"type": "integer",
"format": "uint64",
"minimum": 0.0
},
"sessionId": {
"type": "string"
},
"timestamp": {
"type": "string"
}
},
"definitions": {
"AttachmentSource": {
"oneOf": [
{
"type": "object",
"required": [
"path",
"type"
],
"properties": {
"path": {
"type": "string"
},
"type": {
"type": "string",
"enum": [
"path"
]
}
}
},
{
"type": "object",
"required": [
"type",
"url"
],
"properties": {
"type": {
"type": "string",
"enum": [
"url"
]
},
"url": {
"type": "string"
}
}
},
{
"type": "object",
"required": [
"data",
"type"
],
"properties": {
"data": {
"type": "string"
},
"encoding": {
"type": [
"string",
"null"
]
},
"type": {
"type": "string",
"enum": [
"data"
]
}
}
}
]
},
"CrashInfo": {
"type": "object",
"required": [
"message"
],
"properties": {
"details": true,
"kind": {
"type": [
"string",
"null"
]
},
"message": {
"type": "string"
}
}
},
"PermissionRequest": {
"type": "object",
"required": [
"always",
"id",
"patterns",
"permission",
"sessionId"
],
"properties": {
"always": {
"type": "array",
"items": {
"type": "string"
}
},
"id": {
"type": "string"
},
"metadata": {
"type": "object",
"additionalProperties": true
},
"patterns": {
"type": "array",
"items": {
"type": "string"
}
},
"permission": {
"type": "string"
},
"sessionId": {
"type": "string"
},
"tool": {
"anyOf": [
{
"$ref": "#/definitions/PermissionToolRef"
},
{
"type": "null"
}
]
}
}
},
"PermissionToolRef": {
"type": "object",
"required": [
"callId",
"messageId"
],
"properties": {
"callId": {
"type": "string"
},
"messageId": {
"type": "string"
}
}
},
"QuestionInfo": {
"type": "object",
"required": [
"options",
"question"
],
"properties": {
"custom": {
"type": [
"boolean",
"null"
]
},
"header": {
"type": [
"string",
"null"
]
},
"multiSelect": {
"type": [
"boolean",
"null"
]
},
"options": {
"type": "array",
"items": {
"$ref": "#/definitions/QuestionOption"
}
},
"question": {
"type": "string"
}
}
},
"QuestionOption": {
"type": "object",
"required": [
"label"
],
"properties": {
"description": {
"type": [
"string",
"null"
]
},
"label": {
"type": "string"
}
}
},
"QuestionRequest": {
"type": "object",
"required": [
"id",
"questions",
"sessionId"
],
"properties": {
"id": {
"type": "string"
},
"questions": {
"type": "array",
"items": {
"$ref": "#/definitions/QuestionInfo"
}
},
"sessionId": {
"type": "string"
},
"tool": {
"anyOf": [
{
"$ref": "#/definitions/QuestionToolRef"
},
{
"type": "null"
}
]
}
}
},
"QuestionToolRef": {
"type": "object",
"required": [
"callId",
"messageId"
],
"properties": {
"callId": {
"type": "string"
},
"messageId": {
"type": "string"
}
}
},
"Started": {
"type": "object",
"properties": {
"details": true,
"message": {
"type": [
"string",
"null"
]
}
}
},
"UniversalEventData": {
"anyOf": [
{
"type": "object",
"required": [
"message"
],
"properties": {
"message": {
"$ref": "#/definitions/UniversalMessage"
}
}
},
{
"type": "object",
"required": [
"started"
],
"properties": {
"started": {
"$ref": "#/definitions/Started"
}
}
},
{
"type": "object",
"required": [
"error"
],
"properties": {
"error": {
"$ref": "#/definitions/CrashInfo"
}
}
},
{
"type": "object",
"required": [
"questionAsked"
],
"properties": {
"questionAsked": {
"$ref": "#/definitions/QuestionRequest"
}
}
},
{
"type": "object",
"required": [
"permissionAsked"
],
"properties": {
"permissionAsked": {
"$ref": "#/definitions/PermissionRequest"
}
}
},
{
"type": "object",
"required": [
"raw"
],
"properties": {
"raw": true
}
}
]
},
"UniversalMessage": {
"anyOf": [
{
"$ref": "#/definitions/UniversalMessageParsed"
},
{
"type": "object",
"required": [
"raw"
],
"properties": {
"error": {
"type": [
"string",
"null"
]
},
"raw": true
}
}
]
},
"UniversalMessageParsed": {
"type": "object",
"required": [
"parts",
"role"
],
"properties": {
"id": {
"type": [
"string",
"null"
]
},
"metadata": {
"type": "object",
"additionalProperties": true
},
"parts": {
"type": "array",
"items": {
"$ref": "#/definitions/UniversalMessagePart"
}
},
"role": {
"type": "string"
}
}
},
"UniversalMessagePart": {
"oneOf": [
{
"type": "object",
"required": [
"text",
"type"
],
"properties": {
"text": {
"type": "string"
},
"type": {
"type": "string",
"enum": [
"text"
]
}
}
},
{
"type": "object",
"required": [
"input",
"name",
"type"
],
"properties": {
"id": {
"type": [
"string",
"null"
]
},
"input": true,
"name": {
"type": "string"
},
"type": {
"type": "string",
"enum": [
"tool_call"
]
}
}
},
{
"type": "object",
"required": [
"output",
"type"
],
"properties": {
"id": {
"type": [
"string",
"null"
]
},
"is_error": {
"type": [
"boolean",
"null"
]
},
"name": {
"type": [
"string",
"null"
]
},
"output": true,
"type": {
"type": "string",
"enum": [
"tool_result"
]
}
}
},
{
"type": "object",
"required": [
"arguments",
"type"
],
"properties": {
"arguments": true,
"id": {
"type": [
"string",
"null"
]
},
"name": {
"type": [
"string",
"null"
]
},
"raw": true,
"type": {
"type": "string",
"enum": [
"function_call"
]
}
}
},
{
"type": "object",
"required": [
"result",
"type"
],
"properties": {
"id": {
"type": [
"string",
"null"
]
},
"is_error": {
"type": [
"boolean",
"null"
]
},
"name": {
"type": [
"string",
"null"
]
},
"raw": true,
"result": true,
"type": {
"type": "string",
"enum": [
"function_result"
]
}
}
},
{
"type": "object",
"required": [
"source",
"type"
],
"properties": {
"filename": {
"type": [
"string",
"null"
]
},
"mime_type": {
"type": [
"string",
"null"
]
},
"raw": true,
"source": {
"$ref": "#/definitions/AttachmentSource"
},
"type": {
"type": "string",
"enum": [
"file"
]
}
}
},
{
"type": "object",
"required": [
"source",
"type"
],
"properties": {
"alt": {
"type": [
"string",
"null"
]
},
"mime_type": {
"type": [
"string",
"null"
]
},
"raw": true,
"source": {
"$ref": "#/definitions/AttachmentSource"
},
"type": {
"type": "string",
"enum": [
"image"
]
}
}
},
{
"type": "object",
"required": [
"message",
"type"
],
"properties": {
"message": {
"type": "string"
},
"type": {
"type": "string",
"enum": [
"error"
]
}
}
},
{
"type": "object",
"required": [
"raw",
"type"
],
"properties": {
"raw": true,
"type": {
"type": "string",
"enum": [
"unknown"
]
}
}
}
]
}
}
}

View file

@ -71,6 +71,7 @@
- [x] Add OpenCode server-mode tests (session create, prompt, SSE)
- [ ] Add tests for question/permission flows using deterministic prompts
- [x] Add HTTP/SSE snapshot tests for real agents (env-configured)
- [x] Add snapshot coverage for auth, CORS, and concurrent sessions
## Frontend (frontend/packages/inspector)
- [x] Build Vite + React app with connect screen (endpoint + optional token)