This commit is contained in:
Nathan Flurry 2026-01-27 13:56:09 -08:00
parent 34d4f3693e
commit 29b159ca20
28 changed files with 2138 additions and 395 deletions

View file

@ -0,0 +1,28 @@
import { describe, it, expect } from "vitest";
import { buildHeaders } from "../shared/sandbox-agent-client.ts";
import { setupDaytonaSandboxAgent } from "./daytona.ts";

// Only exercise the Daytona example when API credentials are configured.
const hasDaytonaKey = Boolean(process.env.DAYTONA_API_KEY);
const healthTimeoutMs =
  Number.parseInt(process.env.SANDBOX_TEST_TIMEOUT_MS || "", 10) || 300_000;
const maybeIt = hasDaytonaKey ? it : it.skip;

describe("daytona example", () => {
  maybeIt(
    "starts sandbox-agent and responds to /v1/health",
    async () => {
      const setup = await setupDaytonaSandboxAgent();
      try {
        const res = await fetch(`${setup.baseUrl}/v1/health`, {
          headers: buildHeaders({
            token: setup.token,
            extraHeaders: setup.extraHeaders,
          }),
        });
        expect(res.ok).toBe(true);
        const payload = await res.json();
        expect(payload.status).toBe("ok");
      } finally {
        await setup.cleanup();
      }
    },
    healthTimeoutMs
  );
});

View file

@ -0,0 +1,82 @@
import { Daytona } from "@daytonaio/sdk";
import { pathToFileURL } from "node:url";
import {
ensureUrl,
runPrompt,
waitForHealth,
} from "../shared/sandbox-agent-client.ts";
const INSTALL_SCRIPT = "curl -fsSL https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh";
const DEFAULT_PORT = 3000;
/**
 * Provisions a Daytona sandbox, installs sandbox-agent, and launches the
 * server in the background.
 *
 * Env vars: SANDBOX_TOKEN (optional auth token), SANDBOX_PORT (default 3000),
 * DAYTONA_LANGUAGE (default "typescript"). The Daytona SDK reads its own
 * credentials (e.g. DAYTONA_API_KEY) from the environment.
 *
 * @returns baseUrl, token, preview headers, and a cleanup() that deletes the sandbox.
 */
export async function setupDaytonaSandboxAgent(): Promise<{
  baseUrl: string;
  token: string;
  extraHeaders: Record<string, string>;
  cleanup: () => Promise<void>;
}> {
  const token = process.env.SANDBOX_TOKEN || "";
  const port = Number.parseInt(process.env.SANDBOX_PORT || "", 10) || DEFAULT_PORT;
  const language = process.env.DAYTONA_LANGUAGE || "typescript";
  const daytona = new Daytona();
  const sandbox = await daytona.create({
    language,
  });
  await sandbox.process.executeCommand(`bash -lc "${INSTALL_SCRIPT}"`);
  // BUG FIX: this previously passed `--token $SANDBOX_TOKEN`, but
  // SANDBOX_TOKEN was never exported inside the sandbox, so the server
  // started with an empty token. Pass the value directly instead. Single
  // quotes keep it one shell word (assumes the token contains no single
  // quotes — tokens are typically URL-safe; TODO confirm).
  const tokenFlag = token ? `--token '${token}'` : "--no-token";
  const serverCommand = `nohup sandbox-agent server ${tokenFlag} --host 0.0.0.0 --port ${port} >/tmp/sandbox-agent.log 2>&1 &`;
  await sandbox.process.executeCommand(`bash -lc "${serverCommand}"`);
  const preview = await sandbox.getPreviewLink(port);
  const extraHeaders: Record<string, string> = {};
  if (preview.token) {
    // Daytona preview links carry their own auth token header.
    extraHeaders["x-daytona-preview-token"] = preview.token;
  }
  extraHeaders["x-daytona-skip-preview-warning"] = "true";
  const baseUrl = ensureUrl(preview.url);
  await waitForHealth({ baseUrl, token, extraHeaders });
  const cleanup = async () => {
    try {
      await sandbox.delete(60);
    } catch {
      // ignore cleanup errors
    }
  };
  return {
    baseUrl,
    token,
    extraHeaders,
    cleanup,
  };
}
/**
 * CLI entry point: provision the sandbox, then run an interactive prompt.
 * SIGINT/SIGTERM tear the sandbox down before exiting.
 */
async function main(): Promise<void> {
  const setup = await setupDaytonaSandboxAgent();
  const shutdown = async () => {
    await setup.cleanup();
    process.exit(0);
  };
  for (const signal of ["SIGINT", "SIGTERM"] as const) {
    process.on(signal, () => {
      void shutdown();
    });
  }
  await runPrompt({
    baseUrl: setup.baseUrl,
    token: setup.token,
    extraHeaders: setup.extraHeaders,
  });
}
// Only run when executed directly (not when imported by the test file).
if (process.argv[1] && import.meta.url === pathToFileURL(process.argv[1]).href) {
  main().catch((error) => {
    console.error(error);
    process.exit(1);
  });
}

View file

@ -0,0 +1,28 @@
import { describe, it, expect } from "vitest";
import { buildHeaders } from "../shared/sandbox-agent-client.ts";
import { setupDockerSandboxAgent } from "./docker.ts";
const shouldRun = process.env.RUN_DOCKER_EXAMPLES === "1";
const timeoutMs = Number.parseInt(process.env.SANDBOX_TEST_TIMEOUT_MS || "", 10) || 300_000;
const testFn = shouldRun ? it : it.skip;
describe("docker example", () => {
testFn(
"starts sandbox-agent and responds to /v1/health",
async () => {
const { baseUrl, token, cleanup } = await setupDockerSandboxAgent();
try {
const response = await fetch(`${baseUrl}/v1/health`, {
headers: buildHeaders({ token }),
});
expect(response.ok).toBe(true);
const data = await response.json();
expect(data.status).toBe("ok");
} finally {
await cleanup();
}
},
timeoutMs
);
});

130
examples/docker/docker.ts Normal file
View file

@ -0,0 +1,130 @@
import Docker from "dockerode";
import { pathToFileURL } from "node:url";
import {
ensureUrl,
runPrompt,
waitForHealth,
} from "../shared/sandbox-agent-client.ts";
const INSTALL_SCRIPT = "curl -fsSL https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh";
const DEFAULT_IMAGE = "debian:bookworm-slim";
const DEFAULT_PORT = 2468;
/** Pulls `image` and resolves once the pull progress stream has drained. */
async function pullImage(docker: Docker, image: string): Promise<void> {
  return new Promise<void>((resolve, reject) => {
    docker.pull(image, (pullError, stream) => {
      if (pullError) {
        reject(pullError);
        return;
      }
      // followProgress fires its callback only when the pull finishes.
      docker.modem.followProgress(stream, (streamError) =>
        streamError ? reject(streamError) : resolve()
      );
    });
  });
}
/** Pulls `image` only when it is not already present locally. */
async function ensureImage(docker: Docker, image: string): Promise<void> {
  const local = docker.getImage(image);
  try {
    await local.inspect();
  } catch {
    // inspect() rejects when the image is missing — fetch it.
    await pullImage(docker, image);
  }
}
/**
 * Starts a Docker container that installs and runs sandbox-agent, then waits
 * for the server to report healthy on the mapped host port.
 *
 * Env vars:
 *   SANDBOX_TOKEN         optional auth token (also injected into the container)
 *   SANDBOX_PORT          container port (default 2468)
 *   SANDBOX_HOST_PORT     host port binding (defaults to SANDBOX_PORT)
 *   DOCKER_IMAGE          base image (default debian:bookworm-slim)
 *   DOCKER_CONTAINER_NAME optional fixed container name
 *   DOCKER_SOCKET         Docker daemon socket path
 *
 * @returns baseUrl/token plus cleanup() that stops and removes the container.
 */
export async function setupDockerSandboxAgent(): Promise<{
  baseUrl: string;
  token: string;
  cleanup: () => Promise<void>;
}> {
  const token = process.env.SANDBOX_TOKEN || "";
  const port = Number.parseInt(process.env.SANDBOX_PORT || "", 10) || DEFAULT_PORT;
  const hostPort = Number.parseInt(process.env.SANDBOX_HOST_PORT || "", 10) || port;
  const image = process.env.DOCKER_IMAGE || DEFAULT_IMAGE;
  const containerName = process.env.DOCKER_CONTAINER_NAME;
  const socketPath = process.env.DOCKER_SOCKET || "/var/run/docker.sock";
  const docker = new Docker({ socketPath });
  await ensureImage(docker, image);
  // SANDBOX_TOKEN is provided via the container Env below, so the shell can
  // expand $SANDBOX_TOKEN at startup.
  const tokenFlag = token ? "--token $SANDBOX_TOKEN" : "--no-token";
  // Install curl (slim images lack it), run the installer, then keep the
  // server in the foreground so the container stays alive.
  const command = [
    "bash",
    "-lc",
    [
      "apt-get update",
      "apt-get install -y curl ca-certificates",
      INSTALL_SCRIPT,
      `sandbox-agent server ${tokenFlag} --host 0.0.0.0 --port ${port}`,
    ].join(" && "),
  ];
  const container = await docker.createContainer({
    Image: image,
    Cmd: command,
    Env: token ? [`SANDBOX_TOKEN=${token}`] : [],
    ExposedPorts: {
      [`${port}/tcp`]: {},
    },
    HostConfig: {
      // AutoRemove: daemon deletes the container once it stops.
      AutoRemove: true,
      PortBindings: {
        [`${port}/tcp`]: [{ HostPort: `${hostPort}` }],
      },
    },
    ...(containerName ? { name: containerName } : {}),
  });
  await container.start();
  const baseUrl = ensureUrl(`http://127.0.0.1:${hostPort}`);
  await waitForHealth({ baseUrl, token });
  const cleanup = async () => {
    try {
      await container.stop({ t: 5 });
    } catch {
      // ignore stop errors
    }
    // With AutoRemove the daemon usually removes the container on stop; this
    // force-remove is a belt-and-braces fallback.
    try {
      await container.remove({ force: true });
    } catch {
      // ignore remove errors
    }
  };
  return {
    baseUrl,
    token,
    cleanup,
  };
}
/**
 * CLI entry point: start the container, then run an interactive prompt.
 * SIGINT/SIGTERM tear the container down before exiting.
 */
async function main(): Promise<void> {
  const { baseUrl, token, cleanup } = await setupDockerSandboxAgent();
  const shutdown = async () => {
    await cleanup();
    process.exit(0);
  };
  for (const signal of ["SIGINT", "SIGTERM"] as const) {
    process.on(signal, () => {
      void shutdown();
    });
  }
  await runPrompt({ baseUrl, token });
}
// Only run when executed directly (not when imported by the test file).
if (process.argv[1] && import.meta.url === pathToFileURL(process.argv[1]).href) {
  main().catch((error) => {
    console.error(error);
    process.exit(1);
  });
}

28
examples/e2b/e2b.test.ts Normal file
View file

@ -0,0 +1,28 @@
import { describe, it, expect } from "vitest";
import { buildHeaders } from "../shared/sandbox-agent-client.ts";
import { setupE2BSandboxAgent } from "./e2b.ts";

// Only exercise the E2B example when API credentials are configured.
const hasE2BKey = Boolean(process.env.E2B_API_KEY);
const healthTimeoutMs =
  Number.parseInt(process.env.SANDBOX_TEST_TIMEOUT_MS || "", 10) || 300_000;
const maybeIt = hasE2BKey ? it : it.skip;

describe("e2b example", () => {
  maybeIt(
    "starts sandbox-agent and responds to /v1/health",
    async () => {
      const { baseUrl, token, cleanup } = await setupE2BSandboxAgent();
      try {
        const res = await fetch(`${baseUrl}/v1/health`, {
          headers: buildHeaders({ token }),
        });
        expect(res.ok).toBe(true);
        const payload = await res.json();
        expect(payload.status).toBe("ok");
      } finally {
        await cleanup();
      }
    },
    healthTimeoutMs
  );
});

87
examples/e2b/e2b.ts Normal file
View file

@ -0,0 +1,87 @@
import { Sandbox } from "@e2b/code-interpreter";
import { pathToFileURL } from "node:url";
import {
ensureUrl,
runPrompt,
waitForHealth,
} from "../shared/sandbox-agent-client.ts";
const INSTALL_SCRIPT = "curl -fsSL https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh";
const DEFAULT_PORT = 2468;
type CommandRunner = (command: string, options?: Record<string, unknown>) => Promise<unknown>;
/**
 * Picks whichever command API this E2B SDK version exposes:
 * `commands.run` on newer SDKs, `commands.exec` on older ones.
 */
function resolveCommandRunner(sandbox: Sandbox): CommandRunner {
  const commands = sandbox.commands;
  const runner = commands?.run ?? commands?.exec;
  if (!runner) {
    throw new Error("E2B SDK does not expose commands.run or commands.exec");
  }
  return runner.bind(commands);
}
/**
 * Boots an E2B sandbox, installs sandbox-agent, starts the server in the
 * background, and waits for it to report healthy.
 *
 * Env vars: SANDBOX_TOKEN (optional auth token), SANDBOX_PORT (default 2468).
 *
 * @returns baseUrl/token plus cleanup() that kills the sandbox.
 */
export async function setupE2BSandboxAgent(): Promise<{
  baseUrl: string;
  token: string;
  cleanup: () => Promise<void>;
}> {
  const token = process.env.SANDBOX_TOKEN || "";
  const port = Number.parseInt(process.env.SANDBOX_PORT || "", 10) || DEFAULT_PORT;
  // The token is injected into the sandbox env so $SANDBOX_TOKEN expands.
  const tokenEnv = token ? { SANDBOX_TOKEN: token } : undefined;
  const sandbox = await Sandbox.create({
    allowInternetAccess: true,
    envs: tokenEnv,
  });
  const runCommand = resolveCommandRunner(sandbox);
  await runCommand(`bash -lc "${INSTALL_SCRIPT}"`);
  const tokenFlag = token ? "--token $SANDBOX_TOKEN" : "--no-token";
  const serverCmd = `bash -lc "sandbox-agent server ${tokenFlag} --host 0.0.0.0 --port ${port}"`;
  await runCommand(serverCmd, {
    background: true,
    envs: tokenEnv,
  });
  const baseUrl = ensureUrl(sandbox.getHost(port));
  await waitForHealth({ baseUrl, token });
  const cleanup = async () => {
    try {
      await sandbox.kill();
    } catch {
      // best-effort teardown; sandbox may already be gone
    }
  };
  return {
    baseUrl,
    token,
    cleanup,
  };
}
/**
 * CLI entry point: boot the sandbox, then run an interactive prompt.
 * SIGINT/SIGTERM tear the sandbox down before exiting.
 */
async function main(): Promise<void> {
  const { baseUrl, token, cleanup } = await setupE2BSandboxAgent();
  const shutdown = async () => {
    await cleanup();
    process.exit(0);
  };
  for (const signal of ["SIGINT", "SIGTERM"] as const) {
    process.on(signal, () => {
      void shutdown();
    });
  }
  await runPrompt({ baseUrl, token });
}
// Only run when executed directly (not when imported by the test file).
if (process.argv[1] && import.meta.url === pathToFileURL(process.argv[1]).href) {
  main().catch((error) => {
    console.error(error);
    process.exit(1);
  });
}

25
examples/package.json Normal file
View file

@ -0,0 +1,25 @@
{
"name": "sandbox-agent-examples",
"private": true,
"type": "module",
"scripts": {
"test": "vitest run",
"test:watch": "vitest",
"start:docker": "tsx docker/docker.ts",
"start:e2b": "tsx e2b/e2b.ts",
"start:daytona": "tsx daytona/daytona.ts",
"start:vercel": "tsx vercel/vercel-sandbox.ts"
},
"dependencies": {
"@daytonaio/sdk": "latest",
"@e2b/code-interpreter": "latest",
"@vercel/sandbox": "latest",
"dockerode": "latest"
},
"devDependencies": {
"@types/node": "latest",
"tsx": "latest",
"typescript": "latest",
"vitest": "latest"
}
}

View file

@ -0,0 +1,288 @@
import { createInterface } from "node:readline";
import { randomUUID } from "node:crypto";
import { setTimeout as delay } from "node:timers/promises";
/** Strips trailing slashes so request paths can be appended safely. */
export function normalizeBaseUrl(baseUrl: string): string {
  let end = baseUrl.length;
  while (end > 0 && baseUrl[end - 1] === "/") {
    end -= 1;
  }
  return baseUrl.slice(0, end);
}
/**
 * Guarantees a fetchable URL: rejects empty input and prefixes `https://`
 * onto bare hostnames that carry no scheme.
 */
export function ensureUrl(rawUrl: string): string {
  if (!rawUrl) {
    throw new Error("Missing sandbox URL");
  }
  const hasScheme = /^https?:\/\//.test(rawUrl);
  return hasScheme ? rawUrl : `https://${rawUrl}`;
}
type HeaderOptions = {
  token?: string;
  extraHeaders?: Record<string, string>;
  contentType?: boolean;
};
/**
 * Assembles request headers: caller-supplied extras first, then a Bearer
 * Authorization header when a token is present, then an optional JSON
 * Content-Type.
 */
export function buildHeaders({ token, extraHeaders, contentType = false }: HeaderOptions): HeadersInit {
  const headers: Record<string, string> = { ...extraHeaders };
  if (token) {
    headers.Authorization = `Bearer ${token}`;
  }
  if (contentType) {
    headers["Content-Type"] = "application/json";
  }
  return headers;
}
/**
 * Performs an HTTP request and parses the JSON body.
 * Throws with status + body text on any non-2xx response; an empty body
 * resolves to `{}`. Content-Type is sent only when a body is present.
 */
async function fetchJson(
  url: string,
  {
    token,
    extraHeaders,
    method = "GET",
    body,
  }: {
    token?: string;
    extraHeaders?: Record<string, string>;
    method?: string;
    body?: unknown;
  } = {}
): Promise<any> {
  const hasBody = body !== undefined;
  const response = await fetch(url, {
    method,
    headers: buildHeaders({ token, extraHeaders, contentType: hasBody }),
    body: hasBody ? JSON.stringify(body) : undefined,
  });
  const text = await response.text();
  if (!response.ok) {
    throw new Error(`HTTP ${response.status} ${response.statusText}: ${text}`);
  }
  if (!text) {
    return {};
  }
  return JSON.parse(text);
}
/**
 * Polls `/v1/health` every 500 ms until it reports `{ status: "ok" }` or
 * `timeoutMs` elapses, in which case the last observed failure is rethrown.
 */
export async function waitForHealth({
  baseUrl,
  token,
  extraHeaders,
  timeoutMs = 120_000,
}: {
  baseUrl: string;
  token?: string;
  extraHeaders?: Record<string, string>;
  timeoutMs?: number;
}): Promise<void> {
  const healthUrl = `${normalizeBaseUrl(baseUrl)}/v1/health`;
  const deadline = Date.now() + timeoutMs;
  let lastFailure: unknown;
  while (Date.now() < deadline) {
    try {
      const payload = await fetchJson(healthUrl, { token, extraHeaders });
      if (payload?.status === "ok") {
        return;
      }
      lastFailure = new Error(`Unexpected health response: ${JSON.stringify(payload)}`);
    } catch (error) {
      lastFailure = error;
    }
    await delay(500);
  }
  throw (lastFailure ?? new Error("Timed out waiting for /v1/health")) as Error;
}
/**
 * Creates a new agent session and returns its generated id.
 * Every option falls back to the matching SANDBOX_* env var; only options
 * that resolve to a value are included in the request body.
 */
export async function createSession({
  baseUrl,
  token,
  extraHeaders,
  agentId,
  agentMode,
  permissionMode,
  model,
  variant,
  agentVersion,
}: {
  baseUrl: string;
  token?: string;
  extraHeaders?: Record<string, string>;
  agentId?: string;
  agentMode?: string;
  permissionMode?: string;
  model?: string;
  variant?: string;
  agentVersion?: string;
}): Promise<string> {
  const sessionId = randomUUID();
  const body: Record<string, string> = {
    agent: agentId || process.env.SANDBOX_AGENT || "codex",
  };
  // Optional session parameters: explicit argument wins, then env var.
  const optionalFields: Array<[string, string | undefined]> = [
    ["agentMode", agentMode || process.env.SANDBOX_AGENT_MODE],
    ["permissionMode", permissionMode || process.env.SANDBOX_PERMISSION_MODE],
    ["model", model || process.env.SANDBOX_MODEL],
    ["variant", variant || process.env.SANDBOX_VARIANT],
    ["agentVersion", agentVersion || process.env.SANDBOX_AGENT_VERSION],
  ];
  for (const [key, value] of optionalFields) {
    if (value) {
      body[key] = value;
    }
  }
  await fetchJson(`${normalizeBaseUrl(baseUrl)}/v1/sessions/${sessionId}`, {
    token,
    extraHeaders,
    method: "POST",
    body,
  });
  return sessionId;
}
/** Posts one user message to an existing session. */
export async function sendMessage({
  baseUrl,
  token,
  extraHeaders,
  sessionId,
  message,
}: {
  baseUrl: string;
  token?: string;
  extraHeaders?: Record<string, string>;
  sessionId: string;
  message: string;
}): Promise<void> {
  const url = `${normalizeBaseUrl(baseUrl)}/v1/sessions/${sessionId}/messages`;
  await fetchJson(url, {
    token,
    extraHeaders,
    method: "POST",
    body: { message },
  });
}
/**
 * Flattens an item's text parts into one string; when no non-blank text
 * exists, falls back to pretty-printed JSON of the whole content array.
 */
function extractTextFromItem(item: any): string {
  if (!item?.content) return "";
  const joined = item.content
    .flatMap((part: any) => (part?.type === "text" ? [part.text || ""] : []))
    .join("");
  return joined.trim() ? joined : JSON.stringify(item.content, null, 2);
}
/**
 * Polls the session event feed until an assistant "message" item completes,
 * then returns its text and the highest event sequence observed (pass that
 * back as `offset` to resume after this message).
 *
 * NOTE(review): the next poll reuses the max `sequence` as `offset=`; whether
 * the server treats that offset as inclusive or exclusive is not visible here
 * — confirm, or boundary events may be re-delivered.
 *
 * @throws Error after `timeoutMs` without a completed assistant message.
 */
export async function waitForAssistantComplete({
  baseUrl,
  token,
  extraHeaders,
  sessionId,
  offset = 0,
  timeoutMs = 120_000,
}: {
  baseUrl: string;
  token?: string;
  extraHeaders?: Record<string, string>;
  sessionId: string;
  offset?: number;
  timeoutMs?: number;
}): Promise<{ text: string; offset: number }> {
  const normalized = normalizeBaseUrl(baseUrl);
  const deadline = Date.now() + timeoutMs;
  let currentOffset = offset;
  while (Date.now() < deadline) {
    const data = await fetchJson(
      `${normalized}/v1/sessions/${sessionId}/events?offset=${currentOffset}&limit=100`,
      { token, extraHeaders }
    );
    for (const event of data.events || []) {
      // Track the furthest point of the stream we have consumed.
      if (typeof event.sequence === "number") {
        currentOffset = Math.max(currentOffset, event.sequence);
      }
      // Done as soon as a completed assistant message arrives; other item
      // kinds (tool calls, etc.) are skipped.
      if (
        event.type === "item.completed" &&
        event.data?.item?.kind === "message" &&
        event.data?.item?.role === "assistant"
      ) {
        return {
          text: extractTextFromItem(event.data.item),
          offset: currentOffset,
        };
      }
    }
    // Back off only when the feed is drained; when hasMore is set, fetch the
    // next page immediately.
    if (!data.hasMore) {
      await delay(300);
    }
  }
  throw new Error("Timed out waiting for assistant response");
}
/**
 * Runs an interactive REPL against a fresh session: each input line is sent
 * as a message and the loop blocks until the assistant's reply completes.
 * Type /exit (or close stdin) to quit; closing the readline interface exits
 * the process.
 */
export async function runPrompt({
  baseUrl,
  token,
  extraHeaders,
  agentId,
}: {
  baseUrl: string;
  token?: string;
  extraHeaders?: Record<string, string>;
  agentId?: string;
}): Promise<void> {
  const sessionId = await createSession({ baseUrl, token, extraHeaders, agentId });
  // Event-feed cursor; advanced after every assistant reply so earlier
  // events are not re-scanned.
  let offset = 0;
  console.log(`Session ${sessionId} ready. Type /exit to quit.`);
  const rl = createInterface({
    input: process.stdin,
    output: process.stdout,
    prompt: "> ",
  });
  const handleLine = async (line: string) => {
    const trimmed = line.trim();
    // Ignore blank lines.
    if (!trimmed) {
      rl.prompt();
      return;
    }
    if (trimmed === "/exit") {
      rl.close();
      return;
    }
    try {
      await sendMessage({ baseUrl, token, extraHeaders, sessionId, message: trimmed });
      const result = await waitForAssistantComplete({
        baseUrl,
        token,
        extraHeaders,
        sessionId,
        offset,
      });
      offset = result.offset;
      process.stdout.write(`${result.text}\n`);
    } catch (error) {
      // Report the failure but keep the REPL alive.
      console.error(error instanceof Error ? error.message : error);
    }
    rl.prompt();
  };
  rl.on("line", (line) => {
    // readline handlers are synchronous — detach the async work.
    void handleLine(line);
  });
  rl.on("close", () => {
    process.exit(0);
  });
  rl.prompt();
}

13
examples/tsconfig.json Normal file
View file

@ -0,0 +1,13 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"lib": ["ES2022", "DOM"],
"types": ["node"],
"strict": true,
"skipLibCheck": true,
"noEmit": true
},
"include": ["**/*.ts"]
}

View file

@ -0,0 +1,34 @@
import { describe, it, expect } from "vitest";
import { buildHeaders } from "../shared/sandbox-agent-client.ts";
import { setupVercelSandboxAgent } from "./vercel-sandbox.ts";

// Run when either ambient OIDC auth or the explicit token/team/project
// triple is available.
const hasOidcAuth = Boolean(process.env.VERCEL_OIDC_TOKEN);
const hasTokenAuth = Boolean(
  process.env.VERCEL_TOKEN &&
    process.env.VERCEL_TEAM_ID &&
    process.env.VERCEL_PROJECT_ID
);
const healthTimeoutMs =
  Number.parseInt(process.env.SANDBOX_TEST_TIMEOUT_MS || "", 10) || 300_000;
const maybeIt = hasOidcAuth || hasTokenAuth ? it : it.skip;

describe("vercel sandbox example", () => {
  maybeIt(
    "starts sandbox-agent and responds to /v1/health",
    async () => {
      const { baseUrl, token, cleanup } = await setupVercelSandboxAgent();
      try {
        const res = await fetch(`${baseUrl}/v1/health`, {
          headers: buildHeaders({ token }),
        });
        expect(res.ok).toBe(true);
        const payload = await res.json();
        expect(payload.status).toBe("ok");
      } finally {
        await cleanup();
      }
    },
    healthTimeoutMs
  );
});

View file

@ -0,0 +1,103 @@
import { Sandbox } from "@vercel/sandbox";
import { pathToFileURL } from "node:url";
import {
ensureUrl,
runPrompt,
waitForHealth,
} from "../shared/sandbox-agent-client.ts";
const INSTALL_SCRIPT = "curl -fsSL https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh";
const DEFAULT_PORT = 2468;
type VercelSandboxOptions = {
runtime: string;
ports: number[];
token?: string;
teamId?: string;
projectId?: string;
};
/**
 * Creates a Vercel Sandbox, installs sandbox-agent, and starts the server
 * detached.
 *
 * Auth: ambient OIDC by default; falls back to explicit
 * VERCEL_TOKEN/VERCEL_TEAM_ID/VERCEL_PROJECT_ID when all three are set.
 * Env vars: SANDBOX_TOKEN (optional), SANDBOX_PORT (default 2468),
 * VERCEL_RUNTIME (default "node24").
 *
 * @returns baseUrl/token plus cleanup() that stops the sandbox.
 */
export async function setupVercelSandboxAgent(): Promise<{
  baseUrl: string;
  token: string;
  cleanup: () => Promise<void>;
}> {
  const token = process.env.SANDBOX_TOKEN || "";
  const port = Number.parseInt(process.env.SANDBOX_PORT || "", 10) || DEFAULT_PORT;
  const runtime = process.env.VERCEL_RUNTIME || "node24";
  const createOptions: VercelSandboxOptions = {
    runtime,
    ports: [port],
  };
  const accessToken = process.env.VERCEL_TOKEN;
  const teamId = process.env.VERCEL_TEAM_ID;
  const projectId = process.env.VERCEL_PROJECT_ID;
  if (accessToken && teamId && projectId) {
    createOptions.token = accessToken;
    createOptions.teamId = teamId;
    createOptions.projectId = projectId;
  }
  const sandbox = await Sandbox.create(createOptions);
  await sandbox.runCommand({
    cmd: "bash",
    args: ["-lc", INSTALL_SCRIPT],
    sudo: true,
  });
  const tokenFlag = token ? "--token $SANDBOX_TOKEN" : "--no-token";
  // BUG FIX: the token was interpolated unquoted (`SANDBOX_TOKEN=${token}`),
  // which breaks the command — or injects shell syntax — if the token
  // contains whitespace or metacharacters. Single-quote it, escaping any
  // embedded single quotes.
  const quotedToken = `'${token.replace(/'/g, `'\\''`)}'`;
  await sandbox.runCommand({
    cmd: "bash",
    args: [
      "-lc",
      `SANDBOX_TOKEN=${quotedToken} sandbox-agent server ${tokenFlag} --host 0.0.0.0 --port ${port}`,
    ],
    sudo: true,
    detached: true,
  });
  const baseUrl = ensureUrl(sandbox.domain(port));
  await waitForHealth({ baseUrl, token });
  const cleanup = async () => {
    try {
      await sandbox.stop();
    } catch {
      // ignore cleanup errors
    }
  };
  return {
    baseUrl,
    token,
    cleanup,
  };
}
/**
 * CLI entry point: create the sandbox, then run an interactive prompt.
 * SIGINT/SIGTERM stop the sandbox before exiting.
 */
async function main(): Promise<void> {
  const { baseUrl, token, cleanup } = await setupVercelSandboxAgent();
  const shutdown = async () => {
    await cleanup();
    process.exit(0);
  };
  for (const signal of ["SIGINT", "SIGTERM"] as const) {
    process.on(signal, () => {
      void shutdown();
    });
  }
  await runPrompt({ baseUrl, token });
}
// Only run when executed directly (not when imported by the test file).
if (process.argv[1] && import.meta.url === pathToFileURL(process.argv[1]).href) {
  main().catch((error) => {
    console.error(error);
    process.exit(1);
  });
}

View file

@ -0,0 +1,9 @@
import { defineConfig } from "vitest/config";
// Shared Vitest config for the sandbox-provider examples.
// Generous 5-minute timeouts: each test provisions a real remote sandbox.
export default defineConfig({
  test: {
    include: ["**/*.test.ts"],
    testTimeout: 300_000,
    hookTimeout: 300_000,
  },
});

View file

@ -0,0 +1,117 @@
import * as fs from "node:fs/promises";
import * as path from "node:path";
import { $ } from "execa";
import type { ReleaseOpts } from "./main.js";
import {
assertDirExists,
copyReleasesPath,
deleteReleasesPath,
listReleasesObjects,
uploadContentToReleases,
uploadDirToReleases,
uploadFileToReleases,
} from "./utils.js";
const PREFIX = "sandbox-agent";
const BINARY_FILES = [
"sandbox-agent-x86_64-unknown-linux-musl",
"sandbox-agent-x86_64-pc-windows-gnu.exe",
"sandbox-agent-x86_64-apple-darwin",
"sandbox-agent-aarch64-apple-darwin",
];
/**
* Build TypeScript SDK and upload to commit directory.
* This is called during setup-ci phase.
*/
/**
 * Builds the TypeScript SDK and uploads its dist/ output to the
 * commit-addressed releases directory. Called during the setup-ci phase.
 */
export async function buildAndUploadArtifacts(opts: ReleaseOpts) {
  console.log("==> Building TypeScript SDK");
  const sdkDir = path.join(opts.root, "sdks", "typescript");
  const $$ = $({ stdio: "inherit", cwd: sdkDir });
  await $$`pnpm install`;
  await $$`pnpm run build`;
  const distPath = path.join(sdkDir, "dist");
  await assertDirExists(distPath);
  const targetPrefix = `${PREFIX}/${opts.commit}/typescript/`;
  console.log(`==> Uploading TypeScript SDK to ${targetPrefix}`);
  await uploadDirToReleases(distPath, targetPrefix);
  console.log("✅ TypeScript SDK artifacts uploaded");
}
/**
* Promote artifacts from commit directory to version directory.
* This is called during complete-ci phase.
*/
/**
 * Promote artifacts from commit directory to version directory.
 * This is called during complete-ci phase.
 * Only the TypeScript SDK is promoted today; add further
 * promotePath(opts, "<name>") calls here for new artifact families.
 */
export async function promoteArtifacts(opts: ReleaseOpts) {
  // Promote TypeScript SDK
  await promotePath(opts, "typescript");
}
/**
 * Copies one artifact family from the commit prefix to the version prefix
 * (and to latest/ when this release is flagged as latest). Throws when the
 * commit prefix holds no files — nothing to promote means a broken build.
 */
async function promotePath(opts: ReleaseOpts, name: string) {
  console.log(`==> Promoting ${name} artifacts`);
  const sourcePrefix = `${PREFIX}/${opts.commit}/${name}/`;
  const listing = await listReleasesObjects(sourcePrefix);
  const contents = listing?.Contents;
  if (!Array.isArray(contents) || contents.length === 0) {
    throw new Error(`No files found under ${sourcePrefix}`);
  }
  const destinations = [`${PREFIX}/${opts.version}/${name}/`];
  if (opts.latest) {
    destinations.push(`${PREFIX}/latest/${name}/`);
  }
  for (const destination of destinations) {
    await copyPath(sourcePrefix, destination);
  }
}
/** Replaces the contents of targetPrefix with a copy of sourcePrefix. */
async function copyPath(sourcePrefix: string, targetPrefix: string) {
  console.log(`Copying ${sourcePrefix} -> ${targetPrefix}`);
  // Clear stale files first so the target exactly mirrors the source.
  await deleteReleasesPath(targetPrefix);
  await copyReleasesPath(sourcePrefix, targetPrefix);
}
/**
* Upload install script with version substitution.
*/
/**
 * Upload install script with version substitution.
 * Substitutes __VERSION__ in static/install.sh and uploads one copy per
 * remote directory: the concrete version, plus latest/ when promoted.
 */
export async function uploadInstallScripts(opts: ReleaseOpts) {
  const installPath = path.join(opts.root, "scripts", "release", "static", "install.sh");
  // Read the template once; `const` — it is never reassigned (was `let`).
  const installTemplate = await fs.readFile(installPath, "utf8");
  const uploadForVersion = async (versionValue: string, remoteVersion: string) => {
    const content = installTemplate.replace(/__VERSION__/g, versionValue);
    const uploadKey = `${PREFIX}/${remoteVersion}/install.sh`;
    console.log(`Uploading install script: ${uploadKey}`);
    await uploadContentToReleases(content, uploadKey);
  };
  await uploadForVersion(opts.version, opts.version);
  if (opts.latest) {
    // The latest/ copy embeds "latest" so installs always track the newest release.
    await uploadForVersion("latest", "latest");
  }
}
/**
* Upload compiled binaries from dist/ directory.
*/
/**
 * Uploads the compiled binaries from dist/ to the version directory (and to
 * latest/ when applicable). Fails fast if any expected binary is missing.
 */
export async function uploadBinaries(opts: ReleaseOpts) {
  const distDir = path.join(opts.root, "dist");
  await assertDirExists(distDir);
  for (const fileName of BINARY_FILES) {
    const localPath = path.join(distDir, fileName);
    const present = await fs.access(localPath).then(
      () => true,
      () => false
    );
    if (!present) {
      throw new Error(`Missing binary: ${localPath}`);
    }
    console.log(`Uploading binary: ${fileName}`);
    const targets = [`${PREFIX}/${opts.version}/${fileName}`];
    if (opts.latest) {
      targets.push(`${PREFIX}/latest/${fileName}`);
    }
    for (const target of targets) {
      await uploadFileToReleases(localPath, target);
    }
  }
  console.log("✅ Binaries uploaded");
}

74
scripts/release/git.ts Normal file
View file

@ -0,0 +1,74 @@
import { $ } from "execa";
import * as semver from "semver";
import type { ReleaseOpts } from "./main.js";
/** Fails the release when the working tree has uncommitted changes. */
export async function validateGit(_opts: ReleaseOpts) {
  const { stdout } = await $`git status --porcelain`;
  if (stdout.trim() !== "") {
    throw new Error(
      "There are uncommitted changes. Please commit or stash them.",
    );
  }
}
/** Force-creates tag v<version> locally and force-pushes it to origin. */
export async function createAndPushTag(opts: ReleaseOpts) {
  console.log(`Creating tag v${opts.version}...`);
  const git = $({ stdio: "inherit", cwd: opts.root });
  try {
    await git`git tag -f v${opts.version}`;
    await git`git push origin v${opts.version} -f`;
    console.log(`✅ Tag v${opts.version} created and pushed`);
  } catch (err) {
    console.error("❌ Failed to create or push tag");
    throw err;
  }
}
/**
 * Creates (or retargets) the GitHub release for `opts.version` via the gh CLI.
 * An existing release is re-pointed at tag v<version>; a new release gets
 * auto-generated notes and, for semver prereleases, the --prerelease flag.
 *
 * NOTE(review): the prerelease flag is applied only on the create path — a
 * pre-existing release edited here keeps its previous prerelease state;
 * confirm that is intended.
 */
export async function createGitHubRelease(opts: ReleaseOpts) {
  console.log("Creating GitHub release...");
  try {
    console.log(`Looking for existing release for ${opts.version}`);
    const { stdout: releaseJson } = await $({
      cwd: opts.root,
    })`gh release list --json name,tagName`;
    const releases = JSON.parse(releaseJson);
    // Releases are titled with the bare version (no leading "v").
    const existingRelease = releases.find(
      (r: { name: string }) => r.name === opts.version,
    );
    if (existingRelease) {
      console.log(
        `Updating release ${opts.version} to point to tag v${opts.version}`,
      );
      await $({
        stdio: "inherit",
        cwd: opts.root,
      })`gh release edit ${existingRelease.tagName} --tag v${opts.version}`;
    } else {
      console.log(
        `Creating new release ${opts.version} pointing to tag v${opts.version}`,
      );
      await $({
        stdio: "inherit",
        cwd: opts.root,
      })`gh release create v${opts.version} --title ${opts.version} --generate-notes`;
      // Mark as prerelease if needed
      const parsed = semver.parse(opts.version);
      if (parsed && parsed.prerelease.length > 0) {
        await $({
          stdio: "inherit",
          cwd: opts.root,
        })`gh release edit v${opts.version} --prerelease`;
      }
    }
    console.log("✅ GitHub release created/updated");
  } catch (err) {
    console.error("❌ Failed to create GitHub release");
    console.warn("! You may need to create the release manually");
    throw err;
  }
}

View file

@ -47,6 +47,8 @@ const STEPS = [
"upload-typescript",
"upload-install",
"upload-binaries",
"push-tag",
"create-github-release",
] as const;
const PHASES = ["setup-local", "setup-ci", "complete-ci"] as const;
@ -71,6 +73,8 @@ const PHASE_MAP: Record<Phase, Step[]> = {
"upload-typescript",
"upload-install",
"upload-binaries",
"push-tag",
"create-github-release",
],
};
@ -207,6 +211,79 @@ function shouldTagAsLatest(version: string) {
return compareSemver(parsed, parseSemver(latestStable)) > 0;
}
/**
 * Returns true when `packageName@version` is already published to npm.
 *
 * Uses `npm view <pkg>@<ver> version`, which exits 0 only when that exact
 * version exists. Any failure (version missing, package missing, network
 * error) is treated as "not published" so the subsequent publish attempt
 * surfaces the real error.
 *
 * BUG FIX: the previous stderr pattern-matching was dead code — every
 * non-zero exit returned false regardless of the message — so only the
 * exit status matters.
 */
function npmVersionExists(packageName: string, version: string): boolean {
  const result = spawnSync("npm", ["view", `${packageName}@${version}`, "version"], {
    stdio: ["ignore", "pipe", "pipe"],
    encoding: "utf8",
  });
  return result.status === 0;
}
/**
 * Returns true when `cargo search` reports `crateName` at exactly `version`.
 * Any cargo failure is treated as "not published" so the publish step can
 * surface the real error.
 */
function crateVersionExists(crateName: string, version: string): boolean {
  const result = spawnSync("cargo", ["search", crateName, "--limit", "1"], {
    stdio: ["ignore", "pipe", "pipe"],
    encoding: "utf8",
  });
  if (result.status !== 0) {
    return false;
  }
  // cargo search prints lines like: crate_name = "1.2.3"  # description
  const firstMatch = (result.stdout || "").match(
    new RegExp(`^${crateName}\\s*=\\s*"([^"]+)"`)
  );
  return firstMatch !== null && firstMatch[1] === version;
}
// Force-creates and force-pushes tag v<version>; relies on the shared `run`
// helper (defined elsewhere in this file) to throw on non-zero exit.
function createAndPushTag(rootDir: string, version: string) {
  console.log(`==> Creating tag v${version}`);
  run("git", ["tag", "-f", `v${version}`], { cwd: rootDir });
  run("git", ["push", "origin", `v${version}`, "-f"], { cwd: rootDir });
  console.log(`Tag v${version} created and pushed`);
}
// Creates the GitHub release for v<version>, or retargets it when one with
// that tag already exists. A failed `gh release list` is ignored and we fall
// through to create, which then surfaces the real error. Prerelease versions
// (per parseSemver, defined elsewhere in this file) get --prerelease on
// creation only — an edited release keeps its previous prerelease state.
function createGitHubRelease(rootDir: string, version: string) {
  console.log(`==> Creating GitHub release for v${version}`);
  // Check if release already exists
  const listResult = spawnSync("gh", ["release", "list", "--json", "tagName"], {
    cwd: rootDir,
    stdio: ["ignore", "pipe", "pipe"],
    encoding: "utf8",
  });
  if (listResult.status === 0) {
    const releases = JSON.parse(listResult.stdout || "[]");
    const exists = releases.some((r: { tagName: string }) => r.tagName === `v${version}`);
    if (exists) {
      console.log(`Release v${version} already exists, updating...`);
      run("gh", ["release", "edit", `v${version}`, "--tag", `v${version}`], { cwd: rootDir });
      return;
    }
  }
  // Create new release
  const isPrerelease = parseSemver(version).prerelease.length > 0;
  const releaseArgs = ["release", "create", `v${version}`, "--title", version, "--generate-notes"];
  if (isPrerelease) {
    releaseArgs.push("--prerelease");
  }
  run("gh", releaseArgs, { cwd: rootDir });
  console.log(`GitHub release v${version} created`);
}
function getAwsEnv() {
const accessKey =
process.env.AWS_ACCESS_KEY_ID || process.env.R2_RELEASES_ACCESS_KEY_ID;
@ -416,7 +493,12 @@ function publishCrates(rootDir: string, version: string) {
updateVersion(rootDir, version);
for (const crate of CRATE_ORDER) {
console.log(`==> Publishing sandbox-agent-${crate}`);
const crateName = `sandbox-agent-${crate}`;
if (crateVersionExists(crateName, version)) {
console.log(`==> Skipping ${crateName}@${version} (already published)`);
continue;
}
console.log(`==> Publishing ${crateName}`);
const crateDir = path.join(rootDir, "server", "packages", crate);
run("cargo", ["publish", "--allow-dirty"], { cwd: crateDir });
console.log("Waiting 30s for index...");
@ -426,7 +508,14 @@ function publishCrates(rootDir: string, version: string) {
function publishNpmSdk(rootDir: string, version: string, latest: boolean) {
const sdkDir = path.join(rootDir, "sdks", "typescript");
console.log("==> Publishing TypeScript SDK to npm");
const packageName = "sandbox-agent";
if (npmVersionExists(packageName, version)) {
console.log(`==> Skipping ${packageName}@${version} (already published)`);
return;
}
console.log(`==> Publishing ${packageName}@${version} to npm`);
const npmTag = getNpmTag(version, latest);
run("npm", ["version", version, "--no-git-tag-version", "--allow-same-version"], { cwd: sdkDir });
run("pnpm", ["install"], { cwd: sdkDir });
@ -442,6 +531,13 @@ function publishNpmCli(rootDir: string, version: string, latest: boolean) {
const npmTag = getNpmTag(version, latest);
for (const [target, info] of Object.entries(PLATFORM_MAP)) {
const packageName = `@sandbox-agent/cli-${info.pkg}`;
if (npmVersionExists(packageName, version)) {
console.log(`==> Skipping ${packageName}@${version} (already published)`);
continue;
}
const platformDir = path.join(cliDir, "platforms", info.pkg);
const binDir = path.join(platformDir, "bin");
fs.mkdirSync(binDir, { recursive: true });
@ -451,14 +547,20 @@ function publishNpmCli(rootDir: string, version: string, latest: boolean) {
fs.copyFileSync(srcBinary, dstBinary);
if (info.ext !== ".exe") fs.chmodSync(dstBinary, 0o755);
console.log(`==> Publishing @sandbox-agent/cli-${info.pkg}`);
console.log(`==> Publishing ${packageName}@${version}`);
run("npm", ["version", version, "--no-git-tag-version", "--allow-same-version"], { cwd: platformDir });
const publishArgs = ["publish", "--access", "public"];
if (npmTag) publishArgs.push("--tag", npmTag);
run("npm", publishArgs, { cwd: platformDir });
}
console.log("==> Publishing @sandbox-agent/cli");
const mainPackageName = "@sandbox-agent/cli";
if (npmVersionExists(mainPackageName, version)) {
console.log(`==> Skipping ${mainPackageName}@${version} (already published)`);
return;
}
console.log(`==> Publishing ${mainPackageName}@${version}`);
const pkgPath = path.join(cliDir, "package.json");
const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf8"));
pkg.version = version;
@ -617,26 +719,26 @@ async function main() {
}
}
if (shouldRun("trigger-workflow")) {
console.log("==> Triggering release workflow");
const branch = runCapture("git", ["rev-parse", "--abbrev-ref", "HEAD"], { cwd: rootDir });
const latestFlag = latest ? "true" : "false";
run(
"gh",
[
"workflow",
"run",
".github/workflows/release.yaml",
"-f",
`version=${version}`,
"-f",
`latest=${latestFlag}`,
"--ref",
branch,
],
{ cwd: rootDir },
);
}
// if (shouldRun("trigger-workflow")) {
// console.log("==> Triggering release workflow");
// const branch = runCapture("git", ["rev-parse", "--abbrev-ref", "HEAD"], { cwd: rootDir });
// const latestFlag = latest ? "true" : "false";
// run(
// "gh",
// [
// "workflow",
// "run",
// ".github/workflows/release.yaml",
// "-f",
// `version=${version}`,
// "-f",
// `latest=${latestFlag}`,
// "--ref",
// branch,
// ],
// { cwd: rootDir },
// );
// }
if (shouldRun("run-checks")) {
runChecks(rootDir);
@ -665,6 +767,14 @@ async function main() {
if (shouldRun("upload-binaries")) {
uploadBinaries(rootDir, version, latest);
}
if (shouldRun("push-tag")) {
createAndPushTag(rootDir, version);
}
if (shouldRun("create-github-release")) {
createGitHubRelease(rootDir, version);
}
}
main().catch((err) => {

View file

@ -0,0 +1,19 @@
{
"name": "release",
"version": "0.1.0",
"private": true,
"type": "module",
"scripts": {
"check-types": "tsc --noEmit"
},
"devDependencies": {
"@types/node": "^22.0.0",
"@types/semver": "^7.5.8"
},
"dependencies": {
"commander": "^12.1.0",
"execa": "^9.5.0",
"glob": "^10.3.10",
"semver": "^7.6.0"
}
}

166
scripts/release/publish.ts Normal file
View file

@ -0,0 +1,166 @@
import * as fs from "node:fs/promises";
import * as path from "node:path";
import { $ } from "execa";
import * as semver from "semver";
import type { ReleaseOpts } from "./main.js";
// Workspace crates to publish to crates.io, ordered so that every crate is
// published before the crates that depend on it. Each entry is appended to
// the "sandbox-agent-" prefix when the crate name is built (see publishCrates).
const CRATE_ORDER = [
  "error",
  "agent-credentials",
  "agent-schema",
  "universal-agent-schema",
  "agent-management",
  "sandbox-agent",
];

// Per-target metadata for the npm platform packages:
//   pkg — suffix of the "@sandbox-agent/cli-<pkg>" package name and its dir,
//   os / cpu — platform identifier strings for the target,
//   ext — file extension of the prebuilt binary ("" except on Windows).
const PLATFORM_MAP: Record<string, { pkg: string; os: string; cpu: string; ext: string }> = {
  "x86_64-unknown-linux-musl": { pkg: "linux-x64", os: "linux", cpu: "x64", ext: "" },
  "x86_64-pc-windows-gnu": { pkg: "win32-x64", os: "win32", cpu: "x64", ext: ".exe" },
  "x86_64-apple-darwin": { pkg: "darwin-x64", os: "darwin", cpu: "x64", ext: "" },
  "aarch64-apple-darwin": { pkg: "darwin-arm64", os: "darwin", cpu: "arm64", ext: "" },
};
/**
 * Returns true when `packageName@version` is already published to npm.
 *
 * Runs `npm view <pkg>@<ver> version`; a zero exit status means the version
 * exists. On failure, stderr is inspected for the two messages npm emits for
 * a missing version or a missing package; any other, unexpected failure is
 * also treated as "not published" so the publish step is still attempted.
 */
async function npmVersionExists(packageName: string, version: string): Promise<boolean> {
  const spec = `${packageName}@${version}`;
  console.log(`Checking if ${spec} exists on npm...`);
  try {
    await $({
      stdout: "ignore",
      stderr: "pipe",
    })`npm view ${spec} version`;
    return true;
  } catch (error: unknown) {
    let stderr = "";
    if (error && typeof error === "object" && "stderr" in error) {
      stderr = String(error.stderr);
    }
    const missingVersion = stderr.includes(`No match found for version ${version}`);
    const missingPackage = stderr.includes(`'${spec}' is not in this registry`);
    if (missingVersion || missingPackage) {
      return false;
    }
    // Unexpected error, assume not exists to allow publish attempt
    return false;
  }
}
/**
 * Best-effort check for whether `crateName@version` is already on crates.io.
 *
 * Uses `cargo search`, which only reports each crate's *latest* published
 * version. A version that is published but no longer the newest therefore
 * reports false here; the subsequent `cargo publish` then fails loudly,
 * which is acceptable for this script.
 */
async function crateVersionExists(crateName: string, version: string): Promise<boolean> {
  console.log(`Checking if ${crateName}@${version} exists on crates.io...`);
  try {
    const result = await $`cargo search ${crateName} --limit 1`;
    const output = result.stdout || "";
    // Escape the crate name for regex use (defensive; crate names are
    // normally [a-z0-9_-]) and use the multiline flag so the entry matches
    // regardless of which output line it appears on — the previous anchor
    // only matched when the crate was on the very first line.
    const escaped = crateName.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    const match = output.match(new RegExp(`^${escaped}\\s*=\\s*"([^"]+)"`, "m"));
    return !!(match && match[1] === version);
  } catch {
    // cargo missing or the search failed: fall through to a publish attempt.
    return false;
  }
}
/**
 * Picks the npm dist-tag for a release.
 *
 * - `latest` releases use the registry default tag (returns null → no --tag flag)
 * - stable but non-latest versions are tagged "next"
 * - prereleases must carry an "rc" identifier and are tagged "rc";
 *   any other prerelease throws
 */
function getNpmTag(version: string, latest: boolean): string | null {
  if (latest) {
    return null;
  }
  const parsed = semver.parse(version);
  if (!parsed) {
    throw new Error(`Invalid version: ${version}`);
  }
  if (parsed.prerelease.length === 0) {
    return "next";
  }
  for (const part of parsed.prerelease) {
    if (String(part).toLowerCase().startsWith("rc")) {
      return "rc";
    }
  }
  throw new Error(`Prerelease versions must use rc tag when not latest: ${version}`);
}
/**
 * Publishes each workspace crate to crates.io in dependency order
 * (CRATE_ORDER), skipping versions that already appear on the registry.
 */
export async function publishCrates(opts: ReleaseOpts) {
  for (const crate of CRATE_ORDER) {
    // NOTE(review): every entry gets the "sandbox-agent-" prefix, so the
    // final "sandbox-agent" entry becomes "sandbox-agent-sandbox-agent" —
    // confirm that is the intended crate name for the main package.
    const crateName = `sandbox-agent-${crate}`;
    if (await crateVersionExists(crateName, opts.version)) {
      console.log(`==> Skipping ${crateName}@${opts.version} (already published)`);
      continue;
    }
    console.log(`==> Publishing ${crateName}@${opts.version}`);
    const crateDir = path.join(opts.root, "server", "packages", crate);
    // --allow-dirty: the release process may leave uncommitted version bumps.
    await $({ stdio: "inherit", cwd: crateDir })`cargo publish --allow-dirty`;
    // Give the crates.io index time to see this publish before the next
    // crate (which may depend on it) is pushed.
    console.log("Waiting 30s for crates.io index...");
    await new Promise(resolve => setTimeout(resolve, 30000));
  }
}
/**
 * Builds and publishes the TypeScript SDK package ("sandbox-agent") to npm.
 * No-ops when this version is already on the registry.
 */
export async function publishNpmSdk(opts: ReleaseOpts) {
  const packageName = "sandbox-agent";
  const sdkDir = path.join(opts.root, "sdks", "typescript");
  if (await npmVersionExists(packageName, opts.version)) {
    console.log(`==> Skipping ${packageName}@${opts.version} (already published)`);
    return;
  }
  console.log(`==> Publishing ${packageName}@${opts.version}`);
  const npmTag = getNpmTag(opts.version, opts.latest);
  // All build/publish commands run inside the SDK directory with inherited stdio.
  const run = $({ stdio: "inherit", cwd: sdkDir });
  await run`npm version ${opts.version} --no-git-tag-version --allow-same-version`;
  await run`pnpm install`;
  await run`pnpm run build`;
  const publishArgs = ["publish", "--access", "public"];
  if (npmTag) {
    publishArgs.push("--tag", npmTag);
  }
  await run`npm ${publishArgs}`;
}
export async function publishNpmCli(opts: ReleaseOpts) {
const cliDir = path.join(opts.root, "sdks", "cli");
const distDir = path.join(opts.root, "dist");
const npmTag = getNpmTag(opts.version, opts.latest);
// Publish platform-specific packages
for (const [target, info] of Object.entries(PLATFORM_MAP)) {
const packageName = `@sandbox-agent/cli-${info.pkg}`;
if (await npmVersionExists(packageName, opts.version)) {
console.log(`==> Skipping ${packageName}@${opts.version} (already published)`);
continue;
}
const platformDir = path.join(cliDir, "platforms", info.pkg);
const binDir = path.join(platformDir, "bin");
await fs.mkdir(binDir, { recursive: true });
const srcBinary = path.join(distDir, `sandbox-agent-${target}${info.ext}`);
const dstBinary = path.join(binDir, `sandbox-agent${info.ext}`);
await fs.copyFile(srcBinary, dstBinary);
if (info.ext !== ".exe") {
await fs.chmod(dstBinary, 0o755);
}
console.log(`==> Publishing ${packageName}@${opts.version}`);
await $({ stdio: "inherit", cwd: platformDir })`npm version ${opts.version} --no-git-tag-version --allow-same-version`;
const publishArgs = ["publish", "--access", "public"];
if (npmTag) publishArgs.push("--tag", npmTag);
await $({ stdio: "inherit", cwd: platformDir })`npm ${publishArgs}`;
}
// Publish main CLI package
const mainPackageName = "@sandbox-agent/cli";
if (await npmVersionExists(mainPackageName, opts.version)) {
console.log(`==> Skipping ${mainPackageName}@${opts.version} (already published)`);
return;
}
console.log(`==> Publishing ${mainPackageName}@${opts.version}`);
const pkgPath = path.join(cliDir, "package.json");
const pkg = JSON.parse(await fs.readFile(pkgPath, "utf8"));
pkg.version = opts.version;
for (const dep of Object.keys(pkg.optionalDependencies || {})) {
pkg.optionalDependencies[dep] = opts.version;
}
await fs.writeFile(pkgPath, JSON.stringify(pkg, null, 2) + "\n");
const publishArgs = ["publish", "--access", "public"];
if (npmTag) publishArgs.push("--tag", npmTag);
await $({ stdio: "inherit", cwd: cliDir })`npm ${publishArgs}`;
}

View file

@ -0,0 +1,12 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"esModuleInterop": true,
"strict": true,
"skipLibCheck": true,
"noEmit": true
},
"include": ["*.ts"]
}

View file

@ -0,0 +1,68 @@
import * as fs from "node:fs/promises";
import { glob } from "glob";
import { $ } from "execa";
import type { ReleaseOpts } from "./main.js";
import { assert } from "./utils.js";
/**
 * Rewrites the version fields across the workspace (Cargo.toml plus the
 * TypeScript SDK and CLI package.json files) to `opts.version` and stages
 * every changed file with `git add`.
 *
 * Throws (via assert) when a glob matches no files or a matched file lacks
 * the expected version field, so a layout change cannot silently skip a file.
 */
export async function updateVersion(opts: ReleaseOpts) {
  // Note: each `find` regex is deliberately non-global — only the first
  // version field in a file (the package/workspace version) is rewritten.
  const findReplace = [
    {
      path: "Cargo.toml",
      find: /^version = ".*"/m,
      replace: `version = "${opts.version}"`,
    },
    {
      path: "sdks/typescript/package.json",
      find: /"version": ".*"/,
      replace: `"version": "${opts.version}"`,
    },
    {
      path: "sdks/cli/package.json",
      find: /"version": ".*"/,
      replace: `"version": "${opts.version}"`,
    },
    {
      path: "sdks/cli/platforms/*/package.json",
      find: /"version": ".*"/,
      replace: `"version": "${opts.version}"`,
    },
  ];
  for (const { path: globPath, find, replace } of findReplace) {
    const paths = await glob(globPath, { cwd: opts.root });
    assert(paths.length > 0, `no paths matched: ${globPath}`);
    for (const filePath of paths) {
      const fullPath = `${opts.root}/${filePath}`;
      const file = await fs.readFile(fullPath, "utf-8");
      assert(find.test(file), `file does not match ${find}: ${filePath}`);
      const newFile = file.replace(find, replace);
      await fs.writeFile(fullPath, newFile);
      await $({ cwd: opts.root })`git add ${filePath}`;
    }
  }

  // Pin every optionalDependency in the CLI and SDK manifests to the release
  // version so the wrapper packages resolve their matching platform builds.
  await pinOptionalDependencies(opts, "sdks/cli/package.json");
  await pinOptionalDependencies(opts, "sdks/typescript/package.json");
}

/**
 * Sets all optionalDependencies in the package.json at `relPath` (relative
 * to the repo root) to `opts.version`, writes it back, and stages the file.
 * No-ops when the manifest has no optionalDependencies.
 */
async function pinOptionalDependencies(opts: ReleaseOpts, relPath: string) {
  const fullPath = `${opts.root}/${relPath}`;
  const pkg = JSON.parse(await fs.readFile(fullPath, "utf-8"));
  if (!pkg.optionalDependencies) {
    return;
  }
  for (const dep of Object.keys(pkg.optionalDependencies)) {
    pkg.optionalDependencies[dep] = opts.version;
  }
  await fs.writeFile(fullPath, JSON.stringify(pkg, null, 2) + "\n");
  await $({ cwd: opts.root })`git add ${relPath}`;
}

175
scripts/release/utils.ts Normal file
View file

@ -0,0 +1,175 @@
import * as fs from "node:fs/promises";
import { $ } from "execa";
/**
 * Throws Error(message) when `condition` is falsy; on success, narrows the
 * condition for TypeScript via the `asserts` return type.
 */
export function assert(condition: unknown, message?: string): asserts condition {
  if (condition) {
    return;
  }
  throw new Error(message || "Assertion failed");
}
/**
 * Throws unless `actual` is strictly equal (===) to `expected`.
 * The default failure message interpolates both values.
 */
export function assertEquals<T>(actual: T, expected: T, message?: string): void {
  if (actual === expected) {
    return;
  }
  throw new Error(message || `Expected ${expected}, got ${actual}`);
}
/**
 * Throws when `value` is null or undefined; otherwise narrows it to T.
 * Falsy-but-present values (0, "", false, NaN) pass.
 */
export function assertExists<T>(
  value: T | null | undefined,
  message?: string,
): asserts value is T {
  if (value != null) {
    return;
  }
  throw new Error(message || "Value does not exist");
}
/**
 * Resolves when `dirPath` exists and is a directory; rejects otherwise.
 * ENOENT is translated into a friendlier "Directory not found" error; any
 * other stat failure is re-thrown untouched.
 */
export async function assertDirExists(dirPath: string): Promise<void> {
  const stat = await fs.stat(dirPath).catch((err: unknown) => {
    if (err && typeof err === "object" && "code" in err && err.code === "ENOENT") {
      throw new Error(`Directory not found: ${dirPath}`);
    }
    throw err;
  });
  if (!stat.isDirectory()) {
    throw new Error(`Path exists but is not a directory: ${dirPath}`);
  }
}
// R2 configuration
const ENDPOINT_URL = "https://2a94c6a0ced8d35ea63cddc86c2681e7.r2.cloudflarestorage.com";
const BUCKET = "rivet-releases";

interface ReleasesS3Config {
  awsEnv: Record<string, string>;
  endpointUrl: string;
}

let cachedConfig: ReleasesS3Config | null = null;

/**
 * Reads a single 1Password secret via the `op` CLI, returning undefined when
 * the CLI is unavailable or the read fails (local-development fallback only).
 */
async function readOnePasswordSecret(reference: string): Promise<string | undefined> {
  try {
    const result = await $`op read ${reference}`;
    return result.stdout.trim();
  } catch {
    // 1Password not available
    return undefined;
  }
}

/**
 * Resolves credentials for the releases R2 bucket and caches the result for
 * the rest of the process.
 *
 * Credentials come from R2_RELEASES_* (falling back to AWS_*) environment
 * variables; when absent, a 1Password lookup is attempted. Throws when
 * neither source yields a complete key pair.
 */
export async function getReleasesS3Config(): Promise<ReleasesS3Config> {
  if (cachedConfig) {
    return cachedConfig;
  }

  let awsAccessKeyId = process.env.R2_RELEASES_ACCESS_KEY_ID || process.env.AWS_ACCESS_KEY_ID;
  let awsSecretAccessKey = process.env.R2_RELEASES_SECRET_ACCESS_KEY || process.env.AWS_SECRET_ACCESS_KEY;

  // Try 1Password fallback for local development
  if (!awsAccessKeyId) {
    awsAccessKeyId = await readOnePasswordSecret("op://Engineering/rivet-releases R2 Upload/username");
  }
  if (!awsSecretAccessKey) {
    awsSecretAccessKey = await readOnePasswordSecret("op://Engineering/rivet-releases R2 Upload/password");
  }

  assert(awsAccessKeyId, "R2_RELEASES_ACCESS_KEY_ID is required");
  assert(awsSecretAccessKey, "R2_RELEASES_SECRET_ACCESS_KEY is required");

  cachedConfig = {
    awsEnv: {
      AWS_ACCESS_KEY_ID: awsAccessKeyId,
      AWS_SECRET_ACCESS_KEY: awsSecretAccessKey,
      AWS_DEFAULT_REGION: "auto",
    },
    endpointUrl: ENDPOINT_URL,
  };
  return cachedConfig;
}
/**
 * Uploads one local file to the releases bucket at `remotePath`, using an
 * explicit CRC32 checksum algorithm and the R2 endpoint.
 */
export async function uploadFileToReleases(
  localPath: string,
  remotePath: string,
): Promise<void> {
  const config = await getReleasesS3Config();
  const destination = `s3://${BUCKET}/${remotePath}`;
  await $({
    env: config.awsEnv,
    stdio: "inherit",
  })`aws s3 cp ${localPath} ${destination} --checksum-algorithm CRC32 --endpoint-url ${config.endpointUrl}`;
}
/**
 * Recursively uploads a local directory to the releases bucket under
 * `remotePath`, using an explicit CRC32 checksum algorithm.
 */
export async function uploadDirToReleases(
  localPath: string,
  remotePath: string,
): Promise<void> {
  const config = await getReleasesS3Config();
  const destination = `s3://${BUCKET}/${remotePath}`;
  await $({
    env: config.awsEnv,
    stdio: "inherit",
  })`aws s3 cp ${localPath} ${destination} --recursive --checksum-algorithm CRC32 --endpoint-url ${config.endpointUrl}`;
}
/**
 * Uploads an in-memory string as an object at `remotePath` in the releases
 * bucket. "aws s3 cp -" reads the object body from stdin, fed via `input`.
 */
export async function uploadContentToReleases(
  content: string,
  remotePath: string,
): Promise<void> {
  const config = await getReleasesS3Config();
  const destination = `s3://${BUCKET}/${remotePath}`;
  await $({
    env: config.awsEnv,
    input: content,
    stdio: ["pipe", "inherit", "inherit"],
  })`aws s3 cp - ${destination} --endpoint-url ${config.endpointUrl}`;
}
/** Subset of the `aws s3api list-objects` JSON response this script consumes. */
export interface ListReleasesResult {
  Contents?: { Key: string; Size: number }[];
}

/**
 * Lists objects in the releases bucket under `prefix`, returning the parsed
 * s3api JSON response.
 */
export async function listReleasesObjects(
  prefix: string,
): Promise<ListReleasesResult> {
  const config = await getReleasesS3Config();
  const listed = await $({
    env: config.awsEnv,
    stdio: ["pipe", "pipe", "inherit"],
  })`aws s3api list-objects --bucket ${BUCKET} --prefix ${prefix} --endpoint-url ${config.endpointUrl}`;
  return JSON.parse(listed.stdout);
}
/** Recursively deletes everything under `remotePath` in the releases bucket. */
export async function deleteReleasesPath(remotePath: string): Promise<void> {
  const config = await getReleasesS3Config();
  const target = `s3://${BUCKET}/${remotePath}`;
  await $({
    env: config.awsEnv,
    stdio: "inherit",
  })`aws s3 rm ${target} --recursive --endpoint-url ${config.endpointUrl}`;
}
/**
* Copies objects from one S3 path to another within the releases bucket.
* Uses s3api copy-object to avoid R2 tagging header issues.
*/
export async function copyReleasesPath(
sourcePath: string,
targetPath: string,
): Promise<void> {
const { awsEnv, endpointUrl } = await getReleasesS3Config();
const listResult = await $({
env: awsEnv,
})`aws s3api list-objects --bucket ${BUCKET} --prefix ${sourcePath} --endpoint-url ${endpointUrl}`;
const objects = JSON.parse(listResult.stdout);
if (!objects.Contents?.length) {
throw new Error(`No objects found under ${sourcePath}`);
}
for (const obj of objects.Contents) {
const sourceKey = obj.Key;
const targetKey = sourceKey.replace(sourcePath, targetPath);
console.log(` ${sourceKey} -> ${targetKey}`);
await $({
env: awsEnv,
})`aws s3api copy-object --bucket ${BUCKET} --key ${targetKey} --copy-source ${BUCKET}/${sourceKey} --endpoint-url ${endpointUrl}`;
}
}

View file

@ -318,6 +318,9 @@ impl SessionState {
self.codex_sender = sender;
}
// Note: This is unused now that Codex uses the shared server model,
// but keeping it for potential future use with other agents.
#[allow(dead_code)]
fn codex_sender(&self) -> Option<mpsc::UnboundedSender<String>> {
self.codex_sender.clone()
}
@ -1148,7 +1151,7 @@ impl SessionManager {
reply: PermissionReply,
) -> Result<(), SandboxError> {
let reply_for_status = reply.clone();
let (agent, native_session_id, codex_sender, pending_permission) = {
let (agent, native_session_id, pending_permission) = {
let mut sessions = self.sessions.lock().await;
let session = Self::session_mut(&mut sessions, session_id).ok_or_else(|| {
SandboxError::SessionNotFound {
@ -1164,23 +1167,21 @@ impl SessionManager {
if let Some(err) = session.ended_error() {
return Err(err);
}
let codex_sender = if session.agent == AgentId::Codex {
session.codex_sender()
} else {
None
};
(
session.agent,
session.native_session_id.clone(),
codex_sender,
pending,
)
};
if agent == AgentId::Codex {
let sender = codex_sender.ok_or_else(|| SandboxError::InvalidRequest {
message: "codex session not active".to_string(),
})?;
// Use the shared Codex server to send the permission reply
let server = {
let guard = self.codex_server.lock().await;
guard.clone().ok_or_else(|| SandboxError::InvalidRequest {
message: "codex server not running".to_string(),
})?
};
let pending =
pending_permission
.clone()
@ -1227,10 +1228,11 @@ impl SessionManager {
serde_json::to_string(&response).map_err(|err| SandboxError::InvalidRequest {
message: err.to_string(),
})?;
sender
server
.stdin_sender
.send(line)
.map_err(|_| SandboxError::InvalidRequest {
message: "codex session not active".to_string(),
message: "codex server not active".to_string(),
})?;
} else if agent == AgentId::Opencode {
let agent_session_id =
@ -2394,6 +2396,18 @@ pub struct AgentCapabilities {
pub file_changes: bool,
pub mcp_tools: bool,
pub streaming_deltas: bool,
/// Whether this agent uses a shared long-running server process (vs per-turn subprocess)
pub shared_process: bool,
}
/// Status of a shared server process for an agent
#[derive(Debug, Clone, Serialize, Deserialize, ToSchema, JsonSchema)]
#[serde(rename_all = "lowercase")]
pub enum ServerStatus {
/// Server is running and accepting requests
Running,
/// Server is not currently running
Stopped,
}
#[derive(Debug, Clone, Serialize, Deserialize, ToSchema, JsonSchema)]
@ -2406,6 +2420,9 @@ pub struct AgentInfo {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub path: Option<String>,
pub capabilities: AgentCapabilities,
/// Status of the shared server process (only present for agents with shared_process=true)
#[serde(default, skip_serializing_if = "Option::is_none")]
pub server_status: Option<ServerStatus>,
}
#[derive(Debug, Clone, Serialize, Deserialize, ToSchema, JsonSchema)]
@ -2611,6 +2628,21 @@ async fn list_agents(
State(state): State<Arc<AppState>>,
) -> Result<Json<AgentListResponse>, ApiError> {
let manager = state.agent_manager.clone();
// Check shared server status for agents that use them
let codex_server_running = state
.session_manager
.codex_server
.lock()
.await
.is_some();
let opencode_server_running = state
.session_manager
.opencode_server
.lock()
.await
.is_some();
let agents = tokio::task::spawn_blocking(move || {
all_agents()
.into_iter()
@ -2618,12 +2650,31 @@ async fn list_agents(
let installed = manager.is_installed(agent_id);
let version = manager.version(agent_id).ok().flatten();
let path = manager.resolve_binary(agent_id).ok();
let capabilities = agent_capabilities_for(agent_id);
// Add server_status for agents with shared processes
let server_status = if capabilities.shared_process {
let running = match agent_id {
AgentId::Codex => codex_server_running,
AgentId::Opencode => opencode_server_running,
_ => false,
};
Some(if running {
ServerStatus::Running
} else {
ServerStatus::Stopped
})
} else {
None
};
AgentInfo {
id: agent_id.as_str().to_string(),
installed,
version,
path: path.map(|path| path.to_string_lossy().to_string()),
capabilities: agent_capabilities_for(agent_id),
capabilities,
server_status,
}
})
.collect::<Vec<_>>()
@ -2940,6 +2991,7 @@ fn agent_capabilities_for(agent: AgentId) -> AgentCapabilities {
file_changes: false,
mcp_tools: false,
streaming_deltas: false,
shared_process: false, // per-turn subprocess with --resume
},
AgentId::Codex => AgentCapabilities {
plan_mode: true,
@ -2957,6 +3009,7 @@ fn agent_capabilities_for(agent: AgentId) -> AgentCapabilities {
file_changes: true,
mcp_tools: true,
streaming_deltas: true,
shared_process: true, // shared app-server via JSON-RPC
},
AgentId::Opencode => AgentCapabilities {
plan_mode: false,
@ -2974,6 +3027,7 @@ fn agent_capabilities_for(agent: AgentId) -> AgentCapabilities {
file_changes: false,
mcp_tools: false,
streaming_deltas: true,
shared_process: true, // shared HTTP server
},
AgentId::Amp => AgentCapabilities {
plan_mode: false,
@ -2991,6 +3045,7 @@ fn agent_capabilities_for(agent: AgentId) -> AgentCapabilities {
file_changes: false,
mcp_tools: false,
streaming_deltas: false,
shared_process: false, // per-turn subprocess with --continue
},
AgentId::Mock => AgentCapabilities {
plan_mode: true,
@ -3008,6 +3063,7 @@ fn agent_capabilities_for(agent: AgentId) -> AgentCapabilities {
file_changes: true,
mcp_tools: true,
streaming_deltas: true,
shared_process: false, // in-memory mock (no subprocess)
},
}
}

View file

@ -119,261 +119,13 @@ expression: normalize_events(&permission_events)
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 15
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 16
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 17
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 18
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 19
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 20
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 21
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 22
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 23
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 24
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 25
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 26
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 27
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 28
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 29
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 30
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 31
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 32
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 33
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 34
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 35
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 36
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 37
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 38
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 39
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 40
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 41
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 42
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 43
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 44
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 45
source: agent
synthetic: false
type: item.delta
- item:
content_types:
- reasoning
kind: message
role: assistant
status: completed
seq: 46
seq: 15
source: agent
synthetic: false
type: item.completed

View file

@ -263,13 +263,69 @@ expression: normalize_events(&reject_events)
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 33
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 34
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 35
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 36
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 37
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 38
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 39
source: agent
synthetic: false
type: item.delta
- item:
content_types:
- reasoning
kind: message
role: assistant
status: completed
seq: 33
seq: 40
source: agent
synthetic: false
type: item.completed

View file

@ -62,11 +62,254 @@ expression: normalize_events(&question_events)
synthetic: false
type: item.started
- item:
content_types: []
kind: message
role: assistant
content_types:
- status
kind: status
role: system
status: completed
seq: 8
source: agent
synthetic: false
type: item.completed
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 9
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 10
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 11
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 12
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 13
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 14
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 15
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 16
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 17
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 18
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 19
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 20
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 21
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 22
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 23
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 24
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 25
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 26
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 27
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 28
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 29
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 30
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 31
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 32
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 33
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 34
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 35
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 36
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 37
source: agent
synthetic: false
type: item.delta
- item:
content_types:
- reasoning
kind: message
role: assistant
status: completed
seq: 38
source: agent
synthetic: false
type: item.completed

View file

@ -9,13 +9,19 @@ session_a:
source: daemon
synthetic: true
type: session.started
- metadata: true
seq: 2
session: started
source: agent
synthetic: false
type: session.started
- item:
content_types:
- status
kind: status
role: system
status: completed
seq: 2
seq: 3
source: agent
synthetic: false
type: item.completed
@ -25,7 +31,7 @@ session_a:
kind: message
role: user
status: in_progress
seq: 3
seq: 4
source: agent
synthetic: false
type: item.started
@ -33,7 +39,7 @@ session_a:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 4
seq: 5
source: daemon
synthetic: true
type: item.delta
@ -43,7 +49,7 @@ session_a:
kind: message
role: user
status: completed
seq: 5
seq: 6
source: agent
synthetic: false
type: item.completed
@ -52,67 +58,16 @@ session_a:
kind: message
role: assistant
status: in_progress
seq: 6
seq: 7
source: agent
synthetic: false
type: item.started
- item:
content_types:
- status
kind: status
role: system
status: completed
seq: 7
source: agent
synthetic: false
type: item.completed
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 8
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 9
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 10
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 11
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 12
source: agent
synthetic: false
type: item.delta
- item:
content_types:
- reasoning
content_types: []
kind: message
role: assistant
status: completed
seq: 13
seq: 8
source: agent
synthetic: false
type: item.completed
@ -123,13 +78,19 @@ session_b:
source: daemon
synthetic: true
type: session.started
- metadata: true
seq: 2
session: started
source: agent
synthetic: false
type: session.started
- item:
content_types:
- status
kind: status
role: system
status: completed
seq: 2
seq: 3
source: agent
synthetic: false
type: item.completed
@ -139,7 +100,7 @@ session_b:
kind: message
role: user
status: in_progress
seq: 3
seq: 4
source: agent
synthetic: false
type: item.started
@ -147,7 +108,7 @@ session_b:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 4
seq: 5
source: daemon
synthetic: true
type: item.delta
@ -157,7 +118,7 @@ session_b:
kind: message
role: user
status: completed
seq: 5
seq: 6
source: agent
synthetic: false
type: item.completed
@ -166,7 +127,7 @@ session_b:
kind: message
role: assistant
status: in_progress
seq: 6
seq: 7
source: agent
synthetic: false
type: item.started
@ -176,18 +137,10 @@ session_b:
kind: status
role: system
status: completed
seq: 7
source: agent
synthetic: false
type: item.completed
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 8
source: agent
synthetic: false
type: item.delta
type: item.completed
- delta:
delta: "<redacted>"
item_id: "<redacted>"
@ -212,13 +165,37 @@ session_b:
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 12
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 13
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 14
source: agent
synthetic: false
type: item.delta
- item:
content_types:
- reasoning
kind: message
role: assistant
status: completed
seq: 12
seq: 15
source: agent
synthetic: false
type: item.completed

View file

@ -79,13 +79,93 @@ expression: normalized
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 10
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 11
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 12
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 13
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 14
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 15
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 16
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 17
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 18
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 19
source: agent
synthetic: false
type: item.delta
- item:
content_types:
- reasoning
kind: message
role: assistant
status: completed
seq: 10
seq: 20
source: agent
synthetic: false
type: item.completed

View file

@ -8,13 +8,19 @@ expression: normalized
source: daemon
synthetic: true
type: session.started
- metadata: true
seq: 2
session: started
source: agent
synthetic: false
type: session.started
- item:
content_types:
- status
kind: status
role: system
status: completed
seq: 2
seq: 3
source: agent
synthetic: false
type: item.completed
@ -24,7 +30,7 @@ expression: normalized
kind: message
role: user
status: in_progress
seq: 3
seq: 4
source: agent
synthetic: false
type: item.started
@ -32,7 +38,7 @@ expression: normalized
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 4
seq: 5
source: daemon
synthetic: true
type: item.delta
@ -42,7 +48,7 @@ expression: normalized
kind: message
role: user
status: completed
seq: 5
seq: 6
source: agent
synthetic: false
type: item.completed
@ -51,7 +57,7 @@ expression: normalized
kind: message
role: assistant
status: in_progress
seq: 6
seq: 7
source: agent
synthetic: false
type: item.started
@ -61,18 +67,10 @@ expression: normalized
kind: status
role: system
status: completed
seq: 7
seq: 8
source: agent
synthetic: false
type: item.completed
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 8
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
@ -81,29 +79,13 @@ expression: normalized
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 10
source: agent
synthetic: false
type: item.delta
- delta:
delta: "<redacted>"
item_id: "<redacted>"
native_item_id: "<redacted>"
seq: 11
source: agent
synthetic: false
type: item.delta
- item:
content_types:
- reasoning
kind: message
role: assistant
status: completed
seq: 12
seq: 10
source: agent
synthetic: false
type: item.completed

View file

@ -6,3 +6,4 @@
- [x] Update CLI + TypeScript SDK/OpenAPI for turn streaming.
- [x] Add inspector UI mode for turn stream and wire send flow.
- [x] Refresh docs for new endpoint and UI mode.
- [x] Add Docker/Vercel/Daytona/E2B examples with basic prompt scripts and tests.