chore: fix bad merge

This commit is contained in:
Nathan Flurry 2026-02-11 07:52:48 -08:00
parent 1dd45908a3
commit 94353f7696
205 changed files with 19244 additions and 14866 deletions

View file

@ -33,7 +33,7 @@ import {
type Stream,
} from "@agentclientprotocol/sdk";
const ACP_PATH = "/v2/rpc";
const DEFAULT_ACP_PATH = "/v1/rpc";
export interface ProblemDetails {
type: string;
@ -48,6 +48,13 @@ export type AcpEnvelopeDirection = "inbound" | "outbound";
export type AcpEnvelopeObserver = (envelope: AnyMessage, direction: AcpEnvelopeDirection) => void;
export type QueryValue = string | number | boolean | null | undefined;
export interface AcpHttpTransportOptions {
path?: string;
bootstrapQuery?: Record<string, QueryValue>;
}
export interface AcpHttpClientOptions {
baseUrl: string;
token?: string;
@ -55,6 +62,7 @@ export interface AcpHttpClientOptions {
headers?: HeadersInit;
client?: Partial<Client>;
onEnvelope?: AcpEnvelopeObserver;
transport?: AcpHttpTransportOptions;
}
export class AcpHttpError extends Error {
@ -71,6 +79,58 @@ export class AcpHttpError extends Error {
}
}
export interface RpcErrorResponse {
code: number;
message: string;
data?: unknown;
}
// Human-readable labels for JSON-RPC error codes the agent may return:
// the spec-defined -327xx/-326xx codes plus the implementation-defined
// -32000/-32002 codes used by the agent protocol.
const RPC_CODE_LABELS: Record<number, string> = Object.fromEntries([
	[-32700, "Parse error"],
	[-32600, "Invalid request"],
	[-32601, "Method not supported by agent"],
	[-32602, "Invalid parameters"],
	[-32603, "Internal agent error"],
	[-32000, "Authentication required"],
	[-32002, "Resource not found"],
]);
/**
 * Error thrown when the agent answers an RPC with a JSON-RPC error object.
 * Known codes get a human-readable label prefixed onto the message; the raw
 * code and optional error data are preserved for callers.
 */
export class AcpRpcError extends Error {
	readonly code: number;
	readonly data?: unknown;

	constructor(code: number, message: string, data?: unknown) {
		const label = RPC_CODE_LABELS[code];
		super(label ? `${label}: ${message}` : message);
		this.name = "AcpRpcError";
		this.code = code;
		this.data = data;
	}
}
/**
 * Runtime guard for JSON-RPC error payloads: a non-null object carrying a
 * numeric `code` and a string `message`.
 */
function isRpcErrorResponse(value: unknown): value is RpcErrorResponse {
	if (typeof value !== "object" || value === null) {
		return false;
	}
	const candidate = value as Partial<RpcErrorResponse>;
	return typeof candidate.code === "number" && typeof candidate.message === "string";
}
/**
 * Awaits an RPC call and converts raw JSON-RPC error payloads thrown by the
 * connection into typed AcpRpcError instances; any other error is rethrown
 * unchanged.
 */
async function wrapRpc<T>(promise: Promise<T>): Promise<T> {
	try {
		return await promise;
	} catch (error) {
		if (!isRpcErrorResponse(error)) {
			throw error;
		}
		throw new AcpRpcError(error.code, error.message, error.data);
	}
}
export class AcpHttpClient {
private readonly transport: StreamableHttpAcpTransport;
private readonly connection: ClientSideConnection;
@ -87,23 +147,20 @@ export class AcpHttpClient {
token: options.token,
defaultHeaders: options.headers,
onEnvelope: options.onEnvelope,
transport: options.transport,
});
const clientHandlers = buildClientHandlers(options.client);
this.connection = new ClientSideConnection(() => clientHandlers, this.transport.stream);
}
get clientId(): string | undefined {
return this.transport.clientId ?? undefined;
}
async initialize(request: Partial<InitializeRequest> = {}): Promise<InitializeResponse> {
const params: InitializeRequest = {
protocolVersion: request.protocolVersion ?? PROTOCOL_VERSION,
clientCapabilities: request.clientCapabilities,
clientInfo: request.clientInfo ?? {
name: "acp-http-client",
version: "v2",
version: "v1",
},
};
@ -111,23 +168,23 @@ export class AcpHttpClient {
params._meta = request._meta;
}
return this.connection.initialize(params);
return wrapRpc(this.connection.initialize(params));
}
async authenticate(request: AuthenticateRequest): Promise<AuthenticateResponse> {
return this.connection.authenticate(request);
return wrapRpc(this.connection.authenticate(request));
}
async newSession(request: NewSessionRequest): Promise<NewSessionResponse> {
return this.connection.newSession(request);
return wrapRpc(this.connection.newSession(request));
}
async loadSession(request: LoadSessionRequest): Promise<LoadSessionResponse> {
return this.connection.loadSession(request);
return wrapRpc(this.connection.loadSession(request));
}
async prompt(request: PromptRequest): Promise<PromptResponse> {
return this.connection.prompt(request);
return wrapRpc(this.connection.prompt(request));
}
async cancel(notification: CancelNotification): Promise<void> {
@ -135,35 +192,35 @@ export class AcpHttpClient {
}
async setSessionMode(request: SetSessionModeRequest): Promise<SetSessionModeResponse | void> {
return this.connection.setSessionMode(request);
return wrapRpc(this.connection.setSessionMode(request));
}
async setSessionConfigOption(
request: SetSessionConfigOptionRequest,
): Promise<SetSessionConfigOptionResponse> {
return this.connection.setSessionConfigOption(request);
return wrapRpc(this.connection.setSessionConfigOption(request));
}
async unstableListSessions(request: ListSessionsRequest): Promise<ListSessionsResponse> {
return this.connection.unstable_listSessions(request);
return wrapRpc(this.connection.unstable_listSessions(request));
}
async unstableForkSession(request: ForkSessionRequest): Promise<ForkSessionResponse> {
return this.connection.unstable_forkSession(request);
return wrapRpc(this.connection.unstable_forkSession(request));
}
async unstableResumeSession(request: ResumeSessionRequest): Promise<ResumeSessionResponse> {
return this.connection.unstable_resumeSession(request);
return wrapRpc(this.connection.unstable_resumeSession(request));
}
async unstableSetSessionModel(
request: SetSessionModelRequest,
): Promise<SetSessionModelResponse | void> {
return this.connection.unstable_setSessionModel(request);
return wrapRpc(this.connection.unstable_setSessionModel(request));
}
async extMethod(method: string, params: Record<string, unknown>): Promise<Record<string, unknown>> {
return this.connection.extMethod(method, params);
return wrapRpc(this.connection.extMethod(method, params));
}
async extNotification(method: string, params: Record<string, unknown>): Promise<void> {
@ -193,16 +250,19 @@ type StreamableHttpAcpTransportOptions = {
token?: string;
defaultHeaders?: HeadersInit;
onEnvelope?: AcpEnvelopeObserver;
transport?: AcpHttpTransportOptions;
};
class StreamableHttpAcpTransport {
readonly stream: Stream;
private readonly baseUrl: string;
private readonly path: string;
private readonly fetcher: typeof fetch;
private readonly token?: string;
private readonly defaultHeaders?: HeadersInit;
private readonly onEnvelope?: AcpEnvelopeObserver;
private readonly bootstrapQuery: URLSearchParams | null;
private readableController: ReadableStreamDefaultController<AnyMessage> | null = null;
private sseAbortController: AbortController | null = null;
@ -210,14 +270,18 @@ class StreamableHttpAcpTransport {
private lastEventId: string | null = null;
private closed = false;
private closingPromise: Promise<void> | null = null;
private _clientId: string | null = null;
private postedOnce = false;
constructor(options: StreamableHttpAcpTransportOptions) {
this.baseUrl = options.baseUrl.replace(/\/$/, "");
this.path = normalizePath(options.transport?.path ?? DEFAULT_ACP_PATH);
this.fetcher = options.fetcher;
this.token = options.token;
this.defaultHeaders = options.defaultHeaders;
this.onEnvelope = options.onEnvelope;
this.bootstrapQuery = options.transport?.bootstrapQuery
? buildQueryParams(options.transport.bootstrapQuery)
: null;
this.stream = {
readable: new ReadableStream<AnyMessage>({
@ -242,10 +306,6 @@ class StreamableHttpAcpTransport {
};
}
get clientId(): string | null {
return this._clientId;
}
async close(): Promise<void> {
if (this.closingPromise) {
return this.closingPromise;
@ -266,23 +326,32 @@ class StreamableHttpAcpTransport {
this.sseAbortController.abort();
}
const clientId = this._clientId;
if (clientId) {
if (!this.postedOnce) {
try {
const response = await this.fetcher(`${this.baseUrl}${ACP_PATH}`, {
method: "DELETE",
headers: this.buildHeaders({
"x-acp-connection-id": clientId,
Accept: "application/json",
}),
});
if (!response.ok && response.status !== 404) {
throw new AcpHttpError(response.status, await readProblem(response), response);
}
this.readableController?.close();
} catch {
// Ignore close errors; close must be best effort.
// no-op
}
this.readableController = null;
return;
}
const deleteHeaders = this.buildHeaders({
Accept: "application/json",
});
try {
const response = await this.fetcher(this.buildUrl(), {
method: "DELETE",
headers: deleteHeaders,
signal: timeoutSignal(2_000),
});
if (!response.ok && response.status !== 404) {
throw new AcpHttpError(response.status, await readProblem(response), response);
}
} catch {
// Ignore close errors; close must be best effort.
}
try {
@ -306,25 +375,20 @@ class StreamableHttpAcpTransport {
Accept: "application/json",
});
if (this._clientId) {
headers.set("x-acp-connection-id", this._clientId);
}
const response = await this.fetcher(`${this.baseUrl}${ACP_PATH}`, {
const url = this.buildUrl(this.bootstrapQueryIfNeeded());
const response = await this.fetcher(url, {
method: "POST",
headers,
body: JSON.stringify(message),
});
this.postedOnce = true;
if (!response.ok) {
throw new AcpHttpError(response.status, await readProblem(response), response);
}
const responseClientId = response.headers.get("x-acp-connection-id");
if (responseClientId && responseClientId !== this._clientId) {
this._clientId = responseClientId;
this.ensureSseLoop();
}
this.ensureSseLoop();
if (response.status === 200) {
const text = await response.text();
@ -332,11 +396,16 @@ class StreamableHttpAcpTransport {
const envelope = JSON.parse(text) as AnyMessage;
this.pushInbound(envelope);
}
} else {
// Drain response body so the underlying connection is released back to
// the pool. Without this, Node.js undici keeps the socket occupied and
// may stall subsequent requests to the same origin.
await response.text().catch(() => {});
}
}
private ensureSseLoop(): void {
if (this.sseLoop || this.closed || !this._clientId) {
if (this.sseLoop || this.closed || !this.postedOnce) {
return;
}
@ -346,11 +415,10 @@ class StreamableHttpAcpTransport {
}
private async runSseLoop(): Promise<void> {
while (!this.closed && this._clientId) {
while (!this.closed) {
this.sseAbortController = new AbortController();
const headers = this.buildHeaders({
"x-acp-connection-id": this._clientId,
Accept: "text/event-stream",
});
@ -359,12 +427,11 @@ class StreamableHttpAcpTransport {
}
try {
const response = await this.fetcher(`${this.baseUrl}${ACP_PATH}`, {
const response = await this.fetcher(this.buildUrl(), {
method: "GET",
headers,
signal: this.sseAbortController.signal,
});
if (!response.ok) {
throw new AcpHttpError(response.status, await readProblem(response), response);
}
@ -518,6 +585,23 @@ class StreamableHttpAcpTransport {
return headers;
}
/** Joins baseUrl + path and layers any extra query parameters on top. */
private buildUrl(query?: URLSearchParams | null): string {
	const target = new URL(`${this.baseUrl}${this.path}`);
	query?.forEach((value, key) => {
		target.searchParams.set(key, value);
	});
	return target.toString();
}
/**
 * Returns the bootstrap query only for the very first POST (before any
 * request has succeeded) and only when it is non-empty; otherwise null.
 */
private bootstrapQueryIfNeeded(): URLSearchParams | null {
	if (this.postedOnce) {
		return null;
	}
	const query = this.bootstrapQuery;
	return query && query.size > 0 ? query : null;
}
}
function buildClientHandlers(client?: Partial<Client>): Client {
@ -571,4 +655,30 @@ function delay(ms: number): Promise<void> {
return new Promise((resolve) => setTimeout(resolve, ms));
}
/**
 * Creates an auto-aborting signal via AbortSignal.timeout when the runtime
 * supports it; returns undefined on older runtimes (no timeout applied).
 */
function timeoutSignal(timeoutMs: number): AbortSignal | undefined {
	const supported =
		typeof AbortSignal !== "undefined" && typeof AbortSignal.timeout === "function";
	return supported ? AbortSignal.timeout(timeoutMs) : undefined;
}
/** Ensures the transport path has a single leading slash. */
function normalizePath(path: string): string {
	return path.startsWith("/") ? path : `/${path}`;
}
/**
 * Converts a loose query map into URLSearchParams. Entries whose value is
 * null or undefined are dropped; everything else is stringified.
 */
function buildQueryParams(source: Record<string, QueryValue>): URLSearchParams {
	const params = new URLSearchParams();
	for (const key of Object.keys(source)) {
		const value = source[key];
		if (value != null) {
			params.set(key, String(value));
		}
	}
	return params;
}
export type * from "@agentclientprotocol/sdk";
export { PROTOCOL_VERSION } from "@agentclientprotocol/sdk";

View file

@ -1,9 +1,11 @@
import { describe, expect, it, beforeAll, afterAll } from "vitest";
import { existsSync } from "node:fs";
import { dirname, resolve } from "node:path";
import { existsSync, mkdtempSync, rmSync } from "node:fs";
import { dirname, join, resolve } from "node:path";
import { fileURLToPath } from "node:url";
import { tmpdir } from "node:os";
import { AcpHttpClient, type SessionNotification } from "../src/index.ts";
import { spawnSandboxAgent, type SandboxAgentSpawnHandle } from "../../typescript/src/spawn.ts";
import { prepareMockAgentDataHome } from "../../typescript/tests/helpers/mock-agent.ts";
const __dirname = dirname(fileURLToPath(import.meta.url));
@ -60,12 +62,19 @@ describe("AcpHttpClient integration", () => {
let handle: SandboxAgentSpawnHandle;
let baseUrl: string;
let token: string;
let dataHome: string;
beforeAll(async () => {
dataHome = mkdtempSync(join(tmpdir(), "acp-http-client-"));
prepareMockAgentDataHome(dataHome);
handle = await spawnSandboxAgent({
enabled: true,
log: "silent",
timeoutMs: 30000,
env: {
XDG_DATA_HOME: dataHome,
},
});
baseUrl = handle.baseUrl;
token = handle.token;
@ -73,14 +82,20 @@ describe("AcpHttpClient integration", () => {
afterAll(async () => {
await handle.dispose();
rmSync(dataHome, { recursive: true, force: true });
});
it("runs initialize/newSession/prompt against real /v2/rpc", async () => {
it("runs initialize/newSession/prompt against real /v1/acp/{server_id}", async () => {
const updates: SessionNotification[] = [];
const serverId = `acp-http-client-${Date.now().toString(36)}`;
const client = new AcpHttpClient({
baseUrl,
token,
transport: {
path: `/v1/acp/${encodeURIComponent(serverId)}`,
bootstrapQuery: { agent: "mock" },
},
client: {
sessionUpdate: async (notification) => {
updates.push(notification);
@ -88,23 +103,12 @@ describe("AcpHttpClient integration", () => {
},
});
const initialize = await client.initialize({
_meta: {
"sandboxagent.dev": {
agent: "mock",
},
},
});
const initialize = await client.initialize();
expect(initialize.protocolVersion).toBeTruthy();
const session = await client.newSession({
cwd: process.cwd(),
mcpServers: [],
_meta: {
"sandboxagent.dev": {
agent: "mock",
},
},
});
expect(session.sessionId).toBeTruthy();

View file

@ -0,0 +1,38 @@
{
"name": "@sandbox-agent/persist-indexeddb",
"version": "0.1.0",
"description": "IndexedDB persistence driver for the Sandbox Agent TypeScript SDK",
"license": "Apache-2.0",
"repository": {
"type": "git",
"url": "https://github.com/rivet-dev/sandbox-agent"
},
"type": "module",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
"exports": {
".": {
"types": "./dist/index.d.ts",
"import": "./dist/index.js"
}
},
"dependencies": {
"sandbox-agent": "workspace:*"
},
"files": [
"dist"
],
"scripts": {
"build": "tsup",
"typecheck": "tsc --noEmit",
"test": "vitest run",
"test:watch": "vitest"
},
"devDependencies": {
"@types/node": "^22.0.0",
"fake-indexeddb": "^6.2.4",
"tsup": "^8.0.0",
"typescript": "^5.7.0",
"vitest": "^3.0.0"
}
}

View file

@ -0,0 +1,327 @@
import type {
ListEventsRequest,
ListPage,
ListPageRequest,
SessionEvent,
SessionPersistDriver,
SessionRecord,
} from "sandbox-agent";
// Default IndexedDB database name and schema version for the session store.
// Version 2 appears to be the version that introduced the events index — see
// openDatabase's upgrade handler; confirm against migration history.
const DEFAULT_DB_NAME = "sandbox-agent-session-store";
const DEFAULT_DB_VERSION = 2;
// Object store names: one store for session records, one for their events.
const SESSIONS_STORE = "sessions";
const EVENTS_STORE = "events";
// Compound index over [sessionId, eventIndex, id] on the events store.
const EVENTS_BY_SESSION_INDEX = "by_session_index";
// Page size used when a list request omits a (valid) limit.
const DEFAULT_LIST_LIMIT = 100;
/** Configuration for {@link IndexedDbSessionPersistDriver}. */
export interface IndexedDbSessionPersistDriverOptions {
	/** Database name; defaults to "sandbox-agent-session-store". */
	databaseName?: string;
	/** Schema version passed to indexedDB.open; defaults to 2. */
	databaseVersion?: number;
	/** Explicit IDBFactory (e.g. fake-indexeddb in tests); defaults to globalThis.indexedDB. */
	indexedDb?: IDBFactory;
}
/**
 * SessionPersistDriver backed by IndexedDB. Sessions and events live in two
 * object stores; listing reads whole stores into memory, sorts, and pages
 * with numeric-offset cursors, so it is suited to modest data volumes.
 */
export class IndexedDbSessionPersistDriver implements SessionPersistDriver {
	private readonly indexedDb: IDBFactory;
	private readonly dbName: string;
	private readonly dbVersion: number;
	// Opened eagerly in the constructor; every method awaits this promise.
	private readonly dbPromise: Promise<IDBDatabase>;

	constructor(options: IndexedDbSessionPersistDriverOptions = {}) {
		const indexedDb = options.indexedDb ?? globalThis.indexedDB;
		if (!indexedDb) {
			throw new Error("IndexedDB is not available in this runtime.");
		}
		this.indexedDb = indexedDb;
		this.dbName = options.databaseName ?? DEFAULT_DB_NAME;
		this.dbVersion = options.databaseVersion ?? DEFAULT_DB_VERSION;
		this.dbPromise = this.openDatabase();
	}

	/** Returns the session with the given id, or null when absent. */
	async getSession(id: string): Promise<SessionRecord | null> {
		const db = await this.dbPromise;
		const row = await requestToPromise<IDBValidKey | SessionRow | undefined>(
			db.transaction(SESSIONS_STORE, "readonly").objectStore(SESSIONS_STORE).get(id),
		);
		// get() yields undefined for a missing key — guard before decoding.
		if (!row || typeof row !== "object") {
			return null;
		}
		return decodeSessionRow(row as SessionRow);
	}

	/**
	 * Pages sessions ordered by (createdAt, id). The cursor is a stringified
	 * numeric offset into the fully-sorted result; every call loads all rows.
	 */
	async listSessions(request: ListPageRequest = {}): Promise<ListPage<SessionRecord>> {
		const db = await this.dbPromise;
		const rows = await getAllRows<SessionRow>(db, SESSIONS_STORE);
		rows.sort((a, b) => {
			if (a.createdAt !== b.createdAt) {
				return a.createdAt - b.createdAt;
			}
			return a.id.localeCompare(b.id);
		});
		const offset = parseCursor(request.cursor);
		const limit = normalizeLimit(request.limit);
		const slice = rows.slice(offset, offset + limit).map(decodeSessionRow);
		const nextOffset = offset + slice.length;
		return {
			items: slice,
			nextCursor: nextOffset < rows.length ? String(nextOffset) : undefined,
		};
	}

	/** Inserts or overwrites a session record (keyed by id via put). */
	async updateSession(session: SessionRecord): Promise<void> {
		const db = await this.dbPromise;
		await transactionPromise(db, [SESSIONS_STORE], "readwrite", (tx) => {
			tx.objectStore(SESSIONS_STORE).put(encodeSessionRow(session));
		});
	}

	/**
	 * Pages a session's events in (eventIndex, id) order.
	 * NOTE(review): this scans the whole events store rather than querying the
	 * by_session index — presumably so rows without an eventIndex (which would
	 * be absent from the compound index) are still returned; confirm before
	 * switching this to an index-range query.
	 */
	async listEvents(request: ListEventsRequest): Promise<ListPage<SessionEvent>> {
		const db = await this.dbPromise;
		const rows = (await getAllRows<EventRow>(db, EVENTS_STORE))
			.filter((row) => row.sessionId === request.sessionId)
			.sort(compareEventRowsByOrder);
		const offset = parseCursor(request.cursor);
		const limit = normalizeLimit(request.limit);
		const slice = rows.slice(offset, offset + limit).map(decodeEventRow);
		const nextOffset = offset + slice.length;
		return {
			items: slice,
			nextCursor: nextOffset < rows.length ? String(nextOffset) : undefined,
		};
	}

	/** Appends (or overwrites, by id) a single event row. */
	async insertEvent(event: SessionEvent): Promise<void> {
		const db = await this.dbPromise;
		await transactionPromise(db, [EVENTS_STORE], "readwrite", (tx) => {
			tx.objectStore(EVENTS_STORE).put(encodeEventRow(event));
		});
	}

	/** Closes the underlying database handle. */
	async close(): Promise<void> {
		const db = await this.dbPromise;
		db.close();
	}

	/**
	 * Opens (and, on version upgrade, migrates) the database: creates both
	 * stores on first run, and backfills the events index when upgrading a
	 * database that already has the events store.
	 */
	private openDatabase(): Promise<IDBDatabase> {
		return new Promise((resolve, reject) => {
			const request = this.indexedDb.open(this.dbName, this.dbVersion);
			request.onupgradeneeded = () => {
				const db = request.result;
				if (!db.objectStoreNames.contains(SESSIONS_STORE)) {
					db.createObjectStore(SESSIONS_STORE, { keyPath: "id" });
				}
				if (!db.objectStoreNames.contains(EVENTS_STORE)) {
					const events = db.createObjectStore(EVENTS_STORE, { keyPath: "id" });
					events.createIndex(EVENTS_BY_SESSION_INDEX, ["sessionId", "eventIndex", "id"], {
						unique: false,
					});
				} else {
					// Upgrade path: reuse the version-change transaction to add the
					// missing index to the pre-existing events store.
					const tx = request.transaction;
					if (!tx) {
						return;
					}
					const events = tx.objectStore(EVENTS_STORE);
					if (!events.indexNames.contains(EVENTS_BY_SESSION_INDEX)) {
						events.createIndex(EVENTS_BY_SESSION_INDEX, ["sessionId", "eventIndex", "id"], {
							unique: false,
						});
					}
				}
			};
			request.onsuccess = () => resolve(request.result);
			request.onerror = () => reject(request.error ?? new Error("Unable to open IndexedDB"));
		});
	}
}
// Stored row shape for a session record; mirrors SessionRecord field-for-field.
type SessionRow = {
	id: string;
	agent: string;
	agentSessionId: string;
	lastConnectionId: string;
	createdAt: number;
	destroyedAt?: number;
	sessionInit?: SessionRecord["sessionInit"];
};

// Stored row shape for an event. `id` may be numeric and `eventIndex` absent
// (presumably rows written by an older schema); decodeEventRow normalizes both.
type EventRow = {
	id: number | string;
	eventIndex?: number;
	sessionId: string;
	createdAt: number;
	connectionId: string;
	sender: "client" | "agent";
	payload: unknown;
};
/** Projects a SessionRecord onto the plain row shape stored in IndexedDB. */
function encodeSessionRow(session: SessionRecord): SessionRow {
	const { id, agent, agentSessionId, lastConnectionId, createdAt, destroyedAt, sessionInit } =
		session;
	return { id, agent, agentSessionId, lastConnectionId, createdAt, destroyedAt, sessionInit };
}
/** Rebuilds a SessionRecord from its stored row (inverse of encodeSessionRow). */
function decodeSessionRow(row: SessionRow): SessionRecord {
	const { id, agent, agentSessionId, lastConnectionId, createdAt, destroyedAt, sessionInit } =
		row;
	return { id, agent, agentSessionId, lastConnectionId, createdAt, destroyedAt, sessionInit };
}
/** Projects a SessionEvent onto the plain row shape stored in IndexedDB. */
function encodeEventRow(event: SessionEvent): EventRow {
	const { id, eventIndex, sessionId, createdAt, connectionId, sender, payload } = event;
	return { id, eventIndex, sessionId, createdAt, connectionId, sender, payload };
}
function decodeEventRow(row: EventRow): SessionEvent {
return {
id: String(row.id),
eventIndex: parseEventIndex(row.eventIndex, row.id),
sessionId: row.sessionId,
createdAt: row.createdAt,
connectionId: row.connectionId,
sender: row.sender,
payload: row.payload as SessionEvent["payload"],
};
}
/** Reads every row from a store inside a single readonly transaction. */
async function getAllRows<T>(db: IDBDatabase, storeName: string): Promise<T[]> {
	return transactionPromise<T[]>(db, [storeName], "readonly", async (tx) => {
		const rows = await requestToPromise(tx.objectStore(storeName).getAll());
		return rows as T[];
	});
}
/**
 * Clamps a requested page size to a usable integer; anything missing,
 * non-finite, or below 1 falls back to the default page size.
 */
function normalizeLimit(limit: number | undefined): number {
	if (typeof limit !== "number" || !Number.isFinite(limit) || limit < 1) {
		return DEFAULT_LIST_LIMIT;
	}
	return Math.floor(limit);
}
/** Decodes an offset cursor; missing, malformed, or negative values map to 0. */
function parseCursor(cursor: string | undefined): number {
	if (!cursor) {
		return 0;
	}
	const offset = Number.parseInt(cursor, 10);
	return Number.isFinite(offset) && offset >= 0 ? offset : 0;
}
/** Orders event rows by derived eventIndex, breaking ties by stringified id. */
function compareEventRowsByOrder(a: EventRow, b: EventRow): number {
	const delta = parseEventIndex(a.eventIndex, a.id) - parseEventIndex(b.eventIndex, b.id);
	return delta !== 0 ? delta : String(a.id).localeCompare(String(b.id));
}
/**
 * Resolves an event's ordering index: a finite numeric value is floored and
 * clamped to >= 0; otherwise the index is parsed out of the fallback (the row
 * id), and anything unparseable or negative becomes 0.
 */
function parseEventIndex(value: number | undefined, fallback: number | string): number {
	if (typeof value === "number" && Number.isFinite(value)) {
		return Math.max(0, Math.floor(value));
	}
	const derived = Number.parseInt(String(fallback), 10);
	return Number.isFinite(derived) && derived >= 0 ? derived : 0;
}
/** Adapts a one-shot IDBRequest into a promise over its result/error events. */
function requestToPromise<T>(request: IDBRequest<T>): Promise<T> {
	return new Promise((resolve, reject) => {
		const fail = () => reject(request.error ?? new Error("IndexedDB request failed"));
		request.onsuccess = () => resolve(request.result);
		request.onerror = fail;
	});
}
/**
 * Runs `run` inside a new transaction and resolves only after BOTH the
 * callback's result is available AND the transaction has fired `complete`,
 * so readwrite work is committed before the promise settles. Rejects on
 * transaction `error`/`abort`, or on a callback failure (in which case the
 * transaction is aborted best-effort to roll back partial writes).
 */
function transactionPromise<T>(
	db: IDBDatabase,
	stores: string[],
	mode: IDBTransactionMode,
	run: (tx: IDBTransaction) => T | Promise<T>,
): Promise<T> {
	return new Promise((resolve, reject) => {
		const tx = db.transaction(stores, mode);
		// Settlement gate: the promise settles exactly once, and resolution
		// requires both flags below — completion order of the callback vs the
		// transaction events is not guaranteed.
		let settled = false;
		let resultValue: T | undefined;
		let runCompleted = false;
		let txCompleted = false;
		function tryResolve() {
			if (settled || !runCompleted || !txCompleted) {
				return;
			}
			settled = true;
			resolve(resultValue as T);
		}
		tx.oncomplete = () => {
			txCompleted = true;
			tryResolve();
		};
		tx.onerror = () => {
			if (settled) {
				return;
			}
			settled = true;
			reject(tx.error ?? new Error("IndexedDB transaction failed"));
		};
		tx.onabort = () => {
			if (settled) {
				return;
			}
			settled = true;
			reject(tx.error ?? new Error("IndexedDB transaction aborted"));
		};
		// Promise.resolve normalizes sync return values and thrown errors from
		// the callback into the same .then/.catch flow.
		Promise.resolve(run(tx))
			.then((value) => {
				resultValue = value;
				runCompleted = true;
				tryResolve();
			})
			.catch((error) => {
				if (!settled) {
					settled = true;
					reject(error);
				}
				// abort() throws if the transaction already finished — ignore.
				try {
					tx.abort();
				} catch {
					// no-op
				}
			});
	});
}

View file

@ -0,0 +1,96 @@
import "fake-indexeddb/auto";
import { describe, it, expect } from "vitest";
import { IndexedDbSessionPersistDriver } from "../src/index.ts";
/** Builds a per-test database name: prefix + base36 timestamp + random suffix. */
function uniqueDbName(prefix: string): string {
	const stamp = Date.now().toString(36);
	const salt = Math.random().toString(36).slice(2, 10);
	return `${prefix}-${stamp}-${salt}`;
}
// Unit tests against fake-indexeddb: each test uses a unique database name so
// runs are isolated within the shared in-memory IDBFactory.
describe("IndexedDbSessionPersistDriver", () => {
	it("stores and pages sessions and events", async () => {
		const dbName = uniqueDbName("indexeddb-driver");
		const driver = new IndexedDbSessionPersistDriver({ databaseName: dbName });
		// Two sessions with distinct createdAt values so list ordering is deterministic.
		await driver.updateSession({
			id: "s-1",
			agent: "mock",
			agentSessionId: "a-1",
			lastConnectionId: "c-1",
			createdAt: 100,
		});
		await driver.updateSession({
			id: "s-2",
			agent: "mock",
			agentSessionId: "a-2",
			lastConnectionId: "c-2",
			createdAt: 200,
			destroyedAt: 300,
		});
		// Two events on s-1 with explicit eventIndex ordering.
		await driver.insertEvent({
			id: "evt-1",
			eventIndex: 1,
			sessionId: "s-1",
			createdAt: 1,
			connectionId: "c-1",
			sender: "client",
			payload: { jsonrpc: "2.0", method: "session/prompt", params: { sessionId: "a-1" } },
		});
		await driver.insertEvent({
			id: "evt-2",
			eventIndex: 2,
			sessionId: "s-1",
			createdAt: 2,
			connectionId: "c-1",
			sender: "agent",
			payload: { jsonrpc: "2.0", method: "session/update", params: { sessionId: "a-1" } },
		});
		// Round-trip: optional destroyedAt survives storage.
		const loaded = await driver.getSession("s-2");
		expect(loaded?.destroyedAt).toBe(300);
		// Pagination: limit=1 yields s-1 first (createdAt order) plus a cursor…
		const page1 = await driver.listSessions({ limit: 1 });
		expect(page1.items).toHaveLength(1);
		expect(page1.items[0]?.id).toBe("s-1");
		expect(page1.nextCursor).toBeTruthy();
		// …and the cursor resumes at s-2 with no further pages.
		const page2 = await driver.listSessions({ cursor: page1.nextCursor, limit: 1 });
		expect(page2.items).toHaveLength(1);
		expect(page2.items[0]?.id).toBe("s-2");
		expect(page2.nextCursor).toBeUndefined();
		// Events come back filtered to the session and ordered by eventIndex.
		const eventsPage = await driver.listEvents({ sessionId: "s-1", limit: 10 });
		expect(eventsPage.items).toHaveLength(2);
		expect(eventsPage.items[0]?.id).toBe("evt-1");
		expect(eventsPage.items[0]?.eventIndex).toBe(1);
		expect(eventsPage.items[1]?.id).toBe("evt-2");
		expect(eventsPage.items[1]?.eventIndex).toBe(2);
		await driver.close();
	});

	it("persists across driver instances for same database", async () => {
		const dbName = uniqueDbName("indexeddb-reopen");
		// First instance writes and closes…
		{
			const driver = new IndexedDbSessionPersistDriver({ databaseName: dbName });
			await driver.updateSession({
				id: "s-1",
				agent: "mock",
				agentSessionId: "a-1",
				lastConnectionId: "c-1",
				createdAt: 1,
			});
			await driver.close();
		}
		// …a second instance on the same database name reads the data back.
		{
			const driver = new IndexedDbSessionPersistDriver({ databaseName: dbName });
			const session = await driver.getSession("s-1");
			expect(session?.id).toBe("s-1");
			await driver.close();
		}
	});
});

View file

@ -0,0 +1,134 @@
import "fake-indexeddb/auto";
import { describe, it, expect, beforeAll, afterAll } from "vitest";
import { existsSync, mkdtempSync, rmSync } from "node:fs";
import { dirname, join, resolve } from "node:path";
import { fileURLToPath } from "node:url";
import { tmpdir } from "node:os";
import { SandboxAgent } from "sandbox-agent";
import { spawnSandboxAgent, type SandboxAgentSpawnHandle } from "../../typescript/src/spawn.ts";
import { prepareMockAgentDataHome } from "../../typescript/tests/helpers/mock-agent.ts";
import { IndexedDbSessionPersistDriver } from "../src/index.ts";
const __dirname = dirname(fileURLToPath(import.meta.url));
/**
 * Locates the sandbox-agent binary: honors the SANDBOX_AGENT_BIN override,
 * otherwise probes the cargo debug/release output directories; null if absent.
 */
function findBinary(): string | null {
	const fromEnv = process.env.SANDBOX_AGENT_BIN;
	if (fromEnv) {
		return fromEnv;
	}
	const candidates = [
		resolve(__dirname, "../../../target/debug/sandbox-agent"),
		resolve(__dirname, "../../../target/release/sandbox-agent"),
	];
	return candidates.find((candidate) => existsSync(candidate)) ?? null;
}
/** Builds a per-test database name: prefix + base36 timestamp + random suffix. */
function uniqueDbName(prefix: string): string {
	const stamp = Date.now().toString(36);
	const salt = Math.random().toString(36).slice(2, 10);
	return `${prefix}-${stamp}-${salt}`;
}
// Resolve the agent binary once at module load and fail fast with an
// actionable message — the end-to-end suite cannot run without it.
const BINARY_PATH = findBinary();
if (!BINARY_PATH) {
	throw new Error(
		"sandbox-agent binary not found. Build it (cargo build -p sandbox-agent) or set SANDBOX_AGENT_BIN.",
	);
}
// Export the discovered path via the env var — presumably consumed by
// spawnSandboxAgent; confirm against its implementation.
if (!process.env.SANDBOX_AGENT_BIN) {
	process.env.SANDBOX_AGENT_BIN = BINARY_PATH;
}
// End-to-end: a real sandbox-agent process plus the IndexedDB persist driver
// (via fake-indexeddb), exercising session restore across SDK instances.
describe("IndexedDB persistence end-to-end", () => {
	let handle: SandboxAgentSpawnHandle;
	let baseUrl: string;
	let token: string;
	let dataHome: string;

	beforeAll(async () => {
		// Isolated XDG data home so the spawned agent does not touch user state.
		dataHome = mkdtempSync(join(tmpdir(), "indexeddb-integration-"));
		prepareMockAgentDataHome(dataHome);
		handle = await spawnSandboxAgent({
			enabled: true,
			log: "silent",
			timeoutMs: 30000,
			env: {
				XDG_DATA_HOME: dataHome,
			},
		});
		baseUrl = handle.baseUrl;
		token = handle.token;
	});

	afterAll(async () => {
		await handle.dispose();
		rmSync(dataHome, { recursive: true, force: true });
	});

	it("restores sessions/events across sdk instances", async () => {
		const dbName = uniqueDbName("sandbox-agent-browser-e2e");
		// First SDK instance: create a session, prompt once, then tear down.
		const persist1 = new IndexedDbSessionPersistDriver({ databaseName: dbName });
		const sdk1 = await SandboxAgent.connect({
			baseUrl,
			token,
			persist: persist1,
			replayMaxEvents: 40,
			replayMaxChars: 16000,
		});
		const created = await sdk1.createSession({ agent: "mock" });
		await created.prompt([{ type: "text", text: "indexeddb-first" }]);
		const firstConnectionId = created.lastConnectionId;
		await sdk1.dispose();
		await persist1.close();
		// Second SDK instance over the SAME database: the session must resume
		// on a fresh connection.
		const persist2 = new IndexedDbSessionPersistDriver({ databaseName: dbName });
		const sdk2 = await SandboxAgent.connect({
			baseUrl,
			token,
			persist: persist2,
			replayMaxEvents: 40,
			replayMaxChars: 16000,
		});
		const restored = await sdk2.resumeSession(created.id);
		expect(restored.lastConnectionId).not.toBe(firstConnectionId);
		await restored.prompt([{ type: "text", text: "indexeddb-second" }]);
		const sessions = await sdk2.listSessions({ limit: 20 });
		expect(sessions.items.some((entry) => entry.id === created.id)).toBe(true);
		const events = await sdk2.getEvents({ sessionId: created.id, limit: 1000 });
		expect(events.items.length).toBeGreaterThan(0);
		// The resume path should have injected a replay prompt whose first text
		// block carries the replay preamble.
		const replayInjected = events.items.find((event) => {
			if (event.sender !== "client") {
				return false;
			}
			const payload = event.payload as Record<string, unknown>;
			const method = payload.method;
			const params = payload.params as Record<string, unknown> | undefined;
			const prompt = Array.isArray(params?.prompt) ? params?.prompt : [];
			const firstBlock = prompt[0] as Record<string, unknown> | undefined;
			return (
				method === "session/prompt" &&
				typeof firstBlock?.text === "string" &&
				firstBlock.text.includes("Previous session history is replayed below")
			);
		});
		expect(replayInjected).toBeTruthy();
		await sdk2.dispose();
		await persist2.close();
	});
});

View file

@ -0,0 +1,16 @@
{
"compilerOptions": {
"target": "ES2022",
"lib": ["ES2022", "DOM"],
"module": "ESNext",
"moduleResolution": "Bundler",
"allowImportingTsExtensions": true,
"noEmit": true,
"esModuleInterop": true,
"strict": true,
"skipLibCheck": true,
"resolveJsonModule": true
},
"include": ["src/**/*", "tests/**/*"],
"exclude": ["node_modules", "dist"]
}

View file

@ -0,0 +1,10 @@
import { defineConfig } from "tsup";

// Build a single ESM bundle from src/index.ts with type declarations and
// sourcemaps; `clean` wipes dist/ before each build.
export default defineConfig({
	entry: ["src/index.ts"],
	format: ["esm"],
	dts: true,
	sourcemap: true,
	clean: true,
	target: "es2022",
});

View file

@ -0,0 +1,9 @@
import { defineConfig } from "vitest/config";

// Run only tests/**/*.test.ts in a node environment; the generous 60s timeout
// accommodates tests that spawn the sandbox-agent binary.
export default defineConfig({
	test: {
		include: ["tests/**/*.test.ts"],
		testTimeout: 60000,
		environment: "node"
	},
});

View file

@ -0,0 +1,39 @@
{
"name": "@sandbox-agent/persist-postgres",
"version": "0.1.0",
"description": "PostgreSQL persistence driver for the Sandbox Agent TypeScript SDK",
"license": "Apache-2.0",
"repository": {
"type": "git",
"url": "https://github.com/rivet-dev/sandbox-agent"
},
"type": "module",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
"exports": {
".": {
"types": "./dist/index.d.ts",
"import": "./dist/index.js"
}
},
"dependencies": {
"pg": "^8.16.3",
"sandbox-agent": "workspace:*"
},
"files": [
"dist"
],
"scripts": {
"build": "tsup",
"typecheck": "tsc --noEmit",
"test": "vitest run",
"test:watch": "vitest"
},
"devDependencies": {
"@types/node": "^22.0.0",
"@types/pg": "^8.15.6",
"tsup": "^8.0.0",
"typescript": "^5.7.0",
"vitest": "^3.0.0"
}
}

View file

@ -0,0 +1,322 @@
import { Pool, type PoolConfig } from "pg";
import type {
ListEventsRequest,
ListPage,
ListPageRequest,
SessionEvent,
SessionPersistDriver,
SessionRecord,
} from "sandbox-agent";
// Page size used when a list request omits a (valid) limit.
const DEFAULT_LIST_LIMIT = 100;

/**
 * Configuration for PostgresSessionPersistDriver. Supply either an existing
 * `pool` or connection details from which the driver builds its own pool.
 */
export interface PostgresSessionPersistDriverOptions {
	/** Connection string used when the driver creates its own pool. */
	connectionString?: string;
	/** Pre-built pg pool to reuse; the driver records it as not owned. */
	pool?: Pool;
	/** Extra pg PoolConfig spread in when the driver creates its own pool. */
	poolConfig?: PoolConfig;
	/** Schema holding the driver's tables; defaults to "public". */
	schema?: string;
}
export class PostgresSessionPersistDriver implements SessionPersistDriver {
private readonly pool: Pool;
private readonly ownsPool: boolean;
private readonly schema: string;
private readonly initialized: Promise<void>;
constructor(options: PostgresSessionPersistDriverOptions = {}) {
	// Schema name is sanitized by normalizeSchema before being interpolated
	// into SQL identifiers via this.table().
	this.schema = normalizeSchema(options.schema ?? "public");
	if (options.pool) {
		// Caller-supplied pool: mark it as not owned (presumably so close()
		// leaves it open for the caller — confirm against close()).
		this.pool = options.pool;
		this.ownsPool = false;
	} else {
		this.pool = new Pool({
			connectionString: options.connectionString,
			...options.poolConfig,
		});
		this.ownsPool = true;
	}
	// Kick off setup eagerly; each query method awaits it via this.ready().
	this.initialized = this.initialize();
}
/** Returns the session with the given id, or null when no row matches. */
async getSession(id: string): Promise<SessionRecord | null> {
	await this.ready();
	// Parameterized query; only the sanitized table identifier is interpolated.
	const result = await this.pool.query<SessionRow>(
		`SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, session_init_json
FROM ${this.table("sessions")}
WHERE id = $1`,
		[id],
	);
	if (result.rows.length === 0) {
		return null;
	}
	return decodeSessionRow(result.rows[0]);
}
async listSessions(request: ListPageRequest = {}): Promise<ListPage<SessionRecord>> {
await this.ready();
const offset = parseCursor(request.cursor);
const limit = normalizeLimit(request.limit);
const rowsResult = await this.pool.query<SessionRow>(
`SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, session_init_json
FROM ${this.table("sessions")}
ORDER BY created_at ASC, id ASC
LIMIT $1 OFFSET $2`,
[limit, offset],
);
const countResult = await this.pool.query<{ count: string }>(`SELECT COUNT(*) AS count FROM ${this.table("sessions")}`);
const total = parseInteger(countResult.rows[0]?.count ?? "0");
const nextOffset = offset + rowsResult.rows.length;
return {
items: rowsResult.rows.map(decodeSessionRow),
nextCursor: nextOffset < total ? String(nextOffset) : undefined,
};
}
async updateSession(session: SessionRecord): Promise<void> {
await this.ready();
await this.pool.query(
`INSERT INTO ${this.table("sessions")} (
id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, session_init_json
) VALUES ($1, $2, $3, $4, $5, $6, $7)
ON CONFLICT(id) DO UPDATE SET
agent = EXCLUDED.agent,
agent_session_id = EXCLUDED.agent_session_id,
last_connection_id = EXCLUDED.last_connection_id,
created_at = EXCLUDED.created_at,
destroyed_at = EXCLUDED.destroyed_at,
session_init_json = EXCLUDED.session_init_json`,
[
session.id,
session.agent,
session.agentSessionId,
session.lastConnectionId,
session.createdAt,
session.destroyedAt ?? null,
session.sessionInit ?? null,
],
);
}
async listEvents(request: ListEventsRequest): Promise<ListPage<SessionEvent>> {
await this.ready();
const offset = parseCursor(request.cursor);
const limit = normalizeLimit(request.limit);
const rowsResult = await this.pool.query<EventRow>(
`SELECT id, event_index, session_id, created_at, connection_id, sender, payload_json
FROM ${this.table("events")}
WHERE session_id = $1
ORDER BY event_index ASC, id ASC
LIMIT $2 OFFSET $3`,
[request.sessionId, limit, offset],
);
const countResult = await this.pool.query<{ count: string }>(
`SELECT COUNT(*) AS count FROM ${this.table("events")} WHERE session_id = $1`,
[request.sessionId],
);
const total = parseInteger(countResult.rows[0]?.count ?? "0");
const nextOffset = offset + rowsResult.rows.length;
return {
items: rowsResult.rows.map(decodeEventRow),
nextCursor: nextOffset < total ? String(nextOffset) : undefined,
};
}
async insertEvent(event: SessionEvent): Promise<void> {
await this.ready();
await this.pool.query(
`INSERT INTO ${this.table("events")} (
id, event_index, session_id, created_at, connection_id, sender, payload_json
) VALUES ($1, $2, $3, $4, $5, $6, $7)
ON CONFLICT(id) DO UPDATE SET
event_index = EXCLUDED.event_index,
session_id = EXCLUDED.session_id,
created_at = EXCLUDED.created_at,
connection_id = EXCLUDED.connection_id,
sender = EXCLUDED.sender,
payload_json = EXCLUDED.payload_json`,
[
event.id,
event.eventIndex,
event.sessionId,
event.createdAt,
event.connectionId,
event.sender,
event.payload,
],
);
}
async close(): Promise<void> {
if (!this.ownsPool) {
return;
}
await this.pool.end();
}
private async ready(): Promise<void> {
await this.initialized;
}
private table(name: "sessions" | "events"): string {
return `"${this.schema}"."${name}"`;
}
private async initialize(): Promise<void> {
await this.pool.query(`CREATE SCHEMA IF NOT EXISTS "${this.schema}"`);
await this.pool.query(`
CREATE TABLE IF NOT EXISTS ${this.table("sessions")} (
id TEXT PRIMARY KEY,
agent TEXT NOT NULL,
agent_session_id TEXT NOT NULL,
last_connection_id TEXT NOT NULL,
created_at BIGINT NOT NULL,
destroyed_at BIGINT,
session_init_json JSONB
)
`);
await this.pool.query(`
CREATE TABLE IF NOT EXISTS ${this.table("events")} (
id TEXT PRIMARY KEY,
event_index BIGINT NOT NULL,
session_id TEXT NOT NULL,
created_at BIGINT NOT NULL,
connection_id TEXT NOT NULL,
sender TEXT NOT NULL,
payload_json JSONB NOT NULL
)
`);
await this.pool.query(`
ALTER TABLE ${this.table("events")}
ALTER COLUMN id TYPE TEXT USING id::TEXT
`);
await this.pool.query(`
ALTER TABLE ${this.table("events")}
ADD COLUMN IF NOT EXISTS event_index BIGINT
`);
await this.pool.query(`
WITH ranked AS (
SELECT id, ROW_NUMBER() OVER (PARTITION BY session_id ORDER BY created_at ASC, id ASC) AS ranked_index
FROM ${this.table("events")}
)
UPDATE ${this.table("events")} AS current_events
SET event_index = ranked.ranked_index
FROM ranked
WHERE current_events.id = ranked.id
AND current_events.event_index IS NULL
`);
await this.pool.query(`
ALTER TABLE ${this.table("events")}
ALTER COLUMN event_index SET NOT NULL
`);
await this.pool.query(`
CREATE INDEX IF NOT EXISTS idx_events_session_order
ON ${this.table("events")}(session_id, event_index, id)
`);
}
}
// Raw row shape of the `sessions` table. BIGINT columns can arrive from the
// pg driver as strings, hence `string | number`.
type SessionRow = {
  id: string;
  agent: string;
  agent_session_id: string;
  last_connection_id: string;
  created_at: string | number;
  destroyed_at: string | number | null;
  session_init_json: unknown | null;
};

// Raw row shape of the `events` table; see SessionRow for the string|number note.
type EventRow = {
  id: string | number;
  event_index: string | number;
  session_id: string;
  created_at: string | number;
  connection_id: string;
  sender: string;
  payload_json: unknown;
};
/**
 * Convert a raw `sessions` row into the SDK's `SessionRecord` shape.
 *
 * BIGINT columns may come back from pg as strings, so they are parsed
 * defensively. `session_init_json` is treated as absent only when it is SQL
 * NULL; the previous truthiness check would also have dropped any stored
 * falsy JSON value (e.g. `false`, `0`, `""`).
 */
function decodeSessionRow(row: SessionRow): SessionRecord {
  return {
    id: row.id,
    agent: row.agent,
    agentSessionId: row.agent_session_id,
    lastConnectionId: row.last_connection_id,
    createdAt: parseInteger(row.created_at),
    destroyedAt: row.destroyed_at === null ? undefined : parseInteger(row.destroyed_at),
    // Explicit null check so a stored-but-falsy JSON value round-trips.
    sessionInit:
      row.session_init_json == null
        ? undefined
        : (row.session_init_json as SessionRecord["sessionInit"]),
  };
}
/**
 * Convert a raw `events` row into the SDK's `SessionEvent` shape.
 * BIGINT columns may arrive as strings and are parsed defensively; the
 * sender column is validated against the two values the SDK understands.
 */
function decodeEventRow(row: EventRow): SessionEvent {
  const {
    id,
    event_index: rawIndex,
    session_id: sessionId,
    created_at: rawCreatedAt,
    connection_id: connectionId,
    sender: rawSender,
    payload_json: rawPayload,
  } = row;
  return {
    id: String(id),
    eventIndex: parseInteger(rawIndex),
    sessionId,
    createdAt: parseInteger(rawCreatedAt),
    connectionId,
    sender: parseSender(rawSender),
    payload: rawPayload as SessionEvent["payload"],
  };
}
/**
 * Clamp a caller-supplied page size to a positive whole number, substituting
 * the module default when it is absent, non-finite, or below 1.
 */
function normalizeLimit(limit: number | undefined): number {
  if (Number.isFinite(limit) && (limit as number) >= 1) {
    return Math.floor(limit as number);
  }
  return DEFAULT_LIST_LIMIT;
}
/** Decode an opaque offset cursor; anything absent or unparsable means offset 0. */
function parseCursor(cursor: string | undefined): number {
  if (cursor === undefined || cursor === "") {
    return 0;
  }
  const offset = Number.parseInt(cursor, 10);
  return Number.isFinite(offset) && offset >= 0 ? offset : 0;
}
/**
 * Coerce a pg numeric value (a number, or a string for BIGINT columns) to a
 * finite JS number. Throws when the driver hands back something unusable.
 */
function parseInteger(value: string | number): number {
  const parsed = typeof value === "string" ? Number.parseInt(value, 10) : value;
  if (Number.isFinite(parsed)) {
    return parsed;
  }
  throw new Error(`Invalid integer value returned by postgres: ${String(value)}`);
}
/** Validate the sender column against the two values the SDK understands. */
function parseSender(value: string): SessionEvent["sender"] {
  switch (value) {
    case "agent":
    case "client":
      return value;
    default:
      throw new Error(`Invalid sender value returned by postgres: ${value}`);
  }
}
/**
 * Ensure a schema name is a safe SQL identifier before it is interpolated
 * into DDL/queries; rejects anything outside [A-Za-z0-9_] (no leading digit).
 */
function normalizeSchema(schema: string): string {
  const isValidIdentifier = /^[A-Za-z_][A-Za-z0-9_]*$/.test(schema);
  if (isValidIdentifier) {
    return schema;
  }
  throw new Error(`Invalid schema name '${schema}'. Use letters, numbers, and underscores only.`);
}

View file

@ -0,0 +1,250 @@
import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it } from "vitest";
import { execFileSync } from "node:child_process";
import { existsSync, mkdtempSync, rmSync } from "node:fs";
import { dirname, join, resolve } from "node:path";
import { fileURLToPath } from "node:url";
import { tmpdir } from "node:os";
import { randomUUID } from "node:crypto";
import { Client } from "pg";
import { SandboxAgent } from "sandbox-agent";
import { spawnSandboxAgent, type SandboxAgentSpawnHandle } from "../../typescript/src/spawn.ts";
import { prepareMockAgentDataHome } from "../../typescript/tests/helpers/mock-agent.ts";
import { PostgresSessionPersistDriver } from "../src/index.ts";
const __dirname = dirname(fileURLToPath(import.meta.url));
/**
 * Locate the sandbox-agent binary: an explicit SANDBOX_AGENT_BIN env var
 * wins, otherwise probe the cargo debug/release output directories.
 * Returns null when nothing is found.
 */
function findBinary(): string | null {
  const fromEnv = process.env.SANDBOX_AGENT_BIN;
  if (fromEnv) {
    return fromEnv;
  }
  const candidates = [
    resolve(__dirname, "../../../target/debug/sandbox-agent"),
    resolve(__dirname, "../../../target/release/sandbox-agent"),
  ];
  return candidates.find((candidate) => existsSync(candidate)) ?? null;
}
// Resolve the binary once at module load so the suite fails fast with an
// actionable message instead of timing out later.
const BINARY_PATH = findBinary();
if (!BINARY_PATH) {
  throw new Error(
    "sandbox-agent binary not found. Build it (cargo build -p sandbox-agent) or set SANDBOX_AGENT_BIN.",
  );
}
// Propagate the discovered path so spawned helpers reading the env var agree.
if (!process.env.SANDBOX_AGENT_BIN) {
  process.env.SANDBOX_AGENT_BIN = BINARY_PATH;
}
// Handle for a disposable dockerized postgres instance used by one test.
interface PostgresContainer {
  containerId: string;
  connectionString: string;
}
/**
 * Integration suite: spawns one sandbox-agent process for the whole file and
 * a fresh postgres container per test, then verifies that session/event
 * history written through one SDK instance is visible to a second one.
 */
describe("Postgres persistence driver", () => {
  let handle: SandboxAgentSpawnHandle;
  let baseUrl: string;
  let token: string;
  let dataHome: string;
  // Fresh container per test; torn down in afterEach.
  let postgres: PostgresContainer | null = null;

  beforeAll(async () => {
    // Isolated XDG data home so the agent never touches the real one.
    dataHome = mkdtempSync(join(tmpdir(), "postgres-integration-"));
    prepareMockAgentDataHome(dataHome);
    handle = await spawnSandboxAgent({
      enabled: true,
      log: "silent",
      timeoutMs: 30000,
      env: {
        XDG_DATA_HOME: dataHome,
      },
    });
    baseUrl = handle.baseUrl;
    token = handle.token;
  });

  beforeEach(async () => {
    postgres = await startPostgresContainer();
  });

  afterEach(() => {
    if (postgres) {
      stopPostgresContainer(postgres.containerId);
      postgres = null;
    }
  });

  afterAll(async () => {
    await handle.dispose();
    rmSync(dataHome, { recursive: true, force: true });
  });

  it("persists session/event history across SDK instances and supports replay restore", async () => {
    const connectionString = requirePostgres(postgres).connectionString;
    // First SDK instance: create a session and write one prompt.
    const persist1 = new PostgresSessionPersistDriver({
      connectionString,
    });
    const sdk1 = await SandboxAgent.connect({
      baseUrl,
      token,
      persist: persist1,
      replayMaxEvents: 40,
      replayMaxChars: 16000,
    });
    const created = await sdk1.createSession({ agent: "mock" });
    await created.prompt([{ type: "text", text: "postgres-first" }]);
    const firstConnectionId = created.lastConnectionId;
    await sdk1.dispose();
    await persist1.close();
    // Second SDK instance against the same database: resume and continue.
    const persist2 = new PostgresSessionPersistDriver({
      connectionString,
    });
    const sdk2 = await SandboxAgent.connect({
      baseUrl,
      token,
      persist: persist2,
      replayMaxEvents: 40,
      replayMaxChars: 16000,
    });
    const restored = await sdk2.resumeSession(created.id);
    // Resuming must establish a new connection, not reuse the old id.
    expect(restored.lastConnectionId).not.toBe(firstConnectionId);
    await restored.prompt([{ type: "text", text: "postgres-second" }]);
    const sessions = await sdk2.listSessions({ limit: 20 });
    expect(sessions.items.some((entry) => entry.id === created.id)).toBe(true);
    const events = await sdk2.getEvents({ sessionId: created.id, limit: 1000 });
    expect(events.items.length).toBeGreaterThan(0);
    expect(events.items.every((event) => typeof event.id === "string")).toBe(true);
    expect(events.items.every((event) => Number.isInteger(event.eventIndex))).toBe(true);
    // Events must come back in non-decreasing eventIndex order.
    for (let i = 1; i < events.items.length; i += 1) {
      expect(events.items[i]!.eventIndex).toBeGreaterThanOrEqual(events.items[i - 1]!.eventIndex);
    }
    // The resume path should have injected a replay prompt into the history.
    const replayInjected = events.items.find((event) => {
      if (event.sender !== "client") {
        return false;
      }
      const payload = event.payload as Record<string, unknown>;
      const method = payload.method;
      const params = payload.params as Record<string, unknown> | undefined;
      const prompt = Array.isArray(params?.prompt) ? params?.prompt : [];
      const firstBlock = prompt[0] as Record<string, unknown> | undefined;
      return (
        method === "session/prompt" &&
        typeof firstBlock?.text === "string" &&
        firstBlock.text.includes("Previous session history is replayed below")
      );
    });
    expect(replayInjected).toBeTruthy();
    await sdk2.dispose();
    await persist2.close();
  });
});
/**
 * Start a throwaway postgres:16 container bound to a random loopback port
 * and wait until it accepts connections. `--rm` makes docker clean up the
 * container automatically once it is removed/stopped.
 */
async function startPostgresContainer(): Promise<PostgresContainer> {
  const name = `sandbox-agent-postgres-${randomUUID()}`;
  const containerId = runDockerCommand([
    "run",
    "-d",
    "--rm",
    "--name",
    name,
    "-e",
    "POSTGRES_USER=postgres",
    "-e",
    "POSTGRES_PASSWORD=postgres",
    "-e",
    "POSTGRES_DB=sandboxagent",
    "-p",
    // Empty host port lets docker pick a free one on 127.0.0.1.
    "127.0.0.1::5432",
    "postgres:16-alpine",
  ]);
  const portOutput = runDockerCommand(["port", containerId, "5432/tcp"]);
  const port = parsePort(portOutput);
  const connectionString = `postgres://postgres:postgres@127.0.0.1:${port}/sandboxagent`;
  // The container takes a moment to start accepting connections.
  await waitForPostgres(connectionString);
  return {
    containerId,
    connectionString,
  };
}
/** Force-remove a container, swallowing failures since teardown may race. */
function stopPostgresContainer(containerId: string): void {
  const removeArgs = ["rm", "-f", containerId];
  try {
    runDockerCommand(removeArgs);
  } catch {
    // Container may already be gone when test teardown runs.
  }
}
/** Run `docker <args>` synchronously and return its trimmed stdout. */
function runDockerCommand(args: string[]): string {
  const stdout = execFileSync("docker", args, {
    encoding: "utf8",
    stdio: ["ignore", "pipe", "pipe"],
  });
  return stdout.trim();
}
/**
 * Extract the host port from `docker port` output (e.g. "127.0.0.1:49153").
 * Only the first line is considered, since docker may print one row per
 * address family.
 */
function parsePort(output: string): string {
  const [firstLine = ""] = output.split("\n");
  const match = firstLine.trim().match(/:(\d+)$/);
  if (match) {
    return match[1];
  }
  throw new Error(`Failed to parse docker port output: '${output}'`);
}
/**
 * Poll the database until `SELECT 1` succeeds or a 30s deadline passes,
 * retrying every 250ms. Rethrows the last connection error on timeout so
 * failures are diagnosable.
 */
async function waitForPostgres(connectionString: string): Promise<void> {
  const timeoutMs = 30000;
  const deadline = Date.now() + timeoutMs;
  let lastError: unknown;
  while (Date.now() < deadline) {
    // Fresh client per attempt: a failed connect leaves the client unusable.
    const client = new Client({ connectionString });
    try {
      await client.connect();
      await client.query("SELECT 1");
      await client.end();
      return;
    } catch (error) {
      lastError = error;
      try {
        await client.end();
      } catch {
        // Ignore cleanup failures while retrying.
      }
      await delay(250);
    }
  }
  throw new Error(`Postgres container did not become ready: ${String(lastError)}`);
}
/** Promise-based sleep helper. */
function delay(ms: number): Promise<void> {
  return new Promise<void>((done) => {
    setTimeout(done, ms);
  });
}
/** Narrow the nullable container handle, failing loudly if setup never ran. */
function requirePostgres(container: PostgresContainer | null): PostgresContainer {
  if (container) {
    return container;
  }
  throw new Error("Postgres container was not initialized for this test.");
}

View file

@ -0,0 +1,16 @@
{
"compilerOptions": {
"target": "ES2022",
"lib": ["ES2022", "DOM"],
"module": "ESNext",
"moduleResolution": "Bundler",
"allowImportingTsExtensions": true,
"noEmit": true,
"esModuleInterop": true,
"strict": true,
"skipLibCheck": true,
"resolveJsonModule": true
},
"include": ["src/**/*", "tests/**/*"],
"exclude": ["node_modules", "dist"]
}

View file

@ -0,0 +1,10 @@
import { defineConfig } from "tsup";

// Build configuration: a single ESM bundle with .d.ts output and sourcemaps.
export default defineConfig({
  entry: ["src/index.ts"],
  format: ["esm"],
  dts: true,
  sourcemap: true,
  clean: true,
  target: "es2022",
});

View file

@ -0,0 +1,9 @@
import { defineConfig } from "vitest/config";

// Vitest configuration. Long test and hook timeouts because setup/teardown
// may start external services (e.g. containers) — TODO confirm which package.
export default defineConfig({
  test: {
    include: ["tests/**/*.test.ts"],
    testTimeout: 120000,
    hookTimeout: 120000,
  },
});

View file

@ -0,0 +1,45 @@
{
"name": "@sandbox-agent/persist-rivet",
"version": "0.1.0",
"description": "Rivet Actor persistence driver for the Sandbox Agent TypeScript SDK",
"license": "Apache-2.0",
"repository": {
"type": "git",
"url": "https://github.com/rivet-dev/sandbox-agent"
},
"type": "module",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
"exports": {
".": {
"types": "./dist/index.d.ts",
"import": "./dist/index.js"
}
},
"dependencies": {
"sandbox-agent": "workspace:*"
},
"peerDependencies": {
"rivetkit": ">=0.5.0"
},
"peerDependenciesMeta": {
"rivetkit": {
"optional": true
}
},
"files": [
"dist"
],
"scripts": {
"build": "tsup",
"typecheck": "tsc --noEmit",
"test": "vitest run",
"test:watch": "vitest"
},
"devDependencies": {
"@types/node": "^22.0.0",
"tsup": "^8.0.0",
"typescript": "^5.7.0",
"vitest": "^3.0.0"
}
}

View file

@ -0,0 +1,180 @@
import type {
ListEventsRequest,
ListPage,
ListPageRequest,
SessionEvent,
SessionPersistDriver,
SessionRecord,
} from "sandbox-agent";
/** Structural type compatible with rivetkit's ActorContext without importing it. */
export interface ActorContextLike {
  state: Record<string, unknown>;
}

/** Shape of the persist payload this driver stores on actor state. */
export interface RivetPersistData {
  sessions: Record<string, SessionRecord>;
  events: Record<string, SessionEvent[]>;
}

/** Actor-state slice as stored under the default state key. */
export type RivetPersistState = {
  _sandboxAgentPersist: RivetPersistData;
};

export interface RivetSessionPersistDriverOptions {
  /** Maximum number of sessions to retain. Oldest are evicted first. Default: 1024. */
  maxSessions?: number;
  /** Maximum events per session. Oldest are trimmed first. Default: 500. */
  maxEventsPerSession?: number;
  /** Key on `c.state` where persist data is stored. Default: `"_sandboxAgentPersist"`. */
  stateKey?: string;
}

const DEFAULT_MAX_SESSIONS = 1024;
const DEFAULT_MAX_EVENTS_PER_SESSION = 500;
// Page size used when a list request does not specify a limit.
const DEFAULT_LIST_LIMIT = 100;
const DEFAULT_STATE_KEY = "_sandboxAgentPersist";
/**
 * Persistence driver that stores its data directly on a Rivet actor's
 * `state` object, so it is persisted alongside the rest of the actor state
 * and survives actor sleep/wake cycles.
 *
 * Capacity is bounded: the oldest sessions are evicted past `maxSessions`,
 * and each session keeps at most `maxEventsPerSession` events. All reads
 * return deep clones so callers cannot mutate the stored state.
 */
export class RivetSessionPersistDriver implements SessionPersistDriver {
  private readonly maxSessions: number;
  private readonly maxEventsPerSession: number;
  // Key on ctx.state under which the RivetPersistData slice lives.
  private readonly stateKey: string;
  private readonly ctx: ActorContextLike;

  constructor(ctx: ActorContextLike, options: RivetSessionPersistDriverOptions = {}) {
    this.ctx = ctx;
    this.maxSessions = normalizeCap(options.maxSessions, DEFAULT_MAX_SESSIONS);
    this.maxEventsPerSession = normalizeCap(
      options.maxEventsPerSession,
      DEFAULT_MAX_EVENTS_PER_SESSION,
    );
    this.stateKey = options.stateKey ?? DEFAULT_STATE_KEY;
    // Auto-initialize if absent; preserve existing data on actor wake.
    if (!this.ctx.state[this.stateKey]) {
      this.ctx.state[this.stateKey] = { sessions: {}, events: {} } satisfies RivetPersistData;
    }
  }

  // Live view into the actor-state slice this driver owns.
  private get data(): RivetPersistData {
    return this.ctx.state[this.stateKey] as RivetPersistData;
  }

  /** Load a session by id (deep-cloned), or null when unknown. */
  async getSession(id: string): Promise<SessionRecord | null> {
    const session = this.data.sessions[id];
    return session ? cloneSessionRecord(session) : null;
  }

  /** Page sessions ordered by createdAt, with id as a stable tiebreak. */
  async listSessions(request: ListPageRequest = {}): Promise<ListPage<SessionRecord>> {
    const sorted = Object.values(this.data.sessions).sort((a, b) => {
      if (a.createdAt !== b.createdAt) {
        return a.createdAt - b.createdAt;
      }
      return a.id.localeCompare(b.id);
    });
    const page = paginate(sorted, request);
    return {
      items: page.items.map(cloneSessionRecord),
      nextCursor: page.nextCursor,
    };
  }

  /** Upsert a session, then evict the oldest sessions past maxSessions. */
  async updateSession(session: SessionRecord): Promise<void> {
    this.data.sessions[session.id] = { ...session };
    if (!this.data.events[session.id]) {
      this.data.events[session.id] = [];
    }
    const ids = Object.keys(this.data.sessions);
    if (ids.length <= this.maxSessions) {
      return;
    }
    const overflow = ids.length - this.maxSessions;
    // Evict oldest-first (createdAt, then id); events go with the session.
    const removable = Object.values(this.data.sessions)
      .sort((a, b) => {
        if (a.createdAt !== b.createdAt) {
          return a.createdAt - b.createdAt;
        }
        return a.id.localeCompare(b.id);
      })
      .slice(0, overflow)
      .map((s) => s.id);
    for (const sessionId of removable) {
      delete this.data.sessions[sessionId];
      delete this.data.events[sessionId];
    }
  }

  /** Page one session's events ordered by eventIndex, then id. */
  async listEvents(request: ListEventsRequest): Promise<ListPage<SessionEvent>> {
    const all = [...(this.data.events[request.sessionId] ?? [])].sort((a, b) => {
      if (a.eventIndex !== b.eventIndex) {
        return a.eventIndex - b.eventIndex;
      }
      return a.id.localeCompare(b.id);
    });
    const page = paginate(all, request);
    return {
      items: page.items.map(cloneSessionEvent),
      nextCursor: page.nextCursor,
    };
  }

  /** Append an event, trimming the oldest past maxEventsPerSession. */
  async insertEvent(event: SessionEvent): Promise<void> {
    const events = this.data.events[event.sessionId] ?? [];
    events.push(cloneSessionEvent(event));
    if (events.length > this.maxEventsPerSession) {
      events.splice(0, events.length - this.maxEventsPerSession);
    }
    this.data.events[event.sessionId] = events;
  }
}
/**
 * Deep-copy a session record so callers cannot mutate persisted state.
 * Only `sessionInit` needs a structural copy; the other fields are spread.
 */
function cloneSessionRecord(session: SessionRecord): SessionRecord {
  const copy: SessionRecord = { ...session };
  if (session.sessionInit) {
    copy.sessionInit = JSON.parse(
      JSON.stringify(session.sessionInit),
    ) as SessionRecord["sessionInit"];
  } else {
    copy.sessionInit = undefined;
  }
  return copy;
}
/** Deep-copy an event; only the payload needs a structural copy. */
function cloneSessionEvent(event: SessionEvent): SessionEvent {
  const payloadCopy = JSON.parse(JSON.stringify(event.payload)) as SessionEvent["payload"];
  return { ...event, payload: payloadCopy };
}
/** Clamp a cap to a positive whole number, using `fallback` when absent/invalid. */
function normalizeCap(value: number | undefined, fallback: number): number {
  if (Number.isFinite(value) && (value as number) >= 1) {
    return Math.floor(value as number);
  }
  return fallback;
}
/**
 * Offset-based pagination over an already-sorted in-memory list.
 * The returned cursor is the stringified offset of the next item, or
 * undefined when the list is exhausted.
 */
function paginate<T>(items: T[], request: ListPageRequest): ListPage<T> {
  const start = parseCursor(request.cursor);
  const pageSize = normalizeCap(request.limit, DEFAULT_LIST_LIMIT);
  const pageItems = items.slice(start, start + pageSize);
  const consumed = start + pageItems.length;
  return {
    items: pageItems,
    nextCursor: consumed < items.length ? String(consumed) : undefined,
  };
}
/** Decode a pagination cursor back to a non-negative offset (default 0). */
function parseCursor(cursor: string | undefined): number {
  if (!cursor) {
    return 0;
  }
  const value = Number.parseInt(cursor, 10);
  if (Number.isFinite(value) && value >= 0) {
    return value;
  }
  return 0;
}

View file

@ -0,0 +1,236 @@
import { describe, it, expect } from "vitest";
import { RivetSessionPersistDriver } from "../src/index.ts";
import type { RivetPersistData } from "../src/index.ts";
/** Build a minimal ActorContext stand-in with empty actor state. */
function makeCtx() {
  const state: Record<string, unknown> = {};
  return { state };
}
/**
 * Unit suite for the in-memory, actor-state-backed persistence driver:
 * state initialization/wake behavior, CRUD, pagination, capacity eviction,
 * defensive cloning, and custom state-key support.
 */
describe("RivetSessionPersistDriver", () => {
  it("auto-initializes state on construction", () => {
    const ctx = makeCtx();
    new RivetSessionPersistDriver(ctx);
    const data = ctx.state._sandboxAgentPersist as RivetPersistData;
    expect(data).toBeDefined();
    expect(data.sessions).toEqual({});
    expect(data.events).toEqual({});
  });
  it("preserves existing state on construction (actor wake)", async () => {
    const ctx = makeCtx();
    const driver1 = new RivetSessionPersistDriver(ctx);
    await driver1.updateSession({
      id: "s-1",
      agent: "mock",
      agentSessionId: "a-1",
      lastConnectionId: "c-1",
      createdAt: 100,
    });
    // Simulate actor wake: new driver instance, same state object
    const driver2 = new RivetSessionPersistDriver(ctx);
    const session = await driver2.getSession("s-1");
    expect(session?.id).toBe("s-1");
    expect(session?.createdAt).toBe(100);
  });
  it("stores and retrieves sessions", async () => {
    const driver = new RivetSessionPersistDriver(makeCtx());
    await driver.updateSession({
      id: "s-1",
      agent: "mock",
      agentSessionId: "a-1",
      lastConnectionId: "c-1",
      createdAt: 100,
    });
    await driver.updateSession({
      id: "s-2",
      agent: "mock",
      agentSessionId: "a-2",
      lastConnectionId: "c-2",
      createdAt: 200,
      destroyedAt: 300,
    });
    const loaded = await driver.getSession("s-2");
    expect(loaded?.destroyedAt).toBe(300);
    const missing = await driver.getSession("s-nonexistent");
    expect(missing).toBeNull();
  });
  it("pages sessions sorted by createdAt", async () => {
    const driver = new RivetSessionPersistDriver(makeCtx());
    await driver.updateSession({
      id: "s-1",
      agent: "mock",
      agentSessionId: "a-1",
      lastConnectionId: "c-1",
      createdAt: 100,
    });
    await driver.updateSession({
      id: "s-2",
      agent: "mock",
      agentSessionId: "a-2",
      lastConnectionId: "c-2",
      createdAt: 200,
    });
    // Walk both pages via the returned cursor.
    const page1 = await driver.listSessions({ limit: 1 });
    expect(page1.items).toHaveLength(1);
    expect(page1.items[0]?.id).toBe("s-1");
    expect(page1.nextCursor).toBeTruthy();
    const page2 = await driver.listSessions({ cursor: page1.nextCursor, limit: 1 });
    expect(page2.items).toHaveLength(1);
    expect(page2.items[0]?.id).toBe("s-2");
    expect(page2.nextCursor).toBeUndefined();
  });
  it("stores and pages events", async () => {
    const driver = new RivetSessionPersistDriver(makeCtx());
    await driver.updateSession({
      id: "s-1",
      agent: "mock",
      agentSessionId: "a-1",
      lastConnectionId: "c-1",
      createdAt: 1,
    });
    await driver.insertEvent({
      id: "evt-1",
      eventIndex: 1,
      sessionId: "s-1",
      createdAt: 1,
      connectionId: "c-1",
      sender: "client",
      payload: { jsonrpc: "2.0", method: "session/prompt", params: { sessionId: "a-1" } },
    });
    await driver.insertEvent({
      id: "evt-2",
      eventIndex: 2,
      sessionId: "s-1",
      createdAt: 2,
      connectionId: "c-1",
      sender: "agent",
      payload: { jsonrpc: "2.0", method: "session/update", params: { sessionId: "a-1" } },
    });
    const eventsPage = await driver.listEvents({ sessionId: "s-1", limit: 10 });
    expect(eventsPage.items).toHaveLength(2);
    expect(eventsPage.items[0]?.id).toBe("evt-1");
    expect(eventsPage.items[0]?.eventIndex).toBe(1);
    expect(eventsPage.items[1]?.id).toBe("evt-2");
    expect(eventsPage.items[1]?.eventIndex).toBe(2);
  });
  it("evicts oldest sessions when maxSessions exceeded", async () => {
    const driver = new RivetSessionPersistDriver(makeCtx(), { maxSessions: 2 });
    await driver.updateSession({
      id: "s-1",
      agent: "mock",
      agentSessionId: "a-1",
      lastConnectionId: "c-1",
      createdAt: 100,
    });
    await driver.updateSession({
      id: "s-2",
      agent: "mock",
      agentSessionId: "a-2",
      lastConnectionId: "c-2",
      createdAt: 200,
    });
    // Adding a third session should evict the oldest (s-1)
    await driver.updateSession({
      id: "s-3",
      agent: "mock",
      agentSessionId: "a-3",
      lastConnectionId: "c-3",
      createdAt: 300,
    });
    expect(await driver.getSession("s-1")).toBeNull();
    expect(await driver.getSession("s-2")).not.toBeNull();
    expect(await driver.getSession("s-3")).not.toBeNull();
  });
  it("trims oldest events when maxEventsPerSession exceeded", async () => {
    const driver = new RivetSessionPersistDriver(makeCtx(), { maxEventsPerSession: 2 });
    await driver.updateSession({
      id: "s-1",
      agent: "mock",
      agentSessionId: "a-1",
      lastConnectionId: "c-1",
      createdAt: 1,
    });
    for (let i = 1; i <= 3; i++) {
      await driver.insertEvent({
        id: `evt-${i}`,
        eventIndex: i,
        sessionId: "s-1",
        createdAt: i,
        connectionId: "c-1",
        sender: "client",
        payload: { jsonrpc: "2.0", method: "session/prompt", params: { sessionId: "a-1" } },
      });
    }
    const page = await driver.listEvents({ sessionId: "s-1" });
    expect(page.items).toHaveLength(2);
    // Oldest event (evt-1) should be trimmed
    expect(page.items[0]?.id).toBe("evt-2");
    expect(page.items[1]?.id).toBe("evt-3");
  });
  it("clones data to prevent external mutation", async () => {
    const driver = new RivetSessionPersistDriver(makeCtx());
    await driver.updateSession({
      id: "s-1",
      agent: "mock",
      agentSessionId: "a-1",
      lastConnectionId: "c-1",
      createdAt: 1,
    });
    const s1 = await driver.getSession("s-1");
    const s2 = await driver.getSession("s-1");
    expect(s1).toEqual(s2);
    expect(s1).not.toBe(s2); // Different object references
  });
  it("supports custom stateKey", async () => {
    const ctx = makeCtx();
    const driver = new RivetSessionPersistDriver(ctx, { stateKey: "myPersist" });
    await driver.updateSession({
      id: "s-1",
      agent: "mock",
      agentSessionId: "a-1",
      lastConnectionId: "c-1",
      createdAt: 1,
    });
    // Data must live under the custom key, not the default one.
    expect((ctx.state.myPersist as RivetPersistData).sessions["s-1"]).toBeDefined();
    expect(ctx.state._sandboxAgentPersist).toBeUndefined();
  });
  it("returns empty results for unknown session events", async () => {
    const driver = new RivetSessionPersistDriver(makeCtx());
    const page = await driver.listEvents({ sessionId: "nonexistent" });
    expect(page.items).toHaveLength(0);
    expect(page.nextCursor).toBeUndefined();
  });
});

View file

@ -0,0 +1,16 @@
{
"compilerOptions": {
"target": "ES2022",
"lib": ["ES2022"],
"module": "ESNext",
"moduleResolution": "Bundler",
"allowImportingTsExtensions": true,
"noEmit": true,
"esModuleInterop": true,
"strict": true,
"skipLibCheck": true,
"resolveJsonModule": true
},
"include": ["src/**/*", "tests/**/*"],
"exclude": ["node_modules", "dist"]
}

View file

@ -0,0 +1,10 @@
import { defineConfig } from "tsup";

// Bundles src/index.ts to a single ESM artifact, emitting declarations and
// sourcemaps, wiping dist/ before each build.
export default defineConfig({
  entry: ["src/index.ts"],
  format: ["esm"],
  dts: true,
  sourcemap: true,
  clean: true,
  target: "es2022",
});

View file

@ -0,0 +1,37 @@
{
"name": "@sandbox-agent/persist-sqlite",
"version": "0.1.0",
"description": "SQLite persistence driver for the Sandbox Agent TypeScript SDK",
"license": "Apache-2.0",
"repository": {
"type": "git",
"url": "https://github.com/rivet-dev/sandbox-agent"
},
"type": "module",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
"exports": {
".": {
"types": "./dist/index.d.ts",
"import": "./dist/index.js"
}
},
"dependencies": {
"sandbox-agent": "workspace:*"
},
"files": [
"dist"
],
"scripts": {
"build": "tsup",
"typecheck": "tsc --noEmit",
"test": "vitest run",
"test:watch": "vitest"
},
"devDependencies": {
"@types/node": "^22.0.0",
"tsup": "^8.0.0",
"typescript": "^5.7.0",
"vitest": "^3.0.0"
}
}

View file

@ -0,0 +1,306 @@
import { DatabaseSync } from "node:sqlite";
import type {
ListEventsRequest,
ListPage,
ListPageRequest,
SessionEvent,
SessionPersistDriver,
SessionRecord,
} from "sandbox-agent";
// Page size used when a list request does not specify a limit.
const DEFAULT_LIST_LIMIT = 100;

/** Construction options for the SQLite persistence driver. */
export interface SQLiteSessionPersistDriverOptions {
  /** Database file path; defaults to an in-memory database. */
  filename?: string;
  /** Passed through to `DatabaseSync`; the database opens immediately by default. */
  open?: boolean;
}
export class SQLiteSessionPersistDriver implements SessionPersistDriver {
private readonly db: DatabaseSync;
constructor(options: SQLiteSessionPersistDriverOptions = {}) {
this.db = new DatabaseSync(options.filename ?? ":memory:", {
open: options.open ?? true,
});
this.initialize();
}
async getSession(id: string): Promise<SessionRecord | null> {
const row = this.db
.prepare(
`SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, session_init_json
FROM sessions WHERE id = ?`,
)
.get(id) as SessionRow | undefined;
if (!row) {
return null;
}
return decodeSessionRow(row);
}
async listSessions(request: ListPageRequest = {}): Promise<ListPage<SessionRecord>> {
const offset = parseCursor(request.cursor);
const limit = normalizeLimit(request.limit);
const rows = this.db
.prepare(
`SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, session_init_json
FROM sessions
ORDER BY created_at ASC, id ASC
LIMIT ? OFFSET ?`,
)
.all(limit, offset) as SessionRow[];
const countRow = this.db.prepare(`SELECT COUNT(*) as count FROM sessions`).get() as { count: number };
const nextOffset = offset + rows.length;
return {
items: rows.map(decodeSessionRow),
nextCursor: nextOffset < countRow.count ? String(nextOffset) : undefined,
};
}
async updateSession(session: SessionRecord): Promise<void> {
this.db
.prepare(
`INSERT INTO sessions (
id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, session_init_json
) VALUES (?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(id) DO UPDATE SET
agent = excluded.agent,
agent_session_id = excluded.agent_session_id,
last_connection_id = excluded.last_connection_id,
created_at = excluded.created_at,
destroyed_at = excluded.destroyed_at,
session_init_json = excluded.session_init_json`,
)
.run(
session.id,
session.agent,
session.agentSessionId,
session.lastConnectionId,
session.createdAt,
session.destroyedAt ?? null,
session.sessionInit ? JSON.stringify(session.sessionInit) : null,
);
}
async listEvents(request: ListEventsRequest): Promise<ListPage<SessionEvent>> {
const offset = parseCursor(request.cursor);
const limit = normalizeLimit(request.limit);
const rows = this.db
.prepare(
`SELECT id, event_index, session_id, created_at, connection_id, sender, payload_json
FROM events
WHERE session_id = ?
ORDER BY event_index ASC, id ASC
LIMIT ? OFFSET ?`,
)
.all(request.sessionId, limit, offset) as EventRow[];
const countRow = this.db
.prepare(`SELECT COUNT(*) as count FROM events WHERE session_id = ?`)
.get(request.sessionId) as { count: number };
const nextOffset = offset + rows.length;
return {
items: rows.map(decodeEventRow),
nextCursor: nextOffset < countRow.count ? String(nextOffset) : undefined,
};
}
async insertEvent(event: SessionEvent): Promise<void> {
this.db
.prepare(
`INSERT INTO events (
id, event_index, session_id, created_at, connection_id, sender, payload_json
) VALUES (?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(id) DO UPDATE SET
event_index = excluded.event_index,
session_id = excluded.session_id,
created_at = excluded.created_at,
connection_id = excluded.connection_id,
sender = excluded.sender,
payload_json = excluded.payload_json`,
)
.run(
event.id,
event.eventIndex,
event.sessionId,
event.createdAt,
event.connectionId,
event.sender,
JSON.stringify(event.payload),
);
}
close(): void {
this.db.close();
}
private initialize(): void {
this.db.exec(`
CREATE TABLE IF NOT EXISTS sessions (
id TEXT PRIMARY KEY,
agent TEXT NOT NULL,
agent_session_id TEXT NOT NULL,
last_connection_id TEXT NOT NULL,
created_at INTEGER NOT NULL,
destroyed_at INTEGER,
session_init_json TEXT
)
`);
this.ensureEventsTable();
}
/**
 * Creates the `events` table if absent, or migrates a legacy layout
 * (non-TEXT id column or missing `event_index`) to the current schema.
 * Always guarantees the (session_id, event_index, id) index exists.
 */
private ensureEventsTable(): void {
  // PRAGMA table_info returns zero rows when the table does not exist yet.
  const columns = this.db.prepare(`PRAGMA table_info(events)`).all() as TableInfoRow[];
  if (columns.length === 0) {
    this.createEventsTable();
    return;
  }
  const declaredIdType = (columns.find((column) => column.name === "id")?.type ?? "")
    .trim()
    .toUpperCase();
  const hasIndexColumn = columns.some((column) => column.name === "event_index");
  // Legacy schemas are rebuilt in place before the index is (re)created.
  if (declaredIdType !== "TEXT" || !hasIndexColumn) {
    this.rebuildEventsTable(hasIndexColumn);
  }
  this.db.exec(`
    CREATE INDEX IF NOT EXISTS idx_events_session_order
    ON events(session_id, event_index, id)
  `);
}
/**
 * Creates the `events` table with the current schema plus the composite
 * ordering index used by listEvents.
 */
private createEventsTable(): void {
  this.db.exec(`
    CREATE TABLE IF NOT EXISTS events (
      id TEXT PRIMARY KEY,
      event_index INTEGER NOT NULL,
      session_id TEXT NOT NULL,
      created_at INTEGER NOT NULL,
      connection_id TEXT NOT NULL,
      sender TEXT NOT NULL,
      payload_json TEXT NOT NULL
    );
    CREATE INDEX IF NOT EXISTS idx_events_session_order
    ON events(session_id, event_index, id)
  `);
}
/**
 * Migrates a legacy `events` table to the current schema: rename it aside,
 * recreate the table (and index), copy all rows across with ids coerced to
 * TEXT, then drop the legacy copy.
 *
 * When the legacy table lacks an `event_index` column, one is synthesized
 * per session from (created_at, id) insertion order via ROW_NUMBER().
 *
 * NOTE(review): the rename/copy/drop sequence is not wrapped in a
 * transaction — a failure mid-migration would leave `events_legacy`
 * behind; confirm this is acceptable for this driver.
 */
private rebuildEventsTable(hasEventIndex: boolean): void {
  this.db.exec(`
    ALTER TABLE events RENAME TO events_legacy;
  `);
  this.createEventsTable();
  if (hasEventIndex) {
    // Keep existing indices where present; backfill NULLs from row order.
    this.db.exec(`
      INSERT INTO events (id, event_index, session_id, created_at, connection_id, sender, payload_json)
      SELECT
        CAST(id AS TEXT),
        COALESCE(event_index, ROW_NUMBER() OVER (PARTITION BY session_id ORDER BY created_at ASC, id ASC)),
        session_id,
        created_at,
        connection_id,
        sender,
        payload_json
      FROM events_legacy
    `);
  } else {
    // No legacy ordering column at all: derive every index from row order.
    this.db.exec(`
      INSERT INTO events (id, event_index, session_id, created_at, connection_id, sender, payload_json)
      SELECT
        CAST(id AS TEXT),
        ROW_NUMBER() OVER (PARTITION BY session_id ORDER BY created_at ASC, id ASC),
        session_id,
        created_at,
        connection_id,
        sender,
        payload_json
      FROM events_legacy
    `);
  }
  this.db.exec(`DROP TABLE events_legacy`);
}
}
// Raw row shape of the `sessions` table (snake_case column names as stored).
type SessionRow = {
  id: string;
  agent: string;
  agent_session_id: string;
  last_connection_id: string;
  created_at: number;
  destroyed_at: number | null; // null while the session has not been destroyed
  session_init_json: string | null; // JSON-encoded session init, when captured
};
// Raw row shape of the `events` table.
type EventRow = {
  id: string;
  event_index: number;
  session_id: string;
  created_at: number;
  connection_id: string;
  sender: "client" | "agent";
  payload_json: string; // JSON-encoded ACP message payload
};
// Subset of columns read from `PRAGMA table_info(events)`.
type TableInfoRow = {
  name: string;
  type: string; // declared column type, e.g. "TEXT" or "INTEGER"
};
/** Maps a raw `sessions` row (snake_case columns) onto a SessionRecord. */
function decodeSessionRow(row: SessionRow): SessionRecord {
  const sessionInit = row.session_init_json
    ? (JSON.parse(row.session_init_json) as SessionRecord["sessionInit"])
    : undefined;
  return {
    id: row.id,
    agent: row.agent,
    agentSessionId: row.agent_session_id,
    lastConnectionId: row.last_connection_id,
    createdAt: row.created_at,
    // SQL NULL becomes an absent field on the record.
    destroyedAt: row.destroyed_at === null ? undefined : row.destroyed_at,
    sessionInit,
  };
}
/** Maps a raw `events` row onto a SessionEvent, parsing the JSON payload. */
function decodeEventRow(row: EventRow): SessionEvent {
  const { id, event_index, session_id, created_at, connection_id, sender, payload_json } = row;
  return {
    id,
    eventIndex: event_index,
    sessionId: session_id,
    createdAt: created_at,
    connectionId: connection_id,
    sender,
    payload: JSON.parse(payload_json),
  };
}
/**
 * Clamps a requested page size to a positive integer. Absent, non-finite,
 * or sub-1 values fall back to DEFAULT_LIST_LIMIT.
 */
function normalizeLimit(limit: number | undefined): number {
  if (limit === undefined || !Number.isFinite(limit) || limit < 1) {
    return DEFAULT_LIST_LIMIT;
  }
  return Math.floor(limit);
}
/**
 * Decodes an offset cursor produced by listEvents pagination. Missing,
 * malformed, or negative cursors fall back to offset 0.
 */
function parseCursor(cursor: string | undefined): number {
  if (cursor === undefined || cursor === "") {
    return 0;
  }
  const offset = Number.parseInt(cursor, 10);
  return Number.isFinite(offset) && offset >= 0 ? offset : 0;
}

View file

@ -0,0 +1,136 @@
import { describe, it, expect, beforeAll, afterAll } from "vitest";
import { existsSync, mkdtempSync, rmSync } from "node:fs";
import { dirname, join, resolve } from "node:path";
import { fileURLToPath } from "node:url";
import { tmpdir } from "node:os";
import { SandboxAgent } from "sandbox-agent";
import { spawnSandboxAgent, type SandboxAgentSpawnHandle } from "../../typescript/src/spawn.ts";
import { prepareMockAgentDataHome } from "../../typescript/tests/helpers/mock-agent.ts";
import { SQLiteSessionPersistDriver } from "../src/index.ts";
const __dirname = dirname(fileURLToPath(import.meta.url));
/**
 * Locates the sandbox-agent binary: an explicit SANDBOX_AGENT_BIN env var
 * wins; otherwise probe the cargo debug/release output paths. Returns null
 * when nothing is found.
 */
function findBinary(): string | null {
  const explicit = process.env.SANDBOX_AGENT_BIN;
  if (explicit) {
    return explicit;
  }
  const candidates = [
    resolve(__dirname, "../../../target/debug/sandbox-agent"),
    resolve(__dirname, "../../../target/release/sandbox-agent"),
  ];
  return candidates.find((candidate) => existsSync(candidate)) ?? null;
}
// Resolve the binary once at module load so the suite fails fast with a
// clear message instead of timing out later inside spawnSandboxAgent.
const BINARY_PATH = findBinary();
if (!BINARY_PATH) {
  throw new Error(
    "sandbox-agent binary not found. Build it (cargo build -p sandbox-agent) or set SANDBOX_AGENT_BIN.",
  );
}
// Export the discovered path via the env var so the spawn helper picks up
// the same binary that was validated above (only when not already set).
if (!process.env.SANDBOX_AGENT_BIN) {
  process.env.SANDBOX_AGENT_BIN = BINARY_PATH;
}
// Integration suite: spawns a real sandbox-agent process against a temp data
// home and exercises the SQLite persistence driver end-to-end.
describe("SQLite persistence driver", () => {
  let handle: SandboxAgentSpawnHandle;
  let baseUrl: string;
  let token: string;
  let dataHome: string;
  beforeAll(async () => {
    // Isolated XDG data home so the agent never touches real user state.
    dataHome = mkdtempSync(join(tmpdir(), "sqlite-integration-"));
    prepareMockAgentDataHome(dataHome);
    handle = await spawnSandboxAgent({
      enabled: true,
      log: "silent",
      timeoutMs: 30000,
      env: {
        XDG_DATA_HOME: dataHome,
      },
    });
    baseUrl = handle.baseUrl;
    token = handle.token;
  });
  afterAll(async () => {
    await handle.dispose();
    rmSync(dataHome, { recursive: true, force: true });
  });
  it("persists session/event history across SDK instances and supports replay restore", async () => {
    const tempDir = mkdtempSync(join(tmpdir(), "sqlite-persist-"));
    const dbPath = join(tempDir, "session-store.db");
    // First SDK instance: create a session, prompt once, then tear down so
    // everything it knows lives only in the SQLite file afterwards.
    const persist1 = new SQLiteSessionPersistDriver({ filename: dbPath });
    const sdk1 = await SandboxAgent.connect({
      baseUrl,
      token,
      persist: persist1,
      replayMaxEvents: 40,
      replayMaxChars: 16000,
    });
    const created = await sdk1.createSession({ agent: "mock" });
    await created.prompt([{ type: "text", text: "sqlite-first" }]);
    const firstConnectionId = created.lastConnectionId;
    await sdk1.dispose();
    persist1.close();
    // Second SDK instance over the same database file must be able to
    // resume the session purely from persisted state.
    const persist2 = new SQLiteSessionPersistDriver({ filename: dbPath });
    const sdk2 = await SandboxAgent.connect({
      baseUrl,
      token,
      persist: persist2,
      replayMaxEvents: 40,
      replayMaxChars: 16000,
    });
    const restored = await sdk2.resumeSession(created.id);
    // A resume goes through a fresh connection, so the connection id changes.
    expect(restored.lastConnectionId).not.toBe(firstConnectionId);
    await restored.prompt([{ type: "text", text: "sqlite-second" }]);
    const sessions = await sdk2.listSessions({ limit: 20 });
    expect(sessions.items.some((entry) => entry.id === created.id)).toBe(true);
    const events = await sdk2.getEvents({ sessionId: created.id, limit: 1000 });
    expect(events.items.length).toBeGreaterThan(0);
    expect(events.items.every((event) => typeof event.id === "string")).toBe(true);
    expect(events.items.every((event) => Number.isInteger(event.eventIndex))).toBe(true);
    // Events must come back in non-decreasing eventIndex order.
    for (let i = 1; i < events.items.length; i += 1) {
      expect(events.items[i]!.eventIndex).toBeGreaterThanOrEqual(events.items[i - 1]!.eventIndex);
    }
    // The resume path is expected to inject a synthetic client prompt that
    // replays prior history; find it by its marker text in the first block.
    const replayInjected = events.items.find((event) => {
      if (event.sender !== "client") {
        return false;
      }
      const payload = event.payload as Record<string, unknown>;
      const method = payload.method;
      const params = payload.params as Record<string, unknown> | undefined;
      const prompt = Array.isArray(params?.prompt) ? params?.prompt : [];
      const firstBlock = prompt[0] as Record<string, unknown> | undefined;
      return (
        method === "session/prompt" &&
        typeof firstBlock?.text === "string" &&
        firstBlock.text.includes("Previous session history is replayed below")
      );
    });
    expect(replayInjected).toBeTruthy();
    await sdk2.dispose();
    persist2.close();
    rmSync(tempDir, { recursive: true, force: true });
  });
});

View file

@ -0,0 +1,16 @@
{
"compilerOptions": {
"target": "ES2022",
"lib": ["ES2022", "DOM"],
"module": "ESNext",
"moduleResolution": "Bundler",
"allowImportingTsExtensions": true,
"noEmit": true,
"esModuleInterop": true,
"strict": true,
"skipLibCheck": true,
"resolveJsonModule": true
},
"include": ["src/**/*", "tests/**/*"],
"exclude": ["node_modules", "dist"]
}

View file

@ -0,0 +1,10 @@
import { defineConfig } from "tsup";

// Build configuration: bundle src/index.ts as a single ESM output with type
// declarations and source maps; `clean` wipes the output dir before builds.
export default defineConfig({
  entry: ["src/index.ts"],
  format: ["esm"],
  dts: true,
  sourcemap: true,
  clean: true,
  target: "es2022",
});

View file

@ -0,0 +1,8 @@
import { defineConfig } from "vitest/config";

// Test runner configuration: the generous per-test timeout accommodates the
// integration tests that spawn a real sandbox-agent process.
export default defineConfig({
  test: {
    include: ["tests/**/*.test.ts"],
    testTimeout: 60000,
  },
});

View file

@ -25,11 +25,11 @@
],
"scripts": {
"generate:openapi": "SANDBOX_AGENT_SKIP_INSPECTOR=1 cargo run -p sandbox-agent-openapi-gen -- --out ../../docs/openapi.json",
"generate:types": "openapi-typescript ../../docs/openapi.json -o src/generated/openapi.ts",
"generate:types": "openapi-typescript ../../docs/openapi.json -o src/generated/openapi.ts && node ./scripts/patch-openapi-types.mjs",
"generate": "pnpm run generate:openapi && pnpm run generate:types",
"build": "pnpm --filter acp-http-client build && if [ -z \"$SKIP_OPENAPI_GEN\" ]; then pnpm run generate:openapi; fi && pnpm run generate:types && tsup",
"typecheck": "pnpm --filter acp-http-client build && tsc --noEmit",
"test": "pnpm --filter acp-http-client build && vitest run",
"typecheck": "tsc --noEmit",
"test": "vitest run",
"test:watch": "vitest"
},
"devDependencies": {

View file

@ -0,0 +1,17 @@
import { readFileSync, writeFileSync } from "node:fs";
import { resolve } from "node:path";
const target = resolve(process.cwd(), "src/generated/openapi.ts");
let source = readFileSync(target, "utf8");
const replacements = [
["components[\"schemas\"][\"McpCommand\"]", "string"],
["components[\"schemas\"][\"McpOAuthConfigOrDisabled\"]", "Record<string, unknown> | null"],
["components[\"schemas\"][\"McpRemoteTransport\"]", "string"],
];
for (const [from, to] of replacements) {
source = source.split(from).join(to);
}
writeFileSync(target, source);

File diff suppressed because it is too large Load diff

View file

@ -5,36 +5,57 @@
export interface paths {
"/v2/fs/file": {
get: operations["get_v2_fs_file"];
put: operations["put_v2_fs_file"];
"/v1/acp": {
get: operations["get_v1_acp_servers"];
};
"/v2/fs/upload-batch": {
post: operations["post_v2_fs_upload_batch"];
"/v1/acp/{server_id}": {
get: operations["get_v1_acp"];
post: operations["post_v1_acp"];
delete: operations["delete_v1_acp"];
};
"/v2/health": {
/**
* v2 Health
* @description Returns server health for the v2 ACP surface.
*/
get: operations["get_v2_health"];
"/v1/agents": {
get: operations["get_v1_agents"];
};
"/v2/rpc": {
/**
* ACP SSE
* @description Streams ACP JSON-RPC envelopes for an ACP client over SSE.
*/
get: operations["get_v2_acp"];
/**
* ACP POST
* @description Sends ACP JSON-RPC envelopes to an ACP client and returns request responses.
*/
post: operations["post_v2_acp"];
/**
* ACP Close
* @description Closes an ACP client and releases agent process resources.
*/
delete: operations["delete_v2_acp"];
"/v1/agents/{agent}": {
get: operations["get_v1_agent"];
};
"/v1/agents/{agent}/install": {
post: operations["post_v1_agent_install"];
};
"/v1/config/mcp": {
get: operations["get_v1_config_mcp"];
put: operations["put_v1_config_mcp"];
delete: operations["delete_v1_config_mcp"];
};
"/v1/config/skills": {
get: operations["get_v1_config_skills"];
put: operations["put_v1_config_skills"];
delete: operations["delete_v1_config_skills"];
};
"/v1/fs/entries": {
get: operations["get_v1_fs_entries"];
};
"/v1/fs/entry": {
delete: operations["delete_v1_fs_entry"];
};
"/v1/fs/file": {
get: operations["get_v1_fs_file"];
put: operations["put_v1_fs_file"];
};
"/v1/fs/mkdir": {
post: operations["post_v1_fs_mkdir"];
};
"/v1/fs/move": {
post: operations["post_v1_fs_move"];
};
"/v1/fs/stat": {
get: operations["get_v1_fs_stat"];
};
"/v1/fs/upload-batch": {
post: operations["post_v1_fs_upload_batch"];
};
"/v1/health": {
get: operations["get_v1_health"];
};
}
@ -50,6 +71,18 @@ export interface components {
params?: unknown;
result?: unknown;
};
AcpPostQuery: {
agent?: string | null;
};
AcpServerInfo: {
agent: string;
/** Format: int64 */
createdAtMs: number;
serverId: string;
};
AcpServerListResponse: {
servers: components["schemas"]["AcpServerInfo"][];
};
AgentCapabilities: {
commandExecution: boolean;
errorEvents: boolean;
@ -72,12 +105,11 @@ export interface components {
};
AgentInfo: {
capabilities: components["schemas"]["AgentCapabilities"];
configError?: string | null;
configOptions?: unknown[] | null;
credentialsAvailable: boolean;
defaultModel?: string | null;
id: string;
installed: boolean;
models?: components["schemas"]["AgentModelInfo"][] | null;
modes?: components["schemas"]["AgentModeInfo"][] | null;
path?: string | null;
serverStatus?: components["schemas"]["ServerStatusInfo"] | null;
version?: string | null;
@ -100,28 +132,17 @@ export interface components {
AgentListResponse: {
agents: components["schemas"]["AgentInfo"][];
};
AgentModeInfo: {
description: string;
id: string;
name: string;
};
AgentModelInfo: {
id: string;
name?: string | null;
};
/** @enum {string} */
ErrorType: "invalid_request" | "unsupported_agent" | "agent_not_installed" | "install_failed" | "agent_process_exited" | "token_invalid" | "permission_denied" | "session_not_found" | "session_already_exists" | "mode_not_supported" | "stream_error" | "timeout";
ErrorType: "invalid_request" | "conflict" | "unsupported_agent" | "agent_not_installed" | "install_failed" | "agent_process_exited" | "token_invalid" | "permission_denied" | "not_acceptable" | "unsupported_media_type" | "session_not_found" | "session_already_exists" | "mode_not_supported" | "stream_error" | "timeout";
FsActionResponse: {
path: string;
};
FsDeleteQuery: {
path: string;
recursive?: boolean | null;
sessionId?: string | null;
};
FsEntriesQuery: {
path?: string | null;
sessionId?: string | null;
};
FsEntry: {
entryType: components["schemas"]["FsEntryType"];
@ -144,10 +165,6 @@ export interface components {
};
FsPathQuery: {
path: string;
sessionId?: string | null;
};
FsSessionQuery: {
sessionId?: string | null;
};
FsStat: {
entryType: components["schemas"]["FsEntryType"];
@ -158,7 +175,6 @@ export interface components {
};
FsUploadBatchQuery: {
path?: string | null;
sessionId?: string | null;
};
FsUploadBatchResponse: {
paths: string[];
@ -172,6 +188,39 @@ export interface components {
HealthResponse: {
status: string;
};
McpConfigQuery: {
directory: string;
mcpName: string;
};
McpServerConfig: ({
args?: string[];
command: string;
cwd?: string | null;
enabled?: boolean | null;
env?: {
[key: string]: string;
} | null;
/** Format: int64 */
timeoutMs?: number | null;
/** @enum {string} */
type: "local";
}) | ({
bearerTokenEnvVar?: string | null;
enabled?: boolean | null;
envHeaders?: {
[key: string]: string;
} | null;
headers?: {
[key: string]: string;
} | null;
oauth?: Record<string, unknown> | null | null;
/** Format: int64 */
timeoutMs?: number | null;
transport?: string | null;
/** @enum {string} */
type: "remote";
url: string;
});
ProblemDetails: {
detail?: string | null;
instance?: string | null;
@ -182,50 +231,25 @@ export interface components {
[key: string]: unknown;
};
/** @enum {string} */
ServerStatus: "running" | "stopped" | "error";
ServerStatus: "running" | "stopped";
ServerStatusInfo: {
baseUrl?: string | null;
lastError?: string | null;
/** Format: int64 */
restartCount: number;
status: components["schemas"]["ServerStatus"];
/** Format: int64 */
uptimeMs?: number | null;
};
SessionInfo: {
agent: string;
agentMode: string;
/** Format: int64 */
createdAt: number;
directory?: string | null;
ended: boolean;
/** Format: int64 */
eventCount: number;
model?: string | null;
nativeSessionId?: string | null;
permissionMode: string;
sessionId: string;
terminationInfo?: components["schemas"]["TerminationInfo"] | null;
title?: string | null;
/** Format: int64 */
updatedAt: number;
SkillSource: {
ref?: string | null;
skills?: string[] | null;
source: string;
subpath?: string | null;
type: string;
};
SessionListResponse: {
sessions: components["schemas"]["SessionInfo"][];
SkillsConfig: {
sources: components["schemas"]["SkillSource"][];
};
StderrOutput: {
head?: string | null;
tail?: string | null;
totalLines?: number | null;
truncated: boolean;
};
TerminationInfo: {
/** Format: int32 */
exitCode?: number | null;
message?: string | null;
reason: string;
stderr?: components["schemas"]["StderrOutput"] | null;
terminatedBy: string;
SkillsConfigQuery: {
directory: string;
skillName: string;
};
};
responses: never;
@ -241,89 +265,23 @@ export type external = Record<string, never>;
export interface operations {
get_v2_fs_file: {
parameters: {
query: {
/** @description File path */
path: string;
/** @description Session id for relative path base */
session_id?: string | null;
};
};
get_v1_acp_servers: {
responses: {
/** @description File content */
200: {
content: never;
};
};
};
put_v2_fs_file: {
parameters: {
query: {
/** @description File path */
path: string;
/** @description Session id for relative path base */
session_id?: string | null;
};
};
/** @description Raw file bytes */
requestBody: {
content: {
"text/plain": string;
};
};
responses: {
/** @description Write result */
/** @description Active ACP server instances */
200: {
content: {
"application/json": components["schemas"]["FsWriteResponse"];
"application/json": components["schemas"]["AcpServerListResponse"];
};
};
};
};
post_v2_fs_upload_batch: {
get_v1_acp: {
parameters: {
query?: {
/** @description Destination path */
path?: string | null;
/** @description Session id for relative path base */
session_id?: string | null;
path: {
/** @description Client-defined ACP server id */
server_id: string;
};
};
/** @description tar archive body */
requestBody: {
content: {
"text/plain": string;
};
};
responses: {
/** @description Upload/extract result */
200: {
content: {
"application/json": components["schemas"]["FsUploadBatchResponse"];
};
};
};
};
/**
* v2 Health
* @description Returns server health for the v2 ACP surface.
*/
get_v2_health: {
responses: {
/** @description Service health response */
200: {
content: {
"application/json": components["schemas"]["HealthResponse"];
};
};
};
};
/**
* ACP SSE
* @description Streams ACP JSON-RPC envelopes for an ACP client over SSE.
*/
get_v2_acp: {
responses: {
/** @description SSE stream of ACP envelopes */
200: {
@ -335,19 +293,31 @@ export interface operations {
"application/json": components["schemas"]["ProblemDetails"];
};
};
/** @description Unknown ACP client */
/** @description Unknown ACP server */
404: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
/** @description Client does not accept SSE responses */
406: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
};
};
/**
* ACP POST
* @description Sends ACP JSON-RPC envelopes to an ACP client and returns request responses.
*/
post_v2_acp: {
post_v1_acp: {
parameters: {
query?: {
/** @description Agent id required for first POST */
agent?: string | null;
};
path: {
/** @description Client-defined ACP server id */
server_id: string;
};
};
requestBody: {
content: {
"application/json": components["schemas"]["AcpEnvelope"];
@ -370,12 +340,30 @@ export interface operations {
"application/json": components["schemas"]["ProblemDetails"];
};
};
/** @description Unknown ACP client */
/** @description Unknown ACP server */
404: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
/** @description Client does not accept JSON responses */
406: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
/** @description ACP server bound to different agent */
409: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
/** @description Unsupported media type */
415: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
/** @description ACP agent process response timeout */
504: {
content: {
@ -384,23 +372,128 @@ export interface operations {
};
};
};
/**
* ACP Close
* @description Closes an ACP client and releases agent process resources.
*/
delete_v2_acp: {
delete_v1_acp: {
parameters: {
path: {
/** @description Client-defined ACP server id */
server_id: string;
};
};
responses: {
/** @description ACP client closed */
/** @description ACP server closed */
204: {
content: never;
};
};
};
get_v1_agents: {
parameters: {
query?: {
/** @description When true, include version/path/configOptions (slower) */
config?: boolean | null;
/** @description When true, bypass version cache */
no_cache?: boolean | null;
};
};
responses: {
/** @description List of v1 agents */
200: {
content: {
"application/json": components["schemas"]["AgentListResponse"];
};
};
/** @description Authentication required */
401: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
};
};
get_v1_agent: {
parameters: {
query?: {
/** @description When true, include version/path/configOptions (slower) */
config?: boolean | null;
/** @description When true, bypass version cache */
no_cache?: boolean | null;
};
path: {
/** @description Agent id */
agent: string;
};
};
responses: {
/** @description Agent info */
200: {
content: {
"application/json": components["schemas"]["AgentInfo"];
};
};
/** @description Unknown agent */
400: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
/** @description Authentication required */
401: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
};
};
post_v1_agent_install: {
parameters: {
path: {
/** @description Agent id */
agent: string;
};
};
requestBody: {
content: {
"application/json": components["schemas"]["AgentInstallRequest"];
};
};
responses: {
/** @description Agent install result */
200: {
content: {
"application/json": components["schemas"]["AgentInstallResponse"];
};
};
/** @description Invalid request */
400: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
/** @description Unknown ACP client */
/** @description Install failed */
500: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
};
};
get_v1_config_mcp: {
parameters: {
query: {
/** @description Target directory */
directory: string;
/** @description MCP entry name */
mcpName: string;
};
};
responses: {
/** @description MCP entry */
200: {
content: {
"application/json": components["schemas"]["McpServerConfig"];
};
};
/** @description Entry not found */
404: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
@ -408,4 +501,251 @@ export interface operations {
};
};
};
put_v1_config_mcp: {
parameters: {
query: {
/** @description Target directory */
directory: string;
/** @description MCP entry name */
mcpName: string;
};
};
requestBody: {
content: {
"application/json": components["schemas"]["McpServerConfig"];
};
};
responses: {
/** @description Stored */
204: {
content: never;
};
};
};
delete_v1_config_mcp: {
parameters: {
query: {
/** @description Target directory */
directory: string;
/** @description MCP entry name */
mcpName: string;
};
};
responses: {
/** @description Deleted */
204: {
content: never;
};
};
};
get_v1_config_skills: {
parameters: {
query: {
/** @description Target directory */
directory: string;
/** @description Skill entry name */
skillName: string;
};
};
responses: {
/** @description Skills entry */
200: {
content: {
"application/json": components["schemas"]["SkillsConfig"];
};
};
/** @description Entry not found */
404: {
content: {
"application/json": components["schemas"]["ProblemDetails"];
};
};
};
};
put_v1_config_skills: {
parameters: {
query: {
/** @description Target directory */
directory: string;
/** @description Skill entry name */
skillName: string;
};
};
requestBody: {
content: {
"application/json": components["schemas"]["SkillsConfig"];
};
};
responses: {
/** @description Stored */
204: {
content: never;
};
};
};
delete_v1_config_skills: {
parameters: {
query: {
/** @description Target directory */
directory: string;
/** @description Skill entry name */
skillName: string;
};
};
responses: {
/** @description Deleted */
204: {
content: never;
};
};
};
get_v1_fs_entries: {
parameters: {
query?: {
/** @description Directory path */
path?: string | null;
};
};
responses: {
/** @description Directory entries */
200: {
content: {
"application/json": components["schemas"]["FsEntry"][];
};
};
};
};
delete_v1_fs_entry: {
parameters: {
query: {
/** @description File or directory path */
path: string;
/** @description Delete directory recursively */
recursive?: boolean | null;
};
};
responses: {
/** @description Delete result */
200: {
content: {
"application/json": components["schemas"]["FsActionResponse"];
};
};
};
};
get_v1_fs_file: {
parameters: {
query: {
/** @description File path */
path: string;
};
};
responses: {
/** @description File content */
200: {
content: never;
};
};
};
put_v1_fs_file: {
parameters: {
query: {
/** @description File path */
path: string;
};
};
/** @description Raw file bytes */
requestBody: {
content: {
"text/plain": string;
};
};
responses: {
/** @description Write result */
200: {
content: {
"application/json": components["schemas"]["FsWriteResponse"];
};
};
};
};
post_v1_fs_mkdir: {
parameters: {
query: {
/** @description Directory path */
path: string;
};
};
responses: {
/** @description Directory created */
200: {
content: {
"application/json": components["schemas"]["FsActionResponse"];
};
};
};
};
post_v1_fs_move: {
requestBody: {
content: {
"application/json": components["schemas"]["FsMoveRequest"];
};
};
responses: {
/** @description Move result */
200: {
content: {
"application/json": components["schemas"]["FsMoveResponse"];
};
};
};
};
get_v1_fs_stat: {
parameters: {
query: {
/** @description Path to stat */
path: string;
};
};
responses: {
/** @description Path metadata */
200: {
content: {
"application/json": components["schemas"]["FsStat"];
};
};
};
};
post_v1_fs_upload_batch: {
parameters: {
query?: {
/** @description Destination path */
path?: string | null;
};
};
/** @description tar archive body */
requestBody: {
content: {
"text/plain": string;
};
};
responses: {
/** @description Upload/extract result */
200: {
content: {
"application/json": components["schemas"]["FsUploadBatchResponse"];
};
};
};
};
get_v1_health: {
responses: {
/** @description Service health response */
200: {
content: {
"application/json": components["schemas"]["HealthResponse"];
};
};
};
};
}

View file

@ -1,45 +1,61 @@
export {
AlreadyConnectedError,
NotConnectedError,
LiveAcpConnection,
SandboxAgent,
SandboxAgentClient,
SandboxAgentError,
Session,
} from "./client.ts";
export { AcpRpcError } from "acp-http-client";
export { buildInspectorUrl } from "./inspector.ts";
export type {
AgentEvent,
AgentUnparsedNotification,
ListModelsResponse,
PermissionRequest,
PermissionResponse,
SandboxAgentClientConnectOptions,
SandboxAgentClientOptions,
SandboxAgentConnectOptions,
SandboxAgentEventObserver,
SandboxAgentStartOptions,
SandboxMetadata,
SessionCreateRequest,
SessionModelInfo,
SessionUpdateNotification,
SessionResumeOrCreateRequest,
SessionSendOptions,
SessionEventListener,
} from "./client.ts";
export type {
InspectorUrlOptions,
} from "./inspector.ts";
export type { InspectorUrlOptions } from "./inspector.ts";
export {
InMemorySessionPersistDriver,
} from "./types.ts";
export type {
AgentCapabilities,
AcpEnvelope,
AcpServerInfo,
AcpServerListResponse,
AgentInfo,
AgentInstallArtifact,
AgentInstallRequest,
AgentInstallResponse,
AgentListResponse,
FsActionResponse,
FsDeleteQuery,
FsEntriesQuery,
FsEntry,
FsMoveRequest,
FsMoveResponse,
FsPathQuery,
FsStat,
FsUploadBatchQuery,
FsUploadBatchResponse,
FsWriteResponse,
HealthResponse,
InMemorySessionPersistDriverOptions,
ListEventsRequest,
ListPage,
ListPageRequest,
McpConfigQuery,
McpServerConfig,
ProblemDetails,
SessionInfo,
SessionListResponse,
SessionTerminateResponse,
SessionEvent,
SessionPersistDriver,
SessionRecord,
SkillsConfig,
SkillsConfigQuery,
} from "./types.ts";
export type {

View file

@ -207,7 +207,7 @@ async function waitForHealth(
throw new Error("sandbox-agent exited before becoming healthy.");
}
try {
const response = await fetcher(`${baseUrl}/v2/health`, {
const response = await fetcher(`${baseUrl}/v1/health`, {
headers: { Authorization: `Bearer ${token}` },
});
if (response.ok) {

View file

@ -1,282 +1,237 @@
export interface ProblemDetails {
type: string;
title: string;
status: number;
detail?: string;
instance?: string;
[key: string]: unknown;
}
import type { AnyMessage, NewSessionRequest } from "acp-http-client";
import type { components, operations } from "./generated/openapi.ts";
export type HealthStatus = "healthy" | "degraded" | "unhealthy" | "ok";
export type ProblemDetails = components["schemas"]["ProblemDetails"];
export interface AgentHealthInfo {
export type HealthResponse = JsonResponse<operations["get_v1_health"], 200>;
export type AgentListResponse = JsonResponse<operations["get_v1_agents"], 200>;
export type AgentInfo = components["schemas"]["AgentInfo"];
export type AgentInstallRequest = JsonRequestBody<operations["post_v1_agent_install"]>;
export type AgentInstallResponse = JsonResponse<operations["post_v1_agent_install"], 200>;
export type AcpEnvelope = components["schemas"]["AcpEnvelope"];
export type AcpServerInfo = components["schemas"]["AcpServerInfo"];
export type AcpServerListResponse = JsonResponse<operations["get_v1_acp_servers"], 200>;
export type FsEntriesQuery = QueryParams<operations["get_v1_fs_entries"]>;
export type FsEntry = components["schemas"]["FsEntry"];
export type FsPathQuery = QueryParams<operations["get_v1_fs_file"]>;
export type FsDeleteQuery = QueryParams<operations["delete_v1_fs_entry"]>;
export type FsUploadBatchQuery = QueryParams<operations["post_v1_fs_upload_batch"]>;
export type FsWriteResponse = JsonResponse<operations["put_v1_fs_file"], 200>;
export type FsActionResponse = JsonResponse<operations["delete_v1_fs_entry"], 200>;
export type FsMoveRequest = JsonRequestBody<operations["post_v1_fs_move"]>;
export type FsMoveResponse = JsonResponse<operations["post_v1_fs_move"], 200>;
export type FsStat = JsonResponse<operations["get_v1_fs_stat"], 200>;
export type FsUploadBatchResponse = JsonResponse<operations["post_v1_fs_upload_batch"], 200>;
export type McpConfigQuery = QueryParams<operations["get_v1_config_mcp"]>;
export type McpServerConfig = components["schemas"]["McpServerConfig"];
export type SkillsConfigQuery = QueryParams<operations["get_v1_config_skills"]>;
export type SkillsConfig = components["schemas"]["SkillsConfig"];
export interface SessionRecord {
id: string;
agent: string;
installed: boolean;
running: boolean;
[key: string]: unknown;
agentSessionId: string;
lastConnectionId: string;
createdAt: number;
destroyedAt?: number;
sessionInit?: Omit<NewSessionRequest, "_meta">;
}
export interface HealthResponse {
status: HealthStatus | string;
version: string;
uptime_ms: number;
agents: AgentHealthInfo[];
// Backward-compatible field from earlier v2 payloads.
api_version?: string;
[key: string]: unknown;
}
export type SessionEventSender = "client" | "agent";
export type ServerStatus = "running" | "stopped" | "error";
export interface ServerStatusInfo {
status: ServerStatus | string;
base_url?: string | null;
baseUrl?: string | null;
uptime_ms?: number | null;
uptimeMs?: number | null;
restart_count?: number;
restartCount?: number;
last_error?: string | null;
lastError?: string | null;
[key: string]: unknown;
}
export interface AgentModelInfo {
id?: string;
model_id?: string;
modelId?: string;
name?: string | null;
description?: string | null;
default_variant?: string | null;
defaultVariant?: string | null;
variants?: string[] | null;
[key: string]: unknown;
}
export interface AgentModeInfo {
export interface SessionEvent {
// Stable unique event id. For ordering, sort by (sessionId, eventIndex).
id: string;
name: string;
description: string;
[key: string]: unknown;
eventIndex: number;
sessionId: string;
createdAt: number;
connectionId: string;
sender: SessionEventSender;
payload: AnyMessage;
}
export interface AgentCapabilities {
plan_mode?: boolean;
permissions?: boolean;
questions?: boolean;
tool_calls?: boolean;
tool_results?: boolean;
text_messages?: boolean;
images?: boolean;
file_attachments?: boolean;
session_lifecycle?: boolean;
error_events?: boolean;
reasoning?: boolean;
status?: boolean;
command_execution?: boolean;
file_changes?: boolean;
mcp_tools?: boolean;
streaming_deltas?: boolean;
item_started?: boolean;
shared_process?: boolean;
unstable_methods?: boolean;
[key: string]: unknown;
export interface ListPageRequest {
cursor?: string;
limit?: number;
}
export interface AgentInfo {
id: string;
installed?: boolean;
credentials_available?: boolean;
native_required?: boolean;
native_installed?: boolean;
native_version?: string | null;
agent_process_installed?: boolean;
agent_process_source?: string | null;
agent_process_version?: string | null;
version?: string | null;
path?: string | null;
server_status?: ServerStatusInfo | null;
models?: AgentModelInfo[] | null;
default_model?: string | null;
modes?: AgentModeInfo[] | null;
capabilities: AgentCapabilities;
[key: string]: unknown;
/** One page of list results; `nextCursor` is absent on the final page. */
export interface ListPage<T> {
  items: T[];
  nextCursor?: string;
}
export interface AgentListResponse {
agents: AgentInfo[];
/** Event-listing request: standard pagination plus the owning session id. */
export interface ListEventsRequest extends ListPageRequest {
  sessionId: string;
}
export interface AgentInstallRequest {
reinstall?: boolean;
agentVersion?: string;
agentProcessVersion?: string;
/**
 * Storage abstraction for session records and their event logs.
 * `updateSession` has upsert semantics; the list methods return
 * cursor-paginated pages.
 */
export interface SessionPersistDriver {
  /** Fetch a single session by id, or null when unknown. */
  getSession(id: string): Promise<SessionRecord | null>;
  /** Page through all stored sessions. */
  listSessions(request?: ListPageRequest): Promise<ListPage<SessionRecord>>;
  /** Insert or replace a session record. */
  updateSession(session: SessionRecord): Promise<void>;
  /** Page through the events recorded for one session. */
  listEvents(request: ListEventsRequest): Promise<ListPage<SessionEvent>>;
  /** Append one event to a session's log. */
  insertEvent(event: SessionEvent): Promise<void>;
}
export interface AgentInstallArtifact {
kind: string;
path: string;
source: string;
version?: string | null;
/** Eviction caps for {@link InMemorySessionPersistDriver}. */
export interface InMemorySessionPersistDriverOptions {
  // Maximum sessions retained; oldest evicted first. Defaults to 1024.
  maxSessions?: number;
  // Maximum events kept per session; oldest trimmed first. Defaults to 500.
  maxEventsPerSession?: number;
}
export interface AgentInstallResponse {
already_installed: boolean;
artifacts: AgentInstallArtifact[];
// Default eviction caps for the in-memory persist driver (see
// InMemorySessionPersistDriverOptions); fed through normalizeCap.
const DEFAULT_MAX_SESSIONS = 1024;
const DEFAULT_MAX_EVENTS_PER_SESSION = 500;
// Default page size when a list request does not specify `limit`.
const DEFAULT_LIST_LIMIT = 100;
/**
 * Volatile, process-local implementation of {@link SessionPersistDriver}.
 *
 * Records and events live in plain Maps, so all state is lost on restart.
 * Both collections are capped: once `maxSessions` is exceeded the oldest
 * sessions (by createdAt, tie-broken by id) are evicted together with their
 * event logs, and each session keeps at most `maxEventsPerSession` of its
 * newest events. Reads and writes operate on clones so callers can never
 * mutate stored state through a returned reference.
 */
export class InMemorySessionPersistDriver implements SessionPersistDriver {
  private readonly maxSessions: number;
  private readonly maxEventsPerSession: number;
  private readonly sessions = new Map<string, SessionRecord>();
  private readonly eventsBySession = new Map<string, SessionEvent[]>();

  constructor(options: InMemorySessionPersistDriverOptions = {}) {
    this.maxSessions = normalizeCap(options.maxSessions, DEFAULT_MAX_SESSIONS);
    this.maxEventsPerSession = normalizeCap(
      options.maxEventsPerSession,
      DEFAULT_MAX_EVENTS_PER_SESSION,
    );
  }

  async getSession(id: string): Promise<SessionRecord | null> {
    const found = this.sessions.get(id);
    if (!found) {
      return null;
    }
    return cloneSessionRecord(found);
  }

  async listSessions(request: ListPageRequest = {}): Promise<ListPage<SessionRecord>> {
    const page = paginate(this.sessionsByAge(), request);
    return {
      items: page.items.map(cloneSessionRecord),
      nextCursor: page.nextCursor,
    };
  }

  async updateSession(session: SessionRecord): Promise<void> {
    // Upsert a shallow copy and make sure an event log exists for it.
    this.sessions.set(session.id, { ...session });
    if (!this.eventsBySession.has(session.id)) {
      this.eventsBySession.set(session.id, []);
    }
    const excess = this.sessions.size - this.maxSessions;
    if (excess <= 0) {
      return;
    }
    // Over the cap: drop the oldest sessions along with their events.
    for (const victim of this.sessionsByAge().slice(0, excess)) {
      this.sessions.delete(victim.id);
      this.eventsBySession.delete(victim.id);
    }
  }

  async listEvents(request: ListEventsRequest): Promise<ListPage<SessionEvent>> {
    const stored = this.eventsBySession.get(request.sessionId) ?? [];
    const ordered = [...stored].sort((left, right) => {
      if (left.eventIndex !== right.eventIndex) {
        return left.eventIndex - right.eventIndex;
      }
      return left.id.localeCompare(right.id);
    });
    const page = paginate(ordered, request);
    return {
      items: page.items.map(cloneSessionEvent),
      nextCursor: page.nextCursor,
    };
  }

  async insertEvent(event: SessionEvent): Promise<void> {
    const log = this.eventsBySession.get(event.sessionId) ?? [];
    log.push(cloneSessionEvent(event));
    // Trim from the front so only the newest `maxEventsPerSession` remain.
    const overflow = log.length - this.maxEventsPerSession;
    if (overflow > 0) {
      log.splice(0, overflow);
    }
    this.eventsBySession.set(event.sessionId, log);
  }

  /** Sessions ordered oldest-first: by createdAt, tie-broken by id. */
  private sessionsByAge(): SessionRecord[] {
    return [...this.sessions.values()].sort((left, right) => {
      if (left.createdAt !== right.createdAt) {
        return left.createdAt - right.createdAt;
      }
      return left.id.localeCompare(right.id);
    });
  }
}
/** Why a session ended; open strings are accepted elsewhere for forward compat. */
export type SessionEndReason = "completed" | "error" | "terminated";
/** Which side initiated termination. */
export type TerminatedBy = "agent" | "daemon";
export interface StderrOutput {
head?: string | null;
tail?: string | null;
truncated: boolean;
total_lines?: number | null;
/**
 * Defensive copy of a session record: top-level fields are shallow-copied
 * and `sessionInit` is deep-copied via a JSON round-trip so stored state
 * cannot be mutated through the caller's reference.
 */
function cloneSessionRecord(session: SessionRecord): SessionRecord {
  const copy: SessionRecord = { ...session };
  if (session.sessionInit) {
    copy.sessionInit = JSON.parse(
      JSON.stringify(session.sessionInit),
    ) as SessionRecord["sessionInit"];
  } else {
    copy.sessionInit = undefined;
  }
  return copy;
}
export interface SessionTerminationInfo {
reason: SessionEndReason | string;
terminated_by: TerminatedBy | string;
message?: string | null;
exit_code?: number | null;
stderr?: StderrOutput | null;
[key: string]: unknown;
/**
 * Defensive copy of a session event: shallow-copies the envelope fields and
 * deep-copies the JSON-RPC payload so stored events cannot be mutated
 * through the caller's reference.
 */
function cloneSessionEvent(event: SessionEvent): SessionEvent {
  const payloadCopy = JSON.parse(JSON.stringify(event.payload)) as AnyMessage;
  const copy: SessionEvent = { ...event };
  copy.payload = payloadCopy;
  return copy;
}
export interface SessionInfo {
session_id: string;
sessionId?: string;
agent?: string;
cwd?: string;
title?: string | null;
ended?: boolean;
created_at?: string | number | null;
createdAt?: string | number | null;
updated_at?: string | number | null;
updatedAt?: string | number | null;
model?: string | null;
metadata?: Record<string, unknown> | null;
agent_mode?: string;
agentMode?: string;
permission_mode?: string;
permissionMode?: string;
native_session_id?: string | null;
nativeSessionId?: string | null;
event_count?: number;
eventCount?: number;
directory?: string | null;
variant?: string | null;
mcp?: Record<string, unknown> | null;
skills?: Record<string, unknown> | null;
termination_info?: SessionTerminationInfo | null;
terminationInfo?: SessionTerminationInfo | null;
[key: string]: unknown;
// Helpers for types shaped like generated OpenAPI operations (presumably
// openapi-typescript output — confirm against the generator in use).

// Extracts the `responses` map from an operation type.
type ResponsesOf<T> = T extends { responses: infer R } ? R : never;
// JSON body of a specific response status code for an operation.
type JsonResponse<T, StatusCode extends keyof ResponsesOf<T>> = ResponsesOf<T>[StatusCode] extends {
  content: { "application/json": infer B };
}
  ? B
  : never;
// JSON request body of an operation, if it declares one.
type JsonRequestBody<T> = T extends {
  requestBody: { content: { "application/json": infer B } };
}
  ? B
  : never;
// Query parameters of an operation (required or optional declaration).
type QueryParams<T> = T extends { parameters: { query: infer Q } }
  ? Q
  : T extends { parameters: { query?: infer Q } }
    ? Q
    : never;
/**
 * Coerces a user-supplied cap to a positive integer: non-numbers, NaN,
 * infinities, and anything below 1 fall back to `fallback`; fractional
 * values are floored.
 */
function normalizeCap(value: number | undefined, fallback: number): number {
  if (typeof value !== "number" || !Number.isFinite(value)) {
    return fallback;
  }
  return value < 1 ? fallback : Math.floor(value);
}
export interface SessionListResponse {
sessions: SessionInfo[];
/**
 * Applies cursor pagination to an already-ordered array. The cursor is a
 * stringified offset; `nextCursor` is emitted only while items remain.
 */
function paginate<T>(items: T[], request: ListPageRequest): ListPage<T> {
  const start = parseCursor(request.cursor);
  const pageSize = normalizeCap(request.limit, DEFAULT_LIST_LIMIT);
  const pageItems = items.slice(start, start + pageSize);
  const consumed = start + pageItems.length;
  const nextCursor = consumed < items.length ? String(consumed) : undefined;
  return { items: pageItems, nextCursor };
}
/**
 * Response to a session terminate call. Snake/camel variants mirror
 * differing server versions.
 */
export interface SessionTerminateResponse {
  terminated?: boolean;
  reason?: SessionEndReason | string;
  terminated_by?: TerminatedBy | string;
  terminatedBy?: TerminatedBy | string;
  [key: string]: unknown;
}
/**
 * Params of the session-ended notification. Termination details may arrive
 * nested under `data` or flattened at the top level; both snake_case and
 * camelCase id spellings are declared.
 */
export interface SessionEndedParams {
  session_id?: string;
  sessionId?: string;
  data?: SessionTerminationInfo;
  reason?: SessionEndReason | string;
  terminated_by?: TerminatedBy | string;
  terminatedBy?: TerminatedBy | string;
  message?: string | null;
  exit_code?: number | null;
  stderr?: StderrOutput | null;
  [key: string]: unknown;
}
/**
 * JSON-RPC notification emitted by the daemon when a session ends.
 * The method name is a daemon extension (underscore-prefixed namespace).
 */
export interface SessionEndedNotification {
  jsonrpc: "2.0";
  method: "_sandboxagent/session/ended";
  params: SessionEndedParams;
  [key: string]: unknown;
}
// Query shapes for the filesystem HTTP endpoints. Each accepts an optional
// session id (both snake_case and camelCase spellings) to scope the request.

/** Addresses a single filesystem path. */
export interface FsPathQuery {
  path: string;
  session_id?: string | null;
  sessionId?: string | null;
}

/** Lists directory entries; `path` is optional. */
export interface FsEntriesQuery {
  path?: string | null;
  session_id?: string | null;
  sessionId?: string | null;
}

/** Carries only the optional session scope. */
export interface FsSessionQuery {
  session_id?: string | null;
  sessionId?: string | null;
}

/** Deletes a path, optionally recursively. */
export interface FsDeleteQuery {
  path: string;
  recursive?: boolean | null;
  session_id?: string | null;
  sessionId?: string | null;
}

/** Targets a batch upload at an optional destination path. */
export interface FsUploadBatchQuery {
  path?: string | null;
  session_id?: string | null;
  sessionId?: string | null;
}
/** Kind of filesystem node. */
export type FsEntryType = "file" | "directory";

/** One directory listing entry (snake/camel variants for the type field). */
export interface FsEntry {
  name: string;
  path: string;
  size: number;
  entry_type?: FsEntryType;
  entryType?: FsEntryType;
  modified?: string | null;
}

/** Stat result for a single path. */
export interface FsStat {
  path: string;
  size: number;
  entry_type?: FsEntryType;
  entryType?: FsEntryType;
  modified?: string | null;
}

/** Result of a file write (snake/camel variants of the byte count). */
export interface FsWriteResponse {
  path: string;
  bytes_written?: number;
  bytesWritten?: number;
}

/** Move/rename request. */
export interface FsMoveRequest {
  from: string;
  to: string;
  overwrite?: boolean | null;
}

/** Move/rename result echoing the resolved paths. */
export interface FsMoveResponse {
  from: string;
  to: string;
}

/** Generic single-path action result. */
export interface FsActionResponse {
  path: string;
}
export interface FsUploadBatchResponse {
paths: string[];
truncated: boolean;
/**
 * Decodes a pagination cursor (a stringified offset) back to a
 * non-negative integer; anything missing, unparsable, or negative maps
 * to offset 0.
 */
function parseCursor(cursor: string | undefined): number {
  if (cursor === undefined || cursor === "") {
    return 0;
  }
  const offset = Number.parseInt(cursor, 10);
  return Number.isFinite(offset) && offset >= 0 ? offset : 0;
}

View file

@ -0,0 +1,140 @@
import { chmodSync, mkdirSync, writeFileSync } from "node:fs";
import { join } from "node:path";
/**
 * Seeds `dataHome` (used as XDG_DATA_HOME) with a fake "mock-acp" agent
 * process so integration tests can run against the daemon without a real
 * agent install.
 *
 * Writes <dataHome>/sandbox-agent/bin/agent_processes/ containing a Node
 * script that speaks newline-delimited JSON-RPC over stdio: it answers
 * `initialize`, `session/new`, and `session/prompt`, echoes prompt text
 * back as an `agent_message_chunk` update prefixed with "mock: ", and acks
 * any other request with `{ ok: true }`.
 *
 * On Windows the runnable entry point is a `mock-acp.cmd` shim that invokes
 * the script via `node`; on POSIX the script itself is the executable.
 */
export function prepareMockAgentDataHome(dataHome: string): void {
  const installDir = join(dataHome, "sandbox-agent", "bin");
  const processDir = join(installDir, "agent_processes");
  mkdirSync(processDir, { recursive: true });
  // The executable the daemon spawns (a .cmd shim on Windows).
  const runner = process.platform === "win32"
    ? join(processDir, "mock-acp.cmd")
    : join(processDir, "mock-acp");
  // Where the JavaScript body actually lives.
  const scriptFile = process.platform === "win32"
    ? join(processDir, "mock-acp.js")
    : runner;
  // String.raw: the agent script is written verbatim; do not edit its text.
  const nodeScript = String.raw`#!/usr/bin/env node
const { createInterface } = require("node:readline");
let nextSession = 0;
function emit(value) {
process.stdout.write(JSON.stringify(value) + "\n");
}
function firstText(prompt) {
if (!Array.isArray(prompt)) {
return "";
}
for (const block of prompt) {
if (block && block.type === "text" && typeof block.text === "string") {
return block.text;
}
}
return "";
}
const rl = createInterface({
input: process.stdin,
crlfDelay: Infinity,
});
rl.on("line", (line) => {
let msg;
try {
msg = JSON.parse(line);
} catch {
return;
}
const hasMethod = typeof msg?.method === "string";
const hasId = Object.prototype.hasOwnProperty.call(msg, "id");
const method = hasMethod ? msg.method : undefined;
if (method === "session/prompt") {
const sessionId = typeof msg?.params?.sessionId === "string" ? msg.params.sessionId : "";
const text = firstText(msg?.params?.prompt);
emit({
jsonrpc: "2.0",
method: "session/update",
params: {
sessionId,
update: {
sessionUpdate: "agent_message_chunk",
content: {
type: "text",
text: "mock: " + text,
},
},
},
});
}
if (!hasMethod || !hasId) {
return;
}
if (method === "initialize") {
emit({
jsonrpc: "2.0",
id: msg.id,
result: {
protocolVersion: 1,
capabilities: {},
serverInfo: {
name: "mock-acp-agent",
version: "0.0.1",
},
},
});
return;
}
if (method === "session/new") {
nextSession += 1;
emit({
jsonrpc: "2.0",
id: msg.id,
result: {
sessionId: "mock-session-" + nextSession,
},
});
return;
}
if (method === "session/prompt") {
emit({
jsonrpc: "2.0",
id: msg.id,
result: {
stopReason: "end_turn",
},
});
return;
}
emit({
jsonrpc: "2.0",
id: msg.id,
result: {
ok: true,
echoedMethod: method,
},
});
});
`;
  writeFileSync(scriptFile, nodeScript);
  if (process.platform === "win32") {
    // Windows cannot execute the .js directly; write a cmd shim.
    writeFileSync(runner, `@echo off\r\nnode "${scriptFile}" %*\r\n`);
  }
  chmodSync(scriptFile, 0o755);
  if (process.platform === "win32") {
    chmodSync(runner, 0o755);
  }
}

View file

@ -1,18 +1,19 @@
import { describe, it, expect, beforeAll, afterAll } from "vitest";
import { existsSync } from "node:fs";
import { mkdtempSync, rmSync } from "node:fs";
import { dirname, resolve } from "node:path";
import { join } from "node:path";
import { fileURLToPath } from "node:url";
import { tmpdir } from "node:os";
import {
AlreadyConnectedError,
NotConnectedError,
InMemorySessionPersistDriver,
SandboxAgent,
SandboxAgentClient,
type AgentEvent,
type SessionEvent,
} from "../src/index.ts";
import { spawnSandboxAgent, isNodeRuntime, type SandboxAgentSpawnHandle } from "../src/spawn.ts";
import { prepareMockAgentDataHome } from "./helpers/mock-agent.ts";
const __dirname = dirname(fileURLToPath(import.meta.url));
const AGENT_UNPARSED_METHOD = "_sandboxagent/agent/unparsed";
function findBinary(): string | null {
if (process.env.SANDBOX_AGENT_BIN) {
@ -49,8 +50,8 @@ function sleep(ms: number): Promise<void> {
async function waitFor<T>(
fn: () => T | undefined | null,
timeoutMs = 5000,
stepMs = 25,
timeoutMs = 6000,
stepMs = 30,
): Promise<T> {
const started = Date.now();
while (Date.now() - started < timeoutMs) {
@ -63,16 +64,23 @@ async function waitFor<T>(
throw new Error("timed out waiting for condition");
}
describe("Integration: TypeScript SDK against real server/runtime", () => {
describe("Integration: TypeScript SDK flat session API", () => {
let handle: SandboxAgentSpawnHandle;
let baseUrl: string;
let token: string;
let dataHome: string;
beforeAll(async () => {
dataHome = mkdtempSync(join(tmpdir(), "sdk-integration-"));
prepareMockAgentDataHome(dataHome);
handle = await spawnSandboxAgent({
enabled: true,
log: "silent",
timeoutMs: 30000,
env: {
XDG_DATA_HOME: dataHome,
},
});
baseUrl = handle.baseUrl;
token = handle.token;
@ -80,246 +88,197 @@ describe("Integration: TypeScript SDK against real server/runtime", () => {
afterAll(async () => {
await handle.dispose();
rmSync(dataHome, { recursive: true, force: true });
});
it("detects Node.js runtime", () => {
expect(isNodeRuntime()).toBe(true);
});
it("keeps health on HTTP and requires ACP connection for ACP-backed helpers", async () => {
const client = await SandboxAgent.connect({
it("creates a session, sends prompt, and persists events", async () => {
const sdk = await SandboxAgent.connect({
baseUrl,
token,
agent: "mock",
autoConnect: false,
});
const health = await client.getHealth();
expect(health.status).toBe("ok");
const session = await sdk.createSession({ agent: "mock" });
await expect(client.listAgents()).rejects.toBeInstanceOf(NotConnectedError);
await client.connect();
const agents = await client.listAgents();
expect(Array.isArray(agents.agents)).toBe(true);
expect(agents.agents.length).toBeGreaterThan(0);
await client.disconnect();
});
it("auto-connects on constructor and runs initialize/new/prompt flow", async () => {
const events: AgentEvent[] = [];
const client = new SandboxAgentClient({
baseUrl,
token,
agent: "mock",
onEvent: (event) => {
events.push(event);
},
const observed: SessionEvent[] = [];
const off = session.onEvent((event) => {
observed.push(event);
});
const session = await client.newSession({
cwd: process.cwd(),
mcpServers: [],
metadata: {
agent: "mock",
},
});
expect(session.sessionId).toBeTruthy();
const prompt = await client.prompt({
sessionId: session.sessionId,
prompt: [{ type: "text", text: "hello integration" }],
});
const prompt = await session.prompt([{ type: "text", text: "hello flat sdk" }]);
expect(prompt.stopReason).toBe("end_turn");
await waitFor(() => {
const text = events
.filter((event): event is Extract<AgentEvent, { type: "sessionUpdate" }> => {
return event.type === "sessionUpdate";
})
.map((event) => event.notification)
.filter((entry) => entry.update.sessionUpdate === "agent_message_chunk")
.map((entry) => entry.update.content)
.filter((content) => content.type === "text")
.map((content) => content.text)
.join("");
return text.includes("mock: hello integration") ? text : undefined;
const inbound = observed.find((event) => event.sender === "agent");
return inbound;
});
await client.disconnect();
});
const listed = await sdk.listSessions({ limit: 20 });
expect(listed.items.some((entry) => entry.id === session.id)).toBe(true);
it("enforces manual connect and disconnect lifecycle when autoConnect is disabled", async () => {
const client = new SandboxAgentClient({
baseUrl,
token,
agent: "mock",
autoConnect: false,
});
const fetched = await sdk.getSession(session.id);
expect(fetched?.agent).toBe("mock");
await expect(
client.newSession({
cwd: process.cwd(),
mcpServers: [],
metadata: {
agent: "mock",
},
}),
).rejects.toBeInstanceOf(NotConnectedError);
const events = await sdk.getEvents({ sessionId: session.id, limit: 100 });
expect(events.items.length).toBeGreaterThan(0);
expect(events.items.some((event) => event.sender === "client")).toBe(true);
expect(events.items.some((event) => event.sender === "agent")).toBe(true);
expect(events.items.every((event) => typeof event.id === "string")).toBe(true);
expect(events.items.every((event) => Number.isInteger(event.eventIndex))).toBe(true);
await client.connect();
const session = await client.newSession({
cwd: process.cwd(),
mcpServers: [],
metadata: {
agent: "mock",
},
});
expect(session.sessionId).toBeTruthy();
await client.disconnect();
await expect(
client.prompt({
sessionId: session.sessionId,
prompt: [{ type: "text", text: "after disconnect" }],
}),
).rejects.toBeInstanceOf(NotConnectedError);
});
it("rejects duplicate connect calls for a single client instance", async () => {
const client = new SandboxAgentClient({
baseUrl,
token,
agent: "mock",
autoConnect: false,
});
await client.connect();
await expect(client.connect()).rejects.toBeInstanceOf(AlreadyConnectedError);
await client.disconnect();
});
it("injects metadata on newSession and extracts metadata from session/list", async () => {
const client = new SandboxAgentClient({
baseUrl,
token,
agent: "mock",
autoConnect: false,
});
await client.connect();
const session = await client.newSession({
cwd: process.cwd(),
mcpServers: [],
metadata: {
agent: "mock",
variant: "high",
},
});
await client.setMetadata(session.sessionId, {
title: "sdk title",
permissionMode: "ask",
model: "mock",
});
const listed = await client.unstableListSessions({});
const current = listed.sessions.find((entry) => entry.sessionId === session.sessionId) as
| (Record<string, unknown> & { metadata?: Record<string, unknown> })
| undefined;
expect(current).toBeTruthy();
expect(current?.title).toBe("sdk title");
const metadata =
(current?.metadata as Record<string, unknown> | undefined) ??
((current?._meta as Record<string, unknown> | undefined)?.["sandboxagent.dev"] as
| Record<string, unknown>
| undefined);
expect(metadata?.variant).toBe("high");
expect(metadata?.permissionMode).toBe("ask");
expect(metadata?.model).toBe("mock");
await client.disconnect();
});
it("converts _sandboxagent/session/ended into typed agent events", async () => {
const events: AgentEvent[] = [];
const client = new SandboxAgentClient({
baseUrl,
token,
agent: "mock",
autoConnect: false,
onEvent: (event) => {
events.push(event);
},
});
await client.connect();
const session = await client.newSession({
cwd: process.cwd(),
mcpServers: [],
metadata: {
agent: "mock",
},
});
await client.terminateSession(session.sessionId);
const ended = await waitFor(() => {
return events.find((event) => event.type === "sessionEnded");
});
expect(ended.type).toBe("sessionEnded");
if (ended.type === "sessionEnded") {
const endedSessionId =
ended.notification.params.sessionId ?? ended.notification.params.session_id;
expect(endedSessionId).toBe(session.sessionId);
for (let i = 1; i < events.items.length; i += 1) {
expect(events.items[i]!.eventIndex).toBeGreaterThanOrEqual(events.items[i - 1]!.eventIndex);
}
await client.disconnect();
off();
await sdk.dispose();
});
it("converts _sandboxagent/agent/unparsed notifications through the event adapter", async () => {
const events: AgentEvent[] = [];
const client = new SandboxAgentClient({
it("restores a session on stale connection by recreating and replaying history on first prompt", async () => {
const persist = new InMemorySessionPersistDriver({
maxEventsPerSession: 200,
});
const first = await SandboxAgent.connect({
baseUrl,
token,
autoConnect: false,
onEvent: (event) => {
events.push(event);
},
persist,
replayMaxEvents: 50,
replayMaxChars: 20_000,
});
(client as any).handleEnvelope(
const created = await first.createSession({ agent: "mock" });
await created.prompt([{ type: "text", text: "first run" }]);
const oldConnectionId = created.lastConnectionId;
await first.dispose();
const second = await SandboxAgent.connect({
baseUrl,
token,
persist,
replayMaxEvents: 50,
replayMaxChars: 20_000,
});
const restored = await second.resumeSession(created.id);
expect(restored.lastConnectionId).not.toBe(oldConnectionId);
await restored.prompt([{ type: "text", text: "second run" }]);
const events = await second.getEvents({ sessionId: restored.id, limit: 500 });
const replayInjected = events.items.find((event) => {
if (event.sender !== "client") {
return false;
}
const payload = event.payload as Record<string, unknown>;
const method = payload.method;
const params = payload.params as Record<string, unknown> | undefined;
const prompt = Array.isArray(params?.prompt) ? params?.prompt : [];
const firstBlock = prompt[0] as Record<string, unknown> | undefined;
return (
method === "session/prompt" &&
typeof firstBlock?.text === "string" &&
firstBlock.text.includes("Previous session history is replayed below")
);
});
expect(replayInjected).toBeTruthy();
await second.dispose();
});
it("enforces in-memory event cap to avoid leaks", async () => {
const persist = new InMemorySessionPersistDriver({
maxEventsPerSession: 8,
});
const sdk = await SandboxAgent.connect({
baseUrl,
token,
persist,
});
const session = await sdk.createSession({ agent: "mock" });
for (let i = 0; i < 20; i += 1) {
await session.prompt([{ type: "text", text: `event-cap-${i}` }]);
}
const events = await sdk.getEvents({ sessionId: session.id, limit: 200 });
expect(events.items.length).toBeLessThanOrEqual(8);
await sdk.dispose();
});
it("supports MCP and skills config HTTP helpers", async () => {
const sdk = await SandboxAgent.connect({
baseUrl,
token,
});
const directory = mkdtempSync(join(tmpdir(), "sdk-config-"));
const mcpConfig = {
type: "local" as const,
command: "node",
args: ["server.js"],
env: { LOG_LEVEL: "debug" },
};
await sdk.setMcpConfig(
{
jsonrpc: "2.0",
method: AGENT_UNPARSED_METHOD,
params: {
raw: "unexpected payload",
},
directory,
mcpName: "local-test",
},
"inbound",
mcpConfig,
);
const unparsed = events.find((event) => event.type === "agentUnparsed");
expect(unparsed?.type).toBe("agentUnparsed");
});
const loadedMcp = await sdk.getMcpConfig({
directory,
mcpName: "local-test",
});
expect(loadedMcp.type).toBe("local");
it("rejects invalid token on protected /v2 endpoints", async () => {
const client = new SandboxAgentClient({
baseUrl,
token: "invalid-token",
autoConnect: false,
await sdk.deleteMcpConfig({
directory,
mcpName: "local-test",
});
await expect(client.getHealth()).rejects.toThrow();
const skillsConfig = {
sources: [
{
type: "github",
source: "rivet-dev/skills",
skills: ["sandbox-agent"],
},
],
};
await sdk.setSkillsConfig(
{
directory,
skillName: "default",
},
skillsConfig,
);
const loadedSkills = await sdk.getSkillsConfig({
directory,
skillName: "default",
});
expect(Array.isArray(loadedSkills.sources)).toBe(true);
await sdk.deleteSkillsConfig({
directory,
skillName: "default",
});
await sdk.dispose();
rmSync(directory, { recursive: true, force: true });
});
});