feat: acp http adapter

Nathan Flurry 2026-02-10 16:05:56 -08:00
parent 2ba630c180
commit b4c8564cb2
217 changed files with 18785 additions and 17400 deletions

package.json

@@ -0,0 +1,39 @@
{
  "name": "@sandbox-agent/persist-postgres",
  "version": "0.1.0",
  "description": "PostgreSQL persistence driver for the Sandbox Agent TypeScript SDK",
  "license": "Apache-2.0",
  "repository": {
    "type": "git",
    "url": "https://github.com/rivet-dev/sandbox-agent"
  },
  "type": "module",
  "main": "./dist/index.js",
  "types": "./dist/index.d.ts",
  "exports": {
    ".": {
      "types": "./dist/index.d.ts",
      "import": "./dist/index.js"
    }
  },
  "dependencies": {
    "pg": "^8.16.3",
    "sandbox-agent": "workspace:*"
  },
  "files": [
    "dist"
  ],
  "scripts": {
    "build": "tsup",
    "typecheck": "tsc --noEmit",
    "test": "vitest run",
    "test:watch": "vitest"
  },
  "devDependencies": {
    "@types/node": "^22.0.0",
    "@types/pg": "^8.15.6",
    "tsup": "^8.0.0",
    "typescript": "^5.7.0",
    "vitest": "^3.0.0"
  }
}

src/index.ts

@@ -0,0 +1,322 @@
import { Pool, type PoolConfig } from "pg";
import type {
  ListEventsRequest,
  ListPage,
  ListPageRequest,
  SessionEvent,
  SessionPersistDriver,
  SessionRecord,
} from "sandbox-agent";

const DEFAULT_LIST_LIMIT = 100;

export interface PostgresSessionPersistDriverOptions {
  connectionString?: string;
  pool?: Pool;
  poolConfig?: PoolConfig;
  schema?: string;
}

export class PostgresSessionPersistDriver implements SessionPersistDriver {
  private readonly pool: Pool;
  private readonly ownsPool: boolean;
  private readonly schema: string;
  private readonly initialized: Promise<void>;

  constructor(options: PostgresSessionPersistDriverOptions = {}) {
    this.schema = normalizeSchema(options.schema ?? "public");
    if (options.pool) {
      // Reuse a caller-provided pool; the caller stays responsible for closing it.
      this.pool = options.pool;
      this.ownsPool = false;
    } else {
      this.pool = new Pool({
        connectionString: options.connectionString,
        ...options.poolConfig,
      });
      this.ownsPool = true;
    }
    // Kick off schema creation/migration eagerly; every query awaits it via ready().
    this.initialized = this.initialize();
  }

  async getSession(id: string): Promise<SessionRecord | null> {
    await this.ready();
    const result = await this.pool.query<SessionRow>(
      `SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, session_init_json
       FROM ${this.table("sessions")}
       WHERE id = $1`,
      [id],
    );
    if (result.rows.length === 0) {
      return null;
    }
    return decodeSessionRow(result.rows[0]);
  }

  async listSessions(request: ListPageRequest = {}): Promise<ListPage<SessionRecord>> {
    await this.ready();
    const offset = parseCursor(request.cursor);
    const limit = normalizeLimit(request.limit);
    const rowsResult = await this.pool.query<SessionRow>(
      `SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, session_init_json
       FROM ${this.table("sessions")}
       ORDER BY created_at ASC, id ASC
       LIMIT $1 OFFSET $2`,
      [limit, offset],
    );
    const countResult = await this.pool.query<{ count: string }>(
      `SELECT COUNT(*) AS count FROM ${this.table("sessions")}`,
    );
    const total = parseInteger(countResult.rows[0]?.count ?? "0");
    const nextOffset = offset + rowsResult.rows.length;
    return {
      items: rowsResult.rows.map(decodeSessionRow),
      nextCursor: nextOffset < total ? String(nextOffset) : undefined,
    };
  }

  async updateSession(session: SessionRecord): Promise<void> {
    await this.ready();
    await this.pool.query(
      `INSERT INTO ${this.table("sessions")} (
         id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, session_init_json
       ) VALUES ($1, $2, $3, $4, $5, $6, $7)
       ON CONFLICT(id) DO UPDATE SET
         agent = EXCLUDED.agent,
         agent_session_id = EXCLUDED.agent_session_id,
         last_connection_id = EXCLUDED.last_connection_id,
         created_at = EXCLUDED.created_at,
         destroyed_at = EXCLUDED.destroyed_at,
         session_init_json = EXCLUDED.session_init_json`,
      [
        session.id,
        session.agent,
        session.agentSessionId,
        session.lastConnectionId,
        session.createdAt,
        session.destroyedAt ?? null,
        session.sessionInit ?? null,
      ],
    );
  }

  async listEvents(request: ListEventsRequest): Promise<ListPage<SessionEvent>> {
    await this.ready();
    const offset = parseCursor(request.cursor);
    const limit = normalizeLimit(request.limit);
    const rowsResult = await this.pool.query<EventRow>(
      `SELECT id, event_index, session_id, created_at, connection_id, sender, payload_json
       FROM ${this.table("events")}
       WHERE session_id = $1
       ORDER BY event_index ASC, id ASC
       LIMIT $2 OFFSET $3`,
      [request.sessionId, limit, offset],
    );
    const countResult = await this.pool.query<{ count: string }>(
      `SELECT COUNT(*) AS count FROM ${this.table("events")} WHERE session_id = $1`,
      [request.sessionId],
    );
    const total = parseInteger(countResult.rows[0]?.count ?? "0");
    const nextOffset = offset + rowsResult.rows.length;
    return {
      items: rowsResult.rows.map(decodeEventRow),
      nextCursor: nextOffset < total ? String(nextOffset) : undefined,
    };
  }

  async insertEvent(event: SessionEvent): Promise<void> {
    await this.ready();
    await this.pool.query(
      `INSERT INTO ${this.table("events")} (
         id, event_index, session_id, created_at, connection_id, sender, payload_json
       ) VALUES ($1, $2, $3, $4, $5, $6, $7)
       ON CONFLICT(id) DO UPDATE SET
         event_index = EXCLUDED.event_index,
         session_id = EXCLUDED.session_id,
         created_at = EXCLUDED.created_at,
         connection_id = EXCLUDED.connection_id,
         sender = EXCLUDED.sender,
         payload_json = EXCLUDED.payload_json`,
      [
        event.id,
        event.eventIndex,
        event.sessionId,
        event.createdAt,
        event.connectionId,
        event.sender,
        event.payload,
      ],
    );
  }

  async close(): Promise<void> {
    if (!this.ownsPool) {
      return;
    }
    await this.pool.end();
  }

  private async ready(): Promise<void> {
    await this.initialized;
  }

  private table(name: "sessions" | "events"): string {
    return `"${this.schema}"."${name}"`;
  }

  private async initialize(): Promise<void> {
    await this.pool.query(`CREATE SCHEMA IF NOT EXISTS "${this.schema}"`);
    await this.pool.query(`
      CREATE TABLE IF NOT EXISTS ${this.table("sessions")} (
        id TEXT PRIMARY KEY,
        agent TEXT NOT NULL,
        agent_session_id TEXT NOT NULL,
        last_connection_id TEXT NOT NULL,
        created_at BIGINT NOT NULL,
        destroyed_at BIGINT,
        session_init_json JSONB
      )
    `);
    await this.pool.query(`
      CREATE TABLE IF NOT EXISTS ${this.table("events")} (
        id TEXT PRIMARY KEY,
        event_index BIGINT NOT NULL,
        session_id TEXT NOT NULL,
        created_at BIGINT NOT NULL,
        connection_id TEXT NOT NULL,
        sender TEXT NOT NULL,
        payload_json JSONB NOT NULL
      )
    `);
    // Migration for tables created by earlier versions: ensure the id column is
    // TEXT so upserts keyed on string ids keep working.
    await this.pool.query(`
      ALTER TABLE ${this.table("events")}
      ALTER COLUMN id TYPE TEXT USING id::TEXT
    `);
    // Migration: add event_index where missing, backfill it from insertion order,
    // then enforce NOT NULL once every row has a value.
    await this.pool.query(`
      ALTER TABLE ${this.table("events")}
      ADD COLUMN IF NOT EXISTS event_index BIGINT
    `);
    await this.pool.query(`
      WITH ranked AS (
        SELECT id, ROW_NUMBER() OVER (PARTITION BY session_id ORDER BY created_at ASC, id ASC) AS ranked_index
        FROM ${this.table("events")}
      )
      UPDATE ${this.table("events")} AS current_events
      SET event_index = ranked.ranked_index
      FROM ranked
      WHERE current_events.id = ranked.id
        AND current_events.event_index IS NULL
    `);
    await this.pool.query(`
      ALTER TABLE ${this.table("events")}
      ALTER COLUMN event_index SET NOT NULL
    `);
    await this.pool.query(`
      CREATE INDEX IF NOT EXISTS idx_events_session_order
      ON ${this.table("events")}(session_id, event_index, id)
    `);
  }
}

type SessionRow = {
  id: string;
  agent: string;
  agent_session_id: string;
  last_connection_id: string;
  created_at: string | number;
  destroyed_at: string | number | null;
  session_init_json: unknown | null;
};

type EventRow = {
  id: string | number;
  event_index: string | number;
  session_id: string;
  created_at: string | number;
  connection_id: string;
  sender: string;
  payload_json: unknown;
};

function decodeSessionRow(row: SessionRow): SessionRecord {
  return {
    id: row.id,
    agent: row.agent,
    agentSessionId: row.agent_session_id,
    lastConnectionId: row.last_connection_id,
    createdAt: parseInteger(row.created_at),
    destroyedAt: row.destroyed_at === null ? undefined : parseInteger(row.destroyed_at),
    // Compare against null explicitly so falsy-but-valid JSON payloads survive decoding.
    sessionInit:
      row.session_init_json === null
        ? undefined
        : (row.session_init_json as SessionRecord["sessionInit"]),
  };
}

function decodeEventRow(row: EventRow): SessionEvent {
  return {
    id: String(row.id),
    eventIndex: parseInteger(row.event_index),
    sessionId: row.session_id,
    createdAt: parseInteger(row.created_at),
    connectionId: row.connection_id,
    sender: parseSender(row.sender),
    payload: row.payload_json as SessionEvent["payload"],
  };
}

function normalizeLimit(limit: number | undefined): number {
  if (!Number.isFinite(limit) || (limit ?? 0) < 1) {
    return DEFAULT_LIST_LIMIT;
  }
  return Math.floor(limit as number);
}

function parseCursor(cursor: string | undefined): number {
  if (!cursor) {
    return 0;
  }
  const parsed = Number.parseInt(cursor, 10);
  if (!Number.isFinite(parsed) || parsed < 0) {
    return 0;
  }
  return parsed;
}

function parseInteger(value: string | number): number {
  const parsed = typeof value === "number" ? value : Number.parseInt(value, 10);
  if (!Number.isFinite(parsed)) {
    throw new Error(`Invalid integer value returned by postgres: ${String(value)}`);
  }
  return parsed;
}

function parseSender(value: string): SessionEvent["sender"] {
  if (value === "agent" || value === "client") {
    return value;
  }
  throw new Error(`Invalid sender value returned by postgres: ${value}`);
}

function normalizeSchema(schema: string): string {
  if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(schema)) {
    throw new Error(`Invalid schema name '${schema}'. Use letters, numbers, and underscores only.`);
  }
  return schema;
}
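
For orientation, here is a minimal usage sketch of the driver, patterned on the integration test later in this commit. The connection string, baseUrl, and token are placeholder values; the import specifier follows the package name declared in package.json above, and the connect options mirror the ones the test exercises.

import { SandboxAgent } from "sandbox-agent";
import { PostgresSessionPersistDriver } from "@sandbox-agent/persist-postgres";

// Placeholder connection string; the driver creates its schema, tables, and
// index on first use, so no out-of-band migration step is required.
const persist = new PostgresSessionPersistDriver({
  connectionString: "postgres://postgres:postgres@127.0.0.1:5432/sandboxagent",
});

const sdk = await SandboxAgent.connect({
  baseUrl: "http://127.0.0.1:8080", // placeholder: a running sandbox-agent server
  token: "changeme",                // placeholder auth token
  persist,
});

const session = await sdk.createSession({ agent: "mock" });
await session.prompt([{ type: "text", text: "hello" }]);

await sdk.dispose();
await persist.close(); // ends the owned pool; a no-op when the caller supplied one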

tests/ (integration test)

@@ -0,0 +1,250 @@
import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it } from "vitest";
import { execFileSync } from "node:child_process";
import { existsSync, mkdtempSync, rmSync } from "node:fs";
import { dirname, join, resolve } from "node:path";
import { fileURLToPath } from "node:url";
import { tmpdir } from "node:os";
import { randomUUID } from "node:crypto";
import { Client } from "pg";
import { SandboxAgent } from "sandbox-agent";
import { spawnSandboxAgent, type SandboxAgentSpawnHandle } from "../../typescript/src/spawn.ts";
import { prepareMockAgentDataHome } from "../../typescript/tests/helpers/mock-agent.ts";
import { PostgresSessionPersistDriver } from "../src/index.ts";

const __dirname = dirname(fileURLToPath(import.meta.url));

function findBinary(): string | null {
  if (process.env.SANDBOX_AGENT_BIN) {
    return process.env.SANDBOX_AGENT_BIN;
  }
  const cargoPaths = [
    resolve(__dirname, "../../../target/debug/sandbox-agent"),
    resolve(__dirname, "../../../target/release/sandbox-agent"),
  ];
  for (const p of cargoPaths) {
    if (existsSync(p)) {
      return p;
    }
  }
  return null;
}

const BINARY_PATH = findBinary();
if (!BINARY_PATH) {
  throw new Error(
    "sandbox-agent binary not found. Build it (cargo build -p sandbox-agent) or set SANDBOX_AGENT_BIN.",
  );
}
if (!process.env.SANDBOX_AGENT_BIN) {
  process.env.SANDBOX_AGENT_BIN = BINARY_PATH;
}

interface PostgresContainer {
  containerId: string;
  connectionString: string;
}

describe("Postgres persistence driver", () => {
  let handle: SandboxAgentSpawnHandle;
  let baseUrl: string;
  let token: string;
  let dataHome: string;
  let postgres: PostgresContainer | null = null;

  beforeAll(async () => {
    dataHome = mkdtempSync(join(tmpdir(), "postgres-integration-"));
    prepareMockAgentDataHome(dataHome);
    handle = await spawnSandboxAgent({
      enabled: true,
      log: "silent",
      timeoutMs: 30000,
      env: {
        XDG_DATA_HOME: dataHome,
      },
    });
    baseUrl = handle.baseUrl;
    token = handle.token;
  });

  beforeEach(async () => {
    postgres = await startPostgresContainer();
  });

  afterEach(() => {
    if (postgres) {
      stopPostgresContainer(postgres.containerId);
      postgres = null;
    }
  });

  afterAll(async () => {
    await handle.dispose();
    rmSync(dataHome, { recursive: true, force: true });
  });
it("persists session/event history across SDK instances and supports replay restore", async () => {
const connectionString = requirePostgres(postgres).connectionString;
const persist1 = new PostgresSessionPersistDriver({
connectionString,
});
const sdk1 = await SandboxAgent.connect({
baseUrl,
token,
persist: persist1,
replayMaxEvents: 40,
replayMaxChars: 16000,
});
const created = await sdk1.createSession({ agent: "mock" });
await created.prompt([{ type: "text", text: "postgres-first" }]);
const firstConnectionId = created.lastConnectionId;
await sdk1.dispose();
await persist1.close();
const persist2 = new PostgresSessionPersistDriver({
connectionString,
});
const sdk2 = await SandboxAgent.connect({
baseUrl,
token,
persist: persist2,
replayMaxEvents: 40,
replayMaxChars: 16000,
});
const restored = await sdk2.resumeSession(created.id);
expect(restored.lastConnectionId).not.toBe(firstConnectionId);
await restored.prompt([{ type: "text", text: "postgres-second" }]);
const sessions = await sdk2.listSessions({ limit: 20 });
expect(sessions.items.some((entry) => entry.id === created.id)).toBe(true);
const events = await sdk2.getEvents({ sessionId: created.id, limit: 1000 });
expect(events.items.length).toBeGreaterThan(0);
expect(events.items.every((event) => typeof event.id === "string")).toBe(true);
expect(events.items.every((event) => Number.isInteger(event.eventIndex))).toBe(true);
for (let i = 1; i < events.items.length; i += 1) {
expect(events.items[i]!.eventIndex).toBeGreaterThanOrEqual(events.items[i - 1]!.eventIndex);
}
const replayInjected = events.items.find((event) => {
if (event.sender !== "client") {
return false;
}
const payload = event.payload as Record<string, unknown>;
const method = payload.method;
const params = payload.params as Record<string, unknown> | undefined;
const prompt = Array.isArray(params?.prompt) ? params?.prompt : [];
const firstBlock = prompt[0] as Record<string, unknown> | undefined;
return (
method === "session/prompt" &&
typeof firstBlock?.text === "string" &&
firstBlock.text.includes("Previous session history is replayed below")
);
});
expect(replayInjected).toBeTruthy();
await sdk2.dispose();
await persist2.close();
});
});

async function startPostgresContainer(): Promise<PostgresContainer> {
  const name = `sandbox-agent-postgres-${randomUUID()}`;
  const containerId = runDockerCommand([
    "run",
    "-d",
    "--rm",
    "--name",
    name,
    "-e",
    "POSTGRES_USER=postgres",
    "-e",
    "POSTGRES_PASSWORD=postgres",
    "-e",
    "POSTGRES_DB=sandboxagent",
    "-p",
    // Bind container port 5432 to a random host port on loopback only.
    "127.0.0.1::5432",
    "postgres:16-alpine",
  ]);
  // Ask docker which host port was actually assigned.
  const portOutput = runDockerCommand(["port", containerId, "5432/tcp"]);
  const port = parsePort(portOutput);
  const connectionString = `postgres://postgres:postgres@127.0.0.1:${port}/sandboxagent`;
  await waitForPostgres(connectionString);
  return {
    containerId,
    connectionString,
  };
}

function stopPostgresContainer(containerId: string): void {
  try {
    runDockerCommand(["rm", "-f", containerId]);
  } catch {
    // Container may already be gone when test teardown runs.
  }
}

function runDockerCommand(args: string[]): string {
  return execFileSync("docker", args, {
    encoding: "utf8",
    stdio: ["ignore", "pipe", "pipe"],
  }).trim();
}

function parsePort(output: string): string {
  const firstLine = output.split("\n")[0]?.trim() ?? "";
  const match = firstLine.match(/:(\d+)$/);
  if (!match) {
    throw new Error(`Failed to parse docker port output: '${output}'`);
  }
  return match[1];
}

async function waitForPostgres(connectionString: string): Promise<void> {
  const timeoutMs = 30000;
  const deadline = Date.now() + timeoutMs;
  let lastError: unknown;
  while (Date.now() < deadline) {
    const client = new Client({ connectionString });
    try {
      await client.connect();
      await client.query("SELECT 1");
      await client.end();
      return;
    } catch (error) {
      lastError = error;
      try {
        await client.end();
      } catch {
        // Ignore cleanup failures while retrying.
      }
      await delay(250);
    }
  }
  throw new Error(`Postgres container did not become ready: ${String(lastError)}`);
}

function delay(ms: number): Promise<void> {
  return new Promise((resolvePromise) => setTimeout(resolvePromise, ms));
}

function requirePostgres(container: PostgresContainer | null): PostgresContainer {
  if (!container) {
    throw new Error("Postgres container was not initialized for this test.");
  }
  return container;
}
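
One driver behavior the test leans on implicitly: list cursors are stringified row offsets, and nextCursor is omitted once the offset plus the returned page reaches the total count. As a sketch under those semantics (collectAllSessions is a hypothetical helper, not part of the SDK), draining every page looks like this:

import type { SessionRecord } from "sandbox-agent";
import { PostgresSessionPersistDriver } from "@sandbox-agent/persist-postgres";

// Follow offset-based cursors until the driver stops returning nextCursor.
async function collectAllSessions(driver: PostgresSessionPersistDriver): Promise<SessionRecord[]> {
  const items: SessionRecord[] = [];
  let cursor: string | undefined;
  do {
    const page = await driver.listSessions({ limit: 50, cursor });
    items.push(...page.items);
    cursor = page.nextCursor; // undefined on the final page
  } while (cursor !== undefined);
  return items;
}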

tsconfig.json

@@ -0,0 +1,16 @@
{
  "compilerOptions": {
    "target": "ES2022",
    "lib": ["ES2022", "DOM"],
    "module": "ESNext",
    "moduleResolution": "Bundler",
    "allowImportingTsExtensions": true,
    "noEmit": true,
    "esModuleInterop": true,
    "strict": true,
    "skipLibCheck": true,
    "resolveJsonModule": true
  },
  "include": ["src/**/*", "tests/**/*"],
  "exclude": ["node_modules", "dist"]
}

tsup.config.ts

@@ -0,0 +1,10 @@
import { defineConfig } from "tsup";
export default defineConfig({
entry: ["src/index.ts"],
format: ["esm"],
dts: true,
sourcemap: true,
clean: true,
target: "es2022",
});

vitest.config.ts

@@ -0,0 +1,9 @@
import { defineConfig } from "vitest/config";
export default defineConfig({
test: {
include: ["tests/**/*.test.ts"],
testTimeout: 120000,
hookTimeout: 120000,
},
});