mirror of
https://github.com/harivansh-afk/sandbox-agent.git
synced 2026-04-19 12:04:12 +00:00
Fix SDK typecheck errors and update persist drivers for insertEvent signature
- Fix insertEvent call in client.ts to pass sessionId as first argument
- Update Daytona provider create options to use Partial type (image has default)
- Update StrictUniqueSessionPersistDriver in tests to match new insertEvent signature
- Sync persist packages, openapi spec, and docs with upstream changes

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
6a42f06342
commit
441083ea2a
33 changed files with 1051 additions and 2121 deletions
|
|
@ -8,7 +8,6 @@
|
|||
},
|
||||
"dependencies": {
|
||||
"@sandbox-agent/example-shared": "workspace:*",
|
||||
"@sandbox-agent/persist-postgres": "workspace:*",
|
||||
"pg": "latest",
|
||||
"sandbox-agent": "workspace:*"
|
||||
},
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ import { randomUUID } from "node:crypto";
|
|||
import { Client } from "pg";
|
||||
import { setTimeout as delay } from "node:timers/promises";
|
||||
import { SandboxAgent } from "sandbox-agent";
|
||||
import { PostgresSessionPersistDriver } from "@sandbox-agent/persist-postgres";
|
||||
import { PostgresSessionPersistDriver } from "./persist.ts";
|
||||
import { startDockerSandbox } from "@sandbox-agent/example-shared/docker";
|
||||
import { detectAgent } from "@sandbox-agent/example-shared";
|
||||
|
||||
|
|
|
|||
316
examples/persist-postgres/src/persist.ts
Normal file
316
examples/persist-postgres/src/persist.ts
Normal file
|
|
@ -0,0 +1,316 @@
|
|||
import { Pool, type PoolConfig } from "pg";
|
||||
import type { ListEventsRequest, ListPage, ListPageRequest, SessionEvent, SessionPersistDriver, SessionRecord } from "sandbox-agent";
|
||||
|
||||
const DEFAULT_LIST_LIMIT = 100;
|
||||
|
||||
export interface PostgresSessionPersistDriverOptions {
|
||||
connectionString?: string;
|
||||
pool?: Pool;
|
||||
poolConfig?: PoolConfig;
|
||||
schema?: string;
|
||||
}
|
||||
|
||||
export class PostgresSessionPersistDriver implements SessionPersistDriver {
|
||||
private readonly pool: Pool;
|
||||
private readonly ownsPool: boolean;
|
||||
private readonly schema: string;
|
||||
private readonly initialized: Promise<void>;
|
||||
|
||||
constructor(options: PostgresSessionPersistDriverOptions = {}) {
|
||||
this.schema = normalizeSchema(options.schema ?? "public");
|
||||
|
||||
if (options.pool) {
|
||||
this.pool = options.pool;
|
||||
this.ownsPool = false;
|
||||
} else {
|
||||
this.pool = new Pool({
|
||||
connectionString: options.connectionString,
|
||||
...options.poolConfig,
|
||||
});
|
||||
this.ownsPool = true;
|
||||
}
|
||||
|
||||
this.initialized = this.initialize();
|
||||
}
|
||||
|
||||
async getSession(id: string): Promise<SessionRecord | undefined> {
|
||||
await this.ready();
|
||||
|
||||
const result = await this.pool.query<SessionRow>(
|
||||
`SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, sandbox_id, session_init_json
|
||||
FROM ${this.table("sessions")}
|
||||
WHERE id = $1`,
|
||||
[id],
|
||||
);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return decodeSessionRow(result.rows[0]);
|
||||
}
|
||||
|
||||
async listSessions(request: ListPageRequest = {}): Promise<ListPage<SessionRecord>> {
|
||||
await this.ready();
|
||||
|
||||
const offset = parseCursor(request.cursor);
|
||||
const limit = normalizeLimit(request.limit);
|
||||
|
||||
const rowsResult = await this.pool.query<SessionRow>(
|
||||
`SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, sandbox_id, session_init_json
|
||||
FROM ${this.table("sessions")}
|
||||
ORDER BY created_at ASC, id ASC
|
||||
LIMIT $1 OFFSET $2`,
|
||||
[limit, offset],
|
||||
);
|
||||
|
||||
const countResult = await this.pool.query<{ count: string }>(`SELECT COUNT(*) AS count FROM ${this.table("sessions")}`);
|
||||
const total = parseInteger(countResult.rows[0]?.count ?? "0");
|
||||
const nextOffset = offset + rowsResult.rows.length;
|
||||
|
||||
return {
|
||||
items: rowsResult.rows.map(decodeSessionRow),
|
||||
nextCursor: nextOffset < total ? String(nextOffset) : undefined,
|
||||
};
|
||||
}
|
||||
|
||||
async updateSession(session: SessionRecord): Promise<void> {
|
||||
await this.ready();
|
||||
|
||||
await this.pool.query(
|
||||
`INSERT INTO ${this.table("sessions")} (
|
||||
id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, sandbox_id, session_init_json
|
||||
) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
|
||||
ON CONFLICT(id) DO UPDATE SET
|
||||
agent = EXCLUDED.agent,
|
||||
agent_session_id = EXCLUDED.agent_session_id,
|
||||
last_connection_id = EXCLUDED.last_connection_id,
|
||||
created_at = EXCLUDED.created_at,
|
||||
destroyed_at = EXCLUDED.destroyed_at,
|
||||
sandbox_id = EXCLUDED.sandbox_id,
|
||||
session_init_json = EXCLUDED.session_init_json`,
|
||||
[
|
||||
session.id,
|
||||
session.agent,
|
||||
session.agentSessionId,
|
||||
session.lastConnectionId,
|
||||
session.createdAt,
|
||||
session.destroyedAt ?? null,
|
||||
session.sandboxId ?? null,
|
||||
session.sessionInit ?? null,
|
||||
],
|
||||
);
|
||||
}
|
||||
|
||||
async listEvents(request: ListEventsRequest): Promise<ListPage<SessionEvent>> {
|
||||
await this.ready();
|
||||
|
||||
const offset = parseCursor(request.cursor);
|
||||
const limit = normalizeLimit(request.limit);
|
||||
|
||||
const rowsResult = await this.pool.query<EventRow>(
|
||||
`SELECT id, event_index, session_id, created_at, connection_id, sender, payload_json
|
||||
FROM ${this.table("events")}
|
||||
WHERE session_id = $1
|
||||
ORDER BY event_index ASC, id ASC
|
||||
LIMIT $2 OFFSET $3`,
|
||||
[request.sessionId, limit, offset],
|
||||
);
|
||||
|
||||
const countResult = await this.pool.query<{ count: string }>(`SELECT COUNT(*) AS count FROM ${this.table("events")} WHERE session_id = $1`, [
|
||||
request.sessionId,
|
||||
]);
|
||||
const total = parseInteger(countResult.rows[0]?.count ?? "0");
|
||||
const nextOffset = offset + rowsResult.rows.length;
|
||||
|
||||
return {
|
||||
items: rowsResult.rows.map(decodeEventRow),
|
||||
nextCursor: nextOffset < total ? String(nextOffset) : undefined,
|
||||
};
|
||||
}
|
||||
|
||||
async insertEvent(_sessionId: string, event: SessionEvent): Promise<void> {
|
||||
await this.ready();
|
||||
|
||||
await this.pool.query(
|
||||
`INSERT INTO ${this.table("events")} (
|
||||
id, event_index, session_id, created_at, connection_id, sender, payload_json
|
||||
) VALUES ($1, $2, $3, $4, $5, $6, $7)
|
||||
ON CONFLICT(id) DO UPDATE SET
|
||||
event_index = EXCLUDED.event_index,
|
||||
session_id = EXCLUDED.session_id,
|
||||
created_at = EXCLUDED.created_at,
|
||||
connection_id = EXCLUDED.connection_id,
|
||||
sender = EXCLUDED.sender,
|
||||
payload_json = EXCLUDED.payload_json`,
|
||||
[event.id, event.eventIndex, event.sessionId, event.createdAt, event.connectionId, event.sender, event.payload],
|
||||
);
|
||||
}
|
||||
|
||||
async close(): Promise<void> {
|
||||
if (!this.ownsPool) {
|
||||
return;
|
||||
}
|
||||
await this.pool.end();
|
||||
}
|
||||
|
||||
private async ready(): Promise<void> {
|
||||
await this.initialized;
|
||||
}
|
||||
|
||||
private table(name: "sessions" | "events"): string {
|
||||
return `"${this.schema}"."${name}"`;
|
||||
}
|
||||
|
||||
private async initialize(): Promise<void> {
|
||||
await this.pool.query(`CREATE SCHEMA IF NOT EXISTS "${this.schema}"`);
|
||||
|
||||
await this.pool.query(`
|
||||
CREATE TABLE IF NOT EXISTS ${this.table("sessions")} (
|
||||
id TEXT PRIMARY KEY,
|
||||
agent TEXT NOT NULL,
|
||||
agent_session_id TEXT NOT NULL,
|
||||
last_connection_id TEXT NOT NULL,
|
||||
created_at BIGINT NOT NULL,
|
||||
destroyed_at BIGINT,
|
||||
sandbox_id TEXT,
|
||||
session_init_json JSONB
|
||||
)
|
||||
`);
|
||||
|
||||
await this.pool.query(`
|
||||
ALTER TABLE ${this.table("sessions")}
|
||||
ADD COLUMN IF NOT EXISTS sandbox_id TEXT
|
||||
`);
|
||||
|
||||
await this.pool.query(`
|
||||
CREATE TABLE IF NOT EXISTS ${this.table("events")} (
|
||||
id TEXT PRIMARY KEY,
|
||||
event_index BIGINT NOT NULL,
|
||||
session_id TEXT NOT NULL,
|
||||
created_at BIGINT NOT NULL,
|
||||
connection_id TEXT NOT NULL,
|
||||
sender TEXT NOT NULL,
|
||||
payload_json JSONB NOT NULL
|
||||
)
|
||||
`);
|
||||
|
||||
await this.pool.query(`
|
||||
ALTER TABLE ${this.table("events")}
|
||||
ALTER COLUMN id TYPE TEXT USING id::TEXT
|
||||
`);
|
||||
|
||||
await this.pool.query(`
|
||||
ALTER TABLE ${this.table("events")}
|
||||
ADD COLUMN IF NOT EXISTS event_index BIGINT
|
||||
`);
|
||||
|
||||
await this.pool.query(`
|
||||
WITH ranked AS (
|
||||
SELECT id, ROW_NUMBER() OVER (PARTITION BY session_id ORDER BY created_at ASC, id ASC) AS ranked_index
|
||||
FROM ${this.table("events")}
|
||||
)
|
||||
UPDATE ${this.table("events")} AS current_events
|
||||
SET event_index = ranked.ranked_index
|
||||
FROM ranked
|
||||
WHERE current_events.id = ranked.id
|
||||
AND current_events.event_index IS NULL
|
||||
`);
|
||||
|
||||
await this.pool.query(`
|
||||
ALTER TABLE ${this.table("events")}
|
||||
ALTER COLUMN event_index SET NOT NULL
|
||||
`);
|
||||
|
||||
await this.pool.query(`
|
||||
CREATE INDEX IF NOT EXISTS idx_events_session_order
|
||||
ON ${this.table("events")}(session_id, event_index, id)
|
||||
`);
|
||||
}
|
||||
}
|
||||
|
||||
type SessionRow = {
|
||||
id: string;
|
||||
agent: string;
|
||||
agent_session_id: string;
|
||||
last_connection_id: string;
|
||||
created_at: string | number;
|
||||
destroyed_at: string | number | null;
|
||||
sandbox_id: string | null;
|
||||
session_init_json: unknown | null;
|
||||
};
|
||||
|
||||
type EventRow = {
|
||||
id: string | number;
|
||||
event_index: string | number;
|
||||
session_id: string;
|
||||
created_at: string | number;
|
||||
connection_id: string;
|
||||
sender: string;
|
||||
payload_json: unknown;
|
||||
};
|
||||
|
||||
function decodeSessionRow(row: SessionRow): SessionRecord {
|
||||
return {
|
||||
id: row.id,
|
||||
agent: row.agent,
|
||||
agentSessionId: row.agent_session_id,
|
||||
lastConnectionId: row.last_connection_id,
|
||||
createdAt: parseInteger(row.created_at),
|
||||
destroyedAt: row.destroyed_at === null ? undefined : parseInteger(row.destroyed_at),
|
||||
sandboxId: row.sandbox_id ?? undefined,
|
||||
sessionInit: row.session_init_json ? (row.session_init_json as SessionRecord["sessionInit"]) : undefined,
|
||||
};
|
||||
}
|
||||
|
||||
function decodeEventRow(row: EventRow): SessionEvent {
|
||||
return {
|
||||
id: String(row.id),
|
||||
eventIndex: parseInteger(row.event_index),
|
||||
sessionId: row.session_id,
|
||||
createdAt: parseInteger(row.created_at),
|
||||
connectionId: row.connection_id,
|
||||
sender: parseSender(row.sender),
|
||||
payload: row.payload_json as SessionEvent["payload"],
|
||||
};
|
||||
}
|
||||
|
||||
function normalizeLimit(limit: number | undefined): number {
|
||||
if (!Number.isFinite(limit) || (limit ?? 0) < 1) {
|
||||
return DEFAULT_LIST_LIMIT;
|
||||
}
|
||||
return Math.floor(limit as number);
|
||||
}
|
||||
|
||||
function parseCursor(cursor: string | undefined): number {
|
||||
if (!cursor) {
|
||||
return 0;
|
||||
}
|
||||
const parsed = Number.parseInt(cursor, 10);
|
||||
if (!Number.isFinite(parsed) || parsed < 0) {
|
||||
return 0;
|
||||
}
|
||||
return parsed;
|
||||
}
|
||||
|
||||
function parseInteger(value: string | number): number {
|
||||
const parsed = typeof value === "number" ? value : Number.parseInt(value, 10);
|
||||
if (!Number.isFinite(parsed)) {
|
||||
throw new Error(`Invalid integer value returned by postgres: ${String(value)}`);
|
||||
}
|
||||
return parsed;
|
||||
}
|
||||
|
||||
function parseSender(value: string): SessionEvent["sender"] {
|
||||
if (value === "agent" || value === "client") {
|
||||
return value;
|
||||
}
|
||||
throw new Error(`Invalid sender value returned by postgres: ${value}`);
|
||||
}
|
||||
|
||||
function normalizeSchema(schema: string): string {
|
||||
if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(schema)) {
|
||||
throw new Error(`Invalid schema name '${schema}'. Use letters, numbers, and underscores only.`);
|
||||
}
|
||||
return schema;
|
||||
}
|
||||
|
|
@ -8,10 +8,11 @@
|
|||
},
|
||||
"dependencies": {
|
||||
"@sandbox-agent/example-shared": "workspace:*",
|
||||
"@sandbox-agent/persist-sqlite": "workspace:*",
|
||||
"better-sqlite3": "^11.0.0",
|
||||
"sandbox-agent": "workspace:*"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/better-sqlite3": "^7.0.0",
|
||||
"@types/node": "latest",
|
||||
"tsx": "latest",
|
||||
"typescript": "latest"
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import { SandboxAgent } from "sandbox-agent";
|
||||
import { SQLiteSessionPersistDriver } from "@sandbox-agent/persist-sqlite";
|
||||
import { SQLiteSessionPersistDriver } from "./persist.ts";
|
||||
import { startDockerSandbox } from "@sandbox-agent/example-shared/docker";
|
||||
import { detectAgent } from "@sandbox-agent/example-shared";
|
||||
|
||||
|
|
|
|||
294
examples/persist-sqlite/src/persist.ts
Normal file
294
examples/persist-sqlite/src/persist.ts
Normal file
|
|
@ -0,0 +1,294 @@
|
|||
import Database from "better-sqlite3";
|
||||
import type { ListEventsRequest, ListPage, ListPageRequest, SessionEvent, SessionPersistDriver, SessionRecord } from "sandbox-agent";
|
||||
|
||||
const DEFAULT_LIST_LIMIT = 100;
|
||||
|
||||
export interface SQLiteSessionPersistDriverOptions {
|
||||
filename?: string;
|
||||
}
|
||||
|
||||
export class SQLiteSessionPersistDriver implements SessionPersistDriver {
|
||||
private readonly db: Database.Database;
|
||||
|
||||
constructor(options: SQLiteSessionPersistDriverOptions = {}) {
|
||||
this.db = new Database(options.filename ?? ":memory:");
|
||||
this.initialize();
|
||||
}
|
||||
|
||||
async getSession(id: string): Promise<SessionRecord | undefined> {
|
||||
const row = this.db
|
||||
.prepare(
|
||||
`SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, sandbox_id, session_init_json
|
||||
FROM sessions WHERE id = ?`,
|
||||
)
|
||||
.get(id) as SessionRow | undefined;
|
||||
|
||||
if (!row) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return decodeSessionRow(row);
|
||||
}
|
||||
|
||||
async listSessions(request: ListPageRequest = {}): Promise<ListPage<SessionRecord>> {
|
||||
const offset = parseCursor(request.cursor);
|
||||
const limit = normalizeLimit(request.limit);
|
||||
|
||||
const rows = this.db
|
||||
.prepare(
|
||||
`SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, sandbox_id, session_init_json
|
||||
FROM sessions
|
||||
ORDER BY created_at ASC, id ASC
|
||||
LIMIT ? OFFSET ?`,
|
||||
)
|
||||
.all(limit, offset) as SessionRow[];
|
||||
|
||||
const countRow = this.db.prepare(`SELECT COUNT(*) as count FROM sessions`).get() as { count: number };
|
||||
const nextOffset = offset + rows.length;
|
||||
|
||||
return {
|
||||
items: rows.map(decodeSessionRow),
|
||||
nextCursor: nextOffset < countRow.count ? String(nextOffset) : undefined,
|
||||
};
|
||||
}
|
||||
|
||||
async updateSession(session: SessionRecord): Promise<void> {
|
||||
this.db
|
||||
.prepare(
|
||||
`INSERT INTO sessions (
|
||||
id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, sandbox_id, session_init_json
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON CONFLICT(id) DO UPDATE SET
|
||||
agent = excluded.agent,
|
||||
agent_session_id = excluded.agent_session_id,
|
||||
last_connection_id = excluded.last_connection_id,
|
||||
created_at = excluded.created_at,
|
||||
destroyed_at = excluded.destroyed_at,
|
||||
sandbox_id = excluded.sandbox_id,
|
||||
session_init_json = excluded.session_init_json`,
|
||||
)
|
||||
.run(
|
||||
session.id,
|
||||
session.agent,
|
||||
session.agentSessionId,
|
||||
session.lastConnectionId,
|
||||
session.createdAt,
|
||||
session.destroyedAt ?? null,
|
||||
session.sandboxId ?? null,
|
||||
session.sessionInit ? JSON.stringify(session.sessionInit) : null,
|
||||
);
|
||||
}
|
||||
|
||||
async listEvents(request: ListEventsRequest): Promise<ListPage<SessionEvent>> {
|
||||
const offset = parseCursor(request.cursor);
|
||||
const limit = normalizeLimit(request.limit);
|
||||
|
||||
const rows = this.db
|
||||
.prepare(
|
||||
`SELECT id, event_index, session_id, created_at, connection_id, sender, payload_json
|
||||
FROM events
|
||||
WHERE session_id = ?
|
||||
ORDER BY event_index ASC, id ASC
|
||||
LIMIT ? OFFSET ?`,
|
||||
)
|
||||
.all(request.sessionId, limit, offset) as EventRow[];
|
||||
|
||||
const countRow = this.db.prepare(`SELECT COUNT(*) as count FROM events WHERE session_id = ?`).get(request.sessionId) as { count: number };
|
||||
|
||||
const nextOffset = offset + rows.length;
|
||||
|
||||
return {
|
||||
items: rows.map(decodeEventRow),
|
||||
nextCursor: nextOffset < countRow.count ? String(nextOffset) : undefined,
|
||||
};
|
||||
}
|
||||
|
||||
async insertEvent(_sessionId: string, event: SessionEvent): Promise<void> {
|
||||
this.db
|
||||
.prepare(
|
||||
`INSERT INTO events (
|
||||
id, event_index, session_id, created_at, connection_id, sender, payload_json
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||
ON CONFLICT(id) DO UPDATE SET
|
||||
event_index = excluded.event_index,
|
||||
session_id = excluded.session_id,
|
||||
created_at = excluded.created_at,
|
||||
connection_id = excluded.connection_id,
|
||||
sender = excluded.sender,
|
||||
payload_json = excluded.payload_json`,
|
||||
)
|
||||
.run(event.id, event.eventIndex, event.sessionId, event.createdAt, event.connectionId, event.sender, JSON.stringify(event.payload));
|
||||
}
|
||||
|
||||
close(): void {
|
||||
this.db.close();
|
||||
}
|
||||
|
||||
private initialize(): void {
|
||||
this.db.exec(`
|
||||
CREATE TABLE IF NOT EXISTS sessions (
|
||||
id TEXT PRIMARY KEY,
|
||||
agent TEXT NOT NULL,
|
||||
agent_session_id TEXT NOT NULL,
|
||||
last_connection_id TEXT NOT NULL,
|
||||
created_at INTEGER NOT NULL,
|
||||
destroyed_at INTEGER,
|
||||
sandbox_id TEXT,
|
||||
session_init_json TEXT
|
||||
)
|
||||
`);
|
||||
|
||||
const sessionColumns = this.db.prepare(`PRAGMA table_info(sessions)`).all() as TableInfoRow[];
|
||||
if (!sessionColumns.some((column) => column.name === "sandbox_id")) {
|
||||
this.db.exec(`ALTER TABLE sessions ADD COLUMN sandbox_id TEXT`);
|
||||
}
|
||||
|
||||
this.ensureEventsTable();
|
||||
}
|
||||
|
||||
private ensureEventsTable(): void {
|
||||
const tableInfo = this.db.prepare(`PRAGMA table_info(events)`).all() as TableInfoRow[];
|
||||
if (tableInfo.length === 0) {
|
||||
this.createEventsTable();
|
||||
return;
|
||||
}
|
||||
|
||||
const idColumn = tableInfo.find((column) => column.name === "id");
|
||||
const hasEventIndex = tableInfo.some((column) => column.name === "event_index");
|
||||
const idType = (idColumn?.type ?? "").trim().toUpperCase();
|
||||
const idIsText = idType === "TEXT";
|
||||
|
||||
if (!idIsText || !hasEventIndex) {
|
||||
this.rebuildEventsTable(hasEventIndex);
|
||||
}
|
||||
|
||||
this.db.exec(`
|
||||
CREATE INDEX IF NOT EXISTS idx_events_session_order
|
||||
ON events(session_id, event_index, id)
|
||||
`);
|
||||
}
|
||||
|
||||
private createEventsTable(): void {
|
||||
this.db.exec(`
|
||||
CREATE TABLE IF NOT EXISTS events (
|
||||
id TEXT PRIMARY KEY,
|
||||
event_index INTEGER NOT NULL,
|
||||
session_id TEXT NOT NULL,
|
||||
created_at INTEGER NOT NULL,
|
||||
connection_id TEXT NOT NULL,
|
||||
sender TEXT NOT NULL,
|
||||
payload_json TEXT NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_events_session_order
|
||||
ON events(session_id, event_index, id)
|
||||
`);
|
||||
}
|
||||
|
||||
private rebuildEventsTable(hasEventIndex: boolean): void {
|
||||
this.db.exec(`
|
||||
ALTER TABLE events RENAME TO events_legacy;
|
||||
`);
|
||||
|
||||
this.createEventsTable();
|
||||
|
||||
if (hasEventIndex) {
|
||||
this.db.exec(`
|
||||
INSERT INTO events (id, event_index, session_id, created_at, connection_id, sender, payload_json)
|
||||
SELECT
|
||||
CAST(id AS TEXT),
|
||||
COALESCE(event_index, ROW_NUMBER() OVER (PARTITION BY session_id ORDER BY created_at ASC, id ASC)),
|
||||
session_id,
|
||||
created_at,
|
||||
connection_id,
|
||||
sender,
|
||||
payload_json
|
||||
FROM events_legacy
|
||||
`);
|
||||
} else {
|
||||
this.db.exec(`
|
||||
INSERT INTO events (id, event_index, session_id, created_at, connection_id, sender, payload_json)
|
||||
SELECT
|
||||
CAST(id AS TEXT),
|
||||
ROW_NUMBER() OVER (PARTITION BY session_id ORDER BY created_at ASC, id ASC),
|
||||
session_id,
|
||||
created_at,
|
||||
connection_id,
|
||||
sender,
|
||||
payload_json
|
||||
FROM events_legacy
|
||||
`);
|
||||
}
|
||||
|
||||
this.db.exec(`DROP TABLE events_legacy`);
|
||||
}
|
||||
}
|
||||
|
||||
type SessionRow = {
|
||||
id: string;
|
||||
agent: string;
|
||||
agent_session_id: string;
|
||||
last_connection_id: string;
|
||||
created_at: number;
|
||||
destroyed_at: number | null;
|
||||
sandbox_id: string | null;
|
||||
session_init_json: string | null;
|
||||
};
|
||||
|
||||
type EventRow = {
|
||||
id: string;
|
||||
event_index: number;
|
||||
session_id: string;
|
||||
created_at: number;
|
||||
connection_id: string;
|
||||
sender: "client" | "agent";
|
||||
payload_json: string;
|
||||
};
|
||||
|
||||
type TableInfoRow = {
|
||||
name: string;
|
||||
type: string;
|
||||
};
|
||||
|
||||
function decodeSessionRow(row: SessionRow): SessionRecord {
|
||||
return {
|
||||
id: row.id,
|
||||
agent: row.agent,
|
||||
agentSessionId: row.agent_session_id,
|
||||
lastConnectionId: row.last_connection_id,
|
||||
createdAt: row.created_at,
|
||||
destroyedAt: row.destroyed_at ?? undefined,
|
||||
sandboxId: row.sandbox_id ?? undefined,
|
||||
sessionInit: row.session_init_json ? (JSON.parse(row.session_init_json) as SessionRecord["sessionInit"]) : undefined,
|
||||
};
|
||||
}
|
||||
|
||||
function decodeEventRow(row: EventRow): SessionEvent {
|
||||
return {
|
||||
id: row.id,
|
||||
eventIndex: row.event_index,
|
||||
sessionId: row.session_id,
|
||||
createdAt: row.created_at,
|
||||
connectionId: row.connection_id,
|
||||
sender: row.sender,
|
||||
payload: JSON.parse(row.payload_json),
|
||||
};
|
||||
}
|
||||
|
||||
function normalizeLimit(limit: number | undefined): number {
|
||||
if (!Number.isFinite(limit) || (limit ?? 0) < 1) {
|
||||
return DEFAULT_LIST_LIMIT;
|
||||
}
|
||||
return Math.floor(limit as number);
|
||||
}
|
||||
|
||||
function parseCursor(cursor: string | undefined): number {
|
||||
if (!cursor) {
|
||||
return 0;
|
||||
}
|
||||
const parsed = Number.parseInt(cursor, 10);
|
||||
if (!Number.isFinite(parsed) || parsed < 0) {
|
||||
return 0;
|
||||
}
|
||||
return parsed;
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue