Integrate OpenHandoff factory workspace (#212)

This commit is contained in:
Nathan Flurry 2026-03-09 14:00:20 -07:00 committed by GitHub
parent 3d9476ed0b
commit bf282199b5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
251 changed files with 42824 additions and 692 deletions

View file

@ -0,0 +1,136 @@
import { access, mkdir, readdir, readFile, rm, writeFile } from "node:fs/promises";
import { dirname, join, resolve } from "node:path";
/**
 * Shape of drizzle-kit's meta/_journal.json as consumed by this script.
 * Only the fields actually read below are modeled; any extra journal
 * fields are ignored by JSON.parse's structural cast.
 */
type Journal = {
  entries?: Array<{
    idx: number; // 0-based migration index; pairs with the sorted *.sql list
    when: number; // generation timestamp recorded by drizzle-kit
    tag: string; // migration file name without the ".sql" extension
    breakpoints?: boolean;
    version?: string;
  }>;
};
/**
 * Build the object key used for one migration in the generated
 * migrations.ts, e.g. 0 -> "m0000", 12 -> "m0012". Indexes wider than
 * four digits keep their full width.
 */
function padMigrationKey(idx: number): string {
  const digits = String(idx);
  const padded = digits.length >= 4 ? digits : "0".repeat(4 - digits.length) + digits;
  return "m" + padded;
}
/**
 * Escape raw SQL so it can be embedded inside a JS template literal.
 *
 * Backslashes are escaped FIRST: without that pass, any literal backslash
 * already present in the SQL (e.g. inside a quoted string) would be
 * reinterpreted as a JS escape sequence in the generated file, and the
 * backslashes we insert for backticks / `${` would be double-escaped if
 * the order were reversed.
 */
function escapeTemplateLiteral(value: string): string {
  return value
    .replace(/\\/g, "\\\\")
    .replace(/`/g, "\\`")
    .replace(/\$\{/g, "\\${");
}
/**
 * True when `path` exists and is accessible to this process.
 * Uses access() so the check only stats the file; the previous
 * readFile-based probe loaded the file's entire contents just to
 * test for existence.
 */
async function fileExists(path: string): Promise<boolean> {
  try {
    await access(path);
    return true;
  } catch {
    return false;
  }
}
/**
 * Depth-first directory walk. Invokes `onDir` for `root` and every nested
 * directory, skipping node_modules, dist, and dot-directories.
 * The directory listing is taken before `onDir` runs, so directories the
 * callback creates inside `root` are not visited.
 */
async function walkDirectories(root: string, onDir: (dir: string) => Promise<void>): Promise<void> {
  const children = await readdir(root, { withFileTypes: true });
  await onDir(root);
  for (const child of children) {
    if (!child.isDirectory()) continue;
    const skipped =
      child.name === "node_modules" || child.name === "dist" || child.name.startsWith(".");
    if (skipped) continue;
    await walkDirectories(join(root, child.name), onDir);
  }
}
/**
 * Generate the sibling migrations.ts for one drizzle output directory.
 *
 * Reads meta/_journal.json plus the numbered *.sql files in `drizzleDir`
 * and writes ../migrations.ts containing the journal and each migration's
 * raw SQL embedded as a template literal. Silently no-ops when there is
 * no journal or no SQL files.
 */
async function generateOne(drizzleDir: string): Promise<void> {
  const metaDir = resolve(drizzleDir, "meta");
  const journalPath = resolve(metaDir, "_journal.json");
  if (!(await fileExists(journalPath))) {
    return;
  }
  // Lexicographic sort — drizzle's zero-padded "NNNN_tag.sql" names mean the
  // file at position `idx` lines up with journal entry `idx`.
  // NOTE(review): assumes no stray .sql files in the directory; verify.
  const drizzleEntries = (await readdir(drizzleDir, { withFileTypes: true }))
    .filter((entry) => entry.isFile() && entry.name.endsWith(".sql"))
    .map((entry) => entry.name)
    .sort();
  if (drizzleEntries.length === 0) {
    return;
  }
  const journalRaw = await readFile(journalPath, "utf8");
  // NOTE(review): the parse result is cast, not validated — assumes the
  // journal matches the Journal shape; confirm drizzle-kit guarantees this.
  const journal = JSON.parse(journalRaw) as Journal;
  const entries = journal.entries ?? [];
  // Padded key ("m0000") -> raw SQL for that migration.
  const sqlByKey = new Map<string, string>();
  for (const entry of entries) {
    const file = drizzleEntries[entry.idx];
    if (!file) {
      throw new Error(`Missing migration SQL file for idx=${entry.idx} in ${drizzleDir}`);
    }
    const sqlPath = resolve(drizzleDir, file);
    const sqlRaw = await readFile(sqlPath, "utf8");
    sqlByKey.set(padMigrationKey(entry.idx), sqlRaw);
  }
  // One "mNNNN: `...sql...`," property line per journal entry, in journal order.
  const migrationsObjectLines: string[] = [];
  for (const entry of entries) {
    const key = padMigrationKey(entry.idx);
    const sql = sqlByKey.get(key);
    if (!sql) continue;
    migrationsObjectLines.push(` ${key}: \`${escapeTemplateLiteral(sql)}\`,`);
  }
  const banner = `// This file is generated by src/actors/_scripts/generate-actor-migrations.ts.
// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql).
// Do not hand-edit this file.
`;
  // Re-serialize only the journal fields consumers need; `breakpoints` is
  // normalized to a plain boolean and `version` is intentionally dropped.
  const journalLiteral = JSON.stringify(
    {
      entries: entries.map((entry) => ({
        idx: entry.idx,
        when: entry.when,
        tag: entry.tag,
        breakpoints: Boolean(entry.breakpoints),
      })),
    },
    null,
    2
  );
  const outPath = resolve(drizzleDir, "..", "migrations.ts");
  const content = `${banner}
const journal = ${journalLiteral} as const;
export default {
journal,
migrations: {
${migrationsObjectLines.join("\n")}
} as const
};
`;
  await mkdir(dirname(outPath), { recursive: true });
  await writeFile(outPath, content, "utf8");
  // drizzle-kit generates a JS helper file by default; delete to keep TS-only sources.
  await rm(resolve(drizzleDir, "migrations.js"), { force: true });
}
/**
 * Walk every actor directory under packages/backend/src/actors and
 * regenerate migrations.ts for each db/drizzle output folder found.
 */
async function main(): Promise<void> {
  // This script lives at src/actors/_scripts; three levels up is packages/backend.
  const packageRoot = resolve(import.meta.dirname, "..", "..", "..");
  const actorsRoot = resolve(packageRoot, "src", "actors");
  const drizzleSuffix = join("db", "drizzle");
  await walkDirectories(actorsRoot, async (dir) => {
    if (!dir.endsWith(drizzleSuffix)) return;
    await generateOne(dir);
  });
}
// Entry point: run the generator and surface any failure on stderr with a
// non-zero exit code, preferring the stack trace when one is available.
main().catch((error: unknown) => {
  let message: string;
  if (error instanceof Error) {
    message = error.stack ?? error.message;
  } else {
    message = String(error);
  }
  // eslint-disable-next-line no-console
  console.error(message);
  process.exitCode = 1;
});

View file

@ -0,0 +1,43 @@
import type { AppConfig } from "@openhandoff/shared";
import type { BackendDriver } from "../driver.js";
import type { NotificationService } from "../notifications/index.js";
import type { ProviderRegistry } from "../providers/index.js";
// Module-level singletons populated once by initActorRuntimeContext() and
// read back by getActorRuntimeContext(). All start null until init runs.
let runtimeConfig: AppConfig | null = null;
let providerRegistry: ProviderRegistry | null = null;
let notificationService: NotificationService | null = null;
let runtimeDriver: BackendDriver | null = null;
/**
 * Install the process-wide runtime context consumed by actors.
 * Optional pieces (notifications, driver) default to null when omitted.
 * Calling again replaces the previously installed context.
 */
export function initActorRuntimeContext(
  config: AppConfig,
  providers: ProviderRegistry,
  notifications?: NotificationService,
  driver?: BackendDriver
): void {
  runtimeDriver = driver ?? null;
  notificationService = notifications ?? null;
  providerRegistry = providers;
  runtimeConfig = config;
}
/**
 * Read back the runtime context installed by initActorRuntimeContext().
 *
 * @throws Error when init has not run (config/providers missing) or when
 *   no driver was supplied at init time. `notifications` may be null.
 */
export function getActorRuntimeContext(): {
  config: AppConfig;
  providers: ProviderRegistry;
  notifications: NotificationService | null;
  driver: BackendDriver;
} {
  const config = runtimeConfig;
  const providers = providerRegistry;
  if (config === null || providers === null) {
    throw new Error("Actor runtime context not initialized");
  }
  const driver = runtimeDriver;
  if (driver === null) {
    throw new Error("Actor runtime context missing driver");
  }
  return {
    config,
    providers,
    notifications: notificationService,
    driver,
  };
}

View file

@ -0,0 +1,112 @@
import type { HandoffStatus, ProviderId } from "@openhandoff/shared";
/** Emitted when a new handoff is created for a repo. */
export interface HandoffCreatedEvent {
  workspaceId: string;
  repoId: string;
  handoffId: string;
  providerId: ProviderId;
  branchName: string;
  title: string;
}
/** Emitted when a handoff's lifecycle status changes; `message` carries a human-readable note. */
export interface HandoffStatusEvent {
  workspaceId: string;
  repoId: string;
  handoffId: string;
  status: HandoffStatus;
  message: string;
}
/** Emitted when a project's snapshot is updated. */
export interface ProjectSnapshotEvent {
  workspaceId: string;
  repoId: string;
  updatedAt: number; // presumably epoch millis — confirm against emitter
}
/** Emitted when an agent session starts for a handoff. */
export interface AgentStartedEvent {
  workspaceId: string;
  repoId: string;
  handoffId: string;
  sessionId: string;
}
/** Emitted when an agent session goes idle. */
export interface AgentIdleEvent {
  workspaceId: string;
  repoId: string;
  handoffId: string;
  sessionId: string;
}
/** Emitted when an agent errors; note this payload has no sessionId. */
export interface AgentErrorEvent {
  workspaceId: string;
  repoId: string;
  handoffId: string;
  message: string;
}
/** Emitted when a pull request is opened for a handoff. */
export interface PrCreatedEvent {
  workspaceId: string;
  repoId: string;
  handoffId: string;
  prNumber: number;
  url: string;
}
/** Emitted when a pull request closes; `merged` distinguishes merge from plain close. */
export interface PrClosedEvent {
  workspaceId: string;
  repoId: string;
  handoffId: string;
  prNumber: number;
  merged: boolean;
}
/** Emitted when a review lands on a handoff's pull request. */
export interface PrReviewEvent {
  workspaceId: string;
  repoId: string;
  handoffId: string;
  prNumber: number;
  reviewer: string;
  status: string; // free-form review state — confirm allowed values with emitter
}
/** Emitted when CI status changes on a handoff's pull request. */
export interface CiStatusChangedEvent {
  workspaceId: string;
  repoId: string;
  handoffId: string;
  prNumber: number;
  status: string; // free-form CI state — confirm allowed values with emitter
}
/** The publish-pipeline steps a handoff can report progress for. */
export type HandoffStepName = "auto_commit" | "push" | "pr_submit";
/** Progress states for a single pipeline step. */
export type HandoffStepStatus = "started" | "completed" | "skipped" | "failed";
/** Emitted as each publish step starts, completes, is skipped, or fails. */
export interface HandoffStepEvent {
  workspaceId: string;
  repoId: string;
  handoffId: string;
  step: HandoffStepName;
  status: HandoffStepStatus;
  message: string;
}
/** Emitted when a handoff's working branch changes. */
export interface BranchSwitchedEvent {
  workspaceId: string;
  repoId: string;
  handoffId: string;
  branchName: string;
}
/** Emitted when a session attaches to a handoff. */
export interface SessionAttachedEvent {
  workspaceId: string;
  repoId: string;
  handoffId: string;
  sessionId: string;
}
/** Emitted when a handoff's branch is synced; `strategy` names how (e.g. merge/rebase — confirm). */
export interface BranchSyncedEvent {
  workspaceId: string;
  repoId: string;
  handoffId: string;
  branchName: string;
  strategy: string;
}

View file

@ -0,0 +1,160 @@
import {
handoffKey,
handoffStatusSyncKey,
historyKey,
projectBranchSyncKey,
projectKey,
projectPrSyncKey,
sandboxInstanceKey,
workspaceKey
} from "./keys.js";
import type { ProviderId } from "@openhandoff/shared";
// NOTE(review): `c` is the rivetkit actor/request context; typed `any`
// because the concrete context type differs per call site — confirm whether
// a shared context interface exists upstream to replace these `any`s.
export function actorClient(c: any) {
  return c.client();
}
/** Fetch the workspace actor, creating it with the workspace id as input if absent. */
export async function getOrCreateWorkspace(c: any, workspaceId: string) {
  return await actorClient(c).workspace.getOrCreate(workspaceKey(workspaceId), {
    createWithInput: workspaceId
  });
}
/** Fetch the project actor for (workspace, repo), creating it with its identifying input if absent. */
export async function getOrCreateProject(c: any, workspaceId: string, repoId: string, remoteUrl: string) {
  return await actorClient(c).project.getOrCreate(projectKey(workspaceId, repoId), {
    createWithInput: {
      workspaceId,
      repoId,
      remoteUrl
    }
  });
}
/** Handle to an existing project actor (no creation). */
export function getProject(c: any, workspaceId: string, repoId: string) {
  return actorClient(c).project.get(projectKey(workspaceId, repoId));
}
/** Handle to an existing handoff actor (no creation). */
export function getHandoff(c: any, workspaceId: string, repoId: string, handoffId: string) {
  return actorClient(c).handoff.get(handoffKey(workspaceId, repoId, handoffId));
}
/** Fetch the handoff actor, creating it with the caller-supplied input if absent. */
export async function getOrCreateHandoff(
  c: any,
  workspaceId: string,
  repoId: string,
  handoffId: string,
  createWithInput: Record<string, unknown>
) {
  return await actorClient(c).handoff.getOrCreate(handoffKey(workspaceId, repoId, handoffId), {
    createWithInput
  });
}
/** Fetch the per-repo history actor, creating it if absent. */
export async function getOrCreateHistory(c: any, workspaceId: string, repoId: string) {
  return await actorClient(c).history.getOrCreate(historyKey(workspaceId, repoId), {
    createWithInput: {
      workspaceId,
      repoId
    }
  });
}
/** Fetch the per-repo PR polling actor, creating it with its repo path and poll interval if absent. */
export async function getOrCreateProjectPrSync(
  c: any,
  workspaceId: string,
  repoId: string,
  repoPath: string,
  intervalMs: number
) {
  return await actorClient(c).projectPrSync.getOrCreate(projectPrSyncKey(workspaceId, repoId), {
    createWithInput: {
      workspaceId,
      repoId,
      repoPath,
      intervalMs
    }
  });
}
/** Fetch the per-repo branch polling actor, creating it with its repo path and poll interval if absent. */
export async function getOrCreateProjectBranchSync(
  c: any,
  workspaceId: string,
  repoId: string,
  repoPath: string,
  intervalMs: number
) {
  return await actorClient(c).projectBranchSync.getOrCreate(projectBranchSyncKey(workspaceId, repoId), {
    createWithInput: {
      workspaceId,
      repoId,
      repoPath,
      intervalMs
    }
  });
}
/** Handle to an existing sandbox-instance actor (no creation). */
export function getSandboxInstance(c: any, workspaceId: string, providerId: ProviderId, sandboxId: string) {
  return actorClient(c).sandboxInstance.get(sandboxInstanceKey(workspaceId, providerId, sandboxId));
}
/** Fetch the sandbox-instance actor, creating it with the caller-supplied input if absent. */
export async function getOrCreateSandboxInstance(
  c: any,
  workspaceId: string,
  providerId: ProviderId,
  sandboxId: string,
  createWithInput: Record<string, unknown>
) {
  return await actorClient(c).sandboxInstance.getOrCreate(
    sandboxInstanceKey(workspaceId, providerId, sandboxId),
    { createWithInput }
  );
}
/** Fetch the per-session status-sync actor (keyed down to sandbox + session), creating it if absent. */
export async function getOrCreateHandoffStatusSync(
  c: any,
  workspaceId: string,
  repoId: string,
  handoffId: string,
  sandboxId: string,
  sessionId: string,
  createWithInput: Record<string, unknown>
) {
  return await actorClient(c).handoffStatusSync.getOrCreate(
    handoffStatusSyncKey(workspaceId, repoId, handoffId, sandboxId, sessionId),
    {
      createWithInput
    }
  );
}
// self* helpers: from inside an actor, obtain a client handle to that same
// actor via its own id (c.actorId) — used e.g. to enqueue to one's own queues.
export function selfProjectPrSync(c: any) {
  return actorClient(c).projectPrSync.getForId(c.actorId);
}
export function selfProjectBranchSync(c: any) {
  return actorClient(c).projectBranchSync.getForId(c.actorId);
}
export function selfHandoffStatusSync(c: any) {
  return actorClient(c).handoffStatusSync.getForId(c.actorId);
}
export function selfHistory(c: any) {
  return actorClient(c).history.getForId(c.actorId);
}
export function selfHandoff(c: any) {
  return actorClient(c).handoff.getForId(c.actorId);
}
export function selfWorkspace(c: any) {
  return actorClient(c).workspace.getForId(c.actorId);
}
export function selfProject(c: any) {
  return actorClient(c).project.getForId(c.actorId);
}
export function selfSandboxInstance(c: any) {
  return actorClient(c).sandboxInstance.getForId(c.actorId);
}

View file

@ -0,0 +1,108 @@
import { actor, queue } from "rivetkit";
import { workflow } from "rivetkit/workflow";
import type { ProviderId } from "@openhandoff/shared";
import { getHandoff, getSandboxInstance, selfHandoffStatusSync } from "../handles.js";
import { logActorWarning, resolveErrorMessage, resolveErrorStack } from "../logging.js";
import { type PollingControlState, runWorkflowPollingLoop } from "../polling.js";
/** Creation input for the handoffStatusSync actor: identifies the session to poll and how often. */
export interface HandoffStatusSyncInput {
  workspaceId: string;
  repoId: string;
  handoffId: string;
  providerId: ProviderId;
  sandboxId: string;
  sessionId: string;
  intervalMs: number;
}
/** Payload for the set-interval control command. */
interface SetIntervalCommand {
  intervalMs: number;
}
/** Persisted actor state: the polling target plus the loop-control fields from PollingControlState. */
interface HandoffStatusSyncState extends PollingControlState {
  workspaceId: string;
  repoId: string;
  handoffId: string;
  providerId: ProviderId;
  sandboxId: string;
  sessionId: string;
}
// Queue names for the polling-loop control verbs; fully namespaced so they
// cannot collide with other actors' queue names.
const CONTROL = {
  start: "handoff.status_sync.control.start",
  stop: "handoff.status_sync.control.stop",
  setInterval: "handoff.status_sync.control.set_interval",
  force: "handoff.status_sync.control.force"
} as const;
/**
 * One poll tick: read the session's status from the owning sandbox-instance
 * actor, then push it to the parent handoff actor stamped with the current
 * wall-clock time.
 */
async function pollSessionStatus(c: { state: HandoffStatusSyncState }): Promise<void> {
  const sandboxInstance = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, c.state.sandboxId);
  const status = await sandboxInstance.sessionStatus({ sessionId: c.state.sessionId });
  const parent = getHandoff(c, c.state.workspaceId, c.state.repoId, c.state.handoffId);
  await parent.syncWorkbenchSessionStatus({
    sessionId: c.state.sessionId,
    status: status.status,
    at: Date.now()
  });
}
/**
 * Actor that periodically mirrors one sandbox session's status onto its
 * handoff. Public actions forward control commands onto queues which the
 * workflow polling loop consumes; the loop itself lives in runWorkflowPollingLoop.
 */
export const handoffStatusSync = actor({
  // One queue per control verb (see CONTROL above).
  queues: {
    [CONTROL.start]: queue(),
    [CONTROL.stop]: queue(),
    [CONTROL.setInterval]: queue(),
    [CONTROL.force]: queue(),
  },
  options: {
    // Polling actors rely on timer-based wakeups; sleeping would pause the timer and stop polling.
    noSleep: true
  },
  // State mirrors the creation input; polling starts enabled (running: true).
  createState: (_c, input: HandoffStatusSyncInput): HandoffStatusSyncState => ({
    workspaceId: input.workspaceId,
    repoId: input.repoId,
    handoffId: input.handoffId,
    providerId: input.providerId,
    sandboxId: input.sandboxId,
    sessionId: input.sessionId,
    intervalMs: input.intervalMs,
    running: true
  }),
  actions: {
    // Each action enqueues to this actor's own queue (via selfHandoffStatusSync)
    // and waits for the loop to acknowledge, with a 15s timeout (5min for force,
    // since a forced poll does real work before acking).
    async start(c): Promise<void> {
      const self = selfHandoffStatusSync(c);
      await self.send(CONTROL.start, {}, { wait: true, timeout: 15_000 });
    },
    async stop(c): Promise<void> {
      const self = selfHandoffStatusSync(c);
      await self.send(CONTROL.stop, {}, { wait: true, timeout: 15_000 });
    },
    async setIntervalMs(c, payload: SetIntervalCommand): Promise<void> {
      const self = selfHandoffStatusSync(c);
      await self.send(CONTROL.setInterval, payload, { wait: true, timeout: 15_000 });
    },
    async force(c): Promise<void> {
      const self = selfHandoffStatusSync(c);
      await self.send(CONTROL.force, {}, { wait: true, timeout: 5 * 60_000 });
    }
  },
  // Long-running workflow: the shared polling loop invokes onPoll every
  // intervalMs; poll failures are logged as warnings and do not kill the loop.
  run: workflow(async (ctx) => {
    await runWorkflowPollingLoop<HandoffStatusSyncState>(ctx, {
      loopName: "handoff-status-sync-loop",
      control: CONTROL,
      onPoll: async (loopCtx) => {
        try {
          await pollSessionStatus(loopCtx);
        } catch (error) {
          logActorWarning("handoff-status-sync", "poll failed", {
            error: resolveErrorMessage(error),
            stack: resolveErrorStack(error)
          });
        }
      }
    });
  })
});

View file

@ -0,0 +1,10 @@
import { actorSqliteDb } from "../../../db/actor-sqlite.js";
import * as schema from "./schema.js";
import migrations from "./migrations.js";
// SQLite-backed DB handle for the `handoff` actor. `migrations` is the
// generated TS bundle (see src/actors/_scripts/generate-actor-migrations.ts);
// `migrationsFolderUrl` points at the raw drizzle-kit output directory.
export const handoffDb = actorSqliteDb({
  actorName: "handoff",
  schema,
  migrations,
  migrationsFolderUrl: new URL("./drizzle/", import.meta.url),
});

View file

@ -0,0 +1,7 @@
import { defineConfig } from "rivetkit/db/drizzle";
// drizzle-kit config for the handoff actor's SQLite schema; generated output
// lands in db/drizzle next to the schema and is bundled into migrations.ts
// by the generate-actor-migrations script.
export default defineConfig({
  out: "./src/actors/handoff/db/drizzle",
  schema: "./src/actors/handoff/db/schema.ts",
});

View file

@ -0,0 +1,24 @@
-- 0000: initial schema.
-- `handoff`: the handoff record itself (later migrations address id = 1,
-- suggesting a single-row-per-actor layout — see 0002).
CREATE TABLE `handoff` (
	`id` integer PRIMARY KEY NOT NULL,
	`branch_name` text NOT NULL,
	`title` text NOT NULL,
	`task` text NOT NULL,
	`provider_id` text NOT NULL,
	`status` text NOT NULL,
	`agent_type` text DEFAULT 'claude',
	`auto_committed` integer DEFAULT 0,
	`pushed` integer DEFAULT 0,
	`pr_submitted` integer DEFAULT 0,
	`needs_push` integer DEFAULT 0,
	`created_at` integer NOT NULL,
	`updated_at` integer NOT NULL
);
--> statement-breakpoint
-- `handoff_runtime`: mutable runtime pointers (sandbox/session) for the handoff.
CREATE TABLE `handoff_runtime` (
	`id` integer PRIMARY KEY NOT NULL,
	`sandbox_id` text,
	`session_id` text,
	`switch_target` text,
	`status_message` text,
	`updated_at` integer NOT NULL
);

View file

@ -0,0 +1,3 @@
-- 0001: drop the auto_committed / pushed / needs_push flags from `handoff`
-- (pr_submitted is kept).
ALTER TABLE `handoff` DROP COLUMN `auto_committed`;--> statement-breakpoint
ALTER TABLE `handoff` DROP COLUMN `pushed`;--> statement-breakpoint
ALTER TABLE `handoff` DROP COLUMN `needs_push`;

View file

@ -0,0 +1,38 @@
-- 0002: rename runtime columns to active_*, add `handoff_sandboxes`, and
-- backfill it from the existing single runtime row (id = 1).
ALTER TABLE `handoff_runtime` RENAME COLUMN "sandbox_id" TO "active_sandbox_id";--> statement-breakpoint
ALTER TABLE `handoff_runtime` RENAME COLUMN "session_id" TO "active_session_id";--> statement-breakpoint
ALTER TABLE `handoff_runtime` RENAME COLUMN "switch_target" TO "active_switch_target";--> statement-breakpoint
CREATE TABLE `handoff_sandboxes` (
	`sandbox_id` text PRIMARY KEY NOT NULL,
	`provider_id` text NOT NULL,
	`switch_target` text NOT NULL,
	`cwd` text,
	`status_message` text,
	`created_at` integer NOT NULL,
	`updated_at` integer NOT NULL
);
--> statement-breakpoint
ALTER TABLE `handoff_runtime` ADD `active_cwd` text;
--> statement-breakpoint
-- Backfill: copy the active sandbox (if fully populated) into the new table;
-- provider_id/created_at come from the handoff row, falling back to the
-- runtime's updated_at for created_at when no handoff row exists.
INSERT INTO `handoff_sandboxes` (
	`sandbox_id`,
	`provider_id`,
	`switch_target`,
	`cwd`,
	`status_message`,
	`created_at`,
	`updated_at`
)
SELECT
	r.`active_sandbox_id`,
	(SELECT h.`provider_id` FROM `handoff` h WHERE h.`id` = 1),
	r.`active_switch_target`,
	r.`active_cwd`,
	r.`status_message`,
	COALESCE((SELECT h.`created_at` FROM `handoff` h WHERE h.`id` = 1), r.`updated_at`),
	r.`updated_at`
FROM `handoff_runtime` r
WHERE
	r.`id` = 1
	AND r.`active_sandbox_id` IS NOT NULL
	AND r.`active_switch_target` IS NOT NULL
ON CONFLICT(`sandbox_id`) DO NOTHING;

View file

@ -0,0 +1,48 @@
-- Allow handoffs to exist before their branch/title are determined.
-- Drizzle doesn't support altering column nullability in SQLite directly, so rebuild the table.
-- NOTE: this file has no `--> statement-breakpoint` markers; migration 0004
-- repeats this rebuild with proper breakpoints for databases where this one
-- was marked applied without all statements executing (see 0004's header).
PRAGMA foreign_keys=off;
CREATE TABLE `handoff__new` (
	`id` integer PRIMARY KEY NOT NULL,
	`branch_name` text,
	`title` text,
	`task` text NOT NULL,
	`provider_id` text NOT NULL,
	`status` text NOT NULL,
	`agent_type` text DEFAULT 'claude',
	`pr_submitted` integer DEFAULT 0,
	`created_at` integer NOT NULL,
	`updated_at` integer NOT NULL
);
INSERT INTO `handoff__new` (
	`id`,
	`branch_name`,
	`title`,
	`task`,
	`provider_id`,
	`status`,
	`agent_type`,
	`pr_submitted`,
	`created_at`,
	`updated_at`
)
SELECT
	`id`,
	`branch_name`,
	`title`,
	`task`,
	`provider_id`,
	`status`,
	`agent_type`,
	`pr_submitted`,
	`created_at`,
	`updated_at`
FROM `handoff`;
DROP TABLE `handoff`;
ALTER TABLE `handoff__new` RENAME TO `handoff`;
PRAGMA foreign_keys=on;

View file

@ -0,0 +1,57 @@
-- Fix: make branch_name/title nullable during initial "naming" stage.
-- 0003 was missing statement breakpoints, so drizzle's migrator marked it applied without executing all statements.
-- Rebuild the table again with proper statement breakpoints.
PRAGMA foreign_keys=off;
--> statement-breakpoint
-- Clean up any leftover temp table from a partially-applied 0003.
DROP TABLE IF EXISTS `handoff__new`;
--> statement-breakpoint
CREATE TABLE `handoff__new` (
	`id` integer PRIMARY KEY NOT NULL,
	`branch_name` text,
	`title` text,
	`task` text NOT NULL,
	`provider_id` text NOT NULL,
	`status` text NOT NULL,
	`agent_type` text DEFAULT 'claude',
	`pr_submitted` integer DEFAULT 0,
	`created_at` integer NOT NULL,
	`updated_at` integer NOT NULL
);
--> statement-breakpoint
INSERT INTO `handoff__new` (
	`id`,
	`branch_name`,
	`title`,
	`task`,
	`provider_id`,
	`status`,
	`agent_type`,
	`pr_submitted`,
	`created_at`,
	`updated_at`
)
SELECT
	`id`,
	`branch_name`,
	`title`,
	`task`,
	`provider_id`,
	`status`,
	`agent_type`,
	`pr_submitted`,
	`created_at`,
	`updated_at`
FROM `handoff`;
--> statement-breakpoint
DROP TABLE `handoff`;
--> statement-breakpoint
ALTER TABLE `handoff__new` RENAME TO `handoff`;
--> statement-breakpoint
PRAGMA foreign_keys=on;

View file

@ -0,0 +1 @@
-- 0005: add an optional sandbox_actor_id column to handoff_sandboxes.
ALTER TABLE `handoff_sandboxes` ADD `sandbox_actor_id` text;

View file

@ -0,0 +1,14 @@
-- 0006: add `handoff_workbench_sessions`, keyed by session_id, tracking
-- per-session workbench state (unread flag, draft text/attachments,
-- created/closed lifecycle flags, and an optional thinking-since timestamp).
CREATE TABLE `handoff_workbench_sessions` (
	`session_id` text PRIMARY KEY NOT NULL,
	`session_name` text NOT NULL,
	`model` text NOT NULL,
	`unread` integer DEFAULT 0 NOT NULL,
	`draft_text` text DEFAULT '' NOT NULL,
	`draft_attachments_json` text DEFAULT '[]' NOT NULL,
	`draft_updated_at` integer,
	`created` integer DEFAULT 1 NOT NULL,
	`closed` integer DEFAULT 0 NOT NULL,
	`thinking_since_ms` integer,
	`created_at` integer NOT NULL,
	`updated_at` integer NOT NULL
);

View file

@ -0,0 +1,176 @@
{
"version": "6",
"dialect": "sqlite",
"id": "9b004d3b-0722-4bb5-a410-d47635db7df3",
"prevId": "00000000-0000-0000-0000-000000000000",
"tables": {
"handoff": {
"name": "handoff",
"columns": {
"id": {
"name": "id",
"type": "integer",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"branch_name": {
"name": "branch_name",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"title": {
"name": "title",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"task": {
"name": "task",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"provider_id": {
"name": "provider_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"status": {
"name": "status",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"agent_type": {
"name": "agent_type",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "'claude'"
},
"auto_committed": {
"name": "auto_committed",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": 0
},
"pushed": {
"name": "pushed",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": 0
},
"pr_submitted": {
"name": "pr_submitted",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": 0
},
"needs_push": {
"name": "needs_push",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": 0
},
"created_at": {
"name": "created_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"handoff_runtime": {
"name": "handoff_runtime",
"columns": {
"id": {
"name": "id",
"type": "integer",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"sandbox_id": {
"name": "sandbox_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"session_id": {
"name": "session_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"switch_target": {
"name": "switch_target",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"status_message": {
"name": "status_message",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
}
},
"views": {},
"enums": {},
"_meta": {
"schemas": {},
"tables": {},
"columns": {}
},
"internal": {
"indexes": {}
}
}

View file

@ -0,0 +1,152 @@
{
"version": "6",
"dialect": "sqlite",
"id": "0fca0f14-69df-4fca-bc52-29e902247909",
"prevId": "9b004d3b-0722-4bb5-a410-d47635db7df3",
"tables": {
"handoff": {
"name": "handoff",
"columns": {
"id": {
"name": "id",
"type": "integer",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"branch_name": {
"name": "branch_name",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"title": {
"name": "title",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"task": {
"name": "task",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"provider_id": {
"name": "provider_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"status": {
"name": "status",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"agent_type": {
"name": "agent_type",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "'claude'"
},
"pr_submitted": {
"name": "pr_submitted",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": 0
},
"created_at": {
"name": "created_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"handoff_runtime": {
"name": "handoff_runtime",
"columns": {
"id": {
"name": "id",
"type": "integer",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"sandbox_id": {
"name": "sandbox_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"session_id": {
"name": "session_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"switch_target": {
"name": "switch_target",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"status_message": {
"name": "status_message",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
}
},
"views": {},
"enums": {},
"_meta": {
"schemas": {},
"tables": {},
"columns": {}
},
"internal": {
"indexes": {}
}
}

View file

@ -0,0 +1,222 @@
{
"version": "6",
"dialect": "sqlite",
"id": "72cef919-e545-48be-a7c0-7ac74cfcf9e6",
"prevId": "0fca0f14-69df-4fca-bc52-29e902247909",
"tables": {
"handoff": {
"name": "handoff",
"columns": {
"id": {
"name": "id",
"type": "integer",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"branch_name": {
"name": "branch_name",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"title": {
"name": "title",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"task": {
"name": "task",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"provider_id": {
"name": "provider_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"status": {
"name": "status",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"agent_type": {
"name": "agent_type",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "'claude'"
},
"pr_submitted": {
"name": "pr_submitted",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": 0
},
"created_at": {
"name": "created_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"handoff_runtime": {
"name": "handoff_runtime",
"columns": {
"id": {
"name": "id",
"type": "integer",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"active_sandbox_id": {
"name": "active_sandbox_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"active_session_id": {
"name": "active_session_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"active_switch_target": {
"name": "active_switch_target",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"active_cwd": {
"name": "active_cwd",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"status_message": {
"name": "status_message",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"handoff_sandboxes": {
"name": "handoff_sandboxes",
"columns": {
"sandbox_id": {
"name": "sandbox_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"provider_id": {
"name": "provider_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"switch_target": {
"name": "switch_target",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"cwd": {
"name": "cwd",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"status_message": {
"name": "status_message",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
}
},
"views": {},
"enums": {},
"_meta": {
"schemas": {},
"tables": {},
"columns": {
"\"handoff_runtime\".\"sandbox_id\"": "\"handoff_runtime\".\"active_sandbox_id\"",
"\"handoff_runtime\".\"session_id\"": "\"handoff_runtime\".\"active_session_id\"",
"\"handoff_runtime\".\"switch_target\"": "\"handoff_runtime\".\"active_switch_target\""
}
},
"internal": {
"indexes": {}
}
}

View file

@ -0,0 +1,48 @@
{
"version": "7",
"dialect": "sqlite",
"entries": [
{
"idx": 0,
"version": "6",
"when": 1770924374665,
"tag": "0000_condemned_maria_hill",
"breakpoints": true
},
{
"idx": 1,
"version": "6",
"when": 1770947251055,
"tag": "0001_rapid_eddie_brock",
"breakpoints": true
},
{
"idx": 2,
"version": "6",
"when": 1770948428907,
"tag": "0002_lazy_moira_mactaggert",
"breakpoints": true
},
{
"idx": 3,
"version": "6",
"when": 1771027535276,
"tag": "0003_plucky_bran",
"breakpoints": true
},
{
"idx": 4,
"version": "6",
"when": 1771097651912,
"tag": "0004_focused_shuri",
"breakpoints": true
},
    {
      "idx": 5,
      "version": "6",
      "when": 1771370000000,
      "tag": "0005_sandbox_actor_id",
      "breakpoints": true
    },
    {
      "idx": 6,
      "version": "6",
      "when": 1773020000000,
      "tag": "0006_workbench_sessions",
      "breakpoints": true
    }
]
}

View file

@ -0,0 +1,245 @@
// This file is generated by src/actors/_scripts/generate-actor-migrations.ts.
// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql).
// Do not hand-edit this file.
// `journal` mirrors meta/_journal.json (minus the per-entry "version" field)
// so the runtime migrator can order migrations without filesystem access.
const journal = {
	"entries": [
		{
			"idx": 0,
			"when": 1770924374665,
			"tag": "0000_condemned_maria_hill",
			"breakpoints": true
		},
		{
			"idx": 1,
			"when": 1770947251055,
			"tag": "0001_rapid_eddie_brock",
			"breakpoints": true
		},
		{
			"idx": 2,
			"when": 1770948428907,
			"tag": "0002_lazy_moira_mactaggert",
			"breakpoints": true
		},
		{
			"idx": 3,
			"when": 1771027535276,
			"tag": "0003_plucky_bran",
			"breakpoints": true
		},
		{
			"idx": 4,
			"when": 1771097651912,
			"tag": "0004_focused_shuri",
			"breakpoints": true
		},
		{
			"idx": 5,
			"when": 1771370000000,
			"tag": "0005_sandbox_actor_id",
			"breakpoints": true
		},
		{
			"idx": 6,
			"when": 1773020000000,
			"tag": "0006_workbench_sessions",
			"breakpoints": true
		}
	]
} as const;
export default {
	journal,
	// Keyed by zero-padded journal idx ("m0000", "m0001", …); backticks and
	// "${" inside the SQL were escaped by the generator script.
	migrations: {
		m0000: `CREATE TABLE \`handoff\` (
	\`id\` integer PRIMARY KEY NOT NULL,
	\`branch_name\` text NOT NULL,
	\`title\` text NOT NULL,
	\`task\` text NOT NULL,
	\`provider_id\` text NOT NULL,
	\`status\` text NOT NULL,
	\`agent_type\` text DEFAULT 'claude',
	\`auto_committed\` integer DEFAULT 0,
	\`pushed\` integer DEFAULT 0,
	\`pr_submitted\` integer DEFAULT 0,
	\`needs_push\` integer DEFAULT 0,
	\`created_at\` integer NOT NULL,
	\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE \`handoff_runtime\` (
	\`id\` integer PRIMARY KEY NOT NULL,
	\`sandbox_id\` text,
	\`session_id\` text,
	\`switch_target\` text,
	\`status_message\` text,
	\`updated_at\` integer NOT NULL
);
`,
		m0001: `ALTER TABLE \`handoff\` DROP COLUMN \`auto_committed\`;--> statement-breakpoint
ALTER TABLE \`handoff\` DROP COLUMN \`pushed\`;--> statement-breakpoint
ALTER TABLE \`handoff\` DROP COLUMN \`needs_push\`;`,
		// 0002: runtime's single-sandbox columns become the handoff_sandboxes
		// table; existing row (id=1) is migrated into it.
		m0002: `ALTER TABLE \`handoff_runtime\` RENAME COLUMN "sandbox_id" TO "active_sandbox_id";--> statement-breakpoint
ALTER TABLE \`handoff_runtime\` RENAME COLUMN "session_id" TO "active_session_id";--> statement-breakpoint
ALTER TABLE \`handoff_runtime\` RENAME COLUMN "switch_target" TO "active_switch_target";--> statement-breakpoint
CREATE TABLE \`handoff_sandboxes\` (
	\`sandbox_id\` text PRIMARY KEY NOT NULL,
	\`provider_id\` text NOT NULL,
	\`switch_target\` text NOT NULL,
	\`cwd\` text,
	\`status_message\` text,
	\`created_at\` integer NOT NULL,
	\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
ALTER TABLE \`handoff_runtime\` ADD \`active_cwd\` text;
--> statement-breakpoint
INSERT INTO \`handoff_sandboxes\` (
	\`sandbox_id\`,
	\`provider_id\`,
	\`switch_target\`,
	\`cwd\`,
	\`status_message\`,
	\`created_at\`,
	\`updated_at\`
)
SELECT
	r.\`active_sandbox_id\`,
	(SELECT h.\`provider_id\` FROM \`handoff\` h WHERE h.\`id\` = 1),
	r.\`active_switch_target\`,
	r.\`active_cwd\`,
	r.\`status_message\`,
	COALESCE((SELECT h.\`created_at\` FROM \`handoff\` h WHERE h.\`id\` = 1), r.\`updated_at\`),
	r.\`updated_at\`
FROM \`handoff_runtime\` r
WHERE
	r.\`id\` = 1
	AND r.\`active_sandbox_id\` IS NOT NULL
	AND r.\`active_switch_target\` IS NOT NULL
ON CONFLICT(\`sandbox_id\`) DO NOTHING;
`,
		m0003: `-- Allow handoffs to exist before their branch/title are determined.
-- Drizzle doesn't support altering column nullability in SQLite directly, so rebuild the table.
PRAGMA foreign_keys=off;
CREATE TABLE \`handoff__new\` (
	\`id\` integer PRIMARY KEY NOT NULL,
	\`branch_name\` text,
	\`title\` text,
	\`task\` text NOT NULL,
	\`provider_id\` text NOT NULL,
	\`status\` text NOT NULL,
	\`agent_type\` text DEFAULT 'claude',
	\`pr_submitted\` integer DEFAULT 0,
	\`created_at\` integer NOT NULL,
	\`updated_at\` integer NOT NULL
);
INSERT INTO \`handoff__new\` (
	\`id\`,
	\`branch_name\`,
	\`title\`,
	\`task\`,
	\`provider_id\`,
	\`status\`,
	\`agent_type\`,
	\`pr_submitted\`,
	\`created_at\`,
	\`updated_at\`
)
SELECT
	\`id\`,
	\`branch_name\`,
	\`title\`,
	\`task\`,
	\`provider_id\`,
	\`status\`,
	\`agent_type\`,
	\`pr_submitted\`,
	\`created_at\`,
	\`updated_at\`
FROM \`handoff\`;
DROP TABLE \`handoff\`;
ALTER TABLE \`handoff__new\` RENAME TO \`handoff\`;
PRAGMA foreign_keys=on;
`,
		// 0004 repeats 0003's rebuild with proper breakpoints (see SQL comment).
		m0004: `-- Fix: make branch_name/title nullable during initial "naming" stage.
-- 0003 was missing statement breakpoints, so drizzle's migrator marked it applied without executing all statements.
-- Rebuild the table again with proper statement breakpoints.
PRAGMA foreign_keys=off;
--> statement-breakpoint
DROP TABLE IF EXISTS \`handoff__new\`;
--> statement-breakpoint
CREATE TABLE \`handoff__new\` (
	\`id\` integer PRIMARY KEY NOT NULL,
	\`branch_name\` text,
	\`title\` text,
	\`task\` text NOT NULL,
	\`provider_id\` text NOT NULL,
	\`status\` text NOT NULL,
	\`agent_type\` text DEFAULT 'claude',
	\`pr_submitted\` integer DEFAULT 0,
	\`created_at\` integer NOT NULL,
	\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
INSERT INTO \`handoff__new\` (
	\`id\`,
	\`branch_name\`,
	\`title\`,
	\`task\`,
	\`provider_id\`,
	\`status\`,
	\`agent_type\`,
	\`pr_submitted\`,
	\`created_at\`,
	\`updated_at\`
)
SELECT
	\`id\`,
	\`branch_name\`,
	\`title\`,
	\`task\`,
	\`provider_id\`,
	\`status\`,
	\`agent_type\`,
	\`pr_submitted\`,
	\`created_at\`,
	\`updated_at\`
FROM \`handoff\`;
--> statement-breakpoint
DROP TABLE \`handoff\`;
--> statement-breakpoint
ALTER TABLE \`handoff__new\` RENAME TO \`handoff\`;
--> statement-breakpoint
PRAGMA foreign_keys=on;
`,
		m0005: `ALTER TABLE \`handoff_sandboxes\` ADD \`sandbox_actor_id\` text;`,
		m0006: `CREATE TABLE \`handoff_workbench_sessions\` (
	\`session_id\` text PRIMARY KEY NOT NULL,
	\`session_name\` text NOT NULL,
	\`model\` text NOT NULL,
	\`unread\` integer DEFAULT 0 NOT NULL,
	\`draft_text\` text DEFAULT '' NOT NULL,
	\`draft_attachments_json\` text DEFAULT '[]' NOT NULL,
	\`draft_updated_at\` integer,
	\`created\` integer DEFAULT 1 NOT NULL,
	\`closed\` integer DEFAULT 0 NOT NULL,
	\`thinking_since_ms\` integer,
	\`created_at\` integer NOT NULL,
	\`updated_at\` integer NOT NULL
);`,
	} as const
};

View file

@ -0,0 +1,51 @@
import { integer, sqliteTable, text } from "rivetkit/db/drizzle";
// SQLite is per handoff actor instance, so these tables only ever store one row (id=1).
// (That applies to `handoff` and `handoff_runtime`; `handoff_sandboxes` and
// `handoff_workbench_sessions` hold one row per sandbox / agent session.)

// Core handoff record.
export const handoff = sqliteTable("handoff", {
	id: integer("id").primaryKey(),
	// Nullable: a handoff may exist before its branch/title are determined
	// (see migrations 0003/0004).
	branchName: text("branch_name"),
	title: text("title"),
	task: text("task").notNull(),
	providerId: text("provider_id").notNull(),
	status: text("status").notNull(),
	agentType: text("agent_type").default("claude"),
	prSubmitted: integer("pr_submitted").default(0), // 0/1 flag
	createdAt: integer("created_at").notNull(),
	updatedAt: integer("updated_at").notNull(),
});
// Mutable pointers to the currently-active sandbox/session.
export const handoffRuntime = sqliteTable("handoff_runtime", {
	id: integer("id").primaryKey(),
	activeSandboxId: text("active_sandbox_id"),
	activeSessionId: text("active_session_id"),
	activeSwitchTarget: text("active_switch_target"),
	activeCwd: text("active_cwd"),
	statusMessage: text("status_message"),
	updatedAt: integer("updated_at").notNull(),
});
// One row per sandbox the handoff has provisioned.
export const handoffSandboxes = sqliteTable("handoff_sandboxes", {
	sandboxId: text("sandbox_id").notNull().primaryKey(),
	providerId: text("provider_id").notNull(),
	sandboxActorId: text("sandbox_actor_id"), // nullable: added in migration 0005
	switchTarget: text("switch_target").notNull(),
	cwd: text("cwd"),
	statusMessage: text("status_message"),
	createdAt: integer("created_at").notNull(),
	updatedAt: integer("updated_at").notNull(),
});
// Per-session workbench UI state: display name, draft message, unread flag.
// Boolean-ish columns are stored as 0/1 integers (SQLite has no boolean).
export const handoffWorkbenchSessions = sqliteTable("handoff_workbench_sessions", {
	sessionId: text("session_id").notNull().primaryKey(),
	sessionName: text("session_name").notNull(),
	model: text("model").notNull(),
	unread: integer("unread").notNull().default(0),
	draftText: text("draft_text").notNull().default(""),
	draftAttachmentsJson: text("draft_attachments_json").notNull().default("[]"), // JSON array
	draftUpdatedAt: integer("draft_updated_at"),
	created: integer("created").notNull().default(1),
	closed: integer("closed").notNull().default(0),
	thinkingSinceMs: integer("thinking_since_ms"),
	createdAt: integer("created_at").notNull(), // epoch ms (Date.now() in workbench.ts)
	updatedAt: integer("updated_at").notNull(),
});

View file

@ -0,0 +1,399 @@
import { actor, queue } from "rivetkit";
import { workflow } from "rivetkit/workflow";
import type {
AgentType,
HandoffRecord,
HandoffWorkbenchChangeModelInput,
HandoffWorkbenchRenameInput,
HandoffWorkbenchRenameSessionInput,
HandoffWorkbenchSetSessionUnreadInput,
HandoffWorkbenchSendMessageInput,
HandoffWorkbenchUpdateDraftInput,
ProviderId
} from "@openhandoff/shared";
import { expectQueueResponse } from "../../services/queue.js";
import { selfHandoff } from "../handles.js";
import { handoffDb } from "./db/db.js";
import { getCurrentRecord } from "./workflow/common.js";
import {
changeWorkbenchModel,
closeWorkbenchSession,
createWorkbenchSession,
getWorkbenchHandoff,
markWorkbenchUnread,
publishWorkbenchPr,
renameWorkbenchBranch,
renameWorkbenchHandoff,
renameWorkbenchSession,
revertWorkbenchFile,
sendWorkbenchMessage,
syncWorkbenchSessionStatus,
setWorkbenchSessionUnread,
stopWorkbenchSession,
updateWorkbenchDraft
} from "./workbench.js";
import {
HANDOFF_QUEUE_NAMES,
handoffWorkflowQueueName,
runHandoffWorkflow
} from "./workflow/index.js";
/**
 * Input captured at actor creation time and mirrored into state by
 * `createState` below.
 */
export interface HandoffInput {
	workspaceId: string;
	repoId: string;
	handoffId: string;
	repoRemote: string;
	repoLocalPath: string;
	branchName: string | null;
	title: string | null;
	task: string;
	providerId: ProviderId;
	agentType: AgentType | null;
	explicitTitle: string | null;
	explicitBranchName: string | null;
}
/** Payload for initialize/provision; the provider may be overridden per command. */
interface InitializeCommand {
	providerId?: ProviderId;
}
/** Generic command payload carrying an optional human-readable reason. */
interface HandoffActionCommand {
	reason?: string;
}
/** Identifies a workbench tab (the tab id doubles as the agent session id). */
interface HandoffTabCommand {
	tabId: string;
}
/** Session status observation forwarded to the workflow (see syncWorkbenchSessionStatus). */
interface HandoffStatusSyncCommand {
	sessionId: string;
	status: "running" | "idle" | "error";
	at: number;
}
/** Single string value payload (rename handoff / rename branch). */
interface HandoffWorkbenchValueCommand {
	value: string;
}
interface HandoffWorkbenchSessionTitleCommand {
	sessionId: string;
	title: string;
}
interface HandoffWorkbenchSessionUnreadCommand {
	sessionId: string;
	unread: boolean;
}
interface HandoffWorkbenchUpdateDraftCommand {
	sessionId: string;
	text: string;
	// Attachments are passed through opaquely; `unknown` (rather than `any`)
	// prevents accidental untyped property access while staying assignable
	// from the shared input types.
	attachments: Array<unknown>;
}
interface HandoffWorkbenchChangeModelCommand {
	sessionId: string;
	model: string;
}
interface HandoffWorkbenchSendMessageCommand {
	sessionId: string;
	text: string;
	attachments: Array<unknown>;
}
interface HandoffWorkbenchCreateSessionCommand {
	model?: string;
}
interface HandoffWorkbenchSessionCommand {
	sessionId: string;
}
/**
 * Enqueue one workflow command and wait for the workflow run to complete it.
 * Every handoff action funnels through here so command handling is serialized
 * by the long-running workflow; `timeout` bounds the wait in milliseconds.
 */
async function sendWorkflowCommand(
	c: Parameters<typeof selfHandoff>[0],
	command: Parameters<typeof handoffWorkflowQueueName>[0],
	payload: unknown,
	timeout: number,
) {
	const self = selfHandoff(c);
	return await self.send(handoffWorkflowQueueName(command), payload, {
		wait: true,
		timeout,
	});
}
/**
 * Handoff actor: owns one handoff (branch, sandbox, agent sessions).
 * Actions shape their payload and enqueue a workflow command; the `run`
 * workflow performs the actual work and answers via the queue response.
 */
export const handoff = actor({
	db: handoffDb,
	// One queue per workflow command name so each command is individually addressable.
	queues: Object.fromEntries(HANDOFF_QUEUE_NAMES.map((name) => [name, queue()])),
	options: {
		actionTimeout: 5 * 60_000
	},
	createState: (_c, input: HandoffInput) => ({
		workspaceId: input.workspaceId,
		repoId: input.repoId,
		handoffId: input.handoffId,
		repoRemote: input.repoRemote,
		repoLocalPath: input.repoLocalPath,
		branchName: input.branchName,
		title: input.title,
		task: input.task,
		providerId: input.providerId,
		agentType: input.agentType,
		explicitTitle: input.explicitTitle,
		explicitBranchName: input.explicitBranchName,
		initialized: false,
		previousStatus: null as string | null,
	}),
	actions: {
		/** Bootstraps the handoff; returns the stored handoff record. */
		async initialize(c, cmd: InitializeCommand): Promise<HandoffRecord> {
			const result = await sendWorkflowCommand(c, "handoff.command.initialize", cmd ?? {}, 60_000);
			return expectQueueResponse<HandoffRecord>(result);
		},
		/** Provisions the sandbox; long timeout because this can involve clone/setup. */
		async provision(c, cmd: InitializeCommand): Promise<{ ok: true }> {
			await sendWorkflowCommand(c, "handoff.command.provision", cmd ?? {}, 30 * 60_000);
			return { ok: true };
		},
		/** Attach to the handoff's terminal target; returns target + session id. */
		async attach(c, cmd?: HandoffActionCommand): Promise<{ target: string; sessionId: string | null }> {
			const result = await sendWorkflowCommand(c, "handoff.command.attach", cmd ?? {}, 20_000);
			return expectQueueResponse<{ target: string; sessionId: string | null }>(result);
		},
		async switch(c): Promise<{ switchTarget: string }> {
			const result = await sendWorkflowCommand(c, "handoff.command.switch", {}, 20_000);
			return expectQueueResponse<{ switchTarget: string }>(result);
		},
		async push(c, cmd?: HandoffActionCommand): Promise<void> {
			await sendWorkflowCommand(c, "handoff.command.push", cmd ?? {}, 180_000);
		},
		async sync(c, cmd?: HandoffActionCommand): Promise<void> {
			await sendWorkflowCommand(c, "handoff.command.sync", cmd ?? {}, 30_000);
		},
		async merge(c, cmd?: HandoffActionCommand): Promise<void> {
			await sendWorkflowCommand(c, "handoff.command.merge", cmd ?? {}, 30_000);
		},
		/**
		 * Fire-and-forget: archiving may outlive the caller, so failures are
		 * logged instead of propagated to the action result.
		 */
		async archive(c, cmd?: HandoffActionCommand): Promise<void> {
			void sendWorkflowCommand(c, "handoff.command.archive", cmd ?? {}, 60_000).catch(
				(error: unknown) => {
					c.log.warn({
						msg: "archive command failed",
						error: error instanceof Error ? error.message : String(error),
					});
				},
			);
		},
		async kill(c, cmd?: HandoffActionCommand): Promise<void> {
			await sendWorkflowCommand(c, "handoff.command.kill", cmd ?? {}, 60_000);
		},
		/** Reads the current record straight from the DB — no queue round-trip. */
		async get(c): Promise<HandoffRecord> {
			return await getCurrentRecord({ db: c.db, state: c.state });
		},
		/** Full workbench view-model (git state, tabs, transcripts, PR summary). */
		async getWorkbench(c) {
			return await getWorkbenchHandoff(c);
		},
		async markWorkbenchUnread(c): Promise<void> {
			await sendWorkflowCommand(c, "handoff.command.workbench.mark_unread", {}, 20_000);
		},
		async renameWorkbenchHandoff(c, input: HandoffWorkbenchRenameInput): Promise<void> {
			await sendWorkflowCommand(
				c,
				"handoff.command.workbench.rename_handoff",
				{ value: input.value } satisfies HandoffWorkbenchValueCommand,
				20_000,
			);
		},
		/** Branch rename touches the git remote, hence the generous timeout. */
		async renameWorkbenchBranch(c, input: HandoffWorkbenchRenameInput): Promise<void> {
			await sendWorkflowCommand(
				c,
				"handoff.command.workbench.rename_branch",
				{ value: input.value } satisfies HandoffWorkbenchValueCommand,
				5 * 60_000,
			);
		},
		async createWorkbenchSession(c, input?: { model?: string }): Promise<{ tabId: string }> {
			const result = await sendWorkflowCommand(
				c,
				"handoff.command.workbench.create_session",
				// Only forward `model` when the caller actually provided one.
				{ ...(input?.model ? { model: input.model } : {}) } satisfies HandoffWorkbenchCreateSessionCommand,
				5 * 60_000,
			);
			return expectQueueResponse<{ tabId: string }>(result);
		},
		async renameWorkbenchSession(c, input: HandoffWorkbenchRenameSessionInput): Promise<void> {
			await sendWorkflowCommand(
				c,
				"handoff.command.workbench.rename_session",
				{ sessionId: input.tabId, title: input.title } satisfies HandoffWorkbenchSessionTitleCommand,
				20_000,
			);
		},
		async setWorkbenchSessionUnread(c, input: HandoffWorkbenchSetSessionUnreadInput): Promise<void> {
			await sendWorkflowCommand(
				c,
				"handoff.command.workbench.set_session_unread",
				{ sessionId: input.tabId, unread: input.unread } satisfies HandoffWorkbenchSessionUnreadCommand,
				20_000,
			);
		},
		async updateWorkbenchDraft(c, input: HandoffWorkbenchUpdateDraftInput): Promise<void> {
			await sendWorkflowCommand(
				c,
				"handoff.command.workbench.update_draft",
				{
					sessionId: input.tabId,
					text: input.text,
					attachments: input.attachments,
				} satisfies HandoffWorkbenchUpdateDraftCommand,
				20_000,
			);
		},
		async changeWorkbenchModel(c, input: HandoffWorkbenchChangeModelInput): Promise<void> {
			await sendWorkflowCommand(
				c,
				"handoff.command.workbench.change_model",
				{ sessionId: input.tabId, model: input.model } satisfies HandoffWorkbenchChangeModelCommand,
				20_000,
			);
		},
		/** A message turn can run the agent for a while; allow up to 10 minutes. */
		async sendWorkbenchMessage(c, input: HandoffWorkbenchSendMessageInput): Promise<void> {
			await sendWorkflowCommand(
				c,
				"handoff.command.workbench.send_message",
				{
					sessionId: input.tabId,
					text: input.text,
					attachments: input.attachments,
				} satisfies HandoffWorkbenchSendMessageCommand,
				10 * 60_000,
			);
		},
		async stopWorkbenchSession(c, input: HandoffTabCommand): Promise<void> {
			await sendWorkflowCommand(
				c,
				"handoff.command.workbench.stop_session",
				{ sessionId: input.tabId } satisfies HandoffWorkbenchSessionCommand,
				5 * 60_000,
			);
		},
		async syncWorkbenchSessionStatus(c, input: HandoffStatusSyncCommand): Promise<void> {
			await sendWorkflowCommand(c, "handoff.command.workbench.sync_session_status", input, 20_000);
		},
		async closeWorkbenchSession(c, input: HandoffTabCommand): Promise<void> {
			await sendWorkflowCommand(
				c,
				"handoff.command.workbench.close_session",
				{ sessionId: input.tabId } satisfies HandoffWorkbenchSessionCommand,
				5 * 60_000,
			);
		},
		async publishWorkbenchPr(c): Promise<void> {
			await sendWorkflowCommand(c, "handoff.command.workbench.publish_pr", {}, 10 * 60_000);
		},
		async revertWorkbenchFile(c, input: { path: string }): Promise<void> {
			await sendWorkflowCommand(c, "handoff.command.workbench.revert_file", input, 5 * 60_000);
		}
	},
	run: workflow(runHandoffWorkflow)
});
export { HANDOFF_QUEUE_NAMES };

View file

@ -0,0 +1,861 @@
// @ts-nocheck
import { basename } from "node:path";
import { asc, eq } from "drizzle-orm";
import { getActorRuntimeContext } from "../context.js";
import {
getOrCreateHandoffStatusSync,
getOrCreateProject,
getOrCreateWorkspace,
getSandboxInstance,
} from "../handles.js";
import { handoff as handoffTable, handoffRuntime, handoffWorkbenchSessions } from "./db/schema.js";
import { getCurrentRecord } from "./workflow/common.js";
// Interval (ms) between session status syncs, per its name.
// NOTE(review): the consumer is further down the file (outside this view) — confirm.
const STATUS_SYNC_INTERVAL_MS = 1_000;
// Defensive bootstrap for the workbench session table. Migration 0006
// creates the same table; this guards actors whose DB predates it.
// NOTE(review): the columns here must stay in sync with
// handoffWorkbenchSessions in db/schema.ts — confirm when either changes.
async function ensureWorkbenchSessionTable(c: any): Promise<void> {
	await c.db.execute(`
		CREATE TABLE IF NOT EXISTS handoff_workbench_sessions (
			session_id text PRIMARY KEY NOT NULL,
			session_name text NOT NULL,
			model text NOT NULL,
			unread integer DEFAULT 0 NOT NULL,
			draft_text text DEFAULT '' NOT NULL,
			draft_attachments_json text DEFAULT '[]' NOT NULL,
			draft_updated_at integer,
			created integer DEFAULT 1 NOT NULL,
			closed integer DEFAULT 0 NOT NULL,
			thinking_since_ms integer,
			created_at integer NOT NULL,
			updated_at integer NOT NULL
		)
	`);
}
/** Default chat model for a handoff's agent type ("codex" → GPT, else Claude). */
function defaultModelForAgent(agentType: string | null | undefined) {
	if (agentType === "codex") {
		return "gpt-4o";
	}
	return "claude-sonnet-4";
}
/** Human-readable agent label ("Codex"/"Claude") for a model id. */
function agentKindForModel(model: string) {
	const isCodexModel = model === "gpt-4o" || model === "o3";
	return isCodexModel ? "Codex" : "Claude";
}
/** Agent type id ("codex"/"claude") for a model id; Claude is the default. */
export function agentTypeForModel(model: string) {
	switch (model) {
		case "gpt-4o":
		case "o3":
			return "codex";
		default:
			return "claude";
	}
}
/**
 * Derive an "owner/repo" label from a git remote.
 * Handles https URLs, bare host/path remotes, and scp-like SSH remotes
 * ("git@host:owner/repo.git"); falls back to the basename otherwise.
 */
function repoLabelFromRemote(remoteUrl: string): string {
	const trimmed = remoteUrl.trim();
	// scp-like SSH syntax is not URL-parseable ("host:path" with no scheme);
	// pull the path out directly. The capture must not start with "/" so
	// "https://…" (scheme followed by "//") never matches here.
	const scpMatch = /^(?:[^@/\s]+@)?[^:/\s]+:([^/].*)$/.exec(trimmed);
	let path: string | null = scpMatch?.[1] ?? null;
	if (path === null) {
		try {
			path = new URL(trimmed.startsWith("http") ? trimmed : `https://${trimmed}`).pathname;
		} catch {
			path = null;
		}
	}
	if (path !== null) {
		const parts = path.replace(/\/+$/, "").split("/").filter(Boolean);
		if (parts.length >= 2) {
			return `${parts[0]}/${(parts[1] ?? "").replace(/\.git$/, "")}`;
		}
	}
	// Last resort: final path segment of the remote, minus any .git suffix.
	return basename(trimmed.replace(/\.git$/, ""));
}
/** Decode the persisted draft-attachment JSON column; malformed data yields []. */
function parseDraftAttachments(value: string | null | undefined): Array<any> {
	if (value) {
		try {
			const decoded = JSON.parse(value) as unknown;
			if (Array.isArray(decoded)) {
				return decoded;
			}
		} catch {
			// fall through to the empty default
		}
	}
	return [];
}
/**
 * Whether a status observation should flag a session unread.
 * A running session is never unread; otherwise flag only on the transition
 * out of an active thinking state, so repeated idle polls for an
 * already-finished session don't flip unread back on.
 */
export function shouldMarkSessionUnreadForStatus(meta: { thinkingSinceMs?: number | null }, status: "running" | "idle" | "error"): boolean {
	return status !== "running" && Boolean(meta.thinkingSinceMs);
}
/**
 * Load session metadata rows ordered by creation time, decoding SQLite 0/1
 * flags to booleans. Closed sessions are excluded unless
 * `options.includeClosed` is exactly true.
 */
async function listSessionMetaRows(c: any, options?: { includeClosed?: boolean }): Promise<Array<any>> {
	await ensureWorkbenchSessionTable(c);
	const rows = await c.db
		.select()
		.from(handoffWorkbenchSessions)
		.orderBy(asc(handoffWorkbenchSessions.createdAt))
		.all();
	const sessions: Array<any> = [];
	for (const row of rows) {
		const meta = {
			...row,
			id: row.sessionId,
			sessionId: row.sessionId,
			draftAttachments: parseDraftAttachments(row.draftAttachmentsJson),
			draftUpdatedAtMs: row.draftUpdatedAt ?? null,
			unread: row.unread === 1,
			created: row.created === 1,
			closed: row.closed === 1,
		};
		if (options?.includeClosed === true || meta.closed !== true) {
			sessions.push(meta);
		}
	}
	return sessions;
}
/** Next auto-generated session name; counts closed sessions so numbers never repeat. */
async function nextSessionName(c: any): Promise<string> {
	const existing = await listSessionMetaRows(c, { includeClosed: true });
	return `Session ${existing.length + 1}`;
}
/** Fetch one session's metadata row (decoded like listSessionMetaRows), or null. */
async function readSessionMeta(c: any, sessionId: string): Promise<any | null> {
	await ensureWorkbenchSessionTable(c);
	const row = await c.db
		.select()
		.from(handoffWorkbenchSessions)
		.where(eq(handoffWorkbenchSessions.sessionId, sessionId))
		.get();
	return row
		? {
				...row,
				id: row.sessionId,
				sessionId: row.sessionId,
				draftAttachments: parseDraftAttachments(row.draftAttachmentsJson),
				draftUpdatedAtMs: row.draftUpdatedAt ?? null,
				unread: row.unread === 1,
				created: row.created === 1,
				closed: row.closed === 1,
			}
		: null;
}
// Insert a metadata row for params.sessionId if none exists; returns the
// decoded row either way. Defaults: name "Session N" (counting closed rows
// too, so numbering never reuses a slot), model from the handoff's agent
// type, unread false.
async function ensureSessionMeta(c: any, params: {
	sessionId: string;
	model?: string;
	sessionName?: string;
	unread?: boolean;
}): Promise<any> {
	await ensureWorkbenchSessionTable(c);
	const existing = await readSessionMeta(c, params.sessionId);
	if (existing) {
		return existing;
	}
	const now = Date.now();
	const sessionName = params.sessionName ?? (await nextSessionName(c));
	const model = params.model ?? defaultModelForAgent(c.state.agentType);
	const unread = params.unread ?? false;
	await c.db
		.insert(handoffWorkbenchSessions)
		.values({
			sessionId: params.sessionId,
			sessionName,
			model,
			unread: unread ? 1 : 0, // booleans stored as 0/1 integers
			draftText: "",
			draftAttachmentsJson: "[]",
			draftUpdatedAt: null,
			created: 1,
			closed: 0,
			thinkingSinceMs: null,
			createdAt: now,
			updatedAt: now,
		})
		.run();
	// Re-read so the caller gets the same decoded shape readSessionMeta produces.
	return await readSessionMeta(c, params.sessionId);
}
/** Apply a partial update to a session row (creating it first if missing); returns the decoded row. */
async function updateSessionMeta(c: any, sessionId: string, values: Record<string, unknown>): Promise<any> {
	// Guarantee the row exists so the UPDATE below always has a target.
	await ensureSessionMeta(c, { sessionId });
	const patch = { ...values, updatedAt: Date.now() };
	await c.db
		.update(handoffWorkbenchSessions)
		.set(patch)
		.where(eq(handoffWorkbenchSessions.sessionId, sessionId))
		.run();
	return await readSessionMeta(c, sessionId);
}
/** Ping the owning workspace actor so connected workbench UIs refresh. */
async function notifyWorkbenchUpdated(c: any): Promise<void> {
	await (await getOrCreateWorkspace(c, c.state.workspaceId)).notifyWorkbenchUpdated({});
}
/** Chain shell commands so each step runs only if the previous one succeeded. */
function shellFragment(parts: string[]): string {
	const AND_THEN = " && ";
	return parts.join(AND_THEN);
}
// Run a shell command inside the handoff's sandbox, rooted at params.cwd.
// JSON.stringify doubles as shell quoting here: it wraps the cd target and
// the composed script in double quotes and escapes embedded quotes/backslashes.
// `bash -lc` gives a login shell so PATH/profile tooling is available.
async function executeInSandbox(c: any, params: {
	sandboxId: string;
	cwd: string;
	command: string;
	label: string; // human-readable label forwarded to the provider (e.g. for logs)
}): Promise<{ exitCode: number; result: string }> {
	const { providers } = getActorRuntimeContext();
	const provider = providers.get(c.state.providerId);
	return await provider.executeCommand({
		workspaceId: c.state.workspaceId,
		sandboxId: params.sandboxId,
		command: `bash -lc ${JSON.stringify(shellFragment([`cd ${JSON.stringify(params.cwd)}`, params.command]))}`,
		label: params.label,
	});
}
/**
 * Parse `git status --porcelain=v1` output into change entries.
 * Renames keep only the destination path. Status mapping: any 'D' → "D",
 * any 'A' or untracked "??" → "A", everything else (incl. renames) → "M".
 */
function parseGitStatus(output: string): Array<{ path: string; type: "M" | "A" | "D" }> {
	const changes: Array<{ path: string; type: "M" | "A" | "D" }> = [];
	for (const rawLine of output.split("\n")) {
		const line = rawLine.trimEnd();
		if (!line) {
			continue;
		}
		// Porcelain v1: two status columns, a space, then the path.
		const status = line.slice(0, 2).trim();
		const rawPath = line.slice(3).trim();
		const path = rawPath.includes(" -> ") ? (rawPath.split(" -> ").pop() ?? rawPath) : rawPath;
		let type: "M" | "A" | "D";
		if (status.includes("D")) {
			type = "D";
		} else if (status.includes("A") || status === "??") {
			type = "A";
		} else {
			type = "M";
		}
		changes.push({ path, type });
	}
	return changes;
}
/**
 * Parse `git diff --numstat` output into path → {added, removed}.
 * Binary files report "-" counts, which coerce to 0. Paths containing tabs
 * are re-joined since numstat separates columns with tabs.
 */
function parseNumstat(output: string): Map<string, { added: number; removed: number }> {
	const counts = new Map<string, { added: number; removed: number }>();
	for (const rawLine of output.split("\n")) {
		const line = rawLine.trim();
		if (!line) {
			continue;
		}
		const [addedRaw, removedRaw, ...pathParts] = line.split("\t");
		const path = pathParts.join("\t").trim();
		if (!path) {
			continue;
		}
		const added = Number.parseInt(addedRaw ?? "0", 10) || 0;
		const removed = Number.parseInt(removedRaw ?? "0", 10) || 0;
		counts.set(path, { added, removed });
	}
	return counts;
}
/**
 * Build a sorted file tree from a flat list of repo-relative paths.
 * Each level lists directories first, then entries ordered by path.
 */
function buildFileTree(paths: string[]): Array<any> {
	// Phase 1: grow a trie keyed by path segment. Directory nodes carry a
	// Map of children; file (leaf) nodes do not.
	const rootChildren = new Map<string, any>();
	for (const fullPath of paths) {
		const segments = fullPath.split("/").filter(Boolean);
		let siblings = rootChildren;
		let prefix = "";
		for (let depth = 0; depth < segments.length; depth += 1) {
			const segment = segments[depth]!;
			prefix = prefix ? `${prefix}/${segment}` : segment;
			const isDir = depth < segments.length - 1;
			let node = siblings.get(segment);
			if (!node) {
				node = {
					name: segment,
					path: prefix,
					isDir,
					children: isDir ? new Map<string, any>() : undefined,
				};
				siblings.set(segment, node);
			} else if (isDir && !(node.children instanceof Map)) {
				// A path seen earlier as a file now needs to hold children.
				node.children = new Map<string, any>();
			}
			if (isDir) {
				siblings = node.children;
			}
		}
	}
	// Phase 2: serialize the trie into plain nodes, sorting every level
	// (directories before files, then lexicographic by path).
	const serialize = (nodes: Iterable<any>): Array<any> =>
		[...nodes]
			.map((node) =>
				node.isDir
					? {
							name: node.name,
							path: node.path,
							isDir: true,
							children: serialize(node.children?.values?.() ?? []),
						}
					: { name: node.name, path: node.path, isDir: false },
			)
			.sort((left, right) => {
				if (left.isDir !== right.isDir) {
					return left.isDir ? -1 : 1;
				}
				return left.path.localeCompare(right.path);
			});
	return serialize(rootChildren.values());
}
// Gather git working-tree state from the active sandbox: porcelain status,
// per-file diffs, numstat counts, and the full tracked/untracked file tree.
// Returns empty collections when no sandbox/cwd exists or `git status` fails.
// NOTE(review): one sandbox exec per changed file — O(changes) round-trips.
async function collectWorkbenchGitState(c: any, record: any) {
	const activeSandboxId = record.activeSandboxId;
	const activeSandbox =
		activeSandboxId != null
			? (record.sandboxes ?? []).find((candidate: any) => candidate.sandboxId === activeSandboxId) ?? null
			: null;
	// Fall back to the first sandbox's cwd when the active one has none.
	const cwd = activeSandbox?.cwd ?? record.sandboxes?.[0]?.cwd ?? null;
	if (!activeSandboxId || !cwd) {
		return {
			fileChanges: [],
			diffs: {},
			fileTree: [],
		};
	}
	const statusResult = await executeInSandbox(c, {
		sandboxId: activeSandboxId,
		cwd,
		command: "git status --porcelain=v1 -uall",
		label: "git status",
	});
	// Non-zero exit (e.g. not a git repo yet) degrades to an empty view.
	if (statusResult.exitCode !== 0) {
		return {
			fileChanges: [],
			diffs: {},
			fileTree: [],
		};
	}
	const statusRows = parseGitStatus(statusResult.result);
	const numstatResult = await executeInSandbox(c, {
		sandboxId: activeSandboxId,
		cwd,
		command: "git diff --numstat",
		label: "git diff numstat",
	});
	const numstat = parseNumstat(numstatResult.result);
	const diffs: Record<string, string> = {};
	for (const row of statusRows) {
		// Tracked files get a regular diff; untracked files are diffed against
		// /dev/null so brand-new files still produce a patch.
		const diffResult = await executeInSandbox(c, {
			sandboxId: activeSandboxId,
			cwd,
			command: `if git ls-files --error-unmatch -- ${JSON.stringify(row.path)} >/dev/null 2>&1; then git diff -- ${JSON.stringify(row.path)}; else git diff --no-index -- /dev/null ${JSON.stringify(row.path)} || true; fi`,
			label: `git diff ${row.path}`,
		});
		diffs[row.path] = diffResult.result;
	}
	const filesResult = await executeInSandbox(c, {
		sandboxId: activeSandboxId,
		cwd,
		command: "git ls-files --cached --others --exclude-standard",
		label: "git ls-files",
	});
	const allPaths = filesResult.result
		.split("\n")
		.map((line) => line.trim())
		.filter(Boolean);
	return {
		// Join status rows with numstat counts; files absent from numstat
		// (e.g. untracked) default to 0/0.
		fileChanges: statusRows.map((row) => {
			const counts = numstat.get(row.path) ?? { added: 0, removed: 0 };
			return {
				path: row.path,
				added: counts.added,
				removed: counts.removed,
				type: row.type,
			};
		}),
		diffs,
		fileTree: buildFileTree(allPaths),
	};
}
// Read a session's transcript (up to 500 events) from the sandbox actor and
// project it to the plain shape the workbench UI consumes.
// Returns [] when no sandbox exists yet.
// NOTE(review): single page of 500 — long sessions may truncate; confirm
// whether pagination is needed.
async function readSessionTranscript(c: any, record: any, sessionId: string) {
	const sandboxId = record.activeSandboxId ?? record.sandboxes?.[0]?.sandboxId ?? null;
	if (!sandboxId) {
		return [];
	}
	const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, sandboxId);
	const page = await sandbox.listSessionEvents({
		sessionId,
		limit: 500,
	});
	return page.items.map((event: any) => ({
		id: event.id,
		eventIndex: event.eventIndex,
		sessionId: event.sessionId,
		createdAt: event.createdAt,
		connectionId: event.connectionId,
		sender: event.sender,
		payload: event.payload,
	}));
}
/**
 * Live status for a session. Only the currently-active session is queried
 * against the sandbox; every other session reads as "idle".
 */
async function activeSessionStatus(c: any, record: any, sessionId: string) {
	const isActiveSession = record.activeSessionId === sessionId && Boolean(record.activeSandboxId);
	if (!isActiveSession) {
		return "idle";
	}
	const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId);
	return (await sandbox.sessionStatus({ sessionId })).status;
}
/**
 * Look up the PR associated with a branch via the project actor.
 * Returns null when no branch is set or the lookup fails — the PR summary
 * is decorative in the workbench, so failures are non-fatal.
 */
async function readPullRequestSummary(c: any, branchName: string | null) {
	if (!branchName) {
		return null;
	}
	try {
		const project = await getOrCreateProject(c, c.state.workspaceId, c.state.repoId, c.state.repoRemote);
		return await project.getPullRequestForBranch({ branchName });
	} catch {
		return null;
	}
}
/**
 * Make sure the active session (if any) has a metadata row, then return the
 * current handoff record. The seeded row is named "Session 1".
 */
export async function ensureWorkbenchSeeded(c: any): Promise<any> {
	const record = await getCurrentRecord({ db: c.db, state: c.state });
	const seedSessionId = record.activeSessionId;
	if (seedSessionId) {
		await ensureSessionMeta(c, {
			sessionId: seedSessionId,
			model: defaultModelForAgent(record.agentType),
			sessionName: "Session 1",
		});
	}
	return record;
}
// Assemble the full workbench view-model: git state, session tabs (with live
// status, drafts, and transcripts), PR summary, and file tree.
// NOTE(review): sessions are processed sequentially (status + transcript per
// session) — fine for small tab counts; confirm before adding many tabs.
export async function getWorkbenchHandoff(c: any): Promise<any> {
	const record = await ensureWorkbenchSeeded(c);
	const gitState = await collectWorkbenchGitState(c, record);
	const sessions = await listSessionMetaRows(c);
	const tabs = [];
	for (const meta of sessions) {
		const status = await activeSessionStatus(c, record, meta.sessionId);
		let thinkingSinceMs = meta.thinkingSinceMs ?? null;
		let unread = Boolean(meta.unread);
		// A session that was "thinking" but is no longer running finished
		// since the last poll: clear the marker and surface the tab as
		// unread (same condition as shouldMarkSessionUnreadForStatus).
		// NOTE(review): this flip is not persisted back to the DB — confirm
		// that is intentional.
		if (thinkingSinceMs && status !== "running") {
			thinkingSinceMs = null;
			unread = true;
		}
		tabs.push({
			id: meta.id,
			sessionId: meta.sessionId,
			sessionName: meta.sessionName,
			agent: agentKindForModel(meta.model),
			model: meta.model,
			status,
			thinkingSinceMs: status === "running" ? thinkingSinceMs : null,
			unread,
			created: Boolean(meta.created),
			draft: {
				text: meta.draftText ?? "",
				attachments: Array.isArray(meta.draftAttachments) ? meta.draftAttachments : [],
				updatedAtMs: meta.draftUpdatedAtMs ?? null,
			},
			transcript: await readSessionTranscript(c, record, meta.sessionId),
		});
	}
	return {
		id: c.state.handoffId,
		repoId: c.state.repoId,
		title: record.title ?? "New Handoff",
		// Collapse the stored status into the workbench's coarser vocabulary.
		status: record.status === "archived" ? "archived" : record.status === "running" ? "running" : record.status === "idle" ? "idle" : "new",
		repoName: repoLabelFromRemote(c.state.repoRemote),
		updatedAtMs: record.updatedAt,
		branch: record.branchName,
		pullRequest: await readPullRequestSummary(c, record.branchName),
		tabs,
		fileChanges: gitState.fileChanges,
		diffs: gitState.diffs,
		fileTree: gitState.fileTree,
	};
}
/**
 * Set a new handoff title: persists to the singleton DB row (id=1), mirrors
 * it into actor state, and notifies the workspace. Rejects blank titles.
 */
export async function renameWorkbenchHandoff(c: any, value: string): Promise<void> {
	const nextTitle = value.trim();
	if (!nextTitle) {
		throw new Error("handoff title is required");
	}
	const patch = { title: nextTitle, updatedAt: Date.now() };
	await c.db.update(handoffTable).set(patch).where(eq(handoffTable.id, 1)).run();
	c.state.title = nextTitle;
	await notifyWorkbenchUpdated(c);
}
// Rename the handoff's git branch locally and on origin, then update the DB,
// actor state, and the project actor's branch registration.
// Preconditions: the branch exists and there is an active sandbox with a cwd.
export async function renameWorkbenchBranch(c: any, value: string): Promise<void> {
	const nextBranch = value.trim();
	if (!nextBranch) {
		throw new Error("branch name is required");
	}
	const record = await ensureWorkbenchSeeded(c);
	if (!record.branchName) {
		throw new Error("cannot rename branch before handoff branch exists");
	}
	if (!record.activeSandboxId) {
		throw new Error("cannot rename branch without an active sandbox");
	}
	const activeSandbox =
		(record.sandboxes ?? []).find((candidate: any) => candidate.sandboxId === record.activeSandboxId) ?? null;
	if (!activeSandbox?.cwd) {
		throw new Error("cannot rename branch without a sandbox cwd");
	}
	// One shell invocation, short-circuiting on failure: rename the local
	// branch, delete the old remote branch (only if it exists), push the new
	// branch, then fix upstream tracking.
	const renameResult = await executeInSandbox(c, {
		sandboxId: record.activeSandboxId,
		cwd: activeSandbox.cwd,
		command: [
			`git branch -m ${JSON.stringify(record.branchName)} ${JSON.stringify(nextBranch)}`,
			`if git ls-remote --exit-code --heads origin ${JSON.stringify(record.branchName)} >/dev/null 2>&1; then git push origin :${JSON.stringify(record.branchName)}; fi`,
			`git push origin ${JSON.stringify(nextBranch)}`,
			`git branch --set-upstream-to=${JSON.stringify(`origin/${nextBranch}`)} ${JSON.stringify(nextBranch)} || git push --set-upstream origin ${JSON.stringify(nextBranch)}`,
		].join(" && "),
		label: `git branch -m ${record.branchName} ${nextBranch}`,
	});
	if (renameResult.exitCode !== 0) {
		throw new Error(`branch rename failed (${renameResult.exitCode}): ${renameResult.result}`);
	}
	// Persist only after the git-side rename succeeded.
	await c.db
		.update(handoffTable)
		.set({
			branchName: nextBranch,
			updatedAt: Date.now(),
		})
		.where(eq(handoffTable.id, 1))
		.run();
	c.state.branchName = nextBranch;
	const project = await getOrCreateProject(c, c.state.workspaceId, c.state.repoId, c.state.repoRemote);
	await project.registerHandoffBranch({
		handoffId: c.state.handoffId,
		branchName: nextBranch,
	});
	await notifyWorkbenchUpdated(c);
}
// Start a fresh agent session in the active sandbox and record its metadata.
// The returned tabId is the sandbox-agent session id.
export async function createWorkbenchSession(c: any, model?: string): Promise<{ tabId: string }> {
	const record = await ensureWorkbenchSeeded(c);
	if (!record.activeSandboxId) {
		throw new Error("cannot create session without an active sandbox");
	}
	const activeSandbox =
		(record.sandboxes ?? []).find((candidate: any) => candidate.sandboxId === record.activeSandboxId) ?? null;
	// Fall back to the first sandbox's cwd when the active one has none.
	const cwd = activeSandbox?.cwd ?? record.sandboxes?.[0]?.cwd ?? null;
	if (!cwd) {
		throw new Error("cannot create session without a sandbox cwd");
	}
	const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId);
	// Empty prompt: the session starts idle; the first user message arrives
	// later via the send-message path.
	const created = await sandbox.createSession({
		prompt: "",
		cwd,
		agent: agentTypeForModel(model ?? defaultModelForAgent(record.agentType)),
	});
	if (!created.id) {
		throw new Error(created.error ?? "sandbox-agent session creation failed");
	}
	await ensureSessionMeta(c, {
		sessionId: created.id,
		model: model ?? defaultModelForAgent(record.agentType),
	});
	await notifyWorkbenchUpdated(c);
	return { tabId: created.id };
}
/** Persist a user-provided session title; blank titles are rejected. */
export async function renameWorkbenchSession(c: any, sessionId: string, title: string): Promise<void> {
  const nextTitle = title.trim();
  if (!nextTitle) {
    throw new Error("session title is required");
  }
  await updateSessionMeta(c, sessionId, { sessionName: nextTitle });
  await notifyWorkbenchUpdated(c);
}
/** Set the session's unread flag, stored as 0/1 in session metadata. */
export async function setWorkbenchSessionUnread(c: any, sessionId: string, unread: boolean): Promise<void> {
  const flag = unread ? 1 : 0;
  await updateSessionMeta(c, sessionId, { unread: flag });
  await notifyWorkbenchUpdated(c);
}
/**
 * Persist the in-progress composer state (text plus serialized attachments)
 * so it can be restored later.
 */
export async function updateWorkbenchDraft(c: any, sessionId: string, text: string, attachments: Array<any>): Promise<void> {
  const patch = {
    draftText: text,
    draftAttachmentsJson: JSON.stringify(attachments),
    draftUpdatedAt: Date.now(),
  };
  await updateSessionMeta(c, sessionId, patch);
  await notifyWorkbenchUpdated(c);
}
/** Record the newly selected model on the session's metadata. */
export async function changeWorkbenchModel(c: any, sessionId: string, model: string): Promise<void> {
  await updateSessionMeta(c, sessionId, { model });
  await notifyWorkbenchUpdated(c);
}
/**
 * Send a user message to the sandbox agent session.
 *
 * Builds the prompt from the trimmed text plus one `@ path:line` reference
 * block per attachment, clears the saved draft, marks this session as the
 * active one, and (re)starts the periodic status sync for it.
 *
 * Throws when no sandbox is active or the composed prompt is empty.
 */
export async function sendWorkbenchMessage(c: any, sessionId: string, text: string, attachments: Array<any>): Promise<void> {
  const record = await ensureWorkbenchSeeded(c);
  if (!record.activeSandboxId) {
    throw new Error("cannot send message without an active sandbox");
  }
  await ensureSessionMeta(c, { sessionId });
  const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId);
  // filter(Boolean) drops the text part when it trims to "" so attachments can
  // still be sent alone.
  const prompt = [
    text.trim(),
    ...attachments.map((attachment: any) => `@ ${attachment.filePath}:${attachment.lineNumber}\n${attachment.lineContent}`),
  ]
    .filter(Boolean)
    .join("\n\n");
  if (!prompt) {
    throw new Error("message text is required");
  }
  await sandbox.sendPrompt({
    sessionId,
    prompt,
    notification: true,
  });
  // Reset per-session UI state: read, created, draft cleared, and start the
  // "thinking" timer now that the agent is working.
  await updateSessionMeta(c, sessionId, {
    unread: 0,
    created: 1,
    draftText: "",
    draftAttachmentsJson: "[]",
    draftUpdatedAt: Date.now(),
    thinkingSinceMs: Date.now(),
  });
  // Make this the active session (single-row runtime table, id 1).
  await c.db
    .update(handoffRuntime)
    .set({
      activeSessionId: sessionId,
      updatedAt: Date.now(),
    })
    .where(eq(handoffRuntime.id, 1))
    .run();
  // Ensure a status-sync actor exists for this sandbox/session pair, then kick
  // it: configure interval, start, and force an immediate poll.
  const sync = await getOrCreateHandoffStatusSync(
    c,
    c.state.workspaceId,
    c.state.repoId,
    c.state.handoffId,
    record.activeSandboxId,
    sessionId,
    {
      workspaceId: c.state.workspaceId,
      repoId: c.state.repoId,
      handoffId: c.state.handoffId,
      providerId: c.state.providerId,
      sandboxId: record.activeSandboxId,
      sessionId,
      intervalMs: STATUS_SYNC_INTERVAL_MS,
    },
  );
  await sync.setIntervalMs({ intervalMs: STATUS_SYNC_INTERVAL_MS });
  await sync.start();
  await sync.force();
  await notifyWorkbenchUpdated(c);
}
/**
 * Cancel a running agent session and clear its "thinking" marker.
 * No-op when no sandbox is active.
 */
export async function stopWorkbenchSession(c: any, sessionId: string): Promise<void> {
  const record = await ensureWorkbenchSeeded(c);
  const sandboxId = record.activeSandboxId;
  if (!sandboxId) {
    return;
  }
  const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, sandboxId);
  await sandbox.cancelSession({ sessionId });
  await updateSessionMeta(c, sessionId, { thinkingSinceMs: null });
  await notifyWorkbenchUpdated(c);
}
/**
 * Reconcile a status report from the status-sync actor into the handoff and
 * session rows.
 *
 * When the report is for the active session, mirrors the status onto the
 * handoff row and the `session:<status>` message onto the runtime row (only
 * when they actually changed). For every session, maintains the thinking
 * timer and may flag the session unread on transition out of "running".
 * Notifies the workbench only if something was written.
 *
 * @param status status observed by the sync poller
 * @param at timestamp (ms) to stamp on the updated rows
 */
export async function syncWorkbenchSessionStatus(
  c: any,
  sessionId: string,
  status: "running" | "idle" | "error",
  at: number,
): Promise<void> {
  const record = await ensureWorkbenchSeeded(c);
  const meta = await ensureSessionMeta(c, { sessionId });
  let changed = false;
  if (record.activeSessionId === sessionId) {
    // Handoff status only tracks the *active* session.
    const mappedStatus = status === "running" ? "running" : status === "error" ? "error" : "idle";
    if (record.status !== mappedStatus) {
      await c.db
        .update(handoffTable)
        .set({
          status: mappedStatus,
          updatedAt: at,
        })
        .where(eq(handoffTable.id, 1))
        .run();
      changed = true;
    }
    const statusMessage = `session:${status}`;
    if (record.statusMessage !== statusMessage) {
      await c.db
        .update(handoffRuntime)
        .set({
          statusMessage,
          updatedAt: at,
        })
        .where(eq(handoffRuntime.id, 1))
        .run();
      changed = true;
    }
  }
  if (status === "running") {
    // Start the thinking timer once; repeated "running" reports keep the
    // original start time.
    if (!meta.thinkingSinceMs) {
      await updateSessionMeta(c, sessionId, {
        thinkingSinceMs: at,
      });
      changed = true;
    }
  } else {
    // Agent stopped: clear the timer and possibly mark the session unread so
    // the user notices the result.
    if (meta.thinkingSinceMs) {
      await updateSessionMeta(c, sessionId, {
        thinkingSinceMs: null,
      });
      changed = true;
    }
    if (!meta.unread && shouldMarkSessionUnreadForStatus(meta, status)) {
      await updateSessionMeta(c, sessionId, {
        unread: 1,
      });
      changed = true;
    }
  }
  if (changed) {
    await notifyWorkbenchUpdated(c);
  }
}
/**
 * Close (destroy) an agent session, refusing to close the last open one.
 *
 * Marks the session closed in metadata and, if it was the active session,
 * clears the runtime's activeSessionId.
 */
export async function closeWorkbenchSession(c: any, sessionId: string): Promise<void> {
  const record = await ensureWorkbenchSeeded(c);
  if (!record.activeSandboxId) {
    return;
  }
  const sessions = await listSessionMetaRows(c);
  // BUG FIX: `closed` is persisted as 0/1 (see the `closed: 1` write below),
  // so the previous `candidate.closed !== true` check never excluded closed
  // sessions and the "last open session" guard only fired with one row total.
  // Truthiness handles both numeric and boolean representations.
  const openSessions = sessions.filter((candidate) => !candidate.closed);
  if (openSessions.length <= 1) {
    return; // never close the last open session
  }
  const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId);
  await sandbox.destroySession({ sessionId });
  await updateSessionMeta(c, sessionId, {
    closed: 1,
    thinkingSinceMs: null,
  });
  if (record.activeSessionId === sessionId) {
    // The closed session was active; leave no dangling active pointer.
    await c.db
      .update(handoffRuntime)
      .set({
        activeSessionId: null,
        updatedAt: Date.now(),
      })
      .where(eq(handoffRuntime.id, 1))
      .run();
  }
  await notifyWorkbenchUpdated(c);
}
/** Flag the most recently listed session as unread; no-op when there are none. */
export async function markWorkbenchUnread(c: any): Promise<void> {
  const sessions = await listSessionMetaRows(c);
  if (sessions.length === 0) {
    return;
  }
  const latest = sessions[sessions.length - 1];
  await updateSessionMeta(c, latest.sessionId, { unread: 1 });
  await notifyWorkbenchUpdated(c);
}
/**
 * Create a PR for the handoff branch via the GitHub driver and mark the
 * handoff row as submitted.
 *
 * Throws when the handoff has no branch yet; driver failures propagate.
 */
export async function publishWorkbenchPr(c: any): Promise<void> {
  const record = await ensureWorkbenchSeeded(c);
  if (!record.branchName) {
    throw new Error("cannot publish PR without a branch");
  }
  const { driver } = getActorRuntimeContext();
  // The return value was previously bound to an unused local; the call throws
  // on failure, which is all this function relies on.
  await driver.github.createPr(
    c.state.repoLocalPath,
    record.branchName,
    record.title ?? c.state.task,
  );
  await c.db
    .update(handoffTable)
    .set({
      prSubmitted: 1,
      updatedAt: Date.now(),
    })
    .where(eq(handoffTable.id, 1))
    .run();
  await notifyWorkbenchUpdated(c);
}
/**
 * Revert a single file in the active sandbox's working tree.
 *
 * Tracked files are restored from the index/HEAD (`git restore`, falling back
 * to `git checkout` for older git); untracked files are deleted with `rm -f`.
 *
 * Throws when no sandbox is active, the sandbox has no cwd, or the shell
 * command exits non-zero.
 */
export async function revertWorkbenchFile(c: any, path: string): Promise<void> {
  const record = await ensureWorkbenchSeeded(c);
  if (!record.activeSandboxId) {
    throw new Error("cannot revert file without an active sandbox");
  }
  const activeSandbox =
    (record.sandboxes ?? []).find((candidate: any) => candidate.sandboxId === record.activeSandboxId) ?? null;
  if (!activeSandbox?.cwd) {
    throw new Error("cannot revert file without a sandbox cwd");
  }
  // NOTE(review): JSON.stringify double-quotes the path for the shell; `$` and
  // backticks inside a path would still be expanded — confirm paths are sane.
  const result = await executeInSandbox(c, {
    sandboxId: record.activeSandboxId,
    cwd: activeSandbox.cwd,
    command: `if git ls-files --error-unmatch -- ${JSON.stringify(path)} >/dev/null 2>&1; then git restore --staged --worktree -- ${JSON.stringify(path)} || git checkout -- ${JSON.stringify(path)}; else rm -f ${JSON.stringify(path)}; fi`,
    label: `git restore ${path}`,
  });
  if (result.exitCode !== 0) {
    throw new Error(`file revert failed (${result.exitCode}): ${result.result}`);
  }
  await notifyWorkbenchUpdated(c);
}

View file

@ -0,0 +1,209 @@
// @ts-nocheck
import { eq } from "drizzle-orm";
import { getActorRuntimeContext } from "../../context.js";
import { getOrCreateHandoffStatusSync } from "../../handles.js";
import { logActorWarning, resolveErrorMessage } from "../../logging.js";
import { handoff as handoffTable, handoffRuntime } from "../db/schema.js";
import { HANDOFF_ROW_ID, appendHistory, getCurrentRecord, setHandoffState } from "./common.js";
import { pushActiveBranchActivity } from "./push.js";
/**
 * Race `promise` against a timeout of `timeoutMs` milliseconds.
 *
 * Resolves/rejects with the promise's outcome when it settles first; otherwise
 * rejects with `Error("<label> timed out after <ms>ms")`. The timer is always
 * cleared, and a late rejection of the losing promise is swallowed so it does
 * not surface as an unhandled rejection after the timeout already fired.
 */
async function withTimeout<T>(promise: Promise<T>, timeoutMs: number, label: string): Promise<T> {
  let timer: ReturnType<typeof setTimeout> | undefined;
  const timeout = new Promise<never>((_resolve, reject) => {
    timer = setTimeout(() => reject(new Error(`${label} timed out after ${timeoutMs}ms`)), timeoutMs);
  });
  try {
    return await Promise.race([promise, timeout]);
  } catch (error) {
    // If we lost the race (or the promise itself rejected), make sure a later
    // rejection of `promise` is observed so Node does not report it as
    // unhandled.
    promise.catch(() => {});
    throw error;
  } finally {
    if (timer) {
      clearTimeout(timer);
    }
  }
}
/**
 * Resolve the provider owning the active sandbox, ask it for an attach target,
 * then report the target (plus the active session id) to history and the caller.
 */
export async function handleAttachActivity(loopCtx: any, msg: any): Promise<void> {
  const record = await getCurrentRecord(loopCtx);
  const { providers } = getActorRuntimeContext();
  let activeSandbox: any = null;
  if (record.activeSandboxId) {
    activeSandbox = record.sandboxes.find((sb: any) => sb.sandboxId === record.activeSandboxId) ?? null;
  }
  // Fall back to the handoff-level provider when no sandbox-level one is known.
  const provider = providers.get(activeSandbox?.providerId ?? record.providerId);
  const target = await provider.attachTarget({
    workspaceId: loopCtx.state.workspaceId,
    sandboxId: record.activeSandboxId ?? ""
  });
  const response = {
    target: target.target,
    sessionId: record.activeSessionId
  };
  await appendHistory(loopCtx, "handoff.attach", { ...response });
  await msg.complete({ ...response });
}
/**
 * Reply with the currently requested switch target from the runtime row,
 * defaulting to the empty string when none is recorded.
 */
export async function handleSwitchActivity(loopCtx: any, msg: any): Promise<void> {
  const runtimeRow = await loopCtx.db
    .select({ switchTarget: handoffRuntime.activeSwitchTarget })
    .from(handoffRuntime)
    .where(eq(handoffRuntime.id, HANDOFF_ROW_ID))
    .get();
  await msg.complete({ switchTarget: runtimeRow?.switchTarget ?? "" });
}
/** Delegate to the shared push activity, tagging history as "handoff.push". */
export async function handlePushActivity(loopCtx: any, msg: any): Promise<void> {
  const reason = msg.body?.reason ?? null;
  await pushActiveBranchActivity(loopCtx, { reason, historyKind: "handoff.push" });
  await msg.complete({ ok: true });
}
/**
 * Generic handler for commands that only need a runtime status-message update
 * plus a history entry (e.g. sync/merge requests).
 */
export async function handleSimpleCommandActivity(
  loopCtx: any,
  msg: any,
  statusMessage: string,
  historyKind: string
): Promise<void> {
  await loopCtx.db
    .update(handoffRuntime)
    .set({ statusMessage, updatedAt: Date.now() })
    .where(eq(handoffRuntime.id, HANDOFF_ROW_ID))
    .run();
  const reason = msg.body?.reason ?? null;
  await appendHistory(loopCtx, historyKind, { reason });
  await msg.complete({ ok: true });
}
/**
 * Archive the handoff: stop the status sync (bounded by a timeout), release
 * the active sandbox in the background, then write the terminal "archived"
 * state and a history entry.
 *
 * Each teardown phase is best-effort: failures are logged and do not block
 * finalization.
 */
export async function handleArchiveActivity(loopCtx: any, msg: any): Promise<void> {
  await setHandoffState(loopCtx, "archive_stop_status_sync", "stopping status sync");
  const record = await getCurrentRecord(loopCtx);
  if (record.activeSandboxId && record.activeSessionId) {
    try {
      const sync = await getOrCreateHandoffStatusSync(
        loopCtx,
        loopCtx.state.workspaceId,
        loopCtx.state.repoId,
        loopCtx.state.handoffId,
        record.activeSandboxId,
        record.activeSessionId,
        {
          workspaceId: loopCtx.state.workspaceId,
          repoId: loopCtx.state.repoId,
          handoffId: loopCtx.state.handoffId,
          providerId: record.providerId,
          sandboxId: record.activeSandboxId,
          sessionId: record.activeSessionId,
          intervalMs: 2_000
        }
      );
      // Bound the stop call so a wedged sync actor cannot stall the archive.
      await withTimeout(sync.stop(), 15_000, "handoff status sync stop");
    } catch (error) {
      logActorWarning("handoff.commands", "failed to stop status sync during archive", {
        workspaceId: loopCtx.state.workspaceId,
        repoId: loopCtx.state.repoId,
        handoffId: loopCtx.state.handoffId,
        sandboxId: record.activeSandboxId,
        sessionId: record.activeSessionId,
        error: resolveErrorMessage(error)
      });
    }
  }
  if (record.activeSandboxId) {
    await setHandoffState(loopCtx, "archive_release_sandbox", "releasing sandbox");
    const { providers } = getActorRuntimeContext();
    const activeSandbox =
      record.sandboxes.find((sb: any) => sb.sandboxId === record.activeSandboxId) ?? null;
    const provider = providers.get(activeSandbox?.providerId ?? record.providerId);
    // Capture identifiers now; the async callback below may run after state
    // has moved on.
    const workspaceId = loopCtx.state.workspaceId;
    const repoId = loopCtx.state.repoId;
    const handoffId = loopCtx.state.handoffId;
    const sandboxId = record.activeSandboxId;
    // Do not block archive finalization on provider stop. Some provider stop calls can
    // run longer than the synchronous archive UX budget.
    void withTimeout(
      provider.releaseSandbox({
        workspaceId,
        sandboxId
      }),
      45_000,
      "provider releaseSandbox"
    ).catch((error) => {
      logActorWarning("handoff.commands", "failed to release sandbox during archive", {
        workspaceId,
        repoId,
        handoffId,
        sandboxId,
        error: resolveErrorMessage(error)
      });
    });
  }
  const db = loopCtx.db;
  await setHandoffState(loopCtx, "archive_finalize", "finalizing archive");
  // Terminal writes: handoff row status, runtime row cleared, history entry.
  await db
    .update(handoffTable)
    .set({ status: "archived", updatedAt: Date.now() })
    .where(eq(handoffTable.id, HANDOFF_ROW_ID))
    .run();
  await db
    .update(handoffRuntime)
    .set({ activeSessionId: null, statusMessage: "archived", updatedAt: Date.now() })
    .where(eq(handoffRuntime.id, HANDOFF_ROW_ID))
    .run();
  await appendHistory(loopCtx, "handoff.archive", { reason: msg.body?.reason ?? null });
  await msg.complete({ ok: true });
}
/** Tear down the active sandbox (if any) through its owning provider. */
export async function killDestroySandboxActivity(loopCtx: any): Promise<void> {
  await setHandoffState(loopCtx, "kill_destroy_sandbox", "destroying sandbox");
  const record = await getCurrentRecord(loopCtx);
  const sandboxId = record.activeSandboxId;
  if (!sandboxId) {
    return;
  }
  const { providers } = getActorRuntimeContext();
  const active = record.sandboxes.find((sb: any) => sb.sandboxId === sandboxId) ?? null;
  const provider = providers.get(active?.providerId ?? record.providerId);
  await provider.destroySandbox({
    workspaceId: loopCtx.state.workspaceId,
    sandboxId
  });
}
/** Persist the terminal "killed" state and record the kill in history. */
export async function killWriteDbActivity(loopCtx: any, msg: any): Promise<void> {
  await setHandoffState(loopCtx, "kill_finalize", "finalizing kill");
  const db = loopCtx.db;
  await db
    .update(handoffTable)
    .set({ status: "killed", updatedAt: Date.now() })
    .where(eq(handoffTable.id, HANDOFF_ROW_ID))
    .run();
  await db
    .update(handoffRuntime)
    .set({ statusMessage: "killed", updatedAt: Date.now() })
    .where(eq(handoffRuntime.id, HANDOFF_ROW_ID))
    .run();
  const reason = msg.body?.reason ?? null;
  await appendHistory(loopCtx, "handoff.kill", { reason });
  await msg.complete({ ok: true });
}
/** Reply with a freshly assembled snapshot of the handoff record. */
export async function handleGetActivity(loopCtx: any, msg: any): Promise<void> {
  const record = await getCurrentRecord(loopCtx);
  await msg.complete(record);
}

View file

@ -0,0 +1,192 @@
// @ts-nocheck
import { eq } from "drizzle-orm";
import type { HandoffRecord, HandoffStatus } from "@openhandoff/shared";
import { getOrCreateWorkspace } from "../../handles.js";
import { handoff as handoffTable, handoffRuntime, handoffSandboxes } from "../db/schema.js";
import { historyKey } from "../../keys.js";
// Single-row tables: the handoff and handoffRuntime rows always live at id 1.
export const HANDOFF_ROW_ID = 1;
/**
 * Collect the trimmed, de-duplicated messages along an error's `cause` chain.
 *
 * Walks Error causes (cycle-safe), accepts a bare string as a terminal value,
 * and ignores anything else. Order of first occurrence is preserved.
 */
export function collectErrorMessages(error: unknown): string[] {
  const messages: string[] = [];
  if (error == null) {
    return messages;
  }
  const visited = new Set<unknown>();
  let cursor: unknown = error;
  while (cursor != null && !visited.has(cursor)) {
    visited.add(cursor);
    if (cursor instanceof Error) {
      const text = cursor.message?.trim();
      if (text && !messages.includes(text)) {
        messages.push(text);
      }
      cursor = (cursor as { cause?: unknown }).cause;
    } else {
      if (typeof cursor === "string") {
        const text = cursor.trim();
        if (text && !messages.includes(text)) {
          messages.push(text);
        }
      }
      break;
    }
  }
  return messages;
}
/**
 * Pick the most informative message from an error's cause chain: prefer the
 * first message that is not a generic workflow wrapper ('Step "…" failed …'),
 * fall back to the first collected message, and to String(error) when nothing
 * usable was collected.
 */
export function resolveErrorDetail(error: unknown): string {
  const messages = collectErrorMessages(error);
  if (messages.length === 0) {
    return String(error);
  }
  const wrapperPattern = /^Step\s+"[^"]+"\s+failed\b/i;
  for (const message of messages) {
    if (!wrapperPattern.test(message)) {
      return message;
    }
  }
  return messages[0]!;
}
/**
 * Build the prompt sent to the agent for a task. Currently this is just the
 * task text with surrounding whitespace removed.
 */
export function buildAgentPrompt(task: string): string {
  const prompt = task.trim();
  return prompt;
}
/**
 * Update the handoff row's status (and optionally the runtime row's status
 * message), then nudge the workspace so connected workbenches refresh.
 *
 * The runtime row is upserted: on first write it is created with null
 * active-* fields; afterwards only statusMessage/updatedAt are touched.
 */
export async function setHandoffState(
  ctx: any,
  status: HandoffStatus,
  statusMessage?: string
): Promise<void> {
  const now = Date.now();
  const db = ctx.db;
  await db
    .update(handoffTable)
    .set({ status, updatedAt: now })
    .where(eq(handoffTable.id, HANDOFF_ROW_ID))
    .run();
  if (statusMessage != null) {
    await db
      .insert(handoffRuntime)
      .values({
        id: HANDOFF_ROW_ID,
        activeSandboxId: null,
        activeSessionId: null,
        activeSwitchTarget: null,
        activeCwd: null,
        statusMessage,
        updatedAt: now
      })
      .onConflictDoUpdate({
        target: handoffRuntime.id,
        set: {
          statusMessage,
          updatedAt: now
        }
      })
      .run();
  }
  const workspace = await getOrCreateWorkspace(ctx, ctx.state.workspaceId);
  await workspace.notifyWorkbenchUpdated({});
}
/**
 * Assemble the full HandoffRecord from the single-row handoff table (left
 * joined with the runtime row) plus all sandbox rows.
 *
 * The derived/remote fields (diffStat, prUrl, ciStatus, …) are not stored
 * here and are returned as null placeholders.
 *
 * @throws Error when the handoff row does not exist.
 */
export async function getCurrentRecord(ctx: any): Promise<HandoffRecord> {
  const db = ctx.db;
  const row = await db
    .select({
      branchName: handoffTable.branchName,
      title: handoffTable.title,
      task: handoffTable.task,
      providerId: handoffTable.providerId,
      status: handoffTable.status,
      statusMessage: handoffRuntime.statusMessage,
      activeSandboxId: handoffRuntime.activeSandboxId,
      activeSessionId: handoffRuntime.activeSessionId,
      agentType: handoffTable.agentType,
      prSubmitted: handoffTable.prSubmitted,
      createdAt: handoffTable.createdAt,
      updatedAt: handoffTable.updatedAt
    })
    .from(handoffTable)
    // Left join: the runtime row may not exist yet, yielding null runtime fields.
    .leftJoin(handoffRuntime, eq(handoffTable.id, handoffRuntime.id))
    .where(eq(handoffTable.id, HANDOFF_ROW_ID))
    .get();
  if (!row) {
    throw new Error(`Handoff not found: ${ctx.state.handoffId}`);
  }
  const sandboxes = await db
    .select({
      sandboxId: handoffSandboxes.sandboxId,
      providerId: handoffSandboxes.providerId,
      sandboxActorId: handoffSandboxes.sandboxActorId,
      switchTarget: handoffSandboxes.switchTarget,
      cwd: handoffSandboxes.cwd,
      createdAt: handoffSandboxes.createdAt,
      updatedAt: handoffSandboxes.updatedAt,
    })
    .from(handoffSandboxes)
    .all();
  return {
    workspaceId: ctx.state.workspaceId,
    repoId: ctx.state.repoId,
    repoRemote: ctx.state.repoRemote,
    handoffId: ctx.state.handoffId,
    branchName: row.branchName,
    title: row.title,
    task: row.task,
    providerId: row.providerId,
    status: row.status,
    statusMessage: row.statusMessage ?? null,
    activeSandboxId: row.activeSandboxId ?? null,
    activeSessionId: row.activeSessionId ?? null,
    sandboxes: sandboxes.map((sb) => ({
      sandboxId: sb.sandboxId,
      providerId: sb.providerId,
      sandboxActorId: sb.sandboxActorId ?? null,
      switchTarget: sb.switchTarget,
      cwd: sb.cwd ?? null,
      createdAt: sb.createdAt,
      updatedAt: sb.updatedAt,
    })),
    agentType: row.agentType ?? null,
    prSubmitted: Boolean(row.prSubmitted),
    // Remote/derived state is not persisted in these tables.
    diffStat: null,
    hasUnpushed: null,
    conflictsWithMain: null,
    parentBranch: null,
    prUrl: null,
    prAuthor: null,
    ciStatus: null,
    reviewStatus: null,
    reviewer: null,
    createdAt: row.createdAt,
    updatedAt: row.updatedAt,
  } as HandoffRecord;
}
/**
 * Append an event to the per-repo history actor, then nudge the workspace so
 * connected workbenches refresh.
 */
export async function appendHistory(ctx: any, kind: string, payload: Record<string, unknown>): Promise<void> {
  const { workspaceId, repoId, handoffId, branchName } = ctx.state;
  const history = await ctx.client().history.getOrCreate(
    historyKey(workspaceId, repoId),
    { createWithInput: { workspaceId, repoId } }
  );
  await history.append({
    kind,
    handoffId,
    branchName,
    payload
  });
  const workspace = await getOrCreateWorkspace(ctx, workspaceId);
  await workspace.notifyWorkbenchUpdated({});
}

View file

@ -0,0 +1,290 @@
import { Loop } from "rivetkit/workflow";
import { getActorRuntimeContext } from "../../context.js";
import { logActorWarning, resolveErrorMessage } from "../../logging.js";
import { getCurrentRecord } from "./common.js";
import {
initAssertNameActivity,
initBootstrapDbActivity,
initCompleteActivity,
initCreateSandboxActivity,
initCreateSessionActivity,
initEnsureAgentActivity,
initEnsureNameActivity,
initFailedActivity,
initStartSandboxInstanceActivity,
initStartStatusSyncActivity,
initWriteDbActivity
} from "./init.js";
import {
handleArchiveActivity,
handleAttachActivity,
handleGetActivity,
handlePushActivity,
handleSimpleCommandActivity,
handleSwitchActivity,
killDestroySandboxActivity,
killWriteDbActivity
} from "./commands.js";
import { idleNotifyActivity, idleSubmitPrActivity, statusUpdateActivity } from "./status-sync.js";
import { HANDOFF_QUEUE_NAMES } from "./queue.js";
import {
changeWorkbenchModel,
closeWorkbenchSession,
createWorkbenchSession,
markWorkbenchUnread,
publishWorkbenchPr,
renameWorkbenchBranch,
renameWorkbenchHandoff,
renameWorkbenchSession,
revertWorkbenchFile,
sendWorkbenchMessage,
setWorkbenchSessionUnread,
stopWorkbenchSession,
syncWorkbenchSessionStatus,
updateWorkbenchDraft,
} from "../workbench.js";
export { HANDOFF_QUEUE_NAMES, handoffWorkflowQueueName } from "./queue.js";
// Queue-name → handler dispatch table consumed by runHandoffWorkflow below.
// Each handler wraps its work in named workflow steps (with explicit timeouts
// for long-running ones) and replies to the caller via msg.complete.
type HandoffQueueName = (typeof HANDOFF_QUEUE_NAMES)[number];
type WorkflowHandler = (loopCtx: any, msg: { name: HandoffQueueName; body: any; complete: (response: unknown) => Promise<void> }) => Promise<void>;
const commandHandlers: Record<HandoffQueueName, WorkflowHandler> = {
  "handoff.command.initialize": async (loopCtx, msg) => {
    const body = msg.body;
    await loopCtx.step("init-bootstrap-db", async () => initBootstrapDbActivity(loopCtx, body));
    // NOTE(review): `removed` presumably tombstones steps deleted from the
    // workflow so old histories still replay — confirm against the workflow runtime.
    await loopCtx.removed("init-enqueue-provision", "step");
    await loopCtx.removed("init-dispatch-provision-v2", "step");
    const currentRecord = await loopCtx.step(
      "init-read-current-record",
      async () => getCurrentRecord(loopCtx)
    );
    // Completion failure is logged, not rethrown: the init itself succeeded.
    try {
      await msg.complete(currentRecord);
    } catch (error) {
      logActorWarning("handoff.workflow", "initialize completion failed", {
        error: resolveErrorMessage(error)
      });
    }
  },
  "handoff.command.provision": async (loopCtx, msg) => {
    const body = msg.body;
    await loopCtx.removed("init-failed", "step");
    // Provision pipeline: name → sandbox → agent → instance → session → DB →
    // status sync. Any step failure funnels into initFailedActivity and a
    // { ok: false } reply instead of propagating.
    try {
      await loopCtx.step("init-ensure-name", async () => initEnsureNameActivity(loopCtx));
      await loopCtx.step("init-assert-name", async () => initAssertNameActivity(loopCtx));
      const sandbox = await loopCtx.step({
        name: "init-create-sandbox",
        timeout: 180_000,
        run: async () => initCreateSandboxActivity(loopCtx, body),
      });
      const agent = await loopCtx.step({
        name: "init-ensure-agent",
        timeout: 180_000,
        run: async () => initEnsureAgentActivity(loopCtx, body, sandbox),
      });
      const sandboxInstanceReady = await loopCtx.step({
        name: "init-start-sandbox-instance",
        timeout: 60_000,
        run: async () => initStartSandboxInstanceActivity(loopCtx, body, sandbox, agent),
      });
      const session = await loopCtx.step({
        name: "init-create-session",
        timeout: 180_000,
        run: async () => initCreateSessionActivity(loopCtx, body, sandbox, sandboxInstanceReady),
      });
      await loopCtx.step(
        "init-write-db",
        async () => initWriteDbActivity(loopCtx, body, sandbox, session, sandboxInstanceReady)
      );
      await loopCtx.step("init-start-status-sync", async () => initStartStatusSyncActivity(loopCtx, body, sandbox, session));
      await loopCtx.step("init-complete", async () => initCompleteActivity(loopCtx, body, sandbox, session));
      await msg.complete({ ok: true });
    } catch (error) {
      await loopCtx.step("init-failed-v2", async () => initFailedActivity(loopCtx, error));
      await msg.complete({ ok: false });
    }
  },
  "handoff.command.attach": async (loopCtx, msg) => {
    await loopCtx.step("handle-attach", async () => handleAttachActivity(loopCtx, msg));
  },
  "handoff.command.switch": async (loopCtx, msg) => {
    await loopCtx.step("handle-switch", async () => handleSwitchActivity(loopCtx, msg));
  },
  "handoff.command.push": async (loopCtx, msg) => {
    await loopCtx.step("handle-push", async () => handlePushActivity(loopCtx, msg));
  },
  "handoff.command.sync": async (loopCtx, msg) => {
    await loopCtx.step(
      "handle-sync",
      async () => handleSimpleCommandActivity(loopCtx, msg, "sync requested", "handoff.sync")
    );
  },
  "handoff.command.merge": async (loopCtx, msg) => {
    await loopCtx.step(
      "handle-merge",
      async () => handleSimpleCommandActivity(loopCtx, msg, "merge requested", "handoff.merge")
    );
  },
  "handoff.command.archive": async (loopCtx, msg) => {
    await loopCtx.step("handle-archive", async () => handleArchiveActivity(loopCtx, msg));
  },
  "handoff.command.kill": async (loopCtx, msg) => {
    await loopCtx.step("kill-destroy-sandbox", async () => killDestroySandboxActivity(loopCtx));
    await loopCtx.step("kill-write-db", async () => killWriteDbActivity(loopCtx, msg));
  },
  "handoff.command.get": async (loopCtx, msg) => {
    await loopCtx.step("handle-get", async () => handleGetActivity(loopCtx, msg));
  },
  // Workbench commands delegate to ../workbench.js; handlers that touch the
  // sandbox or network carry explicit step timeouts.
  "handoff.command.workbench.mark_unread": async (loopCtx, msg) => {
    await loopCtx.step("workbench-mark-unread", async () => markWorkbenchUnread(loopCtx));
    await msg.complete({ ok: true });
  },
  "handoff.command.workbench.rename_handoff": async (loopCtx, msg) => {
    await loopCtx.step("workbench-rename-handoff", async () => renameWorkbenchHandoff(loopCtx, msg.body.value));
    await msg.complete({ ok: true });
  },
  "handoff.command.workbench.rename_branch": async (loopCtx, msg) => {
    await loopCtx.step({
      name: "workbench-rename-branch",
      timeout: 5 * 60_000,
      run: async () => renameWorkbenchBranch(loopCtx, msg.body.value),
    });
    await msg.complete({ ok: true });
  },
  "handoff.command.workbench.create_session": async (loopCtx, msg) => {
    const created = await loopCtx.step({
      name: "workbench-create-session",
      timeout: 5 * 60_000,
      run: async () => createWorkbenchSession(loopCtx, msg.body?.model),
    });
    await msg.complete(created);
  },
  "handoff.command.workbench.rename_session": async (loopCtx, msg) => {
    await loopCtx.step("workbench-rename-session", async () =>
      renameWorkbenchSession(loopCtx, msg.body.sessionId, msg.body.title),
    );
    await msg.complete({ ok: true });
  },
  "handoff.command.workbench.set_session_unread": async (loopCtx, msg) => {
    await loopCtx.step("workbench-set-session-unread", async () =>
      setWorkbenchSessionUnread(loopCtx, msg.body.sessionId, msg.body.unread),
    );
    await msg.complete({ ok: true });
  },
  "handoff.command.workbench.update_draft": async (loopCtx, msg) => {
    await loopCtx.step("workbench-update-draft", async () =>
      updateWorkbenchDraft(loopCtx, msg.body.sessionId, msg.body.text, msg.body.attachments),
    );
    await msg.complete({ ok: true });
  },
  "handoff.command.workbench.change_model": async (loopCtx, msg) => {
    await loopCtx.step("workbench-change-model", async () =>
      changeWorkbenchModel(loopCtx, msg.body.sessionId, msg.body.model),
    );
    await msg.complete({ ok: true });
  },
  "handoff.command.workbench.send_message": async (loopCtx, msg) => {
    await loopCtx.step({
      name: "workbench-send-message",
      timeout: 10 * 60_000,
      run: async () => sendWorkbenchMessage(loopCtx, msg.body.sessionId, msg.body.text, msg.body.attachments),
    });
    await msg.complete({ ok: true });
  },
  "handoff.command.workbench.stop_session": async (loopCtx, msg) => {
    await loopCtx.step({
      name: "workbench-stop-session",
      timeout: 5 * 60_000,
      run: async () => stopWorkbenchSession(loopCtx, msg.body.sessionId),
    });
    await msg.complete({ ok: true });
  },
  "handoff.command.workbench.sync_session_status": async (loopCtx, msg) => {
    await loopCtx.step("workbench-sync-session-status", async () =>
      syncWorkbenchSessionStatus(loopCtx, msg.body.sessionId, msg.body.status, msg.body.at),
    );
    await msg.complete({ ok: true });
  },
  "handoff.command.workbench.close_session": async (loopCtx, msg) => {
    await loopCtx.step({
      name: "workbench-close-session",
      timeout: 5 * 60_000,
      run: async () => closeWorkbenchSession(loopCtx, msg.body.sessionId),
    });
    await msg.complete({ ok: true });
  },
  "handoff.command.workbench.publish_pr": async (loopCtx, msg) => {
    await loopCtx.step({
      name: "workbench-publish-pr",
      timeout: 10 * 60_000,
      run: async () => publishWorkbenchPr(loopCtx),
    });
    await msg.complete({ ok: true });
  },
  "handoff.command.workbench.revert_file": async (loopCtx, msg) => {
    await loopCtx.step({
      name: "workbench-revert-file",
      timeout: 5 * 60_000,
      run: async () => revertWorkbenchFile(loopCtx, msg.body.path),
    });
    await msg.complete({ ok: true });
  },
  "handoff.status_sync.result": async (loopCtx, msg) => {
    // On a running→idle transition, optionally auto-submit a PR (config flag)
    // and always notify.
    const transitionedToIdle = await loopCtx.step("status-update", async () => statusUpdateActivity(loopCtx, msg.body));
    if (transitionedToIdle) {
      const { config } = getActorRuntimeContext();
      if (config.auto_submit) {
        await loopCtx.step("idle-submit-pr", async () => idleSubmitPrActivity(loopCtx));
      }
      await loopCtx.step("idle-notify", async () => idleNotifyActivity(loopCtx));
    }
  }
};
/**
 * Main workflow loop: wait for the next queued command, dispatch it to the
 * matching handler, and continue. Messages with unknown names are dropped.
 */
export async function runHandoffWorkflow(ctx: any): Promise<void> {
  await ctx.loop("handoff-command-loop", async (loopCtx: any) => {
    const message = await loopCtx.queue.next("next-command", {
      names: [...HANDOFF_QUEUE_NAMES],
      completable: true
    });
    if (message) {
      const handler = commandHandlers[message.name as HandoffQueueName];
      if (handler) {
        await handler(loopCtx, message);
      }
    }
    return Loop.continue(undefined);
  });
}

View file

@ -0,0 +1,643 @@
// @ts-nocheck
import { desc, eq } from "drizzle-orm";
import { resolveCreateFlowDecision } from "../../../services/create-flow.js";
import { getActorRuntimeContext } from "../../context.js";
import {
getOrCreateHandoffStatusSync,
getOrCreateHistory,
getOrCreateProject,
getOrCreateSandboxInstance,
getSandboxInstance,
selfHandoff
} from "../../handles.js";
import { logActorWarning, resolveErrorMessage } from "../../logging.js";
import { handoff as handoffTable, handoffRuntime, handoffSandboxes } from "../db/schema.js";
import {
HANDOFF_ROW_ID,
appendHistory,
buildAgentPrompt,
collectErrorMessages,
resolveErrorDetail,
setHandoffState
} from "./common.js";
import { handoffWorkflowQueueName } from "./queue.js";
const DEFAULT_INIT_CREATE_SANDBOX_ACTIVITY_TIMEOUT_MS = 180_000;
/**
 * Timeout (ms) for the create-sandbox init step. Can be overridden via the
 * HF_INIT_CREATE_SANDBOX_ACTIVITY_TIMEOUT_MS env var; non-numeric, zero, or
 * negative overrides fall back to the default. Fractions are floored.
 */
function getInitCreateSandboxActivityTimeoutMs(): number {
  const override = process.env.HF_INIT_CREATE_SANDBOX_ACTIVITY_TIMEOUT_MS;
  if (!override) {
    return DEFAULT_INIT_CREATE_SANDBOX_ACTIVITY_TIMEOUT_MS;
  }
  const value = Number(override);
  const usable = Number.isFinite(value) && value > 0;
  return usable ? Math.floor(value) : DEFAULT_INIT_CREATE_SANDBOX_ACTIVITY_TIMEOUT_MS;
}
/**
 * Emit a debug log line scoped to handoff initialization, automatically
 * tagging it with the workspace/repo/handoff ids from actor state. Extra
 * context fields are merged in last so they win on key collisions.
 */
function debugInit(loopCtx: any, message: string, context?: Record<string, unknown>): void {
  const base = {
    msg: message,
    scope: "handoff.init",
    workspaceId: loopCtx.state.workspaceId,
    repoId: loopCtx.state.repoId,
    handoffId: loopCtx.state.handoffId
  };
  loopCtx.log.debug({ ...base, ...(context ?? {}) });
}
/**
 * Run `run()` and race its promise against a timeout of `timeoutMs` ms.
 *
 * Rejects with `Error("<label> timed out after <ms>ms")` when the timeout
 * wins. The timer is always cleared, and — mirroring withTimeout in
 * commands.ts — a late rejection of the losing promise is swallowed so it
 * does not surface as an unhandled rejection after the timeout already fired.
 */
async function withActivityTimeout<T>(
  timeoutMs: number,
  label: string,
  run: () => Promise<T>
): Promise<T> {
  let timer: ReturnType<typeof setTimeout> | null = null;
  const work = run();
  const timeout = new Promise<never>((_, reject) => {
    timer = setTimeout(() => {
      reject(new Error(`${label} timed out after ${timeoutMs}ms`));
    }, timeoutMs);
  });
  try {
    return await Promise.race([work, timeout]);
  } catch (error) {
    // Observe a possible later rejection of the underlying work so Node does
    // not report it as unhandled once we have already thrown.
    work.catch(() => {});
    throw error;
  } finally {
    if (timer) {
      clearTimeout(timer);
    }
  }
}
/**
 * First init step: upsert the single handoff row and its runtime companion.
 *
 * The runtime status message starts as "provisioning" when a branch/title are
 * already known, otherwise "naming" (the naming step will run first). Any DB
 * failure is rethrown wrapped with a descriptive prefix.
 */
export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise<void> {
  // Command body may override the provider recorded in actor state.
  const providerId = body?.providerId ?? loopCtx.state.providerId;
  const { config } = getActorRuntimeContext();
  const now = Date.now();
  const db = loopCtx.db;
  const initialStatusMessage = loopCtx.state.branchName && loopCtx.state.title ? "provisioning" : "naming";
  try {
    await db
      .insert(handoffTable)
      .values({
        id: HANDOFF_ROW_ID,
        branchName: loopCtx.state.branchName,
        title: loopCtx.state.title,
        task: loopCtx.state.task,
        providerId,
        status: "init_bootstrap_db",
        agentType: loopCtx.state.agentType ?? config.default_agent,
        createdAt: now,
        updatedAt: now
      })
      // Re-running initialization overwrites everything except createdAt.
      .onConflictDoUpdate({
        target: handoffTable.id,
        set: {
          branchName: loopCtx.state.branchName,
          title: loopCtx.state.title,
          task: loopCtx.state.task,
          providerId,
          status: "init_bootstrap_db",
          agentType: loopCtx.state.agentType ?? config.default_agent,
          updatedAt: now
        }
      })
      .run();
    // Runtime row starts (or is reset) with all active-* pointers cleared.
    await db
      .insert(handoffRuntime)
      .values({
        id: HANDOFF_ROW_ID,
        activeSandboxId: null,
        activeSessionId: null,
        activeSwitchTarget: null,
        activeCwd: null,
        statusMessage: initialStatusMessage,
        updatedAt: now
      })
      .onConflictDoUpdate({
        target: handoffRuntime.id,
        set: {
          activeSandboxId: null,
          activeSessionId: null,
          activeSwitchTarget: null,
          activeCwd: null,
          statusMessage: initialStatusMessage,
          updatedAt: now
        }
      })
      .run();
  } catch (error) {
    const detail = resolveErrorMessage(error);
    throw new Error(`handoff init bootstrap db failed: ${detail}`);
  }
}
/**
 * Fire-and-forget the provision command at this handoff's own queue; send
 * failures are logged rather than propagated so initialization can complete.
 */
export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Promise<void> {
  await setHandoffState(loopCtx, "init_enqueue_provision", "provision queued");
  const self = selfHandoff(loopCtx);
  const queueName = handoffWorkflowQueueName("handoff.command.provision");
  void self.send(queueName, body, { wait: false }).catch((error: unknown) => {
    logActorWarning("handoff.init", "background provision command failed", {
      workspaceId: loopCtx.state.workspaceId,
      repoId: loopCtx.state.repoId,
      handoffId: loopCtx.state.handoffId,
      error: resolveErrorMessage(error),
    });
  });
}
/**
 * Ensure the handoff has a branch name and title.
 *
 * Short-circuits when the DB already has both (e.g. a retried workflow step).
 * Otherwise: fetch the repo (best-effort), collect remote + reserved branch
 * names, let resolveCreateFlowDecision pick a title/branch, persist them,
 * reserve the branch on the project actor, and append a "handoff.named"
 * history event.
 */
export async function initEnsureNameActivity(loopCtx: any): Promise<void> {
  await setHandoffState(loopCtx, "init_ensure_name", "determining title and branch");
  const existing = await loopCtx.db
    .select({
      branchName: handoffTable.branchName,
      title: handoffTable.title
    })
    .from(handoffTable)
    .where(eq(handoffTable.id, HANDOFF_ROW_ID))
    .get();
  if (existing?.branchName && existing?.title) {
    // Already named — hydrate in-memory state from the DB and stop.
    loopCtx.state.branchName = existing.branchName;
    loopCtx.state.title = existing.title;
    return;
  }
  const { driver } = getActorRuntimeContext();
  try {
    // Best-effort fetch: a stale remote branch list only risks a name
    // collision, so failures are logged instead of thrown.
    await driver.git.fetch(loopCtx.state.repoLocalPath);
  } catch (error) {
    logActorWarning("handoff.init", "fetch before naming failed", {
      workspaceId: loopCtx.state.workspaceId,
      repoId: loopCtx.state.repoId,
      handoffId: loopCtx.state.handoffId,
      error: resolveErrorMessage(error)
    });
  }
  const remoteBranches = (await driver.git.listRemoteBranches(loopCtx.state.repoLocalPath)).map(
    (branch: any) => branch.branchName
  );
  const project = await getOrCreateProject(
    loopCtx,
    loopCtx.state.workspaceId,
    loopCtx.state.repoId,
    loopCtx.state.repoRemote
  );
  // Branches already reserved by other handoffs must also be avoided.
  const reservedBranches = await project.listReservedBranches({});
  const resolved = resolveCreateFlowDecision({
    task: loopCtx.state.task,
    explicitTitle: loopCtx.state.explicitTitle ?? undefined,
    explicitBranchName: loopCtx.state.explicitBranchName ?? undefined,
    localBranches: remoteBranches,
    handoffBranches: reservedBranches
  });
  const now = Date.now();
  await loopCtx.db
    .update(handoffTable)
    .set({
      branchName: resolved.branchName,
      title: resolved.title,
      updatedAt: now
    })
    .where(eq(handoffTable.id, HANDOFF_ROW_ID))
    .run();
  loopCtx.state.branchName = resolved.branchName;
  loopCtx.state.title = resolved.title;
  // Explicit overrides are one-shot: clear them once consumed.
  loopCtx.state.explicitTitle = null;
  loopCtx.state.explicitBranchName = null;
  await loopCtx.db
    .update(handoffRuntime)
    .set({
      statusMessage: "provisioning",
      updatedAt: now
    })
    .where(eq(handoffRuntime.id, HANDOFF_ROW_ID))
    .run();
  await project.registerHandoffBranch({
    handoffId: loopCtx.state.handoffId,
    branchName: resolved.branchName
  });
  await appendHistory(loopCtx, "handoff.named", {
    title: resolved.title,
    branchName: resolved.branchName
  });
}
/**
 * Sanity check between naming and provisioning: the branch name must have
 * been resolved by now, otherwise the init workflow cannot proceed.
 *
 * @throws Error when state.branchName is still unset.
 */
export async function initAssertNameActivity(loopCtx: any): Promise<void> {
  await setHandoffState(loopCtx, "init_assert_name", "validating naming");
  const { branchName } = loopCtx.state;
  if (branchName) {
    return;
  }
  throw new Error("handoff branchName is not initialized");
}
/**
 * Create (or resume) the sandbox for this handoff.
 *
 * When the provider supports session reuse, first try to resume the runtime's
 * active sandbox, falling back to the most recently updated sandbox recorded
 * for this provider. A failed resume is logged and degrades to a fresh
 * createSandbox call. Both provider calls are bounded by the activity timeout.
 *
 * @returns the provider's sandbox descriptor (e.g. { sandboxId, metadata, ... }).
 */
export async function initCreateSandboxActivity(loopCtx: any, body: any): Promise<any> {
  await setHandoffState(loopCtx, "init_create_sandbox", "creating sandbox");
  const { providers } = getActorRuntimeContext();
  const providerId = body?.providerId ?? loopCtx.state.providerId;
  const provider = providers.get(providerId);
  const timeoutMs = getInitCreateSandboxActivityTimeoutMs();
  const startedAt = Date.now();
  debugInit(loopCtx, "init_create_sandbox started", {
    providerId,
    timeoutMs,
    supportsSessionReuse: provider.capabilities().supportsSessionReuse
  });
  if (provider.capabilities().supportsSessionReuse) {
    // Prefer the runtime's active sandbox, then any prior sandbox for this
    // provider (most recently updated first).
    const runtime = await loopCtx.db
      .select({ activeSandboxId: handoffRuntime.activeSandboxId })
      .from(handoffRuntime)
      .where(eq(handoffRuntime.id, HANDOFF_ROW_ID))
      .get();
    const existing = await loopCtx.db
      .select({ sandboxId: handoffSandboxes.sandboxId })
      .from(handoffSandboxes)
      .where(eq(handoffSandboxes.providerId, providerId))
      .orderBy(desc(handoffSandboxes.updatedAt))
      .limit(1)
      .get();
    const sandboxId = runtime?.activeSandboxId ?? existing?.sandboxId ?? null;
    if (sandboxId) {
      debugInit(loopCtx, "init_create_sandbox attempting resume", { sandboxId });
      try {
        const resumed = await withActivityTimeout(
          timeoutMs,
          "resumeSandbox",
          async () => provider.resumeSandbox({
            workspaceId: loopCtx.state.workspaceId,
            sandboxId
          })
        );
        debugInit(loopCtx, "init_create_sandbox resume succeeded", {
          sandboxId: resumed.sandboxId,
          durationMs: Date.now() - startedAt
        });
        return resumed;
      } catch (error) {
        // Resume failure is non-fatal: fall through to a fresh create.
        logActorWarning("handoff.init", "resume sandbox failed; creating a new sandbox", {
          workspaceId: loopCtx.state.workspaceId,
          repoId: loopCtx.state.repoId,
          handoffId: loopCtx.state.handoffId,
          sandboxId,
          error: resolveErrorMessage(error)
        });
      }
    }
  }
  debugInit(loopCtx, "init_create_sandbox creating fresh sandbox", {
    branchName: loopCtx.state.branchName
  });
  try {
    const sandbox = await withActivityTimeout(
      timeoutMs,
      "createSandbox",
      async () => provider.createSandbox({
        workspaceId: loopCtx.state.workspaceId,
        repoId: loopCtx.state.repoId,
        repoRemote: loopCtx.state.repoRemote,
        branchName: loopCtx.state.branchName,
        handoffId: loopCtx.state.handoffId,
        debug: (message, context) => debugInit(loopCtx, message, context)
      })
    );
    debugInit(loopCtx, "init_create_sandbox create succeeded", {
      sandboxId: sandbox.sandboxId,
      durationMs: Date.now() - startedAt
    });
    return sandbox;
  } catch (error) {
    // Log duration + message for diagnostics, then propagate to the workflow.
    debugInit(loopCtx, "init_create_sandbox failed", {
      durationMs: Date.now() - startedAt,
      error: resolveErrorMessage(error)
    });
    throw error;
  }
}
/**
 * Ensure the agent runtime is installed/running inside the sandbox.
 * Thin delegation to the selected provider's ensureSandboxAgent.
 *
 * @returns the provider's agent descriptor (endpoint/token consumed later).
 */
export async function initEnsureAgentActivity(loopCtx: any, body: any, sandbox: any): Promise<any> {
  await setHandoffState(loopCtx, "init_ensure_agent", "ensuring sandbox agent");
  const runtime = getActorRuntimeContext();
  const resolvedProviderId = body?.providerId ?? loopCtx.state.providerId;
  const selectedProvider = runtime.providers.get(resolvedProviderId);
  return await selectedProvider.ensureSandboxAgent({
    workspaceId: loopCtx.state.workspaceId,
    sandboxId: sandbox.sandboxId
  });
}
/**
 * Start the sandbox-instance actor for the new sandbox and mark it ready.
 *
 * Never throws: failures are folded into a `{ ok: false, error }` result so
 * downstream activities can persist an error status instead of crashing init.
 * When the instance handle exposes resolve(), the resolved actor id is
 * returned for later persistence.
 */
export async function initStartSandboxInstanceActivity(
  loopCtx: any,
  body: any,
  sandbox: any,
  agent: any
): Promise<any> {
  await setHandoffState(loopCtx, "init_start_sandbox_instance", "starting sandbox runtime");
  try {
    const providerId = body?.providerId ?? loopCtx.state.providerId;
    const sandboxInstance = await getOrCreateSandboxInstance(
      loopCtx,
      loopCtx.state.workspaceId,
      providerId,
      sandbox.sandboxId,
      {
        workspaceId: loopCtx.state.workspaceId,
        providerId,
        sandboxId: sandbox.sandboxId
      }
    );
    await sandboxInstance.ensure({
      metadata: sandbox.metadata,
      status: "ready",
      agentEndpoint: agent.endpoint,
      agentToken: agent.token
    });
    // resolve() is optional on the handle; without it we record no actor id.
    const actorId = typeof (sandboxInstance as any).resolve === "function"
      ? await (sandboxInstance as any).resolve()
      : null;
    return {
      ok: true as const,
      actorId: typeof actorId === "string" ? actorId : null
    };
  } catch (error) {
    const detail = error instanceof Error ? error.message : String(error);
    return {
      ok: false as const,
      error: `sandbox-instance ensure failed: ${detail}`
    };
  }
}
/**
 * Create the initial agent session inside the sandbox.
 *
 * If the sandbox instance failed to start, returns a synthetic error session
 * ({ id: null, status: "error" }) instead of throwing, so the init flow can
 * write an error status to the DB. cwd comes from sandbox metadata if present.
 */
export async function initCreateSessionActivity(
  loopCtx: any,
  body: any,
  sandbox: any,
  sandboxInstanceReady: any
): Promise<any> {
  await setHandoffState(loopCtx, "init_create_session", "creating agent session");
  if (!sandboxInstanceReady.ok) {
    return {
      id: null,
      status: "error",
      error: sandboxInstanceReady.error ?? "sandbox instance is not ready"
    } as const;
  }
  const { config } = getActorRuntimeContext();
  const providerId = body?.providerId ?? loopCtx.state.providerId;
  const sandboxInstance = getSandboxInstance(loopCtx, loopCtx.state.workspaceId, providerId, sandbox.sandboxId);
  // Only use metadata.cwd when it is actually a string.
  const cwd =
    sandbox.metadata && typeof (sandbox.metadata as any).cwd === "string"
      ? ((sandbox.metadata as any).cwd as string)
      : undefined;
  return await sandboxInstance.createSession({
    prompt: buildAgentPrompt(loopCtx.state.task),
    cwd,
    agent: (loopCtx.state.agentType ?? config.default_agent) as any
  });
}
/**
 * Persist the init outcome: handoff status, the per-sandbox row, and the
 * runtime pointer row (active sandbox/session/cwd).
 *
 * A session is "healthy" when it has an id and is not in "error" status; the
 * handoff status becomes "running" or "error" accordingly, and only a healthy
 * session id is recorded as active.
 */
export async function initWriteDbActivity(
  loopCtx: any,
  body: any,
  sandbox: any,
  session: any,
  sandboxInstanceReady?: { actorId?: string | null }
): Promise<void> {
  await setHandoffState(loopCtx, "init_write_db", "persisting handoff runtime");
  const providerId = body?.providerId ?? loopCtx.state.providerId;
  const { config } = getActorRuntimeContext();
  const now = Date.now();
  const db = loopCtx.db;
  const sessionId = session?.id ?? null;
  const sessionHealthy = Boolean(sessionId) && session?.status !== "error";
  const activeSessionId = sessionHealthy ? sessionId : null;
  // Status message surfaces either success or the session's error detail.
  const statusMessage =
    sessionHealthy
      ? "session created"
      : session?.status === "error"
        ? (session.error ?? "session create failed")
        : "session unavailable";
  const activeCwd =
    sandbox.metadata && typeof (sandbox.metadata as any).cwd === "string"
      ? ((sandbox.metadata as any).cwd as string)
      : null;
  // Non-empty string actor ids only; anything else is stored as null.
  const sandboxActorId =
    typeof sandboxInstanceReady?.actorId === "string" && sandboxInstanceReady.actorId.length > 0
      ? sandboxInstanceReady.actorId
      : null;
  // 1) Main handoff row: final init status.
  await db
    .update(handoffTable)
    .set({
      providerId,
      status: sessionHealthy ? "running" : "error",
      agentType: loopCtx.state.agentType ?? config.default_agent,
      updatedAt: now
    })
    .where(eq(handoffTable.id, HANDOFF_ROW_ID))
    .run();
  // 2) Per-sandbox row: upsert keyed by sandboxId.
  await db
    .insert(handoffSandboxes)
    .values({
      sandboxId: sandbox.sandboxId,
      providerId,
      sandboxActorId,
      switchTarget: sandbox.switchTarget,
      cwd: activeCwd,
      statusMessage,
      createdAt: now,
      updatedAt: now
    })
    .onConflictDoUpdate({
      target: handoffSandboxes.sandboxId,
      set: {
        providerId,
        sandboxActorId,
        switchTarget: sandbox.switchTarget,
        cwd: activeCwd,
        statusMessage,
        updatedAt: now
      }
    })
    .run();
  // 3) Runtime pointer row: make this sandbox (and session if healthy) active.
  await db
    .insert(handoffRuntime)
    .values({
      id: HANDOFF_ROW_ID,
      activeSandboxId: sandbox.sandboxId,
      activeSessionId,
      activeSwitchTarget: sandbox.switchTarget,
      activeCwd,
      statusMessage,
      updatedAt: now
    })
    .onConflictDoUpdate({
      target: handoffRuntime.id,
      set: {
        activeSandboxId: sandbox.sandboxId,
        activeSessionId,
        activeSwitchTarget: sandbox.switchTarget,
        activeCwd,
        statusMessage,
        updatedAt: now
      }
    })
    .run();
}
/**
 * Start the per-session status-sync poller actor.
 *
 * Skipped entirely when there is no session id or the session errored.
 * After starting, forces one immediate poll so status is fresh without
 * waiting a full interval.
 */
export async function initStartStatusSyncActivity(
  loopCtx: any,
  body: any,
  sandbox: any,
  session: any
): Promise<void> {
  const sessionId = session?.id ?? null;
  if (!sessionId || session?.status === "error") {
    return;
  }
  await setHandoffState(loopCtx, "init_start_status_sync", "starting session status sync");
  const providerId = body?.providerId ?? loopCtx.state.providerId;
  const sync = await getOrCreateHandoffStatusSync(
    loopCtx,
    loopCtx.state.workspaceId,
    loopCtx.state.repoId,
    loopCtx.state.handoffId,
    sandbox.sandboxId,
    sessionId,
    {
      workspaceId: loopCtx.state.workspaceId,
      repoId: loopCtx.state.repoId,
      handoffId: loopCtx.state.handoffId,
      providerId,
      sandboxId: sandbox.sandboxId,
      sessionId,
      // Poll every 2 seconds.
      intervalMs: 2_000
    }
  );
  await sync.start();
  await sync.force();
}
/**
 * Final init activity: on a healthy session, mark the handoff initialized and
 * append a "handoff.initialized" history event; otherwise record an error
 * state with a detail derived from the session result.
 */
export async function initCompleteActivity(loopCtx: any, body: any, sandbox: any, session: any): Promise<void> {
  const providerId = body?.providerId ?? loopCtx.state.providerId;
  const sessionId = session?.id ?? null;
  const sessionHealthy = Boolean(sessionId) && session?.status !== "error";
  if (sessionHealthy) {
    await setHandoffState(loopCtx, "init_complete", "handoff initialized");
    const history = await getOrCreateHistory(loopCtx, loopCtx.state.workspaceId, loopCtx.state.repoId);
    await history.append({
      kind: "handoff.initialized",
      handoffId: loopCtx.state.handoffId,
      branchName: loopCtx.state.branchName,
      payload: { providerId, sandboxId: sandbox.sandboxId, sessionId }
    });
    loopCtx.state.initialized = true;
    return;
  }
  // Unhealthy session: surface the session error (or a generic fallback).
  const detail =
    session?.status === "error"
      ? (session.error ?? "session create failed")
      : "session unavailable";
  await setHandoffState(loopCtx, "error", detail);
  await appendHistory(loopCtx, "handoff.error", {
    detail,
    messages: [detail]
  });
  loopCtx.state.initialized = false;
}
/**
 * Workflow-level failure handler for init: force both DB rows into an error
 * state (upserting, since bootstrap may never have run) and append a
 * "handoff.error" history event carrying every collected error message.
 */
export async function initFailedActivity(loopCtx: any, error: unknown): Promise<void> {
  const now = Date.now();
  const detail = resolveErrorDetail(error);
  const messages = collectErrorMessages(error);
  const db = loopCtx.db;
  const { config, providers } = getActorRuntimeContext();
  // State may not have a provider yet if init failed very early.
  const providerId = loopCtx.state.providerId ?? providers.defaultProviderId();
  await db
    .insert(handoffTable)
    .values({
      id: HANDOFF_ROW_ID,
      branchName: loopCtx.state.branchName ?? null,
      title: loopCtx.state.title ?? null,
      task: loopCtx.state.task,
      providerId,
      status: "error",
      agentType: loopCtx.state.agentType ?? config.default_agent,
      createdAt: now,
      updatedAt: now
    })
    .onConflictDoUpdate({
      target: handoffTable.id,
      set: {
        branchName: loopCtx.state.branchName ?? null,
        title: loopCtx.state.title ?? null,
        task: loopCtx.state.task,
        providerId,
        status: "error",
        agentType: loopCtx.state.agentType ?? config.default_agent,
        updatedAt: now
      }
    })
    .run();
  // Clear the runtime pointers; the error detail becomes the status message.
  await db
    .insert(handoffRuntime)
    .values({
      id: HANDOFF_ROW_ID,
      activeSandboxId: null,
      activeSessionId: null,
      activeSwitchTarget: null,
      activeCwd: null,
      statusMessage: detail,
      updatedAt: now
    })
    .onConflictDoUpdate({
      target: handoffRuntime.id,
      set: {
        activeSandboxId: null,
        activeSessionId: null,
        activeSwitchTarget: null,
        activeCwd: null,
        statusMessage: detail,
        updatedAt: now
      }
    })
    .run();
  await appendHistory(loopCtx, "handoff.error", {
    detail,
    messages
  });
}

View file

@ -0,0 +1,88 @@
// @ts-nocheck
import { eq } from "drizzle-orm";
import { getActorRuntimeContext } from "../../context.js";
import { handoffRuntime, handoffSandboxes } from "../db/schema.js";
import { HANDOFF_ROW_ID, appendHistory, getCurrentRecord } from "./common.js";
/** Options for pushActiveBranchActivity. */
export interface PushActiveBranchOptions {
  // Free-form reason recorded in the history payload (e.g. "auto_submit_idle").
  reason?: string | null;
  // History event kind; defaults to "handoff.push".
  historyKind?: string;
}
/**
 * Push the handoff's branch from the active sandbox to origin.
 *
 * Requires an active sandbox with a known cwd and a resolved branch name.
 * Status rows are updated before and after the push so the UI shows progress,
 * and a history event (default kind "handoff.push") is appended on success.
 *
 * @throws when a precondition is missing or the push exits non-zero.
 */
export async function pushActiveBranchActivity(
  loopCtx: any,
  options: PushActiveBranchOptions = {}
): Promise<void> {
  const record = await getCurrentRecord(loopCtx);
  const activeSandboxId = record.activeSandboxId;
  const branchName = loopCtx.state.branchName ?? record.branchName;
  if (!activeSandboxId) {
    throw new Error("cannot push: no active sandbox");
  }
  if (!branchName) {
    throw new Error("cannot push: handoff branch is not set");
  }
  const activeSandbox =
    record.sandboxes.find((sandbox: any) => sandbox.sandboxId === activeSandboxId) ?? null;
  const providerId = activeSandbox?.providerId ?? record.providerId;
  const cwd = activeSandbox?.cwd ?? null;
  if (!cwd) {
    throw new Error("cannot push: active sandbox cwd is not set");
  }
  const { providers } = getActorRuntimeContext();
  const provider = providers.get(providerId);
  const now = Date.now();
  await loopCtx.db
    .update(handoffRuntime)
    .set({ statusMessage: `pushing branch ${branchName}`, updatedAt: now })
    .where(eq(handoffRuntime.id, HANDOFF_ROW_ID))
    .run();
  await loopCtx.db
    .update(handoffSandboxes)
    .set({ statusMessage: `pushing branch ${branchName}`, updatedAt: now })
    .where(eq(handoffSandboxes.sandboxId, activeSandboxId))
    .run();
  // NOTE(review): JSON.stringify is used as approximate shell quoting for cwd
  // and branch; it does not escape `$`, and double-quoted segments remain
  // subject to outer-shell expansion — confirm against executeCommand's
  // parsing semantics.
  const script = [
    "set -euo pipefail",
    `cd ${JSON.stringify(cwd)}`,
    "git rev-parse --verify HEAD >/dev/null",
    "git config credential.helper '!f() { echo username=x-access-token; echo password=${GH_TOKEN:-$GITHUB_TOKEN}; }; f'",
    `git push -u origin ${JSON.stringify(branchName)}`
  ].join("; ");
  // NOTE(review): the script is stringified again and joined into one command
  // string; verify provider.executeCommand expects a shell string rather than
  // an argv array before relying on this.
  const result = await provider.executeCommand({
    workspaceId: loopCtx.state.workspaceId,
    sandboxId: activeSandboxId,
    command: ["bash", "-lc", JSON.stringify(script)].join(" "),
    label: `git push ${branchName}`
  });
  if (result.exitCode !== 0) {
    throw new Error(`git push failed (${result.exitCode}): ${result.result}`);
  }
  const updatedAt = Date.now();
  await loopCtx.db
    .update(handoffRuntime)
    .set({ statusMessage: `push complete for ${branchName}`, updatedAt })
    .where(eq(handoffRuntime.id, HANDOFF_ROW_ID))
    .run();
  await loopCtx.db
    .update(handoffSandboxes)
    .set({ statusMessage: `push complete for ${branchName}`, updatedAt })
    .where(eq(handoffSandboxes.sandboxId, activeSandboxId))
    .run();
  await appendHistory(loopCtx, options.historyKind ?? "handoff.push", {
    reason: options.reason ?? null,
    branchName,
    sandboxId: activeSandboxId
  });
}

View file

@ -0,0 +1,31 @@
// Every queue the handoff workflow consumes. "handoff.command.*" entries are
// driven by user/API actions (workbench.* for UI operations);
// "handoff.status_sync.result" carries results from the status-sync poller.
export const HANDOFF_QUEUE_NAMES = [
  "handoff.command.initialize",
  "handoff.command.provision",
  "handoff.command.attach",
  "handoff.command.switch",
  "handoff.command.push",
  "handoff.command.sync",
  "handoff.command.merge",
  "handoff.command.archive",
  "handoff.command.kill",
  "handoff.command.get",
  "handoff.command.workbench.mark_unread",
  "handoff.command.workbench.rename_handoff",
  "handoff.command.workbench.rename_branch",
  "handoff.command.workbench.create_session",
  "handoff.command.workbench.rename_session",
  "handoff.command.workbench.set_session_unread",
  "handoff.command.workbench.update_draft",
  "handoff.command.workbench.change_model",
  "handoff.command.workbench.send_message",
  "handoff.command.workbench.stop_session",
  "handoff.command.workbench.sync_session_status",
  "handoff.command.workbench.close_session",
  "handoff.command.workbench.publish_pr",
  "handoff.command.workbench.revert_file",
  "handoff.status_sync.result"
] as const;
/**
 * Map a logical handoff queue name to the concrete workflow queue name.
 * Currently an identity mapping, kept as a seam for future renaming.
 */
export function handoffWorkflowQueueName(name: string): string {
  const queueName = name;
  return queueName;
}

View file

@ -0,0 +1,160 @@
// @ts-nocheck
import { eq } from "drizzle-orm";
import { getActorRuntimeContext } from "../../context.js";
import { logActorWarning, resolveErrorMessage } from "../../logging.js";
import { handoff as handoffTable, handoffRuntime, handoffSandboxes } from "../db/schema.js";
import { HANDOFF_ROW_ID, appendHistory, resolveErrorDetail } from "./common.js";
import { pushActiveBranchActivity } from "./push.js";
/** Map a session status to the handoff-level status; anything unexpected maps to "running". */
function mapSessionStatus(status: "running" | "idle" | "error") {
  switch (status) {
    case "idle":
      return "idle";
    case "error":
      return "error";
    default:
      return "running";
  }
}
/**
 * Apply a session status report from the status-sync poller.
 *
 * Returns true only when the ACTIVE session newly transitioned to "idle";
 * callers use that to trigger idle follow-ups (PR submit, notifications).
 *
 * NOTE(review): the duplicate check compares against a single
 * state.previousStatus that is only updated for the active session, so
 * reports from non-active sandboxes may be dropped as "duplicates" — confirm
 * this is intended.
 */
export async function statusUpdateActivity(loopCtx: any, body: any): Promise<boolean> {
  const newStatus = mapSessionStatus(body.status);
  const wasIdle = loopCtx.state.previousStatus === "idle";
  const didTransition = newStatus === "idle" && !wasIdle;
  const isDuplicateStatus = loopCtx.state.previousStatus === newStatus;
  if (isDuplicateStatus) {
    return false;
  }
  const db = loopCtx.db;
  const runtime = await db
    .select({
      activeSandboxId: handoffRuntime.activeSandboxId,
      activeSessionId: handoffRuntime.activeSessionId
    })
    .from(handoffRuntime)
    .where(eq(handoffRuntime.id, HANDOFF_ROW_ID))
    .get();
  // Only reports from the active sandbox+session move the handoff status.
  const isActive =
    runtime?.activeSandboxId === body.sandboxId && runtime?.activeSessionId === body.sessionId;
  if (isActive) {
    await db
      .update(handoffTable)
      .set({ status: newStatus, updatedAt: body.at })
      .where(eq(handoffTable.id, HANDOFF_ROW_ID))
      .run();
    await db
      .update(handoffRuntime)
      .set({ statusMessage: `session:${body.status}`, updatedAt: body.at })
      .where(eq(handoffRuntime.id, HANDOFF_ROW_ID))
      .run();
  }
  // The per-sandbox row is updated regardless of which sandbox reported.
  await db
    .update(handoffSandboxes)
    .set({ statusMessage: `session:${body.status}`, updatedAt: body.at })
    .where(eq(handoffSandboxes.sandboxId, body.sandboxId))
    .run();
  await appendHistory(loopCtx, "handoff.status", {
    status: body.status,
    sessionId: body.sessionId,
    sandboxId: body.sandboxId
  });
  if (isActive) {
    loopCtx.state.previousStatus = newStatus;
    const { driver } = getActorRuntimeContext();
    // Mirror the status into the tmux window title when a branch exists.
    if (loopCtx.state.branchName) {
      driver.tmux.setWindowStatus(loopCtx.state.branchName, newStatus);
    }
    return didTransition;
  }
  return false;
}
/**
 * On idle: push the branch and open a PR, at most once per handoff.
 *
 * Skips silently when a PR was already submitted (prSubmitted flag). The
 * pre-PR fetch is best-effort. Failures after that point are recorded in the
 * runtime status and as a "handoff.pr_create_failed" history event rather
 * than thrown, so the idle flow never crashes the workflow.
 *
 * @throws only when branch/title are missing (should not happen post-init).
 */
export async function idleSubmitPrActivity(loopCtx: any): Promise<void> {
  const { driver } = getActorRuntimeContext();
  const db = loopCtx.db;
  const self = await db
    .select({ prSubmitted: handoffTable.prSubmitted })
    .from(handoffTable)
    .where(eq(handoffTable.id, HANDOFF_ROW_ID))
    .get();
  // Idempotence guard: never submit a second PR for the same handoff.
  if (self && self.prSubmitted) return;
  try {
    await driver.git.fetch(loopCtx.state.repoLocalPath);
  } catch (error) {
    logActorWarning("handoff.status-sync", "fetch before PR submit failed", {
      workspaceId: loopCtx.state.workspaceId,
      repoId: loopCtx.state.repoId,
      handoffId: loopCtx.state.handoffId,
      error: resolveErrorMessage(error)
    });
  }
  if (!loopCtx.state.branchName || !loopCtx.state.title) {
    throw new Error("cannot submit PR before handoff has a branch and title");
  }
  try {
    // Push first so the PR branch exists on the remote.
    await pushActiveBranchActivity(loopCtx, {
      reason: "auto_submit_idle",
      historyKind: "handoff.push.auto"
    });
    const pr = await driver.github.createPr(
      loopCtx.state.repoLocalPath,
      loopCtx.state.branchName,
      loopCtx.state.title
    );
    await db
      .update(handoffTable)
      .set({ prSubmitted: 1, updatedAt: Date.now() })
      .where(eq(handoffTable.id, HANDOFF_ROW_ID))
      .run();
    await appendHistory(loopCtx, "handoff.step", {
      step: "pr_submit",
      handoffId: loopCtx.state.handoffId,
      branchName: loopCtx.state.branchName,
      prUrl: pr.url,
      prNumber: pr.number
    });
    await appendHistory(loopCtx, "handoff.pr_created", {
      handoffId: loopCtx.state.handoffId,
      branchName: loopCtx.state.branchName,
      prUrl: pr.url,
      prNumber: pr.number
    });
  } catch (error) {
    // Record the failure for the UI/history instead of propagating it.
    const detail = resolveErrorDetail(error);
    await db
      .update(handoffRuntime)
      .set({
        statusMessage: `pr submit failed: ${detail}`,
        updatedAt: Date.now()
      })
      .where(eq(handoffRuntime.id, HANDOFF_ROW_ID))
      .run();
    await appendHistory(loopCtx, "handoff.pr_create_failed", {
      handoffId: loopCtx.state.handoffId,
      branchName: loopCtx.state.branchName,
      error: detail
    });
  }
}
/**
 * Notify that the agent went idle. Requires both a configured notifications
 * handle and a branch name; otherwise this is a no-op.
 */
export async function idleNotifyActivity(loopCtx: any): Promise<void> {
  const { notifications } = getActorRuntimeContext();
  const branch = loopCtx.state.branchName;
  if (!notifications || !branch) {
    return;
  }
  await notifications.agentIdle(branch);
}

View file

@ -0,0 +1,10 @@
import { actorSqliteDb } from "../../../db/actor-sqlite.js";
import * as schema from "./schema.js";
import migrations from "./migrations.js";
// Actor-scoped SQLite handle for the history actor: binds the drizzle schema
// plus the generated migrations so each actor instance self-migrates on boot.
export const historyDb = actorSqliteDb({
  actorName: "history",
  schema,
  migrations,
  migrationsFolderUrl: new URL("./drizzle/", import.meta.url),
});

View file

@ -0,0 +1,7 @@
import { defineConfig } from "rivetkit/db/drizzle";
// drizzle-kit config for the history actor's DB. `out` receives generated
// migrations; keep both paths in sync with db.ts and schema.ts.
export default defineConfig({
  out: "./src/actors/history/db/drizzle",
  schema: "./src/actors/history/db/schema.ts",
});

View file

@ -0,0 +1,8 @@
CREATE TABLE `events` (
`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
`handoff_id` text,
`branch_name` text,
`kind` text NOT NULL,
`payload_json` text NOT NULL,
`created_at` integer NOT NULL
);

View file

@ -0,0 +1,70 @@
{
"version": "6",
"dialect": "sqlite",
"id": "9d9ebe3c-8341-449c-bd14-2b6fd62853a1",
"prevId": "00000000-0000-0000-0000-000000000000",
"tables": {
"events": {
"name": "events",
"columns": {
"id": {
"name": "id",
"type": "integer",
"primaryKey": true,
"notNull": true,
"autoincrement": true
},
"handoff_id": {
"name": "handoff_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"branch_name": {
"name": "branch_name",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"kind": {
"name": "kind",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"payload_json": {
"name": "payload_json",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
}
},
"views": {},
"enums": {},
"_meta": {
"schemas": {},
"tables": {},
"columns": {}
},
"internal": {
"indexes": {}
}
}

View file

@ -0,0 +1,13 @@
{
"version": "7",
"dialect": "sqlite",
"entries": [
{
"idx": 0,
"version": "6",
"when": 1770924375133,
"tag": "0000_watery_bushwacker",
"breakpoints": true
}
]
}

View file

@ -0,0 +1,29 @@
// This file is generated by src/actors/_scripts/generate-actor-migrations.ts.
// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql).
// Do not hand-edit this file.
const journal = {
"entries": [
{
"idx": 0,
"when": 1770924375133,
"tag": "0000_watery_bushwacker",
"breakpoints": true
}
]
} as const;
export default {
journal,
migrations: {
m0000: `CREATE TABLE \`events\` (
\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
\`handoff_id\` text,
\`branch_name\` text,
\`kind\` text NOT NULL,
\`payload_json\` text NOT NULL,
\`created_at\` integer NOT NULL
);
`,
} as const
};

View file

@ -0,0 +1,10 @@
import { integer, sqliteTable, text } from "rivetkit/db/drizzle";
// Append-only event log for a project's history actor.
// handoffId/branchName are nullable: callers may append events not tied to a
// specific handoff or branch. payloadJson stores a JSON.stringify'd payload;
// createdAt is a Date.now() epoch-millis timestamp.
export const events = sqliteTable("events", {
  id: integer("id").primaryKey({ autoIncrement: true }),
  handoffId: text("handoff_id"),
  branchName: text("branch_name"),
  kind: text("kind").notNull(),
  payloadJson: text("payload_json").notNull(),
  createdAt: integer("created_at").notNull(),
});

View file

@ -0,0 +1,111 @@
// @ts-nocheck
import { and, desc, eq } from "drizzle-orm";
import { actor, queue } from "rivetkit";
import { Loop, workflow } from "rivetkit/workflow";
import type { HistoryEvent } from "@openhandoff/shared";
import { selfHistory } from "../handles.js";
import { historyDb } from "./db/db.js";
import { events } from "./db/schema.js";
/** Creation input for a history actor: one instance per (workspace, repo). */
export interface HistoryInput {
  workspaceId: string;
  repoId: string;
}
/** Payload for the "history.command.append" queue. */
export interface AppendHistoryCommand {
  kind: string;
  // Optional association; stored as NULL when omitted.
  handoffId?: string;
  branchName?: string;
  // Serialized via JSON.stringify into events.payloadJson.
  payload: Record<string, unknown>;
}
/** Filters for listing history events (newest first). */
export interface ListHistoryParams {
  branch?: string;
  handoffId?: string;
  // Max rows returned; defaults to 100.
  limit?: number;
}
// The history workflow consumes a single append queue.
const HISTORY_QUEUE_NAMES = ["history.command.append"] as const;
/** Insert one event row; timestamps with Date.now() and JSON-encodes the payload. */
async function appendHistoryRow(loopCtx: any, body: AppendHistoryCommand): Promise<void> {
  const now = Date.now();
  await loopCtx.db
    .insert(events)
    .values({
      handoffId: body.handoffId ?? null,
      branchName: body.branchName ?? null,
      kind: body.kind,
      payloadJson: JSON.stringify(body.payload),
      createdAt: now
    })
    .run();
}
/**
 * History actor workflow: an endless loop that takes one completable append
 * command at a time, persists it in a step, and acks it. Serializing writes
 * through the queue keeps event insertion ordered.
 */
async function runHistoryWorkflow(ctx: any): Promise<void> {
  await ctx.loop("history-command-loop", async (loopCtx: any) => {
    const msg = await loopCtx.queue.next("next-history-command", {
      names: [...HISTORY_QUEUE_NAMES],
      completable: true
    });
    if (!msg) {
      // Queue wait timed out with nothing pending — just loop again.
      return Loop.continue(undefined);
    }
    if (msg.name === "history.command.append") {
      await loopCtx.step("append-history-row", async () => appendHistoryRow(loopCtx, msg.body as AppendHistoryCommand));
      await msg.complete({ ok: true });
    }
    return Loop.continue(undefined);
  });
}
/**
 * Append-only history actor, one per (workspace, repo).
 * Writes are serialized through the workflow queue; reads query the actor's
 * SQLite DB directly.
 */
export const history = actor({
  db: historyDb,
  queues: {
    "history.command.append": queue()
  },
  createState: (_c, input: HistoryInput) => ({
    workspaceId: input.workspaceId,
    repoId: input.repoId
  }),
  actions: {
    // Enqueue an append and wait (up to 15s) for the workflow to persist it.
    async append(c, command: AppendHistoryCommand): Promise<void> {
      const self = selfHistory(c);
      await self.send("history.command.append", command, { wait: true, timeout: 15_000 });
    },
    // List recent events (newest first), optionally filtered by handoff and/or
    // branch. payloadJson is returned raw (not parsed); workspace/repo ids are
    // stamped onto each row from actor state.
    async list(c, params?: ListHistoryParams): Promise<HistoryEvent[]> {
      const whereParts = [];
      if (params?.handoffId) {
        whereParts.push(eq(events.handoffId, params.handoffId));
      }
      if (params?.branch) {
        whereParts.push(eq(events.branchName, params.branch));
      }
      const base = c.db
        .select({
          id: events.id,
          handoffId: events.handoffId,
          branchName: events.branchName,
          kind: events.kind,
          payloadJson: events.payloadJson,
          createdAt: events.createdAt
        })
        .from(events);
      const rows = await (whereParts.length > 0 ? base.where(and(...whereParts)) : base)
        .orderBy(desc(events.createdAt))
        .limit(params?.limit ?? 100)
        .all();
      return rows.map((row) => ({
        ...row,
        workspaceId: c.state.workspaceId,
        repoId: c.state.repoId
      }));
    }
  },
  run: workflow(runHistoryWorkflow)
});

View file

@ -0,0 +1,54 @@
import { setup } from "rivetkit";
import { handoffStatusSync } from "./handoff-status-sync/index.js";
import { handoff } from "./handoff/index.js";
import { history } from "./history/index.js";
import { projectBranchSync } from "./project-branch-sync/index.js";
import { projectPrSync } from "./project-pr-sync/index.js";
import { project } from "./project/index.js";
import { sandboxInstance } from "./sandbox-instance/index.js";
import { workspace } from "./workspace/index.js";
/**
 * Resolve the rivetkit manager port from HF_RIVET_MANAGER_PORT (preferred)
 * or RIVETKIT_MANAGER_PORT, defaulting to 7750 when neither is set.
 *
 * @throws Error when the env value is not an integer in (0, 65535].
 */
function resolveManagerPort(): number {
  const raw = process.env.HF_RIVET_MANAGER_PORT ?? process.env.RIVETKIT_MANAGER_PORT;
  if (!raw) {
    return 7750;
  }
  const port = Number(raw);
  const valid = Number.isInteger(port) && port > 0 && port <= 65535;
  if (!valid) {
    throw new Error(`Invalid HF_RIVET_MANAGER_PORT/RIVETKIT_MANAGER_PORT: ${raw}`);
  }
  return port;
}
/**
 * Resolve the rivetkit manager bind host from HF_RIVET_MANAGER_HOST or
 * RIVETKIT_MANAGER_HOST (trimmed); defaults to all interfaces ("0.0.0.0").
 */
function resolveManagerHost(): string {
  const raw = process.env.HF_RIVET_MANAGER_HOST ?? process.env.RIVETKIT_MANAGER_HOST;
  const trimmed = raw?.trim() ?? "";
  return trimmed.length > 0 ? trimmed : "0.0.0.0";
}
// Central rivetkit registry: every actor type in the system plus the manager
// bind address, both resolvable via HF_RIVET_* / RIVETKIT_* env vars.
export const registry = setup({
  use: {
    workspace,
    project,
    handoff,
    sandboxInstance,
    history,
    projectPrSync,
    projectBranchSync,
    handoffStatusSync
  },
  managerPort: resolveManagerPort(),
  managerHost: resolveManagerHost()
});
export * from "./context.js";
export * from "./events.js";
export * from "./handoff-status-sync/index.js";
export * from "./handoff/index.js";
export * from "./history/index.js";
export * from "./keys.js";
export * from "./project-branch-sync/index.js";
export * from "./project-pr-sync/index.js";
export * from "./project/index.js";
export * from "./sandbox-instance/index.js";
export * from "./workspace/index.js";

View file

@ -0,0 +1,44 @@
// Hierarchical actor keys: each key is a path of alternating scope labels and
// ids, so child keys are built by extending their parent's key.
export type ActorKey = string[];

/** Root key: one workspace actor per workspace id. */
export function workspaceKey(workspaceId: string): ActorKey {
  return ["ws", workspaceId];
}

/** Project actor key, nested under its workspace. */
export function projectKey(workspaceId: string, repoId: string): ActorKey {
  return [...workspaceKey(workspaceId), "project", repoId];
}

/** Handoff actor key, nested under its project. */
export function handoffKey(workspaceId: string, repoId: string, handoffId: string): ActorKey {
  return [...projectKey(workspaceId, repoId), "handoff", handoffId];
}

/** Sandbox-instance actor key, scoped by provider rather than project. */
export function sandboxInstanceKey(
  workspaceId: string,
  providerId: string,
  sandboxId: string
): ActorKey {
  return [...workspaceKey(workspaceId), "provider", providerId, "sandbox", sandboxId];
}

/** History actor key: one event log per project. */
export function historyKey(workspaceId: string, repoId: string): ActorKey {
  return [...projectKey(workspaceId, repoId), "history"];
}

/** PR-sync poller key: one per project. */
export function projectPrSyncKey(workspaceId: string, repoId: string): ActorKey {
  return [...projectKey(workspaceId, repoId), "pr-sync"];
}

/** Branch-sync poller key: one per project. */
export function projectBranchSyncKey(workspaceId: string, repoId: string): ActorKey {
  return [...projectKey(workspaceId, repoId), "branch-sync"];
}

/**
 * Status-sync poller key. Includes sandbox + session so multiple
 * sandboxes/sessions can be tracked per handoff.
 */
export function handoffStatusSyncKey(
  workspaceId: string,
  repoId: string,
  handoffId: string,
  sandboxId: string,
  sessionId: string
): ActorKey {
  return [...handoffKey(workspaceId, repoId, handoffId), "status-sync", sandboxId, sessionId];
}

View file

@ -0,0 +1,31 @@
/** Extract a human-readable message from an arbitrary thrown value. */
export function resolveErrorMessage(error: unknown): string {
  return error instanceof Error ? error.message : String(error);
}
/** True when the error message matches rivetkit's "Actor not found:" error. */
export function isActorNotFoundError(error: unknown): boolean {
  const message = resolveErrorMessage(error);
  return message.includes("Actor not found:");
}
/** Return the stack trace when available, otherwise undefined. */
export function resolveErrorStack(error: unknown): string | undefined {
  if (!(error instanceof Error)) {
    return undefined;
  }
  return typeof error.stack === "string" ? error.stack : undefined;
}
/**
 * Emit a structured warning for actor-level issues. Output keeps the
 * "[openhandoff][actor:warn]" prefix for grepability; context keys are merged
 * into the payload alongside scope and message.
 */
export function logActorWarning(
  scope: string,
  message: string,
  context?: Record<string, unknown>
): void {
  const payload = { scope, message, ...(context ?? {}) };
  // eslint-disable-next-line no-console
  console.warn("[openhandoff][actor:warn]", payload);
}

View file

@ -0,0 +1,208 @@
import { Loop } from "rivetkit/workflow";
import { normalizeMessages } from "../services/queue.js";
/** Mutable control state shared by polling loops. */
export interface PollingControlState {
  // Milliseconds between polls; clamped to >= 500 by the loops below.
  intervalMs: number;
  // Whether timed polls should fire; "force" works regardless.
  running: boolean;
}
/** Queue names for the four control commands a polling loop accepts. */
export interface PollingControlQueueNames {
  start: string;
  stop: string;
  setInterval: string;
  force: string;
}
/** A completable queue message as consumed by the polling loops. */
export interface PollingQueueMessage {
  name: string;
  body: unknown;
  complete(response: unknown): Promise<void>;
}
// Minimal actor-context surface needed by runPollingControlLoop.
interface PollingActorContext<TState extends PollingControlState> {
  state: TState;
  abortSignal: AbortSignal;
  queue: {
    nextBatch(options: {
      names: readonly string[];
      timeout: number;
      count: number;
      completable: true;
    }): Promise<PollingQueueMessage[]>;
  };
}
// Configuration for both polling loop variants.
interface RunPollingOptions<TState extends PollingControlState> {
  control: PollingControlQueueNames;
  // Invoked on each timed tick and on every "force" command.
  onPoll(c: PollingActorContext<TState>): Promise<void>;
}
/**
 * Drive a poll-on-interval loop controlled by four queues
 * (start/stop/setInterval/force) until the actor is aborted.
 *
 * The queue wait doubles as the poll timer: when the batch wait times out
 * with no messages, a poll fires iff `running`. Control messages are applied
 * in order; "force" polls immediately regardless of the running flag.
 */
export async function runPollingControlLoop<TState extends PollingControlState>(
  c: PollingActorContext<TState>,
  options: RunPollingOptions<TState>
): Promise<void> {
  while (!c.abortSignal.aborted) {
    const messages = normalizeMessages(
      await c.queue.nextBatch({
        names: [
          options.control.start,
          options.control.stop,
          options.control.setInterval,
          options.control.force
        ],
        // Wait at most one interval (min 500ms) so timed polls stay on cadence.
        timeout: Math.max(500, c.state.intervalMs),
        count: 16,
        completable: true
      })
    ) as PollingQueueMessage[];
    if (messages.length === 0) {
      if (!c.state.running) {
        continue;
      }
      await options.onPoll(c);
      continue;
    }
    for (const msg of messages) {
      if (msg.name === options.control.start) {
        c.state.running = true;
        await msg.complete({ ok: true });
        continue;
      }
      if (msg.name === options.control.stop) {
        c.state.running = false;
        await msg.complete({ ok: true });
        continue;
      }
      if (msg.name === options.control.setInterval) {
        // Invalid/non-finite values leave the interval unchanged; valid ones
        // are clamped to a 500ms floor.
        const intervalMs = Number((msg.body as { intervalMs?: unknown })?.intervalMs);
        c.state.intervalMs = Number.isFinite(intervalMs) ? Math.max(500, intervalMs) : c.state.intervalMs;
        await msg.complete({ ok: true });
        continue;
      }
      if (msg.name === options.control.force) {
        await options.onPoll(c);
        await msg.complete({ ok: true });
      }
    }
  }
}
/**
 * Workflow-side actor context shape.
 *
 * NOTE(review): this interface appears unused in this module —
 * `runWorkflowPollingLoop` takes `ctx: any` and invokes `ctx.loop(name, fn)`,
 * which does not match this config-object signature. Confirm the actual
 * rivetkit/workflow context type and either adopt this interface or remove it.
 */
interface WorkflowPollingActorContext<TState extends PollingControlState> {
  state: TState;
  loop(config: {
    name: string;
    historyEvery: number;
    historyKeep: number;
    run(ctx: WorkflowPollingActorContext<TState>): Promise<unknown>;
  }): Promise<void>;
}

/** Queue message as seen inside a workflow loop (same shape as the base type). */
interface WorkflowPollingQueueMessage extends PollingQueueMessage {}

/** Context passed to the body of each workflow loop iteration. */
interface WorkflowPollingLoopContext<TState extends PollingControlState> {
  state: TState;
  queue: {
    // Workflow variant of nextBatch: the first argument names the wait for
    // the workflow history (semantics from rivetkit/workflow — see usage below).
    nextBatch(name: string, options: {
      names: readonly string[];
      timeout: number;
      count: number;
      completable: true;
    }): Promise<WorkflowPollingQueueMessage[]>;
  };
  // A named, durable unit of work; accepts either a bare name + run callback
  // or a config object with an optional timeout.
  step<T>(
    nameOrConfig:
      | string
      | {
          name: string;
          timeout?: number;
          run: () => Promise<T>;
        },
    run?: () => Promise<T>,
  ): Promise<T>;
}
/**
 * Drive a queue-controlled polling loop inside a durable workflow.
 *
 * Mirrors `runPollingControlLoop`, but wraps every state mutation and poll
 * tick in a named workflow step (rivetkit/workflow primitives). Waits for
 * control messages with a timeout equal to the poll interval while running,
 * or 60s while paused; a timeout with no messages triggers one poll tick
 * when running.
 *
 * NOTE(review): `ctx` is typed `any` and is called as `ctx.loop(name, fn)`,
 * which does not match the `WorkflowPollingActorContext.loop(config)` shape
 * declared above — confirm the real workflow context type and tighten this
 * signature.
 *
 * NOTE(review): `msg.complete(...)` runs outside any step — confirm that
 * workflow replay cannot re-complete already-completed messages.
 */
export async function runWorkflowPollingLoop<TState extends PollingControlState>(
  ctx: any,
  options: RunPollingOptions<TState> & { loopName: string },
): Promise<void> {
  await ctx.loop(options.loopName, async (loopCtx: WorkflowPollingLoopContext<TState>) => {
    // Snapshot control state in a step so the rest of the iteration sees
    // consistent values; interval is coerced numeric and floored at 500ms.
    const control = await loopCtx.step("read-control-state", async () => ({
      intervalMs: Math.max(500, Number(loopCtx.state.intervalMs) || 500),
      running: Boolean(loopCtx.state.running),
    }));
    const messages = normalizeMessages(
      await loopCtx.queue.nextBatch("next-polling-control-batch", {
        names: [
          options.control.start,
          options.control.stop,
          options.control.setInterval,
          options.control.force,
        ],
        // While paused, wait up to a minute instead of spinning on a
        // possibly-short poll interval.
        timeout: control.running ? control.intervalMs : 60_000,
        count: 16,
        completable: true,
      }),
    ) as WorkflowPollingQueueMessage[];
    if (messages.length === 0) {
      // Timed out with no control traffic: run one poll tick if enabled.
      if (control.running) {
        await loopCtx.step({
          name: "poll-tick",
          timeout: 5 * 60_000,
          run: async () => {
            await options.onPoll(loopCtx as unknown as PollingActorContext<TState>);
          },
        });
      }
      return Loop.continue(undefined);
    }
    for (const msg of messages) {
      if (msg.name === options.control.start) {
        await loopCtx.step("control-start", async () => {
          loopCtx.state.running = true;
        });
        await msg.complete({ ok: true });
        continue;
      }
      if (msg.name === options.control.stop) {
        await loopCtx.step("control-stop", async () => {
          loopCtx.state.running = false;
        });
        await msg.complete({ ok: true });
        continue;
      }
      if (msg.name === options.control.setInterval) {
        await loopCtx.step("control-set-interval", async () => {
          // Non-numeric payloads are ignored; valid values clamped to >= 500ms.
          const intervalMs = Number((msg.body as { intervalMs?: unknown })?.intervalMs);
          loopCtx.state.intervalMs = Number.isFinite(intervalMs)
            ? Math.max(500, intervalMs)
            : loopCtx.state.intervalMs;
        });
        await msg.complete({ ok: true });
        continue;
      }
      if (msg.name === options.control.force) {
        // Force runs one tick immediately, even while paused.
        await loopCtx.step({
          name: "control-force",
          timeout: 5 * 60_000,
          run: async () => {
            await options.onPoll(loopCtx as unknown as PollingActorContext<TState>);
          },
        });
        await msg.complete({ ok: true });
      }
    }
    return Loop.continue(undefined);
  });
}

View file

@ -0,0 +1,181 @@
import { actor, queue } from "rivetkit";
import { workflow } from "rivetkit/workflow";
import type { GitDriver } from "../../driver.js";
import { getActorRuntimeContext } from "../context.js";
import { getProject, selfProjectBranchSync } from "../handles.js";
import { logActorWarning, resolveErrorMessage, resolveErrorStack } from "../logging.js";
import { type PollingControlState, runWorkflowPollingLoop } from "../polling.js";
import { parentLookupFromStack } from "../project/stack-model.js";
import { withRepoGitLock } from "../../services/repo-git-lock.js";
/** Construction input for the per-repo branch-sync actor. */
export interface ProjectBranchSyncInput {
  workspaceId: string;
  repoId: string;
  /** Local checkout path the git driver operates on. */
  repoPath: string;
  /** Initial poll cadence in milliseconds. */
  intervalMs: number;
}

/** Payload of the set-interval control message. */
interface SetIntervalCommand {
  intervalMs: number;
}

/** One remote branch plus derived stack/diff/conflict metadata. */
interface EnrichedBranchSnapshot {
  branchName: string;
  commitSha: string;
  /** Parent in the stack, or null when untracked. */
  parentBranch: string | null;
  /** Whether the stack tool reports this branch at all. */
  trackedInStack: boolean;
  /** Diff summary from the git driver, or null when the lookup failed. */
  diffStat: string | null;
  hasUnpushed: boolean;
  conflictsWithMain: boolean;
}

/** Polling flags plus the identifiers this actor syncs for. */
interface ProjectBranchSyncState extends PollingControlState {
  workspaceId: string;
  repoId: string;
  repoPath: string;
}

/** Queue names for the branch-sync polling control channel. */
const CONTROL = {
  start: "project.branch_sync.control.start",
  stop: "project.branch_sync.control.stop",
  setInterval: "project.branch_sync.control.set_interval",
  force: "project.branch_sync.control.force"
} as const;
/**
 * Build enriched snapshots for every remote branch of a repo.
 *
 * Runs under the per-repo git lock so concurrent actors don't interleave git
 * commands. Fetches first, then derives stack parent, diff summary, an
 * "unpushed" flag, and conflict status per branch. Every per-branch git
 * lookup is best-effort: a failure logs a warning and falls back to a
 * neutral value instead of aborting the whole sweep.
 *
 * @param workspaceId - Used only for log context.
 * @param repoId - Used only for log context.
 * @param repoPath - Local checkout the git driver operates on.
 * @param git - Git driver implementation.
 */
async function enrichBranches(
  workspaceId: string,
  repoId: string,
  repoPath: string,
  git: GitDriver
): Promise<EnrichedBranchSnapshot[]> {
  return await withRepoGitLock(repoPath, async () => {
    await git.fetch(repoPath);
    const branches = await git.listRemoteBranches(repoPath);
    const { driver } = getActorRuntimeContext();
    // Stack info is optional: a failing stack tool degrades to "no stack data".
    const stackEntries = await driver.stack.listStack(repoPath).catch(() => []);
    const parentByBranch = parentLookupFromStack(stackEntries);
    const enriched: EnrichedBranchSnapshot[] = [];
    const baseRef = await git.remoteDefaultBaseRef(repoPath);
    // Empty string when the base ref can't be resolved; that disables the
    // hasUnpushed computation below (Boolean guard short-circuits).
    const baseSha = await git.revParse(repoPath, baseRef).catch(() => "");
    for (const branch of branches) {
      let branchDiffStat: string | null = null;
      let branchHasUnpushed = false;
      let branchConflicts = false;
      try {
        branchDiffStat = await git.diffStatForBranch(repoPath, branch.branchName);
      } catch (error) {
        logActorWarning("project-branch-sync", "diffStatForBranch failed", {
          workspaceId,
          repoId,
          branchName: branch.branchName,
          error: resolveErrorMessage(error)
        });
        branchDiffStat = null;
      }
      try {
        // NOTE(review): this flags any branch whose origin head differs from
        // the default base ref. The name suggests "local commits not pushed";
        // confirm the intended semantics.
        const headSha = await git.revParse(repoPath, `origin/${branch.branchName}`);
        branchHasUnpushed = Boolean(baseSha && headSha && headSha !== baseSha);
      } catch (error) {
        logActorWarning("project-branch-sync", "revParse failed", {
          workspaceId,
          repoId,
          branchName: branch.branchName,
          error: resolveErrorMessage(error)
        });
        branchHasUnpushed = false;
      }
      try {
        branchConflicts = await git.conflictsWithMain(repoPath, branch.branchName);
      } catch (error) {
        logActorWarning("project-branch-sync", "conflictsWithMain failed", {
          workspaceId,
          repoId,
          branchName: branch.branchName,
          error: resolveErrorMessage(error)
        });
        branchConflicts = false;
      }
      enriched.push({
        branchName: branch.branchName,
        commitSha: branch.commitSha,
        parentBranch: parentByBranch.get(branch.branchName) ?? null,
        trackedInStack: parentByBranch.has(branch.branchName),
        diffStat: branchDiffStat,
        hasUnpushed: branchHasUnpushed,
        conflictsWithMain: branchConflicts
      });
    }
    return enriched;
  });
}
async function pollBranches(c: { state: ProjectBranchSyncState }): Promise<void> {
const { driver } = getActorRuntimeContext();
const enrichedItems = await enrichBranches(c.state.workspaceId, c.state.repoId, c.state.repoPath, driver.git);
const parent = getProject(c, c.state.workspaceId, c.state.repoId);
await parent.applyBranchSyncResult({ items: enrichedItems, at: Date.now() });
}
/**
 * Per-repo actor that periodically enriches branch data and reports it to the
 * parent project actor. Controlled through its own queues; the actions below
 * are thin wrappers that enqueue a control message and wait for the workflow
 * loop to acknowledge it.
 */
export const projectBranchSync = actor({
  // One queue per control message type; drained by the workflow loop.
  queues: {
    [CONTROL.start]: queue(),
    [CONTROL.stop]: queue(),
    [CONTROL.setInterval]: queue(),
    [CONTROL.force]: queue(),
  },
  options: {
    // Polling actors rely on timer-based wakeups; sleeping would pause the timer and stop polling.
    noSleep: true
  },
  // Polling starts enabled with the caller-provided interval.
  createState: (_c, input: ProjectBranchSyncInput): ProjectBranchSyncState => ({
    workspaceId: input.workspaceId,
    repoId: input.repoId,
    repoPath: input.repoPath,
    intervalMs: input.intervalMs,
    running: true
  }),
  actions: {
    /** Enable periodic polling; resolves once the loop acknowledges. */
    async start(c): Promise<void> {
      const self = selfProjectBranchSync(c);
      await self.send(CONTROL.start, {}, { wait: true, timeout: 15_000 });
    },
    /** Disable periodic polling; resolves once the loop acknowledges. */
    async stop(c): Promise<void> {
      const self = selfProjectBranchSync(c);
      await self.send(CONTROL.stop, {}, { wait: true, timeout: 15_000 });
    },
    /** Update the poll cadence (the loop clamps it to >= 500ms). */
    async setIntervalMs(c, payload: SetIntervalCommand): Promise<void> {
      const self = selfProjectBranchSync(c);
      await self.send(CONTROL.setInterval, payload, { wait: true, timeout: 15_000 });
    },
    /** Run one poll immediately; waits up to 5 minutes for completion. */
    async force(c): Promise<void> {
      const self = selfProjectBranchSync(c);
      await self.send(CONTROL.force, {}, { wait: true, timeout: 5 * 60_000 });
    }
  },
  // Poll failures are logged and swallowed so the loop keeps running.
  run: workflow(async (ctx) => {
    await runWorkflowPollingLoop<ProjectBranchSyncState>(ctx, {
      loopName: "project-branch-sync-loop",
      control: CONTROL,
      onPoll: async (loopCtx) => {
        try {
          await pollBranches(loopCtx);
        } catch (error) {
          logActorWarning("project-branch-sync", "poll failed", {
            error: resolveErrorMessage(error),
            stack: resolveErrorStack(error)
          });
        }
      }
    });
  })
});

View file

@ -0,0 +1,94 @@
import { actor, queue } from "rivetkit";
import { workflow } from "rivetkit/workflow";
import { getActorRuntimeContext } from "../context.js";
import { getProject, selfProjectPrSync } from "../handles.js";
import { logActorWarning, resolveErrorMessage, resolveErrorStack } from "../logging.js";
import { type PollingControlState, runWorkflowPollingLoop } from "../polling.js";
/** Construction input for the per-repo PR-sync actor. */
export interface ProjectPrSyncInput {
  workspaceId: string;
  repoId: string;
  /** Local checkout path handed to the GitHub driver. */
  repoPath: string;
  /** Initial poll cadence in milliseconds. */
  intervalMs: number;
}

/** Payload of the set-interval control message. */
interface SetIntervalCommand {
  intervalMs: number;
}

/** Polling flags plus the identifiers this actor syncs for. */
interface ProjectPrSyncState extends PollingControlState {
  workspaceId: string;
  repoId: string;
  repoPath: string;
}

/** Queue names for the PR-sync polling control channel. */
const CONTROL = {
  start: "project.pr_sync.control.start",
  stop: "project.pr_sync.control.stop",
  setInterval: "project.pr_sync.control.set_interval",
  force: "project.pr_sync.control.force"
} as const;
async function pollPrs(c: { state: ProjectPrSyncState }): Promise<void> {
const { driver } = getActorRuntimeContext();
const items = await driver.github.listPullRequests(c.state.repoPath);
const parent = getProject(c, c.state.workspaceId, c.state.repoId);
await parent.applyPrSyncResult({ items, at: Date.now() });
}
/**
 * Per-repo actor that periodically syncs pull-request state and reports it to
 * the parent project actor. Controlled through its own queues; the actions
 * below enqueue a control message and wait for the loop to acknowledge it.
 */
export const projectPrSync = actor({
  // One queue per control message type; drained by the workflow loop.
  queues: {
    [CONTROL.start]: queue(),
    [CONTROL.stop]: queue(),
    [CONTROL.setInterval]: queue(),
    [CONTROL.force]: queue(),
  },
  options: {
    // Polling actors rely on timer-based wakeups; sleeping would pause the timer and stop polling.
    noSleep: true
  },
  // Polling starts enabled with the caller-provided interval.
  createState: (_c, input: ProjectPrSyncInput): ProjectPrSyncState => ({
    workspaceId: input.workspaceId,
    repoId: input.repoId,
    repoPath: input.repoPath,
    intervalMs: input.intervalMs,
    running: true
  }),
  actions: {
    /** Enable periodic polling; resolves once the loop acknowledges. */
    async start(c): Promise<void> {
      const self = selfProjectPrSync(c);
      await self.send(CONTROL.start, {}, { wait: true, timeout: 15_000 });
    },
    /** Disable periodic polling; resolves once the loop acknowledges. */
    async stop(c): Promise<void> {
      const self = selfProjectPrSync(c);
      await self.send(CONTROL.stop, {}, { wait: true, timeout: 15_000 });
    },
    /** Update the poll cadence (the loop clamps it to >= 500ms). */
    async setIntervalMs(c, payload: SetIntervalCommand): Promise<void> {
      const self = selfProjectPrSync(c);
      await self.send(CONTROL.setInterval, payload, { wait: true, timeout: 15_000 });
    },
    /** Run one poll immediately; waits up to 5 minutes for completion. */
    async force(c): Promise<void> {
      const self = selfProjectPrSync(c);
      await self.send(CONTROL.force, {}, { wait: true, timeout: 5 * 60_000 });
    }
  },
  // Poll failures are logged and swallowed so the loop keeps running.
  run: workflow(async (ctx) => {
    await runWorkflowPollingLoop<ProjectPrSyncState>(ctx, {
      loopName: "project-pr-sync-loop",
      control: CONTROL,
      onPoll: async (loopCtx) => {
        try {
          await pollPrs(loopCtx);
        } catch (error) {
          logActorWarning("project-pr-sync", "poll failed", {
            error: resolveErrorMessage(error),
            stack: resolveErrorStack(error)
          });
        }
      }
    });
  })
});

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,10 @@
import { actorSqliteDb } from "../../../db/actor-sqlite.js";
import * as schema from "./schema.js";
import migrations from "./migrations.js";
/**
 * Per-instance SQLite database for the `project` actor, wired with the
 * Drizzle schema and the generated migration bundle. `migrationsFolderUrl`
 * points at the drizzle-kit output from which `migrations.js` is generated.
 */
export const projectDb = actorSqliteDb({
  actorName: "project",
  schema,
  migrations,
  migrationsFolderUrl: new URL("./drizzle/", import.meta.url),
});

View file

@ -0,0 +1,7 @@
import { defineConfig } from "rivetkit/db/drizzle";
// drizzle-kit config for the project actor's SQLite schema. The generated
// SQL/journal under `out` is the source for the checked-in migrations bundle.
export default defineConfig({
  out: "./src/actors/project/db/drizzle",
  schema: "./src/actors/project/db/schema.ts",
});

View file

@ -0,0 +1,27 @@
CREATE TABLE `branches` (
`branch_name` text PRIMARY KEY NOT NULL,
`commit_sha` text NOT NULL,
`worktree_path` text,
`parent_branch` text,
`diff_stat` text,
`has_unpushed` integer,
`conflicts_with_main` integer,
`first_seen_at` integer,
`last_seen_at` integer,
`updated_at` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE `pr_cache` (
`branch_name` text PRIMARY KEY NOT NULL,
`pr_number` integer NOT NULL,
`state` text NOT NULL,
`title` text NOT NULL,
`pr_url` text,
`pr_author` text,
`is_draft` integer,
`ci_status` text,
`review_status` text,
`reviewer` text,
`fetched_at` integer,
`updated_at` integer NOT NULL
);

View file

@ -0,0 +1,7 @@
CREATE TABLE `repo_meta` (
`id` integer PRIMARY KEY NOT NULL,
`remote_url` text NOT NULL,
`updated_at` integer NOT NULL
);
--> statement-breakpoint
ALTER TABLE `branches` DROP COLUMN `worktree_path`;

View file

@ -0,0 +1,6 @@
CREATE TABLE `handoff_index` (
`handoff_id` text PRIMARY KEY NOT NULL,
`branch_name` text,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL
);

View file

@ -0,0 +1 @@
ALTER TABLE `branches` ADD `tracked_in_stack` integer;

View file

@ -0,0 +1,192 @@
{
"version": "6",
"dialect": "sqlite",
"id": "03d97613-0108-4197-8660-5f2af5409fe6",
"prevId": "00000000-0000-0000-0000-000000000000",
"tables": {
"branches": {
"name": "branches",
"columns": {
"branch_name": {
"name": "branch_name",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"commit_sha": {
"name": "commit_sha",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"worktree_path": {
"name": "worktree_path",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"parent_branch": {
"name": "parent_branch",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"diff_stat": {
"name": "diff_stat",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"has_unpushed": {
"name": "has_unpushed",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"conflicts_with_main": {
"name": "conflicts_with_main",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"first_seen_at": {
"name": "first_seen_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"last_seen_at": {
"name": "last_seen_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"pr_cache": {
"name": "pr_cache",
"columns": {
"branch_name": {
"name": "branch_name",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"pr_number": {
"name": "pr_number",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"state": {
"name": "state",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"title": {
"name": "title",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"pr_url": {
"name": "pr_url",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"pr_author": {
"name": "pr_author",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"is_draft": {
"name": "is_draft",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"ci_status": {
"name": "ci_status",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"review_status": {
"name": "review_status",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"reviewer": {
"name": "reviewer",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"fetched_at": {
"name": "fetched_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
}
},
"views": {},
"enums": {},
"_meta": {
"schemas": {},
"tables": {},
"columns": {}
},
"internal": {
"indexes": {}
}
}

View file

@ -0,0 +1,216 @@
{
"version": "6",
"dialect": "sqlite",
"id": "e6d294b6-27ce-424b-a3b3-c100b42e628b",
"prevId": "03d97613-0108-4197-8660-5f2af5409fe6",
"tables": {
"branches": {
"name": "branches",
"columns": {
"branch_name": {
"name": "branch_name",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"commit_sha": {
"name": "commit_sha",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"parent_branch": {
"name": "parent_branch",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"diff_stat": {
"name": "diff_stat",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"has_unpushed": {
"name": "has_unpushed",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"conflicts_with_main": {
"name": "conflicts_with_main",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"first_seen_at": {
"name": "first_seen_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"last_seen_at": {
"name": "last_seen_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"pr_cache": {
"name": "pr_cache",
"columns": {
"branch_name": {
"name": "branch_name",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"pr_number": {
"name": "pr_number",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"state": {
"name": "state",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"title": {
"name": "title",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"pr_url": {
"name": "pr_url",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"pr_author": {
"name": "pr_author",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"is_draft": {
"name": "is_draft",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"ci_status": {
"name": "ci_status",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"review_status": {
"name": "review_status",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"reviewer": {
"name": "reviewer",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"fetched_at": {
"name": "fetched_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"repo_meta": {
"name": "repo_meta",
"columns": {
"id": {
"name": "id",
"type": "integer",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"remote_url": {
"name": "remote_url",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
}
},
"views": {},
"enums": {},
"_meta": {
"schemas": {},
"tables": {},
"columns": {}
},
"internal": {
"indexes": {}
}
}

View file

@ -0,0 +1,254 @@
{
"version": "6",
"dialect": "sqlite",
"id": "ac89870f-1630-4a16-9606-7b1225f6da8a",
"prevId": "e6d294b6-27ce-424b-a3b3-c100b42e628b",
"tables": {
"branches": {
"name": "branches",
"columns": {
"branch_name": {
"name": "branch_name",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"commit_sha": {
"name": "commit_sha",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"parent_branch": {
"name": "parent_branch",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"diff_stat": {
"name": "diff_stat",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"has_unpushed": {
"name": "has_unpushed",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"conflicts_with_main": {
"name": "conflicts_with_main",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"first_seen_at": {
"name": "first_seen_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"last_seen_at": {
"name": "last_seen_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"handoff_index": {
"name": "handoff_index",
"columns": {
"handoff_id": {
"name": "handoff_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"branch_name": {
"name": "branch_name",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"pr_cache": {
"name": "pr_cache",
"columns": {
"branch_name": {
"name": "branch_name",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"pr_number": {
"name": "pr_number",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"state": {
"name": "state",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"title": {
"name": "title",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"pr_url": {
"name": "pr_url",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"pr_author": {
"name": "pr_author",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"is_draft": {
"name": "is_draft",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"ci_status": {
"name": "ci_status",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"review_status": {
"name": "review_status",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"reviewer": {
"name": "reviewer",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"fetched_at": {
"name": "fetched_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"repo_meta": {
"name": "repo_meta",
"columns": {
"id": {
"name": "id",
"type": "integer",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"remote_url": {
"name": "remote_url",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
}
},
"views": {},
"enums": {},
"_meta": {
"schemas": {},
"tables": {},
"columns": {}
},
"internal": {
"indexes": {}
}
}

View file

@ -0,0 +1,34 @@
{
"version": "7",
"dialect": "sqlite",
"entries": [
{
"idx": 0,
"version": "6",
"when": 1770924376062,
"tag": "0000_stormy_the_hunter",
"breakpoints": true
},
{
"idx": 1,
"version": "6",
"when": 1770947252449,
"tag": "0001_wild_carlie_cooper",
"breakpoints": true
},
{
"idx": 2,
"version": "6",
"when": 1771276338465,
"tag": "0002_far_war_machine",
"breakpoints": true
},
{
"idx": 3,
"version": "6",
"when": 1771369000000,
"tag": "0003_busy_legacy",
"breakpoints": true
}
]
}

View file

@ -0,0 +1,81 @@
// This file is generated by src/actors/_scripts/generate-actor-migrations.ts.
// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql).
// Do not hand-edit this file.
const journal = {
"entries": [
{
"idx": 0,
"when": 1770924376062,
"tag": "0000_stormy_the_hunter",
"breakpoints": true
},
{
"idx": 1,
"when": 1770947252449,
"tag": "0001_wild_carlie_cooper",
"breakpoints": true
},
{
"idx": 2,
"when": 1771276338465,
"tag": "0002_far_war_machine",
"breakpoints": true
},
{
"idx": 3,
"when": 1771369000000,
"tag": "0003_busy_legacy",
"breakpoints": true
}
]
} as const;
export default {
journal,
migrations: {
m0000: `CREATE TABLE \`branches\` (
\`branch_name\` text PRIMARY KEY NOT NULL,
\`commit_sha\` text NOT NULL,
\`worktree_path\` text,
\`parent_branch\` text,
\`diff_stat\` text,
\`has_unpushed\` integer,
\`conflicts_with_main\` integer,
\`first_seen_at\` integer,
\`last_seen_at\` integer,
\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE \`pr_cache\` (
\`branch_name\` text PRIMARY KEY NOT NULL,
\`pr_number\` integer NOT NULL,
\`state\` text NOT NULL,
\`title\` text NOT NULL,
\`pr_url\` text,
\`pr_author\` text,
\`is_draft\` integer,
\`ci_status\` text,
\`review_status\` text,
\`reviewer\` text,
\`fetched_at\` integer,
\`updated_at\` integer NOT NULL
);
`,
m0001: `CREATE TABLE \`repo_meta\` (
\`id\` integer PRIMARY KEY NOT NULL,
\`remote_url\` text NOT NULL,
\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
ALTER TABLE \`branches\` DROP COLUMN \`worktree_path\`;`,
m0002: `CREATE TABLE \`handoff_index\` (
\`handoff_id\` text PRIMARY KEY NOT NULL,
\`branch_name\` text,
\`created_at\` integer NOT NULL,
\`updated_at\` integer NOT NULL
);
`,
m0003: `ALTER TABLE \`branches\` ADD \`tracked_in_stack\` integer;`,
} as const
};

View file

@ -0,0 +1,44 @@
import { integer, sqliteTable, text } from "rivetkit/db/drizzle";
// SQLite is per project actor instance (workspaceId+repoId), so no workspaceId/repoId columns needed.
// One row per remote branch, keyed by branch name.
export const branches = sqliteTable("branches", {
  branchName: text("branch_name").notNull().primaryKey(),
  commitSha: text("commit_sha").notNull(),
  parentBranch: text("parent_branch"),
  // 0/1 flag: whether the stack tool tracks this branch.
  trackedInStack: integer("tracked_in_stack"),
  diffStat: text("diff_stat"),
  // 0/1 flags from branch enrichment; null when never computed.
  hasUnpushed: integer("has_unpushed"),
  conflictsWithMain: integer("conflicts_with_main"),
  firstSeenAt: integer("first_seen_at"),
  lastSeenAt: integer("last_seen_at"),
  updatedAt: integer("updated_at").notNull(),
});

// Repo-level metadata (presumably a single row keyed by a fixed id — confirm
// against the writer in actions).
export const repoMeta = sqliteTable("repo_meta", {
  id: integer("id").primaryKey(),
  remoteUrl: text("remote_url").notNull(),
  updatedAt: integer("updated_at").notNull(),
});

// Cached pull-request state, one row per branch.
export const prCache = sqliteTable("pr_cache", {
  branchName: text("branch_name").notNull().primaryKey(),
  prNumber: integer("pr_number").notNull(),
  state: text("state").notNull(),
  title: text("title").notNull(),
  prUrl: text("pr_url"),
  prAuthor: text("pr_author"),
  // 0/1 flag.
  isDraft: integer("is_draft"),
  ciStatus: text("ci_status"),
  reviewStatus: text("review_status"),
  reviewer: text("reviewer"),
  fetchedAt: integer("fetched_at"),
  updatedAt: integer("updated_at").notNull(),
});

// Index of handoffs known to this project, keyed by handoff id.
export const handoffIndex = sqliteTable("handoff_index", {
  handoffId: text("handoff_id").notNull().primaryKey(),
  branchName: text("branch_name"),
  createdAt: integer("created_at").notNull(),
  updatedAt: integer("updated_at").notNull()
});

View file

@ -0,0 +1,28 @@
import { actor, queue } from "rivetkit";
import { workflow } from "rivetkit/workflow";
import { projectDb } from "./db/db.js";
import { PROJECT_QUEUE_NAMES, projectActions, runProjectWorkflow } from "./actions.js";
/** Construction input for a project actor (one per workspace+repo pair). */
export interface ProjectInput {
  workspaceId: string;
  repoId: string;
  remoteUrl: string;
}

/**
 * Project actor: owns the per-repo SQLite database and the project workflow.
 * Queue names, actions, and the workflow body live in ./actions.js.
 */
export const project = actor({
  db: projectDb,
  // One queue per declared project queue name.
  queues: Object.fromEntries(PROJECT_QUEUE_NAMES.map((name) => [name, queue()])),
  options: {
    // Actions may run long (git/network work); allow up to 5 minutes.
    actionTimeout: 5 * 60_000,
  },
  createState: (_c, input: ProjectInput) => ({
    workspaceId: input.workspaceId,
    repoId: input.repoId,
    remoteUrl: input.remoteUrl,
    // Presumably set once the local checkout exists on disk — see actions.js.
    localPath: null as string | null,
    // One-shot bootstrap flags consumed by the workflow — see actions.js.
    syncActorsStarted: false,
    handoffIndexHydrated: false
  }),
  actions: projectActions,
  run: workflow(runProjectWorkflow),
});

View file

@ -0,0 +1,69 @@
/** Minimal stack entry: a branch and its (possibly absent) parent. */
export interface StackEntry {
  branchName: string;
  parentBranch: string | null;
}

/** Row shape consumed by sortBranchesForOverview. */
export interface OrderedBranchRow {
  branchName: string;
  parentBranch: string | null;
  /** Millisecond timestamp used as the recency tiebreak. */
  updatedAt: number;
}
/**
 * Normalize a raw parent-branch value: trim whitespace and collapse missing,
 * blank, or self-referential parents to `null`.
 */
export function normalizeParentBranch(
  branchName: string,
  parentBranch: string | null | undefined
): string | null {
  const trimmed = parentBranch == null ? "" : parentBranch.trim();
  if (trimmed.length === 0 || trimmed === branchName) {
    return null;
  }
  return trimmed;
}
/**
 * Build a branch -> parent lookup from stack entries. Entries with a blank
 * branch name are skipped; each parent is normalized via normalizeParentBranch.
 * Later entries for the same branch overwrite earlier ones.
 */
export function parentLookupFromStack(entries: StackEntry[]): Map<string, string | null> {
  const lookup = new Map<string, string | null>();
  for (const { branchName, parentBranch } of entries) {
    const name = branchName.trim();
    if (name.length === 0) {
      continue;
    }
    lookup.set(name, normalizeParentBranch(name, parentBranch));
  }
  return lookup;
}
/**
 * Order branch rows for the overview: shallower stack depth first, then most
 * recently updated first, then branch name as a stable tiebreak. The input
 * array is not mutated.
 *
 * Depth is the distance to the topmost ancestor present in `rows`. A cyclic
 * parent chain resolves to a sentinel depth so it sorts after acyclic chains;
 * acyclic depth is capped just below the sentinel.
 */
export function sortBranchesForOverview(rows: OrderedBranchRow[]): OrderedBranchRow[] {
  const CYCLE_DEPTH = 999;
  const MAX_DEPTH = 998;
  const rowsByName = new Map(rows.map((row) => [row.branchName, row]));
  const knownDepths = new Map<string, number>();
  const inProgress = new Set<string>();

  function resolveDepth(name: string): number {
    const memoized = knownDepths.get(name);
    if (memoized !== undefined) {
      return memoized;
    }
    if (inProgress.has(name)) {
      // Re-entered while still resolving: the parent chain loops back.
      return CYCLE_DEPTH;
    }
    inProgress.add(name);
    const parent = rowsByName.get(name)?.parentBranch;
    let depth = 0;
    if (parent != null && parent !== "" && parent !== name && rowsByName.has(parent)) {
      depth = Math.min(MAX_DEPTH, resolveDepth(parent) + 1);
    }
    inProgress.delete(name);
    knownDepths.set(name, depth);
    return depth;
  }

  return [...rows].sort((left, right) => {
    const leftDepth = resolveDepth(left.branchName);
    const rightDepth = resolveDepth(right.branchName);
    if (leftDepth !== rightDepth) {
      return leftDepth - rightDepth;
    }
    if (left.updatedAt !== right.updatedAt) {
      return right.updatedAt - left.updatedAt;
    }
    return left.branchName.localeCompare(right.branchName);
  });
}

View file

@ -0,0 +1,10 @@
import { actorSqliteDb } from "../../../db/actor-sqlite.js";
import * as schema from "./schema.js";
import migrations from "./migrations.js";
/**
 * Per-instance SQLite database for the `sandbox-instance` actor, wired with
 * the Drizzle schema and the generated migration bundle. `migrationsFolderUrl`
 * points at the drizzle-kit output from which `migrations.js` is generated.
 */
export const sandboxInstanceDb = actorSqliteDb({
  actorName: "sandbox-instance",
  schema,
  migrations,
  migrationsFolderUrl: new URL("./drizzle/", import.meta.url),
});

View file

@ -0,0 +1,7 @@
import { defineConfig } from "rivetkit/db/drizzle";
// drizzle-kit config for the sandbox-instance actor's SQLite schema. The
// generated SQL/journal under `out` feeds the checked-in migrations bundle.
export default defineConfig({
  out: "./src/actors/sandbox-instance/db/drizzle",
  schema: "./src/actors/sandbox-instance/db/schema.ts",
});

View file

@ -0,0 +1,6 @@
CREATE TABLE `sandbox_instance` (
`id` integer PRIMARY KEY NOT NULL,
`metadata_json` text NOT NULL,
`status` text NOT NULL,
`updated_at` integer NOT NULL
);

View file

@ -0,0 +1,27 @@
CREATE TABLE `sandbox_sessions` (
`id` text PRIMARY KEY NOT NULL,
`agent` text NOT NULL,
`agent_session_id` text NOT NULL,
`last_connection_id` text NOT NULL,
`created_at` integer NOT NULL,
`destroyed_at` integer,
`session_init_json` text
);
--> statement-breakpoint
CREATE TABLE `sandbox_session_events` (
`id` text PRIMARY KEY NOT NULL,
`session_id` text NOT NULL,
`event_index` integer NOT NULL,
`created_at` integer NOT NULL,
`connection_id` text NOT NULL,
`sender` text NOT NULL,
`payload_json` text NOT NULL
);
--> statement-breakpoint
CREATE INDEX `sandbox_sessions_created_at_idx` ON `sandbox_sessions` (`created_at`);
--> statement-breakpoint
CREATE INDEX `sandbox_session_events_session_id_event_index_idx` ON `sandbox_session_events` (`session_id`,`event_index`);
--> statement-breakpoint
CREATE INDEX `sandbox_session_events_session_id_created_at_idx` ON `sandbox_session_events` (`session_id`,`created_at`);

View file

@ -0,0 +1,56 @@
{
"version": "6",
"dialect": "sqlite",
"id": "ef8a919c-64f0-46d9-b8ed-a15f039e6ba7",
"prevId": "00000000-0000-0000-0000-000000000000",
"tables": {
"sandbox_instance": {
"name": "sandbox_instance",
"columns": {
"id": {
"name": "id",
"type": "integer",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"metadata_json": {
"name": "metadata_json",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"status": {
"name": "status",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
}
},
"views": {},
"enums": {},
"_meta": {
"schemas": {},
"tables": {},
"columns": {}
},
"internal": {
"indexes": {}
}
}

View file

@ -0,0 +1,20 @@
{
"version": "7",
"dialect": "sqlite",
"entries": [
{
"idx": 0,
"version": "6",
"when": 1770924375604,
"tag": "0000_broad_tyrannus",
"breakpoints": true
},
{
"idx": 1,
"version": "6",
"when": 1776482400000,
"tag": "0001_sandbox_sessions",
"breakpoints": true
}
]
}

View file

@ -0,0 +1,61 @@
// This file is generated by src/actors/_scripts/generate-actor-migrations.ts.
// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql).
// Do not hand-edit this file.
// NOTE(review): the journal entries below must stay in sync with
// drizzle/meta/_journal.json — regenerate via the script rather than editing.
const journal = {
  "entries": [
    {
      "idx": 0,
      "when": 1770924375604,
      "tag": "0000_broad_tyrannus",
      "breakpoints": true
    },
    {
      "idx": 1,
      "when": 1776482400000,
      "tag": "0001_sandbox_sessions",
      "breakpoints": true
    }
  ]
} as const;

export default {
  journal,
  migrations: {
    m0000: `CREATE TABLE \`sandbox_instance\` (
\`id\` integer PRIMARY KEY NOT NULL,
\`metadata_json\` text NOT NULL,
\`status\` text NOT NULL,
\`updated_at\` integer NOT NULL
);
`,
    m0001: `CREATE TABLE \`sandbox_sessions\` (
\`id\` text PRIMARY KEY NOT NULL,
\`agent\` text NOT NULL,
\`agent_session_id\` text NOT NULL,
\`last_connection_id\` text NOT NULL,
\`created_at\` integer NOT NULL,
\`destroyed_at\` integer,
\`session_init_json\` text
);
--> statement-breakpoint
CREATE TABLE \`sandbox_session_events\` (
\`id\` text PRIMARY KEY NOT NULL,
\`session_id\` text NOT NULL,
\`event_index\` integer NOT NULL,
\`created_at\` integer NOT NULL,
\`connection_id\` text NOT NULL,
\`sender\` text NOT NULL,
\`payload_json\` text NOT NULL
);
--> statement-breakpoint
CREATE INDEX \`sandbox_sessions_created_at_idx\` ON \`sandbox_sessions\` (\`created_at\`);
--> statement-breakpoint
CREATE INDEX \`sandbox_session_events_session_id_event_index_idx\` ON \`sandbox_session_events\` (\`session_id\`,\`event_index\`);
--> statement-breakpoint
CREATE INDEX \`sandbox_session_events_session_id_created_at_idx\` ON \`sandbox_session_events\` (\`session_id\`,\`created_at\`);
`,
  } as const
};

View file

@ -0,0 +1,31 @@
import { integer, sqliteTable, text } from "drizzle-orm/sqlite-core";
// SQLite is per sandbox-instance actor instance.

// Singleton row (id is always SANDBOX_ROW_ID = 1 in the actor) holding the
// sandbox status plus JSON-encoded metadata, which includes the persisted
// agentEndpoint/agentToken.
export const sandboxInstance = sqliteTable("sandbox_instance", {
  id: integer("id").primaryKey(),
  metadataJson: text("metadata_json").notNull(),
  status: text("status").notNull(),
  updatedAt: integer("updated_at").notNull(),
});

// Persist sandbox-agent sessions/events in SQLite instead of actor state so they survive
// serverless actor evictions and backend restarts.
export const sandboxSessions = sqliteTable("sandbox_sessions", {
  id: text("id").notNull().primaryKey(),
  agent: text("agent").notNull(),
  agentSessionId: text("agent_session_id").notNull(),
  lastConnectionId: text("last_connection_id").notNull(),
  createdAt: integer("created_at").notNull(),
  // Null while the session is live; set when the session is destroyed.
  destroyedAt: integer("destroyed_at"),
  // JSON-encoded session init payload, when available.
  sessionInitJson: text("session_init_json"),
});

export const sandboxSessionEvents = sqliteTable("sandbox_session_events", {
  id: text("id").notNull().primaryKey(),
  sessionId: text("session_id").notNull(),
  // Monotonic per-session ordering key used for pagination.
  eventIndex: integer("event_index").notNull(),
  createdAt: integer("created_at").notNull(),
  connectionId: text("connection_id").notNull(),
  sender: text("sender").notNull(),
  // JSON-encoded event payload.
  payloadJson: text("payload_json").notNull(),
});

View file

@ -0,0 +1,615 @@
import { setTimeout as delay } from "node:timers/promises";
import { eq } from "drizzle-orm";
import { actor, queue } from "rivetkit";
import { Loop, workflow } from "rivetkit/workflow";
import type { ProviderId } from "@openhandoff/shared";
import type { SessionEvent, SessionRecord } from "sandbox-agent";
import { sandboxInstanceDb } from "./db/db.js";
import { sandboxInstance as sandboxInstanceTable } from "./db/schema.js";
import { SandboxInstancePersistDriver } from "./persist.js";
import { getActorRuntimeContext } from "../context.js";
import { selfSandboxInstance } from "../handles.js";
import { logActorWarning, resolveErrorMessage } from "../logging.js";
import { expectQueueResponse } from "../../services/queue.js";
// Identity of the sandbox this actor manages; fixed at actor creation.
export interface SandboxInstanceInput {
  workspaceId: string;
  providerId: ProviderId;
  sandboxId: string;
}

// The sandbox_instance table holds a single row keyed by this constant id.
const SANDBOX_ROW_ID = 1;
// Retry budget for sandbox-agent createSession; backoff is linear (base * attempt).
const CREATE_SESSION_MAX_ATTEMPTS = 3;
const CREATE_SESSION_RETRY_BASE_MS = 1_000;
// Workflow step timeout for session creation (10 minutes).
const CREATE_SESSION_STEP_TIMEOUT_MS = 10 * 60_000;
/**
 * Map a raw sandbox-agent event payload to a coarse session status.
 * Returns null when the payload carries no status signal.
 */
function normalizeStatusFromEventPayload(
  payload: unknown,
): "running" | "idle" | "error" | null {
  if (!payload || typeof payload !== "object") {
    return null;
  }
  const envelope = payload as {
    error?: unknown;
    method?: unknown;
    result?: unknown;
  };
  // An explicit error field always wins.
  if (envelope.error) {
    return "error";
  }
  // A result with a non-empty stopReason marks the turn as finished.
  if (envelope.result && typeof envelope.result === "object") {
    const { stopReason } = envelope.result as { stopReason?: unknown };
    if (typeof stopReason === "string" && stopReason.length > 0) {
      return "idle";
    }
  }
  // Fall back to keyword heuristics on the notification method name.
  if (typeof envelope.method === "string") {
    const method = envelope.method.toLowerCase();
    if (method.includes("error") || method.includes("failed")) {
      return "error";
    }
    const idleMarkers = ["ended", "complete", "stopped"];
    if (idleMarkers.some((marker) => method.includes(marker))) {
      return "idle";
    }
  }
  return null;
}
/**
 * JSON-serialize a value, encoding BigInt (which JSON.stringify rejects)
 * as a decimal string.
 */
function stringifyJson(value: unknown): string {
  const replacer = (_key: string, item: unknown) =>
    typeof item === "bigint" ? item.toString() : item;
  return JSON.stringify(value, replacer);
}
/**
 * Parse the metadata JSON column; malformed JSON or non-object values
 * degrade to an empty record instead of throwing.
 */
function parseMetadata(metadataJson: string): Record<string, unknown> {
  let parsed: unknown;
  try {
    parsed = JSON.parse(metadataJson);
  } catch {
    return {};
  }
  // Matches the original check: any non-null object (arrays included) passes through.
  return parsed && typeof parsed === "object" ? (parsed as Record<string, unknown>) : {};
}
// Read the agent endpoint/token previously persisted in the sandbox_instance
// metadata row. Returns null when the row is missing, the endpoint is blank,
// or the DB read fails — callers then fall back to a fresh provider lookup.
async function loadPersistedAgentConfig(c: any): Promise<{ endpoint: string; token?: string } | null> {
  try {
    const row = await c.db
      .select({ metadataJson: sandboxInstanceTable.metadataJson })
      .from(sandboxInstanceTable)
      .where(eq(sandboxInstanceTable.id, SANDBOX_ROW_ID))
      .get();
    if (row?.metadataJson) {
      const metadata = parseMetadata(row.metadataJson);
      const endpoint = typeof metadata.agentEndpoint === "string" ? metadata.agentEndpoint.trim() : "";
      const token = typeof metadata.agentToken === "string" ? metadata.agentToken.trim() : "";
      if (endpoint) {
        // Token is optional; omit the key entirely when blank.
        return token ? { endpoint, token } : { endpoint };
      }
    }
  } catch {
    // Best-effort read: treat any failure as "nothing persisted".
    return null;
  }
  return null;
}
// Resolve a signed preview endpoint for the sandbox-agent inside a Daytona
// sandbox, starting the sandbox first if it is not already running.
async function loadFreshDaytonaAgentConfig(c: any): Promise<{ endpoint: string; token?: string }> {
  const { config, driver } = getActorRuntimeContext();
  const daytona = driver.daytona.createClient({
    apiUrl: config.providers.daytona.endpoint,
    apiKey: config.providers.daytona.apiKey,
  });
  const sandbox = await daytona.getSandbox(c.state.sandboxId);
  const state = String(sandbox.state ?? "unknown").toLowerCase();
  if (state !== "started" && state !== "running") {
    // 60 is the start timeout — NOTE(review): presumably seconds; confirm
    // against the Daytona client's startSandbox signature.
    await daytona.startSandbox(c.state.sandboxId, 60);
  }
  // Port 2468 — NOTE(review): presumably where the sandbox-agent listens;
  // confirm against the provider's sandbox setup.
  const preview = await daytona.getPreviewEndpoint(c.state.sandboxId, 2468);
  return preview.token ? { endpoint: preview.url, token: preview.token } : { endpoint: preview.url };
}
// Ask the provider registered for this actor's providerId to (re)provision
// the sandbox-agent for this sandbox and return its endpoint/token.
async function loadFreshProviderAgentConfig(c: any): Promise<{ endpoint: string; token?: string }> {
  const { providers } = getActorRuntimeContext();
  const provider = providers.get(c.state.providerId);
  return await provider.ensureSandboxAgent({
    workspaceId: c.state.workspaceId,
    sandboxId: c.state.sandboxId,
  });
}
/**
 * Resolve the sandbox-agent endpoint/token for this actor.
 *
 * Policy:
 * - "local": always refresh from the provider — local sandboxes are tied to
 *   the current backend process, so the sandbox-agent token can rotate on
 *   restart; persisted metadata cannot be trusted. (Checked first so we skip
 *   the pointless persisted-metadata DB read the original did here.)
 * - "daytona": prefer the persisted endpoint. Keep one stable signed preview
 *   endpoint per sandbox-instance actor; rotating preview URLs on every call
 *   fragments SDK client state (sessions/events) because client caching keys
 *   by endpoint.
 * - anything else: prefer persisted, otherwise ask the provider.
 */
async function loadAgentConfig(c: any): Promise<{ endpoint: string; token?: string }> {
  if (c.state.providerId === "local") {
    return await loadFreshProviderAgentConfig(c);
  }
  const persisted = await loadPersistedAgentConfig(c);
  if (persisted) {
    return persisted;
  }
  if (c.state.providerId === "daytona") {
    return await loadFreshDaytonaAgentConfig(c);
  }
  return await loadFreshProviderAgentConfig(c);
}
// Derive a coarse session status purely from persisted rows (no network):
// missing session => "error", destroyed => "idle"; otherwise scan the most
// recent events newest-first for a status signal and default to "idle".
async function derivePersistedSessionStatus(
  persist: SandboxInstancePersistDriver,
  sessionId: string,
): Promise<{ id: string; status: "running" | "idle" | "error" }> {
  const session = await persist.getSession(sessionId);
  if (!session) {
    return { id: sessionId, status: "error" };
  }
  if (session.destroyedAt) {
    return { id: sessionId, status: "idle" };
  }
  // listEvents with no cursor returns the tail of the event log (see the
  // persist driver's resolveEventListOffset).
  const events = await persist.listEvents({
    sessionId,
    limit: 25,
  });
  // Walk backwards so the newest conclusive event wins.
  for (let index = events.items.length - 1; index >= 0; index -= 1) {
    const event = events.items[index];
    if (!event) continue;
    const status = normalizeStatusFromEventPayload(event.payload);
    if (status) {
      return { id: sessionId, status };
    }
  }
  return { id: sessionId, status: "idle" };
}
/**
 * Decide whether a createSession failure is worth a quick retry.
 * Timeouts are deliberately NOT retried: ACP timeout errors are expensive and
 * usually deterministic for the same request; immediate retries spawn
 * additional sessions/processes and make recovery harder.
 */
function isTransientSessionCreateError(detail: string): boolean {
  const lowered = detail.toLowerCase();
  const timeoutMarkers = ["timed out", "timeout", "504", "gateway timeout"];
  if (timeoutMarkers.some((marker) => lowered.includes(marker))) {
    return false;
  }
  // Gateway/connection hiccups are the retryable class.
  const transientMarkers = ["502", "503", "bad gateway", "econnreset", "econnrefused"];
  return transientMarkers.some((marker) => lowered.includes(marker));
}
// Payload for persisting/refreshing the singleton sandbox metadata row.
interface EnsureSandboxCommand {
  metadata: Record<string, unknown>;
  status: string;
  agentEndpoint?: string;
  agentToken?: string;
}

// Health update; stored as "<status>:<message>" in the status column.
interface HealthSandboxCommand {
  status: string;
  message: string;
}

interface CreateSessionCommand {
  prompt: string;
  cwd?: string;
  agent?: "claude" | "codex" | "opencode";
}

// `id` is null when creation failed; `error` then carries the failure detail.
interface CreateSessionResult {
  id: string | null;
  status: "running" | "idle" | "error";
  error?: string;
}

interface ListSessionsCommand {
  cursor?: string;
  limit?: number;
}

interface ListSessionEventsCommand {
  sessionId: string;
  cursor?: string;
  limit?: number;
}

interface SendPromptCommand {
  sessionId: string;
  prompt: string;
  notification?: boolean;
}

interface SessionStatusCommand {
  sessionId: string;
}

interface SessionControlCommand {
  sessionId: string;
}
// One queue per command; the workflow loop selects across all of them so
// mutations are serialized through a single consumer.
const SANDBOX_INSTANCE_QUEUE_NAMES = [
  "sandboxInstance.command.ensure",
  "sandboxInstance.command.updateHealth",
  "sandboxInstance.command.destroy",
  "sandboxInstance.command.createSession",
  "sandboxInstance.command.sendPrompt",
  "sandboxInstance.command.cancelSession",
  "sandboxInstance.command.destroySession",
] as const;

type SandboxInstanceQueueName = (typeof SANDBOX_INSTANCE_QUEUE_NAMES)[number];

// Identity helper whose only job is to type-check queue-name literals at call sites.
function sandboxInstanceWorkflowQueueName(
  name: SandboxInstanceQueueName,
): SandboxInstanceQueueName {
  return name;
}
// Build a sandbox-agent client bound to this actor's SQLite persistence and
// the endpoint/token chosen by loadAgentConfig.
async function getSandboxAgentClient(c: any) {
  const { driver } = getActorRuntimeContext();
  const persist = new SandboxInstancePersistDriver(c.db);
  const { endpoint, token } = await loadAgentConfig(c);
  return driver.sandboxAgent.createClient({
    endpoint,
    token,
    persist,
  });
}
// Upsert the singleton sandbox row. Agent endpoint/token are embedded in the
// metadata JSON as explicit nulls when absent so stale persisted values are
// overwritten rather than silently kept.
async function ensureSandboxMutation(c: any, command: EnsureSandboxCommand): Promise<void> {
  const now = Date.now();
  const metadata = {
    ...command.metadata,
    agentEndpoint: command.agentEndpoint ?? null,
    agentToken: command.agentToken ?? null,
  };
  const metadataJson = stringifyJson(metadata);
  await c.db
    .insert(sandboxInstanceTable)
    .values({
      id: SANDBOX_ROW_ID,
      metadataJson,
      status: command.status,
      updatedAt: now
    })
    .onConflictDoUpdate({
      target: sandboxInstanceTable.id,
      set: {
        metadataJson,
        status: command.status,
        updatedAt: now
      }
    })
    .run();
}
// Record a health check result by packing status and message into the single
// status column as "<status>:<message>". No-op if the singleton row is absent.
async function updateHealthMutation(c: any, command: HealthSandboxCommand): Promise<void> {
  await c.db
    .update(sandboxInstanceTable)
    .set({
      status: `${command.status}:${command.message}`,
      updatedAt: Date.now()
    })
    .where(eq(sandboxInstanceTable.id, SANDBOX_ROW_ID))
    .run();
}
// Delete the singleton sandbox row. Session/event rows are left in place —
// NOTE(review): presumably intentional so history survives; confirm.
async function destroySandboxMutation(c: any): Promise<void> {
  await c.db
    .delete(sandboxInstanceTable)
    .where(eq(sandboxInstanceTable.id, SANDBOX_ROW_ID))
    .run();
}
// Create a sandbox-agent session with a bounded retry loop: up to
// CREATE_SESSION_MAX_ATTEMPTS attempts, retrying only transient gateway
// failures (see isTransientSessionCreateError) with linear backoff
// (base * attempt). Never throws — failures are reported in the result.
async function createSessionMutation(c: any, command: CreateSessionCommand): Promise<CreateSessionResult> {
  let lastDetail = "sandbox-agent createSession failed";
  let attemptsMade = 0;
  for (let attempt = 1; attempt <= CREATE_SESSION_MAX_ATTEMPTS; attempt += 1) {
    attemptsMade = attempt;
    try {
      // The client (and its endpoint/token) is re-resolved each attempt.
      const client = await getSandboxAgentClient(c);
      const session = await client.createSession({
        prompt: command.prompt,
        cwd: command.cwd,
        agent: command.agent,
      });
      return { id: session.id, status: session.status };
    } catch (error) {
      const detail = error instanceof Error ? error.message : String(error);
      lastDetail = detail;
      const retryable = isTransientSessionCreateError(detail);
      const canRetry = retryable && attempt < CREATE_SESSION_MAX_ATTEMPTS;
      if (!canRetry) {
        break;
      }
      const waitMs = CREATE_SESSION_RETRY_BASE_MS * attempt;
      logActorWarning("sandbox-instance", "createSession transient failure; retrying", {
        workspaceId: c.state.workspaceId,
        providerId: c.state.providerId,
        sandboxId: c.state.sandboxId,
        attempt,
        maxAttempts: CREATE_SESSION_MAX_ATTEMPTS,
        waitMs,
        error: detail
      });
      await delay(waitMs);
    }
  }
  const attemptLabel = attemptsMade === 1 ? "attempt" : "attempts";
  return {
    id: null,
    status: "error",
    error: `sandbox-agent createSession failed after ${attemptsMade} ${attemptLabel}: ${lastDetail}`
  };
}
// Forward a prompt to an existing sandbox-agent session.
async function sendPromptMutation(c: any, command: SendPromptCommand): Promise<void> {
  const client = await getSandboxAgentClient(c);
  await client.sendPrompt({
    sessionId: command.sessionId,
    prompt: command.prompt,
    notification: command.notification,
  });
}

// Ask the sandbox-agent to cancel the given session's current work.
async function cancelSessionMutation(c: any, command: SessionControlCommand): Promise<void> {
  const client = await getSandboxAgentClient(c);
  await client.cancelSession(command.sessionId);
}

// Ask the sandbox-agent to tear the given session down entirely.
async function destroySessionMutation(c: any, command: SessionControlCommand): Promise<void> {
  const client = await getSandboxAgentClient(c);
  await client.destroySession(command.sessionId);
}
// Workflow body: a single loop that drains all sandbox-instance command
// queues, runs each command inside a named workflow step (so it is
// checkpointed/replayable), completes the message, and continues.
// Note: mutations receive `loopCtx` as their `c`, so they use its `.db`/`.state`.
async function runSandboxInstanceWorkflow(ctx: any): Promise<void> {
  await ctx.loop("sandbox-instance-command-loop", async (loopCtx: any) => {
    const msg = await loopCtx.queue.next("next-sandbox-instance-command", {
      names: [...SANDBOX_INSTANCE_QUEUE_NAMES],
      completable: true,
    });
    if (!msg) {
      return Loop.continue(undefined);
    }
    if (msg.name === "sandboxInstance.command.ensure") {
      await loopCtx.step("sandbox-instance-ensure", async () =>
        ensureSandboxMutation(loopCtx, msg.body as EnsureSandboxCommand),
      );
      await msg.complete({ ok: true });
      return Loop.continue(undefined);
    }
    if (msg.name === "sandboxInstance.command.updateHealth") {
      await loopCtx.step("sandbox-instance-update-health", async () =>
        updateHealthMutation(loopCtx, msg.body as HealthSandboxCommand),
      );
      await msg.complete({ ok: true });
      return Loop.continue(undefined);
    }
    if (msg.name === "sandboxInstance.command.destroy") {
      await loopCtx.step("sandbox-instance-destroy", async () =>
        destroySandboxMutation(loopCtx),
      );
      await msg.complete({ ok: true });
      return Loop.continue(undefined);
    }
    if (msg.name === "sandboxInstance.command.createSession") {
      // createSession is the only command whose step result is sent back to
      // the caller (via msg.complete) and the only one with a custom timeout.
      const result = await loopCtx.step({
        name: "sandbox-instance-create-session",
        timeout: CREATE_SESSION_STEP_TIMEOUT_MS,
        run: async () => createSessionMutation(loopCtx, msg.body as CreateSessionCommand),
      });
      await msg.complete(result);
      return Loop.continue(undefined);
    }
    if (msg.name === "sandboxInstance.command.sendPrompt") {
      await loopCtx.step("sandbox-instance-send-prompt", async () =>
        sendPromptMutation(loopCtx, msg.body as SendPromptCommand),
      );
      await msg.complete({ ok: true });
      return Loop.continue(undefined);
    }
    if (msg.name === "sandboxInstance.command.cancelSession") {
      await loopCtx.step("sandbox-instance-cancel-session", async () =>
        cancelSessionMutation(loopCtx, msg.body as SessionControlCommand),
      );
      await msg.complete({ ok: true });
      return Loop.continue(undefined);
    }
    if (msg.name === "sandboxInstance.command.destroySession") {
      await loopCtx.step("sandbox-instance-destroy-session", async () =>
        destroySessionMutation(loopCtx, msg.body as SessionControlCommand),
      );
      await msg.complete({ ok: true });
      // Falls through to the shared Loop.continue below (same behavior as the
      // explicit returns above).
    }
    return Loop.continue(undefined);
  });
}
// The sandbox-instance actor: one instance per (workspace, provider, sandbox).
// Mutating actions are routed through the workflow queues (see
// runSandboxInstanceWorkflow) so they execute serially in checkpointed steps;
// read-only actions query the provider or the persisted SQLite rows directly.
export const sandboxInstance = actor({
  db: sandboxInstanceDb,
  // One queue per command name.
  queues: Object.fromEntries(SANDBOX_INSTANCE_QUEUE_NAMES.map((name) => [name, queue()])),
  options: {
    actionTimeout: 5 * 60_000,
  },
  createState: (_c, input: SandboxInstanceInput) => ({
    workspaceId: input.workspaceId,
    providerId: input.providerId,
    sandboxId: input.sandboxId,
  }),
  actions: {
    // Live provider-side state; only Daytona is queried, others report "unknown".
    async providerState(c: any): Promise<{ providerId: ProviderId; sandboxId: string; state: string; at: number }> {
      const at = Date.now();
      const { config, driver } = getActorRuntimeContext();
      if (c.state.providerId === "daytona") {
        const daytona = driver.daytona.createClient({
          apiUrl: config.providers.daytona.endpoint,
          apiKey: config.providers.daytona.apiKey,
        });
        const sandbox = await daytona.getSandbox(c.state.sandboxId);
        const state = String(sandbox.state ?? "unknown").toLowerCase();
        return { providerId: c.state.providerId, sandboxId: c.state.sandboxId, state, at };
      }
      return {
        providerId: c.state.providerId,
        sandboxId: c.state.sandboxId,
        state: "unknown",
        at,
      };
    },
    // Persist/refresh the sandbox metadata row (queued, waits for completion).
    async ensure(c, command: EnsureSandboxCommand): Promise<void> {
      const self = selfSandboxInstance(c);
      await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.ensure"), command, {
        wait: true,
        timeout: 60_000,
      });
    },
    // Record a health-check result (queued).
    async updateHealth(c, command: HealthSandboxCommand): Promise<void> {
      const self = selfSandboxInstance(c);
      await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.updateHealth"), command, {
        wait: true,
        timeout: 60_000,
      });
    },
    // Delete the sandbox metadata row (queued).
    async destroy(c): Promise<void> {
      const self = selfSandboxInstance(c);
      await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.destroy"), {}, {
        wait: true,
        timeout: 60_000,
      });
    },
    // Create a sandbox-agent session (queued); the workflow step's
    // CreateSessionResult is unwrapped from the queue response.
    async createSession(c: any, command: CreateSessionCommand): Promise<CreateSessionResult> {
      const self = selfSandboxInstance(c);
      return expectQueueResponse<CreateSessionResult>(
        await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.createSession"), command, {
          wait: true,
          timeout: 5 * 60_000,
        }),
      );
    },
    // List sessions from the live sandbox-agent; on failure, fall back to the
    // rows persisted in SQLite so reads survive agent outages.
    async listSessions(
      c: any,
      command?: ListSessionsCommand
    ): Promise<{ items: SessionRecord[]; nextCursor?: string }> {
      const persist = new SandboxInstancePersistDriver(c.db);
      try {
        const client = await getSandboxAgentClient(c);
        const page = await client.listSessions({
          cursor: command?.cursor,
          limit: command?.limit,
        });
        return {
          items: page.items,
          nextCursor: page.nextCursor,
        };
      } catch (error) {
        logActorWarning("sandbox-instance", "listSessions remote read failed; using persisted fallback", {
          workspaceId: c.state.workspaceId,
          providerId: c.state.providerId,
          sandboxId: c.state.sandboxId,
          error: resolveErrorMessage(error)
        });
        return await persist.listSessions({
          cursor: command?.cursor,
          limit: command?.limit,
        });
      }
    },
    // Events are served from SQLite only — no remote read.
    async listSessionEvents(
      c: any,
      command: ListSessionEventsCommand
    ): Promise<{ items: SessionEvent[]; nextCursor?: string }> {
      const persist = new SandboxInstancePersistDriver(c.db);
      return await persist.listEvents({
        sessionId: command.sessionId,
        cursor: command.cursor,
        limit: command.limit,
      });
    },
    // Send a prompt to an existing session (queued; long timeout to cover agent work).
    async sendPrompt(c, command: SendPromptCommand): Promise<void> {
      const self = selfSandboxInstance(c);
      await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.sendPrompt"), command, {
        wait: true,
        timeout: 5 * 60_000,
      });
    },
    // Cancel a session's current work (queued).
    async cancelSession(c, command: SessionControlCommand): Promise<void> {
      const self = selfSandboxInstance(c);
      await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.cancelSession"), command, {
        wait: true,
        timeout: 60_000,
      });
    },
    // Destroy a session (queued).
    async destroySession(c, command: SessionControlCommand): Promise<void> {
      const self = selfSandboxInstance(c);
      await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.destroySession"), command, {
        wait: true,
        timeout: 60_000,
      });
    },
    // Status derived from persisted rows only (cheap, no agent round-trip).
    async sessionStatus(
      c,
      command: SessionStatusCommand
    ): Promise<{ id: string; status: "running" | "idle" | "error" }> {
      return await derivePersistedSessionStatus(
        new SandboxInstancePersistDriver(c.db),
        command.sessionId,
      );
    }
  },
  run: workflow(runSandboxInstanceWorkflow),
});

View file

@ -0,0 +1,294 @@
import { and, asc, count, eq } from "drizzle-orm";
import type {
ListEventsRequest,
ListPage,
ListPageRequest,
SessionEvent,
SessionPersistDriver,
SessionRecord
} from "sandbox-agent";
import { sandboxSessionEvents, sandboxSessions } from "./db/schema.js";
// Retention caps: once exceeded, the oldest sessions/events are evicted.
const DEFAULT_MAX_SESSIONS = 1024;
const DEFAULT_MAX_EVENTS_PER_SESSION = 500;
// Page size used when a list request supplies no valid limit.
const DEFAULT_LIST_LIMIT = 100;
/**
 * Clamp a configured cap/limit to a usable positive integer.
 * undefined, NaN, infinities, and values below 1 all yield the fallback;
 * valid values are floored. (Explicit undefined guard lets TS narrow the
 * type, avoiding the original's `as number` cast.)
 */
function normalizeCap(value: number | undefined, fallback: number): number {
  if (value === undefined || !Number.isFinite(value) || value < 1) {
    return fallback;
  }
  return Math.floor(value);
}
/**
 * Decode an opaque pagination cursor into a non-negative integer offset.
 * Missing, empty, negative, or non-numeric cursors all mean "start at 0".
 */
function parseCursor(cursor: string | undefined): number {
  if (!cursor) return 0;
  const offset = Number.parseInt(cursor, 10);
  return Number.isFinite(offset) && offset >= 0 ? offset : 0;
}
/**
 * Pick the starting offset for an event-list query. With an explicit cursor
 * (even an empty string) the cursor wins; otherwise start at the tail so the
 * newest `limit` events come back. Cursor parsing is inlined from parseCursor:
 * non-numeric or negative cursors fall back to offset 0.
 */
export function resolveEventListOffset(params: {
  cursor?: string;
  total: number;
  limit: number;
}): number {
  const { cursor, total, limit } = params;
  if (cursor == null) {
    return Math.max(0, total - limit);
  }
  const offset = Number.parseInt(cursor, 10);
  return Number.isFinite(offset) && offset >= 0 ? offset : 0;
}
/**
 * JSON-serialize a value, encoding BigInt (which JSON.stringify rejects)
 * as a decimal string.
 */
function safeStringify(value: unknown): string {
  const replacer = (_key: string, item: unknown) =>
    typeof item === "bigint" ? item.toString() : item;
  return JSON.stringify(value, replacer);
}
/**
 * Parse a JSON column value, returning `fallback` for null/undefined/empty
 * input or malformed JSON instead of throwing.
 */
function safeParseJson<T>(value: string | null | undefined, fallback: T): T {
  if (value == null || value === "") {
    return fallback;
  }
  try {
    return JSON.parse(value) as T;
  } catch {
    return fallback;
  }
}
// Optional retention overrides; values below 1 (or NaN) fall back to the
// module defaults via normalizeCap.
export interface SandboxInstancePersistDriverOptions {
  maxSessions?: number;
  maxEventsPerSession?: number;
}
// Drizzle/SQLite-backed implementation of the sandbox-agent
// SessionPersistDriver: stores session rows and their event logs in the
// actor's database and enforces retention caps by evicting the oldest rows.
export class SandboxInstancePersistDriver implements SessionPersistDriver {
  private readonly maxSessions: number;
  private readonly maxEventsPerSession: number;
  constructor(
    private readonly db: any,
    options: SandboxInstancePersistDriverOptions = {}
  ) {
    this.maxSessions = normalizeCap(options.maxSessions, DEFAULT_MAX_SESSIONS);
    this.maxEventsPerSession = normalizeCap(
      options.maxEventsPerSession,
      DEFAULT_MAX_EVENTS_PER_SESSION
    );
  }
  // Fetch a single session by id; returns null when absent.
  async getSession(id: string): Promise<SessionRecord | null> {
    const row = await this.db
      .select({
        id: sandboxSessions.id,
        agent: sandboxSessions.agent,
        agentSessionId: sandboxSessions.agentSessionId,
        lastConnectionId: sandboxSessions.lastConnectionId,
        createdAt: sandboxSessions.createdAt,
        destroyedAt: sandboxSessions.destroyedAt,
        sessionInitJson: sandboxSessions.sessionInitJson,
      })
      .from(sandboxSessions)
      .where(eq(sandboxSessions.id, id))
      .get();
    if (!row) return null;
    return {
      id: row.id,
      agent: row.agent,
      agentSessionId: row.agentSessionId,
      lastConnectionId: row.lastConnectionId,
      createdAt: row.createdAt,
      destroyedAt: row.destroyedAt ?? undefined,
      sessionInit: safeParseJson(row.sessionInitJson, undefined),
    };
  }
  // Offset-paginated listing, oldest first (createdAt, then id as tiebreaker).
  // nextCursor is the next offset, omitted on the last page.
  async listSessions(request: ListPageRequest = {}): Promise<ListPage<SessionRecord>> {
    const offset = parseCursor(request.cursor);
    const limit = normalizeCap(request.limit, DEFAULT_LIST_LIMIT);
    const rows = await this.db
      .select({
        id: sandboxSessions.id,
        agent: sandboxSessions.agent,
        agentSessionId: sandboxSessions.agentSessionId,
        lastConnectionId: sandboxSessions.lastConnectionId,
        createdAt: sandboxSessions.createdAt,
        destroyedAt: sandboxSessions.destroyedAt,
        sessionInitJson: sandboxSessions.sessionInitJson,
      })
      .from(sandboxSessions)
      .orderBy(asc(sandboxSessions.createdAt), asc(sandboxSessions.id))
      .limit(limit)
      .offset(offset)
      .all();
    const items = rows.map((row) => ({
      id: row.id,
      agent: row.agent,
      agentSessionId: row.agentSessionId,
      lastConnectionId: row.lastConnectionId,
      createdAt: row.createdAt,
      destroyedAt: row.destroyedAt ?? undefined,
      sessionInit: safeParseJson(row.sessionInitJson, undefined),
    }));
    const totalRow = await this.db
      .select({ c: count() })
      .from(sandboxSessions)
      .get();
    const total = Number(totalRow?.c ?? 0);
    const nextOffset = offset + items.length;
    return {
      items,
      nextCursor: nextOffset < total ? String(nextOffset) : undefined,
    };
  }
  // Upsert a session row, then evict the oldest sessions (and all of their
  // events) once the session count exceeds maxSessions.
  async updateSession(session: SessionRecord): Promise<void> {
    const now = Date.now();
    await this.db
      .insert(sandboxSessions)
      .values({
        id: session.id,
        agent: session.agent,
        agentSessionId: session.agentSessionId,
        lastConnectionId: session.lastConnectionId,
        createdAt: session.createdAt ?? now,
        destroyedAt: session.destroyedAt ?? null,
        sessionInitJson: session.sessionInit ? safeStringify(session.sessionInit) : null,
      })
      .onConflictDoUpdate({
        target: sandboxSessions.id,
        set: {
          agent: session.agent,
          agentSessionId: session.agentSessionId,
          lastConnectionId: session.lastConnectionId,
          createdAt: session.createdAt ?? now,
          destroyedAt: session.destroyedAt ?? null,
          sessionInitJson: session.sessionInit ? safeStringify(session.sessionInit) : null,
        },
      })
      .run();
    // Evict oldest sessions beyond cap.
    const totalRow = await this.db
      .select({ c: count() })
      .from(sandboxSessions)
      .get();
    const total = Number(totalRow?.c ?? 0);
    const overflow = total - this.maxSessions;
    if (overflow <= 0) return;
    const toRemove = await this.db
      .select({ id: sandboxSessions.id })
      .from(sandboxSessions)
      .orderBy(asc(sandboxSessions.createdAt), asc(sandboxSessions.id))
      .limit(overflow)
      .all();
    for (const row of toRemove) {
      // Delete events first so a session row never outlives its log orphaned.
      await this.db.delete(sandboxSessionEvents).where(eq(sandboxSessionEvents.sessionId, row.id)).run();
      await this.db.delete(sandboxSessions).where(eq(sandboxSessions.id, row.id)).run();
    }
  }
  // Page through one session's events in eventIndex order. Without a cursor
  // the offset is the tail of the log, so the newest `limit` events return
  // first (see resolveEventListOffset).
  async listEvents(request: ListEventsRequest): Promise<ListPage<SessionEvent>> {
    const limit = normalizeCap(request.limit, DEFAULT_LIST_LIMIT);
    const totalRow = await this.db
      .select({ c: count() })
      .from(sandboxSessionEvents)
      .where(eq(sandboxSessionEvents.sessionId, request.sessionId))
      .get();
    const total = Number(totalRow?.c ?? 0);
    const offset = resolveEventListOffset({
      cursor: request.cursor,
      total,
      limit,
    });
    const rows = await this.db
      .select({
        id: sandboxSessionEvents.id,
        sessionId: sandboxSessionEvents.sessionId,
        eventIndex: sandboxSessionEvents.eventIndex,
        createdAt: sandboxSessionEvents.createdAt,
        connectionId: sandboxSessionEvents.connectionId,
        sender: sandboxSessionEvents.sender,
        payloadJson: sandboxSessionEvents.payloadJson,
      })
      .from(sandboxSessionEvents)
      .where(eq(sandboxSessionEvents.sessionId, request.sessionId))
      .orderBy(asc(sandboxSessionEvents.eventIndex), asc(sandboxSessionEvents.id))
      .limit(limit)
      .offset(offset)
      .all();
    const items: SessionEvent[] = rows.map((row) => ({
      id: row.id,
      eventIndex: row.eventIndex,
      sessionId: row.sessionId,
      createdAt: row.createdAt,
      connectionId: row.connectionId,
      sender: row.sender as any,
      payload: safeParseJson(row.payloadJson, null),
    }));
    const nextOffset = offset + items.length;
    return {
      items,
      nextCursor: nextOffset < total ? String(nextOffset) : undefined,
    };
  }
  // Idempotent insert (upsert keyed by event id), then trim the oldest events
  // for this session beyond maxEventsPerSession.
  async insertEvent(event: SessionEvent): Promise<void> {
    await this.db
      .insert(sandboxSessionEvents)
      .values({
        id: event.id,
        sessionId: event.sessionId,
        eventIndex: event.eventIndex,
        createdAt: event.createdAt,
        connectionId: event.connectionId,
        sender: event.sender,
        payloadJson: safeStringify(event.payload),
      })
      .onConflictDoUpdate({
        target: sandboxSessionEvents.id,
        set: {
          sessionId: event.sessionId,
          eventIndex: event.eventIndex,
          createdAt: event.createdAt,
          connectionId: event.connectionId,
          sender: event.sender,
          payloadJson: safeStringify(event.payload),
        },
      })
      .run();
    // Trim oldest events beyond cap.
    const totalRow = await this.db
      .select({ c: count() })
      .from(sandboxSessionEvents)
      .where(eq(sandboxSessionEvents.sessionId, event.sessionId))
      .get();
    const total = Number(totalRow?.c ?? 0);
    const overflow = total - this.maxEventsPerSession;
    if (overflow <= 0) return;
    const toRemove = await this.db
      .select({ id: sandboxSessionEvents.id })
      .from(sandboxSessionEvents)
      .where(eq(sandboxSessionEvents.sessionId, event.sessionId))
      .orderBy(asc(sandboxSessionEvents.eventIndex), asc(sandboxSessionEvents.id))
      .limit(overflow)
      .all();
    for (const row of toRemove) {
      await this.db
        .delete(sandboxSessionEvents)
        .where(and(eq(sandboxSessionEvents.sessionId, event.sessionId), eq(sandboxSessionEvents.id, row.id)))
        .run();
    }
  }
}

View file

@ -0,0 +1,689 @@
// @ts-nocheck
import { desc, eq } from "drizzle-orm";
import { Loop } from "rivetkit/workflow";
import type {
AddRepoInput,
CreateHandoffInput,
HandoffRecord,
HandoffSummary,
HandoffWorkbenchChangeModelInput,
HandoffWorkbenchCreateHandoffInput,
HandoffWorkbenchDiffInput,
HandoffWorkbenchRenameInput,
HandoffWorkbenchRenameSessionInput,
HandoffWorkbenchSelectInput,
HandoffWorkbenchSetSessionUnreadInput,
HandoffWorkbenchSendMessageInput,
HandoffWorkbenchSnapshot,
HandoffWorkbenchTabInput,
HandoffWorkbenchUpdateDraftInput,
HistoryEvent,
HistoryQueryInput,
ListHandoffsInput,
ProviderId,
RepoOverview,
RepoStackActionInput,
RepoStackActionResult,
RepoRecord,
SwitchResult,
WorkspaceUseInput
} from "@openhandoff/shared";
import { getActorRuntimeContext } from "../context.js";
import { getHandoff, getOrCreateHistory, getOrCreateProject, selfWorkspace } from "../handles.js";
import { logActorWarning, resolveErrorMessage } from "../logging.js";
import { normalizeRemoteUrl, repoIdFromRemote } from "../../services/repo.js";
import { handoffLookup, repos, providerProfiles } from "./db/schema.js";
import { agentTypeForModel } from "../handoff/workbench.js";
import { expectQueueResponse } from "../../services/queue.js";
// Minimal per-actor state: each workspace actor is pinned to one workspaceId.
interface WorkspaceState {
  workspaceId: string;
}

interface RefreshProviderProfilesCommand {
  providerId?: ProviderId;
}

interface GetHandoffInput {
  workspaceId: string;
  handoffId: string;
}

// Input for actions proxied through to a handoff actor, with an optional reason.
interface HandoffProxyActionInput extends GetHandoffInput {
  reason?: string;
}

interface RepoOverviewInput {
  workspaceId: string;
  repoId: string;
}
// Workspace mutations are serialized through these workflow queues.
const WORKSPACE_QUEUE_NAMES = [
  "workspace.command.addRepo",
  "workspace.command.createHandoff",
  "workspace.command.refreshProviderProfiles",
] as const;

type WorkspaceQueueName = (typeof WORKSPACE_QUEUE_NAMES)[number];

export { WORKSPACE_QUEUE_NAMES };

// Identity helper whose only job is to type-check queue-name literals at call sites.
export function workspaceWorkflowQueueName(name: WorkspaceQueueName): WorkspaceQueueName {
  return name;
}
/**
 * Guard that a command addressed to `workspaceId` actually reached the actor
 * bound to that workspace; throws on any mismatch.
 */
function assertWorkspace(c: { state: WorkspaceState }, workspaceId: string): void {
  const actual = c.state.workspaceId;
  if (actual === workspaceId) {
    return;
  }
  throw new Error(`Workspace actor mismatch: actor=${actual} command=${workspaceId}`);
}
// Map a handoffId to its owning repoId via the lookup table; throws when the
// handoff was never registered (see upsertHandoffLookupRow).
async function resolveRepoId(c: any, handoffId: string): Promise<string> {
  const row = await c.db
    .select({ repoId: handoffLookup.repoId })
    .from(handoffLookup)
    .where(eq(handoffLookup.handoffId, handoffId))
    .get();
  if (!row) {
    throw new Error(`Unknown handoff: ${handoffId} (not in lookup)`);
  }
  return row.repoId;
}
// Idempotently record which repo owns a handoff (upsert keyed by handoffId),
// so resolveRepoId can route handoff commands later.
async function upsertHandoffLookupRow(c: any, handoffId: string, repoId: string): Promise<void> {
  await c.db
    .insert(handoffLookup)
    .values({
      handoffId,
      repoId,
    })
    .onConflictDoUpdate({
      target: handoffLookup.handoffId,
      set: { repoId },
    })
    .run();
}
// Gather handoff summaries (archived included) across every repo in this
// workspace. Per-repo failures are logged and skipped so one bad repo cannot
// hide the rest; the combined list is sorted by updatedAt descending.
async function collectAllHandoffSummaries(c: any): Promise<HandoffSummary[]> {
  const repoRows = await c.db
    .select({ repoId: repos.repoId, remoteUrl: repos.remoteUrl })
    .from(repos)
    .orderBy(desc(repos.updatedAt))
    .all();
  const all: HandoffSummary[] = [];
  for (const row of repoRows) {
    try {
      const project = await getOrCreateProject(c, c.state.workspaceId, row.repoId, row.remoteUrl);
      const snapshot = await project.listHandoffSummaries({ includeArchived: true });
      all.push(...snapshot);
    } catch (error) {
      logActorWarning("workspace", "failed collecting handoffs for repo", {
        workspaceId: c.state.workspaceId,
        repoId: row.repoId,
        error: resolveErrorMessage(error)
      });
    }
  }
  // `all` is local, so in-place sort is safe.
  all.sort((a, b) => b.updatedAt - a.updatedAt);
  return all;
}
/**
 * Derive an "org/repo" display label from a git remote URL.
 *
 * Handles https URLs, bare host/path strings, and (fix) scp-style SSH remotes
 * such as `git@github.com:org/repo.git`, which previously fell through to the
 * raw-string fallback because `new URL` cannot parse them. Anything still
 * unparseable is returned verbatim.
 */
function repoLabelFromRemote(remoteUrl: string): string {
  try {
    // Normalize scp-like SSH remotes (user@host:path) to an https URL form.
    const scpMatch = /^[\w.-]+@([\w.-]+):(.+)$/.exec(remoteUrl);
    const normalized = scpMatch
      ? `https://${scpMatch[1]}/${scpMatch[2]}`
      : remoteUrl.startsWith("http")
        ? remoteUrl
        : `https://${remoteUrl}`;
    const url = new URL(normalized);
    const parts = url.pathname.replace(/\/+$/, "").split("/").filter(Boolean);
    if (parts.length >= 2) {
      // First two path segments are "org/repo"; strip a trailing ".git".
      return `${parts[0]}/${(parts[1] ?? "").replace(/\.git$/, "")}`;
    }
  } catch {
    // Unparseable remote: fall through to the raw string.
  }
  return remoteUrl;
}
/**
 * Assemble the full workbench snapshot: every repo, per-repo project
 * groupings, and a flat newest-first list of handoff workbench snapshots.
 * Failures for individual handoffs/repos are logged and skipped so a single
 * bad entry cannot take down the whole snapshot.
 */
async function buildWorkbenchSnapshot(c: any): Promise<HandoffWorkbenchSnapshot> {
  const repoRows = await c.db
    .select({ repoId: repos.repoId, remoteUrl: repos.remoteUrl, updatedAt: repos.updatedAt })
    .from(repos)
    .orderBy(desc(repos.updatedAt))
    .all();
  const handoffs: Array<any> = [];
  const projects: Array<any> = [];
  for (const row of repoRows) {
    const projectHandoffs: Array<any> = [];
    try {
      const project = await getOrCreateProject(c, c.state.workspaceId, row.repoId, row.remoteUrl);
      const summaries = await project.listHandoffSummaries({ includeArchived: true });
      for (const summary of summaries) {
        try {
          // Keep the handoff -> repo index fresh while we are enumerating anyway.
          await upsertHandoffLookupRow(c, summary.handoffId, row.repoId);
          const handoff = getHandoff(c, c.state.workspaceId, row.repoId, summary.handoffId);
          const snapshot = await handoff.getWorkbench({});
          handoffs.push(snapshot);
          projectHandoffs.push(snapshot);
        } catch (error) {
          logActorWarning("workspace", "failed collecting workbench handoff", {
            workspaceId: c.state.workspaceId,
            repoId: row.repoId,
            handoffId: summary.handoffId,
            error: resolveErrorMessage(error)
          });
        }
      }
      // Only repos that actually have handoffs are surfaced as "projects".
      if (projectHandoffs.length > 0) {
        projects.push({
          id: row.repoId,
          label: repoLabelFromRemote(row.remoteUrl),
          // NOTE(review): this reads projectHandoffs[0] before the sort below
          // runs, so it is the first collected snapshot, not necessarily the
          // newest — confirm intended.
          updatedAtMs: projectHandoffs[0]?.updatedAtMs ?? row.updatedAt,
          handoffs: projectHandoffs.sort((left, right) => right.updatedAtMs - left.updatedAtMs),
        });
      }
    } catch (error) {
      logActorWarning("workspace", "failed collecting workbench repo snapshot", {
        workspaceId: c.state.workspaceId,
        repoId: row.repoId,
        error: resolveErrorMessage(error)
      });
    }
  }
  // Newest-first ordering for both flat handoff list and project groupings.
  handoffs.sort((left, right) => right.updatedAtMs - left.updatedAtMs);
  projects.sort((left, right) => right.updatedAtMs - left.updatedAtMs);
  return {
    workspaceId: c.state.workspaceId,
    repos: repoRows.map((row) => ({
      id: row.repoId,
      label: repoLabelFromRemote(row.remoteUrl)
    })),
    projects,
    handoffs,
  };
}
/** Resolve a handoff actor handle given only its id (repo resolved via the lookup table). */
async function requireWorkbenchHandoff(c: any, handoffId: string) {
  return getHandoff(c, c.state.workspaceId, await resolveRepoId(c, handoffId), handoffId);
}
/**
 * Validate a git remote and register it as a workspace repo (idempotent
 * upsert keyed by repoId), then broadcast a workbench refresh.
 */
async function addRepoMutation(c: any, input: AddRepoInput): Promise<RepoRecord> {
  assertWorkspace(c, input.workspaceId);
  const remoteUrl = normalizeRemoteUrl(input.remoteUrl);
  if (!remoteUrl) {
    throw new Error("remoteUrl is required");
  }
  // Fail fast on unreachable remotes before persisting anything.
  const { driver } = getActorRuntimeContext();
  await driver.git.validateRemote(remoteUrl);
  const repoId = repoIdFromRemote(remoteUrl);
  const timestamp = Date.now();
  await c.db
    .insert(repos)
    .values({ repoId, remoteUrl, createdAt: timestamp, updatedAt: timestamp })
    .onConflictDoUpdate({
      target: repos.repoId,
      set: { remoteUrl, updatedAt: timestamp }
    })
    .run();
  await workspaceActions.notifyWorkbenchUpdated(c);
  return {
    workspaceId: c.state.workspaceId,
    repoId,
    remoteUrl,
    createdAt: timestamp,
    updatedAt: timestamp
  };
}
async function createHandoffMutation(c: any, input: CreateHandoffInput): Promise<HandoffRecord> {
assertWorkspace(c, input.workspaceId);
const { providers } = getActorRuntimeContext();
const providerId = input.providerId ?? providers.defaultProviderId();
const repoId = input.repoId;
const repoRow = await c.db
.select({ remoteUrl: repos.remoteUrl })
.from(repos)
.where(eq(repos.repoId, repoId))
.get();
if (!repoRow) {
throw new Error(`Unknown repo: ${repoId}`);
}
const remoteUrl = repoRow.remoteUrl;
await c.db
.insert(providerProfiles)
.values({
providerId,
profileJson: JSON.stringify({ providerId }),
updatedAt: Date.now()
})
.onConflictDoUpdate({
target: providerProfiles.providerId,
set: {
profileJson: JSON.stringify({ providerId }),
updatedAt: Date.now()
}
})
.run();
const project = await getOrCreateProject(c, c.state.workspaceId, repoId, remoteUrl);
await project.ensure({ remoteUrl });
const created = await project.createHandoff({
task: input.task,
providerId,
agentType: input.agentType ?? null,
explicitTitle: input.explicitTitle ?? null,
explicitBranchName: input.explicitBranchName ?? null,
onBranch: input.onBranch ?? null
});
await c.db
.insert(handoffLookup)
.values({
handoffId: created.handoffId,
repoId
})
.onConflictDoUpdate({
target: handoffLookup.handoffId,
set: { repoId }
})
.run();
const handoff = getHandoff(c, c.state.workspaceId, repoId, created.handoffId);
await handoff.provision({ providerId });
await workspaceActions.notifyWorkbenchUpdated(c);
return created;
}
/**
 * Upsert provider profile rows for the requested provider, or for every
 * available provider when no providerId is given.
 *
 * Fix: capture one timestamp and one serialized profile per upsert so the
 * insert and conflict-update branches cannot disagree on `updatedAt`
 * (Date.now() was previously evaluated separately for each branch).
 */
async function refreshProviderProfilesMutation(c: any, command?: RefreshProviderProfilesCommand): Promise<void> {
  const body = command ?? {};
  const { providers } = getActorRuntimeContext();
  const providerIds: ProviderId[] = body.providerId ? [body.providerId] : providers.availableProviderIds();
  for (const providerId of providerIds) {
    const profileJson = JSON.stringify({ providerId });
    const now = Date.now();
    await c.db
      .insert(providerProfiles)
      .values({ providerId, profileJson, updatedAt: now })
      .onConflictDoUpdate({
        target: providerProfiles.providerId,
        set: { profileJson, updatedAt: now }
      })
      .run();
  }
}
/**
 * Long-running workflow loop: pull workspace commands off the actor queues
 * one at a time, run the matching mutation inside a workflow step, and
 * complete the queue message with the result so the sending action (which
 * awaits with `wait: true`) receives it.
 */
export async function runWorkspaceWorkflow(ctx: any): Promise<void> {
  await ctx.loop("workspace-command-loop", async (loopCtx: any) => {
    const msg = await loopCtx.queue.next("next-workspace-command", {
      names: [...WORKSPACE_QUEUE_NAMES],
      completable: true,
    });
    if (!msg) {
      return Loop.continue(undefined);
    }
    if (msg.name === "workspace.command.addRepo") {
      const result = await loopCtx.step({
        name: "workspace-add-repo",
        timeout: 60_000,
        run: async () => addRepoMutation(loopCtx, msg.body as AddRepoInput),
      });
      await msg.complete(result);
      return Loop.continue(undefined);
    }
    if (msg.name === "workspace.command.createHandoff") {
      const result = await loopCtx.step({
        name: "workspace-create-handoff",
        // Handoff creation ensures the project and provisions the handoff;
        // allow up to 12 minutes (matches createHandoff's action timeout).
        timeout: 12 * 60_000,
        run: async () => createHandoffMutation(loopCtx, msg.body as CreateHandoffInput),
      });
      await msg.complete(result);
      return Loop.continue(undefined);
    }
    if (msg.name === "workspace.command.refreshProviderProfiles") {
      await loopCtx.step("workspace-refresh-provider-profiles", async () =>
        refreshProviderProfilesMutation(loopCtx, msg.body as RefreshProviderProfilesCommand),
      );
      // No early return needed: falls through to the shared continue below.
      await msg.complete({ ok: true });
    }
    return Loop.continue(undefined);
  });
}
/**
 * Public action surface of the workspace actor.
 *
 * Mutations that must be serialized (addRepo, createHandoff,
 * refreshProviderProfiles) are forwarded to the workflow loop via queues and
 * awaited; read paths query the actor database directly; workbench/handoff
 * actions resolve the owning repo via the lookup table and proxy to the
 * handoff actor.
 */
export const workspaceActions = {
  async useWorkspace(c: any, input: WorkspaceUseInput): Promise<{ workspaceId: string }> {
    assertWorkspace(c, input.workspaceId);
    return { workspaceId: c.state.workspaceId };
  },
  // Queued mutation: executed by runWorkspaceWorkflow, result returned to caller.
  async addRepo(c: any, input: AddRepoInput): Promise<RepoRecord> {
    const self = selfWorkspace(c);
    return expectQueueResponse<RepoRecord>(
      await self.send(workspaceWorkflowQueueName("workspace.command.addRepo"), input, {
        wait: true,
        timeout: 60_000,
      }),
    );
  },
  async listRepos(c: any, input: WorkspaceUseInput): Promise<RepoRecord[]> {
    assertWorkspace(c, input.workspaceId);
    const rows = await c.db
      .select({
        repoId: repos.repoId,
        remoteUrl: repos.remoteUrl,
        createdAt: repos.createdAt,
        updatedAt: repos.updatedAt
      })
      .from(repos)
      .orderBy(desc(repos.updatedAt))
      .all();
    return rows.map((row) => ({
      workspaceId: c.state.workspaceId,
      repoId: row.repoId,
      remoteUrl: row.remoteUrl,
      createdAt: row.createdAt,
      updatedAt: row.updatedAt
    }));
  },
  // Queued mutation: long timeout because creation clones/provisions.
  async createHandoff(c: any, input: CreateHandoffInput): Promise<HandoffRecord> {
    const self = selfWorkspace(c);
    return expectQueueResponse<HandoffRecord>(
      await self.send(workspaceWorkflowQueueName("workspace.command.createHandoff"), input, {
        wait: true,
        timeout: 12 * 60_000,
      }),
    );
  },
  async getWorkbench(c: any, input: WorkspaceUseInput): Promise<HandoffWorkbenchSnapshot> {
    assertWorkspace(c, input.workspaceId);
    return await buildWorkbenchSnapshot(c);
  },
  // Broadcast so connected workbench clients re-fetch the snapshot.
  async notifyWorkbenchUpdated(c: any): Promise<void> {
    c.broadcast("workbenchUpdated", { at: Date.now() });
  },
  // Workbench-shaped wrapper around createHandoff (optional fields mapped through).
  async createWorkbenchHandoff(c: any, input: HandoffWorkbenchCreateHandoffInput): Promise<{ handoffId: string }> {
    const created = await workspaceActions.createHandoff(c, {
      workspaceId: c.state.workspaceId,
      repoId: input.repoId,
      task: input.task,
      ...(input.title ? { explicitTitle: input.title } : {}),
      ...(input.branch ? { explicitBranchName: input.branch } : {}),
      ...(input.model ? { agentType: agentTypeForModel(input.model) } : {})
    });
    return { handoffId: created.handoffId };
  },
  // --- Workbench proxies: resolve the handoff actor and forward the call. ---
  async markWorkbenchUnread(c: any, input: HandoffWorkbenchSelectInput): Promise<void> {
    const handoff = await requireWorkbenchHandoff(c, input.handoffId);
    await handoff.markWorkbenchUnread({});
  },
  async renameWorkbenchHandoff(c: any, input: HandoffWorkbenchRenameInput): Promise<void> {
    const handoff = await requireWorkbenchHandoff(c, input.handoffId);
    await handoff.renameWorkbenchHandoff(input);
  },
  async renameWorkbenchBranch(c: any, input: HandoffWorkbenchRenameInput): Promise<void> {
    const handoff = await requireWorkbenchHandoff(c, input.handoffId);
    await handoff.renameWorkbenchBranch(input);
  },
  async createWorkbenchSession(c: any, input: HandoffWorkbenchSelectInput & { model?: string }): Promise<{ tabId: string }> {
    const handoff = await requireWorkbenchHandoff(c, input.handoffId);
    return await handoff.createWorkbenchSession({ ...(input.model ? { model: input.model } : {}) });
  },
  async renameWorkbenchSession(c: any, input: HandoffWorkbenchRenameSessionInput): Promise<void> {
    const handoff = await requireWorkbenchHandoff(c, input.handoffId);
    await handoff.renameWorkbenchSession(input);
  },
  async setWorkbenchSessionUnread(c: any, input: HandoffWorkbenchSetSessionUnreadInput): Promise<void> {
    const handoff = await requireWorkbenchHandoff(c, input.handoffId);
    await handoff.setWorkbenchSessionUnread(input);
  },
  async updateWorkbenchDraft(c: any, input: HandoffWorkbenchUpdateDraftInput): Promise<void> {
    const handoff = await requireWorkbenchHandoff(c, input.handoffId);
    await handoff.updateWorkbenchDraft(input);
  },
  async changeWorkbenchModel(c: any, input: HandoffWorkbenchChangeModelInput): Promise<void> {
    const handoff = await requireWorkbenchHandoff(c, input.handoffId);
    await handoff.changeWorkbenchModel(input);
  },
  async sendWorkbenchMessage(c: any, input: HandoffWorkbenchSendMessageInput): Promise<void> {
    const handoff = await requireWorkbenchHandoff(c, input.handoffId);
    await handoff.sendWorkbenchMessage(input);
  },
  async stopWorkbenchSession(c: any, input: HandoffWorkbenchTabInput): Promise<void> {
    const handoff = await requireWorkbenchHandoff(c, input.handoffId);
    await handoff.stopWorkbenchSession(input);
  },
  async closeWorkbenchSession(c: any, input: HandoffWorkbenchTabInput): Promise<void> {
    const handoff = await requireWorkbenchHandoff(c, input.handoffId);
    await handoff.closeWorkbenchSession(input);
  },
  async publishWorkbenchPr(c: any, input: HandoffWorkbenchSelectInput): Promise<void> {
    const handoff = await requireWorkbenchHandoff(c, input.handoffId);
    await handoff.publishWorkbenchPr({});
  },
  async revertWorkbenchFile(c: any, input: HandoffWorkbenchDiffInput): Promise<void> {
    const handoff = await requireWorkbenchHandoff(c, input.handoffId);
    await handoff.revertWorkbenchFile(input);
  },
  // List handoffs for one repo when repoId is given, otherwise across all repos.
  async listHandoffs(c: any, input: ListHandoffsInput): Promise<HandoffSummary[]> {
    assertWorkspace(c, input.workspaceId);
    if (input.repoId) {
      const repoRow = await c.db
        .select({ remoteUrl: repos.remoteUrl })
        .from(repos)
        .where(eq(repos.repoId, input.repoId))
        .get();
      if (!repoRow) {
        throw new Error(`Unknown repo: ${input.repoId}`);
      }
      const project = await getOrCreateProject(c, c.state.workspaceId, input.repoId, repoRow.remoteUrl);
      return await project.listHandoffSummaries({ includeArchived: true });
    }
    return await collectAllHandoffSummaries(c);
  },
  async getRepoOverview(c: any, input: RepoOverviewInput): Promise<RepoOverview> {
    assertWorkspace(c, input.workspaceId);
    const repoRow = await c.db
      .select({ remoteUrl: repos.remoteUrl })
      .from(repos)
      .where(eq(repos.repoId, input.repoId))
      .get();
    if (!repoRow) {
      throw new Error(`Unknown repo: ${input.repoId}`);
    }
    const project = await getOrCreateProject(c, c.state.workspaceId, input.repoId, repoRow.remoteUrl);
    await project.ensure({ remoteUrl: repoRow.remoteUrl });
    return await project.getRepoOverview({});
  },
  async runRepoStackAction(c: any, input: RepoStackActionInput): Promise<RepoStackActionResult> {
    assertWorkspace(c, input.workspaceId);
    const repoRow = await c.db
      .select({ remoteUrl: repos.remoteUrl })
      .from(repos)
      .where(eq(repos.repoId, input.repoId))
      .get();
    if (!repoRow) {
      throw new Error(`Unknown repo: ${input.repoId}`);
    }
    const project = await getOrCreateProject(c, c.state.workspaceId, input.repoId, repoRow.remoteUrl);
    await project.ensure({ remoteUrl: repoRow.remoteUrl });
    return await project.runRepoStackAction({
      action: input.action,
      branchName: input.branchName,
      parentBranch: input.parentBranch
    });
  },
  async switchHandoff(c: any, handoffId: string): Promise<SwitchResult> {
    const repoId = await resolveRepoId(c, handoffId);
    const h = getHandoff(c, c.state.workspaceId, repoId, handoffId);
    const record = await h.get();
    const switched = await h.switch();
    return {
      workspaceId: c.state.workspaceId,
      handoffId,
      providerId: record.providerId,
      switchTarget: switched.switchTarget
    };
  },
  // Queued mutation; fire-and-wait, no meaningful response body.
  async refreshProviderProfiles(c: any, command?: RefreshProviderProfilesCommand): Promise<void> {
    const self = selfWorkspace(c);
    await self.send(workspaceWorkflowQueueName("workspace.command.refreshProviderProfiles"), command ?? {}, {
      wait: true,
      timeout: 60_000,
    });
  },
  // Merge per-repo history streams, newest first, trimmed to `limit`.
  async history(c: any, input: HistoryQueryInput): Promise<HistoryEvent[]> {
    assertWorkspace(c, input.workspaceId);
    const limit = input.limit ?? 20;
    const repoRows = await c.db.select({ repoId: repos.repoId }).from(repos).all();
    const allEvents: HistoryEvent[] = [];
    for (const row of repoRows) {
      try {
        const hist = await getOrCreateHistory(c, c.state.workspaceId, row.repoId);
        const items = await hist.list({
          branch: input.branch,
          handoffId: input.handoffId,
          limit
        });
        allEvents.push(...items);
      } catch (error) {
        logActorWarning("workspace", "history lookup failed for repo", {
          workspaceId: c.state.workspaceId,
          repoId: row.repoId,
          error: resolveErrorMessage(error)
        });
      }
    }
    allEvents.sort((a, b) => b.createdAt - a.createdAt);
    return allEvents.slice(0, limit);
  },
  async getHandoff(c: any, input: GetHandoffInput): Promise<HandoffRecord> {
    assertWorkspace(c, input.workspaceId);
    const repoId = await resolveRepoId(c, input.handoffId);
    const repoRow = await c.db
      .select({ remoteUrl: repos.remoteUrl })
      .from(repos)
      .where(eq(repos.repoId, repoId))
      .get();
    if (!repoRow) {
      throw new Error(`Unknown repo: ${repoId}`);
    }
    const project = await getOrCreateProject(c, c.state.workspaceId, repoId, repoRow.remoteUrl);
    return await project.getHandoffEnriched({ handoffId: input.handoffId });
  },
  // --- Handoff lifecycle proxies: resolve repo, forward action with reason. ---
  async attachHandoff(c: any, input: HandoffProxyActionInput): Promise<{ target: string; sessionId: string | null }> {
    assertWorkspace(c, input.workspaceId);
    const repoId = await resolveRepoId(c, input.handoffId);
    const h = getHandoff(c, c.state.workspaceId, repoId, input.handoffId);
    return await h.attach({ reason: input.reason });
  },
  async pushHandoff(c: any, input: HandoffProxyActionInput): Promise<void> {
    assertWorkspace(c, input.workspaceId);
    const repoId = await resolveRepoId(c, input.handoffId);
    const h = getHandoff(c, c.state.workspaceId, repoId, input.handoffId);
    await h.push({ reason: input.reason });
  },
  async syncHandoff(c: any, input: HandoffProxyActionInput): Promise<void> {
    assertWorkspace(c, input.workspaceId);
    const repoId = await resolveRepoId(c, input.handoffId);
    const h = getHandoff(c, c.state.workspaceId, repoId, input.handoffId);
    await h.sync({ reason: input.reason });
  },
  async mergeHandoff(c: any, input: HandoffProxyActionInput): Promise<void> {
    assertWorkspace(c, input.workspaceId);
    const repoId = await resolveRepoId(c, input.handoffId);
    const h = getHandoff(c, c.state.workspaceId, repoId, input.handoffId);
    await h.merge({ reason: input.reason });
  },
  async archiveHandoff(c: any, input: HandoffProxyActionInput): Promise<void> {
    assertWorkspace(c, input.workspaceId);
    const repoId = await resolveRepoId(c, input.handoffId);
    const h = getHandoff(c, c.state.workspaceId, repoId, input.handoffId);
    await h.archive({ reason: input.reason });
  },
  async killHandoff(c: any, input: HandoffProxyActionInput): Promise<void> {
    assertWorkspace(c, input.workspaceId);
    const repoId = await resolveRepoId(c, input.handoffId);
    const h = getHandoff(c, c.state.workspaceId, repoId, input.handoffId);
    await h.kill({ reason: input.reason });
  }
};

View file

@ -0,0 +1,10 @@
import { actorSqliteDb } from "../../../db/actor-sqlite.js";
import * as schema from "./schema.js";
import migrations from "./migrations.js";
// Workspace actor database provider; actorSqliteDb picks Bun-native SQLite in
// production and a KV-backed Drizzle db in test environments.
export const workspaceDb = actorSqliteDb({
  actorName: "workspace",
  schema,
  migrations,
  // On-disk drizzle-kit output, used by the Bun-native migrator.
  migrationsFolderUrl: new URL("./drizzle/", import.meta.url),
});

View file

@ -0,0 +1,7 @@
import { defineConfig } from "rivetkit/db/drizzle";
// drizzle-kit configuration for the workspace actor's schema and migrations output.
export default defineConfig({
  out: "./src/actors/workspace/db/drizzle",
  schema: "./src/actors/workspace/db/schema.ts",
});

View file

@ -0,0 +1,5 @@
CREATE TABLE `provider_profiles` (
`provider_id` text PRIMARY KEY NOT NULL,
`profile_json` text NOT NULL,
`updated_at` integer NOT NULL
);

View file

@ -0,0 +1,6 @@
CREATE TABLE `repos` (
`repo_id` text PRIMARY KEY NOT NULL,
`remote_url` text NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL
);

View file

@ -0,0 +1,4 @@
CREATE TABLE `handoff_lookup` (
`handoff_id` text PRIMARY KEY NOT NULL,
`repo_id` text NOT NULL
);

View file

@ -0,0 +1,49 @@
{
"version": "6",
"dialect": "sqlite",
"id": "a85809c0-65c2-4f99-92ed-34357c9f83d7",
"prevId": "00000000-0000-0000-0000-000000000000",
"tables": {
"provider_profiles": {
"name": "provider_profiles",
"columns": {
"provider_id": {
"name": "provider_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"profile_json": {
"name": "profile_json",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
}
},
"views": {},
"enums": {},
"_meta": {
"schemas": {},
"tables": {},
"columns": {}
},
"internal": {
"indexes": {}
}
}

View file

@ -0,0 +1,87 @@
{
"version": "6",
"dialect": "sqlite",
"id": "450e2fdf-6349-482f-8a68-5bc0f0a9718a",
"prevId": "a85809c0-65c2-4f99-92ed-34357c9f83d7",
"tables": {
"provider_profiles": {
"name": "provider_profiles",
"columns": {
"provider_id": {
"name": "provider_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"profile_json": {
"name": "profile_json",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"repos": {
"name": "repos",
"columns": {
"repo_id": {
"name": "repo_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"remote_url": {
"name": "remote_url",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
}
},
"views": {},
"enums": {},
"_meta": {
"schemas": {},
"tables": {},
"columns": {}
},
"internal": {
"indexes": {}
}
}

View file

@ -0,0 +1,27 @@
{
"version": "7",
"dialect": "sqlite",
"entries": [
{
"idx": 0,
"version": "6",
"when": 1770924376525,
"tag": "0000_rare_iron_man",
"breakpoints": true
},
{
"idx": 1,
"version": "6",
"when": 1770947252912,
"tag": "0001_sleepy_lady_deathstrike",
"breakpoints": true
},
{
"idx": 2,
"version": "6",
"when": 1772668800000,
"tag": "0002_tiny_silver_surfer",
"breakpoints": true
}
]
}

View file

@ -0,0 +1,50 @@
// This file is generated by src/actors/_scripts/generate-actor-migrations.ts.
// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql).
// Do not hand-edit this file.
const journal = {
"entries": [
{
"idx": 0,
"when": 1770924376525,
"tag": "0000_rare_iron_man",
"breakpoints": true
},
{
"idx": 1,
"when": 1770947252912,
"tag": "0001_sleepy_lady_deathstrike",
"breakpoints": true
},
{
"idx": 2,
"when": 1772668800000,
"tag": "0002_tiny_silver_surfer",
"breakpoints": true
}
]
} as const;
export default {
journal,
migrations: {
m0000: `CREATE TABLE \`provider_profiles\` (
\`provider_id\` text PRIMARY KEY NOT NULL,
\`profile_json\` text NOT NULL,
\`updated_at\` integer NOT NULL
);
`,
m0001: `CREATE TABLE \`repos\` (
\`repo_id\` text PRIMARY KEY NOT NULL,
\`remote_url\` text NOT NULL,
\`created_at\` integer NOT NULL,
\`updated_at\` integer NOT NULL
);
`,
m0002: `CREATE TABLE \`handoff_lookup\` (
\`handoff_id\` text PRIMARY KEY NOT NULL,
\`repo_id\` text NOT NULL
);
`,
} as const
};

View file

@ -0,0 +1,20 @@
import { integer, sqliteTable, text } from "rivetkit/db/drizzle";
// SQLite is per workspace actor instance, so no workspaceId column needed.
// Cached provider profile blobs, keyed by provider id.
export const providerProfiles = sqliteTable("provider_profiles", {
  providerId: text("provider_id").notNull().primaryKey(),
  profileJson: text("profile_json").notNull(),
  updatedAt: integer("updated_at").notNull(),
});
// Git remotes registered with this workspace.
export const repos = sqliteTable("repos", {
  repoId: text("repo_id").notNull().primaryKey(),
  remoteUrl: text("remote_url").notNull(),
  createdAt: integer("created_at").notNull(),
  updatedAt: integer("updated_at").notNull(),
});
// Reverse index from handoff id to its owning repo.
export const handoffLookup = sqliteTable("handoff_lookup", {
  handoffId: text("handoff_id").notNull().primaryKey(),
  repoId: text("repo_id").notNull(),
});

View file

@ -0,0 +1,17 @@
import { actor, queue } from "rivetkit";
import { workflow } from "rivetkit/workflow";
import { workspaceDb } from "./db/db.js";
import { runWorkspaceWorkflow, WORKSPACE_QUEUE_NAMES, workspaceActions } from "./actions.js";
// Workspace actor: SQLite-backed state, one queue per workflow command name,
// and a long-running workflow loop consuming those queues.
export const workspace = actor({
  db: workspaceDb,
  queues: Object.fromEntries(WORKSPACE_QUEUE_NAMES.map((name) => [name, queue()])),
  options: {
    // Generous timeout: actions such as createHandoff wait on long-running work.
    actionTimeout: 5 * 60_000,
  },
  createState: (_c, workspaceId: string) => ({
    workspaceId
  }),
  actions: workspaceActions,
  run: workflow(runWorkspaceWorkflow),
});

View file

@ -0,0 +1,22 @@
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
import { dirname } from "node:path";
import { homedir } from "node:os";
import * as toml from "@iarna/toml";
import { ConfigSchema, type AppConfig } from "@openhandoff/shared";
// Well-known location of the user-level OpenHandoff config file.
export const CONFIG_PATH = `${homedir()}/.config/openhandoff/config.toml`;
/**
 * Read and validate the app config from disk. A missing file yields the
 * schema's defaults rather than an error.
 */
export function loadConfig(path = CONFIG_PATH): AppConfig {
  if (!existsSync(path)) {
    return ConfigSchema.parse({});
  }
  const parsed: unknown = toml.parse(readFileSync(path, "utf8"));
  return ConfigSchema.parse(parsed);
}
/** Serialize config to TOML and write it, creating parent directories as needed. */
export function saveConfig(config: AppConfig, path = CONFIG_PATH): void {
  const serialized = toml.stringify(config);
  mkdirSync(dirname(path), { recursive: true });
  writeFileSync(path, serialized, "utf8");
}

View file

@ -0,0 +1,13 @@
import type { AppConfig } from "@openhandoff/shared";
/** Configured default workspace name, falling back to "default" when blank. */
export function defaultWorkspace(config: AppConfig): string {
  const configured = config.workspace.default.trim();
  if (configured.length === 0) {
    return "default";
  }
  return configured;
}
/** Prefer an explicitly passed workspace flag; otherwise fall back to the configured default. */
export function resolveWorkspace(flagWorkspace: string | undefined, config: AppConfig): string {
  const trimmedFlag = flagWorkspace?.trim() ?? "";
  return trimmedFlag.length > 0 ? trimmedFlag : defaultWorkspace(config);
}

View file

@ -0,0 +1,105 @@
import { mkdirSync } from "node:fs";
import { join } from "node:path";
import { fileURLToPath } from "node:url";
import { db as kvDrizzleDb } from "rivetkit/db/drizzle";
// Keep this file decoupled from RivetKit's internal type export paths.
// RivetKit consumes database providers structurally.
// Escape hatch for raw SQL access alongside the Drizzle client.
export interface RawAccess {
  execute: (query: string, ...args: unknown[]) => Promise<unknown[]>;
  close: () => Promise<void>;
}
// Per-actor context handed to createClient; actorId names the SQLite file.
export interface DatabaseProviderContext {
  actorId: string;
}
// Structural shape RivetKit expects from a database provider.
export type DatabaseProvider<DB> = {
  createClient: (ctx: DatabaseProviderContext) => Promise<DB>;
  onMigrate: (client: DB) => void | Promise<void>;
  onDestroy?: (client: DB) => void | Promise<void>;
};
export interface ActorSqliteDbOptions<TSchema extends Record<string, unknown>> {
  actorName: string;
  schema?: TSchema;
  // Embedded migrations (used by the KV-backed fallback path).
  migrations?: unknown;
  // On-disk drizzle-kit output (used by the Bun-native migrator).
  migrationsFolderUrl: URL;
  /**
   * Override base directory for per-actor SQLite files.
   *
   * Default: `<cwd>/.openhandoff/backend/sqlite`
   */
  baseDir?: string;
}
/**
 * Build a RivetKit database provider backed by a per-actor SQLite file.
 *
 * Under Bun in production this uses `bun:sqlite` with Drizzle's Bun driver and
 * the on-disk drizzle-kit migrations; in Vitest/Node/test environments it
 * falls back to RivetKit's KV-backed Drizzle database (see comments below).
 */
// NOTE(review): `any & RawAccess` collapses to `any`; a more precise client
// type would restore type checking for consumers — confirm before tightening.
export function actorSqliteDb<TSchema extends Record<string, unknown>>(
  options: ActorSqliteDbOptions<TSchema>
): DatabaseProvider<any & RawAccess> {
  const isBunRuntime =
    typeof (globalThis as any).Bun !== "undefined" && typeof (process as any)?.versions?.bun === "string";
  // Backend tests run in a Node-ish Vitest environment where `bun:sqlite` and
  // Bun's sqlite-backed Drizzle driver are not supported.
  //
  // Additionally, RivetKit's KV-backed SQLite implementation currently has stability
  // issues under Bun in this repo's setup (wa-sqlite runtime errors). Prefer Bun's
  // native SQLite driver in production backend execution.
  if (!isBunRuntime || process.env.VITEST || process.env.NODE_ENV === "test") {
    return kvDrizzleDb({
      schema: options.schema,
      migrations: options.migrations,
    }) as unknown as DatabaseProvider<any & RawAccess>;
  }
  const baseDir = options.baseDir ?? join(process.cwd(), ".openhandoff", "backend", "sqlite");
  const migrationsFolder = fileURLToPath(options.migrationsFolderUrl);
  return {
    createClient: async (ctx) => {
      // Keep Bun-only module out of Vitest/Vite's static import graph.
      const { Database } = await import(/* @vite-ignore */ "bun:sqlite");
      const { drizzle } = await import("drizzle-orm/bun-sqlite");
      // One SQLite file per actor instance: <baseDir>/<actorName>/<actorId>.sqlite
      const dir = join(baseDir, options.actorName);
      mkdirSync(dir, { recursive: true });
      const dbPath = join(dir, `${ctx.actorId}.sqlite`);
      const sqlite = new Database(dbPath);
      sqlite.exec("PRAGMA journal_mode = WAL;");
      sqlite.exec("PRAGMA foreign_keys = ON;");
      const client = drizzle({
        client: sqlite,
        schema: options.schema,
      });
      return Object.assign(client, {
        // Raw escape hatch: try reading rows first, fall back to a write.
        execute: async (query: string, ...args: unknown[]) => {
          const stmt = sqlite.query(query);
          try {
            return stmt.all(args as never) as unknown[];
          } catch {
            stmt.run(args as never);
            return [];
          }
        },
        close: async () => {
          sqlite.close();
        },
      } satisfies RawAccess);
    },
    onMigrate: async (client) => {
      const { migrate } = await import("drizzle-orm/bun-sqlite/migrator");
      await migrate(client, {
        migrationsFolder,
      });
    },
    onDestroy: async (client) => {
      await client.close();
    },
  };
}

View file

@ -0,0 +1,180 @@
import type { BranchSnapshot } from "./integrations/git/index.js";
import type { PullRequestSnapshot } from "./integrations/github/index.js";
import type {
SandboxSession,
SandboxAgentClientOptions,
SandboxSessionCreateRequest
} from "./integrations/sandbox-agent/client.js";
import type { ListEventsRequest, ListPage, ListPageRequest, SessionEvent, SessionRecord } from "sandbox-agent";
import type {
DaytonaClientOptions,
DaytonaCreateSandboxOptions,
DaytonaPreviewEndpoint,
DaytonaSandbox,
} from "./integrations/daytona/client.js";
import {
validateRemote,
ensureCloned,
fetch,
listRemoteBranches,
remoteDefaultBaseRef,
revParse,
ensureRemoteBranch,
diffStatForBranch,
conflictsWithMain,
} from "./integrations/git/index.js";
import {
gitSpiceAvailable,
gitSpiceListStack,
gitSpiceRebaseBranch,
gitSpiceReparentBranch,
gitSpiceRestackRepo,
gitSpiceRestackSubtree,
gitSpiceSyncRepo,
gitSpiceTrackBranch,
} from "./integrations/git-spice/index.js";
import { listPullRequests, createPr } from "./integrations/github/index.js";
import { SandboxAgentClient } from "./integrations/sandbox-agent/client.js";
import { DaytonaClient } from "./integrations/daytona/client.js";
// Git plumbing operations used by actors (implemented by integrations/git).
export interface GitDriver {
  validateRemote(remoteUrl: string): Promise<void>;
  ensureCloned(remoteUrl: string, targetPath: string): Promise<void>;
  fetch(repoPath: string): Promise<void>;
  listRemoteBranches(repoPath: string): Promise<BranchSnapshot[]>;
  remoteDefaultBaseRef(repoPath: string): Promise<string>;
  revParse(repoPath: string, ref: string): Promise<string>;
  ensureRemoteBranch(repoPath: string, branchName: string): Promise<void>;
  diffStatForBranch(repoPath: string, branchName: string): Promise<string>;
  conflictsWithMain(repoPath: string, branchName: string): Promise<boolean>;
}
// One branch in a git-spice stack, with its parent (null at the trunk).
export interface StackBranchSnapshot {
  branchName: string;
  parentBranch: string | null;
}
// Stacked-branch operations backed by git-spice.
export interface StackDriver {
  available(repoPath: string): Promise<boolean>;
  listStack(repoPath: string): Promise<StackBranchSnapshot[]>;
  syncRepo(repoPath: string): Promise<void>;
  restackRepo(repoPath: string): Promise<void>;
  restackSubtree(repoPath: string, branchName: string): Promise<void>;
  rebaseBranch(repoPath: string, branchName: string): Promise<void>;
  reparentBranch(repoPath: string, branchName: string, parentBranch: string): Promise<void>;
  trackBranch(repoPath: string, branchName: string, parentBranch: string): Promise<void>;
}
// GitHub PR operations.
export interface GithubDriver {
  listPullRequests(repoPath: string): Promise<PullRequestSnapshot[]>;
  createPr(
    repoPath: string,
    headBranch: string,
    title: string,
    body?: string
  ): Promise<{ number: number; url: string }>;
}
// Subset of SandboxAgentClient the actors depend on (kept narrow for test doubles).
export interface SandboxAgentClientLike {
  createSession(request: string | SandboxSessionCreateRequest): Promise<SandboxSession>;
  sessionStatus(sessionId: string): Promise<SandboxSession>;
  listSessions(request?: ListPageRequest): Promise<ListPage<SessionRecord>>;
  listEvents(request: ListEventsRequest): Promise<ListPage<SessionEvent>>;
  sendPrompt(request: { sessionId: string; prompt: string; notification?: boolean }): Promise<void>;
  cancelSession(sessionId: string): Promise<void>;
  destroySession(sessionId: string): Promise<void>;
}
export interface SandboxAgentDriver {
  createClient(options: SandboxAgentClientOptions): SandboxAgentClientLike;
}
// Subset of DaytonaClient the actors depend on.
export interface DaytonaClientLike {
  createSandbox(options: DaytonaCreateSandboxOptions): Promise<DaytonaSandbox>;
  getSandbox(sandboxId: string): Promise<DaytonaSandbox>;
  startSandbox(sandboxId: string, timeoutSeconds?: number): Promise<void>;
  stopSandbox(sandboxId: string, timeoutSeconds?: number): Promise<void>;
  deleteSandbox(sandboxId: string): Promise<void>;
  executeCommand(sandboxId: string, command: string): Promise<{ exitCode: number; result: string }>;
  getPreviewEndpoint(sandboxId: string, port: number): Promise<DaytonaPreviewEndpoint>;
}
export interface DaytonaDriver {
  createClient(options: DaytonaClientOptions): DaytonaClientLike;
}
export interface TmuxDriver {
  setWindowStatus(branchName: string, status: string): number;
}
// Bundle of all external-system drivers the backend actors use; swap out for tests.
export interface BackendDriver {
  git: GitDriver;
  stack: StackDriver;
  github: GithubDriver;
  sandboxAgent: SandboxAgentDriver;
  daytona: DaytonaDriver;
  tmux: TmuxDriver;
}
/**
 * Production BackendDriver wiring: real git/git-spice/GitHub integrations and
 * cached sandbox-agent/Daytona clients keyed by their connection options.
 * The tmux driver is a no-op here.
 */
export function createDefaultDriver(): BackendDriver {
  // Client caches are scoped to one driver instance.
  const sandboxAgentClients = new Map<string, SandboxAgentClient>();
  const daytonaClients = new Map<string, DaytonaClient>();
  return {
    git: {
      validateRemote,
      ensureCloned,
      fetch,
      listRemoteBranches,
      remoteDefaultBaseRef,
      revParse,
      ensureRemoteBranch,
      diffStatForBranch,
      conflictsWithMain,
    },
    stack: {
      available: gitSpiceAvailable,
      listStack: gitSpiceListStack,
      syncRepo: gitSpiceSyncRepo,
      restackRepo: gitSpiceRestackRepo,
      restackSubtree: gitSpiceRestackSubtree,
      rebaseBranch: gitSpiceRebaseBranch,
      reparentBranch: gitSpiceReparentBranch,
      trackBranch: gitSpiceTrackBranch,
    },
    github: {
      listPullRequests,
      createPr,
    },
    sandboxAgent: {
      createClient: (opts) => {
        // `persist` requests a dedicated client that bypasses the cache.
        if (opts.persist) {
          return new SandboxAgentClient(opts);
        }
        const key = `${opts.endpoint}|${opts.token ?? ""}|${opts.agent ?? ""}`;
        const cached = sandboxAgentClients.get(key);
        if (cached) {
          return cached;
        }
        const created = new SandboxAgentClient(opts);
        sandboxAgentClients.set(key, created);
        return created;
      },
    },
    daytona: {
      createClient: (opts) => {
        const key = `${opts.apiUrl ?? ""}|${opts.apiKey ?? ""}|${opts.target ?? ""}`;
        const cached = daytonaClients.get(key);
        if (cached) {
          return cached;
        }
        const created = new DaytonaClient(opts);
        daytonaClients.set(key, created);
        return created;
      },
    },
    tmux: {
      // No-op in the default driver; returns 0 unconditionally.
      setWindowStatus: () => 0,
    },
  };
}

View file

@ -0,0 +1,143 @@
import { Hono } from "hono";
import { cors } from "hono/cors";
import { initActorRuntimeContext } from "./actors/context.js";
import { registry } from "./actors/index.js";
import { loadConfig } from "./config/backend.js";
import { createBackends, createNotificationService } from "./notifications/index.js";
import { createDefaultDriver } from "./driver.js";
import { createProviderRegistry } from "./providers/index.js";
// Optional host/port overrides for the backend HTTP server; unset fields
// fall back to the values from the loaded config.
export interface BackendStartOptions {
  host?: string;
  port?: number;
}
// Boots the backend: loads config (with env/CLI overrides), wires the
// driver/provider/notification layers into the actor runtime, and serves
// the RivetKit registry over Bun.serve mounted at /api/rivet.
export async function startBackend(options: BackendStartOptions = {}): Promise<void> {
  // sandbox-agent agent plugins vary on which env var they read for OpenAI/Codex auth.
  // Normalize to keep local dev + docker-compose simple.
  if (!process.env.CODEX_API_KEY && process.env.OPENAI_API_KEY) {
    process.env.CODEX_API_KEY = process.env.OPENAI_API_KEY;
  }

  const config = loadConfig();
  config.backend.host = options.host ?? config.backend.host;
  config.backend.port = options.port ?? config.backend.port;

  // Allow docker-compose/dev environments to supply provider config via env
  // vars instead of writing into the container's config.toml. Blank values
  // are treated as unset.
  const firstNonBlankEnv = (...keys: string[]): string | undefined => {
    for (const key of keys) {
      const value = process.env[key]?.trim();
      if (value) return value;
    }
    return undefined;
  };
  config.providers.daytona.endpoint =
    firstNonBlankEnv("HF_DAYTONA_ENDPOINT", "DAYTONA_ENDPOINT") ?? config.providers.daytona.endpoint;
  config.providers.daytona.apiKey =
    firstNonBlankEnv("HF_DAYTONA_API_KEY", "DAYTONA_API_KEY") ?? config.providers.daytona.apiKey;

  const driver = createDefaultDriver();
  const providers = createProviderRegistry(config, driver);
  const backends = await createBackends(config.notify);
  const notifications = createNotificationService(backends);
  initActorRuntimeContext(config, providers, notifications, driver);

  const inner = registry.serve();

  // Wrap in a Hono app mounted at /api/rivet to serve on the backend port.
  // Uses Bun.serve — cannot use @hono/node-server because it conflicts with
  // RivetKit's internal Bun.serve manager server (Bun bug: mixing Node HTTP
  // server and Bun.serve in the same process breaks Bun.serve's fetch handler).
  const app = new Hono();

  // The same CORS policy is registered for both the bare mount path and the
  // wildcard, mirroring the route registrations below.
  const corsOptions = {
    origin: "*",
    allowHeaders: ["Content-Type", "Authorization", "x-rivet-token"],
    allowMethods: ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
    exposeHeaders: ["Content-Type"],
  };
  app.use("/api/rivet/*", cors(corsOptions));
  app.use("/api/rivet", cors(corsOptions));

  const forward = async (c: any) => {
    try {
      // RivetKit serverless handler is configured with basePath `/api/rivet` by default.
      return await inner.fetch(c.req.raw);
    } catch (err) {
      if (err instanceof URIError) {
        return c.text("Bad Request: Malformed URI", 400);
      }
      throw err;
    }
  };
  app.all("/api/rivet", forward);
  app.all("/api/rivet/*", forward);

  const server = Bun.serve({
    fetch: app.fetch,
    hostname: config.backend.host,
    port: config.backend.port,
  });

  // Stop the server and exit cleanly on either termination signal.
  const shutdown = () => {
    server.stop();
    process.exit(0);
  };
  process.on("SIGINT", shutdown);
  process.on("SIGTERM", shutdown);

  // Keep process alive.
  await new Promise<void>(() => undefined);
}
// Returns the value following `flag` on the command line, or undefined when
// the flag is absent or has no usable value.
//
// Fix: previously `--host --port 3000` returned "--port" as the value of
// `--host`; a value that is itself a `--flag` is now treated as missing.
function parseArg(flag: string): string | undefined {
  const idx = process.argv.indexOf(flag);
  if (idx < 0) return undefined;
  const value = process.argv[idx + 1];
  if (value === undefined || value.startsWith("--")) return undefined;
  return value;
}
// Parses a TCP port from an env/CLI string; returns undefined unless the
// value is an integer in [1, 65535].
function parseEnvPort(value: string | undefined): number | undefined {
  if (!value) return undefined;
  const parsed = Number(value);
  const isValidPort = Number.isInteger(parsed) && parsed >= 1 && parsed <= 65535;
  return isValidPort ? parsed : undefined;
}
// CLI entry point; only the `start` subcommand is supported.
async function main(): Promise<void> {
  const command = process.argv[2] ?? "start";
  if (command !== "start") {
    throw new Error(`Unsupported backend command: ${command}`);
  }
  // CLI flags win over env vars; the HF_-prefixed vars are the last fallback.
  const host = parseArg("--host") ?? process.env.HOST ?? process.env.HF_BACKEND_HOST;
  const rawPort = parseArg("--port") ?? process.env.PORT ?? process.env.HF_BACKEND_PORT;
  await startBackend({ host, port: parseEnvPort(rawPort) });
}
// Run main() only when this module is executed directly (not imported).
if (import.meta.url === `file://${process.argv[1]}`) {
  main().catch((err: unknown) => {
    // Prefer the stack trace when available for easier debugging.
    if (err instanceof Error) {
      console.error(err.stack ?? err.message);
    } else {
      console.error(String(err));
    }
    process.exit(1);
  });
}

View file

@ -0,0 +1,115 @@
import { Daytona, type Image } from "@daytonaio/sdk";
// Reduced view of a Daytona sandbox: only the fields the backend reads off
// the SDK's sandbox object.
export interface DaytonaSandbox {
  id: string;
  state?: string;
  snapshot?: string;
  labels?: Record<string, string>;
}
// Options forwarded to the Daytona SDK's sandbox create call.
export interface DaytonaCreateSandboxOptions {
  // Image name, or an SDK Image definition.
  image: string | Image;
  envVars?: Record<string, string>;
  labels?: Record<string, string>;
  // Forwarded verbatim to the SDK when set (units defined by the Daytona
  // SDK — presumably minutes; confirm against SDK docs).
  autoStopInterval?: number;
}
// Externally reachable URL for a sandbox port, with the optional auth token
// returned by the SDK for signed preview URLs.
export interface DaytonaPreviewEndpoint {
  url: string;
  token?: string;
}
// Connection settings for the Daytona API; unset fields use SDK defaults.
export interface DaytonaClientOptions {
  // Base URL; normalized to end in "/api" (see normalizeApiUrl).
  apiUrl?: string;
  apiKey?: string;
  target?: string;
}
// Normalizes a Daytona base URL: strips trailing slashes and ensures the
// path ends with "/api". Returns undefined for missing/empty input.
function normalizeApiUrl(input?: string): string | undefined {
  if (!input) return undefined;
  const base = input.replace(/\/+$/, "");
  return base.endsWith("/api") ? base : `${base}/api`;
}
// Thin wrapper over the Daytona SDK exposing only the operations the
// backend needs, with responses reduced to the local DaytonaSandbox shape.
export class DaytonaClient {
  private readonly daytona: Daytona;

  constructor(options: DaytonaClientOptions) {
    const apiUrl = normalizeApiUrl(options.apiUrl);
    // Spread only the fields that are actually set so SDK defaults apply
    // for the rest.
    this.daytona = new Daytona({
      _experimental: {},
      ...(apiUrl ? { apiUrl } : {}),
      ...(options.apiKey ? { apiKey: options.apiKey } : {}),
      ...(options.target ? { target: options.target } : {}),
    });
  }

  // Creates a sandbox and returns the reduced snapshot of it.
  async createSandbox(options: DaytonaCreateSandboxOptions): Promise<DaytonaSandbox> {
    const sandbox = await this.daytona.create({
      image: options.image,
      envVars: options.envVars,
      labels: options.labels,
      ...(options.autoStopInterval !== undefined
        ? { autoStopInterval: options.autoStopInterval }
        : {}),
    });
    return {
      id: sandbox.id,
      state: sandbox.state,
      snapshot: sandbox.snapshot,
      // NOTE(review): `labels` is read through a cast, so it is presumably
      // not on the SDK's public type — confirm against the installed
      // @daytonaio/sdk version.
      labels: (sandbox as any).labels,
    };
  }

  // Fetches a sandbox by id and returns the reduced snapshot.
  async getSandbox(sandboxId: string): Promise<DaytonaSandbox> {
    const sandbox = await this.daytona.get(sandboxId);
    return {
      id: sandbox.id,
      state: sandbox.state,
      snapshot: sandbox.snapshot,
      labels: (sandbox as any).labels,
    };
  }

  // Starts the sandbox; timeoutSeconds is passed through to the SDK.
  async startSandbox(sandboxId: string, timeoutSeconds?: number): Promise<void> {
    const sandbox = await this.daytona.get(sandboxId);
    await sandbox.start(timeoutSeconds);
  }

  // Stops the sandbox; timeoutSeconds is passed through to the SDK.
  async stopSandbox(sandboxId: string, timeoutSeconds?: number): Promise<void> {
    const sandbox = await this.daytona.get(sandboxId);
    await sandbox.stop(timeoutSeconds);
  }

  // Deletes the sandbox (the SDK delete takes the sandbox object, not an id).
  async deleteSandbox(sandboxId: string): Promise<void> {
    const sandbox = await this.daytona.get(sandboxId);
    await this.daytona.delete(sandbox);
  }

  // Runs a shell command in the sandbox; returns its exit code and output.
  async executeCommand(sandboxId: string, command: string): Promise<{ exitCode: number; result: string }> {
    const sandbox = await this.daytona.get(sandboxId);
    const response = await sandbox.process.executeCommand(command);
    return {
      exitCode: response.exitCode,
      result: response.result,
    };
  }

  // Resolves a signed preview URL for the given sandbox port.
  async getPreviewEndpoint(sandboxId: string, port: number): Promise<DaytonaPreviewEndpoint> {
    const sandbox = await this.daytona.get(sandboxId);
    // Use signed preview URLs for server-to-sandbox communication.
    // The standard preview link may redirect to an interactive Auth0 flow from non-browser clients.
    // Signed preview URLs work for direct HTTP access.
    //
    // Request a longer-lived URL so sessions can run for several minutes without refresh.
    // 6 * 60 * 60 = six hours of validity.
    const preview = await sandbox.getSignedPreviewUrl(port, 6 * 60 * 60);
    return {
      url: preview.url,
      token: preview.token,
    };
  }
}

View file

@ -0,0 +1,246 @@
import { execFile } from "node:child_process";
import { promisify } from "node:util";
// Promise-based execFile for async/await usage.
const execFileAsync = promisify(execFile);
// Upper bound for any single git-spice invocation (2 minutes).
const DEFAULT_TIMEOUT_MS = 2 * 60_000;
// One way to invoke git-spice: either a standalone binary ("git-spice") or
// a git subcommand ("git" with prefix ["spice"]).
interface SpiceCommand {
  command: string;
  // Arguments inserted before the actual git-spice arguments.
  prefix: string[];
}
// One branch in the git-spice stack with its tracked parent (null when the
// parent is absent or unknown).
export interface SpiceStackEntry {
  branchName: string;
  parentBranch: string | null;
}
// Candidate invocations in priority order: an explicit HF_GIT_SPICE_BIN
// first, then a standalone `git-spice` binary, then `git spice`.
function spiceCommands(): SpiceCommand[] {
  const candidates: SpiceCommand[] = [];
  const explicitBin = process.env.HF_GIT_SPICE_BIN?.trim();
  if (explicitBin) {
    candidates.push({ command: explicitBin, prefix: [] });
  }
  candidates.push(
    { command: "git-spice", prefix: [] },
    { command: "git", prefix: ["spice"] }
  );
  return candidates;
}
// Human-readable form of a SpiceCommand, e.g. "git spice".
function commandLabel(cmd: SpiceCommand): string {
  const parts = [cmd.command, ...cmd.prefix];
  return parts.join(" ");
}
// Heuristic: does this error indicate the binary is absent (spawn failure
// or shell/git "not found" messages) rather than a real git-spice failure?
function looksMissing(error: unknown): boolean {
  const detail = error instanceof Error ? error.message : String(error);
  const missingMarkers = ["ENOENT", "not a git command", "command not found"];
  return missingMarkers.some((marker) => detail.includes(marker));
}
// Runs one candidate git-spice command inside the repo. Rejects on non-zero
// exit, timeout, or spawn failure.
async function tryRun(
  repoPath: string,
  cmd: SpiceCommand,
  args: string[]
): Promise<{ stdout: string; stderr: string }> {
  return await execFileAsync(cmd.command, [...cmd.prefix, ...args], {
    cwd: repoPath,
    timeout: DEFAULT_TIMEOUT_MS,
    // 8 MiB buffer: stack listings can be large.
    maxBuffer: 1024 * 1024 * 8,
    env: {
      ...process.env,
      // Disable ANSI colors so stdout stays machine-parseable.
      NO_COLOR: "1",
      FORCE_COLOR: "0"
    }
  });
}
// Probes the candidate commands with `--help` and returns the first that
// runs; null when none works.
// NOTE(review): any probe failure — not only a missing binary — falls
// through to the next candidate (the non-missing case simply ends the
// iteration without a rethrow), so a present-but-broken binary is skipped
// silently.
async function pickCommand(repoPath: string): Promise<SpiceCommand | null> {
  for (const candidate of spiceCommands()) {
    try {
      await tryRun(repoPath, candidate, ["--help"]);
      return candidate;
    } catch (error) {
      if (looksMissing(error)) {
        continue;
      }
    }
  }
  return null;
}
// Runs git-spice with the first working command form; throws when no
// git-spice installation can be found.
async function runSpice(repoPath: string, args: string[]): Promise<{ stdout: string; stderr: string }> {
  const cmd = await pickCommand(repoPath);
  if (cmd === null) {
    throw new Error("git-spice is not available (set HF_GIT_SPICE_BIN or install git-spice)");
  }
  return await tryRun(repoPath, cmd, args);
}
// Parses `git-spice log ... --json` output (one JSON object per line).
// Non-JSON lines and entries without a branch name are skipped; duplicate
// branch names keep the first occurrence.
function parseLogJson(stdout: string): SpiceStackEntry[] {
  const body = stdout.trim();
  if (!body) return [];

  const parsed: SpiceStackEntry[] = [];
  for (const rawLine of body.split("\n")) {
    const candidate = rawLine.trim();
    if (!candidate.startsWith("{")) {
      continue;
    }
    try {
      // Accept either `name`/`parent` or `branch`/`parentBranch` key shapes.
      const value = JSON.parse(candidate) as {
        name?: string;
        branch?: string;
        parent?: string | null;
        parentBranch?: string | null;
      };
      const branchName = (value.name ?? value.branch ?? "").trim();
      if (!branchName) {
        continue;
      }
      const rawParent = value.parent ?? value.parentBranch ?? null;
      const parentBranch = rawParent ? rawParent.trim() || null : null;
      parsed.push({ branchName, parentBranch });
    } catch {
      continue;
    }
  }

  // Deduplicate by branch name, keeping the first entry seen.
  const seen = new Set<string>();
  const unique: SpiceStackEntry[] = [];
  for (const entry of parsed) {
    if (!seen.has(entry.branchName)) {
      seen.add(entry.branchName);
      unique.push(entry);
    }
  }
  return unique;
}
// Tries each argument list in order until one succeeds; when every variant
// fails, throws an error aggregating all attempt failures.
async function runFallbacks(repoPath: string, commands: string[][], errorContext: string): Promise<void> {
  const failures: string[] = [];
  for (const args of commands) {
    try {
      await runSpice(repoPath, args);
      return;
    } catch (error) {
      const detail = error instanceof Error ? error.message : String(error);
      failures.push(`${args.join(" ")} :: ${detail}`);
    }
  }
  throw new Error(`${errorContext}. attempts=${failures.join(" | ")}`);
}
// True when any git-spice invocation form works in this repo.
export async function gitSpiceAvailable(repoPath: string): Promise<boolean> {
  const cmd = await pickCommand(repoPath);
  return cmd !== null;
}
// Lists the tracked branch stack via `git-spice log short --all --json`.
// Best effort: returns [] when git-spice is unavailable or the call fails.
export async function gitSpiceListStack(repoPath: string): Promise<SpiceStackEntry[]> {
  const args = ["log", "short", "--all", "--json", "--no-cr-status", "--no-prompt"];
  try {
    const { stdout } = await runSpice(repoPath, args);
    return parseLogJson(stdout);
  } catch {
    return [];
  }
}
// `gs repo sync` with progressively smaller flag sets (presumably for
// git-spice versions that lack --restack/--no-prompt — confirm).
export async function gitSpiceSyncRepo(repoPath: string): Promise<void> {
  await runFallbacks(
    repoPath,
    [
      ["repo", "sync", "--restack", "--no-prompt"],
      ["repo", "sync", "--restack"],
      ["repo", "sync"]
    ],
    "git-spice repo sync failed"
  );
}
// `gs repo restack`, retried without --no-prompt as a fallback.
export async function gitSpiceRestackRepo(repoPath: string): Promise<void> {
  await runFallbacks(
    repoPath,
    [
      ["repo", "restack", "--no-prompt"],
      ["repo", "restack"]
    ],
    "git-spice repo restack failed"
  );
}
// Restacks the subtree rooted at `branchName`: tries `upstack restack`
// first, degrading to single-branch `branch restack` variants.
export async function gitSpiceRestackSubtree(repoPath: string, branchName: string): Promise<void> {
  await runFallbacks(
    repoPath,
    [
      ["upstack", "restack", "--branch", branchName, "--no-prompt"],
      ["upstack", "restack", "--branch", branchName],
      ["branch", "restack", "--branch", branchName, "--no-prompt"],
      ["branch", "restack", "--branch", branchName]
    ],
    `git-spice restack subtree failed for ${branchName}`
  );
}
// Restacks a single branch onto its tracked parent via `branch restack`.
export async function gitSpiceRebaseBranch(repoPath: string, branchName: string): Promise<void> {
  await runFallbacks(
    repoPath,
    [
      ["branch", "restack", "--branch", branchName, "--no-prompt"],
      ["branch", "restack", "--branch", branchName]
    ],
    `git-spice branch restack failed for ${branchName}`
  );
}
// Moves `branchName` (and, with `upstack onto`, everything above it) onto a
// new parent, falling back to single-branch `branch onto` variants.
export async function gitSpiceReparentBranch(
  repoPath: string,
  branchName: string,
  parentBranch: string
): Promise<void> {
  await runFallbacks(
    repoPath,
    [
      ["upstack", "onto", "--branch", branchName, parentBranch, "--no-prompt"],
      ["upstack", "onto", "--branch", branchName, parentBranch],
      ["branch", "onto", "--branch", branchName, parentBranch, "--no-prompt"],
      ["branch", "onto", "--branch", branchName, parentBranch]
    ],
    `git-spice reparent failed for ${branchName} -> ${parentBranch}`
  );
}
// Registers `branchName` with git-spice, based on `parentBranch`.
export async function gitSpiceTrackBranch(
  repoPath: string,
  branchName: string,
  parentBranch: string
): Promise<void> {
  await runFallbacks(
    repoPath,
    [
      ["branch", "track", branchName, "--base", parentBranch, "--no-prompt"],
      ["branch", "track", branchName, "--base", parentBranch]
    ],
    `git-spice track failed for ${branchName}`
  );
}
// Converts a base ref like "origin/main" to a bare branch name ("main");
// blank input defaults to "main".
export function normalizeBaseBranchName(ref: string): string {
  const name = ref.trim();
  if (name.length === 0) {
    return "main";
  }
  const remotePrefix = "origin/";
  return name.startsWith(remotePrefix) ? name.slice(remotePrefix.length) : name;
}
// For log output: the resolved git-spice command string, or null when no
// installation is available.
export async function describeSpiceCommandForLogs(repoPath: string): Promise<string | null> {
  const cmd = await pickCommand(repoPath);
  return cmd ? commandLabel(cmd) : null;
}

View file

@ -0,0 +1,302 @@
import { execFile } from "node:child_process";
import { chmodSync, existsSync, mkdirSync, mkdtempSync, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { dirname, resolve } from "node:path";
import { promisify } from "node:util";
// Promise-based execFile for async/await usage.
const execFileAsync = promisify(execFile);
// Per-operation timeouts: remote validation should be quick, while fetch
// and especially clone can legitimately take minutes.
const DEFAULT_GIT_VALIDATE_REMOTE_TIMEOUT_MS = 15_000;
const DEFAULT_GIT_FETCH_TIMEOUT_MS = 2 * 60_000;
const DEFAULT_GIT_CLONE_TIMEOUT_MS = 5 * 60_000;
// Resolves a GitHub token from the first env var holding a non-blank value,
// in priority order: GH_TOKEN, GITHUB_TOKEN, then the HF_* aliases.
// Returns null when none is set.
//
// Fix: previously a set-but-blank variable (e.g. GH_TOKEN="  ") stopped the
// lookup and masked a valid token in a lower-priority variable, because the
// ?? chain only skips null/undefined. Blank values now fall through —
// matching how startBackend's env override helper treats blanks as unset.
function resolveGithubToken(): string | null {
  const candidates = ["GH_TOKEN", "GITHUB_TOKEN", "HF_GITHUB_TOKEN", "HF_GH_TOKEN"];
  for (const name of candidates) {
    const value = process.env[name]?.trim();
    if (value) return value;
  }
  return null;
}
// Lazily-created GIT_ASKPASS helper path; written once per process.
let cachedAskpassPath: string | null = null;

// Writes (once) a small shell script that answers git's username/password
// prompts using the GitHub token from the environment, and returns its path.
function ensureAskpassScript(): string {
  if (cachedAskpassPath) {
    return cachedAskpassPath;
  }
  const dir = mkdtempSync(resolve(tmpdir(), "openhandoff-git-askpass-"));
  const path = resolve(dir, "askpass.sh");
  // Git invokes $GIT_ASKPASS with the prompt string as argv[1]. Provide both username and password.
  // We avoid embedding the token in this file; it is read from env at runtime.
  const content =
    [
      "#!/bin/sh",
      'prompt="$1"',
      // Prefer GH_TOKEN/GITHUB_TOKEN but support HF_* aliases too.
      'token="${GH_TOKEN:-${GITHUB_TOKEN:-${HF_GITHUB_TOKEN:-${HF_GH_TOKEN:-}}}}"',
      'case "$prompt" in',
      ' *Username*) echo "x-access-token" ;;',
      ' *Password*) echo "$token" ;;',
      ' *) echo "" ;;',
      "esac",
      "",
    ].join("\n");
  writeFileSync(path, content, "utf8");
  // Owner-only read/execute so other local users cannot read or swap the helper.
  chmodSync(path, 0o700);
  cachedAskpassPath = path;
  return path;
}
// Environment for spawned git processes: never prompt on a terminal, and
// when a GitHub token is available, authenticate via the askpass helper.
function gitEnv(): Record<string, string> {
  const env: Record<string, string> = {
    ...(process.env as Record<string, string>),
    GIT_TERMINAL_PROMPT: "0",
  };
  const token = resolveGithubToken();
  if (!token) {
    return env;
  }
  env.GIT_ASKPASS = ensureAskpassScript();
  // Some tooling expects these vars; populate them without overwriting
  // values the environment already set.
  if (!env.GITHUB_TOKEN) env.GITHUB_TOKEN = token;
  if (!env.GH_TOKEN) env.GH_TOKEN = token;
  return env;
}
// A remote branch paired with the commit SHA it currently points at.
export interface BranchSnapshot {
  branchName: string;
  commitSha: string;
}
// Runs `git fetch --prune` in the repo. NOTE: this export shadows the
// global web `fetch` within this module — don't use the web API here.
export async function fetch(repoPath: string): Promise<void> {
  await execFileAsync("git", ["-C", repoPath, "fetch", "--prune"], {
    timeout: DEFAULT_GIT_FETCH_TIMEOUT_MS,
    env: gitEnv(),
  });
}
// Resolves a ref to its SHA via `git rev-parse`; throws when it fails.
export async function revParse(repoPath: string, ref: string): Promise<string> {
  const { stdout } = await execFileAsync("git", ["-C", repoPath, "rev-parse", ref], { env: gitEnv() });
  return stdout.trim();
}
// Verifies the remote URL is reachable and has a HEAD ref using
// `git ls-remote --exit-code`; throws with detail on any failure.
export async function validateRemote(remoteUrl: string): Promise<void> {
  const remote = remoteUrl.trim();
  if (!remote) {
    throw new Error("remoteUrl is required");
  }
  const execOptions = {
    // This command does not need repo context. Running from a neutral directory
    // avoids inheriting broken worktree .git indirection inside dev containers.
    cwd: tmpdir(),
    maxBuffer: 1024 * 1024,
    timeout: DEFAULT_GIT_VALIDATE_REMOTE_TIMEOUT_MS,
    env: gitEnv(),
  };
  try {
    await execFileAsync("git", ["ls-remote", "--exit-code", remote, "HEAD"], execOptions);
  } catch (error) {
    const detail = error instanceof Error ? error.message : String(error);
    throw new Error(`git remote validation failed: ${detail}`);
  }
}
// True when the directory contains a `.git` entry. NOTE(review): in a
// linked worktree `.git` is a file rather than a directory; existsSync
// accepts both, so worktrees also pass this check.
function isGitRepo(path: string): boolean {
  return existsSync(resolve(path, ".git"));
}
// Clones `remoteUrl` into `targetPath`, or refreshes an existing clone.
// An existing repo gets origin re-pointed to the configured remote and a
// fetch; throws when targetPath exists but is not a git repo.
export async function ensureCloned(remoteUrl: string, targetPath: string): Promise<void> {
  const remote = remoteUrl.trim();
  if (!remote) {
    throw new Error("remoteUrl is required");
  }
  if (existsSync(targetPath)) {
    if (!isGitRepo(targetPath)) {
      throw new Error(`targetPath exists but is not a git repo: ${targetPath}`);
    }
    // Keep origin aligned with the configured remote URL.
    await execFileAsync("git", ["-C", targetPath, "remote", "set-url", "origin", remote], {
      maxBuffer: 1024 * 1024,
      timeout: DEFAULT_GIT_FETCH_TIMEOUT_MS,
      env: gitEnv(),
    });
    await fetch(targetPath);
    return;
  }
  // Fresh clone: ensure the parent directory exists first.
  mkdirSync(dirname(targetPath), { recursive: true });
  await execFileAsync("git", ["clone", remote, targetPath], {
    maxBuffer: 1024 * 1024 * 8,
    timeout: DEFAULT_GIT_CLONE_TIMEOUT_MS,
    env: gitEnv(),
  });
  // Fetch with prune right after cloning so remote-tracking refs are fresh.
  await fetch(targetPath);
}
// Determines the repo's default base ref (e.g. "origin/main"): reads
// origin/HEAD first, then probes common branch names, and finally falls
// back to "origin/main" even when unverified.
export async function remoteDefaultBaseRef(repoPath: string): Promise<string> {
  try {
    const { stdout } = await execFileAsync("git", [
      "-C",
      repoPath,
      "symbolic-ref",
      "refs/remotes/origin/HEAD",
    ], { env: gitEnv() });
    const ref = stdout.trim(); // refs/remotes/origin/main
    // Strip the "refs/remotes/" prefix to get "origin/<branch>".
    const match = ref.match(/^refs\/remotes\/(.+)$/);
    if (match?.[1]) {
      return match[1];
    }
  } catch {
    // fall through
  }
  const candidates = ["origin/main", "origin/master", "main", "master"];
  for (const ref of candidates) {
    try {
      // rev-parse --verify succeeds only when the ref exists.
      await execFileAsync("git", ["-C", repoPath, "rev-parse", "--verify", ref], { env: gitEnv() });
      return ref;
    } catch {
      continue;
    }
  }
  return "origin/main";
}
// Lists origin's remote-tracking branches with their commit SHAs, excluding
// the symbolic HEAD entry.
export async function listRemoteBranches(repoPath: string): Promise<BranchSnapshot[]> {
  const { stdout } = await execFileAsync(
    "git",
    [
      "-C",
      repoPath,
      "for-each-ref",
      // One "<short-ref> <sha>" pair per line, e.g. "origin/main abc123".
      "--format=%(refname:short) %(objectname)",
      "refs/remotes/origin",
    ],
    { maxBuffer: 1024 * 1024, env: gitEnv() }
  );
  return stdout
    .trim()
    .split("\n")
    .filter((line) => line.trim().length > 0)
    .map((line) => {
      const [refName, commitSha] = line.trim().split(/\s+/, 2);
      const short = (refName ?? "").trim();
      // Strip the "origin/" prefix to get the bare branch name.
      const branchName = short.replace(/^origin\//, "");
      return { branchName, commitSha: commitSha ?? "" };
    })
    .filter(
      (row) =>
        row.branchName.length > 0 &&
        row.branchName !== "HEAD" &&
        row.branchName !== "origin" &&
        row.commitSha.length > 0,
    );
}
// True when a remote-tracking ref for `branchName` exists locally
// (show-ref exits non-zero when the ref is absent).
async function remoteBranchExists(repoPath: string, branchName: string): Promise<boolean> {
  try {
    await execFileAsync("git", [
      "-C",
      repoPath,
      "show-ref",
      "--verify",
      `refs/remotes/origin/${branchName}`,
    ], { env: gitEnv() });
    return true;
  } catch {
    return false;
  }
}
// Ensures `branchName` exists on origin, creating it from the default base
// ref when missing. Fetches before and after so local refs stay current.
export async function ensureRemoteBranch(repoPath: string, branchName: string): Promise<void> {
  await fetch(repoPath);
  if (await remoteBranchExists(repoPath, branchName)) {
    return;
  }
  const baseRef = await remoteDefaultBaseRef(repoPath);
  // Push the base ref directly to a new remote branch (no local branch).
  await execFileAsync("git", ["-C", repoPath, "push", "origin", `${baseRef}:refs/heads/${branchName}`], {
    maxBuffer: 1024 * 1024 * 2,
    env: gitEnv(),
  });
  await fetch(repoPath);
}
// Returns a compact "+<insertions>/-<deletions>" summary for the branch
// relative to the default base (three-dot diff, i.e. since merge-base).
// Best effort: "+0/-0" on failure or when there are no changes.
export async function diffStatForBranch(repoPath: string, branchName: string): Promise<string> {
  try {
    const baseRef = await remoteDefaultBaseRef(repoPath);
    const headRef = `origin/${branchName}`;
    const { stdout } = await execFileAsync(
      "git",
      ["-C", repoPath, "diff", "--shortstat", `${baseRef}...${headRef}`],
      { maxBuffer: 1024 * 1024, env: gitEnv() }
    );
    const trimmed = stdout.trim();
    if (!trimmed) {
      return "+0/-0";
    }
    // --shortstat output: "N files changed, X insertions(+), Y deletions(-)".
    const insertMatch = trimmed.match(/(\d+)\s+insertion/);
    const deleteMatch = trimmed.match(/(\d+)\s+deletion/);
    const insertions = insertMatch ? insertMatch[1] : "0";
    const deletions = deleteMatch ? deleteMatch[1] : "0";
    return `+${insertions}/-${deletions}`;
  } catch {
    return "+0/-0";
  }
}
// Best-effort conflict probe: would merging `branchName` into the default
// base conflict? Returns false when the check itself cannot run.
export async function conflictsWithMain(
  repoPath: string,
  branchName: string
): Promise<boolean> {
  try {
    const baseRef = await remoteDefaultBaseRef(repoPath);
    const headRef = `origin/${branchName}`;
    // Use merge-tree (git 2.38+) for a clean conflict check.
    try {
      await execFileAsync(
        "git",
        ["-C", repoPath, "merge-tree", "--write-tree", "--no-messages", baseRef, headRef],
        { env: gitEnv() }
      );
      // If merge-tree exits 0, no conflicts. Non-zero exit means conflicts.
      return false;
    } catch {
      // merge-tree exits non-zero when there are conflicts
      // NOTE(review): a missing merge-tree subcommand (git < 2.38) also
      // lands here and is reported as a conflict — confirm the minimum
      // supported git version.
      return true;
    }
  } catch {
    return false;
  }
}
// Extracts the owner segment from origin's URL (SSH or HTTPS forms).
// Returns "" when origin is missing or the URL shape is unrecognized.
export async function getOriginOwner(repoPath: string): Promise<string> {
  try {
    const { stdout } = await execFileAsync(
      "git",
      ["-C", repoPath, "remote", "get-url", "origin"],
      { env: gitEnv() }
    );
    const url = stdout.trim();
    // Handle SSH: git@github.com:owner/repo.git
    // NOTE(review): this pattern anchors on the final "owner/repo" segment,
    // so it matches HTTPS URLs too — the HTTPS branch below is rarely, if
    // ever, reached.
    const sshMatch = url.match(/[:\/]([^\/]+)\/[^\/]+(?:\.git)?$/);
    if (sshMatch) {
      return sshMatch[1] ?? "";
    }
    // Handle HTTPS: https://github.com/owner/repo.git
    const httpsMatch = url.match(/\/\/[^\/]+\/([^\/]+)\//);
    if (httpsMatch) {
      return httpsMatch[1] ?? "";
    }
    return "";
  } catch {
    return "";
  }
}

View file

@ -0,0 +1,290 @@
import { execFile } from "node:child_process";
import { promisify } from "node:util";
// Promise-based execFile for async/await use with the `gh` CLI.
const execFileAsync = promisify(execFile);
// Normalized view of a pull request assembled from `gh` CLI JSON output.
export interface PullRequestSnapshot {
  number: number;
  headRefName: string;
  state: string;
  title: string;
  url: string;
  author: string;
  isDraft: boolean;
  // "running", "<passed>/<total>", or null when there are no checks
  // (see parseCiStatus).
  ciStatus: string | null;
  // "CHANGES_REQUESTED" | "APPROVED" | "PENDING" | null (see parseReviewStatus).
  reviewStatus: string | null;
  reviewer: string | null;
}
// Shape of one item from `gh pr list/view --json` with PR_JSON_FIELDS.
interface GhPrListItem {
  number: number;
  headRefName: string;
  state: string;
  title: string;
  url?: string;
  author?: { login?: string };
  isDraft?: boolean;
  // One entry per status check/commit status.
  statusCheckRollup?: Array<{
    state?: string;
    status?: string;
    conclusion?: string;
    __typename?: string;
  }>;
  reviews?: Array<{
    state?: string;
    author?: { login?: string };
  }>;
}
// Summarizes a PR's status checks: null when there are no checks,
// "running" while anything is still pending and not everything has passed,
// otherwise "<passed>/<total>".
function parseCiStatus(
  checks: GhPrListItem["statusCheckRollup"]
): string | null {
  if (!checks || checks.length === 0) return null;

  const RUNNING_STATUSES = new Set(["IN_PROGRESS", "QUEUED", "PENDING"]);
  let passed = 0;
  let anyRunning = false;
  for (const check of checks) {
    const conclusion = check.conclusion?.toUpperCase();
    const state = check.state?.toUpperCase();
    const status = check.status?.toUpperCase();
    if (conclusion === "SUCCESS" || state === "SUCCESS") {
      passed += 1;
    } else if ((status !== undefined && RUNNING_STATUSES.has(status)) || state === "PENDING") {
      anyRunning = true;
    }
  }
  if (anyRunning && passed < checks.length) {
    return "running";
  }
  return `${passed}/${checks.length}`;
}
// Reduces a PR's reviews to a single status. COMMENTED reviews are ignored;
// only each author's most recent actionable review counts (input order is
// assumed chronological — matches original behavior). CHANGES_REQUESTED
// wins over APPROVED; other actionable states yield PENDING.
function parseReviewStatus(
  reviews: GhPrListItem["reviews"]
): { status: string | null; reviewer: string | null } {
  if (!reviews || reviews.length === 0) {
    return { status: null, reviewer: null };
  }

  // Last actionable review per author (later entries overwrite earlier ones).
  const latestByAuthor = new Map<string, { state: string; login: string }>();
  for (const review of reviews) {
    const state = review.state?.toUpperCase() ?? "";
    if (state === "COMMENTED") continue; // Skip comments, only track actionable reviews
    const login = review.author?.login ?? "unknown";
    latestByAuthor.set(login, { state, login });
  }

  const firstWithState = (wanted: string) => {
    for (const entry of latestByAuthor.values()) {
      if (entry.state === wanted) return entry;
    }
    return undefined;
  };

  // CHANGES_REQUESTED takes priority over APPROVED.
  const blocking = firstWithState("CHANGES_REQUESTED");
  if (blocking) {
    return { status: "CHANGES_REQUESTED", reviewer: blocking.login };
  }
  const approved = firstWithState("APPROVED");
  if (approved) {
    return { status: "APPROVED", reviewer: approved.login };
  }
  // Reviews exist, but none are APPROVED or CHANGES_REQUESTED.
  if (latestByAuthor.size > 0) {
    const first = latestByAuthor.values().next().value;
    return { status: "PENDING", reviewer: first?.login ?? null };
  }
  return { status: null, reviewer: null };
}
function snapshotFromGhItem(item: GhPrListItem): PullRequestSnapshot {
const { status: reviewStatus, reviewer } = parseReviewStatus(item.reviews);
return {
number: item.number,
headRefName: item.headRefName,
state: item.state,
title: item.title,
url: item.url ?? "",
author: item.author?.login ?? "",
isDraft: item.isDraft ?? false,
ciStatus: parseCiStatus(item.statusCheckRollup),
reviewStatus,
reviewer
};
}
// Fields requested from `gh pr list/view --json`; must cover everything
// snapshotFromGhItem reads.
const PR_JSON_FIELDS =
  "number,headRefName,state,title,url,author,isDraft,statusCheckRollup,reviews";
// Lists PRs via the gh CLI (up to 200), normalized to snapshots.
// Best effort: returns [] when gh is unavailable or the call fails.
export async function listPullRequests(repoPath: string): Promise<PullRequestSnapshot[]> {
  try {
    const { stdout } = await execFileAsync(
      "gh",
      [
        "pr",
        "list",
        "--json",
        PR_JSON_FIELDS,
        "--limit",
        "200"
      ],
      { maxBuffer: 1024 * 1024 * 4, cwd: repoPath }
    );
    const parsed = JSON.parse(stdout) as GhPrListItem[];
    return parsed.map((item) => {
      // Handle fork PRs where headRefName may contain "owner:branch"
      const headRefName = item.headRefName.includes(":")
        ? item.headRefName.split(":").pop() ?? item.headRefName
        : item.headRefName;
      return snapshotFromGhItem({ ...item, headRefName });
    });
  } catch {
    return [];
  }
}
// Fetches one PR (addressed by branch name) as a snapshot; null when the
// PR does not exist or gh fails.
export async function getPrInfo(
  repoPath: string,
  branchName: string
): Promise<PullRequestSnapshot | null> {
  try {
    const { stdout } = await execFileAsync(
      "gh",
      [
        "pr",
        "view",
        branchName,
        "--json",
        PR_JSON_FIELDS
      ],
      { maxBuffer: 1024 * 1024 * 4, cwd: repoPath }
    );
    const item = JSON.parse(stdout) as GhPrListItem;
    return snapshotFromGhItem(item);
  } catch {
    return null;
  }
}
// Creates a PR for `headBranch` via the gh CLI and returns its number/URL.
// An explicit empty --body is always passed (presumably to keep gh from
// dropping into an interactive prompt — confirm).
export async function createPr(
  repoPath: string,
  headBranch: string,
  title: string,
  body?: string
): Promise<{ number: number; url: string }> {
  const args = ["pr", "create", "--title", title, "--head", headBranch];
  if (body) {
    args.push("--body", body);
  } else {
    args.push("--body", "");
  }
  const { stdout } = await execFileAsync("gh", args, {
    maxBuffer: 1024 * 1024,
    cwd: repoPath
  });
  // gh pr create outputs the PR URL on success
  const url = stdout.trim();
  // Extract PR number from URL: https://github.com/owner/repo/pull/123
  // NOTE(review): falls back to number 0 when the URL shape is unexpected.
  const numberMatch = url.match(/\/pull\/(\d+)/);
  const number = numberMatch ? parseInt(numberMatch[1]!, 10) : 0;
  return { number, url };
}
// Picks a merge method the repo allows, preferring squash > rebase > merge.
// Defaults to "squash" when repo settings cannot be read.
export async function getAllowedMergeMethod(
  repoPath: string
): Promise<"squash" | "rebase" | "merge"> {
  try {
    // Get the repo owner/name from gh
    const { stdout: repoJson } = await execFileAsync(
      "gh",
      ["repo", "view", "--json", "owner,name"],
      { cwd: repoPath }
    );
    const repo = JSON.parse(repoJson) as { owner: { login: string }; name: string };
    const repoFullName = `${repo.owner.login}/${repo.name}`;
    // Query the three allow_* flags in one API call; --jq emits one value
    // per line in the order requested.
    const { stdout } = await execFileAsync(
      "gh",
      [
        "api",
        `repos/${repoFullName}`,
        "--jq",
        ".allow_squash_merge, .allow_rebase_merge, .allow_merge_commit"
      ],
      { maxBuffer: 1024 * 1024, cwd: repoPath }
    );
    const lines = stdout.trim().split("\n");
    const allowSquash = lines[0]?.trim() === "true";
    const allowRebase = lines[1]?.trim() === "true";
    const allowMerge = lines[2]?.trim() === "true";
    if (allowSquash) return "squash";
    if (allowRebase) return "rebase";
    if (allowMerge) return "merge";
    return "squash";
  } catch {
    return "squash";
  }
}
// Merges a PR using the repo's preferred allowed method, deleting the head
// branch afterwards. Throws when the gh command fails.
export async function mergePr(repoPath: string, prNumber: number): Promise<void> {
  const method = await getAllowedMergeMethod(repoPath);
  await execFileAsync(
    "gh",
    ["pr", "merge", String(prNumber), `--${method}`, "--delete-branch"],
    { cwd: repoPath }
  );
}
// True when the PR for `branchName` reports state MERGED. Best effort:
// false when the PR is missing or gh fails.
export async function isPrMerged(
  repoPath: string,
  branchName: string
): Promise<boolean> {
  try {
    const { stdout } = await execFileAsync(
      "gh",
      ["pr", "view", branchName, "--json", "state"],
      { cwd: repoPath }
    );
    const parsed = JSON.parse(stdout) as { state: string };
    return parsed.state.toUpperCase() === "MERGED";
  } catch {
    return false;
  }
}
// Returns the PR title for `branchName`, or null when the PR is missing or
// gh fails.
export async function getPrTitle(
  repoPath: string,
  branchName: string
): Promise<string | null> {
  try {
    const { stdout } = await execFileAsync(
      "gh",
      ["pr", "view", branchName, "--json", "title"],
      { cwd: repoPath }
    );
    const parsed = JSON.parse(stdout) as { title: string };
    return parsed.title;
  } catch {
    return null;
  }
}

View file

@ -0,0 +1,159 @@
import { execFile } from "node:child_process";
import { promisify } from "node:util";
// Promise-based execFile for async/await use with the Graphite (`gt`) CLI.
const execFileAsync = promisify(execFile);
// True when the Graphite CLI works in this repo (`gt trunk` probes both the
// binary and repo initialization).
export async function graphiteAvailable(repoPath: string): Promise<boolean> {
  try {
    await execFileAsync("gt", ["trunk"], { cwd: repoPath });
  } catch {
    return false;
  }
  return true;
}
// Runs `gt get <branch>`; returns false instead of throwing on failure.
export async function graphiteGet(repoPath: string, branchName: string): Promise<boolean> {
  try {
    await execFileAsync("gt", ["get", branchName], { cwd: repoPath });
    return true;
  } catch {
    return false;
  }
}
// Creates a new stacked branch via `gt create`; throws on failure.
export async function graphiteCreateBranch(
  repoPath: string,
  branchName: string
): Promise<void> {
  await execFileAsync("gt", ["create", branchName], { cwd: repoPath });
}
// Checks out a branch via `gt checkout`; throws on failure.
export async function graphiteCheckout(
  repoPath: string,
  branchName: string
): Promise<void> {
  await execFileAsync("gt", ["checkout", branchName], { cwd: repoPath });
}
// Submits the current stack via `gt submit --no-edit`; throws on failure.
export async function graphiteSubmit(repoPath: string): Promise<void> {
  await execFileAsync("gt", ["submit", "--no-edit"], { cwd: repoPath });
}
// Merges a branch via `gt merge`; throws on failure.
export async function graphiteMergeBranch(
  repoPath: string,
  branchName: string
): Promise<void> {
  await execFileAsync("gt", ["merge", branchName], { cwd: repoPath });
}
// Abandons a branch via `gt abandon`; throws on failure.
export async function graphiteAbandon(
  repoPath: string,
  branchName: string
): Promise<void> {
  await execFileAsync("gt", ["abandon", branchName], { cwd: repoPath });
}
// One branch in the Graphite stack with its parent (null when unknown).
export interface GraphiteStackEntry {
  branchName: string;
  parentBranch: string | null;
}
// Reads the Graphite branch stack. Prefers `gt log --json`; when that
// fails, falls back to parsing `gt log` text output. Best effort: returns
// [] when both fail.
export async function graphiteGetStack(
  repoPath: string
): Promise<GraphiteStackEntry[]> {
  try {
    // Try JSON output first
    const { stdout } = await execFileAsync("gt", ["log", "--json"], {
      cwd: repoPath,
      maxBuffer: 1024 * 1024
    });
    // Accept either `branch`/`parent` or `name`/`parentBranch` key shapes.
    const parsed = JSON.parse(stdout) as Array<{
      branch?: string;
      name?: string;
      parent?: string;
      parentBranch?: string;
    }>;
    return parsed.map((entry) => ({
      branchName: entry.branch ?? entry.name ?? "",
      parentBranch: entry.parent ?? entry.parentBranch ?? null
    }));
  } catch {
    // Fall back to text parsing of `gt log`
    try {
      const { stdout } = await execFileAsync("gt", ["log"], {
        cwd: repoPath,
        maxBuffer: 1024 * 1024
      });
      const entries: GraphiteStackEntry[] = [];
      const lines = stdout.split("\n").filter((l) => l.trim().length > 0);
      // Parse indented tree output: each line has tree chars (|, /, \, -, etc.)
      // followed by branch names. Build parent-child from indentation level.
      const branchStack: string[] = [];
      for (const line of lines) {
        // Strip ANSI color codes
        const clean = line.replace(/\x1b\[[0-9;]*m/g, "");
        // Extract branch name: skip tree characters and whitespace
        const branchMatch = clean.match(/[│├└─|/\\*\s]*(?:◉|○|●)?\s*(.+)/);
        if (!branchMatch) continue;
        const branchName = branchMatch[1]!.trim();
        // Skip blanks and parenthesized annotation lines.
        if (!branchName || branchName.startsWith("(") || branchName === "") continue;
        // Determine indentation level by counting leading whitespace/tree chars
        // NOTE(review): assumes 2 columns per nesting level — confirm against
        // real `gt log` output across CLI versions.
        const indent = clean.search(/[a-zA-Z0-9]/);
        const level = Math.max(0, Math.floor(indent / 2));
        // Trim stack to current level
        while (branchStack.length > level) {
          branchStack.pop();
        }
        const parentBranch = branchStack.length > 0
          ? branchStack[branchStack.length - 1] ?? null
          : null;
        entries.push({ branchName, parentBranch });
        branchStack.push(branchName);
      }
      return entries;
    } catch {
      return [];
    }
  }
}
// Resolves a branch's parent: first by grepping `gt get` output, then from
// the parsed stack. Returns null when unknown.
export async function graphiteGetParent(
  repoPath: string,
  branchName: string
): Promise<string | null> {
  try {
    // Try `gt get <branchName>` to see parent info
    const { stdout } = await execFileAsync("gt", ["get", branchName], {
      cwd: repoPath,
      maxBuffer: 1024 * 1024
    });
    // Parse output for parent branch reference
    const parentMatch = stdout.match(/parent:\s*(\S+)/i);
    if (parentMatch) {
      return parentMatch[1] ?? null;
    }
  } catch {
    // Fall through to stack-based lookup
  }
  // Fall back to stack info
  // (graphiteGetStack already swallows its own errors and returns [], so
  // this try/catch is defensive only.)
  try {
    const stack = await graphiteGetStack(repoPath);
    const entry = stack.find((e) => e.branchName === branchName);
    return entry?.parentBranch ?? null;
  } catch {
    return null;
  }
}

View file

@ -0,0 +1,394 @@
import type { AgentType } from "@openhandoff/shared";
import type {
ListEventsRequest,
ListPage,
ListPageRequest,
SessionEvent,
SessionPersistDriver,
SessionRecord
} from "sandbox-agent";
import { SandboxAgent } from "sandbox-agent";
/** Agent identifiers accepted by this client: the shared set plus "opencode". */
export type AgentId = AgentType | "opencode";
/** Coarse client-side view of a sandbox session. */
export interface SandboxSession {
  id: string;
  // "running" while a prompt turn is believed in flight, "idle" after it
  // settles, "error" when the last observed turn failed.
  status: "running" | "idle" | "error";
}
/** Options for creating a session; every field is optional. */
export interface SandboxSessionCreateRequest {
  prompt?: string; // first turn to fire in the background, if any
  cwd?: string; // working directory for the session (defaults to "/")
  agent?: AgentId; // overrides the client-level agent for this session
}
/** Options for sending a follow-up prompt to an existing session. */
export interface SandboxSessionPromptRequest {
  sessionId: string;
  prompt: string;
  notification?: boolean; // true (default) = fire-and-forget; false = await the turn
}
/** Construction options for SandboxAgentClient. */
export interface SandboxAgentClientOptions {
  endpoint: string; // base URL of the sandbox-agent server; trailing slash is stripped
  token?: string;
  agent?: AgentId; // defaults to DEFAULT_AGENT
  persist?: SessionPersistDriver;
}
// Agent used when neither the client options nor the request specify one.
const DEFAULT_AGENT: AgentId = "codex";
/**
 * Session mode to request for a given agent, or null when the agent has no
 * known mode (the mode update is then skipped entirely).
 */
function modeIdForAgent(agent: AgentId): string | null {
  if (agent === "codex") {
    return "full-access";
  }
  if (agent === "claude") {
    return "acceptEdits";
  }
  return null;
}
/**
 * Best-effort mapping from a raw sandbox-agent event payload to a session
 * status. Returns null when the payload carries no recognizable signal.
 *
 * Signals, in priority order: an `error` field means "error"; a result with
 * a non-empty `stopReason` means the turn ended ("idle"); otherwise the
 * method name is scanned for error/terminal keywords.
 */
function normalizeStatusFromMessage(payload: unknown): SandboxSession["status"] | null {
  if (!payload || typeof payload !== "object") {
    return null;
  }
  const envelope = payload as {
    error?: unknown;
    method?: unknown;
    result?: unknown;
  };
  if (envelope.error) {
    return "error";
  }
  if (envelope.result && typeof envelope.result === "object") {
    const stopReason = (envelope.result as { stopReason?: unknown }).stopReason;
    if (typeof stopReason === "string" && stopReason.length > 0) {
      return "idle";
    }
  }
  if (typeof envelope.method !== "string") {
    return null;
  }
  const method = envelope.method.toLowerCase();
  if (method.includes("error") || method.includes("failed")) {
    return "error";
  }
  if (method.includes("ended") || method.includes("complete") || method.includes("stopped")) {
    return "idle";
  }
  return null;
}
/**
 * Thin client over the sandbox-agent SDK that layers a coarse per-session
 * status ("running" | "idle" | "error") on top of the raw session API.
 *
 * Status is tracked optimistically in memory: prompts are usually fired in
 * the background, and completion/errors are observed via promise settlement
 * or, in sessionStatus(), by scanning recent session events.
 */
export class SandboxAgentClient {
  readonly endpoint: string;
  readonly token?: string;
  readonly agent: AgentId;
  readonly persist?: SessionPersistDriver;
  // Lazily-created SDK connection, shared by every call on this client.
  private sdkPromise?: Promise<SandboxAgent>;
  // Last known status per session id; "running" entries are re-checked
  // against live events in sessionStatus().
  private readonly statusBySessionId = new Map<string, SandboxSession["status"]>();

  constructor(options: SandboxAgentClientOptions) {
    // Strip one trailing slash so URL joins never produce double slashes.
    this.endpoint = options.endpoint.replace(/\/$/, "");
    this.token = options.token;
    this.agent = options.agent ?? DEFAULT_AGENT;
    this.persist = options.persist;
  }

  /** Connect (once) to the sandbox-agent server and memoize the handle. */
  private async sdk(): Promise<SandboxAgent> {
    if (!this.sdkPromise) {
      this.sdkPromise = SandboxAgent.connect({
        baseUrl: this.endpoint,
        token: this.token,
        persist: this.persist,
      });
    }
    return this.sdkPromise;
  }

  /** Record the latest known status for a session. */
  private setStatus(sessionId: string, status: SandboxSession["status"]): void {
    this.statusBySessionId.set(sessionId, status);
  }

  /**
   * Heuristic for the sandbox-agent server's prompt timeout. A timed-out
   * prompt usually means the agent is still working, so callers treat it as
   * "still running" rather than an error.
   */
  private isLikelyPromptTimeout(err: unknown): boolean {
    const message = err instanceof Error ? err.message : String(err);
    const lowered = message.toLowerCase();
    // sandbox-agent server times out long-running ACP prompts and returns a 504-like error.
    return (
      lowered.includes("timeout waiting for agent response") ||
      lowered.includes("timed out waiting for agent response") ||
      lowered.includes("504")
    );
  }

  /**
   * Create a new session and, when a prompt is supplied, fire the first turn
   * in the background. Returns quickly: the returned status is "idle" for a
   * promptless session, otherwise "running".
   */
  async createSession(request: string | SandboxSessionCreateRequest): Promise<SandboxSession> {
    const normalized: SandboxSessionCreateRequest =
      typeof request === "string"
        ? { prompt: request }
        : request;
    const sdk = await this.sdk();
    // Do not wrap createSession in a local Promise.race timeout. The underlying SDK
    // call is not abortable, so local timeout races create overlapping ACP requests and
    // can produce duplicate/orphaned sessions while the original request is still running.
    const session = await sdk.createSession({
      agent: normalized.agent ?? this.agent,
      sessionInit: {
        cwd: normalized.cwd ?? "/",
        mcpServers: [],
      },
    });
    const modeId = modeIdForAgent(normalized.agent ?? this.agent);
    // Codex defaults to a restrictive "read-only" preset in some environments.
    // For OpenHandoff automation we need to allow edits + command execution + network
    // access (git push / PR creation). Use full-access where supported.
    //
    // If the agent doesn't support session modes, ignore.
    //
    // Do this in the background: ACP mode updates can occasionally time out (504),
    // and waiting here can stall session creation long enough to trip handoff init
    // step timeouts even though the session itself was created.
    if (modeId) {
      void session.send("session/set_mode", { modeId }).catch(() => {
        // ignore
      });
    }
    const prompt = normalized.prompt?.trim();
    if (!prompt) {
      this.setStatus(session.id, "idle");
      return {
        id: session.id,
        status: "idle",
      };
    }
    // Fire the first turn in the background. We intentionally do not await this:
    // session creation must remain fast, and we observe completion via events/stopReason.
    //
    // Note: sandbox-agent's ACP adapter for Codex may take >2 minutes to respond.
    // sandbox-agent can return a timeout error (504) even though the agent continues
    // running. Treat that timeout as non-fatal and keep polling events.
    void session
      .prompt([{ type: "text", text: prompt }])
      .then(() => {
        this.setStatus(session.id, "idle");
      })
      .catch((err) => {
        if (this.isLikelyPromptTimeout(err)) {
          this.setStatus(session.id, "running");
          return;
        }
        this.setStatus(session.id, "error");
      });
    this.setStatus(session.id, "running");
    return {
      id: session.id,
      status: "running",
    };
  }

  /** Create a session rooted at `dir` without sending an initial prompt. */
  async createSessionNoTask(dir: string): Promise<SandboxSession> {
    return this.createSession({
      cwd: dir,
    });
  }

  /** List session records, one page at a time. */
  async listSessions(request: ListPageRequest = {}): Promise<ListPage<SessionRecord>> {
    const sdk = await this.sdk();
    const page = await sdk.listSessions(request);
    return {
      items: page.items.map((session) => session.toRecord()),
      nextCursor: page.nextCursor,
    };
  }

  /** Fetch a page of raw session events. */
  async listEvents(request: ListEventsRequest): Promise<ListPage<SessionEvent>> {
    const sdk = await this.sdk();
    return sdk.getEvents(request);
  }

  /**
   * Send a follow-up prompt to an existing session.
   *
   * With `notification` true (the default) the prompt is fire-and-forget:
   * the session is marked "running" and its status updates when the
   * background turn settles. Otherwise the call awaits the full turn and
   * leaves the session "idle" on success.
   *
   * @throws when the session id is unknown, or (awaited mode only) when the
   *         prompt fails for a reason other than the server's ACP timeout.
   */
  async sendPrompt(request: SandboxSessionPromptRequest): Promise<void> {
    const sdk = await this.sdk();
    const existing = await sdk.getSession(request.sessionId);
    if (!existing) {
      throw new Error(`session '${request.sessionId}' not found`);
    }
    const session = await sdk.resumeSession(request.sessionId);
    const modeId = modeIdForAgent(this.agent);
    // Keep mode update best-effort and non-blocking for the same reason as createSession.
    if (modeId) {
      void session.send("session/set_mode", { modeId }).catch(() => {
        // ignore
      });
    }
    const text = request.prompt.trim();
    if (!text) return;
    // sandbox-agent's Session.send(notification=true) forwards an extNotification with
    // method "session/prompt", which some agents (e.g. codex-acp) do not implement.
    // Use Session.prompt and treat notification=true as "fire-and-forget".
    const fireAndForget = request.notification ?? true;
    if (!fireAndForget) {
      try {
        await session.prompt([{ type: "text", text }]);
        this.setStatus(request.sessionId, "idle");
      } catch (err) {
        if (this.isLikelyPromptTimeout(err)) {
          this.setStatus(request.sessionId, "running");
          return;
        }
        throw err;
      }
      // Bug fix: previously control fell through to a trailing
      // setStatus(..., "running"), clobbering the "idle" recorded above
      // even though the awaited turn had already completed.
      return;
    }
    // Mark running before launching the background turn; the .then/.catch
    // handlers below run on later microtasks and record the final status.
    this.setStatus(request.sessionId, "running");
    void session
      .prompt([{ type: "text", text }])
      .then(() => {
        this.setStatus(request.sessionId, "idle");
      })
      .catch((err) => {
        if (this.isLikelyPromptTimeout(err)) {
          this.setStatus(request.sessionId, "running");
          return;
        }
        this.setStatus(request.sessionId, "error");
      });
  }

  /**
   * Cancel the current turn of a session (ACP "session/cancel") and mark it
   * idle. @throws when the session id is unknown.
   */
  async cancelSession(sessionId: string): Promise<void> {
    const sdk = await this.sdk();
    const existing = await sdk.getSession(sessionId);
    if (!existing) {
      throw new Error(`session '${sessionId}' not found`);
    }
    const session = await sdk.resumeSession(sessionId);
    await session.send("session/cancel", {});
    this.setStatus(sessionId, "idle");
  }

  /** Destroy a session on the server and mark it idle locally. */
  async destroySession(sessionId: string): Promise<void> {
    const sdk = await this.sdk();
    await sdk.destroySession(sessionId);
    this.setStatus(sessionId, "idle");
  }

  /**
   * Resolve the current status of a session. Cached terminal statuses are
   * returned as-is; a cached "running" (or unknown) session is re-checked
   * against the server record and its most recent events.
   */
  async sessionStatus(sessionId: string): Promise<SandboxSession> {
    const cached = this.statusBySessionId.get(sessionId);
    if (cached && cached !== "running") {
      return { id: sessionId, status: cached };
    }
    const sdk = await this.sdk();
    const session = await sdk.getSession(sessionId);
    if (!session) {
      this.setStatus(sessionId, "error");
      return { id: sessionId, status: "error" };
    }
    const record = session.toRecord();
    if (record.destroyedAt) {
      this.setStatus(sessionId, "idle");
      return { id: sessionId, status: "idle" };
    }
    const events = await sdk.getEvents({
      sessionId,
      limit: 25,
    });
    // Scan newest-first for the first event that carries a status signal.
    for (let i = events.items.length - 1; i >= 0; i--) {
      const item = events.items[i];
      if (!item) continue;
      const status = normalizeStatusFromMessage(item.payload);
      if (status) {
        this.setStatus(sessionId, status);
        return { id: sessionId, status };
      }
    }
    this.setStatus(sessionId, "running");
    return { id: sessionId, status: "running" };
  }

  /** Destroy every session whose init cwd equals `dir`, paging through all sessions. */
  async killSessionsInDirectory(dir: string): Promise<void> {
    const sdk = await this.sdk();
    let cursor: string | undefined;
    do {
      const page = await sdk.listSessions({
        cursor,
        limit: 100,
      });
      for (const session of page.items) {
        const initCwd = session.toRecord().sessionInit?.cwd;
        if (initCwd !== dir) {
          continue;
        }
        await sdk.destroySession(session.id);
        this.statusBySessionId.delete(session.id);
      }
      cursor = page.nextCursor;
    } while (cursor);
  }

  /**
   * Ask the agent to produce a conventional commit message for `spec`/`task`
   * in a fresh session rooted at `dir`, and return the agent's last text
   * response. @throws when the agent produced no text output.
   */
  async generateCommitMessage(
    dir: string,
    spec: string,
    task: string
  ): Promise<string> {
    const prompt = [
      "Generate a conventional commit message for the following changes.",
      "Return ONLY the commit message, no explanation or markdown formatting.",
      "",
      `Task: ${task}`,
      "",
      `Spec/diff:\n${spec}`
    ].join("\n");
    const sdk = await this.sdk();
    const session = await sdk.createSession({
      agent: this.agent,
      sessionInit: {
        cwd: dir,
        mcpServers: [],
      },
    });
    await session.prompt([{ type: "text", text: prompt }]);
    this.setStatus(session.id, "idle");
    const events = await sdk.getEvents({
      sessionId: session.id,
      limit: 100,
    });
    // Take the newest agent event that carries non-empty params.text.
    for (let i = events.items.length - 1; i >= 0; i--) {
      const event = events.items[i];
      if (!event) continue;
      if (event.sender !== "agent") continue;
      const payload = event.payload as Record<string, unknown>;
      const params = payload.params;
      if (!params || typeof params !== "object") continue;
      const text = (params as { text?: unknown }).text;
      if (typeof text === "string" && text.trim().length > 0) {
        return text.trim();
      }
    }
    throw new Error("sandbox-agent commit message response was empty");
  }
}

View file

@ -0,0 +1,124 @@
import { execFile } from "node:child_process";
import { promisify } from "node:util";
const execFileAsync = promisify(execFile);
/** Urgency levels understood by every notification backend. */
export type NotifyUrgency = "low" | "normal" | "high";
/** A pluggable notification transport (desktop popup, CLI, terminal bell). */
export interface NotifyBackend {
  name: string;
  // Whether this backend can deliver on the current host.
  available(): Promise<boolean>;
  // Returns true when the notification was delivered; false on any failure.
  send(title: string, body: string, urgency: NotifyUrgency): Promise<boolean>;
}
/** True when `binary` resolves on PATH (probed by running `which`). */
async function isOnPath(binary: string): Promise<boolean> {
  return execFileAsync("which", [binary]).then(
    () => true,
    () => false
  );
}
/** Notification backend that shells out to the `openclaw` CLI. */
export class OpenclawBackend implements NotifyBackend {
  readonly name = "openclaw";

  /** Available whenever the `openclaw` binary is on PATH. */
  async available(): Promise<boolean> {
    return isOnPath("openclaw");
  }

  /** Urgency is ignored: `openclaw wake` takes no urgency flag. */
  async send(title: string, body: string, _urgency: NotifyUrgency): Promise<boolean> {
    const args = ["wake", "--title", title, "--body", body];
    return execFileAsync("openclaw", args).then(
      () => true,
      () => false
    );
  }
}
/** Notification backend using macOS `osascript` display notification. */
export class MacOsNotifyBackend implements NotifyBackend {
  readonly name = "macos-osascript";

  /** Only usable on macOS. */
  async available(): Promise<boolean> {
    return process.platform === "darwin";
  }

  /** Urgency is ignored: AppleScript notifications have no urgency concept. */
  async send(title: string, body: string, _urgency: NotifyUrgency): Promise<boolean> {
    // Escape backslashes first, then double quotes, so values embed safely
    // inside an AppleScript string literal.
    const quote = (value: string): string =>
      value.replace(/\\/g, "\\\\").replace(/"/g, '\\"');
    const script = `display notification "${quote(body)}" with title "${quote(title)}"`;
    try {
      await execFileAsync("osascript", ["-e", script]);
      return true;
    } catch {
      return false;
    }
  }
}
/** Notification backend using the freedesktop `notify-send` CLI. */
export class LinuxNotifySendBackend implements NotifyBackend {
  readonly name = "linux-notify-send";

  /** Available whenever `notify-send` is on PATH. */
  async available(): Promise<boolean> {
    return isOnPath("notify-send");
  }

  async send(title: string, body: string, urgency: NotifyUrgency): Promise<boolean> {
    // notify-send names its highest level "critical" rather than "high";
    // "low" and "normal" map through unchanged.
    const level = urgency === "high" ? "critical" : urgency;
    try {
      await execFileAsync("notify-send", ["-u", level, title, body]);
      return true;
    } catch {
      return false;
    }
  }
}
/** Last-resort backend: rings the terminal bell and prints to stderr. */
export class TerminalBellBackend implements NotifyBackend {
  readonly name = "terminal";

  /** Always usable — it only writes to stderr. */
  async available(): Promise<boolean> {
    return true;
  }

  async send(title: string, body: string, _urgency: NotifyUrgency): Promise<boolean> {
    try {
      // BEL character followed by a one-line rendering of the notification.
      process.stderr.write("\x07");
      process.stderr.write(`[${title}] ${body}\n`);
      return true;
    } catch {
      return false;
    }
  }
}
/** Registry of constructible notification backends, keyed by config name. */
const backendFactories: Record<string, () => NotifyBackend> = {
  "openclaw": () => new OpenclawBackend(),
  "macos-osascript": () => new MacOsNotifyBackend(),
  "linux-notify-send": () => new LinuxNotifySendBackend(),
  "terminal": () => new TerminalBellBackend(),
};

/**
 * Instantiate the configured backends, in order, keeping only those whose
 * availability probe succeeds. Unknown names are silently skipped.
 */
export async function createBackends(configOrder: string[]): Promise<NotifyBackend[]> {
  const usable: NotifyBackend[] = [];
  for (const name of configOrder) {
    const make = backendFactories[name];
    if (!make) {
      continue;
    }
    const candidate = make();
    if (await candidate.available()) {
      usable.push(candidate);
    }
  }
  return usable;
}

View file

@ -0,0 +1,67 @@
import type { NotifyBackend, NotifyUrgency } from "./backends.js";
export type { NotifyUrgency } from "./backends.js";
export { createBackends } from "./backends.js";
/**
 * High-level notification API: one generic `notify` plus typed helpers for
 * each OpenHandoff event. Each helper formats a fixed title/body and picks
 * an urgency (see createNotificationService for the exact mappings).
 */
export interface NotificationService {
  notify(title: string, body: string, urgency: NotifyUrgency): Promise<void>;
  agentIdle(branchName: string): Promise<void>;
  agentError(branchName: string, error: string): Promise<void>;
  ciPassed(branchName: string, prNumber: number): Promise<void>;
  ciFailed(branchName: string, prNumber: number): Promise<void>;
  prApproved(branchName: string, prNumber: number, reviewer: string): Promise<void>;
  changesRequested(branchName: string, prNumber: number, reviewer: string): Promise<void>;
  prMerged(branchName: string, prNumber: number): Promise<void>;
  handoffCreated(branchName: string): Promise<void>;
}
/**
 * Build a NotificationService that tries each backend in order and stops at
 * the first one reporting a successful delivery. When every backend fails
 * the notification is dropped silently.
 */
export function createNotificationService(backends: NotifyBackend[]): NotificationService {
  const notify = async (title: string, body: string, urgency: NotifyUrgency): Promise<void> => {
    for (const backend of backends) {
      if (await backend.send(title, body, urgency)) {
        return;
      }
    }
  };
  return {
    notify,
    agentIdle: (branchName) =>
      notify("Agent Idle", `Agent finished on ${branchName}`, "normal"),
    agentError: (branchName, error) =>
      notify("Agent Error", `Agent error on ${branchName}: ${error}`, "high"),
    ciPassed: (branchName, prNumber) =>
      notify("CI Passed", `CI passed on ${branchName} (PR #${prNumber})`, "low"),
    ciFailed: (branchName, prNumber) =>
      notify("CI Failed", `CI failed on ${branchName} (PR #${prNumber})`, "high"),
    prApproved: (branchName, prNumber, reviewer) =>
      notify("PR Approved", `PR #${prNumber} on ${branchName} approved by ${reviewer}`, "normal"),
    changesRequested: (branchName, prNumber, reviewer) =>
      notify(
        "Changes Requested",
        `Changes requested on PR #${prNumber} (${branchName}) by ${reviewer}`,
        "high",
      ),
    prMerged: (branchName, prNumber) =>
      notify("PR Merged", `PR #${prNumber} on ${branchName} merged`, "normal"),
    handoffCreated: (branchName) =>
      notify("Handoff Created", `New handoff on ${branchName}`, "low"),
  };
}

View file

@ -0,0 +1,50 @@
/** CI status for a PR; "running" is the in-progress phase transitions fire from. */
export type CiState = "running" | "pass" | "fail" | "unknown";
/** Review status for a PR; "pending" is the in-progress phase transitions fire from. */
export type ReviewState = "approved" | "changes_requested" | "pending" | "none" | "unknown";
/** An edge-triggered state change emitted by PrStateTracker.update. */
export interface PrStateTransition {
  type: "ci_passed" | "ci_failed" | "pr_approved" | "changes_requested";
  branchName: string;
  prNumber: number;
  reviewer?: string; // only populated for review transitions
}
/**
 * Remembers the last observed CI + review state per repo/branch and emits
 * edge-triggered transitions: an event fires only when a state moves out of
 * its in-progress phase ("running" for CI, "pending" for review).
 */
export class PrStateTracker {
  // Keyed by `${repoId}:${branchName}`.
  private states = new Map<string, { ci: CiState; review: ReviewState }>();

  /**
   * Record the current states and return the transitions they imply
   * relative to the previous observation (empty on first sight).
   */
  update(
    repoId: string,
    branchName: string,
    prNumber: number,
    ci: CiState,
    review: ReviewState,
    reviewer?: string,
  ): PrStateTransition[] {
    const key = `${repoId}:${branchName}`;
    const previous = this.states.get(key);
    this.states.set(key, { ci, review });
    if (!previous) {
      // First observation: nothing to compare against.
      return [];
    }
    const fired: PrStateTransition[] = [];
    // CI transitions only fire when leaving the "running" phase.
    if (previous.ci === "running") {
      if (ci === "pass") {
        fired.push({ type: "ci_passed", branchName, prNumber });
      } else if (ci === "fail") {
        fired.push({ type: "ci_failed", branchName, prNumber });
      }
    }
    // Review transitions only fire when leaving the "pending" phase.
    if (previous.review === "pending") {
      if (review === "approved") {
        fired.push({ type: "pr_approved", branchName, prNumber, reviewer });
      } else if (review === "changes_requested") {
        fired.push({ type: "changes_requested", branchName, prNumber, reviewer });
      }
    }
    return fired;
  }
}

View file

@ -0,0 +1,475 @@
import type {
AgentEndpoint,
AttachTarget,
AttachTargetRequest,
CreateSandboxRequest,
DestroySandboxRequest,
EnsureAgentRequest,
ExecuteSandboxCommandRequest,
ExecuteSandboxCommandResult,
ProviderCapabilities,
ReleaseSandboxRequest,
ResumeSandboxRequest,
SandboxHandle,
SandboxHealth,
SandboxHealthRequest,
SandboxProvider
} from "../provider-api/index.js";
import type { DaytonaDriver } from "../../driver.js";
import { Image } from "@daytonaio/sdk";
/** Configuration for the Daytona sandbox provider. */
export interface DaytonaProviderConfig {
  endpoint?: string; // Daytona API URL; Daytona's SDK default applies when omitted
  apiKey?: string; // when missing, the provider is treated as unconfigured
  image: string; // base container image used to build the sandbox snapshot
  target?: string; // Daytona target/region passed through to the client
  /**
   * Auto-stop interval in minutes. If omitted, Daytona's default applies.
   * Set to `0` to disable auto-stop.
   */
  autoStopInterval?: number;
}
/**
 * Sandbox provider backed by Daytona remote sandboxes.
 *
 * Responsibilities: create a sandbox from a snapshot image (git +
 * sandbox-agent preinstalled) with the target repo cloned inside, start/stop
 * sandboxes, install and launch the sandbox-agent server on demand, and
 * expose its HTTP endpoint via a Daytona preview URL. Every Daytona API call
 * is wrapped in a local timeout (withTimeout).
 */
export class DaytonaProvider implements SandboxProvider {
  constructor(
    private readonly config: DaytonaProviderConfig,
    private readonly daytona?: DaytonaDriver
  ) {}
  // Port the sandbox-agent server listens on inside the sandbox.
  private static readonly SANDBOX_AGENT_PORT = 2468;
  // Pinned sandbox-agent release installed into every sandbox.
  private static readonly SANDBOX_AGENT_VERSION = "0.3.0";
  // Default ACP request timeout handed to the in-sandbox server (2 minutes).
  private static readonly DEFAULT_ACP_REQUEST_TIMEOUT_MS = 120_000;
  // Agent plugins installed into the sandbox (best-effort; see ensureSandboxAgent).
  private static readonly AGENT_IDS = ["codex", "claude"] as const;
  // Host env vars forwarded into each sandbox so agents can reach model
  // providers and GitHub; only keys with non-empty values are copied.
  private static readonly PASSTHROUGH_ENV_KEYS = [
    "ANTHROPIC_API_KEY",
    "CLAUDE_API_KEY",
    "OPENAI_API_KEY",
    "CODEX_API_KEY",
    "OPENCODE_API_KEY",
    "CEREBRAS_API_KEY",
    "GH_TOKEN",
    "GITHUB_TOKEN",
  ] as const;
  // Per-call timeout for Daytona API requests; overridable via
  // HF_DAYTONA_REQUEST_TIMEOUT_MS, falling back to 120s on bad values.
  private getRequestTimeoutMs(): number {
    const parsed = Number(process.env.HF_DAYTONA_REQUEST_TIMEOUT_MS ?? "120000");
    if (!Number.isFinite(parsed) || parsed <= 0) {
      return 120_000;
    }
    return Math.floor(parsed);
  }
  // ACP request timeout for the in-sandbox sandbox-agent server; overridable
  // via HF_SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS.
  private getAcpRequestTimeoutMs(): number {
    const parsed = Number(
      process.env.HF_SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS
      ?? DaytonaProvider.DEFAULT_ACP_REQUEST_TIMEOUT_MS.toString()
    );
    if (!Number.isFinite(parsed) || parsed <= 0) {
      return DaytonaProvider.DEFAULT_ACP_REQUEST_TIMEOUT_MS;
    }
    return Math.floor(parsed);
  }
  // Race fn() against a timeout. The timer is always cleared so it cannot
  // keep the process alive; note the underlying call is NOT aborted on
  // timeout — only the await is released.
  private async withTimeout<T>(label: string, fn: () => Promise<T>): Promise<T> {
    const timeoutMs = this.getRequestTimeoutMs();
    let timer: ReturnType<typeof setTimeout> | null = null;
    try {
      return await Promise.race([
        fn(),
        new Promise<T>((_, reject) => {
          timer = setTimeout(() => {
            reject(new Error(`daytona ${label} timed out after ${timeoutMs}ms`));
          }, timeoutMs);
        }),
      ]);
    } finally {
      if (timer) {
        clearTimeout(timer);
      }
    }
  }
  // Build a Daytona client, or undefined when no API key or driver is configured.
  private getClient() {
    const apiKey = this.config.apiKey?.trim();
    if (!apiKey) {
      return undefined;
    }
    const endpoint = this.config.endpoint?.trim();
    return this.daytona?.createClient({
      ...(endpoint ? { apiUrl: endpoint } : {}),
      apiKey,
      target: this.config.target,
    });
  }
  // Like getClient(), but throws a descriptive configuration error instead
  // of returning undefined.
  private requireClient() {
    const client = this.getClient();
    if (client) {
      return client;
    }
    if (!this.daytona) {
      throw new Error("daytona provider requires backend daytona driver");
    }
    throw new Error(
      "daytona provider is not configured: missing apiKey. " +
      "Set HF_DAYTONA_API_KEY (or DAYTONA_API_KEY). " +
      "Optionally set HF_DAYTONA_ENDPOINT (or DAYTONA_ENDPOINT)."
    );
  }
  // Start the sandbox unless it already reports a started/running state.
  private async ensureStarted(sandboxId: string): Promise<void> {
    const client = this.requireClient();
    const sandbox = await this.withTimeout("get sandbox", () => client.getSandbox(sandboxId));
    const state = String(sandbox.state ?? "unknown").toLowerCase();
    if (state === "started" || state === "running") {
      return;
    }
    // If the sandbox is stopped (or any non-started state), try starting it.
    // Daytona preserves the filesystem across stop/start, which is what we rely on for faster git setup.
    await this.withTimeout("start sandbox", () => client.startSandbox(sandboxId, 60));
  }
  // Collect the passthrough env vars that are actually set on the host.
  private buildEnvVars(): Record<string, string> {
    const envVars: Record<string, string> = {};
    for (const key of DaytonaProvider.PASSTHROUGH_ENV_KEYS) {
      const value = process.env[key];
      if (value) {
        envVars[key] = value;
      }
    }
    return envVars;
  }
  private buildSnapshotImage() {
    // Use Daytona image build + snapshot caching so base tooling (git + sandbox-agent)
    // is prepared once and reused for subsequent sandboxes.
    return Image.base(this.config.image).runCommands(
      "apt-get update && apt-get install -y curl ca-certificates git openssh-client nodejs npm",
      `curl -fsSL https://releases.rivet.dev/sandbox-agent/${DaytonaProvider.SANDBOX_AGENT_VERSION}/install.sh | sh`,
      `bash -lc 'export PATH="$HOME/.local/bin:$PATH"; sandbox-agent install-agent codex || true; sandbox-agent install-agent claude || true'`
    );
  }
  // Run a shell command in the sandbox and throw (with exit code + output)
  // when it exits non-zero.
  private async runCheckedCommand(
    sandboxId: string,
    command: string,
    label: string
  ): Promise<void> {
    const client = this.requireClient();
    const result = await this.withTimeout(`execute command (${label})`, () =>
      client.executeCommand(sandboxId, command)
    );
    if (result.exitCode !== 0) {
      throw new Error(`daytona ${label} failed (${result.exitCode}): ${result.result}`);
    }
  }
  id() {
    return "daytona" as const;
  }
  capabilities(): ProviderCapabilities {
    return {
      remote: true,
      supportsSessionReuse: true
    };
  }
  // No schema validation here: config is accepted as-is (empty object when absent).
  async validateConfig(input: unknown): Promise<Record<string, unknown>> {
    return (input as Record<string, unknown> | undefined) ?? {};
  }
  /**
   * Create a sandbox from the snapshot image, make sure git + node are
   * present, and clone the repo onto the handoff branch. Debug progress is
   * reported through `req.debug` when provided.
   */
  async createSandbox(req: CreateSandboxRequest): Promise<SandboxHandle> {
    const client = this.requireClient();
    const emitDebug = req.debug ?? (() => {});
    emitDebug("daytona.createSandbox.start", {
      workspaceId: req.workspaceId,
      repoId: req.repoId,
      handoffId: req.handoffId,
      branchName: req.branchName
    });
    const createStartedAt = Date.now();
    const sandbox = await this.withTimeout("create sandbox", () =>
      client.createSandbox({
        image: this.buildSnapshotImage(),
        envVars: this.buildEnvVars(),
        // Labels let resumeSandbox reconstruct the repo path later.
        labels: {
          "openhandoff.workspace": req.workspaceId,
          "openhandoff.handoff": req.handoffId,
          "openhandoff.repo_id": req.repoId,
          "openhandoff.repo_remote": req.repoRemote,
          "openhandoff.branch": req.branchName,
        },
        autoStopInterval: this.config.autoStopInterval,
      })
    );
    emitDebug("daytona.createSandbox.created", {
      sandboxId: sandbox.id,
      durationMs: Date.now() - createStartedAt,
      state: sandbox.state ?? null
    });
    const repoDir = `/home/daytona/openhandoff/${req.workspaceId}/${req.repoId}/${req.handoffId}/repo`;
    // Prepare a working directory for the agent. This must succeed for the handoff to work.
    const installStartedAt = Date.now();
    await this.runCheckedCommand(
      sandbox.id,
      [
        "bash",
        "-lc",
        `'set -euo pipefail; export DEBIAN_FRONTEND=noninteractive; if command -v git >/dev/null 2>&1 && command -v npx >/dev/null 2>&1; then exit 0; fi; apt-get update -y >/tmp/apt-update.log 2>&1; apt-get install -y git openssh-client ca-certificates nodejs npm >/tmp/apt-install.log 2>&1'`
      ].join(" "),
      "install git + node toolchain"
    );
    emitDebug("daytona.createSandbox.install_toolchain.done", {
      sandboxId: sandbox.id,
      durationMs: Date.now() - installStartedAt
    });
    const cloneStartedAt = Date.now();
    await this.runCheckedCommand(
      sandbox.id,
      [
        "bash",
        "-lc",
        `${JSON.stringify(
          [
            "set -euo pipefail",
            "export GIT_TERMINAL_PROMPT=0",
            "export GIT_ASKPASS=/bin/echo",
            `rm -rf "${repoDir}"`,
            // mkdir -p then rmdir: creates the parent directories, then
            // removes the leaf so `git clone` can create it itself.
            `mkdir -p "${repoDir}"`,
            `rmdir "${repoDir}"`,
            // Clone without embedding credentials. Auth for pushing is configured by the agent at runtime.
            `git clone "${req.repoRemote}" "${repoDir}"`,
            `cd "${repoDir}"`,
            `git fetch origin --prune`,
            // The handoff branch may not exist remotely yet (agent push creates it). Base off current branch (default branch).
            `if git show-ref --verify --quiet "refs/remotes/origin/${req.branchName}"; then git checkout -B "${req.branchName}" "origin/${req.branchName}"; else git checkout -B "${req.branchName}" "$(git branch --show-current 2>/dev/null || echo main)"; fi`,
            `git config user.email "openhandoff@local" >/dev/null 2>&1 || true`,
            `git config user.name "OpenHandoff" >/dev/null 2>&1 || true`,
          ].join("; ")
        )}`
      ].join(" "),
      "clone repo"
    );
    emitDebug("daytona.createSandbox.clone_repo.done", {
      sandboxId: sandbox.id,
      durationMs: Date.now() - cloneStartedAt
    });
    return {
      sandboxId: sandbox.id,
      switchTarget: `daytona://${sandbox.id}`,
      metadata: {
        endpoint: this.config.endpoint ?? null,
        image: this.config.image,
        snapshot: sandbox.snapshot ?? null,
        remote: true,
        state: sandbox.state ?? null,
        cwd: repoDir,
      }
    };
  }
  /**
   * Restart an existing sandbox and rebuild its handle. The repo cwd is
   * reconstructed from the labels written at create time; when labels are
   * missing, the cwd is omitted from the metadata.
   */
  async resumeSandbox(req: ResumeSandboxRequest): Promise<SandboxHandle> {
    const client = this.requireClient();
    await this.ensureStarted(req.sandboxId);
    // Reconstruct cwd from sandbox labels written at create time.
    const info = await this.withTimeout("resume get sandbox", () =>
      client.getSandbox(req.sandboxId)
    );
    const labels = info.labels ?? {};
    const workspaceId = labels["openhandoff.workspace"] ?? req.workspaceId;
    const repoId = labels["openhandoff.repo_id"] ?? "";
    const handoffId = labels["openhandoff.handoff"] ?? "";
    const cwd =
      repoId && handoffId
        ? `/home/daytona/openhandoff/${workspaceId}/${repoId}/${handoffId}/repo`
        : null;
    return {
      sandboxId: req.sandboxId,
      switchTarget: `daytona://${req.sandboxId}`,
      metadata: {
        resumed: true,
        endpoint: this.config.endpoint ?? null,
        ...(cwd ? { cwd } : {}),
      }
    };
  }
  // Delete the sandbox. A no-op when unconfigured; "not found" errors are
  // swallowed so repeated cleanup is idempotent.
  async destroySandbox(_req: DestroySandboxRequest): Promise<void> {
    const client = this.getClient();
    if (!client) {
      return;
    }
    try {
      await this.withTimeout("delete sandbox", () => client.deleteSandbox(_req.sandboxId));
    } catch (error) {
      // Ignore not-found style cleanup failures.
      const text = error instanceof Error ? error.message : String(error);
      if (text.toLowerCase().includes("not found")) {
        return;
      }
      throw error;
    }
  }
  // Stop (not delete) the sandbox so its filesystem can be resumed later.
  // Same "not found" tolerance as destroySandbox.
  async releaseSandbox(req: ReleaseSandboxRequest): Promise<void> {
    const client = this.getClient();
    if (!client) {
      return;
    }
    try {
      await this.withTimeout("stop sandbox", () => client.stopSandbox(req.sandboxId, 60));
    } catch (error) {
      const text = error instanceof Error ? error.message : String(error);
      if (text.toLowerCase().includes("not found")) {
        return;
      }
      throw error;
    }
  }
  /**
   * Make sure the sandbox-agent server is installed, running, and healthy in
   * the sandbox, then return its preview endpoint. Tooling installs are
   * skipped when already present; agent plugin installs are best-effort.
   */
  async ensureSandboxAgent(req: EnsureAgentRequest): Promise<AgentEndpoint> {
    const client = this.requireClient();
    const acpRequestTimeoutMs = this.getAcpRequestTimeoutMs();
    await this.ensureStarted(req.sandboxId);
    // Each step below is a no-op when the tool is already installed.
    await this.runCheckedCommand(
      req.sandboxId,
      [
        "bash",
        "-lc",
        `'set -euo pipefail; if command -v curl >/dev/null 2>&1; then exit 0; fi; export DEBIAN_FRONTEND=noninteractive; apt-get update -y >/tmp/apt-update.log 2>&1; apt-get install -y curl ca-certificates >/tmp/apt-install.log 2>&1'`
      ].join(" "),
      "install curl"
    );
    await this.runCheckedCommand(
      req.sandboxId,
      [
        "bash",
        "-lc",
        `'set -euo pipefail; if command -v npx >/dev/null 2>&1; then exit 0; fi; export DEBIAN_FRONTEND=noninteractive; apt-get update -y >/tmp/apt-update.log 2>&1; apt-get install -y nodejs npm >/tmp/apt-install.log 2>&1'`
      ].join(" "),
      "install node toolchain"
    );
    // (Re)install sandbox-agent unless the pinned version is already present.
    await this.runCheckedCommand(
      req.sandboxId,
      [
        "bash",
        "-lc",
        `'set -euo pipefail; export PATH="$HOME/.local/bin:$PATH"; if sandbox-agent --version 2>/dev/null | grep -q "${DaytonaProvider.SANDBOX_AGENT_VERSION}"; then exit 0; fi; curl -fsSL https://releases.rivet.dev/sandbox-agent/${DaytonaProvider.SANDBOX_AGENT_VERSION}/install.sh | sh'`
      ].join(" "),
      "install sandbox-agent"
    );
    for (const agentId of DaytonaProvider.AGENT_IDS) {
      try {
        await this.runCheckedCommand(
          req.sandboxId,
          ["bash", "-lc", `'export PATH="$HOME/.local/bin:$PATH"; sandbox-agent install-agent ${agentId}'`].join(" "),
          `install agent ${agentId}`
        );
      } catch {
        // Some sandbox-agent builds may not ship every agent plugin; treat this as best-effort.
      }
    }
    // Launch the server in the background unless a process is already running.
    await this.runCheckedCommand(
      req.sandboxId,
      [
        "bash",
        "-lc",
        `'set -euo pipefail; export PATH="$HOME/.local/bin:$PATH"; command -v sandbox-agent >/dev/null 2>&1; if pgrep -x sandbox-agent >/dev/null; then exit 0; fi; nohup env SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS=${acpRequestTimeoutMs} sandbox-agent server --no-token --host 0.0.0.0 --port ${DaytonaProvider.SANDBOX_AGENT_PORT} >/tmp/sandbox-agent.log 2>&1 &'`
      ].join(" "),
      "start sandbox-agent"
    );
    // Poll the health endpoint for up to ~45s; dump the server log on failure.
    await this.runCheckedCommand(
      req.sandboxId,
      [
        "bash",
        "-lc",
        `'for i in $(seq 1 45); do curl -fsS "http://127.0.0.1:${DaytonaProvider.SANDBOX_AGENT_PORT}/v1/health" >/dev/null && exit 0; sleep 1; done; echo "sandbox-agent failed to become healthy" >&2; tail -n 80 /tmp/sandbox-agent.log >&2; exit 1'`
      ].join(" "),
      "wait for sandbox-agent health"
    );
    const preview = await this.withTimeout("get preview endpoint", () =>
      client.getPreviewEndpoint(req.sandboxId, DaytonaProvider.SANDBOX_AGENT_PORT)
    );
    return {
      endpoint: preview.url,
      token: preview.token
    };
  }
  // Map the Daytona sandbox state onto the provider health model: unreachable
  // or error states are "down", missing configuration is "degraded".
  async health(req: SandboxHealthRequest): Promise<SandboxHealth> {
    const client = this.getClient();
    if (!client) {
      return {
        status: "degraded",
        message: "daytona driver not configured",
      };
    }
    try {
      const sandbox = await this.withTimeout("health get sandbox", () =>
        client.getSandbox(req.sandboxId)
      );
      const state = String(sandbox.state ?? "unknown");
      if (state.toLowerCase().includes("error")) {
        return {
          status: "down",
          message: `daytona sandbox in error state: ${state}`,
        };
      }
      return {
        status: "healthy",
        message: `daytona sandbox state: ${state}`,
      };
    } catch (error) {
      const text = error instanceof Error ? error.message : String(error);
      return {
        status: "down",
        message: `daytona sandbox health check failed: ${text}`,
      };
    }
  }
  // Opaque attach URI for UI/CLI consumers.
  async attachTarget(req: AttachTargetRequest): Promise<AttachTarget> {
    return {
      target: `daytona://${req.sandboxId}`
    };
  }
  // Run an arbitrary command in the (started) sandbox and return its result.
  async executeCommand(req: ExecuteSandboxCommandRequest): Promise<ExecuteSandboxCommandResult> {
    const client = this.requireClient();
    await this.ensureStarted(req.sandboxId);
    return await this.withTimeout(`execute command (${req.label ?? "command"})`, () =>
      client.executeCommand(req.sandboxId, req.command)
    );
  }
}

View file

@ -0,0 +1,71 @@
import type { ProviderId } from "@openhandoff/shared";
import type { AppConfig } from "@openhandoff/shared";
import type { BackendDriver } from "../driver.js";
import { DaytonaProvider } from "./daytona/index.js";
import { LocalProvider } from "./local/index.js";
import type { SandboxProvider } from "./provider-api/index.js";
/** Lookup table of sandbox providers plus the configured default choice. */
export interface ProviderRegistry {
  get(providerId: ProviderId): SandboxProvider;
  availableProviderIds(): ProviderId[];
  // Provider used when the caller does not pick one explicitly.
  defaultProviderId(): ProviderId;
}
/**
 * Wire up all sandbox providers from config. When no backend driver is
 * supplied, the local provider receives a git driver whose every method
 * rejects, so misuse fails loudly instead of silently.
 */
export function createProviderRegistry(config: AppConfig, driver?: BackendDriver): ProviderRegistry {
  // Shared rejection used by every stub method below.
  const missingGitDriver = (): never => {
    throw new Error("local provider requires backend git driver");
  };
  const gitDriver = driver?.git ?? {
    validateRemote: async () => missingGitDriver(),
    ensureCloned: async () => missingGitDriver(),
    fetch: async () => missingGitDriver(),
    listRemoteBranches: async () => missingGitDriver(),
    remoteDefaultBaseRef: async () => missingGitDriver(),
    revParse: async () => missingGitDriver(),
    ensureRemoteBranch: async () => missingGitDriver(),
    diffStatForBranch: async () => missingGitDriver(),
    conflictsWithMain: async () => missingGitDriver(),
  };
  const providers: Record<ProviderId, SandboxProvider> = {
    local: new LocalProvider(
      {
        rootDir: config.providers.local.rootDir,
        sandboxAgentPort: config.providers.local.sandboxAgentPort,
      },
      gitDriver
    ),
    daytona: new DaytonaProvider(
      {
        endpoint: config.providers.daytona.endpoint,
        apiKey: config.providers.daytona.apiKey,
        image: config.providers.daytona.image
      },
      driver?.daytona
    ),
  };
  return {
    get: (providerId) => providers[providerId],
    availableProviderIds: () => Object.keys(providers) as ProviderId[],
    // Prefer remote sandboxes whenever Daytona credentials are configured.
    defaultProviderId: () => (config.providers.daytona.apiKey ? "daytona" : "local"),
  };
}

View file

@ -0,0 +1,251 @@
import { randomUUID } from "node:crypto";
import { execFile } from "node:child_process";
import { existsSync, mkdirSync, rmSync } from "node:fs";
import { homedir } from "node:os";
import { dirname, resolve } from "node:path";
import { promisify } from "node:util";
import { InMemorySessionPersistDriver, SandboxAgent } from "sandbox-agent";
import type {
AgentEndpoint,
AttachTarget,
AttachTargetRequest,
CreateSandboxRequest,
DestroySandboxRequest,
EnsureAgentRequest,
ExecuteSandboxCommandRequest,
ExecuteSandboxCommandResult,
ProviderCapabilities,
ReleaseSandboxRequest,
ResumeSandboxRequest,
SandboxHandle,
SandboxHealth,
SandboxHealthRequest,
SandboxProvider,
} from "../provider-api/index.js";
import type { GitDriver } from "../../driver.js";
// Promisified child_process.execFile, used for async/await git invocations.
const execFileAsync = promisify(execFile);
// Port the spawned sandbox-agent listens on unless overridden in config.
const DEFAULT_SANDBOX_AGENT_PORT = 2468;

/** Configuration for the local (on-disk) sandbox provider. */
export interface LocalProviderConfig {
  // Directory holding all sandbox working copies; a leading "~" is expanded.
  rootDir?: string;
  // TCP port for the sandbox-agent; defaults to DEFAULT_SANDBOX_AGENT_PORT.
  sandboxAgentPort?: number;
}
/**
 * Expand a leading "~" or "~/" to the current user's home directory.
 * Any other value is returned unchanged.
 */
function expandHome(value: string): string {
  if (value === "~") return homedir();
  return value.startsWith("~/") ? resolve(homedir(), value.slice(2)) : value;
}
/**
 * Check whether the remote-tracking ref `origin/<branchName>` exists in the
 * given clone. `git show-ref --verify` exits non-zero when the ref is absent,
 * which surfaces here as a rejected promise and maps to `false`.
 */
async function branchExists(repoPath: string, branchName: string): Promise<boolean> {
  const ref = `refs/remotes/origin/${branchName}`;
  return execFileAsync("git", ["-C", repoPath, "show-ref", "--verify", ref]).then(
    () => true,
    () => false,
  );
}
/**
 * Fetch, then (re)create the local branch with `checkout -B`, tracking the
 * remote branch when it already exists and otherwise starting from the
 * repository's default base ref.
 */
async function checkoutBranch(repoPath: string, branchName: string, git: GitDriver): Promise<void> {
  await git.fetch(repoPath);
  const remoteExists = await branchExists(repoPath, branchName);
  const targetRef = remoteExists
    ? `origin/${branchName}`
    : await git.remoteDefaultBaseRef(repoPath);
  const args = ["-C", repoPath, "checkout", "-B", branchName, targetRef];
  await execFileAsync("git", args, { env: process.env as Record<string, string> });
}
/**
 * Sandbox provider that runs every handoff directly on this machine and talks
 * to a single shared sandbox-agent process.
 *
 * On-disk layout: `<rootDir>/<workspaceId>/<sandboxId>/repo` holds the git
 * working copy for each sandbox.
 */
export class LocalProvider implements SandboxProvider {
  // Lazily-created, shared sandbox-agent SDK handle; started on first use and
  // reused by all sandboxes of this provider.
  private sdkPromise: Promise<SandboxAgent> | null = null;

  constructor(
    private readonly config: LocalProviderConfig,
    private readonly git: GitDriver,
  ) {}

  // Base directory for all local sandboxes; config override wins, "~" expanded.
  private rootDir(): string {
    return expandHome(
      this.config.rootDir?.trim() || "~/.local/share/openhandoff/local-sandboxes",
    );
  }

  // Per-sandbox directory: <root>/<workspaceId>/<sandboxId>.
  private sandboxRoot(workspaceId: string, sandboxId: string): string {
    return resolve(this.rootDir(), workspaceId, sandboxId);
  }

  // The git working copy lives in a "repo" subdirectory of the sandbox root.
  private repoDir(workspaceId: string, sandboxId: string): string {
    return resolve(this.sandboxRoot(workspaceId, sandboxId), "repo");
  }

  // Build the handle callers use to address this sandbox; the switch target
  // encodes the repo path as a local:// URI.
  private sandboxHandle(
    workspaceId: string,
    sandboxId: string,
    repoDir: string,
  ): SandboxHandle {
    return {
      sandboxId,
      switchTarget: `local://${repoDir}`,
      metadata: {
        cwd: repoDir,
        repoDir,
      },
    };
  }

  /**
   * Start (at most once) and return the shared sandbox-agent SDK.
   * Also best-effort installs the "claude" and "codex" agents on first start.
   */
  private async sandboxAgent(): Promise<SandboxAgent> {
    if (!this.sdkPromise) {
      const sandboxAgentHome = resolve(this.rootDir(), ".sandbox-agent-home");
      mkdirSync(sandboxAgentHome, { recursive: true });
      // Prefer the real HOME so spawned agents can reuse existing credentials;
      // fall back to a private home under the sandbox root.
      const spawnHome = process.env.HOME?.trim() || sandboxAgentHome;
      this.sdkPromise = SandboxAgent.start({
        persist: new InMemorySessionPersistDriver(),
        spawn: {
          enabled: true,
          host: "127.0.0.1",
          port: this.config.sandboxAgentPort ?? DEFAULT_SANDBOX_AGENT_PORT,
          log: "silent",
          // Forward only the credential variables the agents need; each spread
          // adds the key only when it is set in the parent environment.
          env: {
            HOME: spawnHome,
            ...(process.env.ANTHROPIC_API_KEY ? { ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY } : {}),
            ...(process.env.CLAUDE_API_KEY ? { CLAUDE_API_KEY: process.env.CLAUDE_API_KEY } : {}),
            ...(process.env.OPENAI_API_KEY ? { OPENAI_API_KEY: process.env.OPENAI_API_KEY } : {}),
            ...(process.env.CODEX_API_KEY ? { CODEX_API_KEY: process.env.CODEX_API_KEY } : {}),
            ...(process.env.GH_TOKEN ? { GH_TOKEN: process.env.GH_TOKEN } : {}),
            ...(process.env.GITHUB_TOKEN ? { GITHUB_TOKEN: process.env.GITHUB_TOKEN } : {}),
          },
        },
      }).then(async (sdk) => {
        for (const agentName of ["claude", "codex"] as const) {
          try {
            const agent = await sdk.getAgent(agentName, { config: true });
            if (!agent.installed) {
              await sdk.installAgent(agentName);
            }
          } catch {
            // The local provider can still function if the agent is already available
            // through the user's PATH or the install check is unsupported.
          }
        }
        return sdk;
      });
    }
    return this.sdkPromise;
  }

  id() {
    return "local" as const;
  }

  // Local sandboxes are on-host and keep their session state between resumes.
  capabilities(): ProviderCapabilities {
    return {
      remote: false,
      supportsSessionReuse: true,
    };
  }

  // No provider-specific options to validate; pass input through (or {}).
  async validateConfig(input: unknown): Promise<Record<string, unknown>> {
    return (input as Record<string, unknown> | undefined) ?? {};
  }

  // Clone (or reuse) the repo for this handoff and check out its branch.
  // The handoff id doubles as the sandbox id when present.
  async createSandbox(req: CreateSandboxRequest): Promise<SandboxHandle> {
    const sandboxId = req.handoffId || `local-${randomUUID()}`;
    const repoDir = this.repoDir(req.workspaceId, sandboxId);
    mkdirSync(dirname(repoDir), { recursive: true });
    await this.git.ensureCloned(req.repoRemote, repoDir);
    await checkoutBranch(repoDir, req.branchName, this.git);
    return this.sandboxHandle(req.workspaceId, sandboxId, repoDir);
  }

  // Resume only re-derives the handle; the on-disk repo must still exist.
  async resumeSandbox(req: ResumeSandboxRequest): Promise<SandboxHandle> {
    const repoDir = this.repoDir(req.workspaceId, req.sandboxId);
    if (!existsSync(repoDir)) {
      throw new Error(`local sandbox repo is missing: ${repoDir}`);
    }
    return this.sandboxHandle(req.workspaceId, req.sandboxId, repoDir);
  }

  // Permanently delete the sandbox directory (repo and all state).
  async destroySandbox(req: DestroySandboxRequest): Promise<void> {
    rmSync(this.sandboxRoot(req.workspaceId, req.sandboxId), {
      force: true,
      recursive: true,
    });
  }

  async releaseSandbox(_req: ReleaseSandboxRequest): Promise<void> {
    // Local sandboxes stay warm on disk to preserve session state and repo context.
  }

  // Expose the shared sandbox-agent endpoint (and token, when present).
  async ensureSandboxAgent(_req: EnsureAgentRequest): Promise<AgentEndpoint> {
    const sdk = await this.sandboxAgent();
    // NOTE(review): baseUrl/token are read via a structural cast; confirm the
    // sandbox-agent SDK actually exposes these fields on its instances.
    const { baseUrl, token } = sdk as unknown as {
      baseUrl?: string;
      token?: string;
    };
    if (!baseUrl) {
      throw new Error("sandbox-agent baseUrl is unavailable");
    }
    return token ? { endpoint: baseUrl, token } : { endpoint: baseUrl };
  }

  // "down" when the repo is gone or the agent errors; otherwise mirrors the
  // agent's own health status ("ok" -> healthy, anything else -> degraded).
  async health(req: SandboxHealthRequest): Promise<SandboxHealth> {
    try {
      const repoDir = this.repoDir(req.workspaceId, req.sandboxId);
      if (!existsSync(repoDir)) {
        return {
          status: "down",
          message: "local sandbox repo is missing",
        };
      }
      const sdk = await this.sandboxAgent();
      const health = await sdk.getHealth();
      return {
        status: health.status === "ok" ? "healthy" : "degraded",
        message: health.status,
      };
    } catch (error) {
      return {
        status: "down",
        message: error instanceof Error ? error.message : String(error),
      };
    }
  }

  // Attaching locally just means pointing the caller at the repo directory.
  async attachTarget(req: AttachTargetRequest): Promise<AttachTarget> {
    return { target: this.repoDir(req.workspaceId, req.sandboxId) };
  }

  // Run a shell command in the repo dir via `bash -lc`. Failures are folded
  // into the result (exit code + combined output) rather than thrown.
  async executeCommand(req: ExecuteSandboxCommandRequest): Promise<ExecuteSandboxCommandResult> {
    const cwd = this.repoDir(req.workspaceId, req.sandboxId);
    try {
      const { stdout, stderr } = await execFileAsync("bash", ["-lc", req.command], {
        cwd,
        env: process.env as Record<string, string>,
        maxBuffer: 1024 * 1024 * 16,
      });
      return {
        exitCode: 0,
        result: [stdout, stderr].filter(Boolean).join(""),
      };
    } catch (error) {
      // execFile rejects with stdout/stderr/code attached; surface all of it.
      const detail = error as { stdout?: string; stderr?: string; code?: number };
      return {
        exitCode: typeof detail.code === "number" ? detail.code : 1,
        result: [detail.stdout, detail.stderr, error instanceof Error ? error.message : String(error)]
          .filter(Boolean)
          .join(""),
      };
    }
  }
}

View file

@ -0,0 +1,99 @@
import type { ProviderId } from "@openhandoff/shared";
/** Static traits of a sandbox provider implementation. */
export interface ProviderCapabilities {
  // True when sandboxes run on remote infrastructure rather than this host.
  remote: boolean;
  // True when agent sessions survive release/resume cycles.
  supportsSessionReuse: boolean;
}

/** Inputs needed to provision a fresh sandbox for a handoff. */
export interface CreateSandboxRequest {
  workspaceId: string;
  repoId: string;
  // Remote URL of the repository to clone into the sandbox.
  repoRemote: string;
  // Branch to check out inside the sandbox.
  branchName: string;
  // Handoff id; providers may reuse it as the sandbox id.
  handoffId: string;
  // Optional sink for provider-side progress/debug messages.
  debug?: (message: string, context?: Record<string, unknown>) => void;
  // Provider-specific options (see validateConfig).
  options?: Record<string, unknown>;
}

/** Identifies an existing sandbox to bring back into service. */
export interface ResumeSandboxRequest {
  workspaceId: string;
  sandboxId: string;
  // Provider-specific options (see validateConfig).
  options?: Record<string, unknown>;
}

/** Identifies a sandbox to delete permanently. */
export interface DestroySandboxRequest {
  workspaceId: string;
  sandboxId: string;
}

/** Identifies a sandbox whose resources should be released (not deleted). */
export interface ReleaseSandboxRequest {
  workspaceId: string;
  sandboxId: string;
}

/** Identifies the sandbox whose agent endpoint is required. */
export interface EnsureAgentRequest {
  workspaceId: string;
  sandboxId: string;
}

/** Identifies the sandbox to health-check. */
export interface SandboxHealthRequest {
  workspaceId: string;
  sandboxId: string;
}

/** Identifies the sandbox to produce an attach target for. */
export interface AttachTargetRequest {
  workspaceId: string;
  sandboxId: string;
}

/** A shell command to run inside a sandbox. */
export interface ExecuteSandboxCommandRequest {
  workspaceId: string;
  sandboxId: string;
  // Shell command text, executed by the provider's shell of choice.
  command: string;
  // Human-readable label used in logs/timeouts.
  label?: string;
}

/** Handle returned when a sandbox is created or resumed. */
export interface SandboxHandle {
  sandboxId: string;
  // Provider-scheme URI the caller uses to switch into the sandbox.
  switchTarget: string;
  // Provider-specific extras (e.g. working directory, repo path).
  metadata: Record<string, unknown>;
}

/** Where (and how) to reach the sandbox's agent. */
export interface AgentEndpoint {
  endpoint: string;
  // Bearer token for the endpoint, when the provider requires one.
  token?: string;
}

/** Coarse sandbox health with a human-readable message. */
export interface SandboxHealth {
  status: "healthy" | "degraded" | "down";
  message: string;
}

/** Opaque target string used to attach to a sandbox. */
export interface AttachTarget {
  target: string;
}

/** Outcome of a sandbox command: exit code plus combined output. */
export interface ExecuteSandboxCommandResult {
  exitCode: number;
  result: string;
}

/** Contract every sandbox provider (local, daytona, …) implements. */
export interface SandboxProvider {
  id(): ProviderId;
  capabilities(): ProviderCapabilities;
  // Validate/normalize provider-specific options; returns the accepted set.
  validateConfig(input: unknown): Promise<Record<string, unknown>>;
  createSandbox(req: CreateSandboxRequest): Promise<SandboxHandle>;
  resumeSandbox(req: ResumeSandboxRequest): Promise<SandboxHandle>;
  destroySandbox(req: DestroySandboxRequest): Promise<void>;
  /**
   * Release resources for a sandbox without deleting its filesystem/state.
   * For remote providers, this typically maps to "stop"/"suspend".
   */
  releaseSandbox(req: ReleaseSandboxRequest): Promise<void>;
  ensureSandboxAgent(req: EnsureAgentRequest): Promise<AgentEndpoint>;
  health(req: SandboxHealthRequest): Promise<SandboxHealth>;
  attachTarget(req: AttachTargetRequest): Promise<AttachTarget>;
  executeCommand(req: ExecuteSandboxCommandRequest): Promise<ExecuteSandboxCommandResult>;
}

View file

@ -0,0 +1,128 @@
/** Inputs for choosing a handoff's title and branch name. */
export interface ResolveCreateFlowDecisionInput {
  // Free-form task description; its first non-empty line seeds the title.
  task: string;
  // Caller-supplied title; overrides derivation from the task text.
  explicitTitle?: string;
  // Caller-supplied branch name; must not collide with existing branches.
  explicitBranchName?: string;
  // Branch names already present in the local clone.
  localBranches: string[];
  // Branch names already claimed by other handoffs.
  handoffBranches: string[];
}

/** The chosen title and a collision-free branch name. */
export interface ResolveCreateFlowDecisionResult {
  title: string;
  branchName: string;
}
/** Return the first line of `input` with non-whitespace content, trimmed; "" if none. */
function firstNonEmptyLine(input: string): string {
  for (const rawLine of input.split(/\r?\n/)) {
    const trimmed = rawLine.trim();
    if (trimmed.length > 0) {
      return trimmed;
    }
  }
  return "";
}
/**
 * Derive a conventional-commit style title ("type: summary") for a handoff.
 * An explicit title wins over the task's first non-empty line; an explicit
 * "feat:/fix:/docs:/refactor:" prefix in the source is preserved, otherwise
 * the type is guessed from keywords. Summaries are restricted to
 * alphanumerics, spaces and dashes, and capped at 62 characters.
 */
export function deriveFallbackTitle(task: string, explicitTitle?: string): string {
  // Keep only [a-zA-Z0-9 -], then collapse whitespace runs to single spaces.
  const normalizeSummary = (raw: string): string =>
    raw
      .replace(/[^a-zA-Z0-9 -]/g, " ")
      .split(/\s+/)
      .filter((token) => token.length > 0)
      .join(" ");

  const source =
    (explicitTitle && explicitTitle.trim()) || firstNonEmptyLine(task) || "update handoff";

  // A recognized conventional-commit prefix in the source is kept as-is.
  const prefixed = source.match(/^\s*(feat|fix|docs|refactor):\s+(.+)$/i);
  if (prefixed) {
    const summary = normalizeSummary(prefixed[2]!).slice(0, 62).trim();
    return `${prefixed[1]!.toLowerCase()}: ${summary || "update handoff"}`;
  }

  // Otherwise guess the commit type from keywords in the source text.
  const lowered = source.toLowerCase();
  let typePrefix = "feat";
  if (lowered.includes("fix") || lowered.includes("bug")) {
    typePrefix = "fix";
  } else if (lowered.includes("doc") || lowered.includes("readme")) {
    typePrefix = "docs";
  } else if (lowered.includes("refactor")) {
    typePrefix = "refactor";
  }

  const summary = (normalizeSummary(source) || "update handoff").slice(0, 62).trim();
  return `${typePrefix}: ${summary}`.trim();
}
/**
 * Turn arbitrary text into a git-safe branch slug: lowercase alphanumeric
 * runs joined by single dashes, with no leading or trailing dash, truncated
 * to at most 50 characters (re-stripping any dash the cut exposes).
 */
export function sanitizeBranchName(input: string): string {
  const slug = input
    .toLowerCase()
    // Each run of disallowed characters collapses to a single dash.
    .replace(/[^a-z0-9]+/g, "-")
    // A slug never starts or ends with a dash.
    .replace(/^-+|-+$/g, "");
  if (slug.length <= 50) {
    return slug;
  }
  // Truncation may leave a dangling dash at the cut point; strip it.
  return slug.slice(0, 50).replace(/-+$/g, "");
}
/**
 * Decide the title and branch name for a new handoff.
 *
 * An explicit branch name must be unused (throws on collision); a generated
 * name gets "-2", "-3", … suffixes until it no longer collides with any
 * local or handoff branch.
 */
export function resolveCreateFlowDecision(
  input: ResolveCreateFlowDecisionInput
): ResolveCreateFlowDecisionResult {
  const title = deriveFallbackTitle(input.task, input.explicitTitle);
  const explicitBranch = input.explicitBranchName?.trim();

  // Merge both branch lists into one collision set, ignoring blanks.
  const taken = new Set<string>();
  for (const list of [input.localBranches, input.handoffBranches]) {
    for (const raw of list) {
      const name = raw.trim();
      if (name.length > 0) {
        taken.add(name);
      }
    }
  }

  if (explicitBranch && explicitBranch.length > 0) {
    // An explicit name is used verbatim or rejected — never auto-suffixed.
    if (taken.has(explicitBranch)) {
      throw new Error(
        `Branch '${explicitBranch}' already exists. Choose a different --name/--branch value.`
      );
    }
    return { title, branchName: explicitBranch };
  }

  const base = sanitizeBranchName(title) || "handoff";
  let candidate = base;
  let suffix = 2;
  while (taken.has(candidate)) {
    candidate = `${base}-${suffix}`;
    suffix += 1;
  }
  return { title, branchName: candidate };
}

View file

@ -0,0 +1,25 @@
import type { AppConfig } from "@openhandoff/shared";
import { homedir } from "node:os";
import { dirname, join, resolve } from "node:path";
/**
 * Expand a leading "~" to the current user's home directory.
 * Mirrors the local provider's expandHome so tilde paths in config behave
 * the same everywhere; non-tilde paths pass through untouched.
 */
function expandPath(input: string): string {
  // Bare "~" previously fell through unexpanded; treat it as the home dir.
  if (input === "~") {
    return homedir();
  }
  if (input.startsWith("~/")) {
    return `${homedir()}/${input.slice(2)}`;
  }
  return input;
}
/**
 * Directory where openhandoff stores its data.
 * Kept collocated with the backend DB by default: the directory containing
 * the (tilde-expanded) DB path, resolved to an absolute path.
 */
export function openhandoffDataDir(config: AppConfig): string {
  return resolve(dirname(expandPath(config.backend.dbPath)));
}
/** Absolute path of the clone used for a given workspace/repo pair. */
export function openhandoffRepoClonePath(
  config: AppConfig,
  workspaceId: string,
  repoId: string
): string {
  const reposRoot = join(openhandoffDataDir(config), "repos");
  return resolve(reposRoot, workspaceId, repoId);
}

Some files were not shown because too many files have changed in this diff Show more