mirror of
https://github.com/harivansh-afk/sandbox-agent.git
synced 2026-04-15 07:04:48 +00:00
Rename Foundry handoffs to tasks (#239)
* Restore foundry onboarding stack * Consolidate foundry rename * Create foundry tasks without prompts * Rename Foundry handoffs to tasks
This commit is contained in:
parent
d30cc0bcc8
commit
d75e8c31d1
281 changed files with 9242 additions and 4356 deletions
36
foundry/packages/backend/CLAUDE.md
Normal file
36
foundry/packages/backend/CLAUDE.md
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
# Backend Notes
|
||||
|
||||
## Actor Hierarchy
|
||||
|
||||
Keep the backend actor tree aligned with this shape unless we explicitly decide to change it:
|
||||
|
||||
```text
|
||||
WorkspaceActor
|
||||
├─ HistoryActor(workspace-scoped global feed)
|
||||
├─ ProjectActor(repo)
|
||||
│ ├─ ProjectBranchSyncActor
|
||||
│ ├─ ProjectPrSyncActor
|
||||
│ └─ TaskActor(task)
|
||||
│ ├─ TaskSessionActor(session) × N
|
||||
│ │ └─ SessionStatusSyncActor(session) × 0..1
|
||||
│ └─ Task-local workbench state
|
||||
└─ SandboxInstanceActor(providerId, sandboxId) × N
|
||||
```
|
||||
|
||||
## Ownership Rules
|
||||
|
||||
- `WorkspaceActor` is the workspace coordinator and lookup/index owner.
|
||||
- `HistoryActor` is workspace-scoped. There is one workspace-level history feed.
|
||||
- `ProjectActor` is the repo coordinator and owns repo-local caches/indexes.
|
||||
- `TaskActor` is one branch. Treat `1 task = 1 branch` once branch assignment is finalized.
|
||||
- `TaskActor` can have many sessions.
|
||||
- `TaskActor` can reference many sandbox instances historically, but should have only one active sandbox/session at a time.
|
||||
- Session unread state and draft prompts are backend-owned workbench state, not frontend-local state.
|
||||
- Branch rename is a real git operation, not just metadata.
|
||||
- `SandboxInstanceActor` stays separate from `TaskActor`; tasks/sessions reference it by identity.
|
||||
- Sync actors are polling workers only. They feed parent actors and should not become the source of truth.
|
||||
|
||||
## Maintenance
|
||||
|
||||
- Keep this file up to date whenever actor ownership, hierarchy, or lifecycle responsibilities change.
|
||||
- If the real actor tree diverges from this document, update this document in the same change.
|
||||
35
foundry/packages/backend/package.json
Normal file
35
foundry/packages/backend/package.json
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
{
|
||||
"name": "@sandbox-agent/foundry-backend",
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"scripts": {
|
||||
"build": "tsup src/index.ts --format esm",
|
||||
"db:generate": "find src/actors -name drizzle.config.ts -exec pnpm exec drizzle-kit generate --config {} \\; && \"$HOME/.bun/bin/bun\" src/actors/_scripts/generate-actor-migrations.ts",
|
||||
"typecheck": "tsc --noEmit",
|
||||
"test": "$HOME/.bun/bin/bun x vitest run",
|
||||
"start": "bun dist/index.js start"
|
||||
},
|
||||
"dependencies": {
|
||||
"@daytonaio/sdk": "0.141.0",
|
||||
"@hono/node-server": "^1.19.7",
|
||||
"@hono/node-ws": "^1.3.0",
|
||||
"@iarna/toml": "^2.2.5",
|
||||
"@sandbox-agent/foundry-shared": "workspace:*",
|
||||
"@sandbox-agent/persist-rivet": "workspace:*",
|
||||
"drizzle-orm": "^0.44.5",
|
||||
"hono": "^4.11.9",
|
||||
"pino": "^10.3.1",
|
||||
"rivetkit": "2.1.6",
|
||||
"sandbox-agent": "workspace:*",
|
||||
"uuid": "^13.0.0",
|
||||
"zod": "^4.1.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/bun": "^1.3.9",
|
||||
"drizzle-kit": "^0.31.8",
|
||||
"tsup": "^8.5.0"
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,135 @@
|
|||
import { mkdir, readdir, readFile, rm, stat, writeFile } from "node:fs/promises";
import { dirname, join, resolve } from "node:path";
|
||||
|
||||
// Shape of drizzle-kit's meta/_journal.json. Only the fields this script
// reads are modeled; any extra fields survive JSON.parse but are ignored.
type Journal = {
  entries?: Array<{
    // Zero-based position of the migration; also used to derive the mXXXX key.
    idx: number;
    // Millisecond timestamp recorded when drizzle-kit generated the migration.
    when: number;
    // Migration tag, e.g. "0000_watery_bushwacker" — presumably matches the
    // .sql filename emitted next to the journal (TODO confirm for all versions).
    tag: string;
    breakpoints?: boolean;
    version?: string;
  }>;
};
|
||||
|
||||
function padMigrationKey(idx: number): string {
|
||||
return `m${String(idx).padStart(4, "0")}`;
|
||||
}
|
||||
|
||||
function escapeTemplateLiteral(value: string): string {
|
||||
return value.replace(/`/g, "\\`").replace(/\$\{/g, "\\${");
|
||||
}
|
||||
|
||||
async function fileExists(path: string): Promise<boolean> {
|
||||
try {
|
||||
await readFile(path);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async function walkDirectories(root: string, onDir: (dir: string) => Promise<void>): Promise<void> {
|
||||
const entries = await readdir(root, { withFileTypes: true });
|
||||
await onDir(root);
|
||||
for (const entry of entries) {
|
||||
if (!entry.isDirectory()) continue;
|
||||
if (entry.name === "node_modules" || entry.name === "dist" || entry.name.startsWith(".")) {
|
||||
continue;
|
||||
}
|
||||
await walkDirectories(join(root, entry.name), onDir);
|
||||
}
|
||||
}
|
||||
|
||||
async function generateOne(drizzleDir: string): Promise<void> {
|
||||
const metaDir = resolve(drizzleDir, "meta");
|
||||
const journalPath = resolve(metaDir, "_journal.json");
|
||||
if (!(await fileExists(journalPath))) {
|
||||
return;
|
||||
}
|
||||
|
||||
const drizzleEntries = (await readdir(drizzleDir, { withFileTypes: true }))
|
||||
.filter((entry) => entry.isFile() && entry.name.endsWith(".sql"))
|
||||
.map((entry) => entry.name)
|
||||
.sort();
|
||||
|
||||
if (drizzleEntries.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const journalRaw = await readFile(journalPath, "utf8");
|
||||
const journal = JSON.parse(journalRaw) as Journal;
|
||||
const entries = journal.entries ?? [];
|
||||
|
||||
const sqlByKey = new Map<string, string>();
|
||||
for (const entry of entries) {
|
||||
const file = drizzleEntries[entry.idx];
|
||||
if (!file) {
|
||||
throw new Error(`Missing migration SQL file for idx=${entry.idx} in ${drizzleDir}`);
|
||||
}
|
||||
const sqlPath = resolve(drizzleDir, file);
|
||||
const sqlRaw = await readFile(sqlPath, "utf8");
|
||||
sqlByKey.set(padMigrationKey(entry.idx), sqlRaw);
|
||||
}
|
||||
|
||||
const migrationsObjectLines: string[] = [];
|
||||
for (const entry of entries) {
|
||||
const key = padMigrationKey(entry.idx);
|
||||
const sql = sqlByKey.get(key);
|
||||
if (!sql) continue;
|
||||
migrationsObjectLines.push(` ${key}: \`${escapeTemplateLiteral(sql)}\`,`);
|
||||
}
|
||||
|
||||
const banner = `// This file is generated by src/actors/_scripts/generate-actor-migrations.ts.
|
||||
// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql).
|
||||
// Do not hand-edit this file.
|
||||
`;
|
||||
|
||||
const journalLiteral = JSON.stringify(
|
||||
{
|
||||
entries: entries.map((entry) => ({
|
||||
idx: entry.idx,
|
||||
when: entry.when,
|
||||
tag: entry.tag,
|
||||
breakpoints: Boolean(entry.breakpoints),
|
||||
})),
|
||||
},
|
||||
null,
|
||||
2,
|
||||
);
|
||||
|
||||
const outPath = resolve(drizzleDir, "..", "migrations.ts");
|
||||
const content = `${banner}
|
||||
const journal = ${journalLiteral} as const;
|
||||
|
||||
export default {
|
||||
journal,
|
||||
migrations: {
|
||||
${migrationsObjectLines.join("\n")}
|
||||
} as const
|
||||
};
|
||||
`;
|
||||
|
||||
await mkdir(dirname(outPath), { recursive: true });
|
||||
await writeFile(outPath, content, "utf8");
|
||||
|
||||
// drizzle-kit generates a JS helper file by default; delete to keep TS-only sources.
|
||||
await rm(resolve(drizzleDir, "migrations.js"), { force: true });
|
||||
}
|
||||
|
||||
async function main(): Promise<void> {
|
||||
const packageRoot = resolve(import.meta.dirname, "..", "..", ".."); // packages/backend
|
||||
const actorsRoot = resolve(packageRoot, "src", "actors");
|
||||
|
||||
await walkDirectories(actorsRoot, async (dir) => {
|
||||
if (dir.endsWith(`${join("db", "drizzle")}`)) {
|
||||
await generateOne(dir);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
main().catch((error: unknown) => {
|
||||
const message = error instanceof Error ? (error.stack ?? error.message) : String(error);
|
||||
// eslint-disable-next-line no-console
|
||||
console.error(message);
|
||||
process.exitCode = 1;
|
||||
});
|
||||
53
foundry/packages/backend/src/actors/context.ts
Normal file
53
foundry/packages/backend/src/actors/context.ts
Normal file
|
|
@ -0,0 +1,53 @@
|
|||
import type { AppConfig } from "@sandbox-agent/foundry-shared";
|
||||
import type { BackendDriver } from "../driver.js";
|
||||
import type { NotificationService } from "../notifications/index.js";
|
||||
import type { ProviderRegistry } from "../providers/index.js";
|
||||
import type { AppShellServices } from "../services/app-shell-runtime.js";
|
||||
|
||||
let runtimeConfig: AppConfig | null = null;
|
||||
let providerRegistry: ProviderRegistry | null = null;
|
||||
let notificationService: NotificationService | null = null;
|
||||
let runtimeDriver: BackendDriver | null = null;
|
||||
let appShellServices: AppShellServices | null = null;
|
||||
|
||||
export function initActorRuntimeContext(
|
||||
config: AppConfig,
|
||||
providers: ProviderRegistry,
|
||||
notifications?: NotificationService,
|
||||
driver?: BackendDriver,
|
||||
appShell?: AppShellServices,
|
||||
): void {
|
||||
runtimeConfig = config;
|
||||
providerRegistry = providers;
|
||||
notificationService = notifications ?? null;
|
||||
runtimeDriver = driver ?? null;
|
||||
appShellServices = appShell ?? null;
|
||||
}
|
||||
|
||||
export function getActorRuntimeContext(): {
|
||||
config: AppConfig;
|
||||
providers: ProviderRegistry;
|
||||
notifications: NotificationService | null;
|
||||
driver: BackendDriver;
|
||||
appShell: AppShellServices;
|
||||
} {
|
||||
if (!runtimeConfig || !providerRegistry) {
|
||||
throw new Error("Actor runtime context not initialized");
|
||||
}
|
||||
|
||||
if (!runtimeDriver) {
|
||||
throw new Error("Actor runtime context missing driver");
|
||||
}
|
||||
|
||||
if (!appShellServices) {
|
||||
throw new Error("Actor runtime context missing app shell services");
|
||||
}
|
||||
|
||||
return {
|
||||
config: runtimeConfig,
|
||||
providers: providerRegistry,
|
||||
notifications: notificationService,
|
||||
driver: runtimeDriver,
|
||||
appShell: appShellServices,
|
||||
};
|
||||
}
|
||||
112
foundry/packages/backend/src/actors/events.ts
Normal file
112
foundry/packages/backend/src/actors/events.ts
Normal file
|
|
@ -0,0 +1,112 @@
|
|||
import type { TaskStatus, ProviderId } from "@sandbox-agent/foundry-shared";
|
||||
|
||||
// Every event carries workspaceId/repoId so consumers can route it without
// extra lookups; task-scoped events additionally carry taskId.

/** Emitted when a new task (branch) is created under a project. */
export interface TaskCreatedEvent {
  workspaceId: string;
  repoId: string;
  taskId: string;
  providerId: ProviderId;
  branchName: string;
  title: string;
}

/** Emitted whenever a task transitions status; `message` is display text. */
export interface TaskStatusEvent {
  workspaceId: string;
  repoId: string;
  taskId: string;
  status: TaskStatus;
  message: string;
}

/** Emitted when a project's snapshot is refreshed; `updatedAt` is epoch ms — TODO confirm units. */
export interface ProjectSnapshotEvent {
  workspaceId: string;
  repoId: string;
  updatedAt: number;
}

/** Emitted when an agent session begins working on a task. */
export interface AgentStartedEvent {
  workspaceId: string;
  repoId: string;
  taskId: string;
  sessionId: string;
}

/** Emitted when an agent session goes idle (no active work). */
export interface AgentIdleEvent {
  workspaceId: string;
  repoId: string;
  taskId: string;
  sessionId: string;
}

/** Emitted when an agent errors; `message` is the human-readable failure. */
export interface AgentErrorEvent {
  workspaceId: string;
  repoId: string;
  taskId: string;
  message: string;
}

/** Emitted when a pull request is created for a task. */
export interface PrCreatedEvent {
  workspaceId: string;
  repoId: string;
  taskId: string;
  prNumber: number;
  url: string;
}

/** Emitted when a PR is closed; `merged` distinguishes merge from abandon. */
export interface PrClosedEvent {
  workspaceId: string;
  repoId: string;
  taskId: string;
  prNumber: number;
  merged: boolean;
}

/** Emitted when a PR review lands; `status` is the review verdict string. */
export interface PrReviewEvent {
  workspaceId: string;
  repoId: string;
  taskId: string;
  prNumber: number;
  reviewer: string;
  status: string;
}

/** Emitted when CI status changes on a task's PR. */
export interface CiStatusChangedEvent {
  workspaceId: string;
  repoId: string;
  taskId: string;
  prNumber: number;
  status: string;
}

// Lifecycle steps a task runs after agent work, and their per-step outcome.
export type TaskStepName = "auto_commit" | "push" | "pr_submit";
export type TaskStepStatus = "started" | "completed" | "skipped" | "failed";

/** Progress event for one task pipeline step (see TaskStepName). */
export interface TaskStepEvent {
  workspaceId: string;
  repoId: string;
  taskId: string;
  step: TaskStepName;
  status: TaskStepStatus;
  message: string;
}

/** Emitted when a task's checkout switches branches. */
export interface BranchSwitchedEvent {
  workspaceId: string;
  repoId: string;
  taskId: string;
  branchName: string;
}

/** Emitted when a session is attached to a task. */
export interface SessionAttachedEvent {
  workspaceId: string;
  repoId: string;
  taskId: string;
  sessionId: string;
}

/** Emitted after a branch sync completes; `strategy` names the sync method used. */
export interface BranchSyncedEvent {
  workspaceId: string;
  repoId: string;
  taskId: string;
  branchName: string;
  strategy: string;
}
|
||||
127
foundry/packages/backend/src/actors/handles.ts
Normal file
127
foundry/packages/backend/src/actors/handles.ts
Normal file
|
|
@ -0,0 +1,127 @@
|
|||
import { taskKey, taskStatusSyncKey, historyKey, projectBranchSyncKey, projectKey, projectPrSyncKey, sandboxInstanceKey, workspaceKey } from "./keys.js";
|
||||
import type { ProviderId } from "@sandbox-agent/foundry-shared";
|
||||
|
||||
// NOTE(review): `c` is the rivetkit actor/action context; it is typed `any`
// throughout this module pending proper context types from rivetkit. These
// helpers only rely on `c.client()` and `c.actorId`.

/** Resolve the rivetkit client from an actor/action context. */
export function actorClient(c: any) {
  return c.client();
}

/** Get (or lazily create) the workspace actor for `workspaceId`. */
export async function getOrCreateWorkspace(c: any, workspaceId: string) {
  return await actorClient(c).workspace.getOrCreate(workspaceKey(workspaceId), {
    createWithInput: workspaceId,
  });
}

/** Get (or lazily create) the project actor for a repo in a workspace. */
export async function getOrCreateProject(c: any, workspaceId: string, repoId: string, remoteUrl: string) {
  return await actorClient(c).project.getOrCreate(projectKey(workspaceId, repoId), {
    createWithInput: {
      workspaceId,
      repoId,
      remoteUrl,
    },
  });
}

/** Look up an existing project actor handle (does not create one). */
export function getProject(c: any, workspaceId: string, repoId: string) {
  return actorClient(c).project.get(projectKey(workspaceId, repoId));
}

/** Look up an existing task actor handle (does not create one). */
export function getTask(c: any, workspaceId: string, repoId: string, taskId: string) {
  return actorClient(c).task.get(taskKey(workspaceId, repoId, taskId));
}

/** Get (or lazily create) a task actor; `createWithInput` seeds its initial state. */
export async function getOrCreateTask(c: any, workspaceId: string, repoId: string, taskId: string, createWithInput: Record<string, unknown>) {
  return await actorClient(c).task.getOrCreate(taskKey(workspaceId, repoId, taskId), {
    createWithInput,
  });
}
|
||||
|
||||
/** Get (or lazily create) the workspace/repo-scoped history feed actor. */
export async function getOrCreateHistory(c: any, workspaceId: string, repoId: string) {
  return await actorClient(c).history.getOrCreate(historyKey(workspaceId, repoId), {
    createWithInput: {
      workspaceId,
      repoId,
    },
  });
}

/** Get (or lazily create) the PR polling worker for a repo. `intervalMs` is its poll cadence. */
export async function getOrCreateProjectPrSync(c: any, workspaceId: string, repoId: string, repoPath: string, intervalMs: number) {
  return await actorClient(c).projectPrSync.getOrCreate(projectPrSyncKey(workspaceId, repoId), {
    createWithInput: {
      workspaceId,
      repoId,
      repoPath,
      intervalMs,
    },
  });
}

/** Get (or lazily create) the branch polling worker for a repo. */
export async function getOrCreateProjectBranchSync(c: any, workspaceId: string, repoId: string, repoPath: string, intervalMs: number) {
  return await actorClient(c).projectBranchSync.getOrCreate(projectBranchSyncKey(workspaceId, repoId), {
    createWithInput: {
      workspaceId,
      repoId,
      repoPath,
      intervalMs,
    },
  });
}
|
||||
|
||||
/** Look up an existing sandbox-instance actor handle (does not create one). */
export function getSandboxInstance(c: any, workspaceId: string, providerId: ProviderId, sandboxId: string) {
  return actorClient(c).sandboxInstance.get(sandboxInstanceKey(workspaceId, providerId, sandboxId));
}

/** Get (or lazily create) a sandbox-instance actor keyed by provider + sandbox id. */
export async function getOrCreateSandboxInstance(
  c: any,
  workspaceId: string,
  providerId: ProviderId,
  sandboxId: string,
  createWithInput: Record<string, unknown>,
) {
  return await actorClient(c).sandboxInstance.getOrCreate(sandboxInstanceKey(workspaceId, providerId, sandboxId), { createWithInput });
}

/**
 * Get (or lazily create) the status-sync worker for one task session.
 * The key includes sandboxId + sessionId so multiple sandboxes/sessions can
 * be tracked per task (see taskStatusSyncKey).
 */
export async function getOrCreateTaskStatusSync(
  c: any,
  workspaceId: string,
  repoId: string,
  taskId: string,
  sandboxId: string,
  sessionId: string,
  createWithInput: Record<string, unknown>,
) {
  return await actorClient(c).taskStatusSync.getOrCreate(taskStatusSyncKey(workspaceId, repoId, taskId, sandboxId, sessionId), {
    createWithInput,
  });
}
|
||||
|
||||
// The self* helpers resolve a handle to the *current* actor by its own id —
// used inside an actor's workflow/actions to enqueue messages to itself.

/** Handle to the current projectPrSync actor (by own actorId). */
export function selfProjectPrSync(c: any) {
  return actorClient(c).projectPrSync.getForId(c.actorId);
}

/** Handle to the current projectBranchSync actor (by own actorId). */
export function selfProjectBranchSync(c: any) {
  return actorClient(c).projectBranchSync.getForId(c.actorId);
}

/** Handle to the current taskStatusSync actor (by own actorId). */
export function selfTaskStatusSync(c: any) {
  return actorClient(c).taskStatusSync.getForId(c.actorId);
}

/** Handle to the current history actor (by own actorId). */
export function selfHistory(c: any) {
  return actorClient(c).history.getForId(c.actorId);
}

/** Handle to the current task actor (by own actorId). */
export function selfTask(c: any) {
  return actorClient(c).task.getForId(c.actorId);
}

/** Handle to the current workspace actor (by own actorId). */
export function selfWorkspace(c: any) {
  return actorClient(c).workspace.getForId(c.actorId);
}

/** Handle to the current project actor (by own actorId). */
export function selfProject(c: any) {
  return actorClient(c).project.getForId(c.actorId);
}

/** Handle to the current sandboxInstance actor (by own actorId). */
export function selfSandboxInstance(c: any) {
  return actorClient(c).sandboxInstance.getForId(c.actorId);
}
|
||||
5
foundry/packages/backend/src/actors/history/db/db.ts
Normal file
5
foundry/packages/backend/src/actors/history/db/db.ts
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
import { db } from "rivetkit/db/drizzle";
import * as schema from "./schema.js";
import migrations from "./migrations.js";

// Per-actor SQLite database for the history actor. `migrations` is the
// generated module produced by src/actors/_scripts/generate-actor-migrations.ts.
export const historyDb = db({ schema, migrations });
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
import { defineConfig } from "rivetkit/db/drizzle";

// drizzle-kit config for the history actor's database. The `db:generate`
// package script discovers every drizzle.config.ts under src/actors, emits
// SQL under `out`, then embeds it into migrations.ts via the
// generate-actor-migrations script.
export default defineConfig({
  out: "./src/actors/history/db/drizzle",
  schema: "./src/actors/history/db/schema.ts",
});
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
CREATE TABLE `events` (
|
||||
`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
`task_id` text,
|
||||
`branch_name` text,
|
||||
`kind` text NOT NULL,
|
||||
`payload_json` text NOT NULL,
|
||||
`created_at` integer NOT NULL
|
||||
);
|
||||
|
|
@ -0,0 +1,70 @@
|
|||
{
|
||||
"version": "6",
|
||||
"dialect": "sqlite",
|
||||
"id": "9d9ebe3c-8341-449c-bd14-2b6fd62853a1",
|
||||
"prevId": "00000000-0000-0000-0000-000000000000",
|
||||
"tables": {
|
||||
"events": {
|
||||
"name": "events",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "integer",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": true
|
||||
},
|
||||
"task_id": {
|
||||
"name": "task_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"branch_name": {
|
||||
"name": "branch_name",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"kind": {
|
||||
"name": "kind",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"payload_json": {
|
||||
"name": "payload_json",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"created_at": {
|
||||
"name": "created_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
}
|
||||
},
|
||||
"views": {},
|
||||
"enums": {},
|
||||
"_meta": {
|
||||
"schemas": {},
|
||||
"tables": {},
|
||||
"columns": {}
|
||||
},
|
||||
"internal": {
|
||||
"indexes": {}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"version": "7",
|
||||
"dialect": "sqlite",
|
||||
"entries": [
|
||||
{
|
||||
"idx": 0,
|
||||
"version": "6",
|
||||
"when": 1770924375133,
|
||||
"tag": "0000_watery_bushwacker",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
29
foundry/packages/backend/src/actors/history/db/migrations.ts
Normal file
29
foundry/packages/backend/src/actors/history/db/migrations.ts
Normal file
|
|
@ -0,0 +1,29 @@
|
|||
// This file is generated by src/actors/_scripts/generate-actor-migrations.ts.
|
||||
// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql).
|
||||
// Do not hand-edit this file.
|
||||
|
||||
const journal = {
|
||||
entries: [
|
||||
{
|
||||
idx: 0,
|
||||
when: 1770924375133,
|
||||
tag: "0000_watery_bushwacker",
|
||||
breakpoints: true,
|
||||
},
|
||||
],
|
||||
} as const;
|
||||
|
||||
export default {
|
||||
journal,
|
||||
migrations: {
|
||||
m0000: `CREATE TABLE \`events\` (
|
||||
\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
\`task_id\` text,
|
||||
\`branch_name\` text,
|
||||
\`kind\` text NOT NULL,
|
||||
\`payload_json\` text NOT NULL,
|
||||
\`created_at\` integer NOT NULL
|
||||
);
|
||||
`,
|
||||
} as const,
|
||||
};
|
||||
10
foundry/packages/backend/src/actors/history/db/schema.ts
Normal file
10
foundry/packages/backend/src/actors/history/db/schema.ts
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
import { integer, sqliteTable, text } from "rivetkit/db/drizzle";
|
||||
|
||||
// Append-only event log for the history actor (one row per event).
// Workspace/repo scope is implied by the owning actor's key, so only
// task/branch context is stored per row.
export const events = sqliteTable("events", {
  id: integer("id").primaryKey({ autoIncrement: true }),
  // Optional task association; null for workspace/repo-level events.
  taskId: text("task_id"),
  branchName: text("branch_name"),
  // Event kind discriminator (e.g. the originating command/event name).
  kind: text("kind").notNull(),
  // JSON-serialized event payload (stringified at insert time).
  payloadJson: text("payload_json").notNull(),
  // Millisecond epoch timestamp stamped at insert time.
  createdAt: integer("created_at").notNull(),
});
|
||||
111
foundry/packages/backend/src/actors/history/index.ts
Normal file
111
foundry/packages/backend/src/actors/history/index.ts
Normal file
|
|
@ -0,0 +1,111 @@
|
|||
// @ts-nocheck
|
||||
import { and, desc, eq } from "drizzle-orm";
|
||||
import { actor, queue } from "rivetkit";
|
||||
import { Loop, workflow } from "rivetkit/workflow";
|
||||
import type { HistoryEvent } from "@sandbox-agent/foundry-shared";
|
||||
import { selfHistory } from "../handles.js";
|
||||
import { historyDb } from "./db/db.js";
|
||||
import { events } from "./db/schema.js";
|
||||
|
||||
/** Input used to seed the history actor's initial state. */
export interface HistoryInput {
  workspaceId: string;
  repoId: string;
}

/** Body of a "history.command.append" queue message. */
export interface AppendHistoryCommand {
  // Event kind discriminator stored in the `kind` column.
  kind: string;
  taskId?: string;
  branchName?: string;
  // Arbitrary event payload; JSON-stringified before persisting.
  payload: Record<string, unknown>;
}

/** Optional filters for listing history events. */
export interface ListHistoryParams {
  branch?: string;
  taskId?: string;
  // Max rows to return; defaults to 100 in list().
  limit?: number;
}

// Queue names consumed by the history workflow loop.
const HISTORY_QUEUE_NAMES = ["history.command.append"] as const;
|
||||
|
||||
/**
 * Insert one history event row. `createdAt` is stamped at insert time and
 * the payload is stored as a JSON string; absent task/branch become NULL.
 */
async function appendHistoryRow(loopCtx: any, body: AppendHistoryCommand): Promise<void> {
  const now = Date.now();
  await loopCtx.db
    .insert(events)
    .values({
      taskId: body.taskId ?? null,
      branchName: body.branchName ?? null,
      kind: body.kind,
      payloadJson: JSON.stringify(body.payload),
      createdAt: now,
    })
    .run();
}
|
||||
|
||||
/**
 * Long-running history workflow: drains the append queue one message at a
 * time and persists each command. Messages are completable, so senders
 * using { wait: true } receive an ack once the row is written.
 */
async function runHistoryWorkflow(ctx: any): Promise<void> {
  await ctx.loop("history-command-loop", async (loopCtx: any) => {
    const msg = await loopCtx.queue.next("next-history-command", {
      names: [...HISTORY_QUEUE_NAMES],
      completable: true,
    });
    // No message within the wait window — go around again.
    if (!msg) {
      return Loop.continue(undefined);
    }

    if (msg.name === "history.command.append") {
      // Insert runs inside a named workflow step — presumably so it is not
      // re-executed on workflow replay (NOTE(review): confirm rivetkit step semantics).
      await loopCtx.step("append-history-row", async () => appendHistoryRow(loopCtx, msg.body as AppendHistoryCommand));
      await msg.complete({ ok: true });
    }

    return Loop.continue(undefined);
  });
}
|
||||
|
||||
export const history = actor({
  db: historyDb,
  queues: {
    // All appends are funneled through this queue and serialized by the workflow.
    "history.command.append": queue(),
  },
  createState: (_c, input: HistoryInput) => ({
    workspaceId: input.workspaceId,
    repoId: input.repoId,
  }),
  actions: {
    /**
     * Append an event. Routes through the actor's own queue rather than
     * writing directly, so the workflow serializes all writes; waits up to
     * 15s for the write to be acknowledged.
     */
    async append(c, command: AppendHistoryCommand): Promise<void> {
      const self = selfHistory(c);
      await self.send("history.command.append", command, { wait: true, timeout: 15_000 });
    },

    /**
     * List events, newest first, optionally filtered by taskId and/or
     * branch. Returns at most `limit` rows (default 100).
     */
    async list(c, params?: ListHistoryParams): Promise<HistoryEvent[]> {
      const whereParts = [];
      if (params?.taskId) {
        whereParts.push(eq(events.taskId, params.taskId));
      }
      if (params?.branch) {
        whereParts.push(eq(events.branchName, params.branch));
      }

      const base = c.db
        .select({
          id: events.id,
          taskId: events.taskId,
          branchName: events.branchName,
          kind: events.kind,
          payloadJson: events.payloadJson,
          createdAt: events.createdAt,
        })
        .from(events);

      // Only attach a where clause when at least one filter is present.
      const rows = await (whereParts.length > 0 ? base.where(and(...whereParts)) : base)
        .orderBy(desc(events.createdAt))
        .limit(params?.limit ?? 100)
        .all();

      // Rows only store task/branch; stamp workspace/repo from actor state.
      return rows.map((row) => ({
        ...row,
        workspaceId: c.state.workspaceId,
        repoId: c.state.repoId,
      }));
    },
  },
  run: workflow(runHistoryWorkflow),
});
|
||||
54
foundry/packages/backend/src/actors/index.ts
Normal file
54
foundry/packages/backend/src/actors/index.ts
Normal file
|
|
@ -0,0 +1,54 @@
|
|||
import { setup } from "rivetkit";
|
||||
import { taskStatusSync } from "./task-status-sync/index.js";
|
||||
import { task } from "./task/index.js";
|
||||
import { history } from "./history/index.js";
|
||||
import { projectBranchSync } from "./project-branch-sync/index.js";
|
||||
import { projectPrSync } from "./project-pr-sync/index.js";
|
||||
import { project } from "./project/index.js";
|
||||
import { sandboxInstance } from "./sandbox-instance/index.js";
|
||||
import { workspace } from "./workspace/index.js";
|
||||
|
||||
export function resolveManagerPort(): number {
|
||||
const raw = process.env.HF_RIVET_MANAGER_PORT ?? process.env.RIVETKIT_MANAGER_PORT;
|
||||
if (!raw) {
|
||||
return 7750;
|
||||
}
|
||||
|
||||
const parsed = Number(raw);
|
||||
if (!Number.isInteger(parsed) || parsed <= 0 || parsed > 65535) {
|
||||
throw new Error(`Invalid HF_RIVET_MANAGER_PORT/RIVETKIT_MANAGER_PORT: ${raw}`);
|
||||
}
|
||||
return parsed;
|
||||
}
|
||||
|
||||
function resolveManagerHost(): string {
|
||||
const raw = process.env.HF_RIVET_MANAGER_HOST ?? process.env.RIVETKIT_MANAGER_HOST;
|
||||
return raw && raw.trim().length > 0 ? raw.trim() : "0.0.0.0";
|
||||
}
|
||||
|
||||
// Central actor registry: every actor type must be listed under `use` so
// the rivetkit manager can route to it. Keep this in sync with the actor
// hierarchy documented in packages/backend/CLAUDE.md.
export const registry = setup({
  use: {
    workspace,
    project,
    task,
    sandboxInstance,
    history,
    projectPrSync,
    projectBranchSync,
    taskStatusSync,
  },
  // Resolved once at module load from HF_* / RIVETKIT_* env vars.
  managerPort: resolveManagerPort(),
  managerHost: resolveManagerHost(),
});
|
||||
|
||||
export * from "./context.js";
|
||||
export * from "./events.js";
|
||||
export * from "./task-status-sync/index.js";
|
||||
export * from "./task/index.js";
|
||||
export * from "./history/index.js";
|
||||
export * from "./keys.js";
|
||||
export * from "./project-branch-sync/index.js";
|
||||
export * from "./project-pr-sync/index.js";
|
||||
export * from "./project/index.js";
|
||||
export * from "./sandbox-instance/index.js";
|
||||
export * from "./workspace/index.js";
|
||||
34
foundry/packages/backend/src/actors/keys.ts
Normal file
34
foundry/packages/backend/src/actors/keys.ts
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
export type ActorKey = string[];
|
||||
|
||||
export function workspaceKey(workspaceId: string): ActorKey {
|
||||
return ["ws", workspaceId];
|
||||
}
|
||||
|
||||
export function projectKey(workspaceId: string, repoId: string): ActorKey {
|
||||
return ["ws", workspaceId, "project", repoId];
|
||||
}
|
||||
|
||||
export function taskKey(workspaceId: string, repoId: string, taskId: string): ActorKey {
|
||||
return ["ws", workspaceId, "project", repoId, "task", taskId];
|
||||
}
|
||||
|
||||
export function sandboxInstanceKey(workspaceId: string, providerId: string, sandboxId: string): ActorKey {
|
||||
return ["ws", workspaceId, "provider", providerId, "sandbox", sandboxId];
|
||||
}
|
||||
|
||||
export function historyKey(workspaceId: string, repoId: string): ActorKey {
|
||||
return ["ws", workspaceId, "project", repoId, "history"];
|
||||
}
|
||||
|
||||
export function projectPrSyncKey(workspaceId: string, repoId: string): ActorKey {
|
||||
return ["ws", workspaceId, "project", repoId, "pr-sync"];
|
||||
}
|
||||
|
||||
export function projectBranchSyncKey(workspaceId: string, repoId: string): ActorKey {
|
||||
return ["ws", workspaceId, "project", repoId, "branch-sync"];
|
||||
}
|
||||
|
||||
export function taskStatusSyncKey(workspaceId: string, repoId: string, taskId: string, sandboxId: string, sessionId: string): ActorKey {
|
||||
// Include sandbox + session so multiple sandboxes/sessions can be tracked per task.
|
||||
return ["ws", workspaceId, "project", repoId, "task", taskId, "status-sync", sandboxId, sessionId];
|
||||
}
|
||||
27
foundry/packages/backend/src/actors/logging.ts
Normal file
27
foundry/packages/backend/src/actors/logging.ts
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
export function resolveErrorMessage(error: unknown): string {
|
||||
if (error instanceof Error) {
|
||||
return error.message;
|
||||
}
|
||||
return String(error);
|
||||
}
|
||||
|
||||
export function isActorNotFoundError(error: unknown): boolean {
|
||||
return resolveErrorMessage(error).includes("Actor not found:");
|
||||
}
|
||||
|
||||
export function resolveErrorStack(error: unknown): string | undefined {
|
||||
if (error instanceof Error && typeof error.stack === "string") {
|
||||
return error.stack;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
export function logActorWarning(scope: string, message: string, context?: Record<string, unknown>): void {
|
||||
const payload = {
|
||||
scope,
|
||||
message,
|
||||
...(context ?? {}),
|
||||
};
|
||||
// eslint-disable-next-line no-console
|
||||
console.warn("[foundry][actor:warn]", payload);
|
||||
}
|
||||
189
foundry/packages/backend/src/actors/polling.ts
Normal file
189
foundry/packages/backend/src/actors/polling.ts
Normal file
|
|
@ -0,0 +1,189 @@
|
|||
import { Loop } from "rivetkit/workflow";
|
||||
import { normalizeMessages } from "../services/queue.js";
|
||||
|
||||
/** Mutable state every polling actor must carry. */
export interface PollingControlState {
  // Delay between polls; the control loop clamps effective waits to >= 500ms.
  intervalMs: number;
  // When false the loop only services control messages and does not poll.
  running: boolean;
}

/** Queue names used to control a polling loop from the outside. */
export interface PollingControlQueueNames {
  start: string;
  stop: string;
  setInterval: string;
  // Triggers an immediate poll regardless of the running flag.
  force: string;
}

/** Minimal shape of a completable queue message. */
export interface PollingQueueMessage {
  name: string;
  body: unknown;
  complete(response: unknown): Promise<void>;
}

/** Subset of the rivetkit actor context the polling loop relies on. */
interface PollingActorContext<TState extends PollingControlState> {
  state: TState;
  abortSignal: AbortSignal;
  queue: {
    nextBatch(options: { names: readonly string[]; timeout: number; count: number; completable: true }): Promise<PollingQueueMessage[]>;
  };
}

/** Configuration for runPollingControlLoop. */
interface RunPollingOptions<TState extends PollingControlState> {
  control: PollingControlQueueNames;
  // Invoked on every timed tick while running, and for every "force" message.
  onPoll(c: PollingActorContext<TState>): Promise<void>;
}
|
||||
|
||||
export async function runPollingControlLoop<TState extends PollingControlState>(
|
||||
c: PollingActorContext<TState>,
|
||||
options: RunPollingOptions<TState>,
|
||||
): Promise<void> {
|
||||
while (!c.abortSignal.aborted) {
|
||||
const messages = normalizeMessages(
|
||||
await c.queue.nextBatch({
|
||||
names: [options.control.start, options.control.stop, options.control.setInterval, options.control.force],
|
||||
timeout: Math.max(500, c.state.intervalMs),
|
||||
count: 16,
|
||||
completable: true,
|
||||
}),
|
||||
) as PollingQueueMessage[];
|
||||
|
||||
if (messages.length === 0) {
|
||||
if (!c.state.running) {
|
||||
continue;
|
||||
}
|
||||
await options.onPoll(c);
|
||||
continue;
|
||||
}
|
||||
|
||||
for (const msg of messages) {
|
||||
if (msg.name === options.control.start) {
|
||||
c.state.running = true;
|
||||
await msg.complete({ ok: true });
|
||||
continue;
|
||||
}
|
||||
|
||||
if (msg.name === options.control.stop) {
|
||||
c.state.running = false;
|
||||
await msg.complete({ ok: true });
|
||||
continue;
|
||||
}
|
||||
|
||||
if (msg.name === options.control.setInterval) {
|
||||
const intervalMs = Number((msg.body as { intervalMs?: unknown })?.intervalMs);
|
||||
c.state.intervalMs = Number.isFinite(intervalMs) ? Math.max(500, intervalMs) : c.state.intervalMs;
|
||||
await msg.complete({ ok: true });
|
||||
continue;
|
||||
}
|
||||
|
||||
if (msg.name === options.control.force) {
|
||||
await options.onPoll(c);
|
||||
await msg.complete({ ok: true });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Workflow-flavored actor context.
 * NOTE(review): runWorkflowPollingLoop types its ctx as `any` and calls
 * `ctx.loop(name, fn)`, which does not match this `loop(config)` signature —
 * confirm against the rivetkit workflow API before tightening the type.
 */
interface WorkflowPollingActorContext<TState extends PollingControlState> {
  state: TState;
  loop(config: { name: string; historyEvery: number; historyKeep: number; run(ctx: WorkflowPollingActorContext<TState>): Promise<unknown> }): Promise<void>;
}

/** Queue message shape inside a workflow loop; structurally identical to PollingQueueMessage. */
interface WorkflowPollingQueueMessage extends PollingQueueMessage {}

/**
 * Context handed to each workflow loop iteration: durable state, a named
 * completable queue reader, and a step() helper for checkpointed work.
 */
interface WorkflowPollingLoopContext<TState extends PollingControlState> {
  state: TState;
  queue: {
    // Unlike the plain actor context, each nextBatch call here is named.
    nextBatch(
      name: string,
      options: {
        names: readonly string[];
        timeout: number;
        count: number;
        completable: true;
      },
    ): Promise<WorkflowPollingQueueMessage[]>;
  };
  // step() runs a named unit of work; the config-object overload additionally
  // allows a per-step timeout.
  step<T>(
    nameOrConfig:
      | string
      | {
          name: string;
          timeout?: number;
          run: () => Promise<T>;
        },
    run?: () => Promise<T>,
  ): Promise<T>;
}
|
||||
|
||||
/**
 * Workflow-based variant of the polling control loop.
 *
 * Runs a named workflow loop: each iteration snapshots the control flags in a
 * step, waits on the four control queues (using the poll interval while
 * running, or parking for 60s while stopped), and either runs a poll tick on
 * timeout or applies the received control messages. State mutations and poll
 * calls happen inside named steps; queue completions happen outside them.
 *
 * NOTE(review): `ctx` is `any` because the runtime `loop(name, fn)` call shape
 * differs from the local WorkflowPollingActorContext declaration — confirm
 * against the rivetkit workflow API.
 */
export async function runWorkflowPollingLoop<TState extends PollingControlState>(
  ctx: any,
  options: RunPollingOptions<TState> & { loopName: string },
): Promise<void> {
  await ctx.loop(options.loopName, async (loopCtx: WorkflowPollingLoopContext<TState>) => {
    // Snapshot control flags in a step so the iteration replays deterministically.
    const control = await loopCtx.step("read-control-state", async () => ({
      // Guard against NaN/0 persisted state; floor the interval at 500ms.
      intervalMs: Math.max(500, Number(loopCtx.state.intervalMs) || 500),
      running: Boolean(loopCtx.state.running),
    }));

    // While stopped, park for up to 60s instead of spinning on the interval.
    const messages = normalizeMessages(
      await loopCtx.queue.nextBatch("next-polling-control-batch", {
        names: [options.control.start, options.control.stop, options.control.setInterval, options.control.force],
        timeout: control.running ? control.intervalMs : 60_000,
        count: 16,
        completable: true,
      }),
    ) as WorkflowPollingQueueMessage[];

    if (messages.length === 0) {
      // Idle timeout: run one poll tick only while polling is enabled.
      if (control.running) {
        await loopCtx.step({
          name: "poll-tick",
          timeout: 5 * 60_000,
          run: async () => {
            await options.onPoll(loopCtx as unknown as PollingActorContext<TState>);
          },
        });
      }
      return Loop.continue(undefined);
    }

    for (const msg of messages) {
      if (msg.name === options.control.start) {
        await loopCtx.step("control-start", async () => {
          loopCtx.state.running = true;
        });
        await msg.complete({ ok: true });
        continue;
      }

      if (msg.name === options.control.stop) {
        await loopCtx.step("control-stop", async () => {
          loopCtx.state.running = false;
        });
        await msg.complete({ ok: true });
        continue;
      }

      if (msg.name === options.control.setInterval) {
        await loopCtx.step("control-set-interval", async () => {
          const intervalMs = Number((msg.body as { intervalMs?: unknown })?.intervalMs);
          // Ignore non-numeric payloads; clamp valid values to >= 500ms.
          loopCtx.state.intervalMs = Number.isFinite(intervalMs) ? Math.max(500, intervalMs) : loopCtx.state.intervalMs;
        });
        await msg.complete({ ok: true });
        continue;
      }

      if (msg.name === options.control.force) {
        // Force runs a poll immediately, even while polling is stopped.
        await loopCtx.step({
          name: "control-force",
          timeout: 5 * 60_000,
          run: async () => {
            await options.onPoll(loopCtx as unknown as PollingActorContext<TState>);
          },
        });
        await msg.complete({ ok: true });
      }
      // Unknown message names fall through and are dropped without complete().
    }

    return Loop.continue(undefined);
  });
}
|
||||
176
foundry/packages/backend/src/actors/project-branch-sync/index.ts
Normal file
176
foundry/packages/backend/src/actors/project-branch-sync/index.ts
Normal file
|
|
@ -0,0 +1,176 @@
|
|||
import { actor, queue } from "rivetkit";
|
||||
import { workflow } from "rivetkit/workflow";
|
||||
import type { GitDriver } from "../../driver.js";
|
||||
import { getActorRuntimeContext } from "../context.js";
|
||||
import { getProject, selfProjectBranchSync } from "../handles.js";
|
||||
import { logActorWarning, resolveErrorMessage, resolveErrorStack } from "../logging.js";
|
||||
import { type PollingControlState, runWorkflowPollingLoop } from "../polling.js";
|
||||
import { parentLookupFromStack } from "../project/stack-model.js";
|
||||
import { withRepoGitLock } from "../../services/repo-git-lock.js";
|
||||
|
||||
/** Construction input for the project branch-sync polling actor. */
export interface ProjectBranchSyncInput {
  workspaceId: string;
  repoId: string;
  /** Filesystem path of the repo this actor syncs. */
  repoPath: string;
  /** Initial polling cadence, in milliseconds. */
  intervalMs: number;
}

/** Payload for the set-interval control command. */
interface SetIntervalCommand {
  intervalMs: number;
}

/** One remote branch, enriched with stack/diff/push/conflict metadata. */
interface EnrichedBranchSnapshot {
  branchName: string;
  commitSha: string;
  /** Parent in the stack model, or null when the branch is not stacked. */
  parentBranch: string | null;
  /** True when the stack model has an entry for this branch. */
  trackedInStack: boolean;
  /** Diff summary vs. base, or null when it could not be computed. */
  diffStat: string | null;
  hasUnpushed: boolean;
  conflictsWithMain: boolean;
}

/** Durable actor state: polling flags plus identifiers of the synced repo. */
interface ProjectBranchSyncState extends PollingControlState {
  workspaceId: string;
  repoId: string;
  repoPath: string;
}

// Queue names for this actor's control protocol (start/stop/retime/force-poll).
const CONTROL = {
  start: "project.branch_sync.control.start",
  stop: "project.branch_sync.control.stop",
  setInterval: "project.branch_sync.control.set_interval",
  force: "project.branch_sync.control.force",
} as const;
|
||||
|
||||
/**
 * Fetches the repo and builds an enriched snapshot for every remote branch.
 *
 * Runs under the per-repo git lock so it never interleaves with other git
 * operations on the same checkout. Per-branch enrichment failures (diff stat,
 * rev-parse, conflict check) are logged via logActorWarning and degrade to
 * null/false instead of failing the whole sync.
 *
 * @param workspaceId - Workspace identifier, used only for log context.
 * @param repoId - Repo identifier, used only for log context.
 * @param repoPath - Filesystem path of the repo checkout.
 * @param git - Git driver used for all repo queries.
 * @returns One snapshot per remote branch.
 */
async function enrichBranches(workspaceId: string, repoId: string, repoPath: string, git: GitDriver): Promise<EnrichedBranchSnapshot[]> {
  return await withRepoGitLock(repoPath, async () => {
    await git.fetch(repoPath);
    const branches = await git.listRemoteBranches(repoPath);
    const { driver } = getActorRuntimeContext();
    // Stack listing is best-effort: a failure just means no parent metadata.
    const stackEntries = await driver.stack.listStack(repoPath).catch(() => []);
    const parentByBranch = parentLookupFromStack(stackEntries);
    const enriched: EnrichedBranchSnapshot[] = [];

    // Base SHA is best-effort too; an empty string disables the hasUnpushed check.
    const baseRef = await git.remoteDefaultBaseRef(repoPath);
    const baseSha = await git.revParse(repoPath, baseRef).catch(() => "");

    for (const branch of branches) {
      let branchDiffStat: string | null = null;
      let branchHasUnpushed = false;
      let branchConflicts = false;

      try {
        branchDiffStat = await git.diffStatForBranch(repoPath, branch.branchName);
      } catch (error) {
        logActorWarning("project-branch-sync", "diffStatForBranch failed", {
          workspaceId,
          repoId,
          branchName: branch.branchName,
          error: resolveErrorMessage(error),
        });
        branchDiffStat = null;
      }

      try {
        // NOTE(review): despite the name, this marks a branch whose remote head
        // differs from the default-branch base SHA, not locally-unpushed
        // commits — confirm the intended semantics.
        const headSha = await git.revParse(repoPath, `origin/${branch.branchName}`);
        branchHasUnpushed = Boolean(baseSha && headSha && headSha !== baseSha);
      } catch (error) {
        logActorWarning("project-branch-sync", "revParse failed", {
          workspaceId,
          repoId,
          branchName: branch.branchName,
          error: resolveErrorMessage(error),
        });
        branchHasUnpushed = false;
      }

      try {
        branchConflicts = await git.conflictsWithMain(repoPath, branch.branchName);
      } catch (error) {
        logActorWarning("project-branch-sync", "conflictsWithMain failed", {
          workspaceId,
          repoId,
          branchName: branch.branchName,
          error: resolveErrorMessage(error),
        });
        branchConflicts = false;
      }

      enriched.push({
        branchName: branch.branchName,
        commitSha: branch.commitSha,
        parentBranch: parentByBranch.get(branch.branchName) ?? null,
        trackedInStack: parentByBranch.has(branch.branchName),
        diffStat: branchDiffStat,
        hasUnpushed: branchHasUnpushed,
        conflictsWithMain: branchConflicts,
      });
    }

    return enriched;
  });
}
|
||||
|
||||
async function pollBranches(c: { state: ProjectBranchSyncState }): Promise<void> {
|
||||
const { driver } = getActorRuntimeContext();
|
||||
const enrichedItems = await enrichBranches(c.state.workspaceId, c.state.repoId, c.state.repoPath, driver.git);
|
||||
const parent = getProject(c, c.state.workspaceId, c.state.repoId);
|
||||
await parent.applyBranchSyncResult({ items: enrichedItems, at: Date.now() });
|
||||
}
|
||||
|
||||
/**
 * Actor that periodically syncs branch metadata for one repo and feeds the
 * results back to its parent project actor via applyBranchSyncResult.
 */
export const projectBranchSync = actor({
  // One queue per control verb; the workflow loop drains all four.
  queues: {
    [CONTROL.start]: queue(),
    [CONTROL.stop]: queue(),
    [CONTROL.setInterval]: queue(),
    [CONTROL.force]: queue(),
  },
  options: {
    // Polling actors rely on timer-based wakeups; sleeping would pause the timer and stop polling.
    noSleep: true,
  },
  // Polling starts enabled as soon as the actor is created.
  createState: (_c, input: ProjectBranchSyncInput): ProjectBranchSyncState => ({
    workspaceId: input.workspaceId,
    repoId: input.repoId,
    repoPath: input.repoPath,
    intervalMs: input.intervalMs,
    running: true,
  }),
  // Actions are thin wrappers that enqueue control messages onto this actor's
  // own queues and wait for the workflow loop to acknowledge them.
  actions: {
    async start(c): Promise<void> {
      const self = selfProjectBranchSync(c);
      await self.send(CONTROL.start, {}, { wait: true, timeout: 15_000 });
    },

    async stop(c): Promise<void> {
      const self = selfProjectBranchSync(c);
      await self.send(CONTROL.stop, {}, { wait: true, timeout: 15_000 });
    },

    async setIntervalMs(c, payload: SetIntervalCommand): Promise<void> {
      const self = selfProjectBranchSync(c);
      await self.send(CONTROL.setInterval, payload, { wait: true, timeout: 15_000 });
    },

    // force triggers an immediate poll; allow up to 5 minutes for it to finish.
    async force(c): Promise<void> {
      const self = selfProjectBranchSync(c);
      await self.send(CONTROL.force, {}, { wait: true, timeout: 5 * 60_000 });
    },
  },
  // Long-running workflow: drains control queues and polls on the interval.
  run: workflow(async (ctx) => {
    await runWorkflowPollingLoop<ProjectBranchSyncState>(ctx, {
      loopName: "project-branch-sync-loop",
      control: CONTROL,
      onPoll: async (loopCtx) => {
        try {
          await pollBranches(loopCtx);
        } catch (error) {
          // Poll failures are logged and swallowed so the loop keeps running.
          logActorWarning("project-branch-sync", "poll failed", {
            error: resolveErrorMessage(error),
            stack: resolveErrorStack(error),
          });
        }
      },
    });
  }),
});
|
||||
94
foundry/packages/backend/src/actors/project-pr-sync/index.ts
Normal file
94
foundry/packages/backend/src/actors/project-pr-sync/index.ts
Normal file
|
|
@ -0,0 +1,94 @@
|
|||
import { actor, queue } from "rivetkit";
|
||||
import { workflow } from "rivetkit/workflow";
|
||||
import { getActorRuntimeContext } from "../context.js";
|
||||
import { getProject, selfProjectPrSync } from "../handles.js";
|
||||
import { logActorWarning, resolveErrorMessage, resolveErrorStack } from "../logging.js";
|
||||
import { type PollingControlState, runWorkflowPollingLoop } from "../polling.js";
|
||||
|
||||
/** Construction input for the project PR-sync polling actor. */
export interface ProjectPrSyncInput {
  workspaceId: string;
  repoId: string;
  /** Filesystem path of the repo this actor syncs. */
  repoPath: string;
  /** Initial polling cadence, in milliseconds. */
  intervalMs: number;
}

/** Payload for the set-interval control command. */
interface SetIntervalCommand {
  intervalMs: number;
}

/** Durable actor state: polling flags plus identifiers of the synced repo. */
interface ProjectPrSyncState extends PollingControlState {
  workspaceId: string;
  repoId: string;
  repoPath: string;
}

// Queue names for this actor's control protocol (start/stop/retime/force-poll).
const CONTROL = {
  start: "project.pr_sync.control.start",
  stop: "project.pr_sync.control.stop",
  setInterval: "project.pr_sync.control.set_interval",
  force: "project.pr_sync.control.force",
} as const;
|
||||
|
||||
async function pollPrs(c: { state: ProjectPrSyncState }): Promise<void> {
|
||||
const { driver } = getActorRuntimeContext();
|
||||
const items = await driver.github.listPullRequests(c.state.repoPath);
|
||||
const parent = getProject(c, c.state.workspaceId, c.state.repoId);
|
||||
await parent.applyPrSyncResult({ items, at: Date.now() });
|
||||
}
|
||||
|
||||
/**
 * Actor that periodically syncs pull-request metadata for one repo and feeds
 * the results back to its parent project actor via applyPrSyncResult.
 */
export const projectPrSync = actor({
  // One queue per control verb; the workflow loop drains all four.
  queues: {
    [CONTROL.start]: queue(),
    [CONTROL.stop]: queue(),
    [CONTROL.setInterval]: queue(),
    [CONTROL.force]: queue(),
  },
  options: {
    // Polling actors rely on timer-based wakeups; sleeping would pause the timer and stop polling.
    noSleep: true,
  },
  // Polling starts enabled as soon as the actor is created.
  createState: (_c, input: ProjectPrSyncInput): ProjectPrSyncState => ({
    workspaceId: input.workspaceId,
    repoId: input.repoId,
    repoPath: input.repoPath,
    intervalMs: input.intervalMs,
    running: true,
  }),
  // Actions are thin wrappers that enqueue control messages onto this actor's
  // own queues and wait for the workflow loop to acknowledge them.
  actions: {
    async start(c): Promise<void> {
      const self = selfProjectPrSync(c);
      await self.send(CONTROL.start, {}, { wait: true, timeout: 15_000 });
    },

    async stop(c): Promise<void> {
      const self = selfProjectPrSync(c);
      await self.send(CONTROL.stop, {}, { wait: true, timeout: 15_000 });
    },

    async setIntervalMs(c, payload: SetIntervalCommand): Promise<void> {
      const self = selfProjectPrSync(c);
      await self.send(CONTROL.setInterval, payload, { wait: true, timeout: 15_000 });
    },

    // force triggers an immediate poll; allow up to 5 minutes for it to finish.
    async force(c): Promise<void> {
      const self = selfProjectPrSync(c);
      await self.send(CONTROL.force, {}, { wait: true, timeout: 5 * 60_000 });
    },
  },
  // Long-running workflow: drains control queues and polls on the interval.
  run: workflow(async (ctx) => {
    await runWorkflowPollingLoop<ProjectPrSyncState>(ctx, {
      loopName: "project-pr-sync-loop",
      control: CONTROL,
      onPoll: async (loopCtx) => {
        try {
          await pollPrs(loopCtx);
        } catch (error) {
          // Poll failures are logged and swallowed so the loop keeps running.
          logActorWarning("project-pr-sync", "poll failed", {
            error: resolveErrorMessage(error),
            stack: resolveErrorStack(error),
          });
        }
      },
    });
  }),
});
|
||||
1149
foundry/packages/backend/src/actors/project/actions.ts
Normal file
1149
foundry/packages/backend/src/actors/project/actions.ts
Normal file
File diff suppressed because it is too large
Load diff
5
foundry/packages/backend/src/actors/project/db/db.ts
Normal file
5
foundry/packages/backend/src/actors/project/db/db.ts
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
import { db } from "rivetkit/db/drizzle";
import * as schema from "./schema.js";
import migrations from "./migrations.js";

// Database handle for the project actor, bound to its Drizzle schema.
// `migrations` is generated from drizzle-kit output (see migrations.ts) and is
// passed to rivetkit's db() factory alongside the schema.
export const projectDb = db({ schema, migrations });
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
import { defineConfig } from "rivetkit/db/drizzle";

// drizzle-kit configuration for the project actor's database: migration SQL
// and snapshot files are emitted under ./drizzle, generated from schema.ts.
export default defineConfig({
  out: "./src/actors/project/db/drizzle",
  schema: "./src/actors/project/db/schema.ts",
});
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
CREATE TABLE `branches` (
|
||||
`branch_name` text PRIMARY KEY NOT NULL,
|
||||
`commit_sha` text NOT NULL,
|
||||
`worktree_path` text,
|
||||
`parent_branch` text,
|
||||
`diff_stat` text,
|
||||
`has_unpushed` integer,
|
||||
`conflicts_with_main` integer,
|
||||
`first_seen_at` integer,
|
||||
`last_seen_at` integer,
|
||||
`updated_at` integer NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE `pr_cache` (
|
||||
`branch_name` text PRIMARY KEY NOT NULL,
|
||||
`pr_number` integer NOT NULL,
|
||||
`state` text NOT NULL,
|
||||
`title` text NOT NULL,
|
||||
`pr_url` text,
|
||||
`pr_author` text,
|
||||
`is_draft` integer,
|
||||
`ci_status` text,
|
||||
`review_status` text,
|
||||
`reviewer` text,
|
||||
`fetched_at` integer,
|
||||
`updated_at` integer NOT NULL
|
||||
);
|
||||
|
|
@ -0,0 +1,7 @@
|
|||
CREATE TABLE `repo_meta` (
|
||||
`id` integer PRIMARY KEY NOT NULL,
|
||||
`remote_url` text NOT NULL,
|
||||
`updated_at` integer NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE `branches` DROP COLUMN `worktree_path`;
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
CREATE TABLE `task_index` (
|
||||
`task_id` text PRIMARY KEY NOT NULL,
|
||||
`branch_name` text,
|
||||
`created_at` integer NOT NULL,
|
||||
`updated_at` integer NOT NULL
|
||||
);
|
||||
|
|
@ -0,0 +1 @@
|
|||
ALTER TABLE `branches` ADD `tracked_in_stack` integer;
|
||||
|
|
@ -0,0 +1,192 @@
|
|||
{
|
||||
"version": "6",
|
||||
"dialect": "sqlite",
|
||||
"id": "03d97613-0108-4197-8660-5f2af5409fe6",
|
||||
"prevId": "00000000-0000-0000-0000-000000000000",
|
||||
"tables": {
|
||||
"branches": {
|
||||
"name": "branches",
|
||||
"columns": {
|
||||
"branch_name": {
|
||||
"name": "branch_name",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"commit_sha": {
|
||||
"name": "commit_sha",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"worktree_path": {
|
||||
"name": "worktree_path",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"parent_branch": {
|
||||
"name": "parent_branch",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"diff_stat": {
|
||||
"name": "diff_stat",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"has_unpushed": {
|
||||
"name": "has_unpushed",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"conflicts_with_main": {
|
||||
"name": "conflicts_with_main",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"first_seen_at": {
|
||||
"name": "first_seen_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"last_seen_at": {
|
||||
"name": "last_seen_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"pr_cache": {
|
||||
"name": "pr_cache",
|
||||
"columns": {
|
||||
"branch_name": {
|
||||
"name": "branch_name",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"pr_number": {
|
||||
"name": "pr_number",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"state": {
|
||||
"name": "state",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"title": {
|
||||
"name": "title",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"pr_url": {
|
||||
"name": "pr_url",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"pr_author": {
|
||||
"name": "pr_author",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"is_draft": {
|
||||
"name": "is_draft",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"ci_status": {
|
||||
"name": "ci_status",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"review_status": {
|
||||
"name": "review_status",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"reviewer": {
|
||||
"name": "reviewer",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"fetched_at": {
|
||||
"name": "fetched_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
}
|
||||
},
|
||||
"views": {},
|
||||
"enums": {},
|
||||
"_meta": {
|
||||
"schemas": {},
|
||||
"tables": {},
|
||||
"columns": {}
|
||||
},
|
||||
"internal": {
|
||||
"indexes": {}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,216 @@
|
|||
{
|
||||
"version": "6",
|
||||
"dialect": "sqlite",
|
||||
"id": "e6d294b6-27ce-424b-a3b3-c100b42e628b",
|
||||
"prevId": "03d97613-0108-4197-8660-5f2af5409fe6",
|
||||
"tables": {
|
||||
"branches": {
|
||||
"name": "branches",
|
||||
"columns": {
|
||||
"branch_name": {
|
||||
"name": "branch_name",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"commit_sha": {
|
||||
"name": "commit_sha",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"parent_branch": {
|
||||
"name": "parent_branch",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"diff_stat": {
|
||||
"name": "diff_stat",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"has_unpushed": {
|
||||
"name": "has_unpushed",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"conflicts_with_main": {
|
||||
"name": "conflicts_with_main",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"first_seen_at": {
|
||||
"name": "first_seen_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"last_seen_at": {
|
||||
"name": "last_seen_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"pr_cache": {
|
||||
"name": "pr_cache",
|
||||
"columns": {
|
||||
"branch_name": {
|
||||
"name": "branch_name",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"pr_number": {
|
||||
"name": "pr_number",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"state": {
|
||||
"name": "state",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"title": {
|
||||
"name": "title",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"pr_url": {
|
||||
"name": "pr_url",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"pr_author": {
|
||||
"name": "pr_author",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"is_draft": {
|
||||
"name": "is_draft",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"ci_status": {
|
||||
"name": "ci_status",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"review_status": {
|
||||
"name": "review_status",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"reviewer": {
|
||||
"name": "reviewer",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"fetched_at": {
|
||||
"name": "fetched_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"repo_meta": {
|
||||
"name": "repo_meta",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "integer",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"remote_url": {
|
||||
"name": "remote_url",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
}
|
||||
},
|
||||
"views": {},
|
||||
"enums": {},
|
||||
"_meta": {
|
||||
"schemas": {},
|
||||
"tables": {},
|
||||
"columns": {}
|
||||
},
|
||||
"internal": {
|
||||
"indexes": {}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,254 @@
|
|||
{
|
||||
"version": "6",
|
||||
"dialect": "sqlite",
|
||||
"id": "ac89870f-1630-4a16-9606-7b1225f6da8a",
|
||||
"prevId": "e6d294b6-27ce-424b-a3b3-c100b42e628b",
|
||||
"tables": {
|
||||
"branches": {
|
||||
"name": "branches",
|
||||
"columns": {
|
||||
"branch_name": {
|
||||
"name": "branch_name",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"commit_sha": {
|
||||
"name": "commit_sha",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"parent_branch": {
|
||||
"name": "parent_branch",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"diff_stat": {
|
||||
"name": "diff_stat",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"has_unpushed": {
|
||||
"name": "has_unpushed",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"conflicts_with_main": {
|
||||
"name": "conflicts_with_main",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"first_seen_at": {
|
||||
"name": "first_seen_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"last_seen_at": {
|
||||
"name": "last_seen_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"task_index": {
|
||||
"name": "task_index",
|
||||
"columns": {
|
||||
"task_id": {
|
||||
"name": "task_id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"branch_name": {
|
||||
"name": "branch_name",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"created_at": {
|
||||
"name": "created_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"pr_cache": {
|
||||
"name": "pr_cache",
|
||||
"columns": {
|
||||
"branch_name": {
|
||||
"name": "branch_name",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"pr_number": {
|
||||
"name": "pr_number",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"state": {
|
||||
"name": "state",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"title": {
|
||||
"name": "title",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"pr_url": {
|
||||
"name": "pr_url",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"pr_author": {
|
||||
"name": "pr_author",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"is_draft": {
|
||||
"name": "is_draft",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"ci_status": {
|
||||
"name": "ci_status",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"review_status": {
|
||||
"name": "review_status",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"reviewer": {
|
||||
"name": "reviewer",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"fetched_at": {
|
||||
"name": "fetched_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"repo_meta": {
|
||||
"name": "repo_meta",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "integer",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"remote_url": {
|
||||
"name": "remote_url",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
}
|
||||
},
|
||||
"views": {},
|
||||
"enums": {},
|
||||
"_meta": {
|
||||
"schemas": {},
|
||||
"tables": {},
|
||||
"columns": {}
|
||||
},
|
||||
"internal": {
|
||||
"indexes": {}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,34 @@
|
|||
{
|
||||
"version": "7",
|
||||
"dialect": "sqlite",
|
||||
"entries": [
|
||||
{
|
||||
"idx": 0,
|
||||
"version": "6",
|
||||
"when": 1770924376062,
|
||||
"tag": "0000_stormy_the_hunter",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 1,
|
||||
"version": "6",
|
||||
"when": 1770947252449,
|
||||
"tag": "0001_wild_carlie_cooper",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 2,
|
||||
"version": "6",
|
||||
"when": 1771276338465,
|
||||
"tag": "0002_far_war_machine",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 3,
|
||||
"version": "6",
|
||||
"when": 1771369000000,
|
||||
"tag": "0003_busy_legacy",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
81
foundry/packages/backend/src/actors/project/db/migrations.ts
Normal file
81
foundry/packages/backend/src/actors/project/db/migrations.ts
Normal file
|
|
@ -0,0 +1,81 @@
|
|||
// This file is generated by src/actors/_scripts/generate-actor-migrations.ts.
|
||||
// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql).
|
||||
// Do not hand-edit this file.
|
||||
|
||||
const journal = {
|
||||
entries: [
|
||||
{
|
||||
idx: 0,
|
||||
when: 1770924376062,
|
||||
tag: "0000_stormy_the_hunter",
|
||||
breakpoints: true,
|
||||
},
|
||||
{
|
||||
idx: 1,
|
||||
when: 1770947252449,
|
||||
tag: "0001_wild_carlie_cooper",
|
||||
breakpoints: true,
|
||||
},
|
||||
{
|
||||
idx: 2,
|
||||
when: 1771276338465,
|
||||
tag: "0002_far_war_machine",
|
||||
breakpoints: true,
|
||||
},
|
||||
{
|
||||
idx: 3,
|
||||
when: 1771369000000,
|
||||
tag: "0003_busy_legacy",
|
||||
breakpoints: true,
|
||||
},
|
||||
],
|
||||
} as const;
|
||||
|
||||
export default {
|
||||
journal,
|
||||
migrations: {
|
||||
m0000: `CREATE TABLE \`branches\` (
|
||||
\`branch_name\` text PRIMARY KEY NOT NULL,
|
||||
\`commit_sha\` text NOT NULL,
|
||||
\`worktree_path\` text,
|
||||
\`parent_branch\` text,
|
||||
\`diff_stat\` text,
|
||||
\`has_unpushed\` integer,
|
||||
\`conflicts_with_main\` integer,
|
||||
\`first_seen_at\` integer,
|
||||
\`last_seen_at\` integer,
|
||||
\`updated_at\` integer NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE \`pr_cache\` (
|
||||
\`branch_name\` text PRIMARY KEY NOT NULL,
|
||||
\`pr_number\` integer NOT NULL,
|
||||
\`state\` text NOT NULL,
|
||||
\`title\` text NOT NULL,
|
||||
\`pr_url\` text,
|
||||
\`pr_author\` text,
|
||||
\`is_draft\` integer,
|
||||
\`ci_status\` text,
|
||||
\`review_status\` text,
|
||||
\`reviewer\` text,
|
||||
\`fetched_at\` integer,
|
||||
\`updated_at\` integer NOT NULL
|
||||
);
|
||||
`,
|
||||
m0001: `CREATE TABLE \`repo_meta\` (
|
||||
\`id\` integer PRIMARY KEY NOT NULL,
|
||||
\`remote_url\` text NOT NULL,
|
||||
\`updated_at\` integer NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE \`branches\` DROP COLUMN \`worktree_path\`;`,
|
||||
m0002: `CREATE TABLE \`task_index\` (
|
||||
\`task_id\` text PRIMARY KEY NOT NULL,
|
||||
\`branch_name\` text,
|
||||
\`created_at\` integer NOT NULL,
|
||||
\`updated_at\` integer NOT NULL
|
||||
);
|
||||
`,
|
||||
m0003: `ALTER TABLE \`branches\` ADD \`tracked_in_stack\` integer;`,
|
||||
} as const,
|
||||
};
|
||||
44
foundry/packages/backend/src/actors/project/db/schema.ts
Normal file
44
foundry/packages/backend/src/actors/project/db/schema.ts
Normal file
|
|
@ -0,0 +1,44 @@
|
|||
import { integer, sqliteTable, text } from "rivetkit/db/drizzle";
|
||||
|
||||
// SQLite is per project actor instance (workspaceId+repoId), so no workspaceId/repoId columns needed.
|
||||
|
||||
export const branches = sqliteTable("branches", {
|
||||
branchName: text("branch_name").notNull().primaryKey(),
|
||||
commitSha: text("commit_sha").notNull(),
|
||||
parentBranch: text("parent_branch"),
|
||||
trackedInStack: integer("tracked_in_stack"),
|
||||
diffStat: text("diff_stat"),
|
||||
hasUnpushed: integer("has_unpushed"),
|
||||
conflictsWithMain: integer("conflicts_with_main"),
|
||||
firstSeenAt: integer("first_seen_at"),
|
||||
lastSeenAt: integer("last_seen_at"),
|
||||
updatedAt: integer("updated_at").notNull(),
|
||||
});
|
||||
|
||||
export const repoMeta = sqliteTable("repo_meta", {
|
||||
id: integer("id").primaryKey(),
|
||||
remoteUrl: text("remote_url").notNull(),
|
||||
updatedAt: integer("updated_at").notNull(),
|
||||
});
|
||||
|
||||
export const prCache = sqliteTable("pr_cache", {
|
||||
branchName: text("branch_name").notNull().primaryKey(),
|
||||
prNumber: integer("pr_number").notNull(),
|
||||
state: text("state").notNull(),
|
||||
title: text("title").notNull(),
|
||||
prUrl: text("pr_url"),
|
||||
prAuthor: text("pr_author"),
|
||||
isDraft: integer("is_draft"),
|
||||
ciStatus: text("ci_status"),
|
||||
reviewStatus: text("review_status"),
|
||||
reviewer: text("reviewer"),
|
||||
fetchedAt: integer("fetched_at"),
|
||||
updatedAt: integer("updated_at").notNull(),
|
||||
});
|
||||
|
||||
export const taskIndex = sqliteTable("task_index", {
|
||||
taskId: text("task_id").notNull().primaryKey(),
|
||||
branchName: text("branch_name"),
|
||||
createdAt: integer("created_at").notNull(),
|
||||
updatedAt: integer("updated_at").notNull(),
|
||||
});
|
||||
28
foundry/packages/backend/src/actors/project/index.ts
Normal file
28
foundry/packages/backend/src/actors/project/index.ts
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
import { actor, queue } from "rivetkit";
|
||||
import { workflow } from "rivetkit/workflow";
|
||||
import { projectDb } from "./db/db.js";
|
||||
import { PROJECT_QUEUE_NAMES, projectActions, runProjectWorkflow } from "./actions.js";
|
||||
|
||||
export interface ProjectInput {
|
||||
workspaceId: string;
|
||||
repoId: string;
|
||||
remoteUrl: string;
|
||||
}
|
||||
|
||||
export const project = actor({
|
||||
db: projectDb,
|
||||
queues: Object.fromEntries(PROJECT_QUEUE_NAMES.map((name) => [name, queue()])),
|
||||
options: {
|
||||
actionTimeout: 5 * 60_000,
|
||||
},
|
||||
createState: (_c, input: ProjectInput) => ({
|
||||
workspaceId: input.workspaceId,
|
||||
repoId: input.repoId,
|
||||
remoteUrl: input.remoteUrl,
|
||||
localPath: null as string | null,
|
||||
syncActorsStarted: false,
|
||||
taskIndexHydrated: false,
|
||||
}),
|
||||
actions: projectActions,
|
||||
run: workflow(runProjectWorkflow),
|
||||
});
|
||||
69
foundry/packages/backend/src/actors/project/stack-model.ts
Normal file
69
foundry/packages/backend/src/actors/project/stack-model.ts
Normal file
|
|
@ -0,0 +1,69 @@
|
|||
export interface StackEntry {
|
||||
branchName: string;
|
||||
parentBranch: string | null;
|
||||
}
|
||||
|
||||
export interface OrderedBranchRow {
|
||||
branchName: string;
|
||||
parentBranch: string | null;
|
||||
updatedAt: number;
|
||||
}
|
||||
|
||||
export function normalizeParentBranch(branchName: string, parentBranch: string | null | undefined): string | null {
|
||||
const parent = parentBranch?.trim() || null;
|
||||
if (!parent || parent === branchName) {
|
||||
return null;
|
||||
}
|
||||
return parent;
|
||||
}
|
||||
|
||||
export function parentLookupFromStack(entries: StackEntry[]): Map<string, string | null> {
|
||||
const lookup = new Map<string, string | null>();
|
||||
for (const entry of entries) {
|
||||
const branchName = entry.branchName.trim();
|
||||
if (!branchName) {
|
||||
continue;
|
||||
}
|
||||
lookup.set(branchName, normalizeParentBranch(branchName, entry.parentBranch));
|
||||
}
|
||||
return lookup;
|
||||
}
|
||||
|
||||
export function sortBranchesForOverview(rows: OrderedBranchRow[]): OrderedBranchRow[] {
|
||||
const byName = new Map(rows.map((row) => [row.branchName, row]));
|
||||
const depthMemo = new Map<string, number>();
|
||||
const computing = new Set<string>();
|
||||
|
||||
const depthFor = (branchName: string): number => {
|
||||
const cached = depthMemo.get(branchName);
|
||||
if (cached != null) {
|
||||
return cached;
|
||||
}
|
||||
if (computing.has(branchName)) {
|
||||
return 999;
|
||||
}
|
||||
|
||||
computing.add(branchName);
|
||||
const row = byName.get(branchName);
|
||||
const parent = row?.parentBranch;
|
||||
let depth = 0;
|
||||
if (parent && parent !== branchName && byName.has(parent)) {
|
||||
depth = Math.min(998, depthFor(parent) + 1);
|
||||
}
|
||||
computing.delete(branchName);
|
||||
depthMemo.set(branchName, depth);
|
||||
return depth;
|
||||
};
|
||||
|
||||
return [...rows].sort((a, b) => {
|
||||
const da = depthFor(a.branchName);
|
||||
const db = depthFor(b.branchName);
|
||||
if (da !== db) {
|
||||
return da - db;
|
||||
}
|
||||
if (a.updatedAt !== b.updatedAt) {
|
||||
return b.updatedAt - a.updatedAt;
|
||||
}
|
||||
return a.branchName.localeCompare(b.branchName);
|
||||
});
|
||||
}
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
import { db } from "rivetkit/db/drizzle";
|
||||
import * as schema from "./schema.js";
|
||||
import migrations from "./migrations.js";
|
||||
|
||||
export const sandboxInstanceDb = db({ schema, migrations });
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
import { defineConfig } from "rivetkit/db/drizzle";
|
||||
|
||||
export default defineConfig({
|
||||
out: "./src/actors/sandbox-instance/db/drizzle",
|
||||
schema: "./src/actors/sandbox-instance/db/schema.ts",
|
||||
});
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
CREATE TABLE `sandbox_instance` (
|
||||
`id` integer PRIMARY KEY NOT NULL,
|
||||
`metadata_json` text NOT NULL,
|
||||
`status` text NOT NULL,
|
||||
`updated_at` integer NOT NULL
|
||||
);
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
CREATE TABLE `sandbox_sessions` (
|
||||
`id` text PRIMARY KEY NOT NULL,
|
||||
`agent` text NOT NULL,
|
||||
`agent_session_id` text NOT NULL,
|
||||
`last_connection_id` text NOT NULL,
|
||||
`created_at` integer NOT NULL,
|
||||
`destroyed_at` integer,
|
||||
`session_init_json` text
|
||||
);
|
||||
--> statement-breakpoint
|
||||
|
||||
CREATE TABLE `sandbox_session_events` (
|
||||
`id` text PRIMARY KEY NOT NULL,
|
||||
`session_id` text NOT NULL,
|
||||
`event_index` integer NOT NULL,
|
||||
`created_at` integer NOT NULL,
|
||||
`connection_id` text NOT NULL,
|
||||
`sender` text NOT NULL,
|
||||
`payload_json` text NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
|
||||
CREATE INDEX `sandbox_sessions_created_at_idx` ON `sandbox_sessions` (`created_at`);
|
||||
--> statement-breakpoint
|
||||
CREATE INDEX `sandbox_session_events_session_id_event_index_idx` ON `sandbox_session_events` (`session_id`,`event_index`);
|
||||
--> statement-breakpoint
|
||||
CREATE INDEX `sandbox_session_events_session_id_created_at_idx` ON `sandbox_session_events` (`session_id`,`created_at`);
|
||||
|
|
@ -0,0 +1,56 @@
|
|||
{
|
||||
"version": "6",
|
||||
"dialect": "sqlite",
|
||||
"id": "ef8a919c-64f0-46d9-b8ed-a15f039e6ba7",
|
||||
"prevId": "00000000-0000-0000-0000-000000000000",
|
||||
"tables": {
|
||||
"sandbox_instance": {
|
||||
"name": "sandbox_instance",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "integer",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"metadata_json": {
|
||||
"name": "metadata_json",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"status": {
|
||||
"name": "status",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
}
|
||||
},
|
||||
"views": {},
|
||||
"enums": {},
|
||||
"_meta": {
|
||||
"schemas": {},
|
||||
"tables": {},
|
||||
"columns": {}
|
||||
},
|
||||
"internal": {
|
||||
"indexes": {}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
{
|
||||
"version": "7",
|
||||
"dialect": "sqlite",
|
||||
"entries": [
|
||||
{
|
||||
"idx": 0,
|
||||
"version": "6",
|
||||
"when": 1770924375604,
|
||||
"tag": "0000_broad_tyrannus",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 1,
|
||||
"version": "6",
|
||||
"when": 1776482400000,
|
||||
"tag": "0001_sandbox_sessions",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@ -0,0 +1,61 @@
|
|||
// This file is generated by src/actors/_scripts/generate-actor-migrations.ts.
|
||||
// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql).
|
||||
// Do not hand-edit this file.
|
||||
|
||||
const journal = {
|
||||
entries: [
|
||||
{
|
||||
idx: 0,
|
||||
when: 1770924375604,
|
||||
tag: "0000_broad_tyrannus",
|
||||
breakpoints: true,
|
||||
},
|
||||
{
|
||||
idx: 1,
|
||||
when: 1776482400000,
|
||||
tag: "0001_sandbox_sessions",
|
||||
breakpoints: true,
|
||||
},
|
||||
],
|
||||
} as const;
|
||||
|
||||
export default {
|
||||
journal,
|
||||
migrations: {
|
||||
m0000: `CREATE TABLE \`sandbox_instance\` (
|
||||
\`id\` integer PRIMARY KEY NOT NULL,
|
||||
\`metadata_json\` text NOT NULL,
|
||||
\`status\` text NOT NULL,
|
||||
\`updated_at\` integer NOT NULL
|
||||
);
|
||||
`,
|
||||
m0001: `CREATE TABLE \`sandbox_sessions\` (
|
||||
\`id\` text PRIMARY KEY NOT NULL,
|
||||
\`agent\` text NOT NULL,
|
||||
\`agent_session_id\` text NOT NULL,
|
||||
\`last_connection_id\` text NOT NULL,
|
||||
\`created_at\` integer NOT NULL,
|
||||
\`destroyed_at\` integer,
|
||||
\`session_init_json\` text
|
||||
);
|
||||
--> statement-breakpoint
|
||||
|
||||
CREATE TABLE \`sandbox_session_events\` (
|
||||
\`id\` text PRIMARY KEY NOT NULL,
|
||||
\`session_id\` text NOT NULL,
|
||||
\`event_index\` integer NOT NULL,
|
||||
\`created_at\` integer NOT NULL,
|
||||
\`connection_id\` text NOT NULL,
|
||||
\`sender\` text NOT NULL,
|
||||
\`payload_json\` text NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
|
||||
CREATE INDEX \`sandbox_sessions_created_at_idx\` ON \`sandbox_sessions\` (\`created_at\`);
|
||||
--> statement-breakpoint
|
||||
CREATE INDEX \`sandbox_session_events_session_id_event_index_idx\` ON \`sandbox_session_events\` (\`session_id\`,\`event_index\`);
|
||||
--> statement-breakpoint
|
||||
CREATE INDEX \`sandbox_session_events_session_id_created_at_idx\` ON \`sandbox_session_events\` (\`session_id\`,\`created_at\`);
|
||||
`,
|
||||
} as const,
|
||||
};
|
||||
|
|
@ -0,0 +1,31 @@
|
|||
import { integer, sqliteTable, text } from "rivetkit/db/drizzle";
|
||||
|
||||
// SQLite is per sandbox-instance actor instance.
|
||||
export const sandboxInstance = sqliteTable("sandbox_instance", {
|
||||
id: integer("id").primaryKey(),
|
||||
metadataJson: text("metadata_json").notNull(),
|
||||
status: text("status").notNull(),
|
||||
updatedAt: integer("updated_at").notNull(),
|
||||
});
|
||||
|
||||
// Persist sandbox-agent sessions/events in SQLite instead of actor state so they survive
|
||||
// serverless actor evictions and backend restarts.
|
||||
export const sandboxSessions = sqliteTable("sandbox_sessions", {
|
||||
id: text("id").notNull().primaryKey(),
|
||||
agent: text("agent").notNull(),
|
||||
agentSessionId: text("agent_session_id").notNull(),
|
||||
lastConnectionId: text("last_connection_id").notNull(),
|
||||
createdAt: integer("created_at").notNull(),
|
||||
destroyedAt: integer("destroyed_at"),
|
||||
sessionInitJson: text("session_init_json"),
|
||||
});
|
||||
|
||||
export const sandboxSessionEvents = sqliteTable("sandbox_session_events", {
|
||||
id: text("id").notNull().primaryKey(),
|
||||
sessionId: text("session_id").notNull(),
|
||||
eventIndex: integer("event_index").notNull(),
|
||||
createdAt: integer("created_at").notNull(),
|
||||
connectionId: text("connection_id").notNull(),
|
||||
sender: text("sender").notNull(),
|
||||
payloadJson: text("payload_json").notNull(),
|
||||
});
|
||||
636
foundry/packages/backend/src/actors/sandbox-instance/index.ts
Normal file
636
foundry/packages/backend/src/actors/sandbox-instance/index.ts
Normal file
|
|
@ -0,0 +1,636 @@
|
|||
import { setTimeout as delay } from "node:timers/promises";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { actor, queue } from "rivetkit";
|
||||
import { Loop, workflow } from "rivetkit/workflow";
|
||||
import type { ProviderId } from "@sandbox-agent/foundry-shared";
|
||||
import type {
|
||||
ProcessCreateRequest,
|
||||
ProcessInfo,
|
||||
ProcessLogFollowQuery,
|
||||
ProcessLogsResponse,
|
||||
ProcessSignalQuery,
|
||||
SessionEvent,
|
||||
SessionRecord,
|
||||
} from "sandbox-agent";
|
||||
import { sandboxInstanceDb } from "./db/db.js";
|
||||
import { sandboxInstance as sandboxInstanceTable } from "./db/schema.js";
|
||||
import { SandboxInstancePersistDriver } from "./persist.js";
|
||||
import { getActorRuntimeContext } from "../context.js";
|
||||
import { selfSandboxInstance } from "../handles.js";
|
||||
import { logActorWarning, resolveErrorMessage } from "../logging.js";
|
||||
import { expectQueueResponse } from "../../services/queue.js";
|
||||
|
||||
export interface SandboxInstanceInput {
|
||||
workspaceId: string;
|
||||
providerId: ProviderId;
|
||||
sandboxId: string;
|
||||
}
|
||||
|
||||
interface SandboxAgentConnection {
|
||||
endpoint: string;
|
||||
token?: string;
|
||||
}
|
||||
|
||||
const SANDBOX_ROW_ID = 1;
|
||||
const CREATE_SESSION_MAX_ATTEMPTS = 3;
|
||||
const CREATE_SESSION_RETRY_BASE_MS = 1_000;
|
||||
const CREATE_SESSION_STEP_TIMEOUT_MS = 10 * 60_000;
|
||||
|
||||
function normalizeStatusFromEventPayload(payload: unknown): "running" | "idle" | "error" | null {
|
||||
if (payload && typeof payload === "object") {
|
||||
const envelope = payload as {
|
||||
error?: unknown;
|
||||
method?: unknown;
|
||||
result?: unknown;
|
||||
};
|
||||
|
||||
if (envelope.error) {
|
||||
return "error";
|
||||
}
|
||||
|
||||
if (envelope.result && typeof envelope.result === "object") {
|
||||
const stopReason = (envelope.result as { stopReason?: unknown }).stopReason;
|
||||
if (typeof stopReason === "string" && stopReason.length > 0) {
|
||||
return "idle";
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof envelope.method === "string") {
|
||||
const lowered = envelope.method.toLowerCase();
|
||||
if (lowered.includes("error") || lowered.includes("failed")) {
|
||||
return "error";
|
||||
}
|
||||
if (lowered.includes("ended") || lowered.includes("complete") || lowered.includes("stopped")) {
|
||||
return "idle";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function stringifyJson(value: unknown): string {
|
||||
return JSON.stringify(value, (_key, item) => {
|
||||
if (typeof item === "bigint") return item.toString();
|
||||
return item;
|
||||
});
|
||||
}
|
||||
|
||||
function parseMetadata(metadataJson: string): Record<string, unknown> {
|
||||
try {
|
||||
const parsed = JSON.parse(metadataJson) as unknown;
|
||||
if (parsed && typeof parsed === "object") return parsed as Record<string, unknown>;
|
||||
return {};
|
||||
} catch {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
async function loadPersistedAgentConfig(c: any): Promise<SandboxAgentConnection | null> {
|
||||
try {
|
||||
const row = await c.db
|
||||
.select({ metadataJson: sandboxInstanceTable.metadataJson })
|
||||
.from(sandboxInstanceTable)
|
||||
.where(eq(sandboxInstanceTable.id, SANDBOX_ROW_ID))
|
||||
.get();
|
||||
|
||||
if (row?.metadataJson) {
|
||||
const metadata = parseMetadata(row.metadataJson);
|
||||
const endpoint = typeof metadata.agentEndpoint === "string" ? metadata.agentEndpoint.trim() : "";
|
||||
const token = typeof metadata.agentToken === "string" ? metadata.agentToken.trim() : "";
|
||||
if (endpoint) {
|
||||
return token ? { endpoint, token } : { endpoint };
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
async function loadFreshDaytonaAgentConfig(c: any): Promise<SandboxAgentConnection> {
|
||||
const { config, driver } = getActorRuntimeContext();
|
||||
const daytona = driver.daytona.createClient({
|
||||
apiUrl: config.providers.daytona.endpoint,
|
||||
apiKey: config.providers.daytona.apiKey,
|
||||
});
|
||||
const sandbox = await daytona.getSandbox(c.state.sandboxId);
|
||||
const state = String(sandbox.state ?? "unknown").toLowerCase();
|
||||
if (state !== "started" && state !== "running") {
|
||||
await daytona.startSandbox(c.state.sandboxId, 60);
|
||||
}
|
||||
const preview = await daytona.getPreviewEndpoint(c.state.sandboxId, 2468);
|
||||
return preview.token ? { endpoint: preview.url, token: preview.token } : { endpoint: preview.url };
|
||||
}
|
||||
|
||||
async function loadFreshProviderAgentConfig(c: any): Promise<SandboxAgentConnection> {
|
||||
const { providers } = getActorRuntimeContext();
|
||||
const provider = providers.get(c.state.providerId);
|
||||
return await provider.ensureSandboxAgent({
|
||||
workspaceId: c.state.workspaceId,
|
||||
sandboxId: c.state.sandboxId,
|
||||
});
|
||||
}
|
||||
|
||||
async function loadAgentConfig(c: any): Promise<SandboxAgentConnection> {
|
||||
const persisted = await loadPersistedAgentConfig(c);
|
||||
if (c.state.providerId === "daytona") {
|
||||
// Keep one stable signed preview endpoint per sandbox-instance actor.
|
||||
// Rotating preview URLs on every call fragments SDK client state (sessions/events)
|
||||
// because client caching keys by endpoint.
|
||||
if (persisted) {
|
||||
return persisted;
|
||||
}
|
||||
return await loadFreshDaytonaAgentConfig(c);
|
||||
}
|
||||
|
||||
// Local sandboxes are tied to the current backend process, so the sandbox-agent
|
||||
// token can rotate on restart. Always refresh from the provider instead of
|
||||
// trusting persisted metadata.
|
||||
if (c.state.providerId === "local") {
|
||||
return await loadFreshProviderAgentConfig(c);
|
||||
}
|
||||
|
||||
if (persisted) {
|
||||
return persisted;
|
||||
}
|
||||
|
||||
return await loadFreshProviderAgentConfig(c);
|
||||
}
|
||||
|
||||
async function derivePersistedSessionStatus(
|
||||
persist: SandboxInstancePersistDriver,
|
||||
sessionId: string,
|
||||
): Promise<{ id: string; status: "running" | "idle" | "error" }> {
|
||||
const session = await persist.getSession(sessionId);
|
||||
if (!session) {
|
||||
return { id: sessionId, status: "error" };
|
||||
}
|
||||
|
||||
if (session.destroyedAt) {
|
||||
return { id: sessionId, status: "idle" };
|
||||
}
|
||||
|
||||
const events = await persist.listEvents({
|
||||
sessionId,
|
||||
limit: 25,
|
||||
});
|
||||
|
||||
for (let index = events.items.length - 1; index >= 0; index -= 1) {
|
||||
const event = events.items[index];
|
||||
if (!event) continue;
|
||||
const status = normalizeStatusFromEventPayload(event.payload);
|
||||
if (status) {
|
||||
return { id: sessionId, status };
|
||||
}
|
||||
}
|
||||
|
||||
return { id: sessionId, status: "idle" };
|
||||
}
|
||||
|
||||
function isTransientSessionCreateError(detail: string): boolean {
|
||||
const lowered = detail.toLowerCase();
|
||||
if (lowered.includes("timed out") || lowered.includes("timeout") || lowered.includes("504") || lowered.includes("gateway timeout")) {
|
||||
// ACP timeout errors are expensive and usually deterministic for the same
|
||||
// request; immediate retries spawn additional sessions/processes and make
|
||||
// recovery harder.
|
||||
return false;
|
||||
}
|
||||
|
||||
return (
|
||||
lowered.includes("502") || lowered.includes("503") || lowered.includes("bad gateway") || lowered.includes("econnreset") || lowered.includes("econnrefused")
|
||||
);
|
||||
}
|
||||
|
||||
interface EnsureSandboxCommand {
|
||||
metadata: Record<string, unknown>;
|
||||
status: string;
|
||||
agentEndpoint?: string;
|
||||
agentToken?: string;
|
||||
}
|
||||
|
||||
interface HealthSandboxCommand {
|
||||
status: string;
|
||||
message: string;
|
||||
}
|
||||
|
||||
interface CreateSessionCommand {
|
||||
prompt: string;
|
||||
cwd?: string;
|
||||
agent?: "claude" | "codex" | "opencode";
|
||||
}
|
||||
|
||||
interface CreateSessionResult {
|
||||
id: string | null;
|
||||
status: "running" | "idle" | "error";
|
||||
error?: string;
|
||||
}
|
||||
|
||||
interface ListSessionsCommand {
|
||||
cursor?: string;
|
||||
limit?: number;
|
||||
}
|
||||
|
||||
interface ListSessionEventsCommand {
|
||||
sessionId: string;
|
||||
cursor?: string;
|
||||
limit?: number;
|
||||
}
|
||||
|
||||
interface SendPromptCommand {
|
||||
sessionId: string;
|
||||
prompt: string;
|
||||
notification?: boolean;
|
||||
}
|
||||
|
||||
interface SessionStatusCommand {
|
||||
sessionId: string;
|
||||
}
|
||||
|
||||
interface SessionControlCommand {
|
||||
sessionId: string;
|
||||
}
|
||||
|
||||
const SANDBOX_INSTANCE_QUEUE_NAMES = [
|
||||
"sandboxInstance.command.ensure",
|
||||
"sandboxInstance.command.updateHealth",
|
||||
"sandboxInstance.command.destroy",
|
||||
"sandboxInstance.command.createSession",
|
||||
"sandboxInstance.command.sendPrompt",
|
||||
"sandboxInstance.command.cancelSession",
|
||||
"sandboxInstance.command.destroySession",
|
||||
] as const;
|
||||
|
||||
type SandboxInstanceQueueName = (typeof SANDBOX_INSTANCE_QUEUE_NAMES)[number];
|
||||
|
||||
function sandboxInstanceWorkflowQueueName(name: SandboxInstanceQueueName): SandboxInstanceQueueName {
|
||||
return name;
|
||||
}
|
||||
|
||||
async function getSandboxAgentClient(c: any) {
|
||||
const { driver } = getActorRuntimeContext();
|
||||
const persist = new SandboxInstancePersistDriver(c.db);
|
||||
const { endpoint, token } = await loadAgentConfig(c);
|
||||
return driver.sandboxAgent.createClient({
|
||||
endpoint,
|
||||
token,
|
||||
persist,
|
||||
});
|
||||
}
|
||||
|
||||
function broadcastProcessesUpdated(c: any): void {
|
||||
c.broadcast("processesUpdated", {
|
||||
sandboxId: c.state.sandboxId,
|
||||
at: Date.now(),
|
||||
});
|
||||
}
|
||||
|
||||
async function ensureSandboxMutation(c: any, command: EnsureSandboxCommand): Promise<void> {
|
||||
const now = Date.now();
|
||||
const metadata = {
|
||||
...command.metadata,
|
||||
agentEndpoint: command.agentEndpoint ?? null,
|
||||
agentToken: command.agentToken ?? null,
|
||||
};
|
||||
|
||||
const metadataJson = stringifyJson(metadata);
|
||||
await c.db
|
||||
.insert(sandboxInstanceTable)
|
||||
.values({
|
||||
id: SANDBOX_ROW_ID,
|
||||
metadataJson,
|
||||
status: command.status,
|
||||
updatedAt: now,
|
||||
})
|
||||
.onConflictDoUpdate({
|
||||
target: sandboxInstanceTable.id,
|
||||
set: {
|
||||
metadataJson,
|
||||
status: command.status,
|
||||
updatedAt: now,
|
||||
},
|
||||
})
|
||||
.run();
|
||||
}
|
||||
|
||||
async function updateHealthMutation(c: any, command: HealthSandboxCommand): Promise<void> {
|
||||
await c.db
|
||||
.update(sandboxInstanceTable)
|
||||
.set({
|
||||
status: `${command.status}:${command.message}`,
|
||||
updatedAt: Date.now(),
|
||||
})
|
||||
.where(eq(sandboxInstanceTable.id, SANDBOX_ROW_ID))
|
||||
.run();
|
||||
}
|
||||
|
||||
async function destroySandboxMutation(c: any): Promise<void> {
|
||||
await c.db.delete(sandboxInstanceTable).where(eq(sandboxInstanceTable.id, SANDBOX_ROW_ID)).run();
|
||||
}
|
||||
|
||||
async function createSessionMutation(c: any, command: CreateSessionCommand): Promise<CreateSessionResult> {
|
||||
let lastDetail = "sandbox-agent createSession failed";
|
||||
let attemptsMade = 0;
|
||||
|
||||
for (let attempt = 1; attempt <= CREATE_SESSION_MAX_ATTEMPTS; attempt += 1) {
|
||||
attemptsMade = attempt;
|
||||
try {
|
||||
const client = await getSandboxAgentClient(c);
|
||||
|
||||
const session = await client.createSession({
|
||||
prompt: command.prompt,
|
||||
cwd: command.cwd,
|
||||
agent: command.agent,
|
||||
});
|
||||
|
||||
return { id: session.id, status: session.status };
|
||||
} catch (error) {
|
||||
const detail = error instanceof Error ? error.message : String(error);
|
||||
lastDetail = detail;
|
||||
const retryable = isTransientSessionCreateError(detail);
|
||||
const canRetry = retryable && attempt < CREATE_SESSION_MAX_ATTEMPTS;
|
||||
|
||||
if (!canRetry) {
|
||||
break;
|
||||
}
|
||||
|
||||
const waitMs = CREATE_SESSION_RETRY_BASE_MS * attempt;
|
||||
logActorWarning("sandbox-instance", "createSession transient failure; retrying", {
|
||||
workspaceId: c.state.workspaceId,
|
||||
providerId: c.state.providerId,
|
||||
sandboxId: c.state.sandboxId,
|
||||
attempt,
|
||||
maxAttempts: CREATE_SESSION_MAX_ATTEMPTS,
|
||||
waitMs,
|
||||
error: detail,
|
||||
});
|
||||
await delay(waitMs);
|
||||
}
|
||||
}
|
||||
|
||||
const attemptLabel = attemptsMade === 1 ? "attempt" : "attempts";
|
||||
return {
|
||||
id: null,
|
||||
status: "error",
|
||||
error: `sandbox-agent createSession failed after ${attemptsMade} ${attemptLabel}: ${lastDetail}`,
|
||||
};
|
||||
}
|
||||
|
||||
async function sendPromptMutation(c: any, command: SendPromptCommand): Promise<void> {
|
||||
const client = await getSandboxAgentClient(c);
|
||||
await client.sendPrompt({
|
||||
sessionId: command.sessionId,
|
||||
prompt: command.prompt,
|
||||
notification: command.notification,
|
||||
});
|
||||
}
|
||||
|
||||
async function cancelSessionMutation(c: any, command: SessionControlCommand): Promise<void> {
|
||||
const client = await getSandboxAgentClient(c);
|
||||
await client.cancelSession(command.sessionId);
|
||||
}
|
||||
|
||||
async function destroySessionMutation(c: any, command: SessionControlCommand): Promise<void> {
|
||||
const client = await getSandboxAgentClient(c);
|
||||
await client.destroySession(command.sessionId);
|
||||
}
|
||||
|
||||
async function runSandboxInstanceWorkflow(ctx: any): Promise<void> {
|
||||
await ctx.loop("sandbox-instance-command-loop", async (loopCtx: any) => {
|
||||
const msg = await loopCtx.queue.next("next-sandbox-instance-command", {
|
||||
names: [...SANDBOX_INSTANCE_QUEUE_NAMES],
|
||||
completable: true,
|
||||
});
|
||||
if (!msg) {
|
||||
return Loop.continue(undefined);
|
||||
}
|
||||
|
||||
if (msg.name === "sandboxInstance.command.ensure") {
|
||||
await loopCtx.step("sandbox-instance-ensure", async () => ensureSandboxMutation(loopCtx, msg.body as EnsureSandboxCommand));
|
||||
await msg.complete({ ok: true });
|
||||
return Loop.continue(undefined);
|
||||
}
|
||||
|
||||
if (msg.name === "sandboxInstance.command.updateHealth") {
|
||||
await loopCtx.step("sandbox-instance-update-health", async () => updateHealthMutation(loopCtx, msg.body as HealthSandboxCommand));
|
||||
await msg.complete({ ok: true });
|
||||
return Loop.continue(undefined);
|
||||
}
|
||||
|
||||
if (msg.name === "sandboxInstance.command.destroy") {
|
||||
await loopCtx.step("sandbox-instance-destroy", async () => destroySandboxMutation(loopCtx));
|
||||
await msg.complete({ ok: true });
|
||||
return Loop.continue(undefined);
|
||||
}
|
||||
|
||||
if (msg.name === "sandboxInstance.command.createSession") {
|
||||
const result = await loopCtx.step({
|
||||
name: "sandbox-instance-create-session",
|
||||
timeout: CREATE_SESSION_STEP_TIMEOUT_MS,
|
||||
run: async () => createSessionMutation(loopCtx, msg.body as CreateSessionCommand),
|
||||
});
|
||||
await msg.complete(result);
|
||||
return Loop.continue(undefined);
|
||||
}
|
||||
|
||||
if (msg.name === "sandboxInstance.command.sendPrompt") {
|
||||
await loopCtx.step("sandbox-instance-send-prompt", async () => sendPromptMutation(loopCtx, msg.body as SendPromptCommand));
|
||||
await msg.complete({ ok: true });
|
||||
return Loop.continue(undefined);
|
||||
}
|
||||
|
||||
if (msg.name === "sandboxInstance.command.cancelSession") {
|
||||
await loopCtx.step("sandbox-instance-cancel-session", async () => cancelSessionMutation(loopCtx, msg.body as SessionControlCommand));
|
||||
await msg.complete({ ok: true });
|
||||
return Loop.continue(undefined);
|
||||
}
|
||||
|
||||
if (msg.name === "sandboxInstance.command.destroySession") {
|
||||
await loopCtx.step("sandbox-instance-destroy-session", async () => destroySessionMutation(loopCtx, msg.body as SessionControlCommand));
|
||||
await msg.complete({ ok: true });
|
||||
}
|
||||
|
||||
return Loop.continue(undefined);
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Actor representing one provisioned sandbox (keyed by providerId + sandboxId)
 * within a workspace.
 *
 * Read-style actions (process listing/logs, session listing, status) call the
 * sandbox agent client or the local persist driver directly. Mutating actions
 * (ensure / updateHealth / destroy / createSession / sendPrompt / cancelSession /
 * destroySession) are serialized: each enqueues a command onto the actor's own
 * workflow queue via `selfSandboxInstance(c).send(..., { wait: true })` and waits
 * for the workflow loop to complete it.
 */
export const sandboxInstance = actor({
  db: sandboxInstanceDb,
  // One queue per known command name; the workflow loop consumes these.
  queues: Object.fromEntries(SANDBOX_INSTANCE_QUEUE_NAMES.map((name) => [name, queue()])),
  options: {
    // Generous ceiling: session creation can legitimately take minutes.
    actionTimeout: 5 * 60_000,
  },
  createState: (_c, input: SandboxInstanceInput) => ({
    workspaceId: input.workspaceId,
    providerId: input.providerId,
    sandboxId: input.sandboxId,
  }),
  actions: {
    /** Resolve the connection details used to reach this sandbox's agent. */
    async sandboxAgentConnection(c: any): Promise<SandboxAgentConnection> {
      return await loadAgentConfig(c);
    },

    /** Start a process inside the sandbox, then notify listeners that the process list changed. */
    async createProcess(c: any, request: ProcessCreateRequest): Promise<ProcessInfo> {
      const client = await getSandboxAgentClient(c);
      const created = await client.createProcess(request);
      broadcastProcessesUpdated(c);
      return created;
    },

    /** List processes currently known to the sandbox agent (remote read; no persistence). */
    async listProcesses(c: any): Promise<{ processes: ProcessInfo[] }> {
      const client = await getSandboxAgentClient(c);
      return await client.listProcesses();
    },

    /** Fetch (optionally follow) logs for a single process via the sandbox agent. */
    async getProcessLogs(c: any, request: { processId: string; query?: ProcessLogFollowQuery }): Promise<ProcessLogsResponse> {
      const client = await getSandboxAgentClient(c);
      return await client.getProcessLogs(request.processId, request.query);
    },

    /** Gracefully stop a process, then broadcast the updated process list. */
    async stopProcess(c: any, request: { processId: string; query?: ProcessSignalQuery }): Promise<ProcessInfo> {
      const client = await getSandboxAgentClient(c);
      const stopped = await client.stopProcess(request.processId, request.query);
      broadcastProcessesUpdated(c);
      return stopped;
    },

    /** Forcefully kill a process, then broadcast the updated process list. */
    async killProcess(c: any, request: { processId: string; query?: ProcessSignalQuery }): Promise<ProcessInfo> {
      const client = await getSandboxAgentClient(c);
      const killed = await client.killProcess(request.processId, request.query);
      broadcastProcessesUpdated(c);
      return killed;
    },

    /** Remove a process record from the sandbox agent, then broadcast the change. */
    async deleteProcess(c: any, request: { processId: string }): Promise<void> {
      const client = await getSandboxAgentClient(c);
      await client.deleteProcess(request.processId);
      broadcastProcessesUpdated(c);
    },

    /**
     * Report the provider-side lifecycle state of the sandbox.
     * Only the "daytona" provider is queried for real state; every other
     * provider currently reports "unknown".
     */
    async providerState(c: any): Promise<{ providerId: ProviderId; sandboxId: string; state: string; at: number }> {
      const at = Date.now();
      const { config, driver } = getActorRuntimeContext();

      if (c.state.providerId === "daytona") {
        const daytona = driver.daytona.createClient({
          apiUrl: config.providers.daytona.endpoint,
          apiKey: config.providers.daytona.apiKey,
        });
        const sandbox = await daytona.getSandbox(c.state.sandboxId);
        // Normalize provider state strings to lowercase; missing state maps to "unknown".
        const state = String(sandbox.state ?? "unknown").toLowerCase();
        return { providerId: c.state.providerId, sandboxId: c.state.sandboxId, state, at };
      }

      return {
        providerId: c.state.providerId,
        sandboxId: c.state.sandboxId,
        state: "unknown",
        at,
      };
    },

    /** Enqueue an "ensure" command (provision/verify the sandbox) and wait for completion. */
    async ensure(c, command: EnsureSandboxCommand): Promise<void> {
      const self = selfSandboxInstance(c);
      await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.ensure"), command, {
        wait: true,
        timeout: 60_000,
      });
    },

    /** Enqueue a health-update command and wait for completion. */
    async updateHealth(c, command: HealthSandboxCommand): Promise<void> {
      const self = selfSandboxInstance(c);
      await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.updateHealth"), command, {
        wait: true,
        timeout: 60_000,
      });
    },

    /** Enqueue destruction of the sandbox itself and wait for completion. */
    async destroy(c): Promise<void> {
      const self = selfSandboxInstance(c);
      await self.send(
        sandboxInstanceWorkflowQueueName("sandboxInstance.command.destroy"),
        {},
        {
          wait: true,
          timeout: 60_000,
        },
      );
    },

    /**
     * Create an agent session in the sandbox. Unlike the other mutations, this
     * returns the workflow's queue response (unwrapped via expectQueueResponse).
     * Uses the long 5-minute timeout to match the workflow step's own timeout.
     */
    async createSession(c: any, command: CreateSessionCommand): Promise<CreateSessionResult> {
      const self = selfSandboxInstance(c);
      return expectQueueResponse<CreateSessionResult>(
        await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.createSession"), command, {
          wait: true,
          timeout: 5 * 60_000,
        }),
      );
    },

    /**
     * List sessions, preferring a live read from the sandbox agent.
     * Falls back to the locally persisted sessions when the remote read fails,
     * logging a warning with identifying context.
     */
    async listSessions(c: any, command?: ListSessionsCommand): Promise<{ items: SessionRecord[]; nextCursor?: string }> {
      const persist = new SandboxInstancePersistDriver(c.db);
      try {
        const client = await getSandboxAgentClient(c);

        const page = await client.listSessions({
          cursor: command?.cursor,
          limit: command?.limit,
        });

        return {
          items: page.items,
          nextCursor: page.nextCursor,
        };
      } catch (error) {
        logActorWarning("sandbox-instance", "listSessions remote read failed; using persisted fallback", {
          workspaceId: c.state.workspaceId,
          providerId: c.state.providerId,
          sandboxId: c.state.sandboxId,
          error: resolveErrorMessage(error),
        });
        return await persist.listSessions({
          cursor: command?.cursor,
          limit: command?.limit,
        });
      }
    },

    /** List persisted events for one session (local read only; never hits the agent). */
    async listSessionEvents(c: any, command: ListSessionEventsCommand): Promise<{ items: SessionEvent[]; nextCursor?: string }> {
      const persist = new SandboxInstancePersistDriver(c.db);
      return await persist.listEvents({
        sessionId: command.sessionId,
        cursor: command.cursor,
        limit: command.limit,
      });
    },

    /** Enqueue a prompt for a session; long timeout because the workflow may wait on the agent. */
    async sendPrompt(c, command: SendPromptCommand): Promise<void> {
      const self = selfSandboxInstance(c);
      await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.sendPrompt"), command, {
        wait: true,
        timeout: 5 * 60_000,
      });
    },

    /** Enqueue cancellation of a running session and wait for completion. */
    async cancelSession(c, command: SessionControlCommand): Promise<void> {
      const self = selfSandboxInstance(c);
      await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.cancelSession"), command, {
        wait: true,
        timeout: 60_000,
      });
    },

    /** Enqueue destruction of a single session and wait for completion. */
    async destroySession(c, command: SessionControlCommand): Promise<void> {
      const self = selfSandboxInstance(c);
      await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.destroySession"), command, {
        wait: true,
        timeout: 60_000,
      });
    },

    /** Derive a session's status ("running" | "idle" | "error") from persisted data only. */
    async sessionStatus(c, command: SessionStatusCommand): Promise<{ id: string; status: "running" | "idle" | "error" }> {
      return await derivePersistedSessionStatus(new SandboxInstancePersistDriver(c.db), command.sessionId);
    },
  },
  // Long-running workflow loop that drains the command queues declared above.
  run: workflow(runSandboxInstanceWorkflow),
});
|
||||
266
foundry/packages/backend/src/actors/sandbox-instance/persist.ts
Normal file
266
foundry/packages/backend/src/actors/sandbox-instance/persist.ts
Normal file
|
|
@ -0,0 +1,266 @@
|
|||
import { and, asc, count, eq } from "drizzle-orm";
|
||||
import type { ListEventsRequest, ListPage, ListPageRequest, SessionEvent, SessionPersistDriver, SessionRecord } from "sandbox-agent";
|
||||
import { sandboxSessionEvents, sandboxSessions } from "./db/schema.js";
|
||||
|
||||
// Retention cap: maximum session rows kept per sandbox instance before the oldest are evicted.
const DEFAULT_MAX_SESSIONS = 1024;
// Retention cap: maximum persisted events kept per session before the oldest are trimmed.
const DEFAULT_MAX_EVENTS_PER_SESSION = 500;
// Default page size for list queries when the caller does not supply a limit.
const DEFAULT_LIST_LIMIT = 100;
|
||||
|
||||
function normalizeCap(value: number | undefined, fallback: number): number {
|
||||
if (!Number.isFinite(value) || (value ?? 0) < 1) {
|
||||
return fallback;
|
||||
}
|
||||
return Math.floor(value as number);
|
||||
}
|
||||
|
||||
function parseCursor(cursor: string | undefined): number {
|
||||
if (!cursor) return 0;
|
||||
const parsed = Number.parseInt(cursor, 10);
|
||||
if (!Number.isFinite(parsed) || parsed < 0) return 0;
|
||||
return parsed;
|
||||
}
|
||||
|
||||
export function resolveEventListOffset(params: { cursor?: string; total: number; limit: number }): number {
|
||||
if (params.cursor != null) {
|
||||
return parseCursor(params.cursor);
|
||||
}
|
||||
return Math.max(0, params.total - params.limit);
|
||||
}
|
||||
|
||||
function safeStringify(value: unknown): string {
|
||||
return JSON.stringify(value, (_key, item) => {
|
||||
if (typeof item === "bigint") return item.toString();
|
||||
return item;
|
||||
});
|
||||
}
|
||||
|
||||
function safeParseJson<T>(value: string | null | undefined, fallback: T): T {
|
||||
if (!value) return fallback;
|
||||
try {
|
||||
return JSON.parse(value) as T;
|
||||
} catch {
|
||||
return fallback;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Optional retention overrides for SandboxInstancePersistDriver.
 * Values are normalized via normalizeCap, so non-finite or sub-1 values
 * fall back to the module defaults.
 */
export interface SandboxInstancePersistDriverOptions {
  // Maximum session rows retained before oldest-first eviction.
  maxSessions?: number;
  // Maximum events retained per session before oldest-first trimming.
  maxEventsPerSession?: number;
}
|
||||
|
||||
/**
 * SQLite-backed (drizzle) persistence for sandbox sessions and their events.
 *
 * Acts as a durable mirror of agent-side session state: sessions and events are
 * upserted by id, and both tables are capped with oldest-first eviction
 * (sessions by createdAt, events by eventIndex) so storage stays bounded.
 *
 * Cursors are plain stringified offsets (see parseCursor / resolveEventListOffset).
 */
export class SandboxInstancePersistDriver implements SessionPersistDriver {
  private readonly maxSessions: number;
  private readonly maxEventsPerSession: number;

  constructor(
    // Drizzle database handle; typed `any` here. The synchronous-result API
    // (.get()/.all()/.run()) is used throughout — assumes a sqlite driver that
    // supports it. TODO confirm against the actor db wiring.
    private readonly db: any,
    options: SandboxInstancePersistDriverOptions = {},
  ) {
    this.maxSessions = normalizeCap(options.maxSessions, DEFAULT_MAX_SESSIONS);
    this.maxEventsPerSession = normalizeCap(options.maxEventsPerSession, DEFAULT_MAX_EVENTS_PER_SESSION);
  }

  /** Load one session row by id, or null when absent. */
  async getSession(id: string): Promise<SessionRecord | null> {
    const row = await this.db
      .select({
        id: sandboxSessions.id,
        agent: sandboxSessions.agent,
        agentSessionId: sandboxSessions.agentSessionId,
        lastConnectionId: sandboxSessions.lastConnectionId,
        createdAt: sandboxSessions.createdAt,
        destroyedAt: sandboxSessions.destroyedAt,
        sessionInitJson: sandboxSessions.sessionInitJson,
      })
      .from(sandboxSessions)
      .where(eq(sandboxSessions.id, id))
      .get();

    if (!row) return null;

    return {
      id: row.id,
      agent: row.agent,
      agentSessionId: row.agentSessionId,
      lastConnectionId: row.lastConnectionId,
      createdAt: row.createdAt,
      // SQL NULL becomes undefined in the record shape.
      destroyedAt: row.destroyedAt ?? undefined,
      sessionInit: safeParseJson(row.sessionInitJson, undefined),
    };
  }

  /**
   * Page through sessions ordered by (createdAt, id) ascending.
   * The cursor is the stringified offset of the next page; absent when the
   * final page has been reached.
   */
  async listSessions(request: ListPageRequest = {}): Promise<ListPage<SessionRecord>> {
    const offset = parseCursor(request.cursor);
    const limit = normalizeCap(request.limit, DEFAULT_LIST_LIMIT);

    const rows = await this.db
      .select({
        id: sandboxSessions.id,
        agent: sandboxSessions.agent,
        agentSessionId: sandboxSessions.agentSessionId,
        lastConnectionId: sandboxSessions.lastConnectionId,
        createdAt: sandboxSessions.createdAt,
        destroyedAt: sandboxSessions.destroyedAt,
        sessionInitJson: sandboxSessions.sessionInitJson,
      })
      .from(sandboxSessions)
      .orderBy(asc(sandboxSessions.createdAt), asc(sandboxSessions.id))
      .limit(limit)
      .offset(offset)
      .all();

    const items = rows.map((row) => ({
      id: row.id,
      agent: row.agent,
      agentSessionId: row.agentSessionId,
      lastConnectionId: row.lastConnectionId,
      createdAt: row.createdAt,
      destroyedAt: row.destroyedAt ?? undefined,
      sessionInit: safeParseJson(row.sessionInitJson, undefined),
    }));

    // Total is read after the page query; a concurrent write between the two
    // reads could skew nextCursor by a row — acceptable for paging.
    const totalRow = await this.db.select({ c: count() }).from(sandboxSessions).get();
    const total = Number(totalRow?.c ?? 0);

    const nextOffset = offset + items.length;
    return {
      items,
      nextCursor: nextOffset < total ? String(nextOffset) : undefined,
    };
  }

  /**
   * Upsert a session row by id, then evict the oldest sessions (and their
   * events) if the table exceeds maxSessions.
   */
  async updateSession(session: SessionRecord): Promise<void> {
    const now = Date.now();
    await this.db
      .insert(sandboxSessions)
      .values({
        id: session.id,
        agent: session.agent,
        agentSessionId: session.agentSessionId,
        lastConnectionId: session.lastConnectionId,
        createdAt: session.createdAt ?? now,
        destroyedAt: session.destroyedAt ?? null,
        sessionInitJson: session.sessionInit ? safeStringify(session.sessionInit) : null,
      })
      .onConflictDoUpdate({
        target: sandboxSessions.id,
        set: {
          agent: session.agent,
          agentSessionId: session.agentSessionId,
          lastConnectionId: session.lastConnectionId,
          createdAt: session.createdAt ?? now,
          destroyedAt: session.destroyedAt ?? null,
          sessionInitJson: session.sessionInit ? safeStringify(session.sessionInit) : null,
        },
      })
      .run();

    // Evict oldest sessions beyond cap.
    const totalRow = await this.db.select({ c: count() }).from(sandboxSessions).get();
    const total = Number(totalRow?.c ?? 0);
    const overflow = total - this.maxSessions;
    if (overflow <= 0) return;

    const toRemove = await this.db
      .select({ id: sandboxSessions.id })
      .from(sandboxSessions)
      .orderBy(asc(sandboxSessions.createdAt), asc(sandboxSessions.id))
      .limit(overflow)
      .all();

    // Delete each session's events first, then the session row itself.
    // Not wrapped in a transaction — a crash mid-loop leaves partial eviction,
    // which the next updateSession call will finish.
    for (const row of toRemove) {
      await this.db.delete(sandboxSessionEvents).where(eq(sandboxSessionEvents.sessionId, row.id)).run();
      await this.db.delete(sandboxSessions).where(eq(sandboxSessions.id, row.id)).run();
    }
  }

  /**
   * Page through one session's events ordered by (eventIndex, id) ascending.
   * Without a cursor, the query starts at the tail (most recent `limit` events);
   * with a cursor, it resumes at the decoded offset.
   */
  async listEvents(request: ListEventsRequest): Promise<ListPage<SessionEvent>> {
    const limit = normalizeCap(request.limit, DEFAULT_LIST_LIMIT);
    const totalRow = await this.db.select({ c: count() }).from(sandboxSessionEvents).where(eq(sandboxSessionEvents.sessionId, request.sessionId)).get();
    const total = Number(totalRow?.c ?? 0);
    const offset = resolveEventListOffset({
      cursor: request.cursor,
      total,
      limit,
    });

    const rows = await this.db
      .select({
        id: sandboxSessionEvents.id,
        sessionId: sandboxSessionEvents.sessionId,
        eventIndex: sandboxSessionEvents.eventIndex,
        createdAt: sandboxSessionEvents.createdAt,
        connectionId: sandboxSessionEvents.connectionId,
        sender: sandboxSessionEvents.sender,
        payloadJson: sandboxSessionEvents.payloadJson,
      })
      .from(sandboxSessionEvents)
      .where(eq(sandboxSessionEvents.sessionId, request.sessionId))
      .orderBy(asc(sandboxSessionEvents.eventIndex), asc(sandboxSessionEvents.id))
      .limit(limit)
      .offset(offset)
      .all();

    const items: SessionEvent[] = rows.map((row) => ({
      id: row.id,
      eventIndex: row.eventIndex,
      sessionId: row.sessionId,
      createdAt: row.createdAt,
      connectionId: row.connectionId,
      // Column is free text; cast to the SessionEvent sender union unchecked.
      sender: row.sender as any,
      payload: safeParseJson(row.payloadJson, null),
    }));

    const nextOffset = offset + items.length;
    return {
      items,
      nextCursor: nextOffset < total ? String(nextOffset) : undefined,
    };
  }

  /**
   * Upsert one event by id, then trim the session's oldest events if the
   * per-session cap is exceeded.
   */
  async insertEvent(event: SessionEvent): Promise<void> {
    await this.db
      .insert(sandboxSessionEvents)
      .values({
        id: event.id,
        sessionId: event.sessionId,
        eventIndex: event.eventIndex,
        createdAt: event.createdAt,
        connectionId: event.connectionId,
        sender: event.sender,
        payloadJson: safeStringify(event.payload),
      })
      .onConflictDoUpdate({
        target: sandboxSessionEvents.id,
        set: {
          sessionId: event.sessionId,
          eventIndex: event.eventIndex,
          createdAt: event.createdAt,
          connectionId: event.connectionId,
          sender: event.sender,
          payloadJson: safeStringify(event.payload),
        },
      })
      .run();

    // Trim oldest events beyond cap.
    const totalRow = await this.db.select({ c: count() }).from(sandboxSessionEvents).where(eq(sandboxSessionEvents.sessionId, event.sessionId)).get();
    const total = Number(totalRow?.c ?? 0);
    const overflow = total - this.maxEventsPerSession;
    if (overflow <= 0) return;

    const toRemove = await this.db
      .select({ id: sandboxSessionEvents.id })
      .from(sandboxSessionEvents)
      .where(eq(sandboxSessionEvents.sessionId, event.sessionId))
      .orderBy(asc(sandboxSessionEvents.eventIndex), asc(sandboxSessionEvents.id))
      .limit(overflow)
      .all();

    for (const row of toRemove) {
      await this.db
        .delete(sandboxSessionEvents)
        .where(and(eq(sandboxSessionEvents.sessionId, event.sessionId), eq(sandboxSessionEvents.id, row.id)))
        .run();
    }
  }
}
|
||||
108
foundry/packages/backend/src/actors/task-status-sync/index.ts
Normal file
108
foundry/packages/backend/src/actors/task-status-sync/index.ts
Normal file
|
|
@ -0,0 +1,108 @@
|
|||
import { actor, queue } from "rivetkit";
|
||||
import { workflow } from "rivetkit/workflow";
|
||||
import type { ProviderId } from "@sandbox-agent/foundry-shared";
|
||||
import { getTask, getSandboxInstance, selfTaskStatusSync } from "../handles.js";
|
||||
import { logActorWarning, resolveErrorMessage, resolveErrorStack } from "../logging.js";
|
||||
import { type PollingControlState, runWorkflowPollingLoop } from "../polling.js";
|
||||
|
||||
/**
 * Construction input for the task-status-sync actor: identifies the session to
 * poll (workspace → provider/sandbox → session) and the parent task to report
 * status back to (workspace → repo → task).
 */
export interface TaskStatusSyncInput {
  workspaceId: string;
  repoId: string;
  taskId: string;
  providerId: ProviderId;
  sandboxId: string;
  // The single session whose status this sync actor tracks.
  sessionId: string;
  // Poll period in milliseconds; adjustable later via setIntervalMs.
  intervalMs: number;
}
|
||||
|
||||
/** Payload for the set_interval control command: the new poll period in milliseconds. */
interface SetIntervalCommand {
  intervalMs: number;
}
|
||||
|
||||
/**
 * Persistent actor state: the polling target identifiers plus the shared
 * polling controls (running flag / intervalMs) from PollingControlState.
 */
interface TaskStatusSyncState extends PollingControlState {
  workspaceId: string;
  repoId: string;
  taskId: string;
  providerId: ProviderId;
  sandboxId: string;
  sessionId: string;
}
|
||||
|
||||
// Queue names for the polling-loop control commands; each maps to one actor
// queue below and is consumed by runWorkflowPollingLoop.
const CONTROL = {
  start: "task.status_sync.control.start",
  stop: "task.status_sync.control.stop",
  setInterval: "task.status_sync.control.set_interval",
  force: "task.status_sync.control.force",
} as const;
|
||||
|
||||
async function pollSessionStatus(c: { state: TaskStatusSyncState }): Promise<void> {
|
||||
const sandboxInstance = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, c.state.sandboxId);
|
||||
const status = await sandboxInstance.sessionStatus({ sessionId: c.state.sessionId });
|
||||
|
||||
const parent = getTask(c, c.state.workspaceId, c.state.repoId, c.state.taskId);
|
||||
await parent.syncWorkbenchSessionStatus({
|
||||
sessionId: c.state.sessionId,
|
||||
status: status.status,
|
||||
at: Date.now(),
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Polling worker actor: periodically reads one session's status from its
 * sandbox-instance actor and mirrors it onto the parent task actor.
 * Per the backend actor hierarchy, sync actors are feed-only workers and must
 * not become a source of truth.
 *
 * All public actions are thin control-plane wrappers that enqueue a command
 * onto the actor's own control queues and wait for the polling loop to
 * acknowledge it.
 */
export const taskStatusSync = actor({
  queues: {
    [CONTROL.start]: queue(),
    [CONTROL.stop]: queue(),
    [CONTROL.setInterval]: queue(),
    [CONTROL.force]: queue(),
  },
  options: {
    // Polling actors rely on timer-based wakeups; sleeping would pause the timer and stop polling.
    noSleep: true,
  },
  // Polling starts enabled (running: true) as soon as the actor is created.
  createState: (_c, input: TaskStatusSyncInput): TaskStatusSyncState => ({
    workspaceId: input.workspaceId,
    repoId: input.repoId,
    taskId: input.taskId,
    providerId: input.providerId,
    sandboxId: input.sandboxId,
    sessionId: input.sessionId,
    intervalMs: input.intervalMs,
    running: true,
  }),
  actions: {
    /** Resume polling. */
    async start(c): Promise<void> {
      const self = selfTaskStatusSync(c);
      await self.send(CONTROL.start, {}, { wait: true, timeout: 15_000 });
    },

    /** Pause polling. */
    async stop(c): Promise<void> {
      const self = selfTaskStatusSync(c);
      await self.send(CONTROL.stop, {}, { wait: true, timeout: 15_000 });
    },

    /** Change the poll period. */
    async setIntervalMs(c, payload: SetIntervalCommand): Promise<void> {
      const self = selfTaskStatusSync(c);
      await self.send(CONTROL.setInterval, payload, { wait: true, timeout: 15_000 });
    },

    /** Trigger an immediate poll; long timeout because it waits for the poll itself. */
    async force(c): Promise<void> {
      const self = selfTaskStatusSync(c);
      await self.send(CONTROL.force, {}, { wait: true, timeout: 5 * 60_000 });
    },
  },
  run: workflow(async (ctx) => {
    await runWorkflowPollingLoop<TaskStatusSyncState>(ctx, {
      loopName: "task-status-sync-loop",
      control: CONTROL,
      onPoll: async (loopCtx) => {
        // Poll failures are logged and swallowed so one bad tick never kills the loop.
        try {
          await pollSessionStatus(loopCtx);
        } catch (error) {
          logActorWarning("task-status-sync", "poll failed", {
            error: resolveErrorMessage(error),
            stack: resolveErrorStack(error),
          });
        }
      },
    });
  }),
});
|
||||
5
foundry/packages/backend/src/actors/task/db/db.ts
Normal file
5
foundry/packages/backend/src/actors/task/db/db.ts
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
import { db } from "rivetkit/db/drizzle";
|
||||
import * as schema from "./schema.js";
|
||||
import migrations from "./migrations.js";
|
||||
|
||||
// Per-task actor database: drizzle schema plus its compiled migrations bundle.
export const taskDb = db({ schema, migrations });
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
import { defineConfig } from "rivetkit/db/drizzle";
|
||||
|
||||
// Drizzle-kit config for the task actor's database: generated migrations land
// in ./drizzle next to the schema they are derived from.
export default defineConfig({
  out: "./src/actors/task/db/drizzle",
  schema: "./src/actors/task/db/schema.ts",
});
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
-- Initial task-actor schema: the task record itself plus a singleton runtime row.
-- NOTE(review): this migration has presumably already been applied; comment-only
-- edits change the file's checksum — confirm the migrator tolerates that before merging.
CREATE TABLE `task` (
	`id` integer PRIMARY KEY NOT NULL,
	`branch_name` text NOT NULL,
	`title` text NOT NULL,
	`task` text NOT NULL,
	`provider_id` text NOT NULL,
	`status` text NOT NULL,
	`agent_type` text DEFAULT 'claude',
	`auto_committed` integer DEFAULT 0,
	`pushed` integer DEFAULT 0,
	`pr_submitted` integer DEFAULT 0,
	`needs_push` integer DEFAULT 0,
	`created_at` integer NOT NULL,
	`updated_at` integer NOT NULL
);
--> statement-breakpoint
-- Mutable runtime pointers for the task (active sandbox/session), kept apart
-- from the durable task record above.
CREATE TABLE `task_runtime` (
	`id` integer PRIMARY KEY NOT NULL,
	`sandbox_id` text,
	`session_id` text,
	`switch_target` text,
	`status_message` text,
	`updated_at` integer NOT NULL
);
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
-- Drop unused git-state flags from the task record (only pr_submitted survives).
ALTER TABLE `task` DROP COLUMN `auto_committed`;--> statement-breakpoint
ALTER TABLE `task` DROP COLUMN `pushed`;--> statement-breakpoint
ALTER TABLE `task` DROP COLUMN `needs_push`;
|
||||
|
|
@ -0,0 +1,38 @@
|
|||
-- Introduce multi-sandbox tracking: rename the runtime pointers to active_*,
-- add a task_sandboxes table, and backfill it from the singleton runtime row.
ALTER TABLE `task_runtime` RENAME COLUMN "sandbox_id" TO "active_sandbox_id";--> statement-breakpoint
ALTER TABLE `task_runtime` RENAME COLUMN "session_id" TO "active_session_id";--> statement-breakpoint
ALTER TABLE `task_runtime` RENAME COLUMN "switch_target" TO "active_switch_target";--> statement-breakpoint
CREATE TABLE `task_sandboxes` (
	`sandbox_id` text PRIMARY KEY NOT NULL,
	`provider_id` text NOT NULL,
	`switch_target` text NOT NULL,
	`cwd` text,
	`status_message` text,
	`created_at` integer NOT NULL,
	`updated_at` integer NOT NULL
);
--> statement-breakpoint
ALTER TABLE `task_runtime` ADD `active_cwd` text;
--> statement-breakpoint
-- Backfill: copy the single active sandbox (runtime row id=1) into task_sandboxes,
-- borrowing provider_id/created_at from the task row (id=1).
-- `active_cwd` was added by the statement just above, so backfilled rows always carry NULL cwd.
INSERT INTO `task_sandboxes` (
	`sandbox_id`,
	`provider_id`,
	`switch_target`,
	`cwd`,
	`status_message`,
	`created_at`,
	`updated_at`
)
SELECT
	r.`active_sandbox_id`,
	(SELECT h.`provider_id` FROM `task` h WHERE h.`id` = 1),
	r.`active_switch_target`,
	r.`active_cwd`,
	r.`status_message`,
	COALESCE((SELECT h.`created_at` FROM `task` h WHERE h.`id` = 1), r.`updated_at`),
	r.`updated_at`
FROM `task_runtime` r
WHERE
	r.`id` = 1
	AND r.`active_sandbox_id` IS NOT NULL
	AND r.`active_switch_target` IS NOT NULL
ON CONFLICT(`sandbox_id`) DO NOTHING;
|
||||
|
|
@ -0,0 +1,48 @@
|
|||
-- Allow tasks to exist before their branch/title are determined.
-- Drizzle doesn't support altering column nullability in SQLite directly, so rebuild the table.
-- NOTE(review): this file contains no `--> statement-breakpoint` markers, so the
-- migrator did not execute all statements; a later migration re-runs this rebuild
-- with proper breakpoints. Do not "fix" this file retroactively — its recorded
-- state is what the follow-up migration compensates for.

PRAGMA foreign_keys=off;

CREATE TABLE `task__new` (
	`id` integer PRIMARY KEY NOT NULL,
	`branch_name` text,
	`title` text,
	`task` text NOT NULL,
	`provider_id` text NOT NULL,
	`status` text NOT NULL,
	`agent_type` text DEFAULT 'claude',
	`pr_submitted` integer DEFAULT 0,
	`created_at` integer NOT NULL,
	`updated_at` integer NOT NULL
);

INSERT INTO `task__new` (
	`id`,
	`branch_name`,
	`title`,
	`task`,
	`provider_id`,
	`status`,
	`agent_type`,
	`pr_submitted`,
	`created_at`,
	`updated_at`
)
SELECT
	`id`,
	`branch_name`,
	`title`,
	`task`,
	`provider_id`,
	`status`,
	`agent_type`,
	`pr_submitted`,
	`created_at`,
	`updated_at`
FROM `task`;

DROP TABLE `task`;
ALTER TABLE `task__new` RENAME TO `task`;

PRAGMA foreign_keys=on;
|
||||
|
||||
|
|
@ -0,0 +1,57 @@
|
|||
-- Fix: make branch_name/title nullable during initial "naming" stage.
-- 0003 was missing statement breakpoints, so drizzle's migrator marked it applied without executing all statements.
-- Rebuild the table again with proper statement breakpoints.

PRAGMA foreign_keys=off;
--> statement-breakpoint

-- Defensive cleanup: 0003 may have left a half-built task__new behind.
DROP TABLE IF EXISTS `task__new`;
--> statement-breakpoint

CREATE TABLE `task__new` (
	`id` integer PRIMARY KEY NOT NULL,
	`branch_name` text,
	`title` text,
	`task` text NOT NULL,
	`provider_id` text NOT NULL,
	`status` text NOT NULL,
	`agent_type` text DEFAULT 'claude',
	`pr_submitted` integer DEFAULT 0,
	`created_at` integer NOT NULL,
	`updated_at` integer NOT NULL
);
--> statement-breakpoint

INSERT INTO `task__new` (
	`id`,
	`branch_name`,
	`title`,
	`task`,
	`provider_id`,
	`status`,
	`agent_type`,
	`pr_submitted`,
	`created_at`,
	`updated_at`
)
SELECT
	`id`,
	`branch_name`,
	`title`,
	`task`,
	`provider_id`,
	`status`,
	`agent_type`,
	`pr_submitted`,
	`created_at`,
	`updated_at`
FROM `task`;
--> statement-breakpoint

DROP TABLE `task`;
--> statement-breakpoint

ALTER TABLE `task__new` RENAME TO `task`;
--> statement-breakpoint

PRAGMA foreign_keys=on;
|
||||
|
|
@ -0,0 +1 @@
|
|||
-- Link each task sandbox to its backing sandbox-instance actor (nullable for pre-existing rows).
ALTER TABLE `task_sandboxes` ADD `sandbox_actor_id` text;
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
-- Backend-owned workbench state per session: unread flag, draft prompt text and
-- attachments, lifecycle flags, and a thinking-since timestamp.
CREATE TABLE `task_workbench_sessions` (
	`session_id` text PRIMARY KEY NOT NULL,
	`session_name` text NOT NULL,
	`model` text NOT NULL,
	`unread` integer DEFAULT 0 NOT NULL,
	`draft_text` text DEFAULT '' NOT NULL,
	`draft_attachments_json` text DEFAULT '[]' NOT NULL,
	`draft_updated_at` integer,
	`created` integer DEFAULT 1 NOT NULL,
	`closed` integer DEFAULT 0 NOT NULL,
	`thinking_since_ms` integer,
	`created_at` integer NOT NULL,
	`updated_at` integer NOT NULL
);
|
||||
|
|
@ -0,0 +1,176 @@
|
|||
{
|
||||
"version": "6",
|
||||
"dialect": "sqlite",
|
||||
"id": "9b004d3b-0722-4bb5-a410-d47635db7df3",
|
||||
"prevId": "00000000-0000-0000-0000-000000000000",
|
||||
"tables": {
|
||||
"task": {
|
||||
"name": "task",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "integer",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"branch_name": {
|
||||
"name": "branch_name",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"title": {
|
||||
"name": "title",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"task": {
|
||||
"name": "task",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"provider_id": {
|
||||
"name": "provider_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"status": {
|
||||
"name": "status",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"agent_type": {
|
||||
"name": "agent_type",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": "'claude'"
|
||||
},
|
||||
"auto_committed": {
|
||||
"name": "auto_committed",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": 0
|
||||
},
|
||||
"pushed": {
|
||||
"name": "pushed",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": 0
|
||||
},
|
||||
"pr_submitted": {
|
||||
"name": "pr_submitted",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": 0
|
||||
},
|
||||
"needs_push": {
|
||||
"name": "needs_push",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": 0
|
||||
},
|
||||
"created_at": {
|
||||
"name": "created_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"task_runtime": {
|
||||
"name": "task_runtime",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "integer",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"sandbox_id": {
|
||||
"name": "sandbox_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"session_id": {
|
||||
"name": "session_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"switch_target": {
|
||||
"name": "switch_target",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"status_message": {
|
||||
"name": "status_message",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
}
|
||||
},
|
||||
"views": {},
|
||||
"enums": {},
|
||||
"_meta": {
|
||||
"schemas": {},
|
||||
"tables": {},
|
||||
"columns": {}
|
||||
},
|
||||
"internal": {
|
||||
"indexes": {}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,152 @@
|
|||
{
|
||||
"version": "6",
|
||||
"dialect": "sqlite",
|
||||
"id": "0fca0f14-69df-4fca-bc52-29e902247909",
|
||||
"prevId": "9b004d3b-0722-4bb5-a410-d47635db7df3",
|
||||
"tables": {
|
||||
"task": {
|
||||
"name": "task",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "integer",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"branch_name": {
|
||||
"name": "branch_name",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"title": {
|
||||
"name": "title",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"task": {
|
||||
"name": "task",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"provider_id": {
|
||||
"name": "provider_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"status": {
|
||||
"name": "status",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"agent_type": {
|
||||
"name": "agent_type",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": "'claude'"
|
||||
},
|
||||
"pr_submitted": {
|
||||
"name": "pr_submitted",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": 0
|
||||
},
|
||||
"created_at": {
|
||||
"name": "created_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"task_runtime": {
|
||||
"name": "task_runtime",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "integer",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"sandbox_id": {
|
||||
"name": "sandbox_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"session_id": {
|
||||
"name": "session_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"switch_target": {
|
||||
"name": "switch_target",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"status_message": {
|
||||
"name": "status_message",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
}
|
||||
},
|
||||
"views": {},
|
||||
"enums": {},
|
||||
"_meta": {
|
||||
"schemas": {},
|
||||
"tables": {},
|
||||
"columns": {}
|
||||
},
|
||||
"internal": {
|
||||
"indexes": {}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,222 @@
|
|||
{
|
||||
"version": "6",
|
||||
"dialect": "sqlite",
|
||||
"id": "72cef919-e545-48be-a7c0-7ac74cfcf9e6",
|
||||
"prevId": "0fca0f14-69df-4fca-bc52-29e902247909",
|
||||
"tables": {
|
||||
"task": {
|
||||
"name": "task",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "integer",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"branch_name": {
|
||||
"name": "branch_name",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"title": {
|
||||
"name": "title",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"task": {
|
||||
"name": "task",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"provider_id": {
|
||||
"name": "provider_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"status": {
|
||||
"name": "status",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"agent_type": {
|
||||
"name": "agent_type",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": "'claude'"
|
||||
},
|
||||
"pr_submitted": {
|
||||
"name": "pr_submitted",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false,
|
||||
"default": 0
|
||||
},
|
||||
"created_at": {
|
||||
"name": "created_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"task_runtime": {
|
||||
"name": "task_runtime",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "integer",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"active_sandbox_id": {
|
||||
"name": "active_sandbox_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"active_session_id": {
|
||||
"name": "active_session_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"active_switch_target": {
|
||||
"name": "active_switch_target",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"active_cwd": {
|
||||
"name": "active_cwd",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"status_message": {
|
||||
"name": "status_message",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"task_sandboxes": {
|
||||
"name": "task_sandboxes",
|
||||
"columns": {
|
||||
"sandbox_id": {
|
||||
"name": "sandbox_id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"provider_id": {
|
||||
"name": "provider_id",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"switch_target": {
|
||||
"name": "switch_target",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"cwd": {
|
||||
"name": "cwd",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"status_message": {
|
||||
"name": "status_message",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": false,
|
||||
"autoincrement": false
|
||||
},
|
||||
"created_at": {
|
||||
"name": "created_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
}
|
||||
},
|
||||
"views": {},
|
||||
"enums": {},
|
||||
"_meta": {
|
||||
"schemas": {},
|
||||
"tables": {},
|
||||
"columns": {
|
||||
"\"task_runtime\".\"sandbox_id\"": "\"task_runtime\".\"active_sandbox_id\"",
|
||||
"\"task_runtime\".\"session_id\"": "\"task_runtime\".\"active_session_id\"",
|
||||
"\"task_runtime\".\"switch_target\"": "\"task_runtime\".\"active_switch_target\""
|
||||
}
|
||||
},
|
||||
"internal": {
|
||||
"indexes": {}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,48 @@
|
|||
{
|
||||
"version": "7",
|
||||
"dialect": "sqlite",
|
||||
"entries": [
|
||||
{
|
||||
"idx": 0,
|
||||
"version": "6",
|
||||
"when": 1770924374665,
|
||||
"tag": "0000_condemned_maria_hill",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 1,
|
||||
"version": "6",
|
||||
"when": 1770947251055,
|
||||
"tag": "0001_rapid_eddie_brock",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 2,
|
||||
"version": "6",
|
||||
"when": 1770948428907,
|
||||
"tag": "0002_lazy_moira_mactaggert",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 3,
|
||||
"version": "6",
|
||||
"when": 1771027535276,
|
||||
"tag": "0003_plucky_bran",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 4,
|
||||
"version": "6",
|
||||
"when": 1771097651912,
|
||||
"tag": "0004_focused_shuri",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 5,
|
||||
"version": "6",
|
||||
"when": 1771370000000,
|
||||
"tag": "0005_sandbox_actor_id",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
245
foundry/packages/backend/src/actors/task/db/migrations.ts
Normal file
245
foundry/packages/backend/src/actors/task/db/migrations.ts
Normal file
|
|
@ -0,0 +1,245 @@
|
|||
// This file is generated by src/actors/_scripts/generate-actor-migrations.ts.
|
||||
// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql).
|
||||
// Do not hand-edit this file.
|
||||
|
||||
const journal = {
|
||||
entries: [
|
||||
{
|
||||
idx: 0,
|
||||
when: 1770924374665,
|
||||
tag: "0000_condemned_maria_hill",
|
||||
breakpoints: true,
|
||||
},
|
||||
{
|
||||
idx: 1,
|
||||
when: 1770947251055,
|
||||
tag: "0001_rapid_eddie_brock",
|
||||
breakpoints: true,
|
||||
},
|
||||
{
|
||||
idx: 2,
|
||||
when: 1770948428907,
|
||||
tag: "0002_lazy_moira_mactaggert",
|
||||
breakpoints: true,
|
||||
},
|
||||
{
|
||||
idx: 3,
|
||||
when: 1771027535276,
|
||||
tag: "0003_plucky_bran",
|
||||
breakpoints: true,
|
||||
},
|
||||
{
|
||||
idx: 4,
|
||||
when: 1771097651912,
|
||||
tag: "0004_focused_shuri",
|
||||
breakpoints: true,
|
||||
},
|
||||
{
|
||||
idx: 5,
|
||||
when: 1771370000000,
|
||||
tag: "0005_sandbox_actor_id",
|
||||
breakpoints: true,
|
||||
},
|
||||
{
|
||||
idx: 6,
|
||||
when: 1773020000000,
|
||||
tag: "0006_workbench_sessions",
|
||||
breakpoints: true,
|
||||
},
|
||||
],
|
||||
} as const;
|
||||
|
||||
export default {
|
||||
journal,
|
||||
migrations: {
|
||||
m0000: `CREATE TABLE \`task\` (
|
||||
\`id\` integer PRIMARY KEY NOT NULL,
|
||||
\`branch_name\` text NOT NULL,
|
||||
\`title\` text NOT NULL,
|
||||
\`task\` text NOT NULL,
|
||||
\`provider_id\` text NOT NULL,
|
||||
\`status\` text NOT NULL,
|
||||
\`agent_type\` text DEFAULT 'claude',
|
||||
\`auto_committed\` integer DEFAULT 0,
|
||||
\`pushed\` integer DEFAULT 0,
|
||||
\`pr_submitted\` integer DEFAULT 0,
|
||||
\`needs_push\` integer DEFAULT 0,
|
||||
\`created_at\` integer NOT NULL,
|
||||
\`updated_at\` integer NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE \`task_runtime\` (
|
||||
\`id\` integer PRIMARY KEY NOT NULL,
|
||||
\`sandbox_id\` text,
|
||||
\`session_id\` text,
|
||||
\`switch_target\` text,
|
||||
\`status_message\` text,
|
||||
\`updated_at\` integer NOT NULL
|
||||
);
|
||||
`,
|
||||
m0001: `ALTER TABLE \`task\` DROP COLUMN \`auto_committed\`;--> statement-breakpoint
|
||||
ALTER TABLE \`task\` DROP COLUMN \`pushed\`;--> statement-breakpoint
|
||||
ALTER TABLE \`task\` DROP COLUMN \`needs_push\`;`,
|
||||
m0002: `ALTER TABLE \`task_runtime\` RENAME COLUMN "sandbox_id" TO "active_sandbox_id";--> statement-breakpoint
|
||||
ALTER TABLE \`task_runtime\` RENAME COLUMN "session_id" TO "active_session_id";--> statement-breakpoint
|
||||
ALTER TABLE \`task_runtime\` RENAME COLUMN "switch_target" TO "active_switch_target";--> statement-breakpoint
|
||||
CREATE TABLE \`task_sandboxes\` (
|
||||
\`sandbox_id\` text PRIMARY KEY NOT NULL,
|
||||
\`provider_id\` text NOT NULL,
|
||||
\`switch_target\` text NOT NULL,
|
||||
\`cwd\` text,
|
||||
\`status_message\` text,
|
||||
\`created_at\` integer NOT NULL,
|
||||
\`updated_at\` integer NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE \`task_runtime\` ADD \`active_cwd\` text;
|
||||
--> statement-breakpoint
|
||||
INSERT INTO \`task_sandboxes\` (
|
||||
\`sandbox_id\`,
|
||||
\`provider_id\`,
|
||||
\`switch_target\`,
|
||||
\`cwd\`,
|
||||
\`status_message\`,
|
||||
\`created_at\`,
|
||||
\`updated_at\`
|
||||
)
|
||||
SELECT
|
||||
r.\`active_sandbox_id\`,
|
||||
(SELECT h.\`provider_id\` FROM \`task\` h WHERE h.\`id\` = 1),
|
||||
r.\`active_switch_target\`,
|
||||
r.\`active_cwd\`,
|
||||
r.\`status_message\`,
|
||||
COALESCE((SELECT h.\`created_at\` FROM \`task\` h WHERE h.\`id\` = 1), r.\`updated_at\`),
|
||||
r.\`updated_at\`
|
||||
FROM \`task_runtime\` r
|
||||
WHERE
|
||||
r.\`id\` = 1
|
||||
AND r.\`active_sandbox_id\` IS NOT NULL
|
||||
AND r.\`active_switch_target\` IS NOT NULL
|
||||
ON CONFLICT(\`sandbox_id\`) DO NOTHING;
|
||||
`,
|
||||
m0003: `-- Allow tasks to exist before their branch/title are determined.
|
||||
-- Drizzle doesn't support altering column nullability in SQLite directly, so rebuild the table.
|
||||
|
||||
PRAGMA foreign_keys=off;
|
||||
|
||||
CREATE TABLE \`task__new\` (
|
||||
\`id\` integer PRIMARY KEY NOT NULL,
|
||||
\`branch_name\` text,
|
||||
\`title\` text,
|
||||
\`task\` text NOT NULL,
|
||||
\`provider_id\` text NOT NULL,
|
||||
\`status\` text NOT NULL,
|
||||
\`agent_type\` text DEFAULT 'claude',
|
||||
\`pr_submitted\` integer DEFAULT 0,
|
||||
\`created_at\` integer NOT NULL,
|
||||
\`updated_at\` integer NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO \`task__new\` (
|
||||
\`id\`,
|
||||
\`branch_name\`,
|
||||
\`title\`,
|
||||
\`task\`,
|
||||
\`provider_id\`,
|
||||
\`status\`,
|
||||
\`agent_type\`,
|
||||
\`pr_submitted\`,
|
||||
\`created_at\`,
|
||||
\`updated_at\`
|
||||
)
|
||||
SELECT
|
||||
\`id\`,
|
||||
\`branch_name\`,
|
||||
\`title\`,
|
||||
\`task\`,
|
||||
\`provider_id\`,
|
||||
\`status\`,
|
||||
\`agent_type\`,
|
||||
\`pr_submitted\`,
|
||||
\`created_at\`,
|
||||
\`updated_at\`
|
||||
FROM \`task\`;
|
||||
|
||||
DROP TABLE \`task\`;
|
||||
ALTER TABLE \`task__new\` RENAME TO \`task\`;
|
||||
|
||||
PRAGMA foreign_keys=on;
|
||||
|
||||
`,
|
||||
m0004: `-- Fix: make branch_name/title nullable during initial "naming" stage.
|
||||
-- 0003 was missing statement breakpoints, so drizzle's migrator marked it applied without executing all statements.
|
||||
-- Rebuild the table again with proper statement breakpoints.
|
||||
|
||||
PRAGMA foreign_keys=off;
|
||||
--> statement-breakpoint
|
||||
|
||||
DROP TABLE IF EXISTS \`task__new\`;
|
||||
--> statement-breakpoint
|
||||
|
||||
CREATE TABLE \`task__new\` (
|
||||
\`id\` integer PRIMARY KEY NOT NULL,
|
||||
\`branch_name\` text,
|
||||
\`title\` text,
|
||||
\`task\` text NOT NULL,
|
||||
\`provider_id\` text NOT NULL,
|
||||
\`status\` text NOT NULL,
|
||||
\`agent_type\` text DEFAULT 'claude',
|
||||
\`pr_submitted\` integer DEFAULT 0,
|
||||
\`created_at\` integer NOT NULL,
|
||||
\`updated_at\` integer NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
|
||||
INSERT INTO \`task__new\` (
|
||||
\`id\`,
|
||||
\`branch_name\`,
|
||||
\`title\`,
|
||||
\`task\`,
|
||||
\`provider_id\`,
|
||||
\`status\`,
|
||||
\`agent_type\`,
|
||||
\`pr_submitted\`,
|
||||
\`created_at\`,
|
||||
\`updated_at\`
|
||||
)
|
||||
SELECT
|
||||
\`id\`,
|
||||
\`branch_name\`,
|
||||
\`title\`,
|
||||
\`task\`,
|
||||
\`provider_id\`,
|
||||
\`status\`,
|
||||
\`agent_type\`,
|
||||
\`pr_submitted\`,
|
||||
\`created_at\`,
|
||||
\`updated_at\`
|
||||
FROM \`task\`;
|
||||
--> statement-breakpoint
|
||||
|
||||
DROP TABLE \`task\`;
|
||||
--> statement-breakpoint
|
||||
|
||||
ALTER TABLE \`task__new\` RENAME TO \`task\`;
|
||||
--> statement-breakpoint
|
||||
|
||||
PRAGMA foreign_keys=on;
|
||||
`,
|
||||
m0005: `ALTER TABLE \`task_sandboxes\` ADD \`sandbox_actor_id\` text;`,
|
||||
m0006: `CREATE TABLE \`task_workbench_sessions\` (
|
||||
\`session_id\` text PRIMARY KEY NOT NULL,
|
||||
\`session_name\` text NOT NULL,
|
||||
\`model\` text NOT NULL,
|
||||
\`unread\` integer DEFAULT 0 NOT NULL,
|
||||
\`draft_text\` text DEFAULT '' NOT NULL,
|
||||
\`draft_attachments_json\` text DEFAULT '[]' NOT NULL,
|
||||
\`draft_updated_at\` integer,
|
||||
\`created\` integer DEFAULT 1 NOT NULL,
|
||||
\`closed\` integer DEFAULT 0 NOT NULL,
|
||||
\`thinking_since_ms\` integer,
|
||||
\`created_at\` integer NOT NULL,
|
||||
\`updated_at\` integer NOT NULL
|
||||
);`,
|
||||
} as const,
|
||||
};
|
||||
51
foundry/packages/backend/src/actors/task/db/schema.ts
Normal file
51
foundry/packages/backend/src/actors/task/db/schema.ts
Normal file
|
|
@ -0,0 +1,51 @@
|
|||
import { integer, sqliteTable, text } from "rivetkit/db/drizzle";
|
||||
|
||||
// SQLite is per task actor instance, so these tables only ever store one row (id=1).
|
||||
export const task = sqliteTable("task", {
|
||||
id: integer("id").primaryKey(),
|
||||
branchName: text("branch_name"),
|
||||
title: text("title"),
|
||||
task: text("task").notNull(),
|
||||
providerId: text("provider_id").notNull(),
|
||||
status: text("status").notNull(),
|
||||
agentType: text("agent_type").default("claude"),
|
||||
prSubmitted: integer("pr_submitted").default(0),
|
||||
createdAt: integer("created_at").notNull(),
|
||||
updatedAt: integer("updated_at").notNull(),
|
||||
});
|
||||
|
||||
export const taskRuntime = sqliteTable("task_runtime", {
|
||||
id: integer("id").primaryKey(),
|
||||
activeSandboxId: text("active_sandbox_id"),
|
||||
activeSessionId: text("active_session_id"),
|
||||
activeSwitchTarget: text("active_switch_target"),
|
||||
activeCwd: text("active_cwd"),
|
||||
statusMessage: text("status_message"),
|
||||
updatedAt: integer("updated_at").notNull(),
|
||||
});
|
||||
|
||||
export const taskSandboxes = sqliteTable("task_sandboxes", {
|
||||
sandboxId: text("sandbox_id").notNull().primaryKey(),
|
||||
providerId: text("provider_id").notNull(),
|
||||
sandboxActorId: text("sandbox_actor_id"),
|
||||
switchTarget: text("switch_target").notNull(),
|
||||
cwd: text("cwd"),
|
||||
statusMessage: text("status_message"),
|
||||
createdAt: integer("created_at").notNull(),
|
||||
updatedAt: integer("updated_at").notNull(),
|
||||
});
|
||||
|
||||
export const taskWorkbenchSessions = sqliteTable("task_workbench_sessions", {
|
||||
sessionId: text("session_id").notNull().primaryKey(),
|
||||
sessionName: text("session_name").notNull(),
|
||||
model: text("model").notNull(),
|
||||
unread: integer("unread").notNull().default(0),
|
||||
draftText: text("draft_text").notNull().default(""),
|
||||
draftAttachmentsJson: text("draft_attachments_json").notNull().default("[]"),
|
||||
draftUpdatedAt: integer("draft_updated_at"),
|
||||
created: integer("created").notNull().default(1),
|
||||
closed: integer("closed").notNull().default(0),
|
||||
thinkingSinceMs: integer("thinking_since_ms"),
|
||||
createdAt: integer("created_at").notNull(),
|
||||
updatedAt: integer("updated_at").notNull(),
|
||||
});
|
||||
385
foundry/packages/backend/src/actors/task/index.ts
Normal file
385
foundry/packages/backend/src/actors/task/index.ts
Normal file
|
|
@ -0,0 +1,385 @@
|
|||
import { actor, queue } from "rivetkit";
|
||||
import { workflow } from "rivetkit/workflow";
|
||||
import type {
|
||||
AgentType,
|
||||
TaskRecord,
|
||||
TaskWorkbenchChangeModelInput,
|
||||
TaskWorkbenchRenameInput,
|
||||
TaskWorkbenchRenameSessionInput,
|
||||
TaskWorkbenchSetSessionUnreadInput,
|
||||
TaskWorkbenchSendMessageInput,
|
||||
TaskWorkbenchUpdateDraftInput,
|
||||
ProviderId,
|
||||
} from "@sandbox-agent/foundry-shared";
|
||||
import { expectQueueResponse } from "../../services/queue.js";
|
||||
import { selfTask } from "../handles.js";
|
||||
import { taskDb } from "./db/db.js";
|
||||
import { getCurrentRecord } from "./workflow/common.js";
|
||||
import {
|
||||
changeWorkbenchModel,
|
||||
closeWorkbenchSession,
|
||||
createWorkbenchSession,
|
||||
getWorkbenchTask,
|
||||
markWorkbenchUnread,
|
||||
publishWorkbenchPr,
|
||||
renameWorkbenchBranch,
|
||||
renameWorkbenchTask,
|
||||
renameWorkbenchSession,
|
||||
revertWorkbenchFile,
|
||||
sendWorkbenchMessage,
|
||||
syncWorkbenchSessionStatus,
|
||||
setWorkbenchSessionUnread,
|
||||
stopWorkbenchSession,
|
||||
updateWorkbenchDraft,
|
||||
} from "./workbench.js";
|
||||
import { TASK_QUEUE_NAMES, taskWorkflowQueueName, runTaskWorkflow } from "./workflow/index.js";
|
||||
|
||||
export interface TaskInput {
|
||||
workspaceId: string;
|
||||
repoId: string;
|
||||
taskId: string;
|
||||
repoRemote: string;
|
||||
repoLocalPath: string;
|
||||
branchName: string | null;
|
||||
title: string | null;
|
||||
task: string;
|
||||
providerId: ProviderId;
|
||||
agentType: AgentType | null;
|
||||
explicitTitle: string | null;
|
||||
explicitBranchName: string | null;
|
||||
initialPrompt: string | null;
|
||||
}
|
||||
|
||||
interface InitializeCommand {
|
||||
providerId?: ProviderId;
|
||||
}
|
||||
|
||||
interface TaskActionCommand {
|
||||
reason?: string;
|
||||
}
|
||||
|
||||
interface TaskTabCommand {
|
||||
tabId: string;
|
||||
}
|
||||
|
||||
interface TaskStatusSyncCommand {
|
||||
sessionId: string;
|
||||
status: "running" | "idle" | "error";
|
||||
at: number;
|
||||
}
|
||||
|
||||
interface TaskWorkbenchValueCommand {
|
||||
value: string;
|
||||
}
|
||||
|
||||
interface TaskWorkbenchSessionTitleCommand {
|
||||
sessionId: string;
|
||||
title: string;
|
||||
}
|
||||
|
||||
interface TaskWorkbenchSessionUnreadCommand {
|
||||
sessionId: string;
|
||||
unread: boolean;
|
||||
}
|
||||
|
||||
interface TaskWorkbenchUpdateDraftCommand {
|
||||
sessionId: string;
|
||||
text: string;
|
||||
attachments: Array<any>;
|
||||
}
|
||||
|
||||
interface TaskWorkbenchChangeModelCommand {
|
||||
sessionId: string;
|
||||
model: string;
|
||||
}
|
||||
|
||||
interface TaskWorkbenchSendMessageCommand {
|
||||
sessionId: string;
|
||||
text: string;
|
||||
attachments: Array<any>;
|
||||
}
|
||||
|
||||
interface TaskWorkbenchCreateSessionCommand {
|
||||
model?: string;
|
||||
}
|
||||
|
||||
interface TaskWorkbenchSessionCommand {
|
||||
sessionId: string;
|
||||
}
|
||||
|
||||
export const task = actor({
|
||||
db: taskDb,
|
||||
queues: Object.fromEntries(TASK_QUEUE_NAMES.map((name) => [name, queue()])),
|
||||
options: {
|
||||
actionTimeout: 5 * 60_000,
|
||||
},
|
||||
createState: (_c, input: TaskInput) => ({
|
||||
workspaceId: input.workspaceId,
|
||||
repoId: input.repoId,
|
||||
taskId: input.taskId,
|
||||
repoRemote: input.repoRemote,
|
||||
repoLocalPath: input.repoLocalPath,
|
||||
branchName: input.branchName,
|
||||
title: input.title,
|
||||
task: input.task,
|
||||
providerId: input.providerId,
|
||||
agentType: input.agentType,
|
||||
explicitTitle: input.explicitTitle,
|
||||
explicitBranchName: input.explicitBranchName,
|
||||
initialPrompt: input.initialPrompt,
|
||||
initialized: false,
|
||||
previousStatus: null as string | null,
|
||||
}),
|
||||
actions: {
|
||||
async initialize(c, cmd: InitializeCommand): Promise<TaskRecord> {
|
||||
const self = selfTask(c);
|
||||
const result = await self.send(taskWorkflowQueueName("task.command.initialize"), cmd ?? {}, {
|
||||
wait: true,
|
||||
timeout: 60_000,
|
||||
});
|
||||
return expectQueueResponse<TaskRecord>(result);
|
||||
},
|
||||
|
||||
async provision(c, cmd: InitializeCommand): Promise<{ ok: true }> {
|
||||
const self = selfTask(c);
|
||||
await self.send(taskWorkflowQueueName("task.command.provision"), cmd ?? {}, {
|
||||
wait: true,
|
||||
timeout: 30 * 60_000,
|
||||
});
|
||||
return { ok: true };
|
||||
},
|
||||
|
||||
async attach(c, cmd?: TaskActionCommand): Promise<{ target: string; sessionId: string | null }> {
|
||||
const self = selfTask(c);
|
||||
const result = await self.send(taskWorkflowQueueName("task.command.attach"), cmd ?? {}, {
|
||||
wait: true,
|
||||
timeout: 20_000,
|
||||
});
|
||||
return expectQueueResponse<{ target: string; sessionId: string | null }>(result);
|
||||
},
|
||||
|
||||
async switch(c): Promise<{ switchTarget: string }> {
|
||||
const self = selfTask(c);
|
||||
const result = await self.send(
|
||||
taskWorkflowQueueName("task.command.switch"),
|
||||
{},
|
||||
{
|
||||
wait: true,
|
||||
timeout: 20_000,
|
||||
},
|
||||
);
|
||||
return expectQueueResponse<{ switchTarget: string }>(result);
|
||||
},
|
||||
|
||||
async push(c, cmd?: TaskActionCommand): Promise<void> {
|
||||
const self = selfTask(c);
|
||||
await self.send(taskWorkflowQueueName("task.command.push"), cmd ?? {}, {
|
||||
wait: true,
|
||||
timeout: 180_000,
|
||||
});
|
||||
},
|
||||
|
||||
async sync(c, cmd?: TaskActionCommand): Promise<void> {
|
||||
const self = selfTask(c);
|
||||
await self.send(taskWorkflowQueueName("task.command.sync"), cmd ?? {}, {
|
||||
wait: true,
|
||||
timeout: 30_000,
|
||||
});
|
||||
},
|
||||
|
||||
async merge(c, cmd?: TaskActionCommand): Promise<void> {
|
||||
const self = selfTask(c);
|
||||
await self.send(taskWorkflowQueueName("task.command.merge"), cmd ?? {}, {
|
||||
wait: true,
|
||||
timeout: 30_000,
|
||||
});
|
||||
},
|
||||
|
||||
async archive(c, cmd?: TaskActionCommand): Promise<void> {
|
||||
const self = selfTask(c);
|
||||
void self
|
||||
.send(taskWorkflowQueueName("task.command.archive"), cmd ?? {}, {
|
||||
wait: true,
|
||||
timeout: 60_000,
|
||||
})
|
||||
.catch((error: unknown) => {
|
||||
c.log.warn({
|
||||
msg: "archive command failed",
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
async kill(c, cmd?: TaskActionCommand): Promise<void> {
|
||||
const self = selfTask(c);
|
||||
await self.send(taskWorkflowQueueName("task.command.kill"), cmd ?? {}, {
|
||||
wait: true,
|
||||
timeout: 60_000,
|
||||
});
|
||||
},
|
||||
|
||||
async get(c): Promise<TaskRecord> {
|
||||
return await getCurrentRecord({ db: c.db, state: c.state });
|
||||
},
|
||||
|
||||
async getWorkbench(c) {
|
||||
return await getWorkbenchTask(c);
|
||||
},
|
||||
|
||||
async markWorkbenchUnread(c): Promise<void> {
|
||||
const self = selfTask(c);
|
||||
await self.send(
|
||||
taskWorkflowQueueName("task.command.workbench.mark_unread"),
|
||||
{},
|
||||
{
|
||||
wait: true,
|
||||
timeout: 20_000,
|
||||
},
|
||||
);
|
||||
},
|
||||
|
||||
async renameWorkbenchTask(c, input: TaskWorkbenchRenameInput): Promise<void> {
|
||||
const self = selfTask(c);
|
||||
await self.send(taskWorkflowQueueName("task.command.workbench.rename_task"), { value: input.value } satisfies TaskWorkbenchValueCommand, {
|
||||
wait: true,
|
||||
timeout: 20_000,
|
||||
});
|
||||
},
|
||||
|
||||
async renameWorkbenchBranch(c, input: TaskWorkbenchRenameInput): Promise<void> {
|
||||
const self = selfTask(c);
|
||||
await self.send(taskWorkflowQueueName("task.command.workbench.rename_branch"), { value: input.value } satisfies TaskWorkbenchValueCommand, {
|
||||
wait: true,
|
||||
timeout: 5 * 60_000,
|
||||
});
|
||||
},
|
||||
|
||||
async createWorkbenchSession(c, input?: { model?: string }): Promise<{ tabId: string }> {
|
||||
const self = selfTask(c);
|
||||
const result = await self.send(
|
||||
taskWorkflowQueueName("task.command.workbench.create_session"),
|
||||
{ ...(input?.model ? { model: input.model } : {}) } satisfies TaskWorkbenchCreateSessionCommand,
|
||||
{
|
||||
wait: true,
|
||||
timeout: 5 * 60_000,
|
||||
},
|
||||
);
|
||||
return expectQueueResponse<{ tabId: string }>(result);
|
||||
},
|
||||
|
||||
async renameWorkbenchSession(c, input: TaskWorkbenchRenameSessionInput): Promise<void> {
|
||||
const self = selfTask(c);
|
||||
await self.send(
|
||||
taskWorkflowQueueName("task.command.workbench.rename_session"),
|
||||
{ sessionId: input.tabId, title: input.title } satisfies TaskWorkbenchSessionTitleCommand,
|
||||
{
|
||||
wait: true,
|
||||
timeout: 20_000,
|
||||
},
|
||||
);
|
||||
},
|
||||
|
||||
async setWorkbenchSessionUnread(c, input: TaskWorkbenchSetSessionUnreadInput): Promise<void> {
|
||||
const self = selfTask(c);
|
||||
await self.send(
|
||||
taskWorkflowQueueName("task.command.workbench.set_session_unread"),
|
||||
{ sessionId: input.tabId, unread: input.unread } satisfies TaskWorkbenchSessionUnreadCommand,
|
||||
{
|
||||
wait: true,
|
||||
timeout: 20_000,
|
||||
},
|
||||
);
|
||||
},
|
||||
|
||||
async updateWorkbenchDraft(c, input: TaskWorkbenchUpdateDraftInput): Promise<void> {
|
||||
const self = selfTask(c);
|
||||
await self.send(
|
||||
taskWorkflowQueueName("task.command.workbench.update_draft"),
|
||||
{
|
||||
sessionId: input.tabId,
|
||||
text: input.text,
|
||||
attachments: input.attachments,
|
||||
} satisfies TaskWorkbenchUpdateDraftCommand,
|
||||
{
|
||||
wait: true,
|
||||
timeout: 20_000,
|
||||
},
|
||||
);
|
||||
},
|
||||
|
||||
async changeWorkbenchModel(c, input: TaskWorkbenchChangeModelInput): Promise<void> {
|
||||
const self = selfTask(c);
|
||||
await self.send(
|
||||
taskWorkflowQueueName("task.command.workbench.change_model"),
|
||||
{ sessionId: input.tabId, model: input.model } satisfies TaskWorkbenchChangeModelCommand,
|
||||
{
|
||||
wait: true,
|
||||
timeout: 20_000,
|
||||
},
|
||||
);
|
||||
},
|
||||
|
||||
async sendWorkbenchMessage(c, input: TaskWorkbenchSendMessageInput): Promise<void> {
|
||||
const self = selfTask(c);
|
||||
await self.send(
|
||||
taskWorkflowQueueName("task.command.workbench.send_message"),
|
||||
{
|
||||
sessionId: input.tabId,
|
||||
text: input.text,
|
||||
attachments: input.attachments,
|
||||
} satisfies TaskWorkbenchSendMessageCommand,
|
||||
{
|
||||
wait: true,
|
||||
timeout: 10 * 60_000,
|
||||
},
|
||||
);
|
||||
},
|
||||
|
||||
async stopWorkbenchSession(c, input: TaskTabCommand): Promise<void> {
|
||||
const self = selfTask(c);
|
||||
await self.send(taskWorkflowQueueName("task.command.workbench.stop_session"), { sessionId: input.tabId } satisfies TaskWorkbenchSessionCommand, {
|
||||
wait: true,
|
||||
timeout: 5 * 60_000,
|
||||
});
|
||||
},
|
||||
|
||||
async syncWorkbenchSessionStatus(c, input: TaskStatusSyncCommand): Promise<void> {
|
||||
const self = selfTask(c);
|
||||
await self.send(taskWorkflowQueueName("task.command.workbench.sync_session_status"), input, {
|
||||
wait: true,
|
||||
timeout: 20_000,
|
||||
});
|
||||
},
|
||||
|
||||
async closeWorkbenchSession(c, input: TaskTabCommand): Promise<void> {
|
||||
const self = selfTask(c);
|
||||
await self.send(taskWorkflowQueueName("task.command.workbench.close_session"), { sessionId: input.tabId } satisfies TaskWorkbenchSessionCommand, {
|
||||
wait: true,
|
||||
timeout: 5 * 60_000,
|
||||
});
|
||||
},
|
||||
|
||||
async publishWorkbenchPr(c): Promise<void> {
|
||||
const self = selfTask(c);
|
||||
await self.send(
|
||||
taskWorkflowQueueName("task.command.workbench.publish_pr"),
|
||||
{},
|
||||
{
|
||||
wait: true,
|
||||
timeout: 10 * 60_000,
|
||||
},
|
||||
);
|
||||
},
|
||||
|
||||
async revertWorkbenchFile(c, input: { path: string }): Promise<void> {
|
||||
const self = selfTask(c);
|
||||
await self.send(taskWorkflowQueueName("task.command.workbench.revert_file"), input, {
|
||||
wait: true,
|
||||
timeout: 5 * 60_000,
|
||||
});
|
||||
},
|
||||
},
|
||||
run: workflow(runTaskWorkflow),
|
||||
});
|
||||
|
||||
export { TASK_QUEUE_NAMES };
|
||||
818
foundry/packages/backend/src/actors/task/workbench.ts
Normal file
818
foundry/packages/backend/src/actors/task/workbench.ts
Normal file
|
|
@ -0,0 +1,818 @@
|
|||
// @ts-nocheck
|
||||
import { basename } from "node:path";
|
||||
import { asc, eq } from "drizzle-orm";
|
||||
import { getActorRuntimeContext } from "../context.js";
|
||||
import { getOrCreateTaskStatusSync, getOrCreateProject, getOrCreateWorkspace, getSandboxInstance } from "../handles.js";
|
||||
import { task as taskTable, taskRuntime, taskWorkbenchSessions } from "./db/schema.js";
|
||||
import { getCurrentRecord } from "./workflow/common.js";
|
||||
|
||||
// Poll cadence (ms) handed to the per-session status-sync actor when a message is sent.
const STATUS_SYNC_INTERVAL_MS = 1_000;
|
||||
|
||||
// Lazily creates the per-task session-metadata table. Safe to call repeatedly
// (IF NOT EXISTS); every metadata read/write path calls this first so the table
// exists before any query runs. Booleans are stored as 0/1 integers.
async function ensureWorkbenchSessionTable(c: any): Promise<void> {
  await c.db.execute(`
    CREATE TABLE IF NOT EXISTS task_workbench_sessions (
      session_id text PRIMARY KEY NOT NULL,
      session_name text NOT NULL,
      model text NOT NULL,
      unread integer DEFAULT 0 NOT NULL,
      draft_text text DEFAULT '' NOT NULL,
      draft_attachments_json text DEFAULT '[]' NOT NULL,
      draft_updated_at integer,
      created integer DEFAULT 1 NOT NULL,
      closed integer DEFAULT 0 NOT NULL,
      thinking_since_ms integer,
      created_at integer NOT NULL,
      updated_at integer NOT NULL
    )
  `);
}
|
||||
|
||||
function defaultModelForAgent(agentType: string | null | undefined) {
|
||||
return agentType === "codex" ? "gpt-4o" : "claude-sonnet-4";
|
||||
}
|
||||
|
||||
function agentKindForModel(model: string) {
|
||||
if (model === "gpt-4o" || model === "o3") {
|
||||
return "Codex";
|
||||
}
|
||||
return "Claude";
|
||||
}
|
||||
|
||||
export function agentTypeForModel(model: string) {
|
||||
if (model === "gpt-4o" || model === "o3") {
|
||||
return "codex";
|
||||
}
|
||||
return "claude";
|
||||
}
|
||||
|
||||
function repoLabelFromRemote(remoteUrl: string): string {
|
||||
const trimmed = remoteUrl.trim();
|
||||
try {
|
||||
const url = new URL(trimmed.startsWith("http") ? trimmed : `https://${trimmed}`);
|
||||
const parts = url.pathname.replace(/\/+$/, "").split("/").filter(Boolean);
|
||||
if (parts.length >= 2) {
|
||||
return `${parts[0]}/${(parts[1] ?? "").replace(/\.git$/, "")}`;
|
||||
}
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
|
||||
return basename(trimmed.replace(/\.git$/, ""));
|
||||
}
|
||||
|
||||
function parseDraftAttachments(value: string | null | undefined): Array<any> {
|
||||
if (!value) {
|
||||
return [];
|
||||
}
|
||||
|
||||
try {
|
||||
const parsed = JSON.parse(value) as unknown;
|
||||
return Array.isArray(parsed) ? parsed : [];
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
export function shouldMarkSessionUnreadForStatus(meta: { thinkingSinceMs?: number | null }, status: "running" | "idle" | "error"): boolean {
|
||||
if (status === "running") {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Only mark unread when we observe the transition out of an active thinking state.
|
||||
// Repeated idle polls for an already-finished session must not flip unread back on.
|
||||
return Boolean(meta.thinkingSinceMs);
|
||||
}
|
||||
|
||||
// Loads all session-metadata rows (creation order), mapping stored 0/1 flags to
// booleans and decoding the draft-attachments JSON. Closed sessions are
// filtered out unless `includeClosed` is set.
async function listSessionMetaRows(c: any, options?: { includeClosed?: boolean }): Promise<Array<any>> {
  await ensureWorkbenchSessionTable(c);
  const rows = await c.db.select().from(taskWorkbenchSessions).orderBy(asc(taskWorkbenchSessions.createdAt)).all();
  const mapped = rows.map((row: any) => ({
    ...row,
    // Expose the session id under both `id` and `sessionId` for callers.
    id: row.sessionId,
    sessionId: row.sessionId,
    draftAttachments: parseDraftAttachments(row.draftAttachmentsJson),
    draftUpdatedAtMs: row.draftUpdatedAt ?? null,
    unread: row.unread === 1,
    created: row.created === 1,
    closed: row.closed === 1,
  }));

  if (options?.includeClosed === true) {
    return mapped;
  }

  return mapped.filter((row: any) => row.closed !== true);
}

// Next auto-generated tab title. Counts closed sessions too so names are not
// reused after a tab is closed.
async function nextSessionName(c: any): Promise<string> {
  const rows = await listSessionMetaRows(c, { includeClosed: true });
  return `Session ${rows.length + 1}`;
}

// Reads a single session-metadata row by id, with the same boolean/JSON
// normalization as listSessionMetaRows. Returns null when the row is absent.
async function readSessionMeta(c: any, sessionId: string): Promise<any | null> {
  await ensureWorkbenchSessionTable(c);
  const row = await c.db.select().from(taskWorkbenchSessions).where(eq(taskWorkbenchSessions.sessionId, sessionId)).get();

  if (!row) {
    return null;
  }

  return {
    ...row,
    id: row.sessionId,
    sessionId: row.sessionId,
    draftAttachments: parseDraftAttachments(row.draftAttachmentsJson),
    draftUpdatedAtMs: row.draftUpdatedAt ?? null,
    unread: row.unread === 1,
    created: row.created === 1,
    closed: row.closed === 1,
  };
}
|
||||
|
||||
// Get-or-create for a session-metadata row. If the row exists it is returned
// unchanged (the optional params are NOT applied as updates); otherwise a new
// row is inserted with defaults: next auto name, the agent's default model,
// empty draft, open, not thinking.
async function ensureSessionMeta(
  c: any,
  params: {
    sessionId: string;
    model?: string;
    sessionName?: string;
    unread?: boolean;
  },
): Promise<any> {
  await ensureWorkbenchSessionTable(c);
  const existing = await readSessionMeta(c, params.sessionId);
  if (existing) {
    return existing;
  }

  const now = Date.now();
  const sessionName = params.sessionName ?? (await nextSessionName(c));
  const model = params.model ?? defaultModelForAgent(c.state.agentType);
  const unread = params.unread ?? false;

  await c.db
    .insert(taskWorkbenchSessions)
    .values({
      sessionId: params.sessionId,
      sessionName,
      model,
      unread: unread ? 1 : 0,
      draftText: "",
      draftAttachmentsJson: "[]",
      draftUpdatedAt: null,
      created: 1,
      closed: 0,
      thinkingSinceMs: null,
      createdAt: now,
      updatedAt: now,
    })
    .run();

  // Re-read so the caller gets the same normalized shape as readSessionMeta.
  return await readSessionMeta(c, params.sessionId);
}

// Applies a partial column update to a session row (creating the row first if
// needed), stamping updatedAt, and returns the refreshed normalized row.
// `values` uses raw column representations (e.g. 0/1 for boolean columns).
async function updateSessionMeta(c: any, sessionId: string, values: Record<string, unknown>): Promise<any> {
  await ensureSessionMeta(c, { sessionId });
  await c.db
    .update(taskWorkbenchSessions)
    .set({
      ...values,
      updatedAt: Date.now(),
    })
    .where(eq(taskWorkbenchSessions.sessionId, sessionId))
    .run();
  return await readSessionMeta(c, sessionId);
}

// Pushes a workbench-changed notification up to the owning workspace actor so
// connected clients can refresh.
async function notifyWorkbenchUpdated(c: any): Promise<void> {
  const workspace = await getOrCreateWorkspace(c, c.state.workspaceId);
  await workspace.notifyWorkbenchUpdated({});
}
|
||||
|
||||
function shellFragment(parts: string[]): string {
|
||||
return parts.join(" && ");
|
||||
}
|
||||
|
||||
// Runs a shell command inside a sandbox via the task's provider, first cd'ing
// into `cwd`. The command is double-wrapped: the cd + command are joined with
// `&&`, then the whole fragment is JSON-quoted and passed to `bash -lc` so it
// survives the provider's own shell invocation.
async function executeInSandbox(
  c: any,
  params: {
    sandboxId: string;
    cwd: string;
    command: string;
    label: string; // human-readable label for logging/attribution
  },
): Promise<{ exitCode: number; result: string }> {
  const { providers } = getActorRuntimeContext();
  const provider = providers.get(c.state.providerId);
  return await provider.executeCommand({
    workspaceId: c.state.workspaceId,
    sandboxId: params.sandboxId,
    // JSON.stringify is used as a shell-quoting mechanism for cwd and the fragment.
    command: `bash -lc ${JSON.stringify(shellFragment([`cd ${JSON.stringify(params.cwd)}`, params.command]))}`,
    label: params.label,
  });
}
|
||||
|
||||
function parseGitStatus(output: string): Array<{ path: string; type: "M" | "A" | "D" }> {
|
||||
return output
|
||||
.split("\n")
|
||||
.map((line) => line.trimEnd())
|
||||
.filter(Boolean)
|
||||
.map((line) => {
|
||||
const status = line.slice(0, 2).trim();
|
||||
const rawPath = line.slice(3).trim();
|
||||
const path = rawPath.includes(" -> ") ? (rawPath.split(" -> ").pop() ?? rawPath) : rawPath;
|
||||
const type = status.includes("D") ? "D" : status.includes("A") || status === "??" ? "A" : "M";
|
||||
return { path, type };
|
||||
});
|
||||
}
|
||||
|
||||
function parseNumstat(output: string): Map<string, { added: number; removed: number }> {
|
||||
const map = new Map<string, { added: number; removed: number }>();
|
||||
for (const line of output.split("\n")) {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed) continue;
|
||||
const [addedRaw, removedRaw, ...pathParts] = trimmed.split("\t");
|
||||
const path = pathParts.join("\t").trim();
|
||||
if (!path) continue;
|
||||
map.set(path, {
|
||||
added: Number.parseInt(addedRaw ?? "0", 10) || 0,
|
||||
removed: Number.parseInt(removedRaw ?? "0", 10) || 0,
|
||||
});
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
function buildFileTree(paths: string[]): Array<any> {
|
||||
const root = {
|
||||
children: new Map<string, any>(),
|
||||
};
|
||||
|
||||
for (const path of paths) {
|
||||
const parts = path.split("/").filter(Boolean);
|
||||
let current = root;
|
||||
let currentPath = "";
|
||||
|
||||
for (let index = 0; index < parts.length; index += 1) {
|
||||
const part = parts[index]!;
|
||||
currentPath = currentPath ? `${currentPath}/${part}` : part;
|
||||
const isDir = index < parts.length - 1;
|
||||
let node = current.children.get(part);
|
||||
if (!node) {
|
||||
node = {
|
||||
name: part,
|
||||
path: currentPath,
|
||||
isDir,
|
||||
children: isDir ? new Map<string, any>() : undefined,
|
||||
};
|
||||
current.children.set(part, node);
|
||||
} else if (isDir && !(node.children instanceof Map)) {
|
||||
node.children = new Map<string, any>();
|
||||
}
|
||||
current = node;
|
||||
}
|
||||
}
|
||||
|
||||
function sortNodes(nodes: Iterable<any>): Array<any> {
|
||||
return [...nodes]
|
||||
.map((node) =>
|
||||
node.isDir
|
||||
? {
|
||||
name: node.name,
|
||||
path: node.path,
|
||||
isDir: true,
|
||||
children: sortNodes(node.children?.values?.() ?? []),
|
||||
}
|
||||
: {
|
||||
name: node.name,
|
||||
path: node.path,
|
||||
isDir: false,
|
||||
},
|
||||
)
|
||||
.sort((left, right) => {
|
||||
if (left.isDir !== right.isDir) {
|
||||
return left.isDir ? -1 : 1;
|
||||
}
|
||||
return left.path.localeCompare(right.path);
|
||||
});
|
||||
}
|
||||
|
||||
return sortNodes(root.children.values());
|
||||
}
|
||||
|
||||
// Gathers the git view of the task's active sandbox: changed files (with
// added/removed counts), per-file diffs, and the full tracked/untracked file
// tree. Returns empty state when there is no active sandbox/cwd or when
// `git status` itself fails.
async function collectWorkbenchGitState(c: any, record: any) {
  const activeSandboxId = record.activeSandboxId;
  const activeSandbox = activeSandboxId != null ? ((record.sandboxes ?? []).find((candidate: any) => candidate.sandboxId === activeSandboxId) ?? null) : null;
  // Fall back to the first known sandbox's cwd when the active one has none.
  const cwd = activeSandbox?.cwd ?? record.sandboxes?.[0]?.cwd ?? null;
  if (!activeSandboxId || !cwd) {
    return {
      fileChanges: [],
      diffs: {},
      fileTree: [],
    };
  }

  const statusResult = await executeInSandbox(c, {
    sandboxId: activeSandboxId,
    cwd,
    command: "git status --porcelain=v1 -uall",
    label: "git status",
  });
  if (statusResult.exitCode !== 0) {
    // Not a git repo / git failure: present an empty workbench rather than erroring.
    return {
      fileChanges: [],
      diffs: {},
      fileTree: [],
    };
  }

  const statusRows = parseGitStatus(statusResult.result);
  const numstatResult = await executeInSandbox(c, {
    sandboxId: activeSandboxId,
    cwd,
    command: "git diff --numstat",
    label: "git diff numstat",
  });
  const numstat = parseNumstat(numstatResult.result);
  const diffs: Record<string, string> = {};

  // One diff invocation per changed file; untracked files are diffed against
  // /dev/null via --no-index so they still produce a readable patch.
  for (const row of statusRows) {
    const diffResult = await executeInSandbox(c, {
      sandboxId: activeSandboxId,
      cwd,
      command: `if git ls-files --error-unmatch -- ${JSON.stringify(row.path)} >/dev/null 2>&1; then git diff -- ${JSON.stringify(row.path)}; else git diff --no-index -- /dev/null ${JSON.stringify(row.path)} || true; fi`,
      label: `git diff ${row.path}`,
    });
    diffs[row.path] = diffResult.result;
  }

  const filesResult = await executeInSandbox(c, {
    sandboxId: activeSandboxId,
    cwd,
    command: "git ls-files --cached --others --exclude-standard",
    label: "git ls-files",
  });
  const allPaths = filesResult.result
    .split("\n")
    .map((line) => line.trim())
    .filter(Boolean);

  return {
    fileChanges: statusRows.map((row) => {
      // numstat omits untracked/binary entries; default those to 0/0.
      const counts = numstat.get(row.path) ?? { added: 0, removed: 0 };
      return {
        path: row.path,
        added: counts.added,
        removed: counts.removed,
        type: row.type,
      };
    }),
    diffs,
    fileTree: buildFileTree(allPaths),
  };
}
|
||||
|
||||
// Reads up to 500 transcript events for a session from the task's sandbox,
// projected to a stable wire shape. Returns [] when the task has no sandbox.
async function readSessionTranscript(c: any, record: any, sessionId: string) {
  const sandboxId = record.activeSandboxId ?? record.sandboxes?.[0]?.sandboxId ?? null;
  if (!sandboxId) {
    return [];
  }

  const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, sandboxId);
  const page = await sandbox.listSessionEvents({
    sessionId,
    limit: 500,
  });
  return page.items.map((event: any) => ({
    id: event.id,
    eventIndex: event.eventIndex,
    sessionId: event.sessionId,
    createdAt: event.createdAt,
    connectionId: event.connectionId,
    sender: event.sender,
    payload: event.payload,
  }));
}

// Live status for a session. Only the task's currently-active session is
// polled against the sandbox; every other session reports "idle".
async function activeSessionStatus(c: any, record: any, sessionId: string) {
  if (record.activeSessionId !== sessionId || !record.activeSandboxId) {
    return "idle";
  }

  const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId);
  const status = await sandbox.sessionStatus({ sessionId });
  return status.status;
}

// Best-effort lookup of the PR associated with the task's branch via the
// project actor. Any failure (or no branch) yields null rather than an error.
async function readPullRequestSummary(c: any, branchName: string | null) {
  if (!branchName) {
    return null;
  }

  try {
    const project = await getOrCreateProject(c, c.state.workspaceId, c.state.repoId, c.state.repoRemote);
    return await project.getPullRequestForBranch({ branchName });
  } catch {
    return null;
  }
}

// Ensures the task's active session (if any) has a metadata row, seeding it as
// "Session 1" with the agent's default model. Returns the current task record.
export async function ensureWorkbenchSeeded(c: any): Promise<any> {
  const record = await getCurrentRecord({ db: c.db, state: c.state });
  if (record.activeSessionId) {
    await ensureSessionMeta(c, {
      sessionId: record.activeSessionId,
      model: defaultModelForAgent(record.agentType),
      sessionName: "Session 1",
    });
  }
  return record;
}
|
||||
|
||||
// Assembles the full workbench snapshot for this task: task metadata, PR
// summary, git change state, and one tab per open session (status, draft,
// unread flag, transcript). This is a read-mostly aggregation; the only
// in-memory adjustment is clearing a stale thinking flag for display.
export async function getWorkbenchTask(c: any): Promise<any> {
  const record = await ensureWorkbenchSeeded(c);
  const gitState = await collectWorkbenchGitState(c, record);
  const sessions = await listSessionMetaRows(c);
  const tabs = [];

  for (const meta of sessions) {
    const status = await activeSessionStatus(c, record, meta.sessionId);
    let thinkingSinceMs = meta.thinkingSinceMs ?? null;
    let unread = Boolean(meta.unread);
    if (thinkingSinceMs && status !== "running") {
      // The session finished since the flag was persisted: present it as
      // no-longer-thinking and unread. (Display-only; persistence is handled
      // by the status-sync path.)
      thinkingSinceMs = null;
      unread = true;
    }

    tabs.push({
      id: meta.id,
      sessionId: meta.sessionId,
      sessionName: meta.sessionName,
      agent: agentKindForModel(meta.model),
      model: meta.model,
      status,
      thinkingSinceMs: status === "running" ? thinkingSinceMs : null,
      unread,
      created: Boolean(meta.created),
      draft: {
        text: meta.draftText ?? "",
        attachments: Array.isArray(meta.draftAttachments) ? meta.draftAttachments : [],
        updatedAtMs: meta.draftUpdatedAtMs ?? null,
      },
      transcript: await readSessionTranscript(c, record, meta.sessionId),
    });
  }

  return {
    id: c.state.taskId,
    repoId: c.state.repoId,
    title: record.title ?? "New Task",
    // Collapse internal statuses into the UI vocabulary; anything unknown maps to "new".
    status: record.status === "archived" ? "archived" : record.status === "running" ? "running" : record.status === "idle" ? "idle" : "new",
    repoName: repoLabelFromRemote(c.state.repoRemote),
    updatedAtMs: record.updatedAt,
    branch: record.branchName,
    pullRequest: await readPullRequestSummary(c, record.branchName),
    tabs,
    fileChanges: gitState.fileChanges,
    diffs: gitState.diffs,
    fileTree: gitState.fileTree,
  };
}
|
||||
|
||||
// Renames the task (title only). Rejects blank titles, persists to the
// single-row task table, mirrors into actor state, and notifies clients.
export async function renameWorkbenchTask(c: any, value: string): Promise<void> {
  const nextTitle = value.trim();
  if (!nextTitle) {
    throw new Error("task title is required");
  }

  await c.db
    .update(taskTable)
    .set({
      title: nextTitle,
      updatedAt: Date.now(),
    })
    // The per-task DB holds exactly one task row with id 1.
    .where(eq(taskTable.id, 1))
    .run();
  c.state.title = nextTitle;
  await notifyWorkbenchUpdated(c);
}
|
||||
|
||||
// Renames the task's git branch. This is a real git operation, not just
// metadata: the local branch is renamed, the old remote branch is deleted (if
// it exists), the new branch is pushed, and upstream tracking is repointed.
// Only on success are the DB row, actor state, and project index updated.
export async function renameWorkbenchBranch(c: any, value: string): Promise<void> {
  const nextBranch = value.trim();
  if (!nextBranch) {
    throw new Error("branch name is required");
  }

  const record = await ensureWorkbenchSeeded(c);
  if (!record.branchName) {
    throw new Error("cannot rename branch before task branch exists");
  }
  if (!record.activeSandboxId) {
    throw new Error("cannot rename branch without an active sandbox");
  }
  const activeSandbox = (record.sandboxes ?? []).find((candidate: any) => candidate.sandboxId === record.activeSandboxId) ?? null;
  if (!activeSandbox?.cwd) {
    throw new Error("cannot rename branch without a sandbox cwd");
  }

  // Order matters: local rename first, then remote cleanup/push, then upstream.
  // JSON.stringify is used for shell quoting of branch names throughout.
  const renameResult = await executeInSandbox(c, {
    sandboxId: record.activeSandboxId,
    cwd: activeSandbox.cwd,
    command: [
      `git branch -m ${JSON.stringify(record.branchName)} ${JSON.stringify(nextBranch)}`,
      `if git ls-remote --exit-code --heads origin ${JSON.stringify(record.branchName)} >/dev/null 2>&1; then git push origin :${JSON.stringify(record.branchName)}; fi`,
      `git push origin ${JSON.stringify(nextBranch)}`,
      `git branch --set-upstream-to=${JSON.stringify(`origin/${nextBranch}`)} ${JSON.stringify(nextBranch)} || git push --set-upstream origin ${JSON.stringify(nextBranch)}`,
    ].join(" && "),
    label: `git branch -m ${record.branchName} ${nextBranch}`,
  });
  if (renameResult.exitCode !== 0) {
    throw new Error(`branch rename failed (${renameResult.exitCode}): ${renameResult.result}`);
  }

  await c.db
    .update(taskTable)
    .set({
      branchName: nextBranch,
      updatedAt: Date.now(),
    })
    .where(eq(taskTable.id, 1))
    .run();
  c.state.branchName = nextBranch;

  // Keep the project actor's branch index pointing at the new name.
  const project = await getOrCreateProject(c, c.state.workspaceId, c.state.repoId, c.state.repoRemote);
  await project.registerTaskBranch({
    taskId: c.state.taskId,
    branchName: nextBranch,
  });
  await notifyWorkbenchUpdated(c);
}
|
||||
|
||||
// Creates a new agent session (tab) in the task's active sandbox and records
// its metadata row. Returns the new session id as the tab id. Requires an
// active sandbox with a known cwd.
export async function createWorkbenchSession(c: any, model?: string): Promise<{ tabId: string }> {
  const record = await ensureWorkbenchSeeded(c);
  if (!record.activeSandboxId) {
    throw new Error("cannot create session without an active sandbox");
  }
  const activeSandbox = (record.sandboxes ?? []).find((candidate: any) => candidate.sandboxId === record.activeSandboxId) ?? null;
  const cwd = activeSandbox?.cwd ?? record.sandboxes?.[0]?.cwd ?? null;
  if (!cwd) {
    throw new Error("cannot create session without a sandbox cwd");
  }

  const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId);
  // Session is created with an empty prompt; the first real message comes
  // later via sendWorkbenchMessage.
  const created = await sandbox.createSession({
    prompt: "",
    cwd,
    agent: agentTypeForModel(model ?? defaultModelForAgent(record.agentType)),
  });
  if (!created.id) {
    throw new Error(created.error ?? "sandbox-agent session creation failed");
  }

  await ensureSessionMeta(c, {
    sessionId: created.id,
    model: model ?? defaultModelForAgent(record.agentType),
  });
  await notifyWorkbenchUpdated(c);
  return { tabId: created.id };
}
|
||||
|
||||
// Renames a session tab. Rejects blank titles; persists and notifies.
export async function renameWorkbenchSession(c: any, sessionId: string, title: string): Promise<void> {
  const trimmed = title.trim();
  if (!trimmed) {
    throw new Error("session title is required");
  }
  await updateSessionMeta(c, sessionId, {
    sessionName: trimmed,
  });
  await notifyWorkbenchUpdated(c);
}

// Sets/clears the session's unread flag (stored as a 0/1 column).
export async function setWorkbenchSessionUnread(c: any, sessionId: string, unread: boolean): Promise<void> {
  await updateSessionMeta(c, sessionId, {
    unread: unread ? 1 : 0,
  });
  await notifyWorkbenchUpdated(c);
}

// Persists the session's draft prompt (text + attachments) — backend-owned
// workbench state — stamping the draft update time.
export async function updateWorkbenchDraft(c: any, sessionId: string, text: string, attachments: Array<any>): Promise<void> {
  await updateSessionMeta(c, sessionId, {
    draftText: text,
    draftAttachmentsJson: JSON.stringify(attachments),
    draftUpdatedAt: Date.now(),
  });
  await notifyWorkbenchUpdated(c);
}

// Switches the model used by a session tab (metadata only; the running agent
// session itself is not restarted here).
export async function changeWorkbenchModel(c: any, sessionId: string, model: string): Promise<void> {
  await updateSessionMeta(c, sessionId, {
    model,
  });
  await notifyWorkbenchUpdated(c);
}
|
||||
|
||||
// Sends a user message into a session: builds the prompt (message text plus
// one "@ path:line" block per attachment), forwards it to the sandbox agent,
// clears the draft, marks the session active/thinking, and (re)starts the
// status-sync actor for this session.
export async function sendWorkbenchMessage(c: any, sessionId: string, text: string, attachments: Array<any>): Promise<void> {
  const record = await ensureWorkbenchSeeded(c);
  if (!record.activeSandboxId) {
    throw new Error("cannot send message without an active sandbox");
  }

  await ensureSessionMeta(c, { sessionId });
  const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId);
  // Attachments are inlined into the prompt as code references.
  const prompt = [text.trim(), ...attachments.map((attachment: any) => `@ ${attachment.filePath}:${attachment.lineNumber}\n${attachment.lineContent}`)]
    .filter(Boolean)
    .join("\n\n");
  if (!prompt) {
    throw new Error("message text is required");
  }

  await sandbox.sendPrompt({
    sessionId,
    prompt,
    notification: true,
  });

  // Sending clears the draft, marks the tab read, and starts the thinking timer.
  await updateSessionMeta(c, sessionId, {
    unread: 0,
    created: 1,
    draftText: "",
    draftAttachmentsJson: "[]",
    draftUpdatedAt: Date.now(),
    thinkingSinceMs: Date.now(),
  });

  // The messaged session becomes the task's active session.
  await c.db
    .update(taskRuntime)
    .set({
      activeSessionId: sessionId,
      updatedAt: Date.now(),
    })
    .where(eq(taskRuntime.id, 1))
    .run();

  // Ensure a status-sync worker is polling this session at the fast interval,
  // and force an immediate poll so the UI reflects "running" right away.
  const sync = await getOrCreateTaskStatusSync(c, c.state.workspaceId, c.state.repoId, c.state.taskId, record.activeSandboxId, sessionId, {
    workspaceId: c.state.workspaceId,
    repoId: c.state.repoId,
    taskId: c.state.taskId,
    providerId: c.state.providerId,
    sandboxId: record.activeSandboxId,
    sessionId,
    intervalMs: STATUS_SYNC_INTERVAL_MS,
  });
  await sync.setIntervalMs({ intervalMs: STATUS_SYNC_INTERVAL_MS });
  await sync.start();
  await sync.force();
  await notifyWorkbenchUpdated(c);
}
|
||||
|
||||
// Cancels a running agent session and clears its thinking timer. A task with
// no active sandbox is a no-op (nothing can be running).
export async function stopWorkbenchSession(c: any, sessionId: string): Promise<void> {
  const record = await ensureWorkbenchSeeded(c);
  if (!record.activeSandboxId) {
    return;
  }
  const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId);
  await sandbox.cancelSession({ sessionId });
  await updateSessionMeta(c, sessionId, {
    thinkingSinceMs: null,
  });
  await notifyWorkbenchUpdated(c);
}
|
||||
|
||||
// Ingests a polled session status (from the status-sync actor) at time `at`.
// For the task's active session it mirrors the status onto the task row and a
// "session:<status>" message onto the runtime row. For every session it
// maintains the thinking timer, and flags unread exactly once on the
// running -> non-running transition (see shouldMarkSessionUnreadForStatus).
// Clients are notified only when something actually changed.
export async function syncWorkbenchSessionStatus(c: any, sessionId: string, status: "running" | "idle" | "error", at: number): Promise<void> {
  const record = await ensureWorkbenchSeeded(c);
  const meta = await ensureSessionMeta(c, { sessionId });
  let changed = false;

  if (record.activeSessionId === sessionId) {
    const mappedStatus = status === "running" ? "running" : status === "error" ? "error" : "idle";
    if (record.status !== mappedStatus) {
      await c.db
        .update(taskTable)
        .set({
          status: mappedStatus,
          updatedAt: at,
        })
        .where(eq(taskTable.id, 1))
        .run();
      changed = true;
    }

    const statusMessage = `session:${status}`;
    if (record.statusMessage !== statusMessage) {
      await c.db
        .update(taskRuntime)
        .set({
          statusMessage,
          updatedAt: at,
        })
        .where(eq(taskRuntime.id, 1))
        .run();
      changed = true;
    }
  }

  if (status === "running") {
    // Start the thinking timer if it is not already running.
    if (!meta.thinkingSinceMs) {
      await updateSessionMeta(c, sessionId, {
        thinkingSinceMs: at,
      });
      changed = true;
    }
  } else {
    // Session stopped: clear the timer, and mark unread only on the
    // transition out of the thinking state (repeated idle polls do not
    // re-flag an already-read session).
    if (meta.thinkingSinceMs) {
      await updateSessionMeta(c, sessionId, {
        thinkingSinceMs: null,
      });
      changed = true;
    }
    if (!meta.unread && shouldMarkSessionUnreadForStatus(meta, status)) {
      await updateSessionMeta(c, sessionId, {
        unread: 1,
      });
      changed = true;
    }
  }

  if (changed) {
    await notifyWorkbenchUpdated(c);
  }
}
|
||||
|
||||
// Closes (destroys) a session tab. No-op when the task has no active sandbox
// or when it is the last open session — the workbench always keeps at least
// one tab. If the closed session was the task's active session, the runtime's
// active-session pointer is cleared.
export async function closeWorkbenchSession(c: any, sessionId: string): Promise<void> {
  const record = await ensureWorkbenchSeeded(c);
  if (!record.activeSandboxId) {
    return;
  }
  const sessions = await listSessionMetaRows(c);
  if (sessions.filter((candidate) => candidate.closed !== true).length <= 1) {
    return;
  }

  const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId);
  await sandbox.destroySession({ sessionId });
  // Soft-close in metadata: the row is kept (name numbering stays stable).
  await updateSessionMeta(c, sessionId, {
    closed: 1,
    thinkingSinceMs: null,
  });
  if (record.activeSessionId === sessionId) {
    await c.db
      .update(taskRuntime)
      .set({
        activeSessionId: null,
        updatedAt: Date.now(),
      })
      .where(eq(taskRuntime.id, 1))
      .run();
  }
  await notifyWorkbenchUpdated(c);
}
|
||||
|
||||
// Marks the most recently created open session unread. No-op when the task
// has no sessions at all.
export async function markWorkbenchUnread(c: any): Promise<void> {
  const sessions = await listSessionMetaRows(c);
  const latest = sessions[sessions.length - 1];
  if (!latest) {
    return;
  }
  await updateSessionMeta(c, latest.sessionId, {
    unread: 1,
  });
  await notifyWorkbenchUpdated(c);
}

// Publishes a PR for the task's branch via the GitHub driver and records the
// submission on the task row. Requires a branch to exist.
export async function publishWorkbenchPr(c: any): Promise<void> {
  const record = await ensureWorkbenchSeeded(c);
  if (!record.branchName) {
    throw new Error("cannot publish PR without a branch");
  }
  const { driver } = getActorRuntimeContext();
  // NOTE(review): the created PR handle is not stored or returned — confirm
  // whether callers rely on the project PR-sync picking it up instead.
  const created = await driver.github.createPr(c.state.repoLocalPath, record.branchName, record.title ?? c.state.task);
  await c.db
    .update(taskTable)
    .set({
      prSubmitted: 1,
      updatedAt: Date.now(),
    })
    .where(eq(taskTable.id, 1))
    .run();
  await notifyWorkbenchUpdated(c);
}

// Reverts a single file in the active sandbox: tracked files are restored from
// the index/HEAD (with a checkout fallback for older git), untracked files are
// simply deleted. Throws when the git command fails.
export async function revertWorkbenchFile(c: any, path: string): Promise<void> {
  const record = await ensureWorkbenchSeeded(c);
  if (!record.activeSandboxId) {
    throw new Error("cannot revert file without an active sandbox");
  }
  const activeSandbox = (record.sandboxes ?? []).find((candidate: any) => candidate.sandboxId === record.activeSandboxId) ?? null;
  if (!activeSandbox?.cwd) {
    throw new Error("cannot revert file without a sandbox cwd");
  }

  const result = await executeInSandbox(c, {
    sandboxId: record.activeSandboxId,
    cwd: activeSandbox.cwd,
    // JSON.stringify quotes the path for the shell.
    command: `if git ls-files --error-unmatch -- ${JSON.stringify(path)} >/dev/null 2>&1; then git restore --staged --worktree -- ${JSON.stringify(path)} || git checkout -- ${JSON.stringify(path)}; else rm -f ${JSON.stringify(path)}; fi`,
    label: `git restore ${path}`,
  });
  if (result.exitCode !== 0) {
    throw new Error(`file revert failed (${result.exitCode}): ${result.result}`);
  }
  await notifyWorkbenchUpdated(c);
}
|
||||
175
foundry/packages/backend/src/actors/task/workflow/commands.ts
Normal file
175
foundry/packages/backend/src/actors/task/workflow/commands.ts
Normal file
|
|
@ -0,0 +1,175 @@
|
|||
// @ts-nocheck
|
||||
import { eq } from "drizzle-orm";
|
||||
import { getActorRuntimeContext } from "../../context.js";
|
||||
import { getOrCreateTaskStatusSync } from "../../handles.js";
|
||||
import { logActorWarning, resolveErrorMessage } from "../../logging.js";
|
||||
import { task as taskTable, taskRuntime } from "../db/schema.js";
|
||||
import { TASK_ROW_ID, appendHistory, getCurrentRecord, setTaskState } from "./common.js";
|
||||
import { pushActiveBranchActivity } from "./push.js";
|
||||
|
||||
async function withTimeout<T>(promise: Promise<T>, timeoutMs: number, label: string): Promise<T> {
|
||||
let timer: ReturnType<typeof setTimeout> | undefined;
|
||||
try {
|
||||
return await Promise.race([
|
||||
promise,
|
||||
new Promise<T>((_resolve, reject) => {
|
||||
timer = setTimeout(() => reject(new Error(`${label} timed out after ${timeoutMs}ms`)), timeoutMs);
|
||||
}),
|
||||
]);
|
||||
} finally {
|
||||
if (timer) {
|
||||
clearTimeout(timer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Resolves an attach target for the task's active sandbox via its provider,
// records a "task.attach" history entry, and completes the queue message with
// the target plus the active session id.
export async function handleAttachActivity(loopCtx: any, msg: any): Promise<void> {
  const record = await getCurrentRecord(loopCtx);
  const { providers } = getActorRuntimeContext();
  const activeSandbox = record.activeSandboxId ? (record.sandboxes.find((sb: any) => sb.sandboxId === record.activeSandboxId) ?? null) : null;
  // Prefer the active sandbox's provider; fall back to the task's default.
  const provider = providers.get(activeSandbox?.providerId ?? record.providerId);
  const target = await provider.attachTarget({
    workspaceId: loopCtx.state.workspaceId,
    sandboxId: record.activeSandboxId ?? "",
  });

  await appendHistory(loopCtx, "task.attach", {
    target: target.target,
    sessionId: record.activeSessionId,
  });

  await msg.complete({
    target: target.target,
    sessionId: record.activeSessionId,
  });
}

// Replies with the runtime's currently-stored switch target ("" when unset).
export async function handleSwitchActivity(loopCtx: any, msg: any): Promise<void> {
  const db = loopCtx.db;
  const runtime = await db.select({ switchTarget: taskRuntime.activeSwitchTarget }).from(taskRuntime).where(eq(taskRuntime.id, TASK_ROW_ID)).get();

  await msg.complete({ switchTarget: runtime?.switchTarget ?? "" });
}

// Pushes the task's active branch (recording a "task.push" history entry) and
// acknowledges the queue message.
export async function handlePushActivity(loopCtx: any, msg: any): Promise<void> {
  await pushActiveBranchActivity(loopCtx, {
    reason: msg.body?.reason ?? null,
    historyKind: "task.push",
  });
  await msg.complete({ ok: true });
}

// Generic command handler: stamps the runtime's status message, appends a
// history entry of the given kind with the caller's reason, and acknowledges.
export async function handleSimpleCommandActivity(loopCtx: any, msg: any, statusMessage: string, historyKind: string): Promise<void> {
  const db = loopCtx.db;
  await db.update(taskRuntime).set({ statusMessage, updatedAt: Date.now() }).where(eq(taskRuntime.id, TASK_ROW_ID)).run();

  await appendHistory(loopCtx, historyKind, { reason: msg.body?.reason ?? null });
  await msg.complete({ ok: true });
}
|
||||
|
||||
/**
 * Handles `task.command.archive`.
 *
 * Order matters here:
 *  1. Best-effort stop of the session status-sync actor (bounded to 15s);
 *     failures are logged but never block archiving.
 *  2. Fire-and-forget release of the active sandbox (bounded to 45s) so the
 *     archive command is not held hostage by slow provider stop calls.
 *  3. Synchronous DB finalization: task row -> "archived", runtime row cleared.
 */
export async function handleArchiveActivity(loopCtx: any, msg: any): Promise<void> {
  await setTaskState(loopCtx, "archive_stop_status_sync", "stopping status sync");
  const record = await getCurrentRecord(loopCtx);

  if (record.activeSandboxId && record.activeSessionId) {
    try {
      // Re-resolve the status-sync actor with the same identity/input it was
      // created with, so we stop the exact instance polling this session.
      const sync = await getOrCreateTaskStatusSync(
        loopCtx,
        loopCtx.state.workspaceId,
        loopCtx.state.repoId,
        loopCtx.state.taskId,
        record.activeSandboxId,
        record.activeSessionId,
        {
          workspaceId: loopCtx.state.workspaceId,
          repoId: loopCtx.state.repoId,
          taskId: loopCtx.state.taskId,
          providerId: record.providerId,
          sandboxId: record.activeSandboxId,
          sessionId: record.activeSessionId,
          intervalMs: 2_000,
        },
      );
      await withTimeout(sync.stop(), 15_000, "task status sync stop");
    } catch (error) {
      // Archiving proceeds even when the sync actor cannot be stopped cleanly.
      logActorWarning("task.commands", "failed to stop status sync during archive", {
        workspaceId: loopCtx.state.workspaceId,
        repoId: loopCtx.state.repoId,
        taskId: loopCtx.state.taskId,
        sandboxId: record.activeSandboxId,
        sessionId: record.activeSessionId,
        error: resolveErrorMessage(error),
      });
    }
  }

  if (record.activeSandboxId) {
    await setTaskState(loopCtx, "archive_release_sandbox", "releasing sandbox");
    const { providers } = getActorRuntimeContext();
    const activeSandbox = record.sandboxes.find((sb: any) => sb.sandboxId === record.activeSandboxId) ?? null;
    const provider = providers.get(activeSandbox?.providerId ?? record.providerId);
    // Snapshot identifiers for the detached error log below.
    const workspaceId = loopCtx.state.workspaceId;
    const repoId = loopCtx.state.repoId;
    const taskId = loopCtx.state.taskId;
    const sandboxId = record.activeSandboxId;

    // Do not block archive finalization on provider stop. Some provider stop calls can
    // run longer than the synchronous archive UX budget.
    void withTimeout(
      provider.releaseSandbox({
        workspaceId,
        sandboxId,
      }),
      45_000,
      "provider releaseSandbox",
    ).catch((error) => {
      logActorWarning("task.commands", "failed to release sandbox during archive", {
        workspaceId,
        repoId,
        taskId,
        sandboxId,
        error: resolveErrorMessage(error),
      });
    });
  }

  const db = loopCtx.db;
  await setTaskState(loopCtx, "archive_finalize", "finalizing archive");
  // Task row reaches its terminal "archived" status.
  await db.update(taskTable).set({ status: "archived", updatedAt: Date.now() }).where(eq(taskTable.id, TASK_ROW_ID)).run();

  // Clear the active session so nothing attaches to an archived task.
  await db.update(taskRuntime).set({ activeSessionId: null, statusMessage: "archived", updatedAt: Date.now() }).where(eq(taskRuntime.id, TASK_ROW_ID)).run();

  await appendHistory(loopCtx, "task.archive", { reason: msg.body?.reason ?? null });
  await msg.complete({ ok: true });
}
|
||||
|
||||
export async function killDestroySandboxActivity(loopCtx: any): Promise<void> {
|
||||
await setTaskState(loopCtx, "kill_destroy_sandbox", "destroying sandbox");
|
||||
const record = await getCurrentRecord(loopCtx);
|
||||
if (!record.activeSandboxId) {
|
||||
return;
|
||||
}
|
||||
|
||||
const { providers } = getActorRuntimeContext();
|
||||
const activeSandbox = record.sandboxes.find((sb: any) => sb.sandboxId === record.activeSandboxId) ?? null;
|
||||
const provider = providers.get(activeSandbox?.providerId ?? record.providerId);
|
||||
await provider.destroySandbox({
|
||||
workspaceId: loopCtx.state.workspaceId,
|
||||
sandboxId: record.activeSandboxId,
|
||||
});
|
||||
}
|
||||
|
||||
export async function killWriteDbActivity(loopCtx: any, msg: any): Promise<void> {
|
||||
await setTaskState(loopCtx, "kill_finalize", "finalizing kill");
|
||||
const db = loopCtx.db;
|
||||
await db.update(taskTable).set({ status: "killed", updatedAt: Date.now() }).where(eq(taskTable.id, TASK_ROW_ID)).run();
|
||||
|
||||
await db.update(taskRuntime).set({ statusMessage: "killed", updatedAt: Date.now() }).where(eq(taskRuntime.id, TASK_ROW_ID)).run();
|
||||
|
||||
await appendHistory(loopCtx, "task.kill", { reason: msg.body?.reason ?? null });
|
||||
await msg.complete({ ok: true });
|
||||
}
|
||||
|
||||
export async function handleGetActivity(loopCtx: any, msg: any): Promise<void> {
|
||||
await msg.complete(await getCurrentRecord(loopCtx));
|
||||
}
|
||||
181
foundry/packages/backend/src/actors/task/workflow/common.ts
Normal file
181
foundry/packages/backend/src/actors/task/workflow/common.ts
Normal file
|
|
@ -0,0 +1,181 @@
|
|||
// @ts-nocheck
|
||||
import { eq } from "drizzle-orm";
|
||||
import type { TaskRecord, TaskStatus } from "@sandbox-agent/foundry-shared";
|
||||
import { getOrCreateWorkspace } from "../../handles.js";
|
||||
import { task as taskTable, taskRuntime, taskSandboxes } from "../db/schema.js";
|
||||
import { historyKey } from "../../keys.js";
|
||||
|
||||
export const TASK_ROW_ID = 1;
|
||||
|
||||
export function collectErrorMessages(error: unknown): string[] {
|
||||
if (error == null) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const out: string[] = [];
|
||||
const seen = new Set<unknown>();
|
||||
let current: unknown = error;
|
||||
|
||||
while (current != null && !seen.has(current)) {
|
||||
seen.add(current);
|
||||
|
||||
if (current instanceof Error) {
|
||||
const message = current.message?.trim();
|
||||
if (message) {
|
||||
out.push(message);
|
||||
}
|
||||
current = (current as { cause?: unknown }).cause;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (typeof current === "string") {
|
||||
const message = current.trim();
|
||||
if (message) {
|
||||
out.push(message);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
return out.filter((msg, index) => out.indexOf(msg) === index);
|
||||
}
|
||||
|
||||
export function resolveErrorDetail(error: unknown): string {
|
||||
const messages = collectErrorMessages(error);
|
||||
if (messages.length === 0) {
|
||||
return String(error);
|
||||
}
|
||||
|
||||
const nonWorkflowWrapper = messages.find((msg) => !/^Step\s+"[^"]+"\s+failed\b/i.test(msg));
|
||||
return nonWorkflowWrapper ?? messages[0]!;
|
||||
}
|
||||
|
||||
export function buildAgentPrompt(task: string): string {
|
||||
return task.trim();
|
||||
}
|
||||
|
||||
export async function setTaskState(ctx: any, status: TaskStatus, statusMessage?: string): Promise<void> {
|
||||
const now = Date.now();
|
||||
const db = ctx.db;
|
||||
await db.update(taskTable).set({ status, updatedAt: now }).where(eq(taskTable.id, TASK_ROW_ID)).run();
|
||||
|
||||
if (statusMessage != null) {
|
||||
await db
|
||||
.insert(taskRuntime)
|
||||
.values({
|
||||
id: TASK_ROW_ID,
|
||||
activeSandboxId: null,
|
||||
activeSessionId: null,
|
||||
activeSwitchTarget: null,
|
||||
activeCwd: null,
|
||||
statusMessage,
|
||||
updatedAt: now,
|
||||
})
|
||||
.onConflictDoUpdate({
|
||||
target: taskRuntime.id,
|
||||
set: {
|
||||
statusMessage,
|
||||
updatedAt: now,
|
||||
},
|
||||
})
|
||||
.run();
|
||||
}
|
||||
|
||||
const workspace = await getOrCreateWorkspace(ctx, ctx.state.workspaceId);
|
||||
await workspace.notifyWorkbenchUpdated({});
|
||||
}
|
||||
|
||||
/**
 * Assembles the full TaskRecord for this actor from three sources:
 * the single task row (id = TASK_ROW_ID), its left-joined runtime row, and
 * every row in taskSandboxes. Throws when the task row does not exist.
 *
 * Git/PR-derived fields (diffStat, ciStatus, …) are always null here —
 * presumably they are populated by the sync actors elsewhere; verify against
 * callers before relying on them from this function.
 */
export async function getCurrentRecord(ctx: any): Promise<TaskRecord> {
  const db = ctx.db;
  const row = await db
    .select({
      branchName: taskTable.branchName,
      title: taskTable.title,
      task: taskTable.task,
      providerId: taskTable.providerId,
      status: taskTable.status,
      statusMessage: taskRuntime.statusMessage,
      activeSandboxId: taskRuntime.activeSandboxId,
      activeSessionId: taskRuntime.activeSessionId,
      agentType: taskTable.agentType,
      prSubmitted: taskTable.prSubmitted,
      createdAt: taskTable.createdAt,
      updatedAt: taskTable.updatedAt,
    })
    .from(taskTable)
    // Left join: the runtime row may not exist yet, so its fields can be null.
    .leftJoin(taskRuntime, eq(taskTable.id, taskRuntime.id))
    .where(eq(taskTable.id, TASK_ROW_ID))
    .get();

  if (!row) {
    throw new Error(`Task not found: ${ctx.state.taskId}`);
  }

  // All sandboxes ever associated with this task (historical and active).
  const sandboxes = await db
    .select({
      sandboxId: taskSandboxes.sandboxId,
      providerId: taskSandboxes.providerId,
      sandboxActorId: taskSandboxes.sandboxActorId,
      switchTarget: taskSandboxes.switchTarget,
      cwd: taskSandboxes.cwd,
      createdAt: taskSandboxes.createdAt,
      updatedAt: taskSandboxes.updatedAt,
    })
    .from(taskSandboxes)
    .all();

  return {
    // Identity comes from actor state, not the DB row.
    workspaceId: ctx.state.workspaceId,
    repoId: ctx.state.repoId,
    repoRemote: ctx.state.repoRemote,
    taskId: ctx.state.taskId,
    branchName: row.branchName,
    title: row.title,
    task: row.task,
    providerId: row.providerId,
    status: row.status,
    // Runtime-row fields may be absent due to the left join; normalize to null.
    statusMessage: row.statusMessage ?? null,
    activeSandboxId: row.activeSandboxId ?? null,
    activeSessionId: row.activeSessionId ?? null,
    sandboxes: sandboxes.map((sb) => ({
      sandboxId: sb.sandboxId,
      providerId: sb.providerId,
      sandboxActorId: sb.sandboxActorId ?? null,
      switchTarget: sb.switchTarget,
      cwd: sb.cwd ?? null,
      createdAt: sb.createdAt,
      updatedAt: sb.updatedAt,
    })),
    agentType: row.agentType ?? null,
    prSubmitted: Boolean(row.prSubmitted),
    // Not stored locally; left null here (filled in elsewhere — TODO confirm).
    diffStat: null,
    hasUnpushed: null,
    conflictsWithMain: null,
    parentBranch: null,
    prUrl: null,
    prAuthor: null,
    ciStatus: null,
    reviewStatus: null,
    reviewer: null,
    createdAt: row.createdAt,
    updatedAt: row.updatedAt,
  } as TaskRecord;
}
|
||||
|
||||
export async function appendHistory(ctx: any, kind: string, payload: Record<string, unknown>): Promise<void> {
|
||||
const client = ctx.client();
|
||||
const history = await client.history.getOrCreate(historyKey(ctx.state.workspaceId, ctx.state.repoId), {
|
||||
createWithInput: { workspaceId: ctx.state.workspaceId, repoId: ctx.state.repoId },
|
||||
});
|
||||
await history.append({
|
||||
kind,
|
||||
taskId: ctx.state.taskId,
|
||||
branchName: ctx.state.branchName,
|
||||
payload,
|
||||
});
|
||||
|
||||
const workspace = await getOrCreateWorkspace(ctx, ctx.state.workspaceId);
|
||||
await workspace.notifyWorkbenchUpdated({});
|
||||
}
|
||||
270
foundry/packages/backend/src/actors/task/workflow/index.ts
Normal file
270
foundry/packages/backend/src/actors/task/workflow/index.ts
Normal file
|
|
@ -0,0 +1,270 @@
|
|||
import { Loop } from "rivetkit/workflow";
|
||||
import { getActorRuntimeContext } from "../../context.js";
|
||||
import { logActorWarning, resolveErrorMessage } from "../../logging.js";
|
||||
import { getCurrentRecord } from "./common.js";
|
||||
import {
|
||||
initAssertNameActivity,
|
||||
initBootstrapDbActivity,
|
||||
initCompleteActivity,
|
||||
initCreateSandboxActivity,
|
||||
initCreateSessionActivity,
|
||||
initEnsureAgentActivity,
|
||||
initEnsureNameActivity,
|
||||
initExposeSandboxActivity,
|
||||
initFailedActivity,
|
||||
initStartSandboxInstanceActivity,
|
||||
initStartStatusSyncActivity,
|
||||
initWriteDbActivity,
|
||||
} from "./init.js";
|
||||
import {
|
||||
handleArchiveActivity,
|
||||
handleAttachActivity,
|
||||
handleGetActivity,
|
||||
handlePushActivity,
|
||||
handleSimpleCommandActivity,
|
||||
handleSwitchActivity,
|
||||
killDestroySandboxActivity,
|
||||
killWriteDbActivity,
|
||||
} from "./commands.js";
|
||||
import { idleNotifyActivity, idleSubmitPrActivity, statusUpdateActivity } from "./status-sync.js";
|
||||
import { TASK_QUEUE_NAMES } from "./queue.js";
|
||||
import {
|
||||
changeWorkbenchModel,
|
||||
closeWorkbenchSession,
|
||||
createWorkbenchSession,
|
||||
markWorkbenchUnread,
|
||||
publishWorkbenchPr,
|
||||
renameWorkbenchBranch,
|
||||
renameWorkbenchTask,
|
||||
renameWorkbenchSession,
|
||||
revertWorkbenchFile,
|
||||
sendWorkbenchMessage,
|
||||
setWorkbenchSessionUnread,
|
||||
stopWorkbenchSession,
|
||||
syncWorkbenchSessionStatus,
|
||||
updateWorkbenchDraft,
|
||||
} from "../workbench.js";
|
||||
|
||||
// Re-exported so callers can reference queue names without importing the queue module directly.
export { TASK_QUEUE_NAMES, taskWorkflowQueueName } from "./queue.js";

// Union of every queue message name this workflow consumes.
type TaskQueueName = (typeof TASK_QUEUE_NAMES)[number];

// A handler receives the workflow loop context plus the completable queue message.
type WorkflowHandler = (loopCtx: any, msg: { name: TaskQueueName; body: any; complete: (response: unknown) => Promise<void> }) => Promise<void>;
|
||||
|
||||
// Dispatch table: one handler per queue message name. Work runs inside named
// `loopCtx.step(...)` calls — step names are durable replay identifiers, so
// they must stay stable; `loopCtx.removed(name, "step")` entries are
// tombstones for steps that existed in earlier workflow versions.
const commandHandlers: Record<TaskQueueName, WorkflowHandler> = {
  "task.command.initialize": async (loopCtx, msg) => {
    const body = msg.body;

    await loopCtx.step("init-bootstrap-db", async () => initBootstrapDbActivity(loopCtx, body));
    // Tombstones for steps removed from the initialize flow.
    await loopCtx.removed("init-enqueue-provision", "step");
    await loopCtx.removed("init-dispatch-provision-v2", "step");
    const currentRecord = await loopCtx.step("init-read-current-record", async () => getCurrentRecord(loopCtx));

    try {
      await msg.complete(currentRecord);
    } catch (error) {
      // Completion can fail (e.g. requester gone); log and keep the loop alive.
      logActorWarning("task.workflow", "initialize completion failed", {
        error: resolveErrorMessage(error),
      });
    }
  },

  "task.command.provision": async (loopCtx, msg) => {
    const body = msg.body;
    await loopCtx.removed("init-failed", "step");
    try {
      // Naming must be settled before any sandbox work happens.
      await loopCtx.step("init-ensure-name", async () => initEnsureNameActivity(loopCtx));
      await loopCtx.step("init-assert-name", async () => initAssertNameActivity(loopCtx));

      const sandbox = await loopCtx.step({
        name: "init-create-sandbox",
        timeout: 180_000,
        run: async () => initCreateSandboxActivity(loopCtx, body),
      });
      const agent = await loopCtx.step({
        name: "init-ensure-agent",
        timeout: 180_000,
        run: async () => initEnsureAgentActivity(loopCtx, body, sandbox),
      });
      const sandboxInstanceReady = await loopCtx.step({
        name: "init-start-sandbox-instance",
        timeout: 60_000,
        run: async () => initStartSandboxInstanceActivity(loopCtx, body, sandbox, agent),
      });
      await loopCtx.step("init-expose-sandbox", async () => initExposeSandboxActivity(loopCtx, body, sandbox, sandboxInstanceReady));
      const session = await loopCtx.step({
        name: "init-create-session",
        timeout: 180_000,
        run: async () => initCreateSessionActivity(loopCtx, body, sandbox, sandboxInstanceReady),
      });

      await loopCtx.step("init-write-db", async () => initWriteDbActivity(loopCtx, body, sandbox, session, sandboxInstanceReady));
      await loopCtx.step("init-start-status-sync", async () => initStartStatusSyncActivity(loopCtx, body, sandbox, session));
      await loopCtx.step("init-complete", async () => initCompleteActivity(loopCtx, body, sandbox, session));
      await msg.complete({ ok: true });
    } catch (error) {
      // Any provisioning failure is recorded via the failure activity; the
      // command still completes (ok: false) so the caller unblocks.
      await loopCtx.step("init-failed-v2", async () => initFailedActivity(loopCtx, error));
      await msg.complete({ ok: false });
    }
  },

  "task.command.attach": async (loopCtx, msg) => {
    await loopCtx.step("handle-attach", async () => handleAttachActivity(loopCtx, msg));
  },

  "task.command.switch": async (loopCtx, msg) => {
    await loopCtx.step("handle-switch", async () => handleSwitchActivity(loopCtx, msg));
  },

  "task.command.push": async (loopCtx, msg) => {
    await loopCtx.step("handle-push", async () => handlePushActivity(loopCtx, msg));
  },

  "task.command.sync": async (loopCtx, msg) => {
    await loopCtx.step("handle-sync", async () => handleSimpleCommandActivity(loopCtx, msg, "sync requested", "task.sync"));
  },

  "task.command.merge": async (loopCtx, msg) => {
    await loopCtx.step("handle-merge", async () => handleSimpleCommandActivity(loopCtx, msg, "merge requested", "task.merge"));
  },

  "task.command.archive": async (loopCtx, msg) => {
    await loopCtx.step("handle-archive", async () => handleArchiveActivity(loopCtx, msg));
  },

  "task.command.kill": async (loopCtx, msg) => {
    // Two steps: destroy the sandbox first, then persist the killed state
    // (killWriteDbActivity completes the message).
    await loopCtx.step("kill-destroy-sandbox", async () => killDestroySandboxActivity(loopCtx));
    await loopCtx.step("kill-write-db", async () => killWriteDbActivity(loopCtx, msg));
  },

  "task.command.get": async (loopCtx, msg) => {
    await loopCtx.step("handle-get", async () => handleGetActivity(loopCtx, msg));
  },

  // ---- Workbench commands: backend-owned UI state for this task ----

  "task.command.workbench.mark_unread": async (loopCtx, msg) => {
    await loopCtx.step("workbench-mark-unread", async () => markWorkbenchUnread(loopCtx));
    await msg.complete({ ok: true });
  },

  "task.command.workbench.rename_task": async (loopCtx, msg) => {
    await loopCtx.step("workbench-rename-task", async () => renameWorkbenchTask(loopCtx, msg.body.value));
    await msg.complete({ ok: true });
  },

  "task.command.workbench.rename_branch": async (loopCtx, msg) => {
    // Branch rename is a real git operation, hence the generous timeout.
    await loopCtx.step({
      name: "workbench-rename-branch",
      timeout: 5 * 60_000,
      run: async () => renameWorkbenchBranch(loopCtx, msg.body.value),
    });
    await msg.complete({ ok: true });
  },

  "task.command.workbench.create_session": async (loopCtx, msg) => {
    const created = await loopCtx.step({
      name: "workbench-create-session",
      timeout: 5 * 60_000,
      run: async () => createWorkbenchSession(loopCtx, msg.body?.model),
    });
    await msg.complete(created);
  },

  "task.command.workbench.rename_session": async (loopCtx, msg) => {
    await loopCtx.step("workbench-rename-session", async () => renameWorkbenchSession(loopCtx, msg.body.sessionId, msg.body.title));
    await msg.complete({ ok: true });
  },

  "task.command.workbench.set_session_unread": async (loopCtx, msg) => {
    await loopCtx.step("workbench-set-session-unread", async () => setWorkbenchSessionUnread(loopCtx, msg.body.sessionId, msg.body.unread));
    await msg.complete({ ok: true });
  },

  "task.command.workbench.update_draft": async (loopCtx, msg) => {
    await loopCtx.step("workbench-update-draft", async () => updateWorkbenchDraft(loopCtx, msg.body.sessionId, msg.body.text, msg.body.attachments));
    await msg.complete({ ok: true });
  },

  "task.command.workbench.change_model": async (loopCtx, msg) => {
    await loopCtx.step("workbench-change-model", async () => changeWorkbenchModel(loopCtx, msg.body.sessionId, msg.body.model));
    await msg.complete({ ok: true });
  },

  "task.command.workbench.send_message": async (loopCtx, msg) => {
    await loopCtx.step({
      name: "workbench-send-message",
      timeout: 10 * 60_000,
      run: async () => sendWorkbenchMessage(loopCtx, msg.body.sessionId, msg.body.text, msg.body.attachments),
    });
    await msg.complete({ ok: true });
  },

  "task.command.workbench.stop_session": async (loopCtx, msg) => {
    await loopCtx.step({
      name: "workbench-stop-session",
      timeout: 5 * 60_000,
      run: async () => stopWorkbenchSession(loopCtx, msg.body.sessionId),
    });
    await msg.complete({ ok: true });
  },

  "task.command.workbench.sync_session_status": async (loopCtx, msg) => {
    await loopCtx.step("workbench-sync-session-status", async () => syncWorkbenchSessionStatus(loopCtx, msg.body.sessionId, msg.body.status, msg.body.at));
    await msg.complete({ ok: true });
  },

  "task.command.workbench.close_session": async (loopCtx, msg) => {
    await loopCtx.step({
      name: "workbench-close-session",
      timeout: 5 * 60_000,
      run: async () => closeWorkbenchSession(loopCtx, msg.body.sessionId),
    });
    await msg.complete({ ok: true });
  },

  "task.command.workbench.publish_pr": async (loopCtx, msg) => {
    await loopCtx.step({
      name: "workbench-publish-pr",
      timeout: 10 * 60_000,
      run: async () => publishWorkbenchPr(loopCtx),
    });
    await msg.complete({ ok: true });
  },

  "task.command.workbench.revert_file": async (loopCtx, msg) => {
    await loopCtx.step({
      name: "workbench-revert-file",
      timeout: 5 * 60_000,
      run: async () => revertWorkbenchFile(loopCtx, msg.body.path),
    });
    await msg.complete({ ok: true });
  },

  "task.status_sync.result": async (loopCtx, msg) => {
    // Fed by the status-sync polling actor; not a user command.
    const transitionedToIdle = await loopCtx.step("status-update", async () => statusUpdateActivity(loopCtx, msg.body));

    if (transitionedToIdle) {
      const { config } = getActorRuntimeContext();
      if (config.auto_submit) {
        await loopCtx.step("idle-submit-pr", async () => idleSubmitPrActivity(loopCtx));
      }
      await loopCtx.step("idle-notify", async () => idleNotifyActivity(loopCtx));
    }
  },
};
|
||||
|
||||
export async function runTaskWorkflow(ctx: any): Promise<void> {
|
||||
await ctx.loop("task-command-loop", async (loopCtx: any) => {
|
||||
const msg = await loopCtx.queue.next("next-command", {
|
||||
names: [...TASK_QUEUE_NAMES],
|
||||
completable: true,
|
||||
});
|
||||
if (!msg) {
|
||||
return Loop.continue(undefined);
|
||||
}
|
||||
const handler = commandHandlers[msg.name as TaskQueueName];
|
||||
if (handler) {
|
||||
await handler(loopCtx, msg);
|
||||
}
|
||||
return Loop.continue(undefined);
|
||||
});
|
||||
}
|
||||
607
foundry/packages/backend/src/actors/task/workflow/init.ts
Normal file
607
foundry/packages/backend/src/actors/task/workflow/init.ts
Normal file
|
|
@ -0,0 +1,607 @@
|
|||
// @ts-nocheck
|
||||
import { desc, eq } from "drizzle-orm";
|
||||
import { resolveCreateFlowDecision } from "../../../services/create-flow.js";
|
||||
import { getActorRuntimeContext } from "../../context.js";
|
||||
import { getOrCreateTaskStatusSync, getOrCreateHistory, getOrCreateProject, getOrCreateSandboxInstance, getSandboxInstance, selfTask } from "../../handles.js";
|
||||
import { logActorWarning, resolveErrorMessage } from "../../logging.js";
|
||||
import { task as taskTable, taskRuntime, taskSandboxes } from "../db/schema.js";
|
||||
import { TASK_ROW_ID, appendHistory, buildAgentPrompt, collectErrorMessages, resolveErrorDetail, setTaskState } from "./common.js";
|
||||
import { taskWorkflowQueueName } from "./queue.js";
|
||||
|
||||
const DEFAULT_INIT_CREATE_SANDBOX_ACTIVITY_TIMEOUT_MS = 180_000;
|
||||
|
||||
function getInitCreateSandboxActivityTimeoutMs(): number {
|
||||
const raw = process.env.HF_INIT_CREATE_SANDBOX_ACTIVITY_TIMEOUT_MS;
|
||||
if (!raw) {
|
||||
return DEFAULT_INIT_CREATE_SANDBOX_ACTIVITY_TIMEOUT_MS;
|
||||
}
|
||||
const parsed = Number(raw);
|
||||
if (!Number.isFinite(parsed) || parsed <= 0) {
|
||||
return DEFAULT_INIT_CREATE_SANDBOX_ACTIVITY_TIMEOUT_MS;
|
||||
}
|
||||
return Math.floor(parsed);
|
||||
}
|
||||
|
||||
function debugInit(loopCtx: any, message: string, context?: Record<string, unknown>): void {
|
||||
loopCtx.log.debug({
|
||||
msg: message,
|
||||
scope: "task.init",
|
||||
workspaceId: loopCtx.state.workspaceId,
|
||||
repoId: loopCtx.state.repoId,
|
||||
taskId: loopCtx.state.taskId,
|
||||
...(context ?? {}),
|
||||
});
|
||||
}
|
||||
|
||||
async function withActivityTimeout<T>(timeoutMs: number, label: string, run: () => Promise<T>): Promise<T> {
|
||||
let timer: ReturnType<typeof setTimeout> | null = null;
|
||||
try {
|
||||
return await Promise.race([
|
||||
run(),
|
||||
new Promise<T>((_, reject) => {
|
||||
timer = setTimeout(() => {
|
||||
reject(new Error(`${label} timed out after ${timeoutMs}ms`));
|
||||
}, timeoutMs);
|
||||
}),
|
||||
]);
|
||||
} finally {
|
||||
if (timer) {
|
||||
clearTimeout(timer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * First initialize step: upserts the singleton task row and its runtime row
 * from actor state. Idempotent — re-running resets status to
 * "init_bootstrap_db" and clears the runtime's active* fields. The initial
 * status message is "provisioning" when naming is already settled, otherwise
 * "naming".
 *
 * @throws Error wrapping the underlying DB failure with context.
 */
export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise<void> {
  // Command body may override the provider recorded in actor state.
  const providerId = body?.providerId ?? loopCtx.state.providerId;
  const { config } = getActorRuntimeContext();
  const now = Date.now();
  const db = loopCtx.db;
  const initialStatusMessage = loopCtx.state.branchName && loopCtx.state.title ? "provisioning" : "naming";

  try {
    await db
      .insert(taskTable)
      .values({
        id: TASK_ROW_ID,
        branchName: loopCtx.state.branchName,
        title: loopCtx.state.title,
        task: loopCtx.state.task,
        providerId,
        status: "init_bootstrap_db",
        agentType: loopCtx.state.agentType ?? config.default_agent,
        createdAt: now,
        updatedAt: now,
      })
      .onConflictDoUpdate({
        target: taskTable.id,
        set: {
          branchName: loopCtx.state.branchName,
          title: loopCtx.state.title,
          task: loopCtx.state.task,
          providerId,
          status: "init_bootstrap_db",
          agentType: loopCtx.state.agentType ?? config.default_agent,
          updatedAt: now,
        },
      })
      .run();

    // Runtime row: active* fields are reset in BOTH branches here (unlike
    // setTaskState, which preserves them on conflict).
    await db
      .insert(taskRuntime)
      .values({
        id: TASK_ROW_ID,
        activeSandboxId: null,
        activeSessionId: null,
        activeSwitchTarget: null,
        activeCwd: null,
        statusMessage: initialStatusMessage,
        updatedAt: now,
      })
      .onConflictDoUpdate({
        target: taskRuntime.id,
        set: {
          activeSandboxId: null,
          activeSessionId: null,
          activeSwitchTarget: null,
          activeCwd: null,
          statusMessage: initialStatusMessage,
          updatedAt: now,
        },
      })
      .run();
  } catch (error) {
    const detail = resolveErrorMessage(error);
    throw new Error(`task init bootstrap db failed: ${detail}`);
  }
}
|
||||
|
||||
export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Promise<void> {
|
||||
await setTaskState(loopCtx, "init_enqueue_provision", "provision queued");
|
||||
const self = selfTask(loopCtx);
|
||||
void self
|
||||
.send(taskWorkflowQueueName("task.command.provision"), body, {
|
||||
wait: false,
|
||||
})
|
||||
.catch((error: unknown) => {
|
||||
logActorWarning("task.init", "background provision command failed", {
|
||||
workspaceId: loopCtx.state.workspaceId,
|
||||
repoId: loopCtx.state.repoId,
|
||||
taskId: loopCtx.state.taskId,
|
||||
error: resolveErrorMessage(error),
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Resolves the task's title and branch name if not already settled.
 *
 * Fast path: the task row already has both — copy them into actor state and
 * return. Otherwise: fetch the repo (best-effort), gather remote + reserved
 * branch names to avoid collisions, run the create-flow decision, persist the
 * result, reserve the branch on the project, and record a "task.named" event.
 */
export async function initEnsureNameActivity(loopCtx: any): Promise<void> {
  await setTaskState(loopCtx, "init_ensure_name", "determining title and branch");
  const existing = await loopCtx.db
    .select({
      branchName: taskTable.branchName,
      title: taskTable.title,
    })
    .from(taskTable)
    .where(eq(taskTable.id, TASK_ROW_ID))
    .get();

  // Already named (e.g. workflow replay): just hydrate actor state.
  if (existing?.branchName && existing?.title) {
    loopCtx.state.branchName = existing.branchName;
    loopCtx.state.title = existing.title;
    return;
  }

  const { driver } = getActorRuntimeContext();
  // Best-effort fetch so the remote branch list below is fresh; a failure is
  // logged but naming proceeds with whatever refs are cached locally.
  try {
    await driver.git.fetch(loopCtx.state.repoLocalPath);
  } catch (error) {
    logActorWarning("task.init", "fetch before naming failed", {
      workspaceId: loopCtx.state.workspaceId,
      repoId: loopCtx.state.repoId,
      taskId: loopCtx.state.taskId,
      error: resolveErrorMessage(error),
    });
  }
  const remoteBranches = (await driver.git.listRemoteBranches(loopCtx.state.repoLocalPath)).map((branch: any) => branch.branchName);

  const project = await getOrCreateProject(loopCtx, loopCtx.state.workspaceId, loopCtx.state.repoId, loopCtx.state.repoRemote);
  // Branches already reserved by other tasks on this project.
  const reservedBranches = await project.listReservedBranches({});

  const resolved = resolveCreateFlowDecision({
    task: loopCtx.state.task,
    explicitTitle: loopCtx.state.explicitTitle ?? undefined,
    explicitBranchName: loopCtx.state.explicitBranchName ?? undefined,
    localBranches: remoteBranches,
    taskBranches: reservedBranches,
  });

  const now = Date.now();
  await loopCtx.db
    .update(taskTable)
    .set({
      branchName: resolved.branchName,
      title: resolved.title,
      updatedAt: now,
    })
    .where(eq(taskTable.id, TASK_ROW_ID))
    .run();

  // Sync actor state and clear the one-shot explicit overrides.
  loopCtx.state.branchName = resolved.branchName;
  loopCtx.state.title = resolved.title;
  loopCtx.state.explicitTitle = null;
  loopCtx.state.explicitBranchName = null;

  await loopCtx.db
    .update(taskRuntime)
    .set({
      statusMessage: "provisioning",
      updatedAt: now,
    })
    .where(eq(taskRuntime.id, TASK_ROW_ID))
    .run();

  // Reserve the branch on the project so sibling tasks cannot claim it.
  await project.registerTaskBranch({
    taskId: loopCtx.state.taskId,
    branchName: resolved.branchName,
  });

  await appendHistory(loopCtx, "task.named", {
    title: resolved.title,
    branchName: resolved.branchName,
  });
}
|
||||
|
||||
export async function initAssertNameActivity(loopCtx: any): Promise<void> {
|
||||
await setTaskState(loopCtx, "init_assert_name", "validating naming");
|
||||
if (!loopCtx.state.branchName) {
|
||||
throw new Error("task branchName is not initialized");
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Obtains a sandbox for the task. When the provider supports session reuse,
 * first tries to resume the most recently known sandbox (active one first,
 * then the newest recorded one); on resume failure it falls back to creating
 * a fresh sandbox. Both paths run under the configurable activity timeout.
 *
 * @returns the provider's sandbox descriptor (resumed or newly created).
 */
export async function initCreateSandboxActivity(loopCtx: any, body: any): Promise<any> {
  await setTaskState(loopCtx, "init_create_sandbox", "creating sandbox");
  const { providers } = getActorRuntimeContext();
  const providerId = body?.providerId ?? loopCtx.state.providerId;
  const provider = providers.get(providerId);
  const timeoutMs = getInitCreateSandboxActivityTimeoutMs();
  const startedAt = Date.now();

  debugInit(loopCtx, "init_create_sandbox started", {
    providerId,
    timeoutMs,
    supportsSessionReuse: provider.capabilities().supportsSessionReuse,
  });

  if (provider.capabilities().supportsSessionReuse) {
    // Candidate sandbox to resume: the active one, else the newest for this provider.
    const runtime = await loopCtx.db.select({ activeSandboxId: taskRuntime.activeSandboxId }).from(taskRuntime).where(eq(taskRuntime.id, TASK_ROW_ID)).get();

    const existing = await loopCtx.db
      .select({ sandboxId: taskSandboxes.sandboxId })
      .from(taskSandboxes)
      .where(eq(taskSandboxes.providerId, providerId))
      .orderBy(desc(taskSandboxes.updatedAt))
      .limit(1)
      .get();

    const sandboxId = runtime?.activeSandboxId ?? existing?.sandboxId ?? null;
    if (sandboxId) {
      debugInit(loopCtx, "init_create_sandbox attempting resume", { sandboxId });
      try {
        const resumed = await withActivityTimeout(timeoutMs, "resumeSandbox", async () =>
          provider.resumeSandbox({
            workspaceId: loopCtx.state.workspaceId,
            sandboxId,
          }),
        );

        debugInit(loopCtx, "init_create_sandbox resume succeeded", {
          sandboxId: resumed.sandboxId,
          durationMs: Date.now() - startedAt,
        });
        return resumed;
      } catch (error) {
        // Resume failures are non-fatal: fall through to fresh creation.
        logActorWarning("task.init", "resume sandbox failed; creating a new sandbox", {
          workspaceId: loopCtx.state.workspaceId,
          repoId: loopCtx.state.repoId,
          taskId: loopCtx.state.taskId,
          sandboxId,
          error: resolveErrorMessage(error),
        });
      }
    }
  }

  debugInit(loopCtx, "init_create_sandbox creating fresh sandbox", {
    branchName: loopCtx.state.branchName,
  });

  try {
    const sandbox = await withActivityTimeout(timeoutMs, "createSandbox", async () =>
      provider.createSandbox({
        workspaceId: loopCtx.state.workspaceId,
        repoId: loopCtx.state.repoId,
        repoRemote: loopCtx.state.repoRemote,
        branchName: loopCtx.state.branchName,
        taskId: loopCtx.state.taskId,
        // Provider can surface progress through the init debug channel.
        debug: (message, context) => debugInit(loopCtx, message, context),
      }),
    );

    debugInit(loopCtx, "init_create_sandbox create succeeded", {
      sandboxId: sandbox.sandboxId,
      durationMs: Date.now() - startedAt,
    });
    return sandbox;
  } catch (error) {
    debugInit(loopCtx, "init_create_sandbox failed", {
      durationMs: Date.now() - startedAt,
      error: resolveErrorMessage(error),
    });
    throw error;
  }
}
|
||||
|
||||
export async function initEnsureAgentActivity(loopCtx: any, body: any, sandbox: any): Promise<any> {
|
||||
await setTaskState(loopCtx, "init_ensure_agent", "ensuring sandbox agent");
|
||||
const { providers } = getActorRuntimeContext();
|
||||
const providerId = body?.providerId ?? loopCtx.state.providerId;
|
||||
const provider = providers.get(providerId);
|
||||
return await provider.ensureSandboxAgent({
|
||||
workspaceId: loopCtx.state.workspaceId,
|
||||
sandboxId: sandbox.sandboxId,
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Init activity: spin up (or reuse) the SandboxInstanceActor for this task's
 * sandbox and mark it ready with the agent's endpoint/token.
 *
 * Never throws: failures are folded into an `{ ok: false, error }` result so
 * the workflow can record the error without aborting the loop step.
 *
 * @param loopCtx workflow loop context (state + db)
 * @param body    command body; may override the provider id
 * @param sandbox sandbox descriptor (sandboxId, metadata)
 * @param agent   agent descriptor from initEnsureAgentActivity (endpoint, token)
 * @returns { ok: true, actorId } on success, { ok: false, error } on failure
 */
export async function initStartSandboxInstanceActivity(loopCtx: any, body: any, sandbox: any, agent: any): Promise<any> {
  await setTaskState(loopCtx, "init_start_sandbox_instance", "starting sandbox runtime");
  try {
    const providerId = body?.providerId ?? loopCtx.state.providerId;
    const sandboxInstance = await getOrCreateSandboxInstance(loopCtx, loopCtx.state.workspaceId, providerId, sandbox.sandboxId, {
      workspaceId: loopCtx.state.workspaceId,
      providerId,
      sandboxId: sandbox.sandboxId,
    });

    // Push the latest metadata + agent credentials into the instance actor.
    await sandboxInstance.ensure({
      metadata: sandbox.metadata,
      status: "ready",
      agentEndpoint: agent.endpoint,
      agentToken: agent.token,
    });

    // `resolve` is optional on the handle; when absent we simply report no actor id.
    const actorId = typeof (sandboxInstance as any).resolve === "function" ? await (sandboxInstance as any).resolve() : null;

    return {
      ok: true as const,
      actorId: typeof actorId === "string" ? actorId : null,
    };
  } catch (error) {
    const detail = error instanceof Error ? error.message : String(error);
    return {
      ok: false as const,
      error: `sandbox-instance ensure failed: ${detail}`,
    };
  }
}
|
||||
|
||||
/**
 * Init activity: create the initial agent session inside the sandbox.
 *
 * Short-circuits with an error-shaped session object when the preceding
 * sandbox-instance step reported failure, so downstream steps (write-db,
 * complete) can treat success and failure uniformly.
 *
 * @param loopCtx              workflow loop context (state + db)
 * @param body                 command body; may override the provider id
 * @param sandbox              sandbox descriptor (sandboxId, metadata)
 * @param sandboxInstanceReady result of initStartSandboxInstanceActivity
 * @returns the provider session object, or { id: null, status: "error", error }
 */
export async function initCreateSessionActivity(loopCtx: any, body: any, sandbox: any, sandboxInstanceReady: any): Promise<any> {
  await setTaskState(loopCtx, "init_create_session", "creating agent session");
  if (!sandboxInstanceReady.ok) {
    return {
      id: null,
      status: "error",
      error: sandboxInstanceReady.error ?? "sandbox instance is not ready",
    } as const;
  }

  const { config } = getActorRuntimeContext();
  const providerId = body?.providerId ?? loopCtx.state.providerId;
  const sandboxInstance = getSandboxInstance(loopCtx, loopCtx.state.workspaceId, providerId, sandbox.sandboxId);

  // Sessions start in the sandbox's working directory when the provider reported one.
  const cwd = sandbox.metadata && typeof (sandbox.metadata as any).cwd === "string" ? ((sandbox.metadata as any).cwd as string) : undefined;

  return await sandboxInstance.createSession({
    // An explicit initial prompt wins; otherwise the prompt is built from the task text.
    prompt: typeof loopCtx.state.initialPrompt === "string" ? loopCtx.state.initialPrompt : buildAgentPrompt(loopCtx.state.task),
    cwd,
    agent: (loopCtx.state.agentType ?? config.default_agent) as any,
  });
}
|
||||
|
||||
/**
 * Init activity: persist the sandbox as this task's active sandbox.
 *
 * Upserts the per-sandbox row in `taskSandboxes` (keyed by sandboxId), then
 * points `taskRuntime` at it as the active sandbox. Both writes stamp
 * "sandbox ready" so the UI reflects progress before the session exists.
 *
 * @param loopCtx              workflow loop context (state + db)
 * @param body                 command body; may override the provider id
 * @param sandbox              sandbox descriptor (sandboxId, metadata, switchTarget)
 * @param sandboxInstanceReady optional result carrying the sandbox actor id
 */
export async function initExposeSandboxActivity(loopCtx: any, body: any, sandbox: any, sandboxInstanceReady?: { actorId?: string | null }): Promise<void> {
  const providerId = body?.providerId ?? loopCtx.state.providerId;
  const now = Date.now();
  const db = loopCtx.db;
  // cwd comes from provider metadata; null when the provider did not report one.
  const activeCwd = sandbox.metadata && typeof (sandbox.metadata as any).cwd === "string" ? ((sandbox.metadata as any).cwd as string) : null;
  // Only persist a non-empty actor id; anything else is treated as unknown.
  const sandboxActorId = typeof sandboxInstanceReady?.actorId === "string" && sandboxInstanceReady.actorId.length > 0 ? sandboxInstanceReady.actorId : null;

  // Upsert the sandbox row so repeated initialization of the same sandbox just refreshes it.
  await db
    .insert(taskSandboxes)
    .values({
      sandboxId: sandbox.sandboxId,
      providerId,
      sandboxActorId,
      switchTarget: sandbox.switchTarget,
      cwd: activeCwd,
      statusMessage: "sandbox ready",
      createdAt: now,
      updatedAt: now,
    })
    .onConflictDoUpdate({
      target: taskSandboxes.sandboxId,
      set: {
        providerId,
        sandboxActorId,
        switchTarget: sandbox.switchTarget,
        cwd: activeCwd,
        statusMessage: "sandbox ready",
        updatedAt: now,
      },
    })
    .run();

  // Mark this sandbox as the task's single active sandbox in the runtime row.
  await db
    .update(taskRuntime)
    .set({
      activeSandboxId: sandbox.sandboxId,
      activeSwitchTarget: sandbox.switchTarget,
      activeCwd,
      statusMessage: "sandbox ready",
      updatedAt: now,
    })
    .where(eq(taskRuntime.id, TASK_ROW_ID))
    .run();
}
|
||||
|
||||
/**
 * Init activity: persist the final outcome of initialization to the task DB.
 *
 * Derives a single health verdict from the session result and writes three
 * rows in order: the task row (status running/error), the per-sandbox row
 * (upsert keyed by sandboxId), and the runtime row (active sandbox/session
 * pointers). A session with an "error" status clears the active session id
 * but keeps the active sandbox so it can be inspected or retried.
 *
 * @param loopCtx              workflow loop context (state + db)
 * @param body                 command body; may override the provider id
 * @param sandbox              sandbox descriptor (sandboxId, metadata, switchTarget)
 * @param session              result of initCreateSessionActivity (may be error-shaped)
 * @param sandboxInstanceReady optional result carrying the sandbox actor id
 */
export async function initWriteDbActivity(
  loopCtx: any,
  body: any,
  sandbox: any,
  session: any,
  sandboxInstanceReady?: { actorId?: string | null },
): Promise<void> {
  await setTaskState(loopCtx, "init_write_db", "persisting task runtime");
  const providerId = body?.providerId ?? loopCtx.state.providerId;
  const { config } = getActorRuntimeContext();
  const now = Date.now();
  const db = loopCtx.db;
  const sessionId = session?.id ?? null;
  // Healthy = session exists and is not in an error state.
  const sessionHealthy = Boolean(sessionId) && session?.status !== "error";
  const activeSessionId = sessionHealthy ? sessionId : null;
  const statusMessage = sessionHealthy ? "session created" : session?.status === "error" ? (session.error ?? "session create failed") : "session unavailable";

  const activeCwd = sandbox.metadata && typeof (sandbox.metadata as any).cwd === "string" ? ((sandbox.metadata as any).cwd as string) : null;
  const sandboxActorId = typeof sandboxInstanceReady?.actorId === "string" && sandboxInstanceReady.actorId.length > 0 ? sandboxInstanceReady.actorId : null;

  // 1) Task row: overall status reflects session health.
  await db
    .update(taskTable)
    .set({
      providerId,
      status: sessionHealthy ? "running" : "error",
      agentType: loopCtx.state.agentType ?? config.default_agent,
      updatedAt: now,
    })
    .where(eq(taskTable.id, TASK_ROW_ID))
    .run();

  // 2) Per-sandbox row: upsert so re-initialization refreshes the existing row.
  await db
    .insert(taskSandboxes)
    .values({
      sandboxId: sandbox.sandboxId,
      providerId,
      sandboxActorId,
      switchTarget: sandbox.switchTarget,
      cwd: activeCwd,
      statusMessage,
      createdAt: now,
      updatedAt: now,
    })
    .onConflictDoUpdate({
      target: taskSandboxes.sandboxId,
      set: {
        providerId,
        sandboxActorId,
        switchTarget: sandbox.switchTarget,
        cwd: activeCwd,
        statusMessage,
        updatedAt: now,
      },
    })
    .run();

  // 3) Runtime row: active sandbox always points at this sandbox; active session
  //    only when the session is healthy.
  await db
    .insert(taskRuntime)
    .values({
      id: TASK_ROW_ID,
      activeSandboxId: sandbox.sandboxId,
      activeSessionId,
      activeSwitchTarget: sandbox.switchTarget,
      activeCwd,
      statusMessage,
      updatedAt: now,
    })
    .onConflictDoUpdate({
      target: taskRuntime.id,
      set: {
        activeSandboxId: sandbox.sandboxId,
        activeSessionId,
        activeSwitchTarget: sandbox.switchTarget,
        activeCwd,
        statusMessage,
        updatedAt: now,
      },
    })
    .run();
}
|
||||
|
||||
/**
 * Init activity: start the SessionStatusSync polling worker for the new session.
 *
 * No-op when the session failed to create — there is nothing to poll.
 * After starting the worker, forces one immediate sync so the task state
 * reflects the session right away instead of waiting for the first interval.
 *
 * @param loopCtx workflow loop context (state + db)
 * @param body    command body; may override the provider id
 * @param sandbox sandbox descriptor (sandboxId)
 * @param session session result; skipped when null or error-shaped
 */
export async function initStartStatusSyncActivity(loopCtx: any, body: any, sandbox: any, session: any): Promise<void> {
  const sessionId = session?.id ?? null;
  if (!sessionId || session?.status === "error") {
    return;
  }

  await setTaskState(loopCtx, "init_start_status_sync", "starting session status sync");
  const providerId = body?.providerId ?? loopCtx.state.providerId;
  const sync = await getOrCreateTaskStatusSync(loopCtx, loopCtx.state.workspaceId, loopCtx.state.repoId, loopCtx.state.taskId, sandbox.sandboxId, sessionId, {
    workspaceId: loopCtx.state.workspaceId,
    repoId: loopCtx.state.repoId,
    taskId: loopCtx.state.taskId,
    providerId,
    sandboxId: sandbox.sandboxId,
    sessionId,
    // Poll every 2 seconds.
    intervalMs: 2_000,
  });

  await sync.start();
  // Kick one immediate poll so status is fresh without waiting for the interval.
  await sync.force();
}
|
||||
|
||||
/**
 * Init activity: finalize initialization and record the outcome in history.
 *
 * Healthy session → state "init_complete", a "task.initialized" history event,
 * and `state.initialized = true`. Unhealthy session → state "error", a
 * "task.error" history event, and `state.initialized = false`.
 *
 * @param loopCtx workflow loop context (state + db)
 * @param body    command body; may override the provider id
 * @param sandbox sandbox descriptor (sandboxId)
 * @param session session result; drives the success/failure branch
 */
export async function initCompleteActivity(loopCtx: any, body: any, sandbox: any, session: any): Promise<void> {
  const providerId = body?.providerId ?? loopCtx.state.providerId;
  const sessionId = session?.id ?? null;
  const sessionHealthy = Boolean(sessionId) && session?.status !== "error";
  if (sessionHealthy) {
    await setTaskState(loopCtx, "init_complete", "task initialized");

    // Record the successful initialization on the workspace history feed.
    const history = await getOrCreateHistory(loopCtx, loopCtx.state.workspaceId, loopCtx.state.repoId);
    await history.append({
      kind: "task.initialized",
      taskId: loopCtx.state.taskId,
      branchName: loopCtx.state.branchName,
      payload: { providerId, sandboxId: sandbox.sandboxId, sessionId },
    });

    loopCtx.state.initialized = true;
    return;
  }

  // Failure path: surface the session error (or a generic message) as the task error.
  const detail = session?.status === "error" ? (session.error ?? "session create failed") : "session unavailable";
  await setTaskState(loopCtx, "error", detail);
  await appendHistory(loopCtx, "task.error", {
    detail,
    messages: [detail],
  });
  loopCtx.state.initialized = false;
}
|
||||
|
||||
/**
 * Failure handler for the whole init workflow: persist an error state so the
 * task is recoverable/inspectable after a crash mid-initialization.
 *
 * Upserts the task row with status "error" (creating it if initialization
 * failed before the row existed), clears all active pointers in the runtime
 * row, and appends a "task.error" history event with the collected messages.
 *
 * @param loopCtx workflow loop context (state + db)
 * @param error   the error that aborted initialization
 */
export async function initFailedActivity(loopCtx: any, error: unknown): Promise<void> {
  const now = Date.now();
  const detail = resolveErrorDetail(error);
  const messages = collectErrorMessages(error);
  const db = loopCtx.db;
  const { config, providers } = getActorRuntimeContext();
  // State may not have a provider yet if init failed very early; fall back to the default.
  const providerId = loopCtx.state.providerId ?? providers.defaultProviderId();

  // Upsert (not update) the task row: init may have failed before the row was written.
  await db
    .insert(taskTable)
    .values({
      id: TASK_ROW_ID,
      branchName: loopCtx.state.branchName ?? null,
      title: loopCtx.state.title ?? null,
      task: loopCtx.state.task,
      providerId,
      status: "error",
      agentType: loopCtx.state.agentType ?? config.default_agent,
      createdAt: now,
      updatedAt: now,
    })
    .onConflictDoUpdate({
      target: taskTable.id,
      set: {
        branchName: loopCtx.state.branchName ?? null,
        title: loopCtx.state.title ?? null,
        task: loopCtx.state.task,
        providerId,
        status: "error",
        agentType: loopCtx.state.agentType ?? config.default_agent,
        updatedAt: now,
      },
    })
    .run();

  // Clear every active pointer: no sandbox/session should be considered live after a failed init.
  await db
    .insert(taskRuntime)
    .values({
      id: TASK_ROW_ID,
      activeSandboxId: null,
      activeSessionId: null,
      activeSwitchTarget: null,
      activeCwd: null,
      statusMessage: detail,
      updatedAt: now,
    })
    .onConflictDoUpdate({
      target: taskRuntime.id,
      set: {
        activeSandboxId: null,
        activeSessionId: null,
        activeSwitchTarget: null,
        activeCwd: null,
        statusMessage: detail,
        updatedAt: now,
      },
    })
    .run();

  await appendHistory(loopCtx, "task.error", {
    detail,
    messages,
  });
}
|
||||
84
foundry/packages/backend/src/actors/task/workflow/push.ts
Normal file
84
foundry/packages/backend/src/actors/task/workflow/push.ts
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
// @ts-nocheck
|
||||
import { eq } from "drizzle-orm";
|
||||
import { getActorRuntimeContext } from "../../context.js";
|
||||
import { taskRuntime, taskSandboxes } from "../db/schema.js";
|
||||
import { TASK_ROW_ID, appendHistory, getCurrentRecord } from "./common.js";
|
||||
|
||||
/** Options for pushActiveBranchActivity. */
export interface PushActiveBranchOptions {
  /** Free-form reason recorded in the history event (e.g. "auto_submit_idle"). */
  reason?: string | null;
  /** History event kind; defaults to "task.push" when omitted. */
  historyKind?: string;
}
|
||||
|
||||
/**
 * Push the task's branch from the active sandbox to origin.
 *
 * Resolves the active sandbox + branch from the current task record, stamps a
 * "pushing branch" status on the runtime and sandbox rows, runs `git push`
 * inside the sandbox via the provider, then stamps completion and appends a
 * history event. Throws when preconditions are missing or the push fails —
 * the "pushing" status message is intentionally left in place on failure for
 * diagnosis (no rollback).
 *
 * @param loopCtx workflow loop context (state + db)
 * @param options reason/history-kind overrides for the history event
 * @throws when there is no active sandbox, no branch, no cwd, or git push exits non-zero
 */
export async function pushActiveBranchActivity(loopCtx: any, options: PushActiveBranchOptions = {}): Promise<void> {
  const record = await getCurrentRecord(loopCtx);
  const activeSandboxId = record.activeSandboxId;
  // In-memory state wins; fall back to the persisted record.
  const branchName = loopCtx.state.branchName ?? record.branchName;

  if (!activeSandboxId) {
    throw new Error("cannot push: no active sandbox");
  }
  if (!branchName) {
    throw new Error("cannot push: task branch is not set");
  }

  const activeSandbox = record.sandboxes.find((sandbox: any) => sandbox.sandboxId === activeSandboxId) ?? null;
  const providerId = activeSandbox?.providerId ?? record.providerId;
  const cwd = activeSandbox?.cwd ?? null;
  if (!cwd) {
    throw new Error("cannot push: active sandbox cwd is not set");
  }

  const { providers } = getActorRuntimeContext();
  const provider = providers.get(providerId);

  // Surface "pushing" on both the runtime row and the sandbox row before starting.
  const now = Date.now();
  await loopCtx.db
    .update(taskRuntime)
    .set({ statusMessage: `pushing branch ${branchName}`, updatedAt: now })
    .where(eq(taskRuntime.id, TASK_ROW_ID))
    .run();

  await loopCtx.db
    .update(taskSandboxes)
    .set({ statusMessage: `pushing branch ${branchName}`, updatedAt: now })
    .where(eq(taskSandboxes.sandboxId, activeSandboxId))
    .run();

  // NOTE(review): JSON.stringify is used here as shell quoting for cwd/branch.
  // It matches shell double-quoting for typical paths/branch names, but `$`,
  // backticks and backslashes inside them would still be shell-expanded —
  // confirm inputs are constrained, or switch to proper shell escaping.
  // The credential helper reads GH_TOKEN/GITHUB_TOKEN at push time.
  const script = [
    "set -euo pipefail",
    `cd ${JSON.stringify(cwd)}`,
    "git rev-parse --verify HEAD >/dev/null",
    "git config credential.helper '!f() { echo username=x-access-token; echo password=${GH_TOKEN:-$GITHUB_TOKEN}; }; f'",
    `git push -u origin ${JSON.stringify(branchName)}`,
  ].join("; ");

  // NOTE(review): the whole script is JSON-quoted and handed to the provider as
  // one command string ("bash -lc \"...\""); this assumes the provider runs the
  // string through a POSIX shell — verify against provider.executeCommand.
  const result = await provider.executeCommand({
    workspaceId: loopCtx.state.workspaceId,
    sandboxId: activeSandboxId,
    command: ["bash", "-lc", JSON.stringify(script)].join(" "),
    label: `git push ${branchName}`,
  });

  if (result.exitCode !== 0) {
    throw new Error(`git push failed (${result.exitCode}): ${result.result}`);
  }

  // Stamp completion with a fresh timestamp (the push may have taken a while).
  const updatedAt = Date.now();
  await loopCtx.db
    .update(taskRuntime)
    .set({ statusMessage: `push complete for ${branchName}`, updatedAt })
    .where(eq(taskRuntime.id, TASK_ROW_ID))
    .run();

  await loopCtx.db
    .update(taskSandboxes)
    .set({ statusMessage: `push complete for ${branchName}`, updatedAt })
    .where(eq(taskSandboxes.sandboxId, activeSandboxId))
    .run();

  await appendHistory(loopCtx, options.historyKind ?? "task.push", {
    reason: options.reason ?? null,
    branchName,
    sandboxId: activeSandboxId,
  });
}
|
||||
31
foundry/packages/backend/src/actors/task/workflow/queue.ts
Normal file
31
foundry/packages/backend/src/actors/task/workflow/queue.ts
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
/**
 * Every queue name the TaskActor workflow loop consumes. Lifecycle commands
 * first, then the workbench.* UI commands, then status-sync results.
 * Adding a command requires both a name here and a handler in the loop.
 */
export const TASK_QUEUE_NAMES = [
  "task.command.initialize",
  "task.command.provision",
  "task.command.attach",
  "task.command.switch",
  "task.command.push",
  "task.command.sync",
  "task.command.merge",
  "task.command.archive",
  "task.command.kill",
  "task.command.get",
  "task.command.workbench.mark_unread",
  "task.command.workbench.rename_task",
  "task.command.workbench.rename_branch",
  "task.command.workbench.create_session",
  "task.command.workbench.rename_session",
  "task.command.workbench.set_session_unread",
  "task.command.workbench.update_draft",
  "task.command.workbench.change_model",
  "task.command.workbench.send_message",
  "task.command.workbench.stop_session",
  "task.command.workbench.sync_session_status",
  "task.command.workbench.close_session",
  "task.command.workbench.publish_pr",
  "task.command.workbench.revert_file",
  "task.status_sync.result",
] as const;

/**
 * Identity mapping from a logical queue name to the physical one. Exists so
 * callers go through one function if task queue names ever need a prefix.
 */
export function taskWorkflowQueueName(name: string): string {
  return name;
}
|
||||
143
foundry/packages/backend/src/actors/task/workflow/status-sync.ts
Normal file
143
foundry/packages/backend/src/actors/task/workflow/status-sync.ts
Normal file
|
|
@ -0,0 +1,143 @@
|
|||
// @ts-nocheck
|
||||
import { eq } from "drizzle-orm";
|
||||
import { getActorRuntimeContext } from "../../context.js";
|
||||
import { logActorWarning, resolveErrorMessage } from "../../logging.js";
|
||||
import { task as taskTable, taskRuntime, taskSandboxes } from "../db/schema.js";
|
||||
import { TASK_ROW_ID, appendHistory, resolveErrorDetail } from "./common.js";
|
||||
import { pushActiveBranchActivity } from "./push.js";
|
||||
|
||||
function mapSessionStatus(status: "running" | "idle" | "error") {
|
||||
if (status === "idle") return "idle";
|
||||
if (status === "error") return "error";
|
||||
return "running";
|
||||
}
|
||||
|
||||
/**
 * Handle a status update from the SessionStatusSync worker.
 *
 * Persists the new status, mirrors it into the tmux window title when the
 * update is for the currently-active sandbox/session, and returns whether the
 * task just transitioned into "idle" (the caller uses that to trigger
 * auto-PR-submit and notifications).
 *
 * NOTE(review): duplicate suppression compares against `state.previousStatus`,
 * which is only updated on the active-session branch below — updates from a
 * non-active session are therefore never considered duplicates and will
 * re-write taskSandboxes/history each poll. Confirm that is intended.
 *
 * @param loopCtx workflow loop context (state + db)
 * @param body    { status, sandboxId, sessionId, at } from the sync worker
 * @returns true only when the ACTIVE session transitioned into "idle"
 */
export async function statusUpdateActivity(loopCtx: any, body: any): Promise<boolean> {
  const newStatus = mapSessionStatus(body.status);
  const wasIdle = loopCtx.state.previousStatus === "idle";
  const didTransition = newStatus === "idle" && !wasIdle;
  const isDuplicateStatus = loopCtx.state.previousStatus === newStatus;

  if (isDuplicateStatus) {
    return false;
  }

  const db = loopCtx.db;
  const runtime = await db
    .select({
      activeSandboxId: taskRuntime.activeSandboxId,
      activeSessionId: taskRuntime.activeSessionId,
    })
    .from(taskRuntime)
    .where(eq(taskRuntime.id, TASK_ROW_ID))
    .get();

  // Only updates for the active sandbox+session pair may change the task-level status.
  const isActive = runtime?.activeSandboxId === body.sandboxId && runtime?.activeSessionId === body.sessionId;

  if (isActive) {
    await db.update(taskTable).set({ status: newStatus, updatedAt: body.at }).where(eq(taskTable.id, TASK_ROW_ID)).run();

    await db
      .update(taskRuntime)
      .set({ statusMessage: `session:${body.status}`, updatedAt: body.at })
      .where(eq(taskRuntime.id, TASK_ROW_ID))
      .run();
  }

  // The sandbox row is updated regardless of which session reported.
  await db
    .update(taskSandboxes)
    .set({ statusMessage: `session:${body.status}`, updatedAt: body.at })
    .where(eq(taskSandboxes.sandboxId, body.sandboxId))
    .run();

  await appendHistory(loopCtx, "task.status", {
    status: body.status,
    sessionId: body.sessionId,
    sandboxId: body.sandboxId,
  });

  if (isActive) {
    // Remember the last active-session status for duplicate/transition detection.
    loopCtx.state.previousStatus = newStatus;

    // Mirror the status into the tmux window named after the branch.
    const { driver } = getActorRuntimeContext();
    if (loopCtx.state.branchName) {
      driver.tmux.setWindowStatus(loopCtx.state.branchName, newStatus);
    }
    return didTransition;
  }

  return false;
}
|
||||
|
||||
/**
 * Auto-submit a PR when the agent session first goes idle.
 *
 * Idempotent via the `prSubmitted` flag on the task row: a second idle
 * transition is a no-op. Pushes the branch, creates the PR through the GitHub
 * driver, marks `prSubmitted`, and records history events. Push/PR failures
 * are caught and recorded (status message + "task.pr_create_failed" event)
 * rather than rethrown, so a failed auto-submit does not kill the workflow.
 *
 * @param loopCtx workflow loop context (state + db)
 * @throws only when the task has no branch or title yet (a precondition bug)
 */
export async function idleSubmitPrActivity(loopCtx: any): Promise<void> {
  const { driver } = getActorRuntimeContext();
  const db = loopCtx.db;

  const self = await db.select({ prSubmitted: taskTable.prSubmitted }).from(taskTable).where(eq(taskTable.id, TASK_ROW_ID)).get();

  // Already submitted once: do nothing.
  if (self && self.prSubmitted) return;

  // Best-effort fetch so PR creation sees up-to-date remote refs; failure is only logged.
  try {
    await driver.git.fetch(loopCtx.state.repoLocalPath);
  } catch (error) {
    logActorWarning("task.status-sync", "fetch before PR submit failed", {
      workspaceId: loopCtx.state.workspaceId,
      repoId: loopCtx.state.repoId,
      taskId: loopCtx.state.taskId,
      error: resolveErrorMessage(error),
    });
  }

  if (!loopCtx.state.branchName || !loopCtx.state.title) {
    throw new Error("cannot submit PR before task has a branch and title");
  }

  try {
    await pushActiveBranchActivity(loopCtx, {
      reason: "auto_submit_idle",
      historyKind: "task.push.auto",
    });

    const pr = await driver.github.createPr(loopCtx.state.repoLocalPath, loopCtx.state.branchName, loopCtx.state.title);

    // Flip the idempotency flag only after the PR actually exists.
    await db.update(taskTable).set({ prSubmitted: 1, updatedAt: Date.now() }).where(eq(taskTable.id, TASK_ROW_ID)).run();

    await appendHistory(loopCtx, "task.step", {
      step: "pr_submit",
      taskId: loopCtx.state.taskId,
      branchName: loopCtx.state.branchName,
      prUrl: pr.url,
      prNumber: pr.number,
    });

    await appendHistory(loopCtx, "task.pr_created", {
      taskId: loopCtx.state.taskId,
      branchName: loopCtx.state.branchName,
      prUrl: pr.url,
      prNumber: pr.number,
    });
  } catch (error) {
    // Failure path: record the error on the runtime row and in history, don't rethrow.
    const detail = resolveErrorDetail(error);
    await db
      .update(taskRuntime)
      .set({
        statusMessage: `pr submit failed: ${detail}`,
        updatedAt: Date.now(),
      })
      .where(eq(taskRuntime.id, TASK_ROW_ID))
      .run();

    await appendHistory(loopCtx, "task.pr_create_failed", {
      taskId: loopCtx.state.taskId,
      branchName: loopCtx.state.branchName,
      error: detail,
    });
  }
}
|
||||
|
||||
export async function idleNotifyActivity(loopCtx: any): Promise<void> {
|
||||
const { notifications } = getActorRuntimeContext();
|
||||
if (notifications && loopCtx.state.branchName) {
|
||||
await notifications.agentIdle(loopCtx.state.branchName);
|
||||
}
|
||||
}
|
||||
672
foundry/packages/backend/src/actors/workspace/actions.ts
Normal file
672
foundry/packages/backend/src/actors/workspace/actions.ts
Normal file
|
|
@ -0,0 +1,672 @@
|
|||
// @ts-nocheck
|
||||
import { desc, eq } from "drizzle-orm";
|
||||
import { Loop } from "rivetkit/workflow";
|
||||
import type {
|
||||
AddRepoInput,
|
||||
CreateTaskInput,
|
||||
TaskRecord,
|
||||
TaskSummary,
|
||||
TaskWorkbenchChangeModelInput,
|
||||
TaskWorkbenchCreateTaskInput,
|
||||
TaskWorkbenchDiffInput,
|
||||
TaskWorkbenchRenameInput,
|
||||
TaskWorkbenchRenameSessionInput,
|
||||
TaskWorkbenchSelectInput,
|
||||
TaskWorkbenchSetSessionUnreadInput,
|
||||
TaskWorkbenchSendMessageInput,
|
||||
TaskWorkbenchSnapshot,
|
||||
TaskWorkbenchTabInput,
|
||||
TaskWorkbenchUpdateDraftInput,
|
||||
HistoryEvent,
|
||||
HistoryQueryInput,
|
||||
ListTasksInput,
|
||||
ProviderId,
|
||||
RepoOverview,
|
||||
RepoStackActionInput,
|
||||
RepoStackActionResult,
|
||||
RepoRecord,
|
||||
StarSandboxAgentRepoInput,
|
||||
StarSandboxAgentRepoResult,
|
||||
SwitchResult,
|
||||
WorkspaceUseInput,
|
||||
} from "@sandbox-agent/foundry-shared";
|
||||
import { getActorRuntimeContext } from "../context.js";
|
||||
import { getTask, getOrCreateHistory, getOrCreateProject, selfWorkspace } from "../handles.js";
|
||||
import { logActorWarning, resolveErrorMessage } from "../logging.js";
|
||||
import { normalizeRemoteUrl, repoIdFromRemote } from "../../services/repo.js";
|
||||
import { taskLookup, repos, providerProfiles } from "./db/schema.js";
|
||||
import { agentTypeForModel } from "../task/workbench.js";
|
||||
import { expectQueueResponse } from "../../services/queue.js";
|
||||
import { workspaceAppActions } from "./app-shell.js";
|
||||
|
||||
/** Persistent state carried by the workspace actor. */
interface WorkspaceState {
  workspaceId: string;
}

/** Body of the refreshProviderProfiles command; omit providerId to refresh all. */
interface RefreshProviderProfilesCommand {
  providerId?: ProviderId;
}

/** Addresses a single task within a workspace. */
interface GetTaskInput {
  workspaceId: string;
  taskId: string;
}

/** Task-addressed action with an optional free-form reason for auditing. */
interface TaskProxyActionInput extends GetTaskInput {
  reason?: string;
}

/** Addresses a single repo within a workspace. */
interface RepoOverviewInput {
  workspaceId: string;
  repoId: string;
}
|
||||
|
||||
/** Queue names the workspace workflow loop consumes (see runWorkspaceWorkflow). */
const WORKSPACE_QUEUE_NAMES = ["workspace.command.addRepo", "workspace.command.createTask", "workspace.command.refreshProviderProfiles"] as const;
// Canonical "owner/name" of the sandbox-agent repo itself.
const SANDBOX_AGENT_REPO = "rivet-dev/sandbox-agent";

// Union of the literal queue names above.
type WorkspaceQueueName = (typeof WORKSPACE_QUEUE_NAMES)[number];

export { WORKSPACE_QUEUE_NAMES };
|
||||
|
||||
/**
 * Identity mapping from a logical workspace queue name to the physical one;
 * single indirection point if workspace queues ever need a prefix.
 */
export function workspaceWorkflowQueueName(name: WorkspaceQueueName): WorkspaceQueueName {
  return name;
}
|
||||
|
||||
function assertWorkspace(c: { state: WorkspaceState }, workspaceId: string): void {
|
||||
if (workspaceId !== c.state.workspaceId) {
|
||||
throw new Error(`Workspace actor mismatch: actor=${c.state.workspaceId} command=${workspaceId}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function resolveRepoId(c: any, taskId: string): Promise<string> {
|
||||
const row = await c.db.select({ repoId: taskLookup.repoId }).from(taskLookup).where(eq(taskLookup.taskId, taskId)).get();
|
||||
|
||||
if (!row) {
|
||||
throw new Error(`Unknown task: ${taskId} (not in lookup)`);
|
||||
}
|
||||
|
||||
return row.repoId;
|
||||
}
|
||||
|
||||
/**
 * Record (or re-point) the task → repo mapping in the lookup table.
 * Idempotent: a conflicting taskId simply has its repoId refreshed.
 */
async function upsertTaskLookupRow(c: any, taskId: string, repoId: string): Promise<void> {
  await c.db
    .insert(taskLookup)
    .values({
      taskId,
      repoId,
    })
    .onConflictDoUpdate({
      target: taskLookup.taskId,
      set: { repoId },
    })
    .run();
}
|
||||
|
||||
/**
 * Gather task summaries across every repo in the workspace (archived
 * included), sorted by most-recently-updated. Per-repo failures are logged
 * and skipped so one broken repo does not hide the others.
 */
async function collectAllTaskSummaries(c: any): Promise<TaskSummary[]> {
  const repoRows = await c.db.select({ repoId: repos.repoId, remoteUrl: repos.remoteUrl }).from(repos).orderBy(desc(repos.updatedAt)).all();

  const all: TaskSummary[] = [];
  for (const row of repoRows) {
    try {
      const project = await getOrCreateProject(c, c.state.workspaceId, row.repoId, row.remoteUrl);
      const snapshot = await project.listTaskSummaries({ includeArchived: true });
      all.push(...snapshot);
    } catch (error) {
      // Best-effort: log and continue with the remaining repos.
      logActorWarning("workspace", "failed collecting tasks for repo", {
        workspaceId: c.state.workspaceId,
        repoId: row.repoId,
        error: resolveErrorMessage(error),
      });
    }
  }

  // Newest first across all repos.
  all.sort((a, b) => b.updatedAt - a.updatedAt);
  return all;
}
|
||||
|
||||
function repoLabelFromRemote(remoteUrl: string): string {
|
||||
try {
|
||||
const url = new URL(remoteUrl.startsWith("http") ? remoteUrl : `https://${remoteUrl}`);
|
||||
const parts = url.pathname.replace(/\/+$/, "").split("/").filter(Boolean);
|
||||
if (parts.length >= 2) {
|
||||
return `${parts[0]}/${(parts[1] ?? "").replace(/\.git$/, "")}`;
|
||||
}
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
|
||||
return remoteUrl;
|
||||
}
|
||||
|
||||
/**
 * Build the full workbench snapshot: every repo's tasks (archived included)
 * plus a per-project grouping, all sorted newest-first.
 *
 * As a side effect, refreshes the task → repo lookup table for every task
 * encountered, so later task-addressed commands can resolve the owning repo.
 * Failures at both the repo and task level are logged and skipped so a single
 * bad actor does not blank the whole snapshot.
 */
async function buildWorkbenchSnapshot(c: any): Promise<TaskWorkbenchSnapshot> {
  const repoRows = await c.db
    .select({ repoId: repos.repoId, remoteUrl: repos.remoteUrl, updatedAt: repos.updatedAt })
    .from(repos)
    .orderBy(desc(repos.updatedAt))
    .all();

  const tasks: Array<any> = [];
  const projects: Array<any> = [];
  for (const row of repoRows) {
    const projectTasks: Array<any> = [];
    try {
      const project = await getOrCreateProject(c, c.state.workspaceId, row.repoId, row.remoteUrl);
      const summaries = await project.listTaskSummaries({ includeArchived: true });
      for (const summary of summaries) {
        try {
          // Keep the lookup table fresh while we're iterating anyway.
          await upsertTaskLookupRow(c, summary.taskId, row.repoId);
          const task = getTask(c, c.state.workspaceId, row.repoId, summary.taskId);
          const snapshot = await task.getWorkbench({});
          // Each task snapshot appears both in the flat list and its project group.
          tasks.push(snapshot);
          projectTasks.push(snapshot);
        } catch (error) {
          logActorWarning("workspace", "failed collecting workbench task", {
            workspaceId: c.state.workspaceId,
            repoId: row.repoId,
            taskId: summary.taskId,
            error: resolveErrorMessage(error),
          });
        }
      }

      // Only repos that yielded at least one task snapshot become project entries.
      if (projectTasks.length > 0) {
        projects.push({
          id: row.repoId,
          label: repoLabelFromRemote(row.remoteUrl),
          updatedAtMs: projectTasks[0]?.updatedAtMs ?? row.updatedAt,
          tasks: projectTasks.sort((left, right) => right.updatedAtMs - left.updatedAtMs),
        });
      }
    } catch (error) {
      logActorWarning("workspace", "failed collecting workbench repo snapshot", {
        workspaceId: c.state.workspaceId,
        repoId: row.repoId,
        error: resolveErrorMessage(error),
      });
    }
  }

  // Newest-first ordering for both the flat task list and the project list.
  tasks.sort((left, right) => right.updatedAtMs - left.updatedAtMs);
  projects.sort((left, right) => right.updatedAtMs - left.updatedAtMs);
  return {
    workspaceId: c.state.workspaceId,
    // Every known repo is listed, even ones that contributed no tasks.
    repos: repoRows.map((row) => ({
      id: row.repoId,
      label: repoLabelFromRemote(row.remoteUrl),
    })),
    projects,
    tasks,
  };
}
|
||||
|
||||
async function requireWorkbenchTask(c: any, taskId: string) {
|
||||
const repoId = await resolveRepoId(c, taskId);
|
||||
return getTask(c, c.state.workspaceId, repoId, taskId);
|
||||
}
|
||||
|
||||
/**
 * Workspace mutation: register a git remote as a repo in this workspace.
 *
 * Normalizes and validates the remote (a git driver round-trip), upserts the
 * repos row keyed by the derived repo id, notifies workbench subscribers, and
 * returns the resulting record.
 *
 * @throws when the workspace id mismatches, the remote is empty after
 *         normalization, or remote validation fails
 */
async function addRepoMutation(c: any, input: AddRepoInput): Promise<RepoRecord> {
  assertWorkspace(c, input.workspaceId);

  const remoteUrl = normalizeRemoteUrl(input.remoteUrl);
  if (!remoteUrl) {
    throw new Error("remoteUrl is required");
  }

  // Fail fast on unreachable/invalid remotes before touching the DB.
  const { driver } = getActorRuntimeContext();
  await driver.git.validateRemote(remoteUrl);

  const repoId = repoIdFromRemote(remoteUrl);
  const now = Date.now();

  // Idempotent: re-adding an existing repo just refreshes its remote URL.
  await c.db
    .insert(repos)
    .values({
      repoId,
      remoteUrl,
      createdAt: now,
      updatedAt: now,
    })
    .onConflictDoUpdate({
      target: repos.repoId,
      set: {
        remoteUrl,
        updatedAt: now,
      },
    })
    .run();

  // NOTE(review): `workspaceActions` is not among this chunk's visible imports
  // (only `workspaceAppActions` is) — confirm it resolves in the full file.
  await workspaceActions.notifyWorkbenchUpdated(c);
  return {
    workspaceId: c.state.workspaceId,
    repoId,
    remoteUrl,
    // NOTE(review): createdAt reports `now` even for a pre-existing repo whose
    // stored createdAt is older — confirm callers don't rely on the stored value.
    createdAt: now,
    updatedAt: now,
  };
}
|
||||
|
||||
/**
 * Workspace mutation: create a task in one of the workspace's repos and kick
 * off its sandbox provisioning.
 *
 * Sequence: validate workspace → resolve repo → upsert a provider profile
 * stub → ensure the ProjectActor → create the task → record the task → repo
 * lookup row → fire `provision` on the new TaskActor → notify workbench
 * subscribers.
 *
 * @throws when the workspace id mismatches or the repo id is unknown
 */
async function createTaskMutation(c: any, input: CreateTaskInput): Promise<TaskRecord> {
  assertWorkspace(c, input.workspaceId);

  const { providers } = getActorRuntimeContext();
  const providerId = input.providerId ?? providers.defaultProviderId();

  const repoId = input.repoId;
  const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, repoId)).get();
  if (!repoRow) {
    throw new Error(`Unknown repo: ${repoId}`);
  }
  const remoteUrl = repoRow.remoteUrl;

  // Make sure a provider profile row exists for the chosen provider.
  // NOTE(review): profileJson only records the provider id here — presumably a
  // stub refreshed elsewhere; confirm against refreshProviderProfilesMutation.
  await c.db
    .insert(providerProfiles)
    .values({
      providerId,
      profileJson: JSON.stringify({ providerId }),
      updatedAt: Date.now(),
    })
    .onConflictDoUpdate({
      target: providerProfiles.providerId,
      set: {
        profileJson: JSON.stringify({ providerId }),
        updatedAt: Date.now(),
      },
    })
    .run();

  const project = await getOrCreateProject(c, c.state.workspaceId, repoId, remoteUrl);
  await project.ensure({ remoteUrl });

  // The ProjectActor owns task creation (branch naming, titles, etc.).
  const created = await project.createTask({
    task: input.task,
    providerId,
    agentType: input.agentType ?? null,
    explicitTitle: input.explicitTitle ?? null,
    explicitBranchName: input.explicitBranchName ?? null,
    onBranch: input.onBranch ?? null,
  });

  // Register the new task in the workspace-level lookup so task-addressed
  // commands can find its repo.
  await c.db
    .insert(taskLookup)
    .values({
      taskId: created.taskId,
      repoId,
    })
    .onConflictDoUpdate({
      target: taskLookup.taskId,
      set: { repoId },
    })
    .run();

  // Kick off sandbox provisioning on the freshly created TaskActor.
  const task = getTask(c, c.state.workspaceId, repoId, created.taskId);
  await task.provision({ providerId });

  // NOTE(review): `workspaceActions` is not among this chunk's visible imports —
  // confirm it resolves in the full file.
  await workspaceActions.notifyWorkbenchUpdated(c);
  return created;
}
|
||||
|
||||
async function refreshProviderProfilesMutation(c: any, command?: RefreshProviderProfilesCommand): Promise<void> {
|
||||
const body = command ?? {};
|
||||
const { providers } = getActorRuntimeContext();
|
||||
const providerIds: ProviderId[] = body.providerId ? [body.providerId] : providers.availableProviderIds();
|
||||
|
||||
for (const providerId of providerIds) {
|
||||
await c.db
|
||||
.insert(providerProfiles)
|
||||
.values({
|
||||
providerId,
|
||||
profileJson: JSON.stringify({ providerId }),
|
||||
updatedAt: Date.now(),
|
||||
})
|
||||
.onConflictDoUpdate({
|
||||
target: providerProfiles.providerId,
|
||||
set: {
|
||||
profileJson: JSON.stringify({ providerId }),
|
||||
updatedAt: Date.now(),
|
||||
},
|
||||
})
|
||||
.run();
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Long-running workflow loop for the WorkspaceActor: drains the workspace
 * command queues and dispatches each message to the matching mutation.
 * Each mutation runs inside a durable `step`, so on workflow replay a
 * completed mutation is not re-executed.
 */
export async function runWorkspaceWorkflow(ctx: any): Promise<void> {
  await ctx.loop("workspace-command-loop", async (loopCtx: any) => {
    // Wait for the next command across all workspace queues. Messages are
    // completable: the sender blocks (wait: true) until msg.complete(...).
    const msg = await loopCtx.queue.next("next-workspace-command", {
      names: [...WORKSPACE_QUEUE_NAMES],
      completable: true,
    });
    if (!msg) {
      return Loop.continue(undefined);
    }

    if (msg.name === "workspace.command.addRepo") {
      const result = await loopCtx.step({
        name: "workspace-add-repo",
        timeout: 60_000,
        run: async () => addRepoMutation(loopCtx, msg.body as AddRepoInput),
      });
      await msg.complete(result);
      return Loop.continue(undefined);
    }

    if (msg.name === "workspace.command.createTask") {
      // Task creation may provision a sandbox, hence the 12-minute timeout
      // (matches the sender-side timeout in workspaceActions.createTask).
      const result = await loopCtx.step({
        name: "workspace-create-task",
        timeout: 12 * 60_000,
        run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskInput),
      });
      await msg.complete(result);
      return Loop.continue(undefined);
    }

    if (msg.name === "workspace.command.refreshProviderProfiles") {
      await loopCtx.step("workspace-refresh-provider-profiles", async () =>
        refreshProviderProfilesMutation(loopCtx, msg.body as RefreshProviderProfilesCommand),
      );
      await msg.complete({ ok: true });
      // No early return here: control falls through to the shared continue.
    }

    return Loop.continue(undefined);
  });
}
|
||||
|
||||
/**
 * Public action surface of the WorkspaceActor. Workspace-level queries run
 * inline; serialized mutations (addRepo, createTask, refreshProviderProfiles)
 * are routed through the workflow queue so only the workflow loop executes
 * them. Task-scoped actions resolve the task's repo via the task_lookup
 * table and forward to the owning ProjectActor/TaskActor.
 */
export const workspaceActions = {
  ...workspaceAppActions,
  // Validates that the caller is addressing this workspace instance.
  async useWorkspace(c: any, input: WorkspaceUseInput): Promise<{ workspaceId: string }> {
    assertWorkspace(c, input.workspaceId);
    return { workspaceId: c.state.workspaceId };
  },

  // Enqueues the addRepo command and blocks until the workflow loop
  // completes it (or the 60s wait times out).
  async addRepo(c: any, input: AddRepoInput): Promise<RepoRecord> {
    const self = selfWorkspace(c);
    return expectQueueResponse<RepoRecord>(
      await self.send(workspaceWorkflowQueueName("workspace.command.addRepo"), input, {
        wait: true,
        timeout: 60_000,
      }),
    );
  },

  // Lists every repo in this workspace, most recently updated first.
  async listRepos(c: any, input: WorkspaceUseInput): Promise<RepoRecord[]> {
    assertWorkspace(c, input.workspaceId);

    const rows = await c.db
      .select({
        repoId: repos.repoId,
        remoteUrl: repos.remoteUrl,
        createdAt: repos.createdAt,
        updatedAt: repos.updatedAt,
      })
      .from(repos)
      .orderBy(desc(repos.updatedAt))
      .all();

    // The table has no workspaceId column (one SQLite DB per workspace
    // actor), so it is re-attached here for the caller.
    return rows.map((row) => ({
      workspaceId: c.state.workspaceId,
      repoId: row.repoId,
      remoteUrl: row.remoteUrl,
      createdAt: row.createdAt,
      updatedAt: row.updatedAt,
    }));
  },

  // Enqueues task creation; generous timeout because the workflow side may
  // provision a sandbox (see runWorkspaceWorkflow).
  async createTask(c: any, input: CreateTaskInput): Promise<TaskRecord> {
    const self = selfWorkspace(c);
    return expectQueueResponse<TaskRecord>(
      await self.send(workspaceWorkflowQueueName("workspace.command.createTask"), input, {
        wait: true,
        timeout: 12 * 60_000,
      }),
    );
  },

  // Stars the sandbox-agent GitHub repo on behalf of the caller.
  async starSandboxAgentRepo(c: any, input: StarSandboxAgentRepoInput): Promise<StarSandboxAgentRepoResult> {
    assertWorkspace(c, input.workspaceId);
    const { driver } = getActorRuntimeContext();
    await driver.github.starRepository(SANDBOX_AGENT_REPO);
    return {
      repo: SANDBOX_AGENT_REPO,
      starredAt: Date.now(),
    };
  },

  // Returns the current workbench snapshot for this workspace.
  async getWorkbench(c: any, input: WorkspaceUseInput): Promise<TaskWorkbenchSnapshot> {
    assertWorkspace(c, input.workspaceId);
    return await buildWorkbenchSnapshot(c);
  },

  // Broadcasts a workbench-changed event so connected clients refetch.
  async notifyWorkbenchUpdated(c: any): Promise<void> {
    c.broadcast("workbenchUpdated", { at: Date.now() });
  },

  // Workbench-shaped wrapper over createTask: translates optional UI fields
  // into the CreateTaskInput shape and returns just the new taskId.
  async createWorkbenchTask(c: any, input: TaskWorkbenchCreateTaskInput): Promise<{ taskId: string }> {
    const created = await workspaceActions.createTask(c, {
      workspaceId: c.state.workspaceId,
      repoId: input.repoId,
      task: input.task,
      ...(input.title ? { explicitTitle: input.title } : {}),
      ...(input.branch ? { explicitBranchName: input.branch } : {}),
      ...(input.model ? { agentType: agentTypeForModel(input.model) } : {}),
    });
    return { taskId: created.taskId };
  },

  // --- Task-scoped workbench proxies: each resolves the TaskActor via
  // requireWorkbenchTask and forwards the call. Unread/draft state is
  // backend-owned (see CLAUDE.md ownership rules). ---

  async markWorkbenchUnread(c: any, input: TaskWorkbenchSelectInput): Promise<void> {
    const task = await requireWorkbenchTask(c, input.taskId);
    await task.markWorkbenchUnread({});
  },

  async renameWorkbenchTask(c: any, input: TaskWorkbenchRenameInput): Promise<void> {
    const task = await requireWorkbenchTask(c, input.taskId);
    await task.renameWorkbenchTask(input);
  },

  // Branch rename is a real git operation on the task's branch, not just a
  // metadata change.
  async renameWorkbenchBranch(c: any, input: TaskWorkbenchRenameInput): Promise<void> {
    const task = await requireWorkbenchTask(c, input.taskId);
    await task.renameWorkbenchBranch(input);
  },

  async createWorkbenchSession(c: any, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ tabId: string }> {
    const task = await requireWorkbenchTask(c, input.taskId);
    return await task.createWorkbenchSession({ ...(input.model ? { model: input.model } : {}) });
  },

  async renameWorkbenchSession(c: any, input: TaskWorkbenchRenameSessionInput): Promise<void> {
    const task = await requireWorkbenchTask(c, input.taskId);
    await task.renameWorkbenchSession(input);
  },

  async setWorkbenchSessionUnread(c: any, input: TaskWorkbenchSetSessionUnreadInput): Promise<void> {
    const task = await requireWorkbenchTask(c, input.taskId);
    await task.setWorkbenchSessionUnread(input);
  },

  async updateWorkbenchDraft(c: any, input: TaskWorkbenchUpdateDraftInput): Promise<void> {
    const task = await requireWorkbenchTask(c, input.taskId);
    await task.updateWorkbenchDraft(input);
  },

  async changeWorkbenchModel(c: any, input: TaskWorkbenchChangeModelInput): Promise<void> {
    const task = await requireWorkbenchTask(c, input.taskId);
    await task.changeWorkbenchModel(input);
  },

  async sendWorkbenchMessage(c: any, input: TaskWorkbenchSendMessageInput): Promise<void> {
    const task = await requireWorkbenchTask(c, input.taskId);
    await task.sendWorkbenchMessage(input);
  },

  async stopWorkbenchSession(c: any, input: TaskWorkbenchTabInput): Promise<void> {
    const task = await requireWorkbenchTask(c, input.taskId);
    await task.stopWorkbenchSession(input);
  },

  async closeWorkbenchSession(c: any, input: TaskWorkbenchTabInput): Promise<void> {
    const task = await requireWorkbenchTask(c, input.taskId);
    await task.closeWorkbenchSession(input);
  },

  async publishWorkbenchPr(c: any, input: TaskWorkbenchSelectInput): Promise<void> {
    const task = await requireWorkbenchTask(c, input.taskId);
    await task.publishWorkbenchPr({});
  },

  async revertWorkbenchFile(c: any, input: TaskWorkbenchDiffInput): Promise<void> {
    const task = await requireWorkbenchTask(c, input.taskId);
    await task.revertWorkbenchFile(input);
  },

  // Lists task summaries for one repo (archived included), or for every
  // repo in the workspace when no repoId is supplied.
  async listTasks(c: any, input: ListTasksInput): Promise<TaskSummary[]> {
    assertWorkspace(c, input.workspaceId);

    if (input.repoId) {
      const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, input.repoId)).get();
      if (!repoRow) {
        throw new Error(`Unknown repo: ${input.repoId}`);
      }

      const project = await getOrCreateProject(c, c.state.workspaceId, input.repoId, repoRow.remoteUrl);
      return await project.listTaskSummaries({ includeArchived: true });
    }

    return await collectAllTaskSummaries(c);
  },

  // Fetches the repo overview from the owning ProjectActor.
  async getRepoOverview(c: any, input: RepoOverviewInput): Promise<RepoOverview> {
    assertWorkspace(c, input.workspaceId);

    const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, input.repoId)).get();
    if (!repoRow) {
      throw new Error(`Unknown repo: ${input.repoId}`);
    }

    const project = await getOrCreateProject(c, c.state.workspaceId, input.repoId, repoRow.remoteUrl);
    await project.ensure({ remoteUrl: repoRow.remoteUrl });
    return await project.getRepoOverview({});
  },

  // Runs a branch-stack operation (e.g. restack) on the ProjectActor.
  async runRepoStackAction(c: any, input: RepoStackActionInput): Promise<RepoStackActionResult> {
    assertWorkspace(c, input.workspaceId);

    const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, input.repoId)).get();
    if (!repoRow) {
      throw new Error(`Unknown repo: ${input.repoId}`);
    }

    const project = await getOrCreateProject(c, c.state.workspaceId, input.repoId, repoRow.remoteUrl);
    await project.ensure({ remoteUrl: repoRow.remoteUrl });
    return await project.runRepoStackAction({
      action: input.action,
      branchName: input.branchName,
      parentBranch: input.parentBranch,
    });
  },

  // Switches the caller's context to the given task and reports the switch
  // target chosen by the TaskActor.
  async switchTask(c: any, taskId: string): Promise<SwitchResult> {
    const repoId = await resolveRepoId(c, taskId);
    const h = getTask(c, c.state.workspaceId, repoId, taskId);
    const record = await h.get();
    const switched = await h.switch();

    return {
      workspaceId: c.state.workspaceId,
      taskId,
      providerId: record.providerId,
      switchTarget: switched.switchTarget,
    };
  },

  // Enqueues a provider-profile refresh and waits for the workflow loop to
  // process it.
  async refreshProviderProfiles(c: any, command?: RefreshProviderProfilesCommand): Promise<void> {
    const self = selfWorkspace(c);
    await self.send(workspaceWorkflowQueueName("workspace.command.refreshProviderProfiles"), command ?? {}, {
      wait: true,
      timeout: 60_000,
    });
  },

  // Merges history events across all repos into one workspace feed.
  // NOTE: `limit` is applied per repo and then again globally after the
  // merge sort, so up to repoCount*limit events are fetched.
  async history(c: any, input: HistoryQueryInput): Promise<HistoryEvent[]> {
    assertWorkspace(c, input.workspaceId);

    const limit = input.limit ?? 20;
    const repoRows = await c.db.select({ repoId: repos.repoId }).from(repos).all();

    const allEvents: HistoryEvent[] = [];

    for (const row of repoRows) {
      try {
        const hist = await getOrCreateHistory(c, c.state.workspaceId, row.repoId);
        const items = await hist.list({
          branch: input.branch,
          taskId: input.taskId,
          limit,
        });
        allEvents.push(...items);
      } catch (error) {
        // Best effort: a single repo's history failure must not break the
        // workspace-wide feed, so log and continue.
        logActorWarning("workspace", "history lookup failed for repo", {
          workspaceId: c.state.workspaceId,
          repoId: row.repoId,
          error: resolveErrorMessage(error),
        });
      }
    }

    // Newest first.
    allEvents.sort((a, b) => b.createdAt - a.createdAt);
    return allEvents.slice(0, limit);
  },

  // Resolves a task record (enriched by its ProjectActor) from a taskId.
  async getTask(c: any, input: GetTaskInput): Promise<TaskRecord> {
    assertWorkspace(c, input.workspaceId);

    const repoId = await resolveRepoId(c, input.taskId);

    const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, repoId)).get();
    if (!repoRow) {
      throw new Error(`Unknown repo: ${repoId}`);
    }

    const project = await getOrCreateProject(c, c.state.workspaceId, repoId, repoRow.remoteUrl);
    return await project.getTaskEnriched({ taskId: input.taskId });
  },

  // --- Task lifecycle proxies: resolve repo from task_lookup and forward
  // the action (with its reason) to the TaskActor. ---

  async attachTask(c: any, input: TaskProxyActionInput): Promise<{ target: string; sessionId: string | null }> {
    assertWorkspace(c, input.workspaceId);
    const repoId = await resolveRepoId(c, input.taskId);
    const h = getTask(c, c.state.workspaceId, repoId, input.taskId);
    return await h.attach({ reason: input.reason });
  },

  async pushTask(c: any, input: TaskProxyActionInput): Promise<void> {
    assertWorkspace(c, input.workspaceId);
    const repoId = await resolveRepoId(c, input.taskId);
    const h = getTask(c, c.state.workspaceId, repoId, input.taskId);
    await h.push({ reason: input.reason });
  },

  async syncTask(c: any, input: TaskProxyActionInput): Promise<void> {
    assertWorkspace(c, input.workspaceId);
    const repoId = await resolveRepoId(c, input.taskId);
    const h = getTask(c, c.state.workspaceId, repoId, input.taskId);
    await h.sync({ reason: input.reason });
  },

  async mergeTask(c: any, input: TaskProxyActionInput): Promise<void> {
    assertWorkspace(c, input.workspaceId);
    const repoId = await resolveRepoId(c, input.taskId);
    const h = getTask(c, c.state.workspaceId, repoId, input.taskId);
    await h.merge({ reason: input.reason });
  },

  async archiveTask(c: any, input: TaskProxyActionInput): Promise<void> {
    assertWorkspace(c, input.workspaceId);
    const repoId = await resolveRepoId(c, input.taskId);
    const h = getTask(c, c.state.workspaceId, repoId, input.taskId);
    await h.archive({ reason: input.reason });
  },

  async killTask(c: any, input: TaskProxyActionInput): Promise<void> {
    assertWorkspace(c, input.workspaceId);
    const repoId = await resolveRepoId(c, input.taskId);
    const h = getTask(c, c.state.workspaceId, repoId, input.taskId);
    await h.kill({ reason: input.reason });
  },
};
|
||||
1442
foundry/packages/backend/src/actors/workspace/app-shell.ts
Normal file
1442
foundry/packages/backend/src/actors/workspace/app-shell.ts
Normal file
File diff suppressed because it is too large
Load diff
5
foundry/packages/backend/src/actors/workspace/db/db.ts
Normal file
5
foundry/packages/backend/src/actors/workspace/db/db.ts
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
import { db } from "rivetkit/db/drizzle";
import * as schema from "./schema.js";
import migrations from "./migrations.js";

// Per-workspace SQLite database handle: drizzle schema plus the generated
// migration bundle (see ./migrations.ts, produced by drizzle-kit output).
export const workspaceDb = db({ schema, migrations });
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
import { defineConfig } from "rivetkit/db/drizzle";

// drizzle-kit configuration for the workspace actor's database:
// reads the schema module and writes SQL migrations into ./drizzle.
export default defineConfig({
  out: "./src/actors/workspace/db/drizzle",
  schema: "./src/actors/workspace/db/schema.ts",
});
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
CREATE TABLE `provider_profiles` (
|
||||
`provider_id` text PRIMARY KEY NOT NULL,
|
||||
`profile_json` text NOT NULL,
|
||||
`updated_at` integer NOT NULL
|
||||
);
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
CREATE TABLE `repos` (
|
||||
`repo_id` text PRIMARY KEY NOT NULL,
|
||||
`remote_url` text NOT NULL,
|
||||
`created_at` integer NOT NULL,
|
||||
`updated_at` integer NOT NULL
|
||||
);
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
CREATE TABLE `task_lookup` (
|
||||
`task_id` text PRIMARY KEY NOT NULL,
|
||||
`repo_id` text NOT NULL
|
||||
);
|
||||
|
|
@ -0,0 +1,49 @@
|
|||
{
|
||||
"version": "6",
|
||||
"dialect": "sqlite",
|
||||
"id": "a85809c0-65c2-4f99-92ed-34357c9f83d7",
|
||||
"prevId": "00000000-0000-0000-0000-000000000000",
|
||||
"tables": {
|
||||
"provider_profiles": {
|
||||
"name": "provider_profiles",
|
||||
"columns": {
|
||||
"provider_id": {
|
||||
"name": "provider_id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"profile_json": {
|
||||
"name": "profile_json",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
}
|
||||
},
|
||||
"views": {},
|
||||
"enums": {},
|
||||
"_meta": {
|
||||
"schemas": {},
|
||||
"tables": {},
|
||||
"columns": {}
|
||||
},
|
||||
"internal": {
|
||||
"indexes": {}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,87 @@
|
|||
{
|
||||
"version": "6",
|
||||
"dialect": "sqlite",
|
||||
"id": "450e2fdf-6349-482f-8a68-5bc0f0a9718a",
|
||||
"prevId": "a85809c0-65c2-4f99-92ed-34357c9f83d7",
|
||||
"tables": {
|
||||
"provider_profiles": {
|
||||
"name": "provider_profiles",
|
||||
"columns": {
|
||||
"provider_id": {
|
||||
"name": "provider_id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"profile_json": {
|
||||
"name": "profile_json",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"repos": {
|
||||
"name": "repos",
|
||||
"columns": {
|
||||
"repo_id": {
|
||||
"name": "repo_id",
|
||||
"type": "text",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"remote_url": {
|
||||
"name": "remote_url",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"created_at": {
|
||||
"name": "created_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"updated_at": {
|
||||
"name": "updated_at",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
}
|
||||
},
|
||||
"views": {},
|
||||
"enums": {},
|
||||
"_meta": {
|
||||
"schemas": {},
|
||||
"tables": {},
|
||||
"columns": {}
|
||||
},
|
||||
"internal": {
|
||||
"indexes": {}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
{
|
||||
"version": "7",
|
||||
"dialect": "sqlite",
|
||||
"entries": [
|
||||
{
|
||||
"idx": 0,
|
||||
"version": "6",
|
||||
"when": 1770924376525,
|
||||
"tag": "0000_rare_iron_man",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 1,
|
||||
"version": "6",
|
||||
"when": 1770947252912,
|
||||
"tag": "0001_sleepy_lady_deathstrike",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 2,
|
||||
"version": "6",
|
||||
"when": 1772668800000,
|
||||
"tag": "0002_tiny_silver_surfer",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
189
foundry/packages/backend/src/actors/workspace/db/migrations.ts
Normal file
189
foundry/packages/backend/src/actors/workspace/db/migrations.ts
Normal file
|
|
@ -0,0 +1,189 @@
|
|||
// This file is generated by src/actors/_scripts/generate-actor-migrations.ts.
|
||||
// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql).
|
||||
// Do not hand-edit this file.
|
||||
|
||||
const journal = {
|
||||
entries: [
|
||||
{
|
||||
idx: 0,
|
||||
when: 1770924376525,
|
||||
tag: "0000_rare_iron_man",
|
||||
breakpoints: true,
|
||||
},
|
||||
{
|
||||
idx: 1,
|
||||
when: 1770947252912,
|
||||
tag: "0001_sleepy_lady_deathstrike",
|
||||
breakpoints: true,
|
||||
},
|
||||
{
|
||||
idx: 2,
|
||||
when: 1772668800000,
|
||||
tag: "0002_tiny_silver_surfer",
|
||||
breakpoints: true,
|
||||
},
|
||||
{
|
||||
idx: 3,
|
||||
when: 1773100800000,
|
||||
tag: "0003_app_shell_organization_profile",
|
||||
breakpoints: true,
|
||||
},
|
||||
{
|
||||
idx: 4,
|
||||
when: 1773100800001,
|
||||
tag: "0004_app_shell_organization_members",
|
||||
breakpoints: true,
|
||||
},
|
||||
{
|
||||
idx: 5,
|
||||
when: 1773100800002,
|
||||
tag: "0005_app_shell_seat_assignments",
|
||||
breakpoints: true,
|
||||
},
|
||||
{
|
||||
idx: 6,
|
||||
when: 1773100800003,
|
||||
tag: "0006_app_shell_invoices",
|
||||
breakpoints: true,
|
||||
},
|
||||
{
|
||||
idx: 7,
|
||||
when: 1773100800004,
|
||||
tag: "0007_app_shell_sessions",
|
||||
breakpoints: true,
|
||||
},
|
||||
{
|
||||
idx: 8,
|
||||
when: 1773100800005,
|
||||
tag: "0008_app_shell_stripe_lookup",
|
||||
breakpoints: true,
|
||||
},
|
||||
{
|
||||
idx: 9,
|
||||
when: 1773100800006,
|
||||
tag: "0009_github_sync_status",
|
||||
breakpoints: true,
|
||||
},
|
||||
{
|
||||
idx: 10,
|
||||
when: 1772928000000,
|
||||
tag: "0010_app_session_starter_repo",
|
||||
breakpoints: true,
|
||||
},
|
||||
],
|
||||
} as const;
|
||||
|
||||
export default {
|
||||
journal,
|
||||
migrations: {
|
||||
m0000: `CREATE TABLE \`provider_profiles\` (
|
||||
\`provider_id\` text PRIMARY KEY NOT NULL,
|
||||
\`profile_json\` text NOT NULL,
|
||||
\`updated_at\` integer NOT NULL
|
||||
);
|
||||
`,
|
||||
m0001: `CREATE TABLE \`repos\` (
|
||||
\`repo_id\` text PRIMARY KEY NOT NULL,
|
||||
\`remote_url\` text NOT NULL,
|
||||
\`created_at\` integer NOT NULL,
|
||||
\`updated_at\` integer NOT NULL
|
||||
);
|
||||
`,
|
||||
m0002: `CREATE TABLE \`task_lookup\` (
|
||||
\`task_id\` text PRIMARY KEY NOT NULL,
|
||||
\`repo_id\` text NOT NULL
|
||||
);
|
||||
`,
|
||||
m0003: `CREATE TABLE \`organization_profile\` (
|
||||
\`id\` text PRIMARY KEY NOT NULL,
|
||||
\`kind\` text NOT NULL,
|
||||
\`github_account_id\` text NOT NULL,
|
||||
\`github_login\` text NOT NULL,
|
||||
\`github_account_type\` text NOT NULL,
|
||||
\`display_name\` text NOT NULL,
|
||||
\`slug\` text NOT NULL,
|
||||
\`primary_domain\` text NOT NULL,
|
||||
\`default_model\` text NOT NULL,
|
||||
\`auto_import_repos\` integer NOT NULL,
|
||||
\`repo_import_status\` text NOT NULL,
|
||||
\`github_connected_account\` text NOT NULL,
|
||||
\`github_installation_status\` text NOT NULL,
|
||||
\`github_installation_id\` integer,
|
||||
\`github_last_sync_label\` text NOT NULL,
|
||||
\`stripe_customer_id\` text,
|
||||
\`stripe_subscription_id\` text,
|
||||
\`stripe_price_id\` text,
|
||||
\`billing_plan_id\` text NOT NULL,
|
||||
\`billing_status\` text NOT NULL,
|
||||
\`billing_seats_included\` integer NOT NULL,
|
||||
\`billing_trial_ends_at\` text,
|
||||
\`billing_renewal_at\` text,
|
||||
\`billing_payment_method_label\` text NOT NULL,
|
||||
\`created_at\` integer NOT NULL,
|
||||
\`updated_at\` integer NOT NULL
|
||||
);
|
||||
`,
|
||||
m0004: `CREATE TABLE \`organization_members\` (
|
||||
\`id\` text PRIMARY KEY NOT NULL,
|
||||
\`name\` text NOT NULL,
|
||||
\`email\` text NOT NULL,
|
||||
\`role\` text NOT NULL,
|
||||
\`state\` text NOT NULL,
|
||||
\`updated_at\` integer NOT NULL
|
||||
);
|
||||
`,
|
||||
m0005: `CREATE TABLE \`seat_assignments\` (
|
||||
\`email\` text PRIMARY KEY NOT NULL,
|
||||
\`created_at\` integer NOT NULL
|
||||
);
|
||||
`,
|
||||
m0006: `CREATE TABLE \`invoices\` (
|
||||
\`id\` text PRIMARY KEY NOT NULL,
|
||||
\`label\` text NOT NULL,
|
||||
\`issued_at\` text NOT NULL,
|
||||
\`amount_usd\` integer NOT NULL,
|
||||
\`status\` text NOT NULL,
|
||||
\`created_at\` integer NOT NULL
|
||||
);
|
||||
`,
|
||||
m0007: `CREATE TABLE \`app_sessions\` (
|
||||
\`id\` text PRIMARY KEY NOT NULL,
|
||||
\`current_user_id\` text,
|
||||
\`current_user_name\` text,
|
||||
\`current_user_email\` text,
|
||||
\`current_user_github_login\` text,
|
||||
\`current_user_role_label\` text,
|
||||
\`eligible_organization_ids_json\` text NOT NULL,
|
||||
\`active_organization_id\` text,
|
||||
\`github_access_token\` text,
|
||||
\`github_scope\` text NOT NULL,
|
||||
\`starter_repo_status\` text NOT NULL,
|
||||
\`starter_repo_starred_at\` integer,
|
||||
\`starter_repo_skipped_at\` integer,
|
||||
\`oauth_state\` text,
|
||||
\`oauth_state_expires_at\` integer,
|
||||
\`created_at\` integer NOT NULL,
|
||||
\`updated_at\` integer NOT NULL
|
||||
);
|
||||
`,
|
||||
m0008: `CREATE TABLE \`stripe_lookup\` (
|
||||
\`lookup_key\` text PRIMARY KEY NOT NULL,
|
||||
\`organization_id\` text NOT NULL,
|
||||
\`updated_at\` integer NOT NULL
|
||||
);
|
||||
`,
|
||||
m0009: `ALTER TABLE \`organization_profile\` ADD COLUMN \`github_sync_status\` text NOT NULL DEFAULT 'pending';
|
||||
ALTER TABLE \`organization_profile\` ADD COLUMN \`github_last_sync_at\` integer;
|
||||
UPDATE \`organization_profile\`
|
||||
SET \`github_sync_status\` = CASE
|
||||
WHEN \`repo_import_status\` = 'ready' THEN 'synced'
|
||||
WHEN \`repo_import_status\` = 'importing' THEN 'syncing'
|
||||
ELSE 'pending'
|
||||
END;
|
||||
`,
|
||||
m0010: `ALTER TABLE \`app_sessions\` ADD COLUMN \`starter_repo_status\` text NOT NULL DEFAULT 'pending';
|
||||
ALTER TABLE \`app_sessions\` ADD COLUMN \`starter_repo_starred_at\` integer;
|
||||
ALTER TABLE \`app_sessions\` ADD COLUMN \`starter_repo_skipped_at\` integer;
|
||||
`,
|
||||
} as const,
|
||||
};
|
||||
100
foundry/packages/backend/src/actors/workspace/db/schema.ts
Normal file
100
foundry/packages/backend/src/actors/workspace/db/schema.ts
Normal file
|
|
@ -0,0 +1,100 @@
|
|||
import { integer, sqliteTable, text } from "rivetkit/db/drizzle";

// SQLite is per workspace actor instance, so no workspaceId column needed.

// One row per sandbox provider; profileJson holds the serialized profile.
export const providerProfiles = sqliteTable("provider_profiles", {
  providerId: text("provider_id").notNull().primaryKey(),
  profileJson: text("profile_json").notNull(),
  updatedAt: integer("updated_at").notNull(),
});

// Repos registered in this workspace.
export const repos = sqliteTable("repos", {
  repoId: text("repo_id").notNull().primaryKey(),
  remoteUrl: text("remote_url").notNull(),
  createdAt: integer("created_at").notNull(),
  updatedAt: integer("updated_at").notNull(),
});

// Reverse index: resolves a taskId to its owning repo, so task-scoped
// actions can route to the right ProjectActor.
export const taskLookup = sqliteTable("task_lookup", {
  taskId: text("task_id").notNull().primaryKey(),
  repoId: text("repo_id").notNull(),
});

// Singleton-style organization profile: identity, GitHub installation/sync
// state, and Stripe billing fields for the app shell.
export const organizationProfile = sqliteTable("organization_profile", {
  id: text("id").notNull().primaryKey(),
  kind: text("kind").notNull(),
  githubAccountId: text("github_account_id").notNull(),
  githubLogin: text("github_login").notNull(),
  githubAccountType: text("github_account_type").notNull(),
  displayName: text("display_name").notNull(),
  slug: text("slug").notNull(),
  primaryDomain: text("primary_domain").notNull(),
  defaultModel: text("default_model").notNull(),
  autoImportRepos: integer("auto_import_repos").notNull(),
  repoImportStatus: text("repo_import_status").notNull(),
  githubConnectedAccount: text("github_connected_account").notNull(),
  githubInstallationStatus: text("github_installation_status").notNull(),
  githubSyncStatus: text("github_sync_status").notNull(),
  githubInstallationId: integer("github_installation_id"),
  githubLastSyncLabel: text("github_last_sync_label").notNull(),
  githubLastSyncAt: integer("github_last_sync_at"),
  stripeCustomerId: text("stripe_customer_id"),
  stripeSubscriptionId: text("stripe_subscription_id"),
  stripePriceId: text("stripe_price_id"),
  billingPlanId: text("billing_plan_id").notNull(),
  billingStatus: text("billing_status").notNull(),
  billingSeatsIncluded: integer("billing_seats_included").notNull(),
  billingTrialEndsAt: text("billing_trial_ends_at"),
  billingRenewalAt: text("billing_renewal_at"),
  billingPaymentMethodLabel: text("billing_payment_method_label").notNull(),
  createdAt: integer("created_at").notNull(),
  updatedAt: integer("updated_at").notNull(),
});

// Members of the organization shown in the app shell.
export const organizationMembers = sqliteTable("organization_members", {
  id: text("id").notNull().primaryKey(),
  name: text("name").notNull(),
  email: text("email").notNull(),
  role: text("role").notNull(),
  state: text("state").notNull(),
  updatedAt: integer("updated_at").notNull(),
});

// Billing seat assignments, keyed by member email.
export const seatAssignments = sqliteTable("seat_assignments", {
  email: text("email").notNull().primaryKey(),
  createdAt: integer("created_at").notNull(),
});

// Invoice history rows for the billing UI.
export const invoices = sqliteTable("invoices", {
  id: text("id").notNull().primaryKey(),
  label: text("label").notNull(),
  issuedAt: text("issued_at").notNull(),
  amountUsd: integer("amount_usd").notNull(),
  status: text("status").notNull(),
  createdAt: integer("created_at").notNull(),
});

// App-shell login sessions: current-user snapshot, GitHub OAuth state/token,
// and starter-repo onboarding progress.
export const appSessions = sqliteTable("app_sessions", {
  id: text("id").notNull().primaryKey(),
  currentUserId: text("current_user_id"),
  currentUserName: text("current_user_name"),
  currentUserEmail: text("current_user_email"),
  currentUserGithubLogin: text("current_user_github_login"),
  currentUserRoleLabel: text("current_user_role_label"),
  eligibleOrganizationIdsJson: text("eligible_organization_ids_json").notNull(),
  activeOrganizationId: text("active_organization_id"),
  githubAccessToken: text("github_access_token"),
  githubScope: text("github_scope").notNull(),
  starterRepoStatus: text("starter_repo_status").notNull(),
  starterRepoStarredAt: integer("starter_repo_starred_at"),
  starterRepoSkippedAt: integer("starter_repo_skipped_at"),
  oauthState: text("oauth_state"),
  oauthStateExpiresAt: integer("oauth_state_expires_at"),
  createdAt: integer("created_at").notNull(),
  updatedAt: integer("updated_at").notNull(),
});

// Maps external Stripe keys (e.g. customer ids) back to an organization.
export const stripeLookup = sqliteTable("stripe_lookup", {
  lookupKey: text("lookup_key").notNull().primaryKey(),
  organizationId: text("organization_id").notNull(),
  updatedAt: integer("updated_at").notNull(),
});
|
||||
17
foundry/packages/backend/src/actors/workspace/index.ts
Normal file
17
foundry/packages/backend/src/actors/workspace/index.ts
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
import { actor, queue } from "rivetkit";
import { workflow } from "rivetkit/workflow";
import { workspaceDb } from "./db/db.js";
import { runWorkspaceWorkflow, WORKSPACE_QUEUE_NAMES, workspaceActions } from "./actions.js";

/**
 * WorkspaceActor definition: one instance per workspace, keyed by
 * workspaceId. Owns the per-workspace SQLite database, one queue per
 * workspace command, and the long-running command workflow.
 */
export const workspace = actor({
  db: workspaceDb,
  // One named queue per workspace command name.
  queues: Object.fromEntries(WORKSPACE_QUEUE_NAMES.map((name) => [name, queue()])),
  options: {
    // Actions may block on queued mutations (e.g. createTask), so allow 5 min.
    actionTimeout: 5 * 60_000,
  },
  createState: (_c, workspaceId: string) => ({
    workspaceId,
  }),
  actions: workspaceActions,
  run: workflow(runWorkspaceWorkflow),
});
|
||||
22
foundry/packages/backend/src/config/backend.ts
Normal file
22
foundry/packages/backend/src/config/backend.ts
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
import { dirname } from "node:path";
import { homedir } from "node:os";
import * as toml from "@iarna/toml";
import { ConfigSchema, type AppConfig } from "@sandbox-agent/foundry-shared";

// User-level config file location shared by the backend and CLI.
export const CONFIG_PATH = `${homedir()}/.config/foundry/config.toml`;

/**
 * Loads and validates the TOML config file. A missing file yields the
 * schema defaults (ConfigSchema.parse({})) rather than an error; a present
 * but invalid file throws from schema validation.
 */
export function loadConfig(path = CONFIG_PATH): AppConfig {
  if (!existsSync(path)) {
    return ConfigSchema.parse({});
  }

  const raw = readFileSync(path, "utf8");
  const parsed = toml.parse(raw) as unknown;
  return ConfigSchema.parse(parsed);
}

/**
 * Serializes the config back to TOML at `path`, creating parent
 * directories as needed.
 */
export function saveConfig(config: AppConfig, path = CONFIG_PATH): void {
  mkdirSync(dirname(path), { recursive: true });
  writeFileSync(path, toml.stringify(config), "utf8");
}
|
||||
13
foundry/packages/backend/src/config/workspace.ts
Normal file
13
foundry/packages/backend/src/config/workspace.ts
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
import type { AppConfig } from "@sandbox-agent/foundry-shared";
|
||||
|
||||
export function defaultWorkspace(config: AppConfig): string {
|
||||
const ws = config.workspace.default.trim();
|
||||
return ws.length > 0 ? ws : "default";
|
||||
}
|
||||
|
||||
export function resolveWorkspace(flagWorkspace: string | undefined, config: AppConfig): string {
|
||||
if (flagWorkspace && flagWorkspace.trim().length > 0) {
|
||||
return flagWorkspace.trim();
|
||||
}
|
||||
return defaultWorkspace(config);
|
||||
}
|
||||
185
foundry/packages/backend/src/driver.ts
Normal file
185
foundry/packages/backend/src/driver.ts
Normal file
|
|
@ -0,0 +1,185 @@
|
|||
import type { BranchSnapshot } from "./integrations/git/index.js";
|
||||
import type { PullRequestSnapshot } from "./integrations/github/index.js";
|
||||
import type { SandboxSession, SandboxAgentClientOptions, SandboxSessionCreateRequest } from "./integrations/sandbox-agent/client.js";
|
||||
import type {
|
||||
ListEventsRequest,
|
||||
ListPage,
|
||||
ListPageRequest,
|
||||
ProcessCreateRequest,
|
||||
ProcessInfo,
|
||||
ProcessLogFollowQuery,
|
||||
ProcessLogsResponse,
|
||||
ProcessSignalQuery,
|
||||
SessionEvent,
|
||||
SessionRecord,
|
||||
} from "sandbox-agent";
|
||||
import type { DaytonaClientOptions, DaytonaCreateSandboxOptions, DaytonaPreviewEndpoint, DaytonaSandbox } from "./integrations/daytona/client.js";
|
||||
import {
|
||||
validateRemote,
|
||||
ensureCloned,
|
||||
fetch,
|
||||
listRemoteBranches,
|
||||
remoteDefaultBaseRef,
|
||||
revParse,
|
||||
ensureRemoteBranch,
|
||||
diffStatForBranch,
|
||||
conflictsWithMain,
|
||||
} from "./integrations/git/index.js";
|
||||
import {
|
||||
gitSpiceAvailable,
|
||||
gitSpiceListStack,
|
||||
gitSpiceRebaseBranch,
|
||||
gitSpiceReparentBranch,
|
||||
gitSpiceRestackRepo,
|
||||
gitSpiceRestackSubtree,
|
||||
gitSpiceSyncRepo,
|
||||
gitSpiceTrackBranch,
|
||||
} from "./integrations/git-spice/index.js";
|
||||
import { listPullRequests, createPr, starRepository } from "./integrations/github/index.js";
|
||||
import { SandboxAgentClient } from "./integrations/sandbox-agent/client.js";
|
||||
import { DaytonaClient } from "./integrations/daytona/client.js";
|
||||
|
||||
/**
 * Git operations performed against a local clone. The default driver wires
 * these to the helpers in ./integrations/git (see createDefaultDriver).
 */
export interface GitDriver {
  validateRemote(remoteUrl: string): Promise<void>;
  ensureCloned(remoteUrl: string, targetPath: string): Promise<void>;
  fetch(repoPath: string): Promise<void>;
  listRemoteBranches(repoPath: string): Promise<BranchSnapshot[]>;
  remoteDefaultBaseRef(repoPath: string): Promise<string>;
  revParse(repoPath: string, ref: string): Promise<string>;
  ensureRemoteBranch(repoPath: string, branchName: string): Promise<void>;
  diffStatForBranch(repoPath: string, branchName: string): Promise<string>;
  conflictsWithMain(repoPath: string, branchName: string): Promise<boolean>;
}

/** One branch in a stack plus its parent (null when no parent is reported). */
export interface StackBranchSnapshot {
  branchName: string;
  parentBranch: string | null;
}

/**
 * Stacked-branch operations. The default driver wires these to git-spice
 * (see ./integrations/git-spice).
 */
export interface StackDriver {
  available(repoPath: string): Promise<boolean>;
  listStack(repoPath: string): Promise<StackBranchSnapshot[]>;
  syncRepo(repoPath: string): Promise<void>;
  restackRepo(repoPath: string): Promise<void>;
  restackSubtree(repoPath: string, branchName: string): Promise<void>;
  rebaseBranch(repoPath: string, branchName: string): Promise<void>;
  reparentBranch(repoPath: string, branchName: string, parentBranch: string): Promise<void>;
  trackBranch(repoPath: string, branchName: string, parentBranch: string): Promise<void>;
}

/** GitHub operations: PR listing/creation and starring a repository. */
export interface GithubDriver {
  listPullRequests(repoPath: string): Promise<PullRequestSnapshot[]>;
  createPr(repoPath: string, headBranch: string, title: string, body?: string): Promise<{ number: number; url: string }>;
  starRepository(repoFullName: string): Promise<void>;
}

/**
 * Structural subset of SandboxAgentClient the backend depends on, so tests
 * and alternate drivers can substitute their own implementation.
 */
export interface SandboxAgentClientLike {
  createSession(request: string | SandboxSessionCreateRequest): Promise<SandboxSession>;
  sessionStatus(sessionId: string): Promise<SandboxSession>;
  listSessions(request?: ListPageRequest): Promise<ListPage<SessionRecord>>;
  listEvents(request: ListEventsRequest): Promise<ListPage<SessionEvent>>;
  createProcess(request: ProcessCreateRequest): Promise<ProcessInfo>;
  listProcesses(): Promise<{ processes: ProcessInfo[] }>;
  getProcessLogs(processId: string, query?: ProcessLogFollowQuery): Promise<ProcessLogsResponse>;
  stopProcess(processId: string, query?: ProcessSignalQuery): Promise<ProcessInfo>;
  killProcess(processId: string, query?: ProcessSignalQuery): Promise<ProcessInfo>;
  deleteProcess(processId: string): Promise<void>;
  sendPrompt(request: { sessionId: string; prompt: string; notification?: boolean }): Promise<void>;
  cancelSession(sessionId: string): Promise<void>;
  destroySession(sessionId: string): Promise<void>;
}

/**
 * Factory for sandbox-agent clients. The default driver caches clients by
 * connection options unless `persist` is set (see createDefaultDriver).
 */
export interface SandboxAgentDriver {
  createClient(options: SandboxAgentClientOptions): SandboxAgentClientLike;
}

/** Structural subset of DaytonaClient the backend depends on. */
export interface DaytonaClientLike {
  createSandbox(options: DaytonaCreateSandboxOptions): Promise<DaytonaSandbox>;
  getSandbox(sandboxId: string): Promise<DaytonaSandbox>;
  startSandbox(sandboxId: string, timeoutSeconds?: number): Promise<void>;
  stopSandbox(sandboxId: string, timeoutSeconds?: number): Promise<void>;
  deleteSandbox(sandboxId: string): Promise<void>;
  executeCommand(sandboxId: string, command: string): Promise<{ exitCode: number; result: string }>;
  getPreviewEndpoint(sandboxId: string, port: number): Promise<DaytonaPreviewEndpoint>;
}

/** Factory for Daytona clients; the default driver caches them by options. */
export interface DaytonaDriver {
  createClient(options: DaytonaClientOptions): DaytonaClientLike;
}

/** Tmux integration hook. The default driver implementation is a no-op returning 0. */
export interface TmuxDriver {
  setWindowStatus(branchName: string, status: string): number;
}

/** Aggregate of every integration driver injected into the actor runtime. */
export interface BackendDriver {
  git: GitDriver;
  stack: StackDriver;
  github: GithubDriver;
  sandboxAgent: SandboxAgentDriver;
  daytona: DaytonaDriver;
  tmux: TmuxDriver;
}
|
||||
|
||||
export function createDefaultDriver(): BackendDriver {
|
||||
const sandboxAgentClients = new Map<string, SandboxAgentClient>();
|
||||
const daytonaClients = new Map<string, DaytonaClient>();
|
||||
|
||||
return {
|
||||
git: {
|
||||
validateRemote,
|
||||
ensureCloned,
|
||||
fetch,
|
||||
listRemoteBranches,
|
||||
remoteDefaultBaseRef,
|
||||
revParse,
|
||||
ensureRemoteBranch,
|
||||
diffStatForBranch,
|
||||
conflictsWithMain,
|
||||
},
|
||||
stack: {
|
||||
available: gitSpiceAvailable,
|
||||
listStack: gitSpiceListStack,
|
||||
syncRepo: gitSpiceSyncRepo,
|
||||
restackRepo: gitSpiceRestackRepo,
|
||||
restackSubtree: gitSpiceRestackSubtree,
|
||||
rebaseBranch: gitSpiceRebaseBranch,
|
||||
reparentBranch: gitSpiceReparentBranch,
|
||||
trackBranch: gitSpiceTrackBranch,
|
||||
},
|
||||
github: {
|
||||
listPullRequests,
|
||||
createPr,
|
||||
starRepository,
|
||||
},
|
||||
sandboxAgent: {
|
||||
createClient: (opts) => {
|
||||
if (opts.persist) {
|
||||
return new SandboxAgentClient(opts);
|
||||
}
|
||||
const key = `${opts.endpoint}|${opts.token ?? ""}|${opts.agent ?? ""}`;
|
||||
const cached = sandboxAgentClients.get(key);
|
||||
if (cached) {
|
||||
return cached;
|
||||
}
|
||||
const created = new SandboxAgentClient(opts);
|
||||
sandboxAgentClients.set(key, created);
|
||||
return created;
|
||||
},
|
||||
},
|
||||
daytona: {
|
||||
createClient: (opts) => {
|
||||
const key = `${opts.apiUrl ?? ""}|${opts.apiKey ?? ""}|${opts.target ?? ""}`;
|
||||
const cached = daytonaClients.get(key);
|
||||
if (cached) {
|
||||
return cached;
|
||||
}
|
||||
const created = new DaytonaClient(opts);
|
||||
daytonaClients.set(key, created);
|
||||
return created;
|
||||
},
|
||||
},
|
||||
tmux: {
|
||||
setWindowStatus: () => 0,
|
||||
},
|
||||
};
|
||||
}
|
||||
351
foundry/packages/backend/src/index.ts
Normal file
351
foundry/packages/backend/src/index.ts
Normal file
|
|
@ -0,0 +1,351 @@
|
|||
import { Hono } from "hono";
|
||||
import { cors } from "hono/cors";
|
||||
import { initActorRuntimeContext } from "./actors/context.js";
|
||||
import { registry, resolveManagerPort } from "./actors/index.js";
|
||||
import { workspaceKey } from "./actors/keys.js";
|
||||
import { loadConfig } from "./config/backend.js";
|
||||
import { createBackends, createNotificationService } from "./notifications/index.js";
|
||||
import { createDefaultDriver } from "./driver.js";
|
||||
import { createProviderRegistry } from "./providers/index.js";
|
||||
import { createClient } from "rivetkit/client";
|
||||
import type { FoundryBillingPlanId } from "@sandbox-agent/foundry-shared";
|
||||
import { createDefaultAppShellServices } from "./services/app-shell-runtime.js";
|
||||
import { APP_SHELL_WORKSPACE_ID } from "./actors/workspace/app-shell.js";
|
||||
|
||||
export interface BackendStartOptions {
|
||||
host?: string;
|
||||
port?: number;
|
||||
}
|
||||
|
||||
/**
 * Boot the backend: load and override config, initialize the actor runtime,
 * and serve the RivetKit API plus app-shell HTTP routes on one port.
 *
 * @param options Host/port overrides (e.g. from CLI flags) applied over config.toml.
 */
export async function startBackend(options: BackendStartOptions = {}): Promise<void> {
  // sandbox-agent agent plugins vary on which env var they read for OpenAI/Codex auth.
  // Normalize to keep local dev + docker-compose simple.
  if (!process.env.CODEX_API_KEY && process.env.OPENAI_API_KEY) {
    process.env.CODEX_API_KEY = process.env.OPENAI_API_KEY;
  }

  const config = loadConfig();
  // Explicit options win over persisted config values.
  config.backend.host = options.host ?? config.backend.host;
  config.backend.port = options.port ?? config.backend.port;

  // Allow docker-compose/dev environments to supply provider config via env vars
  // instead of writing into the container's config.toml.
  const envFirst = (...keys: string[]): string | undefined => {
    for (const key of keys) {
      const raw = process.env[key];
      if (raw && raw.trim().length > 0) return raw.trim();
    }
    return undefined;
  };

  config.providers.daytona.endpoint = envFirst("HF_DAYTONA_ENDPOINT", "DAYTONA_ENDPOINT") ?? config.providers.daytona.endpoint;
  config.providers.daytona.apiKey = envFirst("HF_DAYTONA_API_KEY", "DAYTONA_API_KEY") ?? config.providers.daytona.apiKey;

  // Wire the shared runtime context consumed by all actors, then start the runner.
  const driver = createDefaultDriver();
  const providers = createProviderRegistry(config, driver);
  const backends = await createBackends(config.notify);
  const notifications = createNotificationService(backends);
  initActorRuntimeContext(config, providers, notifications, driver, createDefaultAppShellServices());

  registry.startRunner();
  const inner = registry.serve();
  // Client used by the route handlers below to reach the app-shell workspace actor.
  const actorClient = createClient({
    endpoint: `http://127.0.0.1:${resolveManagerPort()}`,
    disableMetadataLookup: true,
  }) as any;

  // Wrap in a Hono app mounted at /api/rivet to serve on the backend port.
  // Uses Bun.serve — cannot use @hono/node-server because it conflicts with
  // RivetKit's internal Bun.serve manager server (Bun bug: mixing Node HTTP
  // server and Bun.serve in the same process breaks Bun.serve's fetch handler).
  const app = new Hono();
  const allowHeaders = [
    "Content-Type",
    "Authorization",
    "x-rivet-token",
    "x-rivet-encoding",
    "x-rivet-query",
    "x-rivet-conn-params",
    "x-rivet-actor",
    "x-rivet-target",
    "x-rivet-namespace",
    "x-rivet-endpoint",
    "x-rivet-total-slots",
    "x-rivet-runner-name",
    "x-rivet-namespace-name",
    "x-foundry-session",
  ];
  const exposeHeaders = ["Content-Type", "x-foundry-session", "x-rivet-ray-id"];
  // CORS is registered for both the exact mount path and everything beneath it.
  app.use(
    "/api/rivet/*",
    cors({
      origin: (origin) => origin ?? "*",
      credentials: true,
      allowHeaders,
      allowMethods: ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
      exposeHeaders,
    }),
  );
  app.use(
    "/api/rivet",
    cors({
      origin: (origin) => origin ?? "*",
      credentials: true,
      allowHeaders,
      allowMethods: ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
      exposeHeaders,
    }),
  );
  // Catch-all: anything not matched by an explicit route above is forwarded
  // into the RivetKit handler.
  const forward = async (c: any) => {
    try {
      // RivetKit serverless handler is configured with basePath `/api/rivet` by default.
      return await inner.fetch(c.req.raw);
    } catch (err) {
      if (err instanceof URIError) {
        return c.text("Bad Request: Malformed URI", 400);
      }
      throw err;
    }
  };

  // Handle to the singleton app-shell workspace actor.
  const appWorkspace = async () =>
    await actorClient.workspace.getOrCreate(workspaceKey(APP_SHELL_WORKSPACE_ID), {
      createWithInput: APP_SHELL_WORKSPACE_ID,
    });

  // Resolve (or mint) the caller's app session and echo it back in the
  // x-foundry-session response header.
  const resolveSessionId = async (c: any): Promise<string> => {
    const requested = c.req.header("x-foundry-session");
    const { sessionId } = await (await appWorkspace()).ensureAppSession({
      requestedSessionId: requested ?? null,
    });
    c.header("x-foundry-session", sessionId);
    return sessionId;
  };

  app.get("/api/rivet/app/snapshot", async (c) => {
    const sessionId = await resolveSessionId(c);
    return c.json(await (await appWorkspace()).getAppSnapshot({ sessionId }));
  });

  // GitHub OAuth: redirect the browser out to the provider...
  app.get("/api/rivet/app/auth/github/start", async (c) => {
    const sessionId = await resolveSessionId(c);
    const result = await (await appWorkspace()).startAppGithubAuth({ sessionId });
    return Response.redirect(result.url, 302);
  });

  // ...and complete the flow when the provider calls back.
  app.get("/api/rivet/app/auth/github/callback", async (c) => {
    const code = c.req.query("code");
    const state = c.req.query("state");
    if (!code || !state) {
      return c.text("Missing GitHub OAuth callback parameters", 400);
    }
    const result = await (await appWorkspace()).completeAppGithubAuth({ code, state });
    c.header("x-foundry-session", result.sessionId);
    return Response.redirect(result.redirectTo, 302);
  });

  app.post("/api/rivet/app/sign-out", async (c) => {
    const sessionId = await resolveSessionId(c);
    return c.json(await (await appWorkspace()).signOutApp({ sessionId }));
  });

  app.post("/api/rivet/app/onboarding/starter-repo/skip", async (c) => {
    const sessionId = await resolveSessionId(c);
    return c.json(await (await appWorkspace()).skipAppStarterRepo({ sessionId }));
  });

  app.post("/api/rivet/app/organizations/:organizationId/starter-repo/star", async (c) => {
    const sessionId = await resolveSessionId(c);
    return c.json(
      await (await appWorkspace()).starAppStarterRepo({
        sessionId,
        organizationId: c.req.param("organizationId"),
      }),
    );
  });

  app.post("/api/rivet/app/organizations/:organizationId/select", async (c) => {
    const sessionId = await resolveSessionId(c);
    return c.json(
      await (await appWorkspace()).selectAppOrganization({
        sessionId,
        organizationId: c.req.param("organizationId"),
      }),
    );
  });

  app.patch("/api/rivet/app/organizations/:organizationId/profile", async (c) => {
    const sessionId = await resolveSessionId(c);
    const body = await c.req.json();
    // Coerce missing/non-string fields to "" rather than rejecting the request.
    return c.json(
      await (await appWorkspace()).updateAppOrganizationProfile({
        sessionId,
        organizationId: c.req.param("organizationId"),
        displayName: typeof body?.displayName === "string" ? body.displayName : "",
        slug: typeof body?.slug === "string" ? body.slug : "",
        primaryDomain: typeof body?.primaryDomain === "string" ? body.primaryDomain : "",
      }),
    );
  });

  app.post("/api/rivet/app/organizations/:organizationId/import", async (c) => {
    const sessionId = await resolveSessionId(c);
    return c.json(
      await (await appWorkspace()).triggerAppRepoImport({
        sessionId,
        organizationId: c.req.param("organizationId"),
      }),
    );
  });

  app.post("/api/rivet/app/organizations/:organizationId/reconnect", async (c) => {
    const sessionId = await resolveSessionId(c);
    return c.json(
      await (await appWorkspace()).beginAppGithubInstall({
        sessionId,
        organizationId: c.req.param("organizationId"),
      }),
    );
  });

  app.post("/api/rivet/app/organizations/:organizationId/billing/checkout", async (c) => {
    const sessionId = await resolveSessionId(c);
    // Body is optional; unknown plan ids default to "team".
    const body = await c.req.json().catch(() => ({}));
    const planId = body?.planId === "free" || body?.planId === "team" ? (body.planId as FoundryBillingPlanId) : "team";
    return c.json(
      await (await appWorkspace()).createAppCheckoutSession({
        sessionId,
        organizationId: c.req.param("organizationId"),
        planId,
      }),
    );
  });

  // Stripe redirects here after checkout; session comes from query params, not headers.
  app.get("/api/rivet/app/billing/checkout/complete", async (c) => {
    const organizationId = c.req.query("organizationId");
    const sessionId = c.req.query("foundrySession");
    const checkoutSessionId = c.req.query("session_id");
    if (!organizationId || !sessionId || !checkoutSessionId) {
      return c.text("Missing Stripe checkout completion parameters", 400);
    }
    const result = await (await appWorkspace()).finalizeAppCheckoutSession({
      organizationId,
      sessionId,
      checkoutSessionId,
    });
    return Response.redirect(result.redirectTo, 302);
  });

  app.post("/api/rivet/app/organizations/:organizationId/billing/portal", async (c) => {
    const sessionId = await resolveSessionId(c);
    return c.json(
      await (await appWorkspace()).createAppBillingPortalSession({
        sessionId,
        organizationId: c.req.param("organizationId"),
      }),
    );
  });

  app.post("/api/rivet/app/organizations/:organizationId/billing/cancel", async (c) => {
    const sessionId = await resolveSessionId(c);
    return c.json(
      await (await appWorkspace()).cancelAppScheduledRenewal({
        sessionId,
        organizationId: c.req.param("organizationId"),
      }),
    );
  });

  app.post("/api/rivet/app/organizations/:organizationId/billing/resume", async (c) => {
    const sessionId = await resolveSessionId(c);
    return c.json(
      await (await appWorkspace()).resumeAppSubscription({
        sessionId,
        organizationId: c.req.param("organizationId"),
      }),
    );
  });

  app.post("/api/rivet/app/workspaces/:workspaceId/seat-usage", async (c) => {
    const sessionId = await resolveSessionId(c);
    return c.json(
      await (await appWorkspace()).recordAppSeatUsage({
        sessionId,
        workspaceId: c.req.param("workspaceId"),
      }),
    );
  });

  // Stripe webhooks: the raw body and signature header are handed to the actor verbatim.
  const handleStripeWebhook = async (c: any) => {
    const payload = await c.req.text();
    await (await appWorkspace()).handleAppStripeWebhook({
      payload,
      signatureHeader: c.req.header("stripe-signature") ?? null,
    });
    return c.json({ ok: true });
  };

  // Both paths are registered — presumably for backward compatibility; confirm before removing either.
  app.post("/api/rivet/app/webhooks/stripe", handleStripeWebhook);
  app.post("/api/rivet/app/stripe/webhook", handleStripeWebhook);

  app.all("/api/rivet", forward);
  app.all("/api/rivet/*", forward);

  const server = Bun.serve({
    fetch: app.fetch,
    hostname: config.backend.host,
    port: config.backend.port,
  });

  // Graceful shutdown on standard termination signals.
  process.on("SIGINT", async () => {
    server.stop();
    process.exit(0);
  });

  process.on("SIGTERM", async () => {
    server.stop();
    process.exit(0);
  });

  // Keep process alive.
  await new Promise<void>(() => undefined);
}
|
||||
|
||||
function parseArg(flag: string): string | undefined {
|
||||
const idx = process.argv.indexOf(flag);
|
||||
if (idx < 0) return undefined;
|
||||
return process.argv[idx + 1];
|
||||
}
|
||||
|
||||
function parseEnvPort(value: string | undefined): number | undefined {
|
||||
if (!value) {
|
||||
return undefined;
|
||||
}
|
||||
const port = Number(value);
|
||||
if (!Number.isInteger(port) || port <= 0 || port > 65535) {
|
||||
return undefined;
|
||||
}
|
||||
return port;
|
||||
}
|
||||
|
||||
async function main(): Promise<void> {
|
||||
const cmd = process.argv[2] ?? "start";
|
||||
if (cmd !== "start") {
|
||||
throw new Error(`Unsupported backend command: ${cmd}`);
|
||||
}
|
||||
|
||||
const host = parseArg("--host") ?? process.env.HOST ?? process.env.HF_BACKEND_HOST;
|
||||
const port = parseArg("--port") ?? process.env.PORT ?? process.env.HF_BACKEND_PORT;
|
||||
await startBackend({
|
||||
host,
|
||||
port: parseEnvPort(port),
|
||||
});
|
||||
}
|
||||
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
main().catch((err: unknown) => {
|
||||
const message = err instanceof Error ? (err.stack ?? err.message) : String(err);
|
||||
console.error(message);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
113
foundry/packages/backend/src/integrations/daytona/client.ts
Normal file
113
foundry/packages/backend/src/integrations/daytona/client.ts
Normal file
|
|
@ -0,0 +1,113 @@
|
|||
import { Daytona, type Image } from "@daytonaio/sdk";
|
||||
|
||||
/** Plain snapshot of a Daytona sandbox, decoupled from SDK instance types. */
export interface DaytonaSandbox {
  id: string;
  state?: string;
  snapshot?: string;
  labels?: Record<string, string>;
}

/** Inputs for creating a sandbox; mirrors the SDK create() fields used here. */
export interface DaytonaCreateSandboxOptions {
  image: string | Image;
  envVars?: Record<string, string>;
  labels?: Record<string, string>;
  // Unit (minutes vs seconds) is SDK-defined — confirm against @daytonaio/sdk docs.
  autoStopInterval?: number;
}

/** Signed preview URL (and optional token) for a sandbox port. */
export interface DaytonaPreviewEndpoint {
  url: string;
  token?: string;
}

/** Connection settings; apiUrl is normalized to end in /api (see normalizeApiUrl). */
export interface DaytonaClientOptions {
  apiUrl?: string;
  apiKey?: string;
  target?: string;
}
|
||||
|
||||
function normalizeApiUrl(input?: string): string | undefined {
|
||||
if (!input) return undefined;
|
||||
const trimmed = input.replace(/\/+$/, "");
|
||||
if (trimmed.endsWith("/api")) {
|
||||
return trimmed;
|
||||
}
|
||||
return `${trimmed}/api`;
|
||||
}
|
||||
|
||||
/**
 * Thin wrapper over the Daytona SDK. Every call looks the sandbox up by id
 * and returns plain snapshot objects rather than SDK instances.
 */
export class DaytonaClient {
  private readonly daytona: Daytona;

  constructor(options: DaytonaClientOptions) {
    const apiUrl = normalizeApiUrl(options.apiUrl);
    this.daytona = new Daytona({
      _experimental: {},
      // Only pass the keys that were actually provided so SDK defaults apply.
      ...(apiUrl ? { apiUrl } : {}),
      ...(options.apiKey ? { apiKey: options.apiKey } : {}),
      ...(options.target ? { target: options.target } : {}),
    });
  }

  /** Create a sandbox and return a plain snapshot of it. */
  async createSandbox(options: DaytonaCreateSandboxOptions): Promise<DaytonaSandbox> {
    const sandbox = await this.daytona.create({
      image: options.image,
      envVars: options.envVars,
      labels: options.labels,
      ...(options.autoStopInterval !== undefined ? { autoStopInterval: options.autoStopInterval } : {}),
    });

    return {
      id: sandbox.id,
      state: sandbox.state,
      snapshot: sandbox.snapshot,
      // NOTE(review): labels is read via a cast — presumably missing from the SDK's
      // public type; confirm this survives SDK upgrades.
      labels: (sandbox as any).labels,
    };
  }

  /** Fetch a sandbox by id as a plain snapshot. */
  async getSandbox(sandboxId: string): Promise<DaytonaSandbox> {
    const sandbox = await this.daytona.get(sandboxId);
    return {
      id: sandbox.id,
      state: sandbox.state,
      snapshot: sandbox.snapshot,
      // NOTE(review): same cast as in createSandbox — see note there.
      labels: (sandbox as any).labels,
    };
  }

  /** Start a stopped sandbox, optionally bounding the wait. */
  async startSandbox(sandboxId: string, timeoutSeconds?: number): Promise<void> {
    const sandbox = await this.daytona.get(sandboxId);
    await sandbox.start(timeoutSeconds);
  }

  /** Stop a running sandbox, optionally bounding the wait. */
  async stopSandbox(sandboxId: string, timeoutSeconds?: number): Promise<void> {
    const sandbox = await this.daytona.get(sandboxId);
    await sandbox.stop(timeoutSeconds);
  }

  /** Permanently delete a sandbox. */
  async deleteSandbox(sandboxId: string): Promise<void> {
    const sandbox = await this.daytona.get(sandboxId);
    await this.daytona.delete(sandbox);
  }

  /** Run a shell command inside the sandbox and return its exit code and output. */
  async executeCommand(sandboxId: string, command: string): Promise<{ exitCode: number; result: string }> {
    const sandbox = await this.daytona.get(sandboxId);
    const response = await sandbox.process.executeCommand(command);
    return {
      exitCode: response.exitCode,
      result: response.result,
    };
  }

  /** Get a signed preview URL for a port on the sandbox. */
  async getPreviewEndpoint(sandboxId: string, port: number): Promise<DaytonaPreviewEndpoint> {
    const sandbox = await this.daytona.get(sandboxId);
    // Use signed preview URLs for server-to-sandbox communication.
    // The standard preview link may redirect to an interactive Auth0 flow from non-browser clients.
    // Signed preview URLs work for direct HTTP access.
    //
    // Request a longer-lived URL so sessions can run for several minutes without refresh.
    const preview = await sandbox.getSignedPreviewUrl(port, 6 * 60 * 60);
    return {
      url: preview.url,
      token: preview.token,
    };
  }
}
|
||||
223
foundry/packages/backend/src/integrations/git-spice/index.ts
Normal file
223
foundry/packages/backend/src/integrations/git-spice/index.ts
Normal file
|
|
@ -0,0 +1,223 @@
|
|||
import { execFile } from "node:child_process";
|
||||
import { promisify } from "node:util";
|
||||
|
||||
const execFileAsync = promisify(execFile);

// Upper bound for any single git-spice invocation (see tryRun).
const DEFAULT_TIMEOUT_MS = 2 * 60_000;

// One way of invoking git-spice: either a standalone binary ("git-spice")
// or as a git subcommand ("git" with prefix ["spice"]).
interface SpiceCommand {
  command: string;
  prefix: string[];
}

// One branch row parsed from `git-spice log ... --json` output.
export interface SpiceStackEntry {
  branchName: string;
  parentBranch: string | null;
}
|
||||
|
||||
function spiceCommands(): SpiceCommand[] {
|
||||
const explicit = process.env.HF_GIT_SPICE_BIN?.trim();
|
||||
const list: SpiceCommand[] = [];
|
||||
if (explicit) {
|
||||
list.push({ command: explicit, prefix: [] });
|
||||
}
|
||||
list.push({ command: "git-spice", prefix: [] });
|
||||
list.push({ command: "git", prefix: ["spice"] });
|
||||
return list;
|
||||
}
|
||||
|
||||
function commandLabel(cmd: SpiceCommand): string {
|
||||
return [cmd.command, ...cmd.prefix].join(" ");
|
||||
}
|
||||
|
||||
function looksMissing(error: unknown): boolean {
|
||||
const detail = error instanceof Error ? error.message : String(error);
|
||||
return detail.includes("ENOENT") || detail.includes("not a git command") || detail.includes("command not found");
|
||||
}
|
||||
|
||||
async function tryRun(repoPath: string, cmd: SpiceCommand, args: string[]): Promise<{ stdout: string; stderr: string }> {
|
||||
return await execFileAsync(cmd.command, [...cmd.prefix, ...args], {
|
||||
cwd: repoPath,
|
||||
timeout: DEFAULT_TIMEOUT_MS,
|
||||
maxBuffer: 1024 * 1024 * 8,
|
||||
env: {
|
||||
...process.env,
|
||||
NO_COLOR: "1",
|
||||
FORCE_COLOR: "0",
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async function pickCommand(repoPath: string): Promise<SpiceCommand | null> {
|
||||
for (const candidate of spiceCommands()) {
|
||||
try {
|
||||
await tryRun(repoPath, candidate, ["--help"]);
|
||||
return candidate;
|
||||
} catch (error) {
|
||||
if (looksMissing(error)) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
async function runSpice(repoPath: string, args: string[]): Promise<{ stdout: string; stderr: string }> {
|
||||
const cmd = await pickCommand(repoPath);
|
||||
if (!cmd) {
|
||||
throw new Error("git-spice is not available (set HF_GIT_SPICE_BIN or install git-spice)");
|
||||
}
|
||||
return await tryRun(repoPath, cmd, args);
|
||||
}
|
||||
|
||||
function parseLogJson(stdout: string): SpiceStackEntry[] {
|
||||
const trimmed = stdout.trim();
|
||||
if (!trimmed) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const entries: SpiceStackEntry[] = [];
|
||||
|
||||
// `git-spice log ... --json` prints one JSON object per line.
|
||||
for (const line of trimmed.split("\n")) {
|
||||
const raw = line.trim();
|
||||
if (!raw.startsWith("{")) {
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const value = JSON.parse(raw) as {
|
||||
name?: string;
|
||||
branch?: string;
|
||||
parent?: string | null;
|
||||
parentBranch?: string | null;
|
||||
};
|
||||
const branchName = (value.name ?? value.branch ?? "").trim();
|
||||
if (!branchName) {
|
||||
continue;
|
||||
}
|
||||
const parentRaw = value.parent ?? value.parentBranch ?? null;
|
||||
const parentBranch = parentRaw ? parentRaw.trim() || null : null;
|
||||
entries.push({ branchName, parentBranch });
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
const seen = new Set<string>();
|
||||
return entries.filter((entry) => {
|
||||
if (seen.has(entry.branchName)) {
|
||||
return false;
|
||||
}
|
||||
seen.add(entry.branchName);
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
async function runFallbacks(repoPath: string, commands: string[][], errorContext: string): Promise<void> {
|
||||
const failures: string[] = [];
|
||||
for (const args of commands) {
|
||||
try {
|
||||
await runSpice(repoPath, args);
|
||||
return;
|
||||
} catch (error) {
|
||||
failures.push(`${args.join(" ")} :: ${error instanceof Error ? error.message : String(error)}`);
|
||||
}
|
||||
}
|
||||
throw new Error(`${errorContext}. attempts=${failures.join(" | ")}`);
|
||||
}
|
||||
|
||||
export async function gitSpiceAvailable(repoPath: string): Promise<boolean> {
|
||||
return (await pickCommand(repoPath)) !== null;
|
||||
}
|
||||
|
||||
export async function gitSpiceListStack(repoPath: string): Promise<SpiceStackEntry[]> {
|
||||
try {
|
||||
const { stdout } = await runSpice(repoPath, ["log", "short", "--all", "--json", "--no-cr-status", "--no-prompt"]);
|
||||
return parseLogJson(stdout);
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
export async function gitSpiceSyncRepo(repoPath: string): Promise<void> {
|
||||
await runFallbacks(
|
||||
repoPath,
|
||||
[
|
||||
["repo", "sync", "--restack", "--no-prompt"],
|
||||
["repo", "sync", "--restack"],
|
||||
["repo", "sync"],
|
||||
],
|
||||
"git-spice repo sync failed",
|
||||
);
|
||||
}
|
||||
|
||||
export async function gitSpiceRestackRepo(repoPath: string): Promise<void> {
|
||||
await runFallbacks(
|
||||
repoPath,
|
||||
[
|
||||
["repo", "restack", "--no-prompt"],
|
||||
["repo", "restack"],
|
||||
],
|
||||
"git-spice repo restack failed",
|
||||
);
|
||||
}
|
||||
|
||||
export async function gitSpiceRestackSubtree(repoPath: string, branchName: string): Promise<void> {
|
||||
await runFallbacks(
|
||||
repoPath,
|
||||
[
|
||||
["upstack", "restack", "--branch", branchName, "--no-prompt"],
|
||||
["upstack", "restack", "--branch", branchName],
|
||||
["branch", "restack", "--branch", branchName, "--no-prompt"],
|
||||
["branch", "restack", "--branch", branchName],
|
||||
],
|
||||
`git-spice restack subtree failed for ${branchName}`,
|
||||
);
|
||||
}
|
||||
|
||||
export async function gitSpiceRebaseBranch(repoPath: string, branchName: string): Promise<void> {
|
||||
await runFallbacks(
|
||||
repoPath,
|
||||
[
|
||||
["branch", "restack", "--branch", branchName, "--no-prompt"],
|
||||
["branch", "restack", "--branch", branchName],
|
||||
],
|
||||
`git-spice branch restack failed for ${branchName}`,
|
||||
);
|
||||
}
|
||||
|
||||
export async function gitSpiceReparentBranch(repoPath: string, branchName: string, parentBranch: string): Promise<void> {
|
||||
await runFallbacks(
|
||||
repoPath,
|
||||
[
|
||||
["upstack", "onto", "--branch", branchName, parentBranch, "--no-prompt"],
|
||||
["upstack", "onto", "--branch", branchName, parentBranch],
|
||||
["branch", "onto", "--branch", branchName, parentBranch, "--no-prompt"],
|
||||
["branch", "onto", "--branch", branchName, parentBranch],
|
||||
],
|
||||
`git-spice reparent failed for ${branchName} -> ${parentBranch}`,
|
||||
);
|
||||
}
|
||||
|
||||
export async function gitSpiceTrackBranch(repoPath: string, branchName: string, parentBranch: string): Promise<void> {
|
||||
await runFallbacks(
|
||||
repoPath,
|
||||
[
|
||||
["branch", "track", branchName, "--base", parentBranch, "--no-prompt"],
|
||||
["branch", "track", branchName, "--base", parentBranch],
|
||||
],
|
||||
`git-spice track failed for ${branchName}`,
|
||||
);
|
||||
}
|
||||
|
||||
export function normalizeBaseBranchName(ref: string): string {
|
||||
const trimmed = ref.trim();
|
||||
if (!trimmed) {
|
||||
return "main";
|
||||
}
|
||||
return trimmed.startsWith("origin/") ? trimmed.slice("origin/".length) : trimmed;
|
||||
}
|
||||
|
||||
export function describeSpiceCommandForLogs(repoPath: string): Promise<string | null> {
|
||||
return pickCommand(repoPath).then((cmd) => (cmd ? commandLabel(cmd) : null));
|
||||
}
|
||||
260
foundry/packages/backend/src/integrations/git/index.ts
Normal file
260
foundry/packages/backend/src/integrations/git/index.ts
Normal file
|
|
@ -0,0 +1,260 @@
|
|||
import { execFile } from "node:child_process";
|
||||
import { chmodSync, existsSync, mkdirSync, mkdtempSync, writeFileSync } from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { dirname, resolve } from "node:path";
|
||||
import { promisify } from "node:util";
|
||||
|
||||
const execFileAsync = promisify(execFile);

// Timeouts for the git subprocesses below. Remote validation is a quick
// ls-remote probe; fetch and clone move real data and get longer budgets.
const DEFAULT_GIT_VALIDATE_REMOTE_TIMEOUT_MS = 15_000;
const DEFAULT_GIT_FETCH_TIMEOUT_MS = 2 * 60_000;
const DEFAULT_GIT_CLONE_TIMEOUT_MS = 5 * 60_000;
|
||||
|
||||
function resolveGithubToken(): string | null {
|
||||
const token = process.env.GH_TOKEN ?? process.env.GITHUB_TOKEN ?? process.env.HF_GITHUB_TOKEN ?? process.env.HF_GH_TOKEN ?? null;
|
||||
if (!token) return null;
|
||||
const trimmed = token.trim();
|
||||
return trimmed.length > 0 ? trimmed : null;
|
||||
}
|
||||
|
||||
// Lazily-created GIT_ASKPASS helper script path, cached for the process lifetime.
let cachedAskpassPath: string | null = null;

/**
 * Write (once per process) a GIT_ASKPASS helper script and return its path.
 * The script answers git's credential prompts with "x-access-token" as the
 * username and the GitHub token (read from env at runtime) as the password,
 * so the token itself is never embedded on disk.
 */
function ensureAskpassScript(): string {
  if (cachedAskpassPath) {
    return cachedAskpassPath;
  }

  const dir = mkdtempSync(resolve(tmpdir(), "foundry-git-askpass-"));
  const path = resolve(dir, "askpass.sh");

  // Git invokes $GIT_ASKPASS with the prompt string as argv[1]. Provide both username and password.
  // We avoid embedding the token in this file; it is read from env at runtime.
  const content = [
    "#!/bin/sh",
    'prompt="$1"',
    // Prefer GH_TOKEN/GITHUB_TOKEN but support HF_* aliases too.
    'token="${GH_TOKEN:-${GITHUB_TOKEN:-${HF_GITHUB_TOKEN:-${HF_GH_TOKEN:-}}}}"',
    'case "$prompt" in',
    '  *Username*) echo "x-access-token" ;;',
    '  *Password*) echo "$token" ;;',
    '  *) echo "" ;;',
    "esac",
    "",
  ].join("\n");

  writeFileSync(path, content, "utf8");
  // Owner-only exec permission: the script is a credential helper.
  chmodSync(path, 0o700);
  cachedAskpassPath = path;
  return path;
}
|
||||
|
||||
function gitEnv(): Record<string, string> {
|
||||
const env: Record<string, string> = { ...(process.env as Record<string, string>) };
|
||||
env.GIT_TERMINAL_PROMPT = "0";
|
||||
|
||||
const token = resolveGithubToken();
|
||||
if (token) {
|
||||
env.GIT_ASKPASS = ensureAskpassScript();
|
||||
// Some tooling expects these vars; keep them aligned.
|
||||
env.GITHUB_TOKEN = env.GITHUB_TOKEN || token;
|
||||
env.GH_TOKEN = env.GH_TOKEN || token;
|
||||
}
|
||||
|
||||
return env;
|
||||
}
|
||||
|
||||
/** A remote branch name paired with the commit SHA it currently points at. */
export interface BranchSnapshot {
  branchName: string;
  commitSha: string;
}
|
||||
|
||||
export async function fetch(repoPath: string): Promise<void> {
|
||||
await execFileAsync("git", ["-C", repoPath, "fetch", "--prune"], {
|
||||
timeout: DEFAULT_GIT_FETCH_TIMEOUT_MS,
|
||||
env: gitEnv(),
|
||||
});
|
||||
}
|
||||
|
||||
export async function revParse(repoPath: string, ref: string): Promise<string> {
|
||||
const { stdout } = await execFileAsync("git", ["-C", repoPath, "rev-parse", ref], { env: gitEnv() });
|
||||
return stdout.trim();
|
||||
}
|
||||
|
||||
export async function validateRemote(remoteUrl: string): Promise<void> {
|
||||
const remote = remoteUrl.trim();
|
||||
if (!remote) {
|
||||
throw new Error("remoteUrl is required");
|
||||
}
|
||||
try {
|
||||
await execFileAsync("git", ["ls-remote", "--exit-code", remote, "HEAD"], {
|
||||
// This command does not need repo context. Running from a neutral directory
|
||||
// avoids inheriting broken worktree .git indirection inside dev containers.
|
||||
cwd: tmpdir(),
|
||||
maxBuffer: 1024 * 1024,
|
||||
timeout: DEFAULT_GIT_VALIDATE_REMOTE_TIMEOUT_MS,
|
||||
env: gitEnv(),
|
||||
});
|
||||
} catch (error) {
|
||||
const detail = error instanceof Error ? error.message : String(error);
|
||||
throw new Error(`git remote validation failed: ${detail}`);
|
||||
}
|
||||
}
|
||||
|
||||
function isGitRepo(path: string): boolean {
|
||||
return existsSync(resolve(path, ".git"));
|
||||
}
|
||||
|
||||
/**
 * Ensure `targetPath` is a git clone of `remoteUrl`.
 *
 * If the target already exists it must be a git repo; its origin URL is then
 * re-pointed at `remoteUrl` and the repo is fetched. Otherwise a fresh clone
 * is created (parent directories are made as needed) and fetched once.
 *
 * @throws when `remoteUrl` is blank, or the target exists but is not a repo.
 */
export async function ensureCloned(remoteUrl: string, targetPath: string): Promise<void> {
  const remote = remoteUrl.trim();
  if (!remote) {
    throw new Error("remoteUrl is required");
  }

  if (existsSync(targetPath)) {
    if (!isGitRepo(targetPath)) {
      throw new Error(`targetPath exists but is not a git repo: ${targetPath}`);
    }

    // Keep origin aligned with the configured remote URL.
    await execFileAsync("git", ["-C", targetPath, "remote", "set-url", "origin", remote], {
      maxBuffer: 1024 * 1024,
      timeout: DEFAULT_GIT_FETCH_TIMEOUT_MS,
      env: gitEnv(),
    });
    await fetch(targetPath);
    return;
  }

  mkdirSync(dirname(targetPath), { recursive: true });
  await execFileAsync("git", ["clone", remote, targetPath], {
    // Clone output can be chatty; give it a generous buffer and timeout.
    maxBuffer: 1024 * 1024 * 8,
    timeout: DEFAULT_GIT_CLONE_TIMEOUT_MS,
    env: gitEnv(),
  });
  await fetch(targetPath);
}
|
||||
|
||||
export async function remoteDefaultBaseRef(repoPath: string): Promise<string> {
|
||||
try {
|
||||
const { stdout } = await execFileAsync("git", ["-C", repoPath, "symbolic-ref", "refs/remotes/origin/HEAD"], { env: gitEnv() });
|
||||
const ref = stdout.trim(); // refs/remotes/origin/main
|
||||
const match = ref.match(/^refs\/remotes\/(.+)$/);
|
||||
if (match?.[1]) {
|
||||
return match[1];
|
||||
}
|
||||
} catch {
|
||||
// fall through
|
||||
}
|
||||
|
||||
const candidates = ["origin/main", "origin/master", "main", "master"];
|
||||
for (const ref of candidates) {
|
||||
try {
|
||||
await execFileAsync("git", ["-C", repoPath, "rev-parse", "--verify", ref], { env: gitEnv() });
|
||||
return ref;
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
return "origin/main";
|
||||
}
|
||||
|
||||
export async function listRemoteBranches(repoPath: string): Promise<BranchSnapshot[]> {
|
||||
const { stdout } = await execFileAsync("git", ["-C", repoPath, "for-each-ref", "--format=%(refname:short) %(objectname)", "refs/remotes/origin"], {
|
||||
maxBuffer: 1024 * 1024,
|
||||
env: gitEnv(),
|
||||
});
|
||||
|
||||
return stdout
|
||||
.trim()
|
||||
.split("\n")
|
||||
.filter((line) => line.trim().length > 0)
|
||||
.map((line) => {
|
||||
const [refName, commitSha] = line.trim().split(/\s+/, 2);
|
||||
const short = (refName ?? "").trim();
|
||||
const branchName = short.replace(/^origin\//, "");
|
||||
return { branchName, commitSha: commitSha ?? "" };
|
||||
})
|
||||
.filter((row) => row.branchName.length > 0 && row.branchName !== "HEAD" && row.branchName !== "origin" && row.commitSha.length > 0);
|
||||
}
|
||||
|
||||
async function remoteBranchExists(repoPath: string, branchName: string): Promise<boolean> {
|
||||
try {
|
||||
await execFileAsync("git", ["-C", repoPath, "show-ref", "--verify", `refs/remotes/origin/${branchName}`], { env: gitEnv() });
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export async function ensureRemoteBranch(repoPath: string, branchName: string): Promise<void> {
|
||||
await fetch(repoPath);
|
||||
if (await remoteBranchExists(repoPath, branchName)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const baseRef = await remoteDefaultBaseRef(repoPath);
|
||||
await execFileAsync("git", ["-C", repoPath, "push", "origin", `${baseRef}:refs/heads/${branchName}`], {
|
||||
maxBuffer: 1024 * 1024 * 2,
|
||||
env: gitEnv(),
|
||||
});
|
||||
await fetch(repoPath);
|
||||
}
|
||||
|
||||
export async function diffStatForBranch(repoPath: string, branchName: string): Promise<string> {
|
||||
try {
|
||||
const baseRef = await remoteDefaultBaseRef(repoPath);
|
||||
const headRef = `origin/${branchName}`;
|
||||
const { stdout } = await execFileAsync("git", ["-C", repoPath, "diff", "--shortstat", `${baseRef}...${headRef}`], {
|
||||
maxBuffer: 1024 * 1024,
|
||||
env: gitEnv(),
|
||||
});
|
||||
const trimmed = stdout.trim();
|
||||
if (!trimmed) {
|
||||
return "+0/-0";
|
||||
}
|
||||
const insertMatch = trimmed.match(/(\d+)\s+insertion/);
|
||||
const deleteMatch = trimmed.match(/(\d+)\s+deletion/);
|
||||
const insertions = insertMatch ? insertMatch[1] : "0";
|
||||
const deletions = deleteMatch ? deleteMatch[1] : "0";
|
||||
return `+${insertions}/-${deletions}`;
|
||||
} catch {
|
||||
return "+0/-0";
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Best-effort conflict probe: dry-merge origin/<branch> with the default
 * base using `git merge-tree --write-tree` (git 2.38+). Returns false when
 * the base ref cannot even be resolved.
 *
 * NOTE(review): any merge-tree failure — including git older than 2.38 that
 * lacks --write-tree — is reported as a conflict (true). Confirm that is the
 * intended bias.
 */
export async function conflictsWithMain(repoPath: string, branchName: string): Promise<boolean> {
  try {
    const baseRef = await remoteDefaultBaseRef(repoPath);
    const headRef = `origin/${branchName}`;
    // Use merge-tree (git 2.38+) for a clean conflict check.
    try {
      await execFileAsync("git", ["-C", repoPath, "merge-tree", "--write-tree", "--no-messages", baseRef, headRef], { env: gitEnv() });
      // If merge-tree exits 0, no conflicts. Non-zero exit means conflicts.
      return false;
    } catch {
      // merge-tree exits non-zero when there are conflicts
      return true;
    }
  } catch {
    return false;
  }
}
|
||||
|
||||
/**
 * Extract the owner segment from the repo's `origin` remote URL.
 * Handles SSH (git@github.com:owner/repo.git) and HTTPS
 * (https://github.com/owner/repo.git) forms; returns "" when the remote
 * is missing or unparseable.
 */
export async function getOriginOwner(repoPath: string): Promise<string> {
  try {
    const { stdout } = await execFileAsync("git", ["-C", repoPath, "remote", "get-url", "origin"], { env: gitEnv() });
    const url = stdout.trim();
    // Handle SSH: git@github.com:owner/repo.git
    // NOTE(review): this pattern also matches HTTPS URLs (a "/" satisfies
    // [:\/]), so the HTTPS branch below is rarely reached in practice.
    const sshMatch = url.match(/[:\/]([^\/]+)\/[^\/]+(?:\.git)?$/);
    if (sshMatch) {
      return sshMatch[1] ?? "";
    }
    // Handle HTTPS: https://github.com/owner/repo.git
    const httpsMatch = url.match(/\/\/[^\/]+\/([^\/]+)\//);
    if (httpsMatch) {
      return httpsMatch[1] ?? "";
    }
    return "";
  } catch {
    return "";
  }
}
|
||||
231
foundry/packages/backend/src/integrations/github/index.ts
Normal file
231
foundry/packages/backend/src/integrations/github/index.ts
Normal file
|
|
@ -0,0 +1,231 @@
|
|||
import { execFile } from "node:child_process";
|
||||
import { promisify } from "node:util";
|
||||
|
||||
const execFileAsync = promisify(execFile);

/** Normalized pull-request view consumed by the rest of the backend. */
export interface PullRequestSnapshot {
  number: number;
  headRefName: string;
  state: string;
  title: string;
  url: string;
  author: string;
  isDraft: boolean;
  // CI summary: "running", "<passed>/<total>", or null when no checks exist.
  ciStatus: string | null;
  // "APPROVED" | "CHANGES_REQUESTED" | "PENDING", or null when there are no
  // actionable reviews.
  reviewStatus: string | null;
  reviewer: string | null;
}

/** Raw item shape returned by `gh pr list` / `gh pr view` with --json. */
interface GhPrListItem {
  number: number;
  headRefName: string;
  state: string;
  title: string;
  url?: string;
  author?: { login?: string };
  isDraft?: boolean;
  statusCheckRollup?: Array<{
    state?: string;
    status?: string;
    conclusion?: string;
    __typename?: string;
  }>;
  reviews?: Array<{
    state?: string;
    author?: { login?: string };
  }>;
}
|
||||
|
||||
function parseCiStatus(checks: GhPrListItem["statusCheckRollup"]): string | null {
|
||||
if (!checks || checks.length === 0) return null;
|
||||
|
||||
let total = 0;
|
||||
let successes = 0;
|
||||
let hasRunning = false;
|
||||
|
||||
for (const check of checks) {
|
||||
total++;
|
||||
const conclusion = check.conclusion?.toUpperCase();
|
||||
const state = check.state?.toUpperCase();
|
||||
const status = check.status?.toUpperCase();
|
||||
|
||||
if (conclusion === "SUCCESS" || state === "SUCCESS") {
|
||||
successes++;
|
||||
} else if (status === "IN_PROGRESS" || status === "QUEUED" || status === "PENDING" || state === "PENDING") {
|
||||
hasRunning = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (hasRunning && successes < total) {
|
||||
return "running";
|
||||
}
|
||||
|
||||
return `${successes}/${total}`;
|
||||
}
|
||||
|
||||
function parseReviewStatus(reviews: GhPrListItem["reviews"]): { status: string | null; reviewer: string | null } {
|
||||
if (!reviews || reviews.length === 0) {
|
||||
return { status: null, reviewer: null };
|
||||
}
|
||||
|
||||
// Build a map of latest review per author
|
||||
const latestByAuthor = new Map<string, { state: string; login: string }>();
|
||||
for (const review of reviews) {
|
||||
const login = review.author?.login ?? "unknown";
|
||||
const state = review.state?.toUpperCase() ?? "";
|
||||
if (state === "COMMENTED") continue; // Skip comments, only track actionable reviews
|
||||
latestByAuthor.set(login, { state, login });
|
||||
}
|
||||
|
||||
// Check for CHANGES_REQUESTED first (takes priority), then APPROVED
|
||||
for (const [, entry] of latestByAuthor) {
|
||||
if (entry.state === "CHANGES_REQUESTED") {
|
||||
return { status: "CHANGES_REQUESTED", reviewer: entry.login };
|
||||
}
|
||||
}
|
||||
|
||||
for (const [, entry] of latestByAuthor) {
|
||||
if (entry.state === "APPROVED") {
|
||||
return { status: "APPROVED", reviewer: entry.login };
|
||||
}
|
||||
}
|
||||
|
||||
// If there are reviews but none are APPROVED or CHANGES_REQUESTED
|
||||
if (latestByAuthor.size > 0) {
|
||||
const first = latestByAuthor.values().next().value;
|
||||
return { status: "PENDING", reviewer: first?.login ?? null };
|
||||
}
|
||||
|
||||
return { status: null, reviewer: null };
|
||||
}
|
||||
|
||||
function snapshotFromGhItem(item: GhPrListItem): PullRequestSnapshot {
|
||||
const { status: reviewStatus, reviewer } = parseReviewStatus(item.reviews);
|
||||
return {
|
||||
number: item.number,
|
||||
headRefName: item.headRefName,
|
||||
state: item.state,
|
||||
title: item.title,
|
||||
url: item.url ?? "",
|
||||
author: item.author?.login ?? "",
|
||||
isDraft: item.isDraft ?? false,
|
||||
ciStatus: parseCiStatus(item.statusCheckRollup),
|
||||
reviewStatus,
|
||||
reviewer,
|
||||
};
|
||||
}
|
||||
|
||||
const PR_JSON_FIELDS = "number,headRefName,state,title,url,author,isDraft,statusCheckRollup,reviews";
|
||||
|
||||
export async function listPullRequests(repoPath: string): Promise<PullRequestSnapshot[]> {
|
||||
try {
|
||||
const { stdout } = await execFileAsync("gh", ["pr", "list", "--json", PR_JSON_FIELDS, "--limit", "200"], { maxBuffer: 1024 * 1024 * 4, cwd: repoPath });
|
||||
|
||||
const parsed = JSON.parse(stdout) as GhPrListItem[];
|
||||
|
||||
return parsed.map((item) => {
|
||||
// Handle fork PRs where headRefName may contain "owner:branch"
|
||||
const headRefName = item.headRefName.includes(":") ? (item.headRefName.split(":").pop() ?? item.headRefName) : item.headRefName;
|
||||
|
||||
return snapshotFromGhItem({ ...item, headRefName });
|
||||
});
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
export async function getPrInfo(repoPath: string, branchName: string): Promise<PullRequestSnapshot | null> {
|
||||
try {
|
||||
const { stdout } = await execFileAsync("gh", ["pr", "view", branchName, "--json", PR_JSON_FIELDS], { maxBuffer: 1024 * 1024 * 4, cwd: repoPath });
|
||||
|
||||
const item = JSON.parse(stdout) as GhPrListItem;
|
||||
return snapshotFromGhItem(item);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export async function createPr(repoPath: string, headBranch: string, title: string, body?: string): Promise<{ number: number; url: string }> {
|
||||
const args = ["pr", "create", "--title", title, "--head", headBranch];
|
||||
if (body) {
|
||||
args.push("--body", body);
|
||||
} else {
|
||||
args.push("--body", "");
|
||||
}
|
||||
|
||||
const { stdout } = await execFileAsync("gh", args, {
|
||||
maxBuffer: 1024 * 1024,
|
||||
cwd: repoPath,
|
||||
});
|
||||
|
||||
// gh pr create outputs the PR URL on success
|
||||
const url = stdout.trim();
|
||||
// Extract PR number from URL: https://github.com/owner/repo/pull/123
|
||||
const numberMatch = url.match(/\/pull\/(\d+)/);
|
||||
const number = numberMatch ? parseInt(numberMatch[1]!, 10) : 0;
|
||||
|
||||
return { number, url };
|
||||
}
|
||||
|
||||
export async function starRepository(repoFullName: string): Promise<void> {
|
||||
try {
|
||||
await execFileAsync("gh", ["api", "--method", "PUT", `user/starred/${repoFullName}`], {
|
||||
maxBuffer: 1024 * 1024,
|
||||
});
|
||||
} catch (error) {
|
||||
const message =
|
||||
error instanceof Error ? error.message : `Failed to star GitHub repository ${repoFullName}. Ensure GitHub auth is configured for the backend.`;
|
||||
throw new Error(message);
|
||||
}
|
||||
}
|
||||
|
||||
export async function getAllowedMergeMethod(repoPath: string): Promise<"squash" | "rebase" | "merge"> {
|
||||
try {
|
||||
// Get the repo owner/name from gh
|
||||
const { stdout: repoJson } = await execFileAsync("gh", ["repo", "view", "--json", "owner,name"], { cwd: repoPath });
|
||||
const repo = JSON.parse(repoJson) as { owner: { login: string }; name: string };
|
||||
const repoFullName = `${repo.owner.login}/${repo.name}`;
|
||||
|
||||
const { stdout } = await execFileAsync("gh", ["api", `repos/${repoFullName}`, "--jq", ".allow_squash_merge, .allow_rebase_merge, .allow_merge_commit"], {
|
||||
maxBuffer: 1024 * 1024,
|
||||
cwd: repoPath,
|
||||
});
|
||||
|
||||
const lines = stdout.trim().split("\n");
|
||||
const allowSquash = lines[0]?.trim() === "true";
|
||||
const allowRebase = lines[1]?.trim() === "true";
|
||||
const allowMerge = lines[2]?.trim() === "true";
|
||||
|
||||
if (allowSquash) return "squash";
|
||||
if (allowRebase) return "rebase";
|
||||
if (allowMerge) return "merge";
|
||||
return "squash";
|
||||
} catch {
|
||||
return "squash";
|
||||
}
|
||||
}
|
||||
|
||||
export async function mergePr(repoPath: string, prNumber: number): Promise<void> {
|
||||
const method = await getAllowedMergeMethod(repoPath);
|
||||
await execFileAsync("gh", ["pr", "merge", String(prNumber), `--${method}`, "--delete-branch"], { cwd: repoPath });
|
||||
}
|
||||
|
||||
export async function isPrMerged(repoPath: string, branchName: string): Promise<boolean> {
|
||||
try {
|
||||
const { stdout } = await execFileAsync("gh", ["pr", "view", branchName, "--json", "state"], { cwd: repoPath });
|
||||
const parsed = JSON.parse(stdout) as { state: string };
|
||||
return parsed.state.toUpperCase() === "MERGED";
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export async function getPrTitle(repoPath: string, branchName: string): Promise<string | null> {
|
||||
try {
|
||||
const { stdout } = await execFileAsync("gh", ["pr", "view", branchName, "--json", "title"], { cwd: repoPath });
|
||||
const parsed = JSON.parse(stdout) as { title: string };
|
||||
return parsed.title;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
140
foundry/packages/backend/src/integrations/graphite/index.ts
Normal file
140
foundry/packages/backend/src/integrations/graphite/index.ts
Normal file
|
|
@ -0,0 +1,140 @@
|
|||
import { execFile } from "node:child_process";
|
||||
import { promisify } from "node:util";
|
||||
|
||||
const execFileAsync = promisify(execFile);

/**
 * Probe for a working Graphite CLI by running `gt trunk` in the repo.
 * Any failure (gt missing, repo not initialized) reports unavailable.
 */
export async function graphiteAvailable(repoPath: string): Promise<boolean> {
  try {
    await execFileAsync("gt", ["trunk"], { cwd: repoPath });
    return true;
  } catch {
    return false;
  }
}
|
||||
|
||||
/**
 * Run `gt get <branch>`; returns false instead of throwing on failure.
 */
export async function graphiteGet(repoPath: string, branchName: string): Promise<boolean> {
  try {
    await execFileAsync("gt", ["get", branchName], { cwd: repoPath });
    return true;
  } catch {
    return false;
  }
}
|
||||
|
||||
/** `gt create <branch>`: create a new Graphite-tracked branch. */
export async function graphiteCreateBranch(repoPath: string, branchName: string): Promise<void> {
  await execFileAsync("gt", ["create", branchName], { cwd: repoPath });
}

/** `gt checkout <branch>`: switch to a branch via Graphite. */
export async function graphiteCheckout(repoPath: string, branchName: string): Promise<void> {
  await execFileAsync("gt", ["checkout", branchName], { cwd: repoPath });
}

/** `gt submit --no-edit`: submit the stack without opening an editor. */
export async function graphiteSubmit(repoPath: string): Promise<void> {
  await execFileAsync("gt", ["submit", "--no-edit"], { cwd: repoPath });
}

/** `gt merge <branch>`: merge a branch through Graphite. */
export async function graphiteMergeBranch(repoPath: string, branchName: string): Promise<void> {
  await execFileAsync("gt", ["merge", branchName], { cwd: repoPath });
}

/** `gt abandon <branch>`: drop a branch from the Graphite stack. */
export async function graphiteAbandon(repoPath: string, branchName: string): Promise<void> {
  await execFileAsync("gt", ["abandon", branchName], { cwd: repoPath });
}
|
||||
|
||||
/** One branch in a Graphite stack and its parent (null for the root). */
export interface GraphiteStackEntry {
  branchName: string;
  parentBranch: string | null;
}

/**
 * Read the Graphite stack: tries `gt log --json` first, then falls back to
 * heuristically parsing the human-readable `gt log` tree. Returns an empty
 * list when both fail.
 *
 * NOTE(review): the text fallback infers parent/child from indentation
 * (level = indent / 2) — verify against the gt version actually deployed.
 */
export async function graphiteGetStack(repoPath: string): Promise<GraphiteStackEntry[]> {
  try {
    // Try JSON output first
    const { stdout } = await execFileAsync("gt", ["log", "--json"], {
      cwd: repoPath,
      maxBuffer: 1024 * 1024,
    });

    // Field names vary across gt versions; accept both spellings.
    const parsed = JSON.parse(stdout) as Array<{
      branch?: string;
      name?: string;
      parent?: string;
      parentBranch?: string;
    }>;

    return parsed.map((entry) => ({
      branchName: entry.branch ?? entry.name ?? "",
      parentBranch: entry.parent ?? entry.parentBranch ?? null,
    }));
  } catch {
    // Fall back to text parsing of `gt log`
    try {
      const { stdout } = await execFileAsync("gt", ["log"], {
        cwd: repoPath,
        maxBuffer: 1024 * 1024,
      });

      const entries: GraphiteStackEntry[] = [];
      const lines = stdout.split("\n").filter((l) => l.trim().length > 0);

      // Parse indented tree output: each line has tree chars (|, /, \, -, etc.)
      // followed by branch names. Build parent-child from indentation level.
      const branchStack: string[] = [];

      for (const line of lines) {
        // Strip ANSI color codes
        const clean = line.replace(/\x1b\[[0-9;]*m/g, "");
        // Extract branch name: skip tree characters and whitespace
        const branchMatch = clean.match(/[│├└─|/\\*\s]*(?:◉|○|●)?\s*(.+)/);
        if (!branchMatch) continue;

        const branchName = branchMatch[1]!.trim();
        if (!branchName || branchName.startsWith("(") || branchName === "") continue;

        // Determine indentation level by counting leading whitespace/tree chars
        const indent = clean.search(/[a-zA-Z0-9]/);
        const level = Math.max(0, Math.floor(indent / 2));

        // Trim stack to current level
        while (branchStack.length > level) {
          branchStack.pop();
        }

        const parentBranch = branchStack.length > 0 ? (branchStack[branchStack.length - 1] ?? null) : null;

        entries.push({ branchName, parentBranch });
        branchStack.push(branchName);
      }

      return entries;
    } catch {
      return [];
    }
  }
}
|
||||
|
||||
/**
 * Resolve the parent branch of `branchName`: first by scraping the output
 * of `gt get`, then by consulting the stack listing. Returns null when
 * no parent can be determined.
 */
export async function graphiteGetParent(repoPath: string, branchName: string): Promise<string | null> {
  try {
    // Try `gt get <branchName>` to see parent info
    const { stdout } = await execFileAsync("gt", ["get", branchName], {
      cwd: repoPath,
      maxBuffer: 1024 * 1024,
    });

    // Parse output for parent branch reference
    // NOTE(review): assumes gt prints a "parent: <name>" line — confirm for
    // the deployed gt version.
    const parentMatch = stdout.match(/parent:\s*(\S+)/i);
    if (parentMatch) {
      return parentMatch[1] ?? null;
    }
  } catch {
    // Fall through to stack-based lookup
  }

  // Fall back to stack info
  try {
    const stack = await graphiteGetStack(repoPath);
    const entry = stack.find((e) => e.branchName === branchName);
    return entry?.parentBranch ?? null;
  } catch {
    return null;
  }
}
|
||||
|
|
@ -0,0 +1,418 @@
|
|||
import type { AgentType } from "@sandbox-agent/foundry-shared";
|
||||
import type {
|
||||
ListEventsRequest,
|
||||
ListPage,
|
||||
ListPageRequest,
|
||||
ProcessCreateRequest,
|
||||
ProcessInfo,
|
||||
ProcessLogFollowQuery,
|
||||
ProcessLogsResponse,
|
||||
ProcessSignalQuery,
|
||||
SessionEvent,
|
||||
SessionPersistDriver,
|
||||
SessionRecord,
|
||||
} from "sandbox-agent";
|
||||
import { SandboxAgent } from "sandbox-agent";
|
||||
|
||||
// Agents the sandbox client can drive: the shared AgentType plus opencode.
export type AgentId = AgentType | "opencode";

/** Minimal view of a sandbox session: its id and coarse run status. */
export interface SandboxSession {
  id: string;
  status: "running" | "idle" | "error";
}

/** Options for creating a session; all fields are optional. */
export interface SandboxSessionCreateRequest {
  prompt?: string;
  cwd?: string;
  agent?: AgentId;
}

/** A follow-up prompt addressed to an existing session. */
export interface SandboxSessionPromptRequest {
  sessionId: string;
  prompt: string;
  notification?: boolean;
}

/** Connection options for SandboxAgentClient. */
export interface SandboxAgentClientOptions {
  endpoint: string;
  token?: string;
  agent?: AgentId;
  persist?: SessionPersistDriver;
}

// Agent used when a request does not specify one.
const DEFAULT_AGENT: AgentId = "codex";
|
||||
|
||||
function modeIdForAgent(agent: AgentId): string | null {
|
||||
switch (agent) {
|
||||
case "codex":
|
||||
return "full-access";
|
||||
case "claude":
|
||||
return "acceptEdits";
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function normalizeStatusFromMessage(payload: unknown): SandboxSession["status"] | null {
|
||||
if (payload && typeof payload === "object") {
|
||||
const envelope = payload as {
|
||||
error?: unknown;
|
||||
method?: unknown;
|
||||
result?: unknown;
|
||||
};
|
||||
|
||||
const maybeError = envelope.error;
|
||||
if (maybeError) {
|
||||
return "error";
|
||||
}
|
||||
|
||||
if (envelope.result && typeof envelope.result === "object") {
|
||||
const stopReason = (envelope.result as { stopReason?: unknown }).stopReason;
|
||||
if (typeof stopReason === "string" && stopReason.length > 0) {
|
||||
return "idle";
|
||||
}
|
||||
}
|
||||
|
||||
const method = envelope.method;
|
||||
if (typeof method === "string") {
|
||||
const lowered = method.toLowerCase();
|
||||
if (lowered.includes("error") || lowered.includes("failed")) {
|
||||
return "error";
|
||||
}
|
||||
if (lowered.includes("ended") || lowered.includes("complete") || lowered.includes("stopped")) {
|
||||
return "idle";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
 * Thin client over the sandbox-agent SDK: lazily connects, creates sessions
 * and processes, and tracks a local best-effort status per session id.
 */
export class SandboxAgentClient {
  readonly endpoint: string;
  readonly token?: string;
  readonly agent: AgentId;
  readonly persist?: SessionPersistDriver;
  // Lazily-initialized SDK connection (created on first use, then reused).
  private sdkPromise?: Promise<SandboxAgent>;
  // Local best-effort status per session id, updated as prompts settle.
  private readonly statusBySessionId = new Map<string, SandboxSession["status"]>();

  constructor(options: SandboxAgentClientOptions) {
    // Strip a trailing slash so later path joins never double up.
    this.endpoint = options.endpoint.replace(/\/$/, "");
    this.token = options.token;
    this.agent = options.agent ?? DEFAULT_AGENT;
    this.persist = options.persist;
  }
|
||||
|
||||
private async sdk(): Promise<SandboxAgent> {
|
||||
if (!this.sdkPromise) {
|
||||
this.sdkPromise = SandboxAgent.connect({
|
||||
baseUrl: this.endpoint,
|
||||
token: this.token,
|
||||
persist: this.persist,
|
||||
});
|
||||
}
|
||||
|
||||
return this.sdkPromise;
|
||||
}
|
||||
|
||||
  /** Record the locally-tracked status for a session id. */
  private setStatus(sessionId: string, status: SandboxSession["status"]): void {
    this.statusBySessionId.set(sessionId, status);
  }
|
||||
|
||||
private isLikelyPromptTimeout(err: unknown): boolean {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
const lowered = message.toLowerCase();
|
||||
// sandbox-agent server times out long-running ACP prompts and returns a 504-like error.
|
||||
return lowered.includes("timeout waiting for agent response") || lowered.includes("timed out waiting for agent response") || lowered.includes("504");
|
||||
}
|
||||
|
||||
  /**
   * Create a new agent session and, when a prompt is supplied, fire the first
   * turn in the background. Returns quickly with a best-effort status:
   * "idle" for promptless sessions, "running" while the first turn is live.
   *
   * @param request either a prompt string (shorthand) or a full request object.
   */
  async createSession(request: string | SandboxSessionCreateRequest): Promise<SandboxSession> {
    // A bare string is shorthand for { prompt }.
    const normalized: SandboxSessionCreateRequest = typeof request === "string" ? { prompt: request } : request;
    const sdk = await this.sdk();
    // Do not wrap createSession in a local Promise.race timeout. The underlying SDK
    // call is not abortable, so local timeout races create overlapping ACP requests and
    // can produce duplicate/orphaned sessions while the original request is still running.
    const session = await sdk.createSession({
      agent: normalized.agent ?? this.agent,
      sessionInit: {
        cwd: normalized.cwd ?? "/",
        mcpServers: [],
      },
    });
    const modeId = modeIdForAgent(normalized.agent ?? this.agent);

    // Codex defaults to a restrictive "read-only" preset in some environments.
    // Foundry automation needs edits, command execution, and network access.
    // access (git push / PR creation). Use full-access where supported.
    //
    // If the agent doesn't support session modes, ignore.
    //
    // Do this in the background: ACP mode updates can occasionally time out (504),
    // and waiting here can stall session creation long enough to trip task init
    // step timeouts even though the session itself was created.
    if (modeId) {
      void session.rawSend("session/set_mode", { modeId }).catch(() => {
        // ignore
      });
    }

    const prompt = normalized.prompt?.trim();
    if (!prompt) {
      // No first turn requested: the session starts idle.
      this.setStatus(session.id, "idle");
      return {
        id: session.id,
        status: "idle",
      };
    }

    // Fire the first turn in the background. We intentionally do not await this:
    // session creation must remain fast, and we observe completion via events/stopReason.
    //
    // Note: sandbox-agent's ACP adapter for Codex may take >2 minutes to respond.
    // sandbox-agent can return a timeout error (504) even though the agent continues
    // running. Treat that timeout as non-fatal and keep polling events.
    void session
      .prompt([{ type: "text", text: prompt }])
      .then(() => {
        this.setStatus(session.id, "idle");
      })
      .catch((err) => {
        if (this.isLikelyPromptTimeout(err)) {
          // Server-side timeout: the agent is likely still working.
          this.setStatus(session.id, "running");
          return;
        }
        this.setStatus(session.id, "error");
      });

    this.setStatus(session.id, "running");
    return {
      id: session.id,
      status: "running",
    };
  }
|
||||
|
||||
async createSessionNoTask(dir: string): Promise<SandboxSession> {
|
||||
return this.createSession({
|
||||
cwd: dir,
|
||||
});
|
||||
}
|
||||
|
||||
async listSessions(request: ListPageRequest = {}): Promise<ListPage<SessionRecord>> {
|
||||
const sdk = await this.sdk();
|
||||
const page = await sdk.listSessions(request);
|
||||
return {
|
||||
items: page.items.map((session) => session.toRecord()),
|
||||
nextCursor: page.nextCursor,
|
||||
};
|
||||
}
|
||||
|
||||
async listEvents(request: ListEventsRequest): Promise<ListPage<SessionEvent>> {
|
||||
const sdk = await this.sdk();
|
||||
return sdk.getEvents(request);
|
||||
}
|
||||
|
||||
async createProcess(request: ProcessCreateRequest): Promise<ProcessInfo> {
|
||||
const sdk = await this.sdk();
|
||||
return await sdk.createProcess(request);
|
||||
}
|
||||
|
||||
async listProcesses(): Promise<{ processes: ProcessInfo[] }> {
|
||||
const sdk = await this.sdk();
|
||||
return await sdk.listProcesses();
|
||||
}
|
||||
|
||||
async getProcessLogs(processId: string, query: ProcessLogFollowQuery = {}): Promise<ProcessLogsResponse> {
|
||||
const sdk = await this.sdk();
|
||||
return await sdk.getProcessLogs(processId, query);
|
||||
}
|
||||
|
||||
async stopProcess(processId: string, query?: ProcessSignalQuery): Promise<ProcessInfo> {
|
||||
const sdk = await this.sdk();
|
||||
return await sdk.stopProcess(processId, query);
|
||||
}
|
||||
|
||||
async killProcess(processId: string, query?: ProcessSignalQuery): Promise<ProcessInfo> {
|
||||
const sdk = await this.sdk();
|
||||
return await sdk.killProcess(processId, query);
|
||||
}
|
||||
|
||||
async deleteProcess(processId: string): Promise<void> {
|
||||
const sdk = await this.sdk();
|
||||
await sdk.deleteProcess(processId);
|
||||
}
|
||||
|
||||
async sendPrompt(request: SandboxSessionPromptRequest): Promise<void> {
|
||||
const sdk = await this.sdk();
|
||||
const existing = await sdk.getSession(request.sessionId);
|
||||
if (!existing) {
|
||||
throw new Error(`session '${request.sessionId}' not found`);
|
||||
}
|
||||
|
||||
const session = await sdk.resumeSession(request.sessionId);
|
||||
const modeId = modeIdForAgent(this.agent);
|
||||
// Keep mode update best-effort and non-blocking for the same reason as createSession.
|
||||
if (modeId) {
|
||||
void session.rawSend("session/set_mode", { modeId }).catch(() => {
|
||||
// ignore
|
||||
});
|
||||
}
|
||||
const text = request.prompt.trim();
|
||||
if (!text) return;
|
||||
|
||||
// sandbox-agent's Session.send(notification=true) forwards an extNotification with
|
||||
// method "session/prompt", which some agents (e.g. codex-acp) do not implement.
|
||||
// Use Session.prompt and treat notification=true as "fire-and-forget".
|
||||
const fireAndForget = request.notification ?? true;
|
||||
if (fireAndForget) {
|
||||
void session
|
||||
.prompt([{ type: "text", text }])
|
||||
.then(() => {
|
||||
this.setStatus(request.sessionId, "idle");
|
||||
})
|
||||
.catch((err) => {
|
||||
if (this.isLikelyPromptTimeout(err)) {
|
||||
this.setStatus(request.sessionId, "running");
|
||||
return;
|
||||
}
|
||||
this.setStatus(request.sessionId, "error");
|
||||
});
|
||||
} else {
|
||||
try {
|
||||
await session.prompt([{ type: "text", text }]);
|
||||
this.setStatus(request.sessionId, "idle");
|
||||
} catch (err) {
|
||||
if (this.isLikelyPromptTimeout(err)) {
|
||||
this.setStatus(request.sessionId, "running");
|
||||
return;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
this.setStatus(request.sessionId, "running");
|
||||
}
|
||||
|
||||
async cancelSession(sessionId: string): Promise<void> {
|
||||
const sdk = await this.sdk();
|
||||
const existing = await sdk.getSession(sessionId);
|
||||
if (!existing) {
|
||||
throw new Error(`session '${sessionId}' not found`);
|
||||
}
|
||||
|
||||
const session = await sdk.resumeSession(sessionId);
|
||||
await session.rawSend("session/cancel", {});
|
||||
this.setStatus(sessionId, "idle");
|
||||
}
|
||||
|
||||
async destroySession(sessionId: string): Promise<void> {
|
||||
const sdk = await this.sdk();
|
||||
await sdk.destroySession(sessionId);
|
||||
this.setStatus(sessionId, "idle");
|
||||
}
|
||||
|
||||
  /**
   * Resolve the current status of a session.
   *
   * Terminal cached statuses (idle/error) are trusted as-is; a cached "running"
   * is always re-verified against the SDK, because only event traffic can move
   * a session out of "running". Falls back to scanning the most recent events
   * for a recognizable stop/status message.
   */
  async sessionStatus(sessionId: string): Promise<SandboxSession> {
    const cached = this.statusBySessionId.get(sessionId);
    if (cached && cached !== "running") {
      return { id: sessionId, status: cached };
    }

    const sdk = await this.sdk();
    const session = await sdk.getSession(sessionId);

    if (!session) {
      // Unknown session id: surface as an error state rather than throwing.
      this.setStatus(sessionId, "error");
      return { id: sessionId, status: "error" };
    }

    const record = session.toRecord();
    if (record.destroyedAt) {
      // Destroyed sessions can never be running.
      this.setStatus(sessionId, "idle");
      return { id: sessionId, status: "idle" };
    }

    const events = await sdk.getEvents({
      sessionId,
      limit: 25,
    });

    // Scan newest-to-oldest for the most recent event that implies a status.
    for (let i = events.items.length - 1; i >= 0; i--) {
      const item = events.items[i];
      if (!item) continue;
      const status = normalizeStatusFromMessage(item.payload);
      if (status) {
        this.setStatus(sessionId, status);
        return { id: sessionId, status };
      }
    }

    // No status-bearing event in the recent window: assume still running.
    this.setStatus(sessionId, "running");
    return { id: sessionId, status: "running" };
  }
|
||||
|
||||
async killSessionsInDirectory(dir: string): Promise<void> {
|
||||
const sdk = await this.sdk();
|
||||
let cursor: string | undefined;
|
||||
|
||||
do {
|
||||
const page = await sdk.listSessions({
|
||||
cursor,
|
||||
limit: 100,
|
||||
});
|
||||
|
||||
for (const session of page.items) {
|
||||
const initCwd = session.toRecord().sessionInit?.cwd;
|
||||
if (initCwd !== dir) {
|
||||
continue;
|
||||
}
|
||||
await sdk.destroySession(session.id);
|
||||
this.statusBySessionId.delete(session.id);
|
||||
}
|
||||
|
||||
cursor = page.nextCursor;
|
||||
} while (cursor);
|
||||
}
|
||||
|
||||
async generateCommitMessage(dir: string, spec: string, task: string): Promise<string> {
|
||||
const prompt = [
|
||||
"Generate a conventional commit message for the following changes.",
|
||||
"Return ONLY the commit message, no explanation or markdown formatting.",
|
||||
"",
|
||||
`Task: ${task}`,
|
||||
"",
|
||||
`Spec/diff:\n${spec}`,
|
||||
].join("\n");
|
||||
|
||||
const sdk = await this.sdk();
|
||||
const session = await sdk.createSession({
|
||||
agent: this.agent,
|
||||
sessionInit: {
|
||||
cwd: dir,
|
||||
mcpServers: [],
|
||||
},
|
||||
});
|
||||
|
||||
await session.prompt([{ type: "text", text: prompt }]);
|
||||
this.setStatus(session.id, "idle");
|
||||
|
||||
const events = await sdk.getEvents({
|
||||
sessionId: session.id,
|
||||
limit: 100,
|
||||
});
|
||||
|
||||
for (let i = events.items.length - 1; i >= 0; i--) {
|
||||
const event = events.items[i];
|
||||
if (!event) continue;
|
||||
if (event.sender !== "agent") continue;
|
||||
|
||||
const payload = event.payload as Record<string, unknown>;
|
||||
const params = payload.params;
|
||||
if (!params || typeof params !== "object") continue;
|
||||
|
||||
const text = (params as { text?: unknown }).text;
|
||||
if (typeof text === "string" && text.trim().length > 0) {
|
||||
return text.trim();
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error("sandbox-agent commit message response was empty");
|
||||
}
|
||||
}
|
||||
124
foundry/packages/backend/src/notifications/backends.ts
Normal file
124
foundry/packages/backend/src/notifications/backends.ts
Normal file
|
|
@ -0,0 +1,124 @@
|
|||
import { execFile } from "node:child_process";
|
||||
import { promisify } from "node:util";
|
||||
|
||||
const execFileAsync = promisify(execFile);
|
||||
|
||||
export type NotifyUrgency = "low" | "normal" | "high";
|
||||
|
||||
export interface NotifyBackend {
|
||||
name: string;
|
||||
available(): Promise<boolean>;
|
||||
send(title: string, body: string, urgency: NotifyUrgency): Promise<boolean>;
|
||||
}
|
||||
|
||||
async function isOnPath(binary: string): Promise<boolean> {
|
||||
try {
|
||||
await execFileAsync("which", [binary]);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export class OpenclawBackend implements NotifyBackend {
|
||||
readonly name = "openclaw";
|
||||
|
||||
async available(): Promise<boolean> {
|
||||
return isOnPath("openclaw");
|
||||
}
|
||||
|
||||
async send(title: string, body: string, _urgency: NotifyUrgency): Promise<boolean> {
|
||||
try {
|
||||
await execFileAsync("openclaw", ["wake", "--title", title, "--body", body]);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class MacOsNotifyBackend implements NotifyBackend {
|
||||
readonly name = "macos-osascript";
|
||||
|
||||
async available(): Promise<boolean> {
|
||||
return process.platform === "darwin";
|
||||
}
|
||||
|
||||
async send(title: string, body: string, _urgency: NotifyUrgency): Promise<boolean> {
|
||||
try {
|
||||
const escaped_body = body.replace(/\\/g, "\\\\").replace(/"/g, '\\"');
|
||||
const escaped_title = title.replace(/\\/g, "\\\\").replace(/"/g, '\\"');
|
||||
const script = `display notification "${escaped_body}" with title "${escaped_title}"`;
|
||||
await execFileAsync("osascript", ["-e", script]);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class LinuxNotifySendBackend implements NotifyBackend {
|
||||
readonly name = "linux-notify-send";
|
||||
|
||||
async available(): Promise<boolean> {
|
||||
return isOnPath("notify-send");
|
||||
}
|
||||
|
||||
async send(title: string, body: string, urgency: NotifyUrgency): Promise<boolean> {
|
||||
const urgencyMap: Record<NotifyUrgency, string> = {
|
||||
low: "low",
|
||||
normal: "normal",
|
||||
high: "critical",
|
||||
};
|
||||
|
||||
try {
|
||||
await execFileAsync("notify-send", ["-u", urgencyMap[urgency], title, body]);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class TerminalBellBackend implements NotifyBackend {
|
||||
readonly name = "terminal";
|
||||
|
||||
async available(): Promise<boolean> {
|
||||
return true;
|
||||
}
|
||||
|
||||
async send(title: string, body: string, _urgency: NotifyUrgency): Promise<boolean> {
|
||||
try {
|
||||
process.stderr.write("\x07");
|
||||
process.stderr.write(`[${title}] ${body}\n`);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Registry of notification backends keyed by their config-facing name.
// The order of attempts is decided by the caller (createBackends), not by this map.
const backendFactories: Record<string, () => NotifyBackend> = {
  openclaw: () => new OpenclawBackend(),
  "macos-osascript": () => new MacOsNotifyBackend(),
  "linux-notify-send": () => new LinuxNotifySendBackend(),
  terminal: () => new TerminalBellBackend(),
};
|
||||
|
||||
export async function createBackends(configOrder: string[]): Promise<NotifyBackend[]> {
|
||||
const backends: NotifyBackend[] = [];
|
||||
|
||||
for (const name of configOrder) {
|
||||
const backendBuilder = backendFactories[name];
|
||||
if (!backendBuilder) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const backend = backendBuilder();
|
||||
if (await backend.available()) {
|
||||
backends.push(backend);
|
||||
}
|
||||
}
|
||||
|
||||
return backends;
|
||||
}
|
||||
63
foundry/packages/backend/src/notifications/index.ts
Normal file
63
foundry/packages/backend/src/notifications/index.ts
Normal file
|
|
@ -0,0 +1,63 @@
|
|||
import type { NotifyBackend, NotifyUrgency } from "./backends.js";
|
||||
|
||||
export type { NotifyUrgency } from "./backends.js";
|
||||
export { createBackends } from "./backends.js";
|
||||
|
||||
/**
 * High-level notification API used by the backend to surface task/PR lifecycle
 * events to the user. Implementations deliver through the first notification
 * backend that succeeds (see createNotificationService).
 */
export interface NotificationService {
  // Generic escape hatch for ad-hoc notifications.
  notify(title: string, body: string, urgency: NotifyUrgency): Promise<void>;
  agentIdle(branchName: string): Promise<void>;
  agentError(branchName: string, error: string): Promise<void>;
  ciPassed(branchName: string, prNumber: number): Promise<void>;
  ciFailed(branchName: string, prNumber: number): Promise<void>;
  prApproved(branchName: string, prNumber: number, reviewer: string): Promise<void>;
  changesRequested(branchName: string, prNumber: number, reviewer: string): Promise<void>;
  prMerged(branchName: string, prNumber: number): Promise<void>;
  taskCreated(branchName: string): Promise<void>;
}
|
||||
|
||||
export function createNotificationService(backends: NotifyBackend[]): NotificationService {
|
||||
async function notify(title: string, body: string, urgency: NotifyUrgency): Promise<void> {
|
||||
for (const backend of backends) {
|
||||
const sent = await backend.send(title, body, urgency);
|
||||
if (sent) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
notify,
|
||||
|
||||
async agentIdle(branchName: string): Promise<void> {
|
||||
await notify("Agent Idle", `Agent finished on ${branchName}`, "normal");
|
||||
},
|
||||
|
||||
async agentError(branchName: string, error: string): Promise<void> {
|
||||
await notify("Agent Error", `Agent error on ${branchName}: ${error}`, "high");
|
||||
},
|
||||
|
||||
async ciPassed(branchName: string, prNumber: number): Promise<void> {
|
||||
await notify("CI Passed", `CI passed on ${branchName} (PR #${prNumber})`, "low");
|
||||
},
|
||||
|
||||
async ciFailed(branchName: string, prNumber: number): Promise<void> {
|
||||
await notify("CI Failed", `CI failed on ${branchName} (PR #${prNumber})`, "high");
|
||||
},
|
||||
|
||||
async prApproved(branchName: string, prNumber: number, reviewer: string): Promise<void> {
|
||||
await notify("PR Approved", `PR #${prNumber} on ${branchName} approved by ${reviewer}`, "normal");
|
||||
},
|
||||
|
||||
async changesRequested(branchName: string, prNumber: number, reviewer: string): Promise<void> {
|
||||
await notify("Changes Requested", `Changes requested on PR #${prNumber} (${branchName}) by ${reviewer}`, "high");
|
||||
},
|
||||
|
||||
async prMerged(branchName: string, prNumber: number): Promise<void> {
|
||||
await notify("PR Merged", `PR #${prNumber} on ${branchName} merged`, "normal");
|
||||
},
|
||||
|
||||
async taskCreated(branchName: string): Promise<void> {
|
||||
await notify("Task Created", `New task on ${branchName}`, "low");
|
||||
},
|
||||
};
|
||||
}
|
||||
43
foundry/packages/backend/src/notifications/state-tracker.ts
Normal file
43
foundry/packages/backend/src/notifications/state-tracker.ts
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
export type CiState = "running" | "pass" | "fail" | "unknown";
|
||||
export type ReviewState = "approved" | "changes_requested" | "pending" | "none" | "unknown";
|
||||
|
||||
export interface PrStateTransition {
|
||||
type: "ci_passed" | "ci_failed" | "pr_approved" | "changes_requested";
|
||||
branchName: string;
|
||||
prNumber: number;
|
||||
reviewer?: string;
|
||||
}
|
||||
|
||||
export class PrStateTracker {
|
||||
private states: Map<string, { ci: CiState; review: ReviewState }>;
|
||||
|
||||
constructor() {
|
||||
this.states = new Map();
|
||||
}
|
||||
|
||||
update(repoId: string, branchName: string, prNumber: number, ci: CiState, review: ReviewState, reviewer?: string): PrStateTransition[] {
|
||||
const key = `${repoId}:${branchName}`;
|
||||
const prev = this.states.get(key);
|
||||
const transitions: PrStateTransition[] = [];
|
||||
|
||||
if (prev) {
|
||||
// CI transitions: only fire when moving from "running" to a terminal state
|
||||
if (prev.ci === "running" && ci === "pass") {
|
||||
transitions.push({ type: "ci_passed", branchName, prNumber });
|
||||
} else if (prev.ci === "running" && ci === "fail") {
|
||||
transitions.push({ type: "ci_failed", branchName, prNumber });
|
||||
}
|
||||
|
||||
// Review transitions: only fire when moving from "pending" to a terminal state
|
||||
if (prev.review === "pending" && review === "approved") {
|
||||
transitions.push({ type: "pr_approved", branchName, prNumber, reviewer });
|
||||
} else if (prev.review === "pending" && review === "changes_requested") {
|
||||
transitions.push({ type: "changes_requested", branchName, prNumber, reviewer });
|
||||
}
|
||||
}
|
||||
|
||||
this.states.set(key, { ci, review });
|
||||
|
||||
return transitions;
|
||||
}
|
||||
}
|
||||
455
foundry/packages/backend/src/providers/daytona/index.ts
Normal file
455
foundry/packages/backend/src/providers/daytona/index.ts
Normal file
|
|
@ -0,0 +1,455 @@
|
|||
import type {
|
||||
AgentEndpoint,
|
||||
AttachTarget,
|
||||
AttachTargetRequest,
|
||||
CreateSandboxRequest,
|
||||
DestroySandboxRequest,
|
||||
EnsureAgentRequest,
|
||||
ExecuteSandboxCommandRequest,
|
||||
ExecuteSandboxCommandResult,
|
||||
ProviderCapabilities,
|
||||
ReleaseSandboxRequest,
|
||||
ResumeSandboxRequest,
|
||||
SandboxHandle,
|
||||
SandboxHealth,
|
||||
SandboxHealthRequest,
|
||||
SandboxProvider,
|
||||
} from "../provider-api/index.js";
|
||||
import type { DaytonaDriver } from "../../driver.js";
|
||||
import { Image } from "@daytonaio/sdk";
|
||||
|
||||
/** Static configuration for the Daytona sandbox provider. */
export interface DaytonaProviderConfig {
  // Daytona API URL; the SDK default is used when omitted.
  endpoint?: string;
  // API key; without it the provider is considered unconfigured.
  apiKey?: string;
  // Base container image for new sandboxes.
  image: string;
  // Daytona target/region passed through to the client.
  target?: string;
  /**
   * Auto-stop interval in minutes. If omitted, Daytona's default applies.
   * Set to `0` to disable auto-stop.
   */
  autoStopInterval?: number;
}
|
||||
|
||||
/**
 * Sandbox provider backed by Daytona. Creates remote sandboxes, prepares a git
 * checkout in them, and installs/starts sandbox-agent for ACP sessions.
 */
export class DaytonaProvider implements SandboxProvider {
  constructor(
    private readonly config: DaytonaProviderConfig,
    // Optional backend driver; when absent the provider reports itself unconfigured.
    private readonly daytona?: DaytonaDriver,
  ) {}

  // Port sandbox-agent listens on inside the sandbox.
  private static readonly SANDBOX_AGENT_PORT = 2468;
  // Pinned sandbox-agent release installed into sandboxes.
  private static readonly SANDBOX_AGENT_VERSION = "0.3.0";
  private static readonly DEFAULT_ACP_REQUEST_TIMEOUT_MS = 120_000;
  // Agent plugins installed best-effort into each sandbox.
  private static readonly AGENT_IDS = ["codex", "claude"] as const;
  // Host env vars forwarded into sandboxes (model/provider credentials + GitHub tokens).
  private static readonly PASSTHROUGH_ENV_KEYS = [
    "ANTHROPIC_API_KEY",
    "CLAUDE_API_KEY",
    "OPENAI_API_KEY",
    "CODEX_API_KEY",
    "OPENCODE_API_KEY",
    "CEREBRAS_API_KEY",
    "GH_TOKEN",
    "GITHUB_TOKEN",
  ] as const;

  // Per-request Daytona API timeout; overridable via HF_DAYTONA_REQUEST_TIMEOUT_MS.
  // Invalid or non-positive values fall back to 120s.
  private getRequestTimeoutMs(): number {
    const parsed = Number(process.env.HF_DAYTONA_REQUEST_TIMEOUT_MS ?? "120000");
    if (!Number.isFinite(parsed) || parsed <= 0) {
      return 120_000;
    }
    return Math.floor(parsed);
  }

  // ACP request timeout passed to sandbox-agent inside the sandbox;
  // overridable via HF_SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS.
  private getAcpRequestTimeoutMs(): number {
    const parsed = Number(process.env.HF_SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS ?? DaytonaProvider.DEFAULT_ACP_REQUEST_TIMEOUT_MS.toString());
    if (!Number.isFinite(parsed) || parsed <= 0) {
      return DaytonaProvider.DEFAULT_ACP_REQUEST_TIMEOUT_MS;
    }
    return Math.floor(parsed);
  }
|
||||
|
||||
  /**
   * Run `fn` but reject after the configured Daytona request timeout.
   * NOTE(review): the race only abandons the await — the underlying Daytona
   * call is not cancelled and may still complete in the background.
   */
  private async withTimeout<T>(label: string, fn: () => Promise<T>): Promise<T> {
    const timeoutMs = this.getRequestTimeoutMs();
    let timer: ReturnType<typeof setTimeout> | null = null;

    try {
      return await Promise.race([
        fn(),
        new Promise<T>((_, reject) => {
          timer = setTimeout(() => {
            reject(new Error(`daytona ${label} timed out after ${timeoutMs}ms`));
          }, timeoutMs);
        }),
      ]);
    } finally {
      // Always clear the timer so a settled call doesn't leave a pending timeout.
      if (timer) {
        clearTimeout(timer);
      }
    }
  }
|
||||
|
||||
  // Build a Daytona client from config, or undefined when no API key (or no
  // driver) is available.
  private getClient() {
    const apiKey = this.config.apiKey?.trim();
    if (!apiKey) {
      return undefined;
    }
    const endpoint = this.config.endpoint?.trim();

    return this.daytona?.createClient({
      ...(endpoint ? { apiUrl: endpoint } : {}),
      apiKey,
      target: this.config.target,
    });
  }

  // Like getClient, but throws a descriptive configuration error instead of
  // returning undefined. Use for operations that cannot proceed without Daytona.
  private requireClient() {
    const client = this.getClient();
    if (client) {
      return client;
    }

    if (!this.daytona) {
      throw new Error("daytona provider requires backend daytona driver");
    }

    throw new Error(
      "daytona provider is not configured: missing apiKey. " +
        "Set HF_DAYTONA_API_KEY (or DAYTONA_API_KEY). " +
        "Optionally set HF_DAYTONA_ENDPOINT (or DAYTONA_ENDPOINT).",
    );
  }
|
||||
|
||||
  /** Ensure the sandbox is running, starting it if it is in any non-started state. */
  private async ensureStarted(sandboxId: string): Promise<void> {
    const client = this.requireClient();

    const sandbox = await this.withTimeout("get sandbox", () => client.getSandbox(sandboxId));
    const state = String(sandbox.state ?? "unknown").toLowerCase();
    if (state === "started" || state === "running") {
      return;
    }

    // If the sandbox is stopped (or any non-started state), try starting it.
    // Daytona preserves the filesystem across stop/start, which is what we rely on for faster git setup.
    await this.withTimeout("start sandbox", () => client.startSandbox(sandboxId, 60));
  }
|
||||
|
||||
private buildEnvVars(): Record<string, string> {
|
||||
const envVars: Record<string, string> = {};
|
||||
|
||||
for (const key of DaytonaProvider.PASSTHROUGH_ENV_KEYS) {
|
||||
const value = process.env[key];
|
||||
if (value) {
|
||||
envVars[key] = value;
|
||||
}
|
||||
}
|
||||
|
||||
return envVars;
|
||||
}
|
||||
|
||||
  /**
   * Describe the sandbox image: base image plus git/node tooling and a
   * pinned sandbox-agent with its agent plugins preinstalled.
   */
  private buildSnapshotImage() {
    // Use Daytona image build + snapshot caching so base tooling (git + sandbox-agent)
    // is prepared once and reused for subsequent sandboxes.
    return Image.base(this.config.image).runCommands(
      "apt-get update && apt-get install -y curl ca-certificates git openssh-client nodejs npm",
      `curl -fsSL https://releases.rivet.dev/sandbox-agent/${DaytonaProvider.SANDBOX_AGENT_VERSION}/install.sh | sh`,
      // Agent installs are best-effort (`|| true`): not every build ships every plugin.
      `bash -lc 'export PATH="$HOME/.local/bin:$PATH"; sandbox-agent install-agent codex || true; sandbox-agent install-agent claude || true'`,
    );
  }
|
||||
|
||||
  /**
   * Execute a command in the sandbox and throw (with `label` and output) on a
   * non-zero exit code.
   */
  private async runCheckedCommand(sandboxId: string, command: string, label: string): Promise<void> {
    const client = this.requireClient();

    const result = await this.withTimeout(`execute command (${label})`, () => client.executeCommand(sandboxId, command));
    if (result.exitCode !== 0) {
      throw new Error(`daytona ${label} failed (${result.exitCode}): ${result.result}`);
    }
  }
|
||||
|
||||
  /** Stable provider identifier. */
  id() {
    return "daytona" as const;
  }

  /** Daytona sandboxes are remote and can be stopped/resumed with sessions intact. */
  capabilities(): ProviderCapabilities {
    return {
      remote: true,
      supportsSessionReuse: true,
    };
  }

  // No schema validation today: any object (or nothing) passes through as-is.
  async validateConfig(input: unknown): Promise<Record<string, unknown>> {
    return (input as Record<string, unknown> | undefined) ?? {};
  }
|
||||
|
||||
  /**
   * Create a new Daytona sandbox for a task: build/reuse the snapshot image,
   * label the sandbox with foundry identity, install the git/node toolchain,
   * and clone the task repo on the task branch.
   *
   * @returns handle whose metadata.cwd is the cloned repo directory.
   * @throws if sandbox creation or any required setup command fails.
   */
  async createSandbox(req: CreateSandboxRequest): Promise<SandboxHandle> {
    const client = this.requireClient();
    // Debug emitter is optional; default to a no-op.
    const emitDebug = req.debug ?? (() => {});

    emitDebug("daytona.createSandbox.start", {
      workspaceId: req.workspaceId,
      repoId: req.repoId,
      taskId: req.taskId,
      branchName: req.branchName,
    });

    const createStartedAt = Date.now();
    const sandbox = await this.withTimeout("create sandbox", () =>
      client.createSandbox({
        image: this.buildSnapshotImage(),
        envVars: this.buildEnvVars(),
        // Labels carry foundry identity so resumeSandbox can reconstruct paths later.
        labels: {
          "foundry.workspace": req.workspaceId,
          "foundry.task": req.taskId,
          "foundry.repo_id": req.repoId,
          "foundry.repo_remote": req.repoRemote,
          "foundry.branch": req.branchName,
        },
        autoStopInterval: this.config.autoStopInterval,
      }),
    );
    emitDebug("daytona.createSandbox.created", {
      sandboxId: sandbox.id,
      durationMs: Date.now() - createStartedAt,
      state: sandbox.state ?? null,
    });

    const repoDir = `/home/daytona/foundry/${req.workspaceId}/${req.repoId}/${req.taskId}/repo`;

    // Prepare a working directory for the agent. This must succeed for the task to work.
    // Toolchain install is skipped when git + npx are already present (snapshot hit).
    const installStartedAt = Date.now();
    await this.runCheckedCommand(
      sandbox.id,
      [
        "bash",
        "-lc",
        `'set -euo pipefail; export DEBIAN_FRONTEND=noninteractive; if command -v git >/dev/null 2>&1 && command -v npx >/dev/null 2>&1; then exit 0; fi; apt-get update -y >/tmp/apt-update.log 2>&1; apt-get install -y git openssh-client ca-certificates nodejs npm >/tmp/apt-install.log 2>&1'`,
      ].join(" "),
      "install git + node toolchain",
    );
    emitDebug("daytona.createSandbox.install_toolchain.done", {
      sandboxId: sandbox.id,
      durationMs: Date.now() - installStartedAt,
    });

    const cloneStartedAt = Date.now();
    await this.runCheckedCommand(
      sandbox.id,
      [
        "bash",
        "-lc",
        // JSON.stringify quotes the whole script as one bash -lc argument.
        `${JSON.stringify(
          [
            "set -euo pipefail",
            // Never let git prompt for credentials inside a headless sandbox.
            "export GIT_TERMINAL_PROMPT=0",
            "export GIT_ASKPASS=/bin/echo",
            // mkdir -p then rmdir ensures parent dirs exist while leaving the
            // leaf absent so `git clone` can create it.
            `rm -rf "${repoDir}"`,
            `mkdir -p "${repoDir}"`,
            `rmdir "${repoDir}"`,
            // Clone without embedding credentials. Auth for pushing is configured by the agent at runtime.
            `git clone "${req.repoRemote}" "${repoDir}"`,
            `cd "${repoDir}"`,
            `git fetch origin --prune`,
            // The task branch may not exist remotely yet (agent push creates it). Base off current branch (default branch).
            `if git show-ref --verify --quiet "refs/remotes/origin/${req.branchName}"; then git checkout -B "${req.branchName}" "origin/${req.branchName}"; else git checkout -B "${req.branchName}" "$(git branch --show-current 2>/dev/null || echo main)"; fi`,
            `git config user.email "foundry@local" >/dev/null 2>&1 || true`,
            `git config user.name "Foundry" >/dev/null 2>&1 || true`,
          ].join("; "),
        )}`,
      ].join(" "),
      "clone repo",
    );
    emitDebug("daytona.createSandbox.clone_repo.done", {
      sandboxId: sandbox.id,
      durationMs: Date.now() - cloneStartedAt,
    });

    return {
      sandboxId: sandbox.id,
      switchTarget: `daytona://${sandbox.id}`,
      metadata: {
        endpoint: this.config.endpoint ?? null,
        image: this.config.image,
        snapshot: sandbox.snapshot ?? null,
        remote: true,
        state: sandbox.state ?? null,
        cwd: repoDir,
      },
    };
  }
|
||||
|
||||
  /**
   * Resume an existing sandbox (starting it if stopped) and rebuild its handle.
   * The repo cwd is reconstructed from labels written at create time; when the
   * labels are missing, cwd is omitted from metadata.
   */
  async resumeSandbox(req: ResumeSandboxRequest): Promise<SandboxHandle> {
    const client = this.requireClient();

    await this.ensureStarted(req.sandboxId);

    // Reconstruct cwd from sandbox labels written at create time.
    const info = await this.withTimeout("resume get sandbox", () => client.getSandbox(req.sandboxId));
    const labels = info.labels ?? {};
    const workspaceId = labels["foundry.workspace"] ?? req.workspaceId;
    const repoId = labels["foundry.repo_id"] ?? "";
    const taskId = labels["foundry.task"] ?? "";
    const cwd = repoId && taskId ? `/home/daytona/foundry/${workspaceId}/${repoId}/${taskId}/repo` : null;

    return {
      sandboxId: req.sandboxId,
      switchTarget: `daytona://${req.sandboxId}`,
      metadata: {
        resumed: true,
        endpoint: this.config.endpoint ?? null,
        ...(cwd ? { cwd } : {}),
      },
    };
  }
|
||||
|
||||
async destroySandbox(_req: DestroySandboxRequest): Promise<void> {
|
||||
const client = this.getClient();
|
||||
if (!client) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await this.withTimeout("delete sandbox", () => client.deleteSandbox(_req.sandboxId));
|
||||
} catch (error) {
|
||||
// Ignore not-found style cleanup failures.
|
||||
const text = error instanceof Error ? error.message : String(error);
|
||||
if (text.toLowerCase().includes("not found")) {
|
||||
return;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async releaseSandbox(req: ReleaseSandboxRequest): Promise<void> {
|
||||
const client = this.getClient();
|
||||
if (!client) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await this.withTimeout("stop sandbox", () => client.stopSandbox(req.sandboxId, 60));
|
||||
} catch (error) {
|
||||
const text = error instanceof Error ? error.message : String(error);
|
||||
if (text.toLowerCase().includes("not found")) {
|
||||
return;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Make sure a healthy sandbox-agent server is running inside the sandbox and
   * return its preview endpoint. Idempotent: each step exits early when its
   * work is already done, so this is safe to call on every attach.
   *
   * @throws if any required install step fails or the agent never becomes healthy.
   */
  async ensureSandboxAgent(req: EnsureAgentRequest): Promise<AgentEndpoint> {
    const client = this.requireClient();
    const acpRequestTimeoutMs = this.getAcpRequestTimeoutMs();

    await this.ensureStarted(req.sandboxId);

    // Base tools: curl (for install script + health checks)...
    await this.runCheckedCommand(
      req.sandboxId,
      [
        "bash",
        "-lc",
        `'set -euo pipefail; if command -v curl >/dev/null 2>&1; then exit 0; fi; export DEBIAN_FRONTEND=noninteractive; apt-get update -y >/tmp/apt-update.log 2>&1; apt-get install -y curl ca-certificates >/tmp/apt-install.log 2>&1'`,
      ].join(" "),
      "install curl",
    );

    // ...and a node toolchain (some agent plugins need npx).
    await this.runCheckedCommand(
      req.sandboxId,
      [
        "bash",
        "-lc",
        `'set -euo pipefail; if command -v npx >/dev/null 2>&1; then exit 0; fi; export DEBIAN_FRONTEND=noninteractive; apt-get update -y >/tmp/apt-update.log 2>&1; apt-get install -y nodejs npm >/tmp/apt-install.log 2>&1'`,
      ].join(" "),
      "install node toolchain",
    );

    // Install the pinned sandbox-agent release unless that exact version is present.
    await this.runCheckedCommand(
      req.sandboxId,
      [
        "bash",
        "-lc",
        `'set -euo pipefail; export PATH="$HOME/.local/bin:$PATH"; if sandbox-agent --version 2>/dev/null | grep -q "${DaytonaProvider.SANDBOX_AGENT_VERSION}"; then exit 0; fi; curl -fsSL https://releases.rivet.dev/sandbox-agent/${DaytonaProvider.SANDBOX_AGENT_VERSION}/install.sh | sh'`,
      ].join(" "),
      "install sandbox-agent",
    );

    for (const agentId of DaytonaProvider.AGENT_IDS) {
      try {
        await this.runCheckedCommand(
          req.sandboxId,
          ["bash", "-lc", `'export PATH="$HOME/.local/bin:$PATH"; sandbox-agent install-agent ${agentId}'`].join(" "),
          `install agent ${agentId}`,
        );
      } catch {
        // Some sandbox-agent builds may not ship every agent plugin; treat this as best-effort.
      }
    }

    // Launch the server (detached via nohup) unless one is already running.
    await this.runCheckedCommand(
      req.sandboxId,
      [
        "bash",
        "-lc",
        `'set -euo pipefail; export PATH="$HOME/.local/bin:$PATH"; command -v sandbox-agent >/dev/null 2>&1; if pgrep -x sandbox-agent >/dev/null; then exit 0; fi; nohup env SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS=${acpRequestTimeoutMs} sandbox-agent server --no-token --host 0.0.0.0 --port ${DaytonaProvider.SANDBOX_AGENT_PORT} >/tmp/sandbox-agent.log 2>&1 &'`,
      ].join(" "),
      "start sandbox-agent",
    );

    // Poll /v1/health for up to ~45s; on failure, surface the server log tail.
    await this.runCheckedCommand(
      req.sandboxId,
      [
        "bash",
        "-lc",
        `'for i in $(seq 1 45); do curl -fsS "http://127.0.0.1:${DaytonaProvider.SANDBOX_AGENT_PORT}/v1/health" >/dev/null && exit 0; sleep 1; done; echo "sandbox-agent failed to become healthy" >&2; tail -n 80 /tmp/sandbox-agent.log >&2; exit 1'`,
      ].join(" "),
      "wait for sandbox-agent health",
    );

    const preview = await this.withTimeout("get preview endpoint", () => client.getPreviewEndpoint(req.sandboxId, DaytonaProvider.SANDBOX_AGENT_PORT));

    return {
      endpoint: preview.url,
      token: preview.token,
    };
  }
|
||||
|
||||
async health(req: SandboxHealthRequest): Promise<SandboxHealth> {
|
||||
const client = this.getClient();
|
||||
if (!client) {
|
||||
return {
|
||||
status: "degraded",
|
||||
message: "daytona driver not configured",
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const sandbox = await this.withTimeout("health get sandbox", () => client.getSandbox(req.sandboxId));
|
||||
const state = String(sandbox.state ?? "unknown");
|
||||
if (state.toLowerCase().includes("error")) {
|
||||
return {
|
||||
status: "down",
|
||||
message: `daytona sandbox in error state: ${state}`,
|
||||
};
|
||||
}
|
||||
return {
|
||||
status: "healthy",
|
||||
message: `daytona sandbox state: ${state}`,
|
||||
};
|
||||
} catch (error) {
|
||||
const text = error instanceof Error ? error.message : String(error);
|
||||
return {
|
||||
status: "down",
|
||||
message: `daytona sandbox health check failed: ${text}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async attachTarget(req: AttachTargetRequest): Promise<AttachTarget> {
|
||||
return {
|
||||
target: `daytona://${req.sandboxId}`,
|
||||
};
|
||||
}
|
||||
|
||||
async executeCommand(req: ExecuteSandboxCommandRequest): Promise<ExecuteSandboxCommandResult> {
|
||||
const client = this.requireClient();
|
||||
await this.ensureStarted(req.sandboxId);
|
||||
return await this.withTimeout(`execute command (${req.label ?? "command"})`, () => client.executeCommand(req.sandboxId, req.command));
|
||||
}
|
||||
}
|
||||
77
foundry/packages/backend/src/providers/index.ts
Normal file
77
foundry/packages/backend/src/providers/index.ts
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
import type { ProviderId } from "@sandbox-agent/foundry-shared";
|
||||
import type { AppConfig } from "@sandbox-agent/foundry-shared";
|
||||
import type { BackendDriver } from "../driver.js";
|
||||
import { DaytonaProvider } from "./daytona/index.js";
|
||||
import { LocalProvider } from "./local/index.js";
|
||||
import type { SandboxProvider } from "./provider-api/index.js";
|
||||
|
||||
/**
 * Lookup surface over the configured sandbox providers.
 */
export interface ProviderRegistry {
  /** Resolve the provider implementation for the given id. */
  get(providerId: ProviderId): SandboxProvider;
  /** Ids of every provider this registry can resolve. */
  availableProviderIds(): ProviderId[];
  /** Provider to use when a caller does not specify one. */
  defaultProviderId(): ProviderId;
}
|
||||
|
||||
export function createProviderRegistry(config: AppConfig, driver?: BackendDriver): ProviderRegistry {
|
||||
const gitDriver = driver?.git ?? {
|
||||
validateRemote: async () => {
|
||||
throw new Error("local provider requires backend git driver");
|
||||
},
|
||||
ensureCloned: async () => {
|
||||
throw new Error("local provider requires backend git driver");
|
||||
},
|
||||
fetch: async () => {
|
||||
throw new Error("local provider requires backend git driver");
|
||||
},
|
||||
listRemoteBranches: async () => {
|
||||
throw new Error("local provider requires backend git driver");
|
||||
},
|
||||
remoteDefaultBaseRef: async () => {
|
||||
throw new Error("local provider requires backend git driver");
|
||||
},
|
||||
revParse: async () => {
|
||||
throw new Error("local provider requires backend git driver");
|
||||
},
|
||||
ensureRemoteBranch: async () => {
|
||||
throw new Error("local provider requires backend git driver");
|
||||
},
|
||||
diffStatForBranch: async () => {
|
||||
throw new Error("local provider requires backend git driver");
|
||||
},
|
||||
conflictsWithMain: async () => {
|
||||
throw new Error("local provider requires backend git driver");
|
||||
},
|
||||
};
|
||||
|
||||
const local = new LocalProvider(
|
||||
{
|
||||
rootDir: config.providers.local.rootDir,
|
||||
sandboxAgentPort: config.providers.local.sandboxAgentPort,
|
||||
},
|
||||
gitDriver,
|
||||
);
|
||||
const daytona = new DaytonaProvider(
|
||||
{
|
||||
endpoint: config.providers.daytona.endpoint,
|
||||
apiKey: config.providers.daytona.apiKey,
|
||||
image: config.providers.daytona.image,
|
||||
},
|
||||
driver?.daytona,
|
||||
);
|
||||
|
||||
const map: Record<ProviderId, SandboxProvider> = {
|
||||
local,
|
||||
daytona,
|
||||
};
|
||||
|
||||
return {
|
||||
get(providerId: ProviderId): SandboxProvider {
|
||||
return map[providerId];
|
||||
},
|
||||
availableProviderIds(): ProviderId[] {
|
||||
return Object.keys(map) as ProviderId[];
|
||||
},
|
||||
defaultProviderId(): ProviderId {
|
||||
return config.providers.daytona.apiKey ? "daytona" : "local";
|
||||
},
|
||||
};
|
||||
}
|
||||
235
foundry/packages/backend/src/providers/local/index.ts
Normal file
235
foundry/packages/backend/src/providers/local/index.ts
Normal file
|
|
@ -0,0 +1,235 @@
|
|||
import { randomUUID } from "node:crypto";
|
||||
import { execFile } from "node:child_process";
|
||||
import { existsSync, mkdirSync, rmSync } from "node:fs";
|
||||
import { homedir } from "node:os";
|
||||
import { dirname, resolve } from "node:path";
|
||||
import { promisify } from "node:util";
|
||||
import { InMemorySessionPersistDriver, SandboxAgent } from "sandbox-agent";
|
||||
import type {
|
||||
AgentEndpoint,
|
||||
AttachTarget,
|
||||
AttachTargetRequest,
|
||||
CreateSandboxRequest,
|
||||
DestroySandboxRequest,
|
||||
EnsureAgentRequest,
|
||||
ExecuteSandboxCommandRequest,
|
||||
ExecuteSandboxCommandResult,
|
||||
ProviderCapabilities,
|
||||
ReleaseSandboxRequest,
|
||||
ResumeSandboxRequest,
|
||||
SandboxHandle,
|
||||
SandboxHealth,
|
||||
SandboxHealthRequest,
|
||||
SandboxProvider,
|
||||
} from "../provider-api/index.js";
|
||||
import type { GitDriver } from "../../driver.js";
|
||||
|
||||
// Promise-based wrapper around child_process.execFile, used for local git
// and shell invocations.
const execFileAsync = promisify(execFile);
// Port the locally spawned sandbox-agent server listens on when the config
// does not specify one.
const DEFAULT_SANDBOX_AGENT_PORT = 2468;
|
||||
|
||||
/** Configuration for the local (on-host) sandbox provider. */
export interface LocalProviderConfig {
  // Root directory for sandbox working copies; "~" is expanded. When unset
  // or blank, defaults to ~/.local/share/foundry/local-sandboxes.
  rootDir?: string;
  // Port for the spawned sandbox-agent server; defaults to 2468.
  sandboxAgentPort?: number;
}
|
||||
|
||||
function expandHome(value: string): string {
|
||||
if (value === "~") {
|
||||
return homedir();
|
||||
}
|
||||
if (value.startsWith("~/")) {
|
||||
return resolve(homedir(), value.slice(2));
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
async function branchExists(repoPath: string, branchName: string): Promise<boolean> {
|
||||
try {
|
||||
await execFileAsync("git", ["-C", repoPath, "show-ref", "--verify", `refs/remotes/origin/${branchName}`]);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async function checkoutBranch(repoPath: string, branchName: string, git: GitDriver): Promise<void> {
|
||||
await git.fetch(repoPath);
|
||||
const targetRef = (await branchExists(repoPath, branchName)) ? `origin/${branchName}` : await git.remoteDefaultBaseRef(repoPath);
|
||||
await execFileAsync("git", ["-C", repoPath, "checkout", "-B", branchName, targetRef], {
|
||||
env: process.env as Record<string, string>,
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Sandbox provider that runs tasks directly on the host filesystem.
 *
 * Each sandbox is a git clone under `<rootDir>/<workspaceId>/<sandboxId>/repo`,
 * and a single lazily spawned sandbox-agent server on 127.0.0.1 services all
 * local sandboxes.
 */
export class LocalProvider implements SandboxProvider {
  // Memoized handle to the shared sandbox-agent SDK; null until first use.
  private sdkPromise: Promise<SandboxAgent> | null = null;

  constructor(
    private readonly config: LocalProviderConfig,
    private readonly git: GitDriver,
  ) {}

  // Root directory for all local sandboxes, with "~" expanded. An unset or
  // blank rootDir falls back to the default location.
  private rootDir(): string {
    return expandHome(this.config.rootDir?.trim() || "~/.local/share/foundry/local-sandboxes");
  }

  // Per-sandbox directory: <rootDir>/<workspaceId>/<sandboxId>.
  private sandboxRoot(workspaceId: string, sandboxId: string): string {
    return resolve(this.rootDir(), workspaceId, sandboxId);
  }

  // Git working copy lives in "repo" inside the sandbox root.
  private repoDir(workspaceId: string, sandboxId: string): string {
    return resolve(this.sandboxRoot(workspaceId, sandboxId), "repo");
  }

  // Assemble the handle returned by create/resume; switchTarget points at the
  // on-disk repo via a local:// URI.
  private sandboxHandle(workspaceId: string, sandboxId: string, repoDir: string): SandboxHandle {
    return {
      sandboxId,
      switchTarget: `local://${repoDir}`,
      metadata: {
        cwd: repoDir,
        repoDir,
      },
    };
  }

  // Start (once) and return the shared sandbox-agent SDK. The promise is
  // memoized so concurrent callers share one spawn; note a failed start is
  // also memoized and keeps rejecting for later callers.
  private async sandboxAgent(): Promise<SandboxAgent> {
    if (!this.sdkPromise) {
      const sandboxAgentHome = resolve(this.rootDir(), ".sandbox-agent-home");
      mkdirSync(sandboxAgentHome, { recursive: true });
      // Prefer the real HOME so agents see user config; fall back to a
      // provider-owned home under rootDir.
      const spawnHome = process.env.HOME?.trim() || sandboxAgentHome;
      this.sdkPromise = SandboxAgent.start({
        persist: new InMemorySessionPersistDriver(),
        spawn: {
          enabled: true,
          host: "127.0.0.1",
          port: this.config.sandboxAgentPort ?? DEFAULT_SANDBOX_AGENT_PORT,
          log: "silent",
          // Forward only the credential variables that are actually set, so
          // unset keys are not passed through as "undefined".
          env: {
            HOME: spawnHome,
            ...(process.env.ANTHROPIC_API_KEY ? { ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY } : {}),
            ...(process.env.CLAUDE_API_KEY ? { CLAUDE_API_KEY: process.env.CLAUDE_API_KEY } : {}),
            ...(process.env.OPENAI_API_KEY ? { OPENAI_API_KEY: process.env.OPENAI_API_KEY } : {}),
            ...(process.env.CODEX_API_KEY ? { CODEX_API_KEY: process.env.CODEX_API_KEY } : {}),
            ...(process.env.GH_TOKEN ? { GH_TOKEN: process.env.GH_TOKEN } : {}),
            ...(process.env.GITHUB_TOKEN ? { GITHUB_TOKEN: process.env.GITHUB_TOKEN } : {}),
          },
        },
      }).then(async (sdk) => {
        // Best-effort pre-install of the agents this provider expects.
        for (const agentName of ["claude", "codex"] as const) {
          try {
            const agent = await sdk.getAgent(agentName, { config: true });
            if (!agent.installed) {
              await sdk.installAgent(agentName);
            }
          } catch {
            // The local provider can still function if the agent is already available
            // through the user's PATH or the install check is unsupported.
          }
        }
        return sdk;
      });
    }
    return this.sdkPromise;
  }

  // Provider id; `as const` keeps the literal "local" type.
  id() {
    return "local" as const;
  }

  // Local sandboxes run on this host and keep session state on disk.
  capabilities(): ProviderCapabilities {
    return {
      remote: false,
      supportsSessionReuse: true,
    };
  }

  // Pass-through validation: any object (or nothing) is accepted as config.
  // NOTE(review): the cast assumes input is already a plain object — there is
  // no runtime shape check here.
  async validateConfig(input: unknown): Promise<Record<string, unknown>> {
    return (input as Record<string, unknown> | undefined) ?? {};
  }

  /**
   * Create a local sandbox: clone the repo and check out the task branch.
   * Reuses req.taskId as the sandbox id when present (`||` also treats an
   * empty string as absent), otherwise generates a random one.
   */
  async createSandbox(req: CreateSandboxRequest): Promise<SandboxHandle> {
    const sandboxId = req.taskId || `local-${randomUUID()}`;
    const repoDir = this.repoDir(req.workspaceId, sandboxId);
    mkdirSync(dirname(repoDir), { recursive: true });
    await this.git.ensureCloned(req.repoRemote, repoDir);
    await checkoutBranch(repoDir, req.branchName, this.git);
    return this.sandboxHandle(req.workspaceId, sandboxId, repoDir);
  }

  // Resume simply revalidates that the on-disk repo still exists.
  async resumeSandbox(req: ResumeSandboxRequest): Promise<SandboxHandle> {
    const repoDir = this.repoDir(req.workspaceId, req.sandboxId);
    if (!existsSync(repoDir)) {
      throw new Error(`local sandbox repo is missing: ${repoDir}`);
    }
    return this.sandboxHandle(req.workspaceId, req.sandboxId, repoDir);
  }

  // Remove the sandbox directory tree; force:true makes this idempotent when
  // the directory is already gone.
  async destroySandbox(req: DestroySandboxRequest): Promise<void> {
    rmSync(this.sandboxRoot(req.workspaceId, req.sandboxId), {
      force: true,
      recursive: true,
    });
  }

  async releaseSandbox(_req: ReleaseSandboxRequest): Promise<void> {
    // Local sandboxes stay warm on disk to preserve session state and repo context.
  }

  // Return the shared sandbox-agent endpoint (same for every local sandbox).
  // NOTE(review): baseUrl/token are read off the SDK object via a double
  // assertion — confirm SandboxAgent actually exposes these fields.
  async ensureSandboxAgent(_req: EnsureAgentRequest): Promise<AgentEndpoint> {
    const sdk = await this.sandboxAgent();
    const { baseUrl, token } = sdk as unknown as {
      baseUrl?: string;
      token?: string;
    };
    if (!baseUrl) {
      throw new Error("sandbox-agent baseUrl is unavailable");
    }
    return token ? { endpoint: baseUrl, token } : { endpoint: baseUrl };
  }

  // Health is "down" when the repo dir is missing or any step throws;
  // otherwise the agent's health status maps "ok" → healthy, else degraded.
  async health(req: SandboxHealthRequest): Promise<SandboxHealth> {
    try {
      const repoDir = this.repoDir(req.workspaceId, req.sandboxId);
      if (!existsSync(repoDir)) {
        return {
          status: "down",
          message: "local sandbox repo is missing",
        };
      }
      const sdk = await this.sandboxAgent();
      const health = await sdk.getHealth();
      return {
        status: health.status === "ok" ? "healthy" : "degraded",
        message: health.status,
      };
    } catch (error) {
      return {
        status: "down",
        message: error instanceof Error ? error.message : String(error),
      };
    }
  }

  // For local sandboxes the attach target is the repo path itself.
  async attachTarget(req: AttachTargetRequest): Promise<AttachTarget> {
    return { target: this.repoDir(req.workspaceId, req.sandboxId) };
  }

  /**
   * Run a shell command in the sandbox repo via `bash -lc`, capturing up to
   * 16 MiB of combined output. Failures are mapped to a non-zero exit code
   * with stdout/stderr/error text joined into `result` instead of throwing.
   * NOTE(review): `process.env as Record<string, string>` asserts away
   * possibly-undefined env values — execFile accepts ProcessEnv directly.
   */
  async executeCommand(req: ExecuteSandboxCommandRequest): Promise<ExecuteSandboxCommandResult> {
    const cwd = this.repoDir(req.workspaceId, req.sandboxId);
    try {
      const { stdout, stderr } = await execFileAsync("bash", ["-lc", req.command], {
        cwd,
        env: process.env as Record<string, string>,
        maxBuffer: 1024 * 1024 * 16,
      });
      return {
        exitCode: 0,
        result: [stdout, stderr].filter(Boolean).join(""),
      };
    } catch (error) {
      // execFile errors carry the child's exit code and captured streams.
      const detail = error as { stdout?: string; stderr?: string; code?: number };
      return {
        exitCode: typeof detail.code === "number" ? detail.code : 1,
        result: [detail.stdout, detail.stderr, error instanceof Error ? error.message : String(error)].filter(Boolean).join(""),
      };
    }
  }
}
|
||||
99
foundry/packages/backend/src/providers/provider-api/index.ts
Normal file
99
foundry/packages/backend/src/providers/provider-api/index.ts
Normal file
|
|
@ -0,0 +1,99 @@
|
|||
import type { ProviderId } from "@sandbox-agent/foundry-shared";
|
||||
|
||||
/** Static capability flags a sandbox provider reports about itself. */
export interface ProviderCapabilities {
  // True when sandboxes run on remote infrastructure rather than this host.
  remote: boolean;
  // True when the provider can reuse agent sessions for a sandbox
  // (presumably across release/resume — confirm against callers).
  supportsSessionReuse: boolean;
}
|
||||
|
||||
/** Inputs for provisioning a new sandbox for a task branch. */
export interface CreateSandboxRequest {
  workspaceId: string;
  repoId: string;
  // Git remote URL to clone into the sandbox.
  repoRemote: string;
  // Branch the sandbox should check out.
  branchName: string;
  // Task id; providers may reuse it as the sandbox id.
  taskId: string;
  // Optional hook for provider debug logging during provisioning.
  debug?: (message: string, context?: Record<string, unknown>) => void;
  // Provider-specific options, validated by the provider itself.
  options?: Record<string, unknown>;
}

/** Inputs for reattaching to a previously created sandbox. */
export interface ResumeSandboxRequest {
  workspaceId: string;
  sandboxId: string;
  // Provider-specific options, validated by the provider itself.
  options?: Record<string, unknown>;
}

/** Identifies the sandbox to delete permanently. */
export interface DestroySandboxRequest {
  workspaceId: string;
  sandboxId: string;
}

/** Identifies the sandbox to release (stop/suspend) without deleting state. */
export interface ReleaseSandboxRequest {
  workspaceId: string;
  sandboxId: string;
}

/** Identifies the sandbox whose agent endpoint should be ensured. */
export interface EnsureAgentRequest {
  workspaceId: string;
  sandboxId: string;
}

/** Identifies the sandbox to health-check. */
export interface SandboxHealthRequest {
  workspaceId: string;
  sandboxId: string;
}

/** Identifies the sandbox to compute an attach target for. */
export interface AttachTargetRequest {
  workspaceId: string;
  sandboxId: string;
}

/** A shell command to run inside a sandbox. */
export interface ExecuteSandboxCommandRequest {
  workspaceId: string;
  sandboxId: string;
  // Command line to execute inside the sandbox.
  command: string;
  // Optional human-readable label used in provider logs/errors.
  label?: string;
}
|
||||
|
||||
/** Handle returned by createSandbox/resumeSandbox. */
export interface SandboxHandle {
  sandboxId: string;
  // Provider-scheme target (e.g. local:// or daytona:// URI) used to switch
  // into the sandbox.
  switchTarget: string;
  // Provider-specific metadata (e.g. working directory) — shape varies.
  metadata: Record<string, unknown>;
}

/** Where to reach the sandbox-agent server for a sandbox. */
export interface AgentEndpoint {
  endpoint: string;
  // Auth token when the endpoint requires one.
  token?: string;
}

/** Result of a sandbox health probe. */
export interface SandboxHealth {
  status: "healthy" | "degraded" | "down";
  // Human-readable explanation of the status.
  message: string;
}

/** Opaque target string used to attach to a sandbox. */
export interface AttachTarget {
  target: string;
}

/** Outcome of running a command inside a sandbox. */
export interface ExecuteSandboxCommandResult {
  // Child process exit code; 0 on success.
  exitCode: number;
  // Combined captured output (and error text on failure).
  result: string;
}
|
||||
|
||||
/**
 * Contract every sandbox provider (local, daytona, …) implements.
 * Lifecycle: create → ensureSandboxAgent/executeCommand/health → release or
 * destroy; resume reattaches to an existing sandbox.
 */
export interface SandboxProvider {
  /** Stable provider identifier. */
  id(): ProviderId;
  /** Static capability flags for this provider. */
  capabilities(): ProviderCapabilities;
  /** Validate and normalize provider-specific configuration. */
  validateConfig(input: unknown): Promise<Record<string, unknown>>;

  /** Provision a sandbox for a task branch. */
  createSandbox(req: CreateSandboxRequest): Promise<SandboxHandle>;
  /** Reattach to an existing sandbox. */
  resumeSandbox(req: ResumeSandboxRequest): Promise<SandboxHandle>;
  /** Permanently delete a sandbox and its state. */
  destroySandbox(req: DestroySandboxRequest): Promise<void>;
  /**
   * Release resources for a sandbox without deleting its filesystem/state.
   * For remote providers, this typically maps to "stop"/"suspend".
   */
  releaseSandbox(req: ReleaseSandboxRequest): Promise<void>;

  /** Ensure the sandbox-agent is reachable and return its endpoint. */
  ensureSandboxAgent(req: EnsureAgentRequest): Promise<AgentEndpoint>;
  /** Probe sandbox health. */
  health(req: SandboxHealthRequest): Promise<SandboxHealth>;
  /** Compute the target used to attach to the sandbox. */
  attachTarget(req: AttachTargetRequest): Promise<AttachTarget>;
  /** Run a command inside the sandbox and capture its output. */
  executeCommand(req: ExecuteSandboxCommandRequest): Promise<ExecuteSandboxCommandResult>;
}
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue