From 70d31f819ced769d942ed28549fb053f996483b7 Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Sat, 14 Mar 2026 12:14:06 -0700 Subject: [PATCH 01/48] chore(foundry): improve sandbox impl + status pill (#252) * Improve Daytona sandbox provisioning and frontend UI Refactor git clone script in Daytona provider to use cleaner shell logic for GitHub token authentication and branch checkout. Add support for private repository clones with token-based auth. Improve Daytona provider error handling and git configuration setup. Frontend improvements include enhanced dev panel, workspace dashboard, sidebar navigation, and UI components for better task/session management. Update interest manager and backend client to support improved session state handling. Co-Authored-By: Claude Haiku 4.5 * Add header status pill showing task/session/sandbox state Surface aggregate status (error, provisioning, running, ready, no sandbox) as a colored pill in the transcript panel header. Integrates task runtime status, session status, and sandbox availability via the sandboxProcesses interest topic so the pill accurately reflects unreachable sandboxes. Includes mock tasks demonstrating error, provisioning, and running states, unit tests for deriveHeaderStatus, and workspace-dashboard integration. 
Co-Authored-By: Claude Opus 4.6 --------- Co-authored-by: Claude Haiku 4.5 --- foundry/CLAUDE.md | 2 + foundry/compose.dev.yaml | 5 + foundry/packages/backend/package.json | 5 +- .../packages/backend/src/actors/context.ts | 15 +- .../packages/backend/src/actors/handles.ts | 47 +- foundry/packages/backend/src/actors/index.ts | 9 +- foundry/packages/backend/src/actors/keys.ts | 9 +- .../backend/src/actors/project/actions.ts | 56 +- .../src/actors/sandbox-instance/db/db.ts | 5 - .../sandbox-instance/db/drizzle.config.ts | 6 - .../db/drizzle/0000_smooth_sauron.sql | 27 - .../db/drizzle/meta/0000_snapshot.json | 180 ----- .../db/drizzle/meta/_journal.json | 13 - .../actors/sandbox-instance/db/migrations.ts | 48 -- .../src/actors/sandbox-instance/db/schema.ts | 38 -- .../src/actors/sandbox-instance/index.ts | 640 ------------------ .../src/actors/sandbox-instance/persist.ts | 266 -------- .../backend/src/actors/sandbox/index.ts | 401 +++++++++++ .../src/actors/task-status-sync/index.ts | 110 --- .../packages/backend/src/actors/task/index.ts | 4 +- .../backend/src/actors/task/workbench.ts | 375 +++++----- .../src/actors/task/workflow/commands.ts | 92 +-- .../backend/src/actors/task/workflow/index.ts | 68 +- .../backend/src/actors/task/workflow/init.ts | 543 +++------------ .../backend/src/actors/task/workflow/push.ts | 29 +- .../backend/src/actors/task/workflow/queue.ts | 1 - .../src/actors/task/workflow/status-sync.ts | 148 ---- .../backend/src/actors/workspace/actions.ts | 46 +- foundry/packages/backend/src/driver.ts | 82 --- foundry/packages/backend/src/index.ts | 8 +- .../src/integrations/daytona/client.ts | 113 ---- .../backend/src/integrations/git/index.ts | 2 +- .../backend/src/providers/daytona/index.ts | 485 ------------- .../packages/backend/src/providers/index.ts | 77 --- .../backend/src/providers/local/index.ts | 235 ------- .../src/providers/provider-api/index.ts | 100 --- .../packages/backend/src/sandbox-config.ts | 39 ++ 
.../backend/test/daytona-provider.test.ts | 184 ----- .../backend/test/helpers/test-context.ts | 7 +- .../backend/test/helpers/test-driver.ts | 88 +-- foundry/packages/backend/test/keys.test.ts | 14 +- .../packages/backend/test/providers.test.ts | 52 -- .../backend/test/sandbox-config.test.ts | 50 ++ .../test/sandbox-instance-persist.test.ts | 21 - .../backend/test/workspace-isolation.test.ts | 4 +- foundry/packages/cli/src/index.ts | 4 +- .../packages/cli/test/backend-manager.test.ts | 3 +- foundry/packages/cli/test/theme.test.ts | 3 +- foundry/packages/cli/test/tui-format.test.ts | 6 +- .../cli/test/workspace-config.test.ts | 3 +- foundry/packages/client/src/backend-client.ts | 189 +++--- .../packages/client/src/interest/manager.ts | 9 + .../client/src/interest/remote-manager.ts | 27 +- foundry/packages/client/src/keys.ts | 9 +- foundry/packages/client/src/mock-app.ts | 5 +- foundry/packages/client/src/view-model.ts | 6 - .../packages/client/src/workbench-model.ts | 115 +++- .../client/test/e2e/github-pr-e2e.test.ts | 12 +- .../client/test/e2e/workbench-e2e.test.ts | 60 +- .../test/e2e/workbench-load-e2e.test.ts | 10 +- .../client/test/interest-manager.test.ts | 11 + foundry/packages/client/test/keys.test.ts | 5 +- .../packages/client/test/view-model.test.ts | 12 +- .../frontend/src/components/dev-panel.tsx | 295 ++++++-- .../frontend/src/components/mock-layout.tsx | 182 ++++- .../src/components/mock-layout/sidebar.tsx | 6 +- .../mock-layout/transcript-header.tsx | 13 +- .../src/components/mock-layout/ui.tsx | 82 ++- .../components/mock-layout/view-model.test.ts | 2 +- .../src/components/mock-layout/view-model.ts | 8 +- .../src/components/workspace-dashboard.tsx | 167 ++++- .../frontend/src/features/tasks/model.test.ts | 6 +- .../src/features/tasks/status.test.ts | 133 ++++ .../frontend/src/features/tasks/status.ts | 179 +++++ foundry/packages/shared/src/app-shell.ts | 4 +- foundry/packages/shared/src/config.ts | 12 +- foundry/packages/shared/src/contracts.ts | 8 
+- foundry/packages/shared/src/workbench.ts | 22 +- .../packages/shared/test/workspace.test.ts | 3 +- pnpm-lock.yaml | 283 ++++++-- sdks/typescript/src/client.ts | 79 ++- sdks/typescript/tests/integration.test.ts | 69 +- 82 files changed, 2625 insertions(+), 4166 deletions(-) delete mode 100644 foundry/packages/backend/src/actors/sandbox-instance/db/db.ts delete mode 100644 foundry/packages/backend/src/actors/sandbox-instance/db/drizzle.config.ts delete mode 100644 foundry/packages/backend/src/actors/sandbox-instance/db/drizzle/0000_smooth_sauron.sql delete mode 100644 foundry/packages/backend/src/actors/sandbox-instance/db/drizzle/meta/0000_snapshot.json delete mode 100644 foundry/packages/backend/src/actors/sandbox-instance/db/drizzle/meta/_journal.json delete mode 100644 foundry/packages/backend/src/actors/sandbox-instance/db/migrations.ts delete mode 100644 foundry/packages/backend/src/actors/sandbox-instance/db/schema.ts delete mode 100644 foundry/packages/backend/src/actors/sandbox-instance/index.ts delete mode 100644 foundry/packages/backend/src/actors/sandbox-instance/persist.ts create mode 100644 foundry/packages/backend/src/actors/sandbox/index.ts delete mode 100644 foundry/packages/backend/src/actors/task-status-sync/index.ts delete mode 100644 foundry/packages/backend/src/actors/task/workflow/status-sync.ts delete mode 100644 foundry/packages/backend/src/integrations/daytona/client.ts delete mode 100644 foundry/packages/backend/src/providers/daytona/index.ts delete mode 100644 foundry/packages/backend/src/providers/index.ts delete mode 100644 foundry/packages/backend/src/providers/local/index.ts delete mode 100644 foundry/packages/backend/src/providers/provider-api/index.ts create mode 100644 foundry/packages/backend/src/sandbox-config.ts delete mode 100644 foundry/packages/backend/test/daytona-provider.test.ts delete mode 100644 foundry/packages/backend/test/providers.test.ts create mode 100644 foundry/packages/backend/test/sandbox-config.test.ts 
delete mode 100644 foundry/packages/backend/test/sandbox-instance-persist.test.ts create mode 100644 foundry/packages/frontend/src/features/tasks/status.test.ts create mode 100644 foundry/packages/frontend/src/features/tasks/status.ts diff --git a/foundry/CLAUDE.md b/foundry/CLAUDE.md index 8af6c92..e83c532 100644 --- a/foundry/CLAUDE.md +++ b/foundry/CLAUDE.md @@ -208,6 +208,8 @@ For all Rivet/RivetKit implementation: - Read paths must not force refresh/sync work inline. Serve the latest cached projection, mark staleness explicitly, and trigger background refresh separately when needed. - If a workflow needs to resume after some external work completes, model that as workflow state plus follow-up messages/events instead of holding the original request open. - No retries: never add retry loops (`withRetries`, `setTimeout` retry, exponential backoff) anywhere in the codebase. If an operation fails, surface the error immediately. If a dependency is not ready yet, model that explicitly with workflow state and resume from a push/event instead of polling or retry loops. +- Never throw errors that expect the caller to retry (e.g. `throw new Error("... retry shortly")`). If a dependency is not ready, write the current state to the DB with an appropriate pending status, enqueue the async work, and return successfully. Let the client observe the pending → ready transition via push events. +- Action return contract: every action that creates a resource must write the resource record to the DB before returning, so the client can immediately query/render it. The record may have a pending status, but it must exist. Never return an ID that doesn't yet have a corresponding DB row. - Actor handle policy: - Prefer explicit `get` or explicit `create` based on workflow intent; do not default to `getOrCreate`. - Use `get`/`getForId` when the actor is expected to already exist; if missing, surface an explicit `Actor not found` error with recovery context. 
diff --git a/foundry/compose.dev.yaml b/foundry/compose.dev.yaml index a66a8c6..e412ac9 100644 --- a/foundry/compose.dev.yaml +++ b/foundry/compose.dev.yaml @@ -39,6 +39,10 @@ services: STRIPE_SECRET_KEY: "${STRIPE_SECRET_KEY:-}" STRIPE_WEBHOOK_SECRET: "${STRIPE_WEBHOOK_SECRET:-}" STRIPE_PRICE_TEAM: "${STRIPE_PRICE_TEAM:-}" + FOUNDRY_SANDBOX_PROVIDER: "${FOUNDRY_SANDBOX_PROVIDER:-local}" + E2B_API_KEY: "${E2B_API_KEY:-}" + E2B_TEMPLATE: "${E2B_TEMPLATE:-}" + HF_E2B_TEMPLATE: "${HF_E2B_TEMPLATE:-${E2B_TEMPLATE:-}}" DAYTONA_ENDPOINT: "${DAYTONA_ENDPOINT:-}" DAYTONA_API_KEY: "${DAYTONA_API_KEY:-}" HF_DAYTONA_ENDPOINT: "${HF_DAYTONA_ENDPOINT:-}" @@ -52,6 +56,7 @@ services: - "../../../task/rivet-checkout:/task/rivet-checkout:ro" # Reuse the host Codex auth profile for local sandbox-agent Codex sessions in dev. - "${HOME}/.codex:/root/.codex" + - "/var/run/docker.sock:/var/run/docker.sock" # Keep backend dependency installs Linux-native instead of using host node_modules. - "foundry_backend_root_node_modules:/app/node_modules" - "foundry_backend_backend_node_modules:/app/foundry/packages/backend/node_modules" diff --git a/foundry/packages/backend/package.json b/foundry/packages/backend/package.json index aec80a0..e11cd62 100644 --- a/foundry/packages/backend/package.json +++ b/foundry/packages/backend/package.json @@ -13,18 +13,19 @@ "start": "bun dist/index.js start" }, "dependencies": { - "@daytonaio/sdk": "0.141.0", + "@e2b/code-interpreter": "^2.3.3", "@hono/node-server": "^1.19.7", "@hono/node-ws": "^1.3.0", "@iarna/toml": "^2.2.5", "@sandbox-agent/foundry-shared": "workspace:*", "@sandbox-agent/persist-rivet": "workspace:*", "better-auth": "^1.5.5", + "dockerode": "^4.0.9", "drizzle-kit": "^0.31.8", "drizzle-orm": "^0.44.5", "hono": "^4.11.9", "pino": "^10.3.1", - "rivetkit": "2.1.6", + "rivetkit": "https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a", "sandbox-agent": "workspace:*", "uuid": "^13.0.0", "ws": "^8.19.0", diff --git 
a/foundry/packages/backend/src/actors/context.ts b/foundry/packages/backend/src/actors/context.ts index 1c03ce2..3554a96 100644 --- a/foundry/packages/backend/src/actors/context.ts +++ b/foundry/packages/backend/src/actors/context.ts @@ -1,24 +1,15 @@ import type { AppConfig } from "@sandbox-agent/foundry-shared"; import type { BackendDriver } from "../driver.js"; import type { NotificationService } from "../notifications/index.js"; -import type { ProviderRegistry } from "../providers/index.js"; import type { AppShellServices } from "../services/app-shell-runtime.js"; let runtimeConfig: AppConfig | null = null; -let providerRegistry: ProviderRegistry | null = null; let notificationService: NotificationService | null = null; let runtimeDriver: BackendDriver | null = null; let appShellServices: AppShellServices | null = null; -export function initActorRuntimeContext( - config: AppConfig, - providers: ProviderRegistry, - notifications?: NotificationService, - driver?: BackendDriver, - appShell?: AppShellServices, -): void { +export function initActorRuntimeContext(config: AppConfig, notifications?: NotificationService, driver?: BackendDriver, appShell?: AppShellServices): void { runtimeConfig = config; - providerRegistry = providers; notificationService = notifications ?? null; runtimeDriver = driver ?? null; appShellServices = appShell ?? 
null; @@ -26,12 +17,11 @@ export function initActorRuntimeContext( export function getActorRuntimeContext(): { config: AppConfig; - providers: ProviderRegistry; notifications: NotificationService | null; driver: BackendDriver; appShell: AppShellServices; } { - if (!runtimeConfig || !providerRegistry) { + if (!runtimeConfig) { throw new Error("Actor runtime context not initialized"); } @@ -45,7 +35,6 @@ export function getActorRuntimeContext(): { return { config: runtimeConfig, - providers: providerRegistry, notifications: notificationService, driver: runtimeDriver, appShell: appShellServices, diff --git a/foundry/packages/backend/src/actors/handles.ts b/foundry/packages/backend/src/actors/handles.ts index 02de614..58f8cd7 100644 --- a/foundry/packages/backend/src/actors/handles.ts +++ b/foundry/packages/backend/src/actors/handles.ts @@ -1,15 +1,4 @@ -import { - authUserKey, - taskKey, - taskStatusSyncKey, - historyKey, - projectBranchSyncKey, - projectKey, - projectPrSyncKey, - sandboxInstanceKey, - workspaceKey, -} from "./keys.js"; -import type { ProviderId } from "@sandbox-agent/foundry-shared"; +import { authUserKey, taskKey, historyKey, projectBranchSyncKey, projectKey, projectPrSyncKey, taskSandboxKey, workspaceKey } from "./keys.js"; export function actorClient(c: any) { return c.client(); @@ -86,30 +75,12 @@ export async function getOrCreateProjectBranchSync(c: any, workspaceId: string, }); } -export function getSandboxInstance(c: any, workspaceId: string, providerId: ProviderId, sandboxId: string) { - return actorClient(c).sandboxInstance.get(sandboxInstanceKey(workspaceId, providerId, sandboxId)); +export function getTaskSandbox(c: any, workspaceId: string, sandboxId: string) { + return actorClient(c).taskSandbox.get(taskSandboxKey(workspaceId, sandboxId)); } -export async function getOrCreateSandboxInstance( - c: any, - workspaceId: string, - providerId: ProviderId, - sandboxId: string, - createWithInput: Record, -) { - return await 
actorClient(c).sandboxInstance.getOrCreate(sandboxInstanceKey(workspaceId, providerId, sandboxId), { createWithInput }); -} - -export async function getOrCreateTaskStatusSync( - c: any, - workspaceId: string, - repoId: string, - taskId: string, - sandboxId: string, - sessionId: string, - createWithInput: Record, -) { - return await actorClient(c).taskStatusSync.getOrCreate(taskStatusSyncKey(workspaceId, repoId, taskId, sandboxId, sessionId), { +export async function getOrCreateTaskSandbox(c: any, workspaceId: string, sandboxId: string, createWithInput?: Record) { + return await actorClient(c).taskSandbox.getOrCreate(taskSandboxKey(workspaceId, sandboxId), { createWithInput, }); } @@ -122,10 +93,6 @@ export function selfProjectBranchSync(c: any) { return actorClient(c).projectBranchSync.getForId(c.actorId); } -export function selfTaskStatusSync(c: any) { - return actorClient(c).taskStatusSync.getForId(c.actorId); -} - export function selfHistory(c: any) { return actorClient(c).history.getForId(c.actorId); } @@ -142,10 +109,6 @@ export function selfProject(c: any) { return actorClient(c).project.getForId(c.actorId); } -export function selfSandboxInstance(c: any) { - return actorClient(c).sandboxInstance.getForId(c.actorId); -} - export function selfAuthUser(c: any) { return actorClient(c).authUser.getForId(c.actorId); } diff --git a/foundry/packages/backend/src/actors/index.ts b/foundry/packages/backend/src/actors/index.ts index 245b6a4..3c7a04a 100644 --- a/foundry/packages/backend/src/actors/index.ts +++ b/foundry/packages/backend/src/actors/index.ts @@ -1,12 +1,11 @@ import { authUser } from "./auth-user/index.js"; import { setup } from "rivetkit"; -import { taskStatusSync } from "./task-status-sync/index.js"; import { task } from "./task/index.js"; import { history } from "./history/index.js"; import { projectBranchSync } from "./project-branch-sync/index.js"; import { projectPrSync } from "./project-pr-sync/index.js"; import { project } from 
"./project/index.js"; -import { sandboxInstance } from "./sandbox-instance/index.js"; +import { taskSandbox } from "./sandbox/index.js"; import { workspace } from "./workspace/index.js"; import { logger } from "../logging.js"; @@ -27,23 +26,21 @@ export const registry = setup({ workspace, project, task, - sandboxInstance, + taskSandbox, history, projectPrSync, projectBranchSync, - taskStatusSync, }, }); export * from "./context.js"; export * from "./events.js"; export * from "./auth-user/index.js"; -export * from "./task-status-sync/index.js"; export * from "./task/index.js"; export * from "./history/index.js"; export * from "./keys.js"; export * from "./project-branch-sync/index.js"; export * from "./project-pr-sync/index.js"; export * from "./project/index.js"; -export * from "./sandbox-instance/index.js"; +export * from "./sandbox/index.js"; export * from "./workspace/index.js"; diff --git a/foundry/packages/backend/src/actors/keys.ts b/foundry/packages/backend/src/actors/keys.ts index bec675f..4e49ea0 100644 --- a/foundry/packages/backend/src/actors/keys.ts +++ b/foundry/packages/backend/src/actors/keys.ts @@ -16,8 +16,8 @@ export function taskKey(workspaceId: string, repoId: string, taskId: string): Ac return ["ws", workspaceId, "project", repoId, "task", taskId]; } -export function sandboxInstanceKey(workspaceId: string, providerId: string, sandboxId: string): ActorKey { - return ["ws", workspaceId, "provider", providerId, "sandbox", sandboxId]; +export function taskSandboxKey(workspaceId: string, sandboxId: string): ActorKey { + return ["ws", workspaceId, "sandbox", sandboxId]; } export function historyKey(workspaceId: string, repoId: string): ActorKey { @@ -31,8 +31,3 @@ export function projectPrSyncKey(workspaceId: string, repoId: string): ActorKey export function projectBranchSyncKey(workspaceId: string, repoId: string): ActorKey { return ["ws", workspaceId, "project", repoId, "branch-sync"]; } - -export function taskStatusSyncKey(workspaceId: string, 
repoId: string, taskId: string, sandboxId: string, sessionId: string): ActorKey { - // Include sandbox + session so multiple sandboxes/sessions can be tracked per task. - return ["ws", workspaceId, "project", repoId, "task", taskId, "status-sync", sandboxId, sessionId]; -} diff --git a/foundry/packages/backend/src/actors/project/actions.ts b/foundry/packages/backend/src/actors/project/actions.ts index bcd8f36..4b2b245 100644 --- a/foundry/packages/backend/src/actors/project/actions.ts +++ b/foundry/packages/backend/src/actors/project/actions.ts @@ -126,12 +126,24 @@ async function ensureProjectSyncActors(c: any, localPath: string): Promise } const prSync = await getOrCreateProjectPrSync(c, c.state.workspaceId, c.state.repoId, localPath, 30_000); - await prSync.start(); - const branchSync = await getOrCreateProjectBranchSync(c, c.state.workspaceId, c.state.repoId, localPath, 5_000); - await branchSync.start(); - c.state.syncActorsStarted = true; + + void prSync.start().catch((error: unknown) => { + logActorWarning("project.sync", "starting pr sync actor failed", { + workspaceId: c.state.workspaceId, + repoId: c.state.repoId, + error: resolveErrorMessage(error), + }); + }); + + void branchSync.start().catch((error: unknown) => { + logActorWarning("project.sync", "starting branch sync actor failed", { + workspaceId: c.state.workspaceId, + repoId: c.state.repoId, + error: resolveErrorMessage(error), + }); + }); } async function ensureRepoActionJobsTable(c: any): Promise { @@ -316,13 +328,17 @@ async function ensureProjectReadyForRead(c: any): Promise { throw new Error("project remoteUrl is not initialized"); } - if (!c.state.localPath || !c.state.syncActorsStarted) { + if (!c.state.localPath) { const result = await projectActions.ensure(c, { remoteUrl: c.state.remoteUrl }); - const localPath = result?.localPath ?? 
c.state.localPath; - if (!localPath) { - throw new Error("project local repo is not initialized"); - } - return localPath; + c.state.localPath = result?.localPath ?? c.state.localPath; + } + + if (!c.state.localPath) { + throw new Error("project local repo is not initialized"); + } + + if (!c.state.syncActorsStarted) { + await ensureProjectSyncActors(c, c.state.localPath); } return c.state.localPath; @@ -428,7 +444,6 @@ async function ensureProjectMutation(c: any, cmd: EnsureProjectCommand): Promise }) .run(); - await ensureProjectSyncActors(c, localPath); return { localPath }; } @@ -437,7 +452,6 @@ async function hydrateTaskIndexMutation(c: any, _cmd?: HydrateTaskIndexCommand): } async function createTaskMutation(c: any, cmd: CreateTaskCommand): Promise { - const localPath = await ensureProjectReady(c); const onBranch = cmd.onBranch?.trim() || null; const initialBranchName = onBranch; const initialTitle = onBranch ? deriveFallbackTitle(cmd.task, cmd.explicitTitle ?? undefined) : null; @@ -463,7 +477,6 @@ async function createTaskMutation(c: any, cmd: CreateTaskCommand): Promise false)) { + if (branchAvailableInRepo && (await driver.stack.available(localPath).catch(() => false))) { let stackRows = await driver.stack.listStack(localPath).catch(() => []); let stackRow = stackRows.find((entry) => entry.branchName === branchName); @@ -874,6 +888,10 @@ async function applyPrSyncResultMutation(c: any, body: PrSyncResult): Promise { const incoming = new Set(body.items.map((item) => item.branchName)); + const reservedRows = await c.db.select({ branchName: taskIndex.branchName }).from(taskIndex).where(isNotNull(taskIndex.branchName)).all(); + const reservedBranches = new Set( + reservedRows.map((row) => row.branchName).filter((branchName): branchName is string => typeof branchName === "string" && branchName.length > 0), + ); for (const item of body.items) { const existing = await c.db @@ -918,7 +936,7 @@ async function applyBranchSyncResultMutation(c: any, body: 
BranchSyncResult): Pr const existingRows = await c.db.select({ branchName: branches.branchName }).from(branches).all(); for (const row of existingRows) { - if (incoming.has(row.branchName)) { + if (incoming.has(row.branchName) || reservedBranches.has(row.branchName)) { continue; } await c.db.delete(branches).where(eq(branches.branchName, row.branchName)).run(); @@ -954,7 +972,7 @@ export async function runProjectWorkflow(ctx: any): Promise { if (msg.name === "project.command.createTask") { const result = await loopCtx.step({ name: "project-create-task", - timeout: 12 * 60_000, + timeout: 5 * 60_000, run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskCommand), }); await msg.complete(result); @@ -1020,7 +1038,7 @@ export const projectActions = { return expectQueueResponse( await self.send(projectWorkflowQueueName("project.command.createTask"), cmd, { wait: true, - timeout: 12 * 60_000, + timeout: 5 * 60_000, }), ); }, diff --git a/foundry/packages/backend/src/actors/sandbox-instance/db/db.ts b/foundry/packages/backend/src/actors/sandbox-instance/db/db.ts deleted file mode 100644 index 0251c43..0000000 --- a/foundry/packages/backend/src/actors/sandbox-instance/db/db.ts +++ /dev/null @@ -1,5 +0,0 @@ -import { db } from "rivetkit/db/drizzle"; -import * as schema from "./schema.js"; -import migrations from "./migrations.js"; - -export const sandboxInstanceDb = db({ schema, migrations }); diff --git a/foundry/packages/backend/src/actors/sandbox-instance/db/drizzle.config.ts b/foundry/packages/backend/src/actors/sandbox-instance/db/drizzle.config.ts deleted file mode 100644 index b09d4cb..0000000 --- a/foundry/packages/backend/src/actors/sandbox-instance/db/drizzle.config.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { defineConfig } from "rivetkit/db/drizzle"; - -export default defineConfig({ - out: "./src/actors/sandbox-instance/db/drizzle", - schema: "./src/actors/sandbox-instance/db/schema.ts", -}); diff --git 
a/foundry/packages/backend/src/actors/sandbox-instance/db/drizzle/0000_smooth_sauron.sql b/foundry/packages/backend/src/actors/sandbox-instance/db/drizzle/0000_smooth_sauron.sql deleted file mode 100644 index 20b3180..0000000 --- a/foundry/packages/backend/src/actors/sandbox-instance/db/drizzle/0000_smooth_sauron.sql +++ /dev/null @@ -1,27 +0,0 @@ -CREATE TABLE `sandbox_instance` ( - `id` integer PRIMARY KEY NOT NULL, - `metadata_json` text NOT NULL, - `status` text NOT NULL, - `updated_at` integer NOT NULL -); ---> statement-breakpoint -CREATE TABLE `sandbox_session_events` ( - `id` text PRIMARY KEY NOT NULL, - `session_id` text NOT NULL, - `event_index` integer NOT NULL, - `created_at` integer NOT NULL, - `connection_id` text NOT NULL, - `sender` text NOT NULL, - `payload_json` text NOT NULL -); ---> statement-breakpoint -CREATE UNIQUE INDEX `sandbox_session_events_session_id_event_index_unique` ON `sandbox_session_events` (`session_id`,`event_index`);--> statement-breakpoint -CREATE TABLE `sandbox_sessions` ( - `id` text PRIMARY KEY NOT NULL, - `agent` text NOT NULL, - `agent_session_id` text NOT NULL, - `last_connection_id` text NOT NULL, - `created_at` integer NOT NULL, - `destroyed_at` integer, - `session_init_json` text -); diff --git a/foundry/packages/backend/src/actors/sandbox-instance/db/drizzle/meta/0000_snapshot.json b/foundry/packages/backend/src/actors/sandbox-instance/db/drizzle/meta/0000_snapshot.json deleted file mode 100644 index d3e09c6..0000000 --- a/foundry/packages/backend/src/actors/sandbox-instance/db/drizzle/meta/0000_snapshot.json +++ /dev/null @@ -1,180 +0,0 @@ -{ - "version": "6", - "dialect": "sqlite", - "id": "130486c5-6208-4d00-b367-e02b9def953a", - "prevId": "00000000-0000-0000-0000-000000000000", - "tables": { - "sandbox_instance": { - "name": "sandbox_instance", - "columns": { - "id": { - "name": "id", - "type": "integer", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "metadata_json": { - "name": 
"metadata_json", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "status": { - "name": "status", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "updated_at": { - "name": "updated_at", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - }, - "sandbox_session_events": { - "name": "sandbox_session_events", - "columns": { - "id": { - "name": "id", - "type": "text", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "session_id": { - "name": "session_id", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "event_index": { - "name": "event_index", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "created_at": { - "name": "created_at", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "connection_id": { - "name": "connection_id", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "sender": { - "name": "sender", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "payload_json": { - "name": "payload_json", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - } - }, - "indexes": { - "sandbox_session_events_session_id_event_index_unique": { - "name": "sandbox_session_events_session_id_event_index_unique", - "columns": ["session_id", "event_index"], - "isUnique": true - } - }, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - }, - "sandbox_sessions": { - "name": "sandbox_sessions", - "columns": { - "id": { - "name": "id", - "type": "text", - "primaryKey": true, - "notNull": true, - "autoincrement": false - 
}, - "agent": { - "name": "agent", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "agent_session_id": { - "name": "agent_session_id", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "last_connection_id": { - "name": "last_connection_id", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "created_at": { - "name": "created_at", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "destroyed_at": { - "name": "destroyed_at", - "type": "integer", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "session_init_json": { - "name": "session_init_json", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - } - }, - "views": {}, - "enums": {}, - "_meta": { - "schemas": {}, - "tables": {}, - "columns": {} - }, - "internal": { - "indexes": {} - } -} diff --git a/foundry/packages/backend/src/actors/sandbox-instance/db/drizzle/meta/_journal.json b/foundry/packages/backend/src/actors/sandbox-instance/db/drizzle/meta/_journal.json deleted file mode 100644 index fe993c2..0000000 --- a/foundry/packages/backend/src/actors/sandbox-instance/db/drizzle/meta/_journal.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "version": "7", - "dialect": "sqlite", - "entries": [ - { - "idx": 0, - "version": "6", - "when": 1773376224446, - "tag": "0000_smooth_sauron", - "breakpoints": true - } - ] -} diff --git a/foundry/packages/backend/src/actors/sandbox-instance/db/migrations.ts b/foundry/packages/backend/src/actors/sandbox-instance/db/migrations.ts deleted file mode 100644 index 4db8b1b..0000000 --- a/foundry/packages/backend/src/actors/sandbox-instance/db/migrations.ts +++ /dev/null @@ -1,48 +0,0 @@ -// This file is generated by 
src/actors/_scripts/generate-actor-migrations.ts. -// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql). -// Do not hand-edit this file. - -const journal = { - entries: [ - { - idx: 0, - when: 1773376224446, - tag: "0000_smooth_sauron", - breakpoints: true, - }, - ], -} as const; - -export default { - journal, - migrations: { - m0000: `CREATE TABLE \`sandbox_instance\` ( - \`id\` integer PRIMARY KEY NOT NULL, - \`metadata_json\` text NOT NULL, - \`status\` text NOT NULL, - \`updated_at\` integer NOT NULL -); ---> statement-breakpoint -CREATE TABLE \`sandbox_session_events\` ( - \`id\` text PRIMARY KEY NOT NULL, - \`session_id\` text NOT NULL, - \`event_index\` integer NOT NULL, - \`created_at\` integer NOT NULL, - \`connection_id\` text NOT NULL, - \`sender\` text NOT NULL, - \`payload_json\` text NOT NULL -); ---> statement-breakpoint -CREATE UNIQUE INDEX \`sandbox_session_events_session_id_event_index_unique\` ON \`sandbox_session_events\` (\`session_id\`,\`event_index\`);--> statement-breakpoint -CREATE TABLE \`sandbox_sessions\` ( - \`id\` text PRIMARY KEY NOT NULL, - \`agent\` text NOT NULL, - \`agent_session_id\` text NOT NULL, - \`last_connection_id\` text NOT NULL, - \`created_at\` integer NOT NULL, - \`destroyed_at\` integer, - \`session_init_json\` text -); -`, - } as const, -}; diff --git a/foundry/packages/backend/src/actors/sandbox-instance/db/schema.ts b/foundry/packages/backend/src/actors/sandbox-instance/db/schema.ts deleted file mode 100644 index 06ce05a..0000000 --- a/foundry/packages/backend/src/actors/sandbox-instance/db/schema.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { integer, sqliteTable, text, uniqueIndex } from "rivetkit/db/drizzle"; - -// SQLite is per sandbox-instance actor instance. -export const sandboxInstance = sqliteTable("sandbox_instance", { - id: integer("id").primaryKey(), - // Structured by the provider/runtime metadata serializer for this actor. 
- metadataJson: text("metadata_json").notNull(), - status: text("status").notNull(), - updatedAt: integer("updated_at").notNull(), -}); - -// Persist sandbox-agent sessions/events in SQLite instead of actor state so they survive -// serverless actor evictions and backend restarts. -export const sandboxSessions = sqliteTable("sandbox_sessions", { - id: text("id").notNull().primaryKey(), - agent: text("agent").notNull(), - agentSessionId: text("agent_session_id").notNull(), - lastConnectionId: text("last_connection_id").notNull(), - createdAt: integer("created_at").notNull(), - destroyedAt: integer("destroyed_at"), - // Structured by the sandbox-agent ACP session bootstrap payload. - sessionInitJson: text("session_init_json"), -}); - -export const sandboxSessionEvents = sqliteTable( - "sandbox_session_events", - { - id: text("id").notNull().primaryKey(), - sessionId: text("session_id").notNull(), - eventIndex: integer("event_index").notNull(), - createdAt: integer("created_at").notNull(), - connectionId: text("connection_id").notNull(), - sender: text("sender").notNull(), - // Structured by the sandbox-agent session event envelope. 
- payloadJson: text("payload_json").notNull(), - }, - (table) => [uniqueIndex("sandbox_session_events_session_id_event_index_unique").on(table.sessionId, table.eventIndex)], -); diff --git a/foundry/packages/backend/src/actors/sandbox-instance/index.ts b/foundry/packages/backend/src/actors/sandbox-instance/index.ts deleted file mode 100644 index 566a378..0000000 --- a/foundry/packages/backend/src/actors/sandbox-instance/index.ts +++ /dev/null @@ -1,640 +0,0 @@ -import { setTimeout as delay } from "node:timers/promises"; -import { eq } from "drizzle-orm"; -import { actor, queue } from "rivetkit"; -import { Loop, workflow } from "rivetkit/workflow"; -import type { ProviderId } from "@sandbox-agent/foundry-shared"; -import type { - ProcessCreateRequest, - ProcessInfo, - ProcessLogFollowQuery, - ProcessLogsResponse, - ProcessSignalQuery, - SessionEvent, - SessionRecord, -} from "sandbox-agent"; -import { sandboxInstanceDb } from "./db/db.js"; -import { sandboxInstance as sandboxInstanceTable } from "./db/schema.js"; -import { SandboxInstancePersistDriver } from "./persist.js"; -import { getActorRuntimeContext } from "../context.js"; -import { selfSandboxInstance } from "../handles.js"; -import { logActorWarning, resolveErrorMessage } from "../logging.js"; -import { expectQueueResponse } from "../../services/queue.js"; - -export interface SandboxInstanceInput { - workspaceId: string; - providerId: ProviderId; - sandboxId: string; -} - -interface SandboxAgentConnection { - endpoint: string; - token?: string; -} - -const SANDBOX_ROW_ID = 1; -const CREATE_SESSION_MAX_ATTEMPTS = 3; -const CREATE_SESSION_RETRY_BASE_MS = 1_000; -const CREATE_SESSION_STEP_TIMEOUT_MS = 10 * 60_000; - -function normalizeStatusFromEventPayload(payload: unknown): "running" | "idle" | "error" | null { - if (payload && typeof payload === "object") { - const envelope = payload as { - error?: unknown; - method?: unknown; - result?: unknown; - }; - - if (envelope.error) { - return "error"; - } - - if 
(envelope.result && typeof envelope.result === "object") { - const stopReason = (envelope.result as { stopReason?: unknown }).stopReason; - if (typeof stopReason === "string" && stopReason.length > 0) { - return "idle"; - } - } - - if (typeof envelope.method === "string") { - const lowered = envelope.method.toLowerCase(); - if (lowered.includes("error") || lowered.includes("failed")) { - return "error"; - } - if (lowered.includes("ended") || lowered.includes("complete") || lowered.includes("stopped")) { - return "idle"; - } - } - } - - return null; -} - -function stringifyJson(value: unknown): string { - return JSON.stringify(value, (_key, item) => { - if (typeof item === "bigint") return item.toString(); - return item; - }); -} - -function parseMetadata(metadataJson: string): Record { - try { - const parsed = JSON.parse(metadataJson) as unknown; - if (parsed && typeof parsed === "object") return parsed as Record; - return {}; - } catch { - return {}; - } -} - -async function loadPersistedAgentConfig(c: any): Promise { - try { - const row = await c.db - .select({ metadataJson: sandboxInstanceTable.metadataJson }) - .from(sandboxInstanceTable) - .where(eq(sandboxInstanceTable.id, SANDBOX_ROW_ID)) - .get(); - - if (row?.metadataJson) { - const metadata = parseMetadata(row.metadataJson); - const endpoint = typeof metadata.agentEndpoint === "string" ? metadata.agentEndpoint.trim() : ""; - const token = typeof metadata.agentToken === "string" ? metadata.agentToken.trim() : ""; - if (endpoint) { - return token ? { endpoint, token } : { endpoint }; - } - } - } catch { - return null; - } - return null; -} - -async function loadFreshDaytonaAgentConfig(c: any): Promise { - const { config, driver } = getActorRuntimeContext(); - const daytona = driver.daytona.createClient({ - apiUrl: config.providers.daytona.endpoint, - apiKey: config.providers.daytona.apiKey, - }); - const sandbox = await daytona.getSandbox(c.state.sandboxId); - const state = String(sandbox.state ?? 
"unknown").toLowerCase(); - if (state !== "started" && state !== "running") { - await daytona.startSandbox(c.state.sandboxId, 60); - } - const preview = await daytona.getPreviewEndpoint(c.state.sandboxId, 2468); - return preview.token ? { endpoint: preview.url, token: preview.token } : { endpoint: preview.url }; -} - -async function loadFreshProviderAgentConfig(c: any): Promise { - const { providers } = getActorRuntimeContext(); - const provider = providers.get(c.state.providerId); - return await provider.ensureSandboxAgent({ - workspaceId: c.state.workspaceId, - sandboxId: c.state.sandboxId, - }); -} - -async function loadAgentConfig(c: any): Promise { - const persisted = await loadPersistedAgentConfig(c); - if (c.state.providerId === "daytona") { - // Keep one stable signed preview endpoint per sandbox-instance actor. - // Rotating preview URLs on every call fragments SDK client state (sessions/events) - // because client caching keys by endpoint. - if (persisted) { - return persisted; - } - return await loadFreshDaytonaAgentConfig(c); - } - - // Local sandboxes are tied to the current backend process, so the sandbox-agent - // token can rotate on restart. Always refresh from the provider instead of - // trusting persisted metadata. 
- if (c.state.providerId === "local") { - return await loadFreshProviderAgentConfig(c); - } - - if (persisted) { - return persisted; - } - - return await loadFreshProviderAgentConfig(c); -} - -async function derivePersistedSessionStatus( - persist: SandboxInstancePersistDriver, - sessionId: string, -): Promise<{ id: string; status: "running" | "idle" | "error" }> { - const session = await persist.getSession(sessionId); - if (!session) { - return { id: sessionId, status: "error" }; - } - - if (session.destroyedAt) { - return { id: sessionId, status: "idle" }; - } - - const events = await persist.listEvents({ - sessionId, - limit: 25, - }); - - for (let index = events.items.length - 1; index >= 0; index -= 1) { - const event = events.items[index]; - if (!event) continue; - const status = normalizeStatusFromEventPayload(event.payload); - if (status) { - return { id: sessionId, status }; - } - } - - return { id: sessionId, status: "idle" }; -} - -function isTransientSessionCreateError(detail: string): boolean { - const lowered = detail.toLowerCase(); - if (lowered.includes("timed out") || lowered.includes("timeout") || lowered.includes("504") || lowered.includes("gateway timeout")) { - // ACP timeout errors are expensive and usually deterministic for the same - // request; immediate retries spawn additional sessions/processes and make - // recovery harder. 
- return false; - } - - return ( - lowered.includes("502") || lowered.includes("503") || lowered.includes("bad gateway") || lowered.includes("econnreset") || lowered.includes("econnrefused") - ); -} - -interface EnsureSandboxCommand { - metadata: Record; - status: string; - agentEndpoint?: string; - agentToken?: string; -} - -interface HealthSandboxCommand { - status: string; - message: string; -} - -interface CreateSessionCommand { - prompt: string; - cwd?: string; - agent?: "claude" | "codex" | "opencode"; -} - -interface CreateSessionResult { - id: string | null; - status: "running" | "idle" | "error"; - error?: string; -} - -interface ListSessionsCommand { - cursor?: string; - limit?: number; -} - -interface ListSessionEventsCommand { - sessionId: string; - cursor?: string; - limit?: number; -} - -interface SendPromptCommand { - sessionId: string; - prompt: string; - notification?: boolean; -} - -interface SessionStatusCommand { - sessionId: string; -} - -interface SessionControlCommand { - sessionId: string; -} - -const SANDBOX_INSTANCE_QUEUE_NAMES = [ - "sandboxInstance.command.ensure", - "sandboxInstance.command.updateHealth", - "sandboxInstance.command.destroy", - "sandboxInstance.command.createSession", - "sandboxInstance.command.sendPrompt", - "sandboxInstance.command.cancelSession", - "sandboxInstance.command.destroySession", -] as const; - -type SandboxInstanceQueueName = (typeof SANDBOX_INSTANCE_QUEUE_NAMES)[number]; - -function sandboxInstanceWorkflowQueueName(name: SandboxInstanceQueueName): SandboxInstanceQueueName { - return name; -} - -async function getSandboxAgentClient(c: any) { - const { driver } = getActorRuntimeContext(); - const persist = new SandboxInstancePersistDriver(c.db); - const { endpoint, token } = await loadAgentConfig(c); - return driver.sandboxAgent.createClient({ - endpoint, - token, - persist, - }); -} - -async function broadcastProcessesUpdated(c: any): Promise { - const client = await getSandboxAgentClient(c); - const { 
processes } = await client.listProcesses(); - c.broadcast("processesUpdated", { - type: "processesUpdated", - processes, - }); -} - -async function ensureSandboxMutation(c: any, command: EnsureSandboxCommand): Promise { - const now = Date.now(); - const metadata = { - ...command.metadata, - agentEndpoint: command.agentEndpoint ?? null, - agentToken: command.agentToken ?? null, - }; - - const metadataJson = stringifyJson(metadata); - await c.db - .insert(sandboxInstanceTable) - .values({ - id: SANDBOX_ROW_ID, - metadataJson, - status: command.status, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: sandboxInstanceTable.id, - set: { - metadataJson, - status: command.status, - updatedAt: now, - }, - }) - .run(); -} - -async function updateHealthMutation(c: any, command: HealthSandboxCommand): Promise { - await c.db - .update(sandboxInstanceTable) - .set({ - status: `${command.status}:${command.message}`, - updatedAt: Date.now(), - }) - .where(eq(sandboxInstanceTable.id, SANDBOX_ROW_ID)) - .run(); -} - -async function destroySandboxMutation(c: any): Promise { - await c.db.delete(sandboxInstanceTable).where(eq(sandboxInstanceTable.id, SANDBOX_ROW_ID)).run(); -} - -async function createSessionMutation(c: any, command: CreateSessionCommand): Promise { - let lastDetail = "sandbox-agent createSession failed"; - let attemptsMade = 0; - - for (let attempt = 1; attempt <= CREATE_SESSION_MAX_ATTEMPTS; attempt += 1) { - attemptsMade = attempt; - try { - const client = await getSandboxAgentClient(c); - - const session = await client.createSession({ - prompt: command.prompt, - cwd: command.cwd, - agent: command.agent, - }); - - return { id: session.id, status: session.status }; - } catch (error) { - const detail = error instanceof Error ? 
error.message : String(error); - lastDetail = detail; - const retryable = isTransientSessionCreateError(detail); - const canRetry = retryable && attempt < CREATE_SESSION_MAX_ATTEMPTS; - - if (!canRetry) { - break; - } - - const waitMs = CREATE_SESSION_RETRY_BASE_MS * attempt; - logActorWarning("sandbox-instance", "createSession transient failure; retrying", { - workspaceId: c.state.workspaceId, - providerId: c.state.providerId, - sandboxId: c.state.sandboxId, - attempt, - maxAttempts: CREATE_SESSION_MAX_ATTEMPTS, - waitMs, - error: detail, - }); - await delay(waitMs); - } - } - - const attemptLabel = attemptsMade === 1 ? "attempt" : "attempts"; - return { - id: null, - status: "error", - error: `sandbox-agent createSession failed after ${attemptsMade} ${attemptLabel}: ${lastDetail}`, - }; -} - -async function sendPromptMutation(c: any, command: SendPromptCommand): Promise { - const client = await getSandboxAgentClient(c); - await client.sendPrompt({ - sessionId: command.sessionId, - prompt: command.prompt, - notification: command.notification, - }); -} - -async function cancelSessionMutation(c: any, command: SessionControlCommand): Promise { - const client = await getSandboxAgentClient(c); - await client.cancelSession(command.sessionId); -} - -async function destroySessionMutation(c: any, command: SessionControlCommand): Promise { - const client = await getSandboxAgentClient(c); - await client.destroySession(command.sessionId); -} - -async function runSandboxInstanceWorkflow(ctx: any): Promise { - await ctx.loop("sandbox-instance-command-loop", async (loopCtx: any) => { - const msg = await loopCtx.queue.next("next-sandbox-instance-command", { - names: [...SANDBOX_INSTANCE_QUEUE_NAMES], - completable: true, - }); - if (!msg) { - return Loop.continue(undefined); - } - - if (msg.name === "sandboxInstance.command.ensure") { - await loopCtx.step("sandbox-instance-ensure", async () => ensureSandboxMutation(loopCtx, msg.body as EnsureSandboxCommand)); - await 
msg.complete({ ok: true }); - return Loop.continue(undefined); - } - - if (msg.name === "sandboxInstance.command.updateHealth") { - await loopCtx.step("sandbox-instance-update-health", async () => updateHealthMutation(loopCtx, msg.body as HealthSandboxCommand)); - await msg.complete({ ok: true }); - return Loop.continue(undefined); - } - - if (msg.name === "sandboxInstance.command.destroy") { - await loopCtx.step("sandbox-instance-destroy", async () => destroySandboxMutation(loopCtx)); - await msg.complete({ ok: true }); - return Loop.continue(undefined); - } - - if (msg.name === "sandboxInstance.command.createSession") { - const result = await loopCtx.step({ - name: "sandbox-instance-create-session", - timeout: CREATE_SESSION_STEP_TIMEOUT_MS, - run: async () => createSessionMutation(loopCtx, msg.body as CreateSessionCommand), - }); - await msg.complete(result); - return Loop.continue(undefined); - } - - if (msg.name === "sandboxInstance.command.sendPrompt") { - await loopCtx.step("sandbox-instance-send-prompt", async () => sendPromptMutation(loopCtx, msg.body as SendPromptCommand)); - await msg.complete({ ok: true }); - return Loop.continue(undefined); - } - - if (msg.name === "sandboxInstance.command.cancelSession") { - await loopCtx.step("sandbox-instance-cancel-session", async () => cancelSessionMutation(loopCtx, msg.body as SessionControlCommand)); - await msg.complete({ ok: true }); - return Loop.continue(undefined); - } - - if (msg.name === "sandboxInstance.command.destroySession") { - await loopCtx.step("sandbox-instance-destroy-session", async () => destroySessionMutation(loopCtx, msg.body as SessionControlCommand)); - await msg.complete({ ok: true }); - } - - return Loop.continue(undefined); - }); -} - -export const sandboxInstance = actor({ - db: sandboxInstanceDb, - queues: Object.fromEntries(SANDBOX_INSTANCE_QUEUE_NAMES.map((name) => [name, queue()])), - options: { - name: "Sandbox Instance", - icon: "box", - actionTimeout: 5 * 60_000, - }, - 
createState: (_c, input: SandboxInstanceInput) => ({ - workspaceId: input.workspaceId, - providerId: input.providerId, - sandboxId: input.sandboxId, - }), - actions: { - async sandboxAgentConnection(c: any): Promise { - return await loadAgentConfig(c); - }, - - async createProcess(c: any, request: ProcessCreateRequest): Promise { - const client = await getSandboxAgentClient(c); - const created = await client.createProcess(request); - await broadcastProcessesUpdated(c); - return created; - }, - - async listProcesses(c: any): Promise<{ processes: ProcessInfo[] }> { - const client = await getSandboxAgentClient(c); - return await client.listProcesses(); - }, - - async getProcessLogs(c: any, request: { processId: string; query?: ProcessLogFollowQuery }): Promise { - const client = await getSandboxAgentClient(c); - return await client.getProcessLogs(request.processId, request.query); - }, - - async stopProcess(c: any, request: { processId: string; query?: ProcessSignalQuery }): Promise { - const client = await getSandboxAgentClient(c); - const stopped = await client.stopProcess(request.processId, request.query); - await broadcastProcessesUpdated(c); - return stopped; - }, - - async killProcess(c: any, request: { processId: string; query?: ProcessSignalQuery }): Promise { - const client = await getSandboxAgentClient(c); - const killed = await client.killProcess(request.processId, request.query); - await broadcastProcessesUpdated(c); - return killed; - }, - - async deleteProcess(c: any, request: { processId: string }): Promise { - const client = await getSandboxAgentClient(c); - await client.deleteProcess(request.processId); - await broadcastProcessesUpdated(c); - }, - - async providerState(c: any): Promise<{ providerId: ProviderId; sandboxId: string; state: string; at: number }> { - const at = Date.now(); - const { config, driver } = getActorRuntimeContext(); - - if (c.state.providerId === "daytona") { - const daytona = driver.daytona.createClient({ - apiUrl: 
config.providers.daytona.endpoint, - apiKey: config.providers.daytona.apiKey, - }); - const sandbox = await daytona.getSandbox(c.state.sandboxId); - const state = String(sandbox.state ?? "unknown").toLowerCase(); - return { providerId: c.state.providerId, sandboxId: c.state.sandboxId, state, at }; - } - - return { - providerId: c.state.providerId, - sandboxId: c.state.sandboxId, - state: "unknown", - at, - }; - }, - - async ensure(c, command: EnsureSandboxCommand): Promise { - const self = selfSandboxInstance(c); - await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.ensure"), command, { - wait: true, - timeout: 60_000, - }); - }, - - async updateHealth(c, command: HealthSandboxCommand): Promise { - const self = selfSandboxInstance(c); - await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.updateHealth"), command, { - wait: true, - timeout: 60_000, - }); - }, - - async destroy(c): Promise { - const self = selfSandboxInstance(c); - await self.send( - sandboxInstanceWorkflowQueueName("sandboxInstance.command.destroy"), - {}, - { - wait: true, - timeout: 60_000, - }, - ); - }, - - async createSession(c: any, command: CreateSessionCommand): Promise { - const self = selfSandboxInstance(c); - return expectQueueResponse( - await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.createSession"), command, { - wait: true, - timeout: 5 * 60_000, - }), - ); - }, - - async listSessions(c: any, command?: ListSessionsCommand): Promise<{ items: SessionRecord[]; nextCursor?: string }> { - const persist = new SandboxInstancePersistDriver(c.db); - try { - const client = await getSandboxAgentClient(c); - - const page = await client.listSessions({ - cursor: command?.cursor, - limit: command?.limit, - }); - - return { - items: page.items, - nextCursor: page.nextCursor, - }; - } catch (error) { - logActorWarning("sandbox-instance", "listSessions remote read failed; using persisted fallback", { - workspaceId: c.state.workspaceId, 
- providerId: c.state.providerId, - sandboxId: c.state.sandboxId, - error: resolveErrorMessage(error), - }); - return await persist.listSessions({ - cursor: command?.cursor, - limit: command?.limit, - }); - } - }, - - async listSessionEvents(c: any, command: ListSessionEventsCommand): Promise<{ items: SessionEvent[]; nextCursor?: string }> { - const persist = new SandboxInstancePersistDriver(c.db); - return await persist.listEvents({ - sessionId: command.sessionId, - cursor: command.cursor, - limit: command.limit, - }); - }, - - async sendPrompt(c, command: SendPromptCommand): Promise { - const self = selfSandboxInstance(c); - await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.sendPrompt"), command, { - wait: true, - timeout: 5 * 60_000, - }); - }, - - async cancelSession(c, command: SessionControlCommand): Promise { - const self = selfSandboxInstance(c); - await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.cancelSession"), command, { - wait: true, - timeout: 60_000, - }); - }, - - async destroySession(c, command: SessionControlCommand): Promise { - const self = selfSandboxInstance(c); - await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.destroySession"), command, { - wait: true, - timeout: 60_000, - }); - }, - - async sessionStatus(c, command: SessionStatusCommand): Promise<{ id: string; status: "running" | "idle" | "error" }> { - return await derivePersistedSessionStatus(new SandboxInstancePersistDriver(c.db), command.sessionId); - }, - }, - run: workflow(runSandboxInstanceWorkflow), -}); diff --git a/foundry/packages/backend/src/actors/sandbox-instance/persist.ts b/foundry/packages/backend/src/actors/sandbox-instance/persist.ts deleted file mode 100644 index 5400e30..0000000 --- a/foundry/packages/backend/src/actors/sandbox-instance/persist.ts +++ /dev/null @@ -1,266 +0,0 @@ -import { and, asc, count, eq } from "drizzle-orm"; -import type { ListEventsRequest, ListPage, ListPageRequest, 
SessionEvent, SessionPersistDriver, SessionRecord } from "sandbox-agent"; -import { sandboxSessionEvents, sandboxSessions } from "./db/schema.js"; - -const DEFAULT_MAX_SESSIONS = 1024; -const DEFAULT_MAX_EVENTS_PER_SESSION = 500; -const DEFAULT_LIST_LIMIT = 100; - -function normalizeCap(value: number | undefined, fallback: number): number { - if (!Number.isFinite(value) || (value ?? 0) < 1) { - return fallback; - } - return Math.floor(value as number); -} - -function parseCursor(cursor: string | undefined): number { - if (!cursor) return 0; - const parsed = Number.parseInt(cursor, 10); - if (!Number.isFinite(parsed) || parsed < 0) return 0; - return parsed; -} - -export function resolveEventListOffset(params: { cursor?: string; total: number; limit: number }): number { - if (params.cursor != null) { - return parseCursor(params.cursor); - } - return Math.max(0, params.total - params.limit); -} - -function safeStringify(value: unknown): string { - return JSON.stringify(value, (_key, item) => { - if (typeof item === "bigint") return item.toString(); - return item; - }); -} - -function safeParseJson(value: string | null | undefined, fallback: T): T { - if (!value) return fallback; - try { - return JSON.parse(value) as T; - } catch { - return fallback; - } -} - -export interface SandboxInstancePersistDriverOptions { - maxSessions?: number; - maxEventsPerSession?: number; -} - -export class SandboxInstancePersistDriver implements SessionPersistDriver { - private readonly maxSessions: number; - private readonly maxEventsPerSession: number; - - constructor( - private readonly db: any, - options: SandboxInstancePersistDriverOptions = {}, - ) { - this.maxSessions = normalizeCap(options.maxSessions, DEFAULT_MAX_SESSIONS); - this.maxEventsPerSession = normalizeCap(options.maxEventsPerSession, DEFAULT_MAX_EVENTS_PER_SESSION); - } - - async getSession(id: string): Promise { - const row = await this.db - .select({ - id: sandboxSessions.id, - agent: sandboxSessions.agent, - 
agentSessionId: sandboxSessions.agentSessionId, - lastConnectionId: sandboxSessions.lastConnectionId, - createdAt: sandboxSessions.createdAt, - destroyedAt: sandboxSessions.destroyedAt, - sessionInitJson: sandboxSessions.sessionInitJson, - }) - .from(sandboxSessions) - .where(eq(sandboxSessions.id, id)) - .get(); - - if (!row) return null; - - return { - id: row.id, - agent: row.agent, - agentSessionId: row.agentSessionId, - lastConnectionId: row.lastConnectionId, - createdAt: row.createdAt, - destroyedAt: row.destroyedAt ?? undefined, - sessionInit: safeParseJson(row.sessionInitJson, undefined), - }; - } - - async listSessions(request: ListPageRequest = {}): Promise> { - const offset = parseCursor(request.cursor); - const limit = normalizeCap(request.limit, DEFAULT_LIST_LIMIT); - - const rows = await this.db - .select({ - id: sandboxSessions.id, - agent: sandboxSessions.agent, - agentSessionId: sandboxSessions.agentSessionId, - lastConnectionId: sandboxSessions.lastConnectionId, - createdAt: sandboxSessions.createdAt, - destroyedAt: sandboxSessions.destroyedAt, - sessionInitJson: sandboxSessions.sessionInitJson, - }) - .from(sandboxSessions) - .orderBy(asc(sandboxSessions.createdAt), asc(sandboxSessions.id)) - .limit(limit) - .offset(offset) - .all(); - - const items = rows.map((row) => ({ - id: row.id, - agent: row.agent, - agentSessionId: row.agentSessionId, - lastConnectionId: row.lastConnectionId, - createdAt: row.createdAt, - destroyedAt: row.destroyedAt ?? undefined, - sessionInit: safeParseJson(row.sessionInitJson, undefined), - })); - - const totalRow = await this.db.select({ c: count() }).from(sandboxSessions).get(); - const total = Number(totalRow?.c ?? 0); - - const nextOffset = offset + items.length; - return { - items, - nextCursor: nextOffset < total ? 
String(nextOffset) : undefined, - }; - } - - async updateSession(session: SessionRecord): Promise { - const now = Date.now(); - await this.db - .insert(sandboxSessions) - .values({ - id: session.id, - agent: session.agent, - agentSessionId: session.agentSessionId, - lastConnectionId: session.lastConnectionId, - createdAt: session.createdAt ?? now, - destroyedAt: session.destroyedAt ?? null, - sessionInitJson: session.sessionInit ? safeStringify(session.sessionInit) : null, - }) - .onConflictDoUpdate({ - target: sandboxSessions.id, - set: { - agent: session.agent, - agentSessionId: session.agentSessionId, - lastConnectionId: session.lastConnectionId, - createdAt: session.createdAt ?? now, - destroyedAt: session.destroyedAt ?? null, - sessionInitJson: session.sessionInit ? safeStringify(session.sessionInit) : null, - }, - }) - .run(); - - // Evict oldest sessions beyond cap. - const totalRow = await this.db.select({ c: count() }).from(sandboxSessions).get(); - const total = Number(totalRow?.c ?? 0); - const overflow = total - this.maxSessions; - if (overflow <= 0) return; - - const toRemove = await this.db - .select({ id: sandboxSessions.id }) - .from(sandboxSessions) - .orderBy(asc(sandboxSessions.createdAt), asc(sandboxSessions.id)) - .limit(overflow) - .all(); - - for (const row of toRemove) { - await this.db.delete(sandboxSessionEvents).where(eq(sandboxSessionEvents.sessionId, row.id)).run(); - await this.db.delete(sandboxSessions).where(eq(sandboxSessions.id, row.id)).run(); - } - } - - async listEvents(request: ListEventsRequest): Promise> { - const limit = normalizeCap(request.limit, DEFAULT_LIST_LIMIT); - const totalRow = await this.db.select({ c: count() }).from(sandboxSessionEvents).where(eq(sandboxSessionEvents.sessionId, request.sessionId)).get(); - const total = Number(totalRow?.c ?? 
0); - const offset = resolveEventListOffset({ - cursor: request.cursor, - total, - limit, - }); - - const rows = await this.db - .select({ - id: sandboxSessionEvents.id, - sessionId: sandboxSessionEvents.sessionId, - eventIndex: sandboxSessionEvents.eventIndex, - createdAt: sandboxSessionEvents.createdAt, - connectionId: sandboxSessionEvents.connectionId, - sender: sandboxSessionEvents.sender, - payloadJson: sandboxSessionEvents.payloadJson, - }) - .from(sandboxSessionEvents) - .where(eq(sandboxSessionEvents.sessionId, request.sessionId)) - .orderBy(asc(sandboxSessionEvents.eventIndex), asc(sandboxSessionEvents.id)) - .limit(limit) - .offset(offset) - .all(); - - const items: SessionEvent[] = rows.map((row) => ({ - id: row.id, - eventIndex: row.eventIndex, - sessionId: row.sessionId, - createdAt: row.createdAt, - connectionId: row.connectionId, - sender: row.sender as any, - payload: safeParseJson(row.payloadJson, null), - })); - - const nextOffset = offset + items.length; - return { - items, - nextCursor: nextOffset < total ? String(nextOffset) : undefined, - }; - } - - async insertEvent(event: SessionEvent): Promise { - await this.db - .insert(sandboxSessionEvents) - .values({ - id: event.id, - sessionId: event.sessionId, - eventIndex: event.eventIndex, - createdAt: event.createdAt, - connectionId: event.connectionId, - sender: event.sender, - payloadJson: safeStringify(event.payload), - }) - .onConflictDoUpdate({ - target: sandboxSessionEvents.id, - set: { - sessionId: event.sessionId, - eventIndex: event.eventIndex, - createdAt: event.createdAt, - connectionId: event.connectionId, - sender: event.sender, - payloadJson: safeStringify(event.payload), - }, - }) - .run(); - - // Trim oldest events beyond cap. - const totalRow = await this.db.select({ c: count() }).from(sandboxSessionEvents).where(eq(sandboxSessionEvents.sessionId, event.sessionId)).get(); - const total = Number(totalRow?.c ?? 
0); - const overflow = total - this.maxEventsPerSession; - if (overflow <= 0) return; - - const toRemove = await this.db - .select({ id: sandboxSessionEvents.id }) - .from(sandboxSessionEvents) - .where(eq(sandboxSessionEvents.sessionId, event.sessionId)) - .orderBy(asc(sandboxSessionEvents.eventIndex), asc(sandboxSessionEvents.id)) - .limit(overflow) - .all(); - - for (const row of toRemove) { - await this.db - .delete(sandboxSessionEvents) - .where(and(eq(sandboxSessionEvents.sessionId, event.sessionId), eq(sandboxSessionEvents.id, row.id))) - .run(); - } - } -} diff --git a/foundry/packages/backend/src/actors/sandbox/index.ts b/foundry/packages/backend/src/actors/sandbox/index.ts new file mode 100644 index 0000000..e65c151 --- /dev/null +++ b/foundry/packages/backend/src/actors/sandbox/index.ts @@ -0,0 +1,401 @@ +import { actor } from "rivetkit"; +import { e2b, sandboxActor } from "rivetkit/sandbox"; +import { existsSync } from "node:fs"; +import Dockerode from "dockerode"; +import { SandboxAgent } from "sandbox-agent"; +import { getActorRuntimeContext } from "../context.js"; +import { workspaceKey } from "../keys.js"; +import { resolveSandboxProviderId } from "../../sandbox-config.js"; + +const SANDBOX_REPO_CWD = "/home/sandbox/workspace/repo"; +const DEFAULT_LOCAL_SANDBOX_IMAGE = "rivetdev/sandbox-agent:full"; +const DEFAULT_LOCAL_SANDBOX_PORT = 2468; +const dockerClient = new Dockerode({ socketPath: "/var/run/docker.sock" }); + +function parseTaskSandboxKey(key: readonly string[]): { workspaceId: string; taskId: string } { + if (key.length !== 4 || key[0] !== "ws" || key[2] !== "sandbox") { + throw new Error(`Invalid task sandbox key: ${JSON.stringify(key)}`); + } + + return { + workspaceId: key[1]!, + taskId: key[3]!, + }; +} + +function preferredDockerHost(): string { + if (process.env.FOUNDRY_DOCKER_HOST?.trim()) { + return process.env.FOUNDRY_DOCKER_HOST.trim(); + } + + return existsSync("/.dockerenv") ? 
"host.docker.internal" : "127.0.0.1"; +} + +function preferredPublicDockerHost(): string { + if (process.env.FOUNDRY_PUBLIC_SANDBOX_HOST?.trim()) { + return process.env.FOUNDRY_PUBLIC_SANDBOX_HOST.trim(); + } + + return "127.0.0.1"; +} + +function localSandboxAgentPort(): number { + const raw = process.env.FOUNDRY_LOCAL_SANDBOX_PORT?.trim() ?? process.env.HF_LOCAL_SANDBOX_PORT?.trim() ?? ""; + const parsed = Number(raw); + if (Number.isInteger(parsed) && parsed > 0 && parsed <= 65535) { + return parsed; + } + return DEFAULT_LOCAL_SANDBOX_PORT; +} + +function sandboxEnvPairs(): string[] { + const openAiApiKey = process.env.OPENAI_API_KEY; + const entries = [ + ["ANTHROPIC_API_KEY", process.env.ANTHROPIC_API_KEY], + ["CLAUDE_API_KEY", process.env.CLAUDE_API_KEY ?? process.env.ANTHROPIC_API_KEY], + ["OPENAI_API_KEY", openAiApiKey], + // Codex ACP prefers CODEX_API_KEY when present. In dev we want that to be the + // actual OpenAI API key, not an unrelated local Codex auth token. + ["CODEX_API_KEY", openAiApiKey ?? process.env.CODEX_API_KEY], + ["GH_TOKEN", process.env.GH_TOKEN ?? process.env.GITHUB_TOKEN], + ["GITHUB_TOKEN", process.env.GITHUB_TOKEN ?? 
process.env.GH_TOKEN], + ["E2B_API_KEY", process.env.E2B_API_KEY], + ]; + + return entries + .filter((entry): entry is [string, string] => typeof entry[1] === "string" && entry[1].trim().length > 0) + .map(([key, value]) => `${key}=${value}`); +} + +function sandboxEnvObject(): Record { + return Object.fromEntries( + sandboxEnvPairs().map((entry) => { + const [key, ...rest] = entry.split("="); + return [key!, rest.join("=")]; + }), + ); +} + +function modeIdForAgent(agent?: string | null): string | null { + switch (agent) { + case "codex": + return "full-access"; + case "claude": + return "acceptEdits"; + default: + return null; + } +} + +async function getPublishedDockerPort(sandboxId: string, containerPort: number): Promise { + const info = await dockerClient.getContainer(sandboxId).inspect(); + const hostPort = info.NetworkSettings?.Ports?.[`${containerPort}/tcp`]?.[0]?.HostPort; + if (!hostPort) { + throw new Error(`docker sandbox-agent port ${containerPort} is not published`); + } + return Number(hostPort); +} + +function createLocalSandboxProvider(image: string): any { + const agentPort = localSandboxAgentPort(); + const backendHost = preferredDockerHost(); + const publicHost = preferredPublicDockerHost(); + + return { + name: "docker", + + async create(_context: any): Promise { + const container = await dockerClient.createContainer({ + Image: image, + Cmd: ["server", "--no-token", "--host", "0.0.0.0", "--port", String(agentPort)], + Env: sandboxEnvPairs(), + ExposedPorts: { + [`${agentPort}/tcp`]: {}, + }, + HostConfig: { + AutoRemove: true, + PortBindings: { + [`${agentPort}/tcp`]: [{ HostPort: "0" }], + }, + }, + }); + + await container.start(); + return container.id; + }, + + async destroy(sandboxId: string): Promise { + const container = dockerClient.getContainer(sandboxId); + try { + await container.stop({ t: 5 }); + } catch {} + try { + await container.remove({ force: true }); + } catch {} + }, + + async getUrl(sandboxId: string): Promise { + const 
hostPort = await getPublishedDockerPort(sandboxId, agentPort); + return `http://${publicHost}:${hostPort}`; + }, + + async connectAgent(sandboxId: string, connectOptions: any): Promise { + const hostPort = await getPublishedDockerPort(sandboxId, agentPort); + return await SandboxAgent.connect({ + baseUrl: `http://${backendHost}:${hostPort}`, + ...connectOptions, + }); + }, + }; +} + +function sanitizeActorResult(value: unknown, seen = new WeakSet()): unknown { + if (typeof value === "function" || value === undefined) { + return undefined; + } + + if (value && typeof value === "object") { + const maybeToRecord = (value as { toRecord?: unknown }).toRecord; + if (typeof maybeToRecord === "function") { + return sanitizeActorResult(maybeToRecord.call(value), seen); + } + } + + if (value === null || typeof value !== "object") { + return value; + } + + if (value instanceof Date) { + return value.toISOString(); + } + + if (Array.isArray(value)) { + return value.map((entry) => sanitizeActorResult(entry, seen)).filter((entry) => entry !== undefined); + } + + if (seen.has(value)) { + return undefined; + } + seen.add(value); + + const next: Record = {}; + for (const [key, entry] of Object.entries(value)) { + const sanitized = sanitizeActorResult(entry, seen); + if (sanitized !== undefined) { + next[key] = sanitized; + } + } + return next; +} + +const baseTaskSandbox = sandboxActor({ + createProvider: async (c) => { + const { config } = getActorRuntimeContext(); + const { workspaceId, taskId } = parseTaskSandboxKey(c.key); + const workspace = await c.client().workspace.getOrCreate(workspaceKey(workspaceId), { + createWithInput: workspaceId, + }); + const task = await workspace.getTask({ workspaceId, taskId }); + const providerId = resolveSandboxProviderId(config, task.providerId); + + if (providerId === "e2b") { + return e2b({ + create: () => ({ + template: config.providers.e2b.template ?? 
"sandbox-agent-full-0.3.x", + envs: sandboxEnvObject(), + }), + installAgents: ["claude", "codex"], + }); + } + + return createLocalSandboxProvider(config.providers.local.image ?? process.env.HF_LOCAL_SANDBOX_IMAGE ?? DEFAULT_LOCAL_SANDBOX_IMAGE); + }, +}); + +async function broadcastProcesses(c: any, actions: Record Promise>): Promise { + try { + const listed = await actions.listProcesses(c); + c.broadcast("processesUpdated", { + type: "processesUpdated", + processes: listed.processes ?? [], + }); + } catch { + // Process broadcasts are best-effort. Callers still receive the primary action result. + } +} + +async function providerForConnection(c: any): Promise { + if (c.state.sandboxDestroyed || !c.state.sandboxId) { + return null; + } + + if (c.vars.provider) { + return c.vars.provider; + } + + const providerFactory = baseTaskSandbox.config.actions as Record; + void providerFactory; + const { config } = getActorRuntimeContext(); + const { workspaceId, taskId } = parseTaskSandboxKey(c.key); + const workspace = await c.client().workspace.getOrCreate(workspaceKey(workspaceId), { + createWithInput: workspaceId, + }); + const task = await workspace.getTask({ workspaceId, taskId }); + const providerId = resolveSandboxProviderId(config, task.providerId); + + const provider = + providerId === "e2b" + ? e2b({ + create: () => ({ + template: config.providers.e2b.template ?? "sandbox-agent-full-0.3.x", + envs: sandboxEnvObject(), + }), + installAgents: ["claude", "codex"], + }) + : createLocalSandboxProvider(config.providers.local.image ?? process.env.HF_LOCAL_SANDBOX_IMAGE ?? 
DEFAULT_LOCAL_SANDBOX_IMAGE); + + c.vars.provider = provider; + return provider; +} + +const baseActions = baseTaskSandbox.config.actions as Record Promise>; + +export const taskSandbox = actor({ + ...baseTaskSandbox.config, + options: { + ...baseTaskSandbox.config.options, + actionTimeout: 10 * 60_000, + }, + actions: { + ...baseActions, + async createSession(c: any, request: any): Promise { + const session = await baseActions.createSession(c, request); + const sessionId = typeof request?.id === "string" && request.id.length > 0 ? request.id : session?.id; + const modeId = modeIdForAgent(request?.agent); + if (sessionId && modeId) { + try { + await baseActions.rawSendSessionMethod(c, sessionId, "session/set_mode", { modeId }); + } catch { + // Session mode updates are best-effort. + } + } + return sanitizeActorResult(session); + }, + + async resumeSession(c: any, sessionId: string): Promise { + return sanitizeActorResult(await baseActions.resumeSession(c, sessionId)); + }, + + async resumeOrCreateSession(c: any, request: any): Promise { + return sanitizeActorResult(await baseActions.resumeOrCreateSession(c, request)); + }, + + async getSession(c: any, sessionId: string): Promise { + return sanitizeActorResult(await baseActions.getSession(c, sessionId)); + }, + + async listSessions(c: any, query?: any): Promise { + return sanitizeActorResult(await baseActions.listSessions(c, query)); + }, + + async destroySession(c: any, sessionId: string): Promise { + return sanitizeActorResult(await baseActions.destroySession(c, sessionId)); + }, + + async sendPrompt(c: any, request: { sessionId: string; prompt: string }): Promise { + const text = typeof request?.prompt === "string" ? 
request.prompt.trim() : ""; + if (!text) { + return null; + } + + const session = await baseActions.resumeSession(c, request.sessionId); + if (!session || typeof session.prompt !== "function") { + throw new Error(`session '${request.sessionId}' not found`); + } + + return sanitizeActorResult(await session.prompt([{ type: "text", text }])); + }, + + async createProcess(c: any, request: any): Promise { + const created = await baseActions.createProcess(c, request); + await broadcastProcesses(c, baseActions); + return created; + }, + + async runProcess(c: any, request: any): Promise { + const result = await baseActions.runProcess(c, request); + await broadcastProcesses(c, baseActions); + return result; + }, + + async stopProcess(c: any, processId: string, query?: any): Promise { + const stopped = await baseActions.stopProcess(c, processId, query); + await broadcastProcesses(c, baseActions); + return stopped; + }, + + async killProcess(c: any, processId: string, query?: any): Promise { + const killed = await baseActions.killProcess(c, processId, query); + await broadcastProcesses(c, baseActions); + return killed; + }, + + async deleteProcess(c: any, processId: string): Promise { + await baseActions.deleteProcess(c, processId); + await broadcastProcesses(c, baseActions); + }, + + async sandboxAgentConnection(c: any): Promise<{ endpoint: string; token?: string }> { + const provider = await providerForConnection(c); + if (!provider || !c.state.sandboxId) { + return { endpoint: "mock://terminal-unavailable" }; + } + + try { + return { + endpoint: await provider.getUrl(c.state.sandboxId), + }; + } catch { + return { endpoint: "mock://terminal-unavailable" }; + } + }, + + async providerState(c: any): Promise<{ providerId: "e2b" | "local"; sandboxId: string; state: string; at: number }> { + const { config } = getActorRuntimeContext(); + const { taskId } = parseTaskSandboxKey(c.key); + const at = Date.now(); + const providerId = resolveSandboxProviderId(config, 
c.state.providerName === "e2b" ? "e2b" : c.state.providerName === "docker" ? "local" : null); + + if (c.state.sandboxDestroyed) { + return { providerId, sandboxId: taskId, state: "destroyed", at }; + } + + if (!c.state.sandboxId) { + return { providerId, sandboxId: taskId, state: "pending", at }; + } + + try { + const health = await baseActions.getHealth(c); + return { + providerId, + sandboxId: taskId, + state: health.status === "ok" ? "running" : "degraded", + at, + }; + } catch { + return { + providerId, + sandboxId: taskId, + state: "error", + at, + }; + } + }, + + async repoCwd(): Promise<{ cwd: string }> { + return { cwd: SANDBOX_REPO_CWD }; + }, + }, +}); + +export { SANDBOX_REPO_CWD }; diff --git a/foundry/packages/backend/src/actors/task-status-sync/index.ts b/foundry/packages/backend/src/actors/task-status-sync/index.ts deleted file mode 100644 index 759cbe4..0000000 --- a/foundry/packages/backend/src/actors/task-status-sync/index.ts +++ /dev/null @@ -1,110 +0,0 @@ -import { actor, queue } from "rivetkit"; -import { workflow } from "rivetkit/workflow"; -import type { ProviderId } from "@sandbox-agent/foundry-shared"; -import { getTask, getSandboxInstance, selfTaskStatusSync } from "../handles.js"; -import { logActorWarning, resolveErrorMessage, resolveErrorStack } from "../logging.js"; -import { type PollingControlState, runWorkflowPollingLoop } from "../polling.js"; - -export interface TaskStatusSyncInput { - workspaceId: string; - repoId: string; - taskId: string; - providerId: ProviderId; - sandboxId: string; - sessionId: string; - intervalMs: number; -} - -interface SetIntervalCommand { - intervalMs: number; -} - -interface TaskStatusSyncState extends PollingControlState { - workspaceId: string; - repoId: string; - taskId: string; - providerId: ProviderId; - sandboxId: string; - sessionId: string; -} - -const CONTROL = { - start: "task.status_sync.control.start", - stop: "task.status_sync.control.stop", - setInterval: 
"task.status_sync.control.set_interval", - force: "task.status_sync.control.force", -} as const; - -async function pollSessionStatus(c: { state: TaskStatusSyncState }): Promise { - const sandboxInstance = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, c.state.sandboxId); - const status = await sandboxInstance.sessionStatus({ sessionId: c.state.sessionId }); - - const parent = getTask(c, c.state.workspaceId, c.state.repoId, c.state.taskId); - await parent.syncWorkbenchSessionStatus({ - sessionId: c.state.sessionId, - status: status.status, - at: Date.now(), - }); -} - -export const taskStatusSync = actor({ - queues: { - [CONTROL.start]: queue(), - [CONTROL.stop]: queue(), - [CONTROL.setInterval]: queue(), - [CONTROL.force]: queue(), - }, - options: { - name: "Task Status Sync", - icon: "signal", - // Polling actors rely on timer-based wakeups; sleeping would pause the timer and stop polling. - noSleep: true, - }, - createState: (_c, input: TaskStatusSyncInput): TaskStatusSyncState => ({ - workspaceId: input.workspaceId, - repoId: input.repoId, - taskId: input.taskId, - providerId: input.providerId, - sandboxId: input.sandboxId, - sessionId: input.sessionId, - intervalMs: input.intervalMs, - running: true, - }), - actions: { - async start(c): Promise { - const self = selfTaskStatusSync(c); - await self.send(CONTROL.start, {}, { wait: true, timeout: 15_000 }); - }, - - async stop(c): Promise { - const self = selfTaskStatusSync(c); - await self.send(CONTROL.stop, {}, { wait: true, timeout: 15_000 }); - }, - - async setIntervalMs(c, payload: SetIntervalCommand): Promise { - const self = selfTaskStatusSync(c); - await self.send(CONTROL.setInterval, payload, { wait: true, timeout: 15_000 }); - }, - - async force(c): Promise { - const self = selfTaskStatusSync(c); - await self.send(CONTROL.force, {}, { wait: true, timeout: 5 * 60_000 }); - }, - }, - run: workflow(async (ctx) => { - await runWorkflowPollingLoop(ctx, { - loopName: "task-status-sync-loop", - 
control: CONTROL, - onPoll: async (loopCtx) => { - try { - await pollSessionStatus(loopCtx); - } catch (error) { - logActorWarning("task-status-sync", "poll failed", { - error: resolveErrorMessage(error), - stack: resolveErrorStack(error), - }); - } - }, - }); - }), -}); diff --git a/foundry/packages/backend/src/actors/task/index.ts b/foundry/packages/backend/src/actors/task/index.ts index 8d9f418..6ea2e07 100644 --- a/foundry/packages/backend/src/actors/task/index.ts +++ b/foundry/packages/backend/src/actors/task/index.ts @@ -41,7 +41,7 @@ export interface TaskInput { repoId: string; taskId: string; repoRemote: string; - repoLocalPath: string; + repoLocalPath?: string; branchName: string | null; title: string | null; task: string; @@ -139,7 +139,7 @@ export const task = actor({ const self = selfTask(c); const result = await self.send(taskWorkflowQueueName("task.command.initialize"), cmd ?? {}, { wait: true, - timeout: 60_000, + timeout: 5 * 60_000, }); return expectQueueResponse(result); }, diff --git a/foundry/packages/backend/src/actors/task/workbench.ts b/foundry/packages/backend/src/actors/task/workbench.ts index 0d00e77..12aed4e 100644 --- a/foundry/packages/backend/src/actors/task/workbench.ts +++ b/foundry/packages/backend/src/actors/task/workbench.ts @@ -1,15 +1,14 @@ // @ts-nocheck import { randomUUID } from "node:crypto"; -import { basename } from "node:path"; +import { basename, dirname } from "node:path"; import { asc, eq } from "drizzle-orm"; import { getActorRuntimeContext } from "../context.js"; -import { getOrCreateTaskStatusSync, getOrCreateProject, getOrCreateWorkspace, getSandboxInstance, selfTask } from "../handles.js"; +import { getOrCreateProject, getOrCreateTaskSandbox, getOrCreateWorkspace, getTaskSandbox, selfTask } from "../handles.js"; +import { SANDBOX_REPO_CWD } from "../sandbox/index.js"; +import { resolveSandboxProviderId } from "../../sandbox-config.js"; import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js"; 
-import { task as taskTable, taskRuntime, taskWorkbenchSessions } from "./db/schema.js"; +import { task as taskTable, taskRuntime, taskSandboxes, taskWorkbenchSessions } from "./db/schema.js"; import { getCurrentRecord } from "./workflow/common.js"; -import { taskWorkflowQueueName } from "./workflow/queue.js"; - -const STATUS_SYNC_INTERVAL_MS = 1_000; function emptyGitState() { return { @@ -57,18 +56,22 @@ async function ensureTaskRuntimeCacheColumns(c: any): Promise { } function defaultModelForAgent(agentType: string | null | undefined) { - return agentType === "codex" ? "gpt-4o" : "claude-sonnet-4"; + return agentType === "codex" ? "gpt-5.3-codex" : "claude-sonnet-4"; +} + +function isCodexModel(model: string) { + return model.startsWith("gpt-") || model.startsWith("o"); } function agentKindForModel(model: string) { - if (model === "gpt-4o" || model === "o3") { + if (isCodexModel(model)) { return "Codex"; } return "Claude"; } export function agentTypeForModel(model: string) { - if (model === "gpt-4o" || model === "o3") { + if (isCodexModel(model)) { return "codex"; } return "claude"; @@ -291,6 +294,121 @@ function shellFragment(parts: string[]): string { return parts.join(" && "); } +function stableSandboxId(c: any): string { + return c.state.taskId; +} + +async function getTaskSandboxRuntime( + c: any, + record: any, +): Promise<{ + sandbox: any; + sandboxId: string; + providerId: string; + switchTarget: string; + cwd: string; +}> { + const { config } = getActorRuntimeContext(); + const sandboxId = stableSandboxId(c); + const providerId = resolveSandboxProviderId(config, record.providerId ?? c.state.providerId ?? null); + const sandbox = await getOrCreateTaskSandbox(c, c.state.workspaceId, sandboxId, {}); + const actorId = typeof sandbox.resolve === "function" ? await sandbox.resolve().catch(() => null) : null; + const switchTarget = providerId === "local" ? 
`sandbox://local/${sandboxId}` : `sandbox://e2b/${sandboxId}`; + const now = Date.now(); + + await c.db + .insert(taskSandboxes) + .values({ + sandboxId, + providerId, + sandboxActorId: typeof actorId === "string" ? actorId : null, + switchTarget, + cwd: SANDBOX_REPO_CWD, + statusMessage: "sandbox ready", + createdAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: taskSandboxes.sandboxId, + set: { + providerId, + sandboxActorId: typeof actorId === "string" ? actorId : null, + switchTarget, + cwd: SANDBOX_REPO_CWD, + updatedAt: now, + }, + }) + .run(); + + await c.db + .update(taskRuntime) + .set({ + activeSandboxId: sandboxId, + activeSwitchTarget: switchTarget, + activeCwd: SANDBOX_REPO_CWD, + updatedAt: now, + }) + .where(eq(taskRuntime.id, 1)) + .run(); + + return { + sandbox, + sandboxId, + providerId, + switchTarget, + cwd: SANDBOX_REPO_CWD, + }; +} + +async function ensureSandboxRepo(c: any, sandbox: any, record: any): Promise { + if (!record.branchName) { + throw new Error("cannot prepare a sandbox repo before the task branch exists"); + } + + const { driver } = getActorRuntimeContext(); + const auth = await resolveWorkspaceGithubAuth(c, c.state.workspaceId); + let repoLocalPath = c.state.repoLocalPath; + if (!repoLocalPath) { + const project = await getOrCreateProject(c, c.state.workspaceId, c.state.repoId, c.state.repoRemote); + const ensured = await project.ensure({ remoteUrl: c.state.repoRemote }); + repoLocalPath = ensured.localPath; + c.state.repoLocalPath = repoLocalPath; + } + + const baseRef = await driver.git.remoteDefaultBaseRef(repoLocalPath); + const sandboxRepoRoot = dirname(SANDBOX_REPO_CWD); + const script = [ + "set -euo pipefail", + `mkdir -p ${JSON.stringify(sandboxRepoRoot)}`, + "git config --global credential.helper '!f() { echo username=x-access-token; echo password=${GH_TOKEN:-$GITHUB_TOKEN}; }; f'", + `if [ ! 
-d ${JSON.stringify(`${SANDBOX_REPO_CWD}/.git`)} ]; then rm -rf ${JSON.stringify(SANDBOX_REPO_CWD)} && git clone ${JSON.stringify( + c.state.repoRemote, + )} ${JSON.stringify(SANDBOX_REPO_CWD)}; fi`, + `cd ${JSON.stringify(SANDBOX_REPO_CWD)}`, + "git fetch origin --prune", + `if git show-ref --verify --quiet refs/remotes/origin/${JSON.stringify(record.branchName).slice(1, -1)}; then target_ref=${JSON.stringify( + `origin/${record.branchName}`, + )}; else target_ref=${JSON.stringify(baseRef)}; fi`, + `git checkout -B ${JSON.stringify(record.branchName)} \"$target_ref\"`, + ]; + const result = await sandbox.runProcess({ + command: "bash", + args: ["-lc", script.join("; ")], + cwd: "/", + env: auth?.githubToken + ? { + GH_TOKEN: auth.githubToken, + GITHUB_TOKEN: auth.githubToken, + } + : undefined, + timeoutMs: 5 * 60_000, + }); + + if ((result.exitCode ?? 0) !== 0) { + throw new Error(`sandbox repo preparation failed (${result.exitCode ?? 1}): ${[result.stdout, result.stderr].filter(Boolean).join("")}`); + } +} + async function executeInSandbox( c: any, params: { @@ -300,14 +418,20 @@ async function executeInSandbox( label: string; }, ): Promise<{ exitCode: number; result: string }> { - const { providers } = getActorRuntimeContext(); - const provider = providers.get(c.state.providerId); - return await provider.executeCommand({ - workspaceId: c.state.workspaceId, - sandboxId: params.sandboxId, - command: `bash -lc ${JSON.stringify(shellFragment([`cd ${JSON.stringify(params.cwd)}`, params.command]))}`, - label: params.label, + const record = await ensureWorkbenchSeeded(c); + const runtime = await getTaskSandboxRuntime(c, record); + await ensureSandboxRepo(c, runtime.sandbox, record); + const response = await runtime.sandbox.runProcess({ + command: "bash", + args: ["-lc", shellFragment([`cd ${JSON.stringify(params.cwd)}`, params.command])], + cwd: "/", + timeoutMs: 5 * 60_000, }); + + return { + exitCode: response.exitCode ?? 
0, + result: [response.stdout, response.stderr].filter(Boolean).join(""), + }; } function parseGitStatus(output: string): Array<{ path: string; type: "M" | "A" | "D" }> { @@ -501,13 +625,13 @@ async function writeCachedGitState(c: any, gitState: { fileChanges: Array; } async function readSessionTranscript(c: any, record: any, sessionId: string) { - const sandboxId = record.activeSandboxId ?? record.sandboxes?.[0]?.sandboxId ?? null; + const sandboxId = record.activeSandboxId ?? stableSandboxId(c); if (!sandboxId) { return []; } - const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, sandboxId); - const page = await sandbox.listSessionEvents({ + const sandbox = getTaskSandbox(c, c.state.workspaceId, sandboxId); + const page = await sandbox.getEvents({ sessionId, limit: 100, }); @@ -599,7 +723,13 @@ export async function ensureWorkbenchSeeded(c: any): Promise { function buildSessionSummary(record: any, meta: any): any { const derivedSandboxSessionId = meta.sandboxSessionId ?? (meta.status === "pending_provision" && record.activeSessionId ? record.activeSessionId : null); const sessionStatus = - meta.status === "ready" && derivedSandboxSessionId ? activeSessionStatus(record, derivedSandboxSessionId) : meta.status === "error" ? "error" : "idle"; + meta.status === "pending_provision" || meta.status === "pending_session_create" + ? meta.status + : meta.status === "ready" && derivedSandboxSessionId + ? activeSessionStatus(record, derivedSandboxSessionId) + : meta.status === "error" + ? "error" + : "ready"; let thinkingSinceMs = meta.thinkingSinceMs ?? null; let unread = Boolean(meta.unread); if (thinkingSinceMs && sessionStatus !== "running") { @@ -617,6 +747,7 @@ function buildSessionSummary(record: any, meta: any): any { thinkingSinceMs: sessionStatus === "running" ? thinkingSinceMs : null, unread, created: Boolean(meta.created || derivedSandboxSessionId), + errorMessage: meta.errorMessage ?? 
null, }; } @@ -633,6 +764,7 @@ function buildSessionDetailFromMeta(record: any, meta: any): any { thinkingSinceMs: summary.thinkingSinceMs, unread: summary.unread, created: summary.created, + errorMessage: summary.errorMessage, draft: { text: meta.draftText ?? "", attachments: Array.isArray(meta.draftAttachments) ? meta.draftAttachments : [], @@ -655,7 +787,7 @@ export async function buildTaskSummary(c: any): Promise { id: c.state.taskId, repoId: c.state.repoId, title: record.title ?? "New Task", - status: record.status === "archived" ? "archived" : record.status === "running" ? "running" : record.status === "idle" ? "idle" : "new", + status: record.status ?? "new", repoName: repoLabelFromRemote(c.state.repoRemote), updatedAtMs: record.updatedAt, branch: record.branchName, @@ -708,6 +840,24 @@ export async function buildSessionDetail(c: any, tabId: string): Promise { throw new Error(`Unknown workbench session tab: ${tabId}`); } + if (!meta.sandboxSessionId) { + return buildSessionDetailFromMeta(record, meta); + } + + try { + const transcript = await readSessionTranscript(c, record, meta.sandboxSessionId); + if (JSON.stringify(meta.transcript ?? []) !== JSON.stringify(transcript)) { + await writeSessionTranscript(c, meta.tabId, transcript); + return buildSessionDetailFromMeta(record, { + ...meta, + transcript, + transcriptUpdatedAt: Date.now(), + }); + } + } catch { + // Session detail reads should degrade to cached transcript data if the live sandbox is unavailable. + } + return buildSessionDetailFromMeta(record, meta); } @@ -836,50 +986,16 @@ export async function renameWorkbenchBranch(c: any, value: string): Promise { - let record = await ensureWorkbenchSeeded(c); - if (!record.activeSandboxId) { - // Fire-and-forget: enqueue provisioning without waiting to avoid self-deadlock - // (this handler already runs inside the task workflow loop, so wait:true would deadlock). - const providerId = record.providerId ?? c.state.providerId ?? 
getActorRuntimeContext().providers.defaultProviderId(); - await selfTask(c).send(taskWorkflowQueueName("task.command.provision"), { providerId }, { wait: false }); - throw new Error("sandbox is provisioning — retry shortly"); - } - - if (record.activeSessionId) { - const existingSessions = await listSessionMetaRows(c); - if (existingSessions.length === 0) { - await ensureSessionMeta(c, { - tabId: record.activeSessionId, - sandboxSessionId: record.activeSessionId, - model: model ?? defaultModelForAgent(record.agentType), - sessionName: "Session 1", - status: "ready", - }); - await broadcastTaskUpdate(c, { sessionId: record.activeSessionId }); - return { tabId: record.activeSessionId }; - } - } - const tabId = `tab-${randomUUID()}`; + const record = await ensureWorkbenchSeeded(c); await ensureSessionMeta(c, { tabId, model: model ?? defaultModelForAgent(record.agentType), + sandboxSessionId: tabId, status: record.activeSandboxId ? "pending_session_create" : "pending_provision", created: false, }); - - const providerId = record.providerId ?? c.state.providerId ?? getActorRuntimeContext().providers.defaultProviderId(); - const self = selfTask(c); - if (!record.activeSandboxId && !String(record.status ?? "").startsWith("init_")) { - await self.send("task.command.provision", { providerId }, { wait: false }); - } - await self.send( - "task.command.workbench.ensure_session", - { tabId, ...(model ? 
{ model } : {}) }, - { - wait: false, - }, - ); + await ensureWorkbenchSession(c, tabId, model); await broadcastTaskUpdate(c, { sessionId: tabId }); return { tabId }; } @@ -891,39 +1007,7 @@ export async function ensureWorkbenchSession(c: any, tabId: string, model?: stri } const record = await ensureWorkbenchSeeded(c); - if (!record.activeSandboxId) { - await updateSessionMeta(c, tabId, { - status: "pending_provision", - errorMessage: null, - }); - return; - } - - if (!meta.sandboxSessionId && record.activeSessionId && meta.status === "pending_provision") { - const existingTabForActiveSession = await readSessionMetaBySandboxSessionId(c, record.activeSessionId); - if (existingTabForActiveSession && existingTabForActiveSession.tabId !== tabId) { - await updateSessionMeta(c, existingTabForActiveSession.tabId, { - closed: 1, - }); - } - await updateSessionMeta(c, tabId, { - sandboxSessionId: record.activeSessionId, - status: "ready", - errorMessage: null, - created: 1, - }); - await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", { - sessionId: record.activeSessionId, - }); - await broadcastTaskUpdate(c, { sessionId: tabId }); - return; - } - - if (meta.sandboxSessionId) { - await updateSessionMeta(c, tabId, { - status: "ready", - errorMessage: null, - }); + if (meta.sandboxSessionId && meta.status === "ready") { await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", { sessionId: meta.sandboxSessionId, }); @@ -931,40 +1015,31 @@ export async function ensureWorkbenchSession(c: any, tabId: string, model?: stri return; } - const activeSandbox = (record.sandboxes ?? []).find((candidate: any) => candidate.sandboxId === record.activeSandboxId) ?? null; - const cwd = activeSandbox?.cwd ?? record.sandboxes?.[0]?.cwd ?? 
null; - if (!cwd) { - await updateSessionMeta(c, tabId, { - status: "error", - errorMessage: "cannot create session without a sandbox cwd", - }); - await broadcastTaskUpdate(c, { sessionId: tabId }); - return; - } - await updateSessionMeta(c, tabId, { + sandboxSessionId: meta.sandboxSessionId ?? tabId, status: "pending_session_create", errorMessage: null, }); try { - const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId); - const created = await sandbox.createSession({ - prompt: "", - cwd, + const runtime = await getTaskSandboxRuntime(c, record); + await ensureSandboxRepo(c, runtime.sandbox, record); + await runtime.sandbox.createSession({ + id: meta.sandboxSessionId ?? tabId, agent: agentTypeForModel(model ?? meta.model ?? defaultModelForAgent(record.agentType)), + model: model ?? meta.model ?? defaultModelForAgent(record.agentType), + sessionInit: { + cwd: runtime.cwd, + }, }); - if (!created.id) { - throw new Error(created.error ?? "sandbox-agent session creation failed"); - } await updateSessionMeta(c, tabId, { - sandboxSessionId: created.id, + sandboxSessionId: meta.sandboxSessionId ?? tabId, status: "ready", errorMessage: null, }); await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", { - sessionId: created.id, + sessionId: meta.sandboxSessionId ?? 
tabId, }); } catch (error) { await updateSessionMeta(c, tabId, { @@ -1031,26 +1106,17 @@ export async function changeWorkbenchModel(c: any, sessionId: string, model: str } export async function sendWorkbenchMessage(c: any, sessionId: string, text: string, attachments: Array): Promise { - const record = await ensureWorkbenchSeeded(c); - if (!record.activeSandboxId) { - throw new Error("cannot send message without an active sandbox"); - } - const meta = await requireReadySessionMeta(c, sessionId); - const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId); - const prompt = [text.trim(), ...attachments.map((attachment: any) => `@ ${attachment.filePath}:${attachment.lineNumber}\n${attachment.lineContent}`)] - .filter(Boolean) - .join("\n\n"); - if (!prompt) { + const record = await ensureWorkbenchSeeded(c); + const runtime = await getTaskSandboxRuntime(c, record); + await ensureSandboxRepo(c, runtime.sandbox, record); + const prompt = [text.trim(), ...attachments.map((attachment: any) => `@ ${attachment.filePath}:${attachment.lineNumber}\n${attachment.lineContent}`)].filter( + Boolean, + ); + if (prompt.length === 0) { throw new Error("message text is required"); } - await sandbox.sendPrompt({ - sessionId: meta.sandboxSessionId, - prompt, - notification: true, - }); - await updateSessionMeta(c, sessionId, { unread: 0, created: 1, @@ -1069,32 +1135,28 @@ export async function sendWorkbenchMessage(c: any, sessionId: string, text: stri .where(eq(taskRuntime.id, 1)) .run(); - const sync = await getOrCreateTaskStatusSync(c, c.state.workspaceId, c.state.repoId, c.state.taskId, record.activeSandboxId, meta.sandboxSessionId, { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - taskId: c.state.taskId, - providerId: c.state.providerId, - sandboxId: record.activeSandboxId, - sessionId: meta.sandboxSessionId, - intervalMs: STATUS_SYNC_INTERVAL_MS, - }); - await sync.setIntervalMs({ intervalMs: STATUS_SYNC_INTERVAL_MS }); - 
await sync.start(); - await sync.force(); - await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", { - sessionId: meta.sandboxSessionId, - }); - await broadcastTaskUpdate(c, { sessionId }); + await syncWorkbenchSessionStatus(c, meta.sandboxSessionId, "running", Date.now()); + + try { + await runtime.sandbox.sendPrompt({ + sessionId: meta.sandboxSessionId, + prompt: prompt.join("\n\n"), + }); + await syncWorkbenchSessionStatus(c, meta.sandboxSessionId, "idle", Date.now()); + } catch (error) { + await updateSessionMeta(c, sessionId, { + status: "error", + errorMessage: error instanceof Error ? error.message : String(error), + }); + await syncWorkbenchSessionStatus(c, meta.sandboxSessionId, "error", Date.now()); + throw error; + } } export async function stopWorkbenchSession(c: any, sessionId: string): Promise { - const record = await ensureWorkbenchSeeded(c); - if (!record.activeSandboxId) { - return; - } const meta = await requireReadySessionMeta(c, sessionId); - const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId); - await sandbox.cancelSession({ sessionId: meta.sandboxSessionId }); + const sandbox = getTaskSandbox(c, c.state.workspaceId, stableSandboxId(c)); + await sandbox.destroySession(meta.sandboxSessionId); await updateSessionMeta(c, sessionId, { thinkingSinceMs: null, }); @@ -1178,9 +1240,9 @@ export async function closeWorkbenchSession(c: any, sessionId: string): Promise< if (!meta) { return; } - if (record.activeSandboxId && meta.sandboxSessionId) { - const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId); - await sandbox.destroySession({ sessionId: meta.sandboxSessionId }); + if (meta.sandboxSessionId) { + const sandbox = getTaskSandbox(c, c.state.workspaceId, stableSandboxId(c)); + await sandbox.destroySession(meta.sandboxSessionId); } await updateSessionMeta(c, sessionId, { closed: 1, @@ -1216,9 +1278,16 @@ export async 
function publishWorkbenchPr(c: any): Promise { if (!record.branchName) { throw new Error("cannot publish PR without a branch"); } + let repoLocalPath = c.state.repoLocalPath; + if (!repoLocalPath) { + const project = await getOrCreateProject(c, c.state.workspaceId, c.state.repoId, c.state.repoRemote); + const result = await project.ensure({ remoteUrl: c.state.repoRemote }); + repoLocalPath = result.localPath; + c.state.repoLocalPath = repoLocalPath; + } const { driver } = getActorRuntimeContext(); const auth = await resolveWorkspaceGithubAuth(c, c.state.workspaceId); - const created = await driver.github.createPr(c.state.repoLocalPath, record.branchName, record.title ?? c.state.task, undefined, { + const created = await driver.github.createPr(repoLocalPath, record.branchName, record.title ?? c.state.task, undefined, { githubToken: auth?.githubToken ?? null, }); await c.db diff --git a/foundry/packages/backend/src/actors/task/workflow/commands.ts b/foundry/packages/backend/src/actors/task/workflow/commands.ts index cc72ebf..5e55b6c 100644 --- a/foundry/packages/backend/src/actors/task/workflow/commands.ts +++ b/foundry/packages/backend/src/actors/task/workflow/commands.ts @@ -1,7 +1,6 @@ // @ts-nocheck import { eq } from "drizzle-orm"; -import { getActorRuntimeContext } from "../../context.js"; -import { getOrCreateTaskStatusSync } from "../../handles.js"; +import { getTaskSandbox } from "../../handles.js"; import { logActorWarning, resolveErrorMessage } from "../../logging.js"; import { task as taskTable, taskRuntime } from "../db/schema.js"; import { TASK_ROW_ID, appendHistory, getCurrentRecord, setTaskState } from "./common.js"; @@ -25,21 +24,27 @@ async function withTimeout(promise: Promise, timeoutMs: number, label: str export async function handleAttachActivity(loopCtx: any, msg: any): Promise { const record = await getCurrentRecord(loopCtx); - const { providers } = getActorRuntimeContext(); - const activeSandbox = record.activeSandboxId ? 
(record.sandboxes.find((sb: any) => sb.sandboxId === record.activeSandboxId) ?? null) : null; - const provider = providers.get(activeSandbox?.providerId ?? record.providerId); - const target = await provider.attachTarget({ - workspaceId: loopCtx.state.workspaceId, - sandboxId: record.activeSandboxId ?? "", - }); + let target = record.sandboxes.find((sandbox: any) => sandbox.sandboxId === record.activeSandboxId)?.switchTarget ?? ""; + + if (record.activeSandboxId) { + try { + const sandbox = getTaskSandbox(loopCtx, loopCtx.state.workspaceId, record.activeSandboxId); + const connection = await sandbox.sandboxAgentConnection(); + if (typeof connection?.endpoint === "string" && connection.endpoint.length > 0) { + target = connection.endpoint; + } + } catch { + // Best effort; keep the last known switch target if the sandbox actor is unavailable. + } + } await appendHistory(loopCtx, "task.attach", { - target: target.target, + target, sessionId: record.activeSessionId, }); await msg.complete({ - target: target.target, + target, sessionId: record.activeSessionId, }); } @@ -71,63 +76,14 @@ export async function handleArchiveActivity(loopCtx: any, msg: any): Promise { + logActorWarning("task.commands", "failed to release sandbox during archive", { workspaceId: loopCtx.state.workspaceId, repoId: loopCtx.state.repoId, taskId: loopCtx.state.taskId, sandboxId: record.activeSandboxId, - sessionId: record.activeSessionId, - error: resolveErrorMessage(error), - }); - } - } - - if (record.activeSandboxId) { - await setTaskState(loopCtx, "archive_release_sandbox", "releasing sandbox"); - const { providers } = getActorRuntimeContext(); - const activeSandbox = record.sandboxes.find((sb: any) => sb.sandboxId === record.activeSandboxId) ?? null; - const provider = providers.get(activeSandbox?.providerId ?? 
record.providerId); - const workspaceId = loopCtx.state.workspaceId; - const repoId = loopCtx.state.repoId; - const taskId = loopCtx.state.taskId; - const sandboxId = record.activeSandboxId; - - // Do not block archive finalization on provider stop. Some provider stop calls can - // run longer than the synchronous archive UX budget. - void withTimeout( - provider.releaseSandbox({ - workspaceId, - sandboxId, - }), - 45_000, - "provider releaseSandbox", - ).catch((error) => { - logActorWarning("task.commands", "failed to release sandbox during archive", { - workspaceId, - repoId, - taskId, - sandboxId, error: resolveErrorMessage(error), }); }); @@ -150,13 +106,7 @@ export async function killDestroySandboxActivity(loopCtx: any): Promise { return; } - const { providers } = getActorRuntimeContext(); - const activeSandbox = record.sandboxes.find((sb: any) => sb.sandboxId === record.activeSandboxId) ?? null; - const provider = providers.get(activeSandbox?.providerId ?? record.providerId); - await provider.destroySandbox({ - workspaceId: loopCtx.state.workspaceId, - sandboxId: record.activeSandboxId, - }); + await getTaskSandbox(loopCtx, loopCtx.state.workspaceId, record.activeSandboxId).destroy(); } export async function killWriteDbActivity(loopCtx: any, msg: any): Promise { diff --git a/foundry/packages/backend/src/actors/task/workflow/index.ts b/foundry/packages/backend/src/actors/task/workflow/index.ts index 419d36d..7461c24 100644 --- a/foundry/packages/backend/src/actors/task/workflow/index.ts +++ b/foundry/packages/backend/src/actors/task/workflow/index.ts @@ -1,21 +1,13 @@ import { Loop } from "rivetkit/workflow"; -import { getActorRuntimeContext } from "../../context.js"; import { logActorWarning, resolveErrorMessage } from "../../logging.js"; import { getCurrentRecord } from "./common.js"; import { initAssertNameActivity, initBootstrapDbActivity, initCompleteActivity, - initCreateSandboxActivity, - initCreateSessionActivity, initEnqueueProvisionActivity, - 
initEnsureAgentActivity, initEnsureNameActivity, - initExposeSandboxActivity, initFailedActivity, - initStartSandboxInstanceActivity, - initStartStatusSyncActivity, - initWriteDbActivity, } from "./init.js"; import { handleArchiveActivity, @@ -27,7 +19,6 @@ import { killDestroySandboxActivity, killWriteDbActivity, } from "./commands.js"; -import { idleNotifyActivity, idleSubmitPrActivity, statusUpdateActivity } from "./status-sync.js"; import { TASK_QUEUE_NAMES } from "./queue.js"; import { changeWorkbenchModel, @@ -63,7 +54,6 @@ const commandHandlers: Record = { await loopCtx.step("init-enqueue-provision", async () => initEnqueueProvisionActivity(loopCtx, body)); await loopCtx.removed("init-dispatch-provision-v2", "step"); const currentRecord = await loopCtx.step("init-read-current-record", async () => getCurrentRecord(loopCtx)); - try { await msg.complete(currentRecord); } catch (error) { @@ -74,40 +64,26 @@ const commandHandlers: Record = { }, "task.command.provision": async (loopCtx, msg) => { - const body = msg.body; await loopCtx.removed("init-failed", "step"); + await loopCtx.removed("init-failed-v2", "step"); try { - await loopCtx.step("init-ensure-name", async () => initEnsureNameActivity(loopCtx)); + await loopCtx.step({ + name: "init-ensure-name", + timeout: 5 * 60_000, + run: async () => initEnsureNameActivity(loopCtx), + }); await loopCtx.step("init-assert-name", async () => initAssertNameActivity(loopCtx)); - - const sandbox = await loopCtx.step({ - name: "init-create-sandbox", - timeout: 180_000, - run: async () => initCreateSandboxActivity(loopCtx, body), - }); - const agent = await loopCtx.step({ - name: "init-ensure-agent", - timeout: 180_000, - run: async () => initEnsureAgentActivity(loopCtx, body, sandbox), - }); - const sandboxInstanceReady = await loopCtx.step({ - name: "init-start-sandbox-instance", - timeout: 60_000, - run: async () => initStartSandboxInstanceActivity(loopCtx, body, sandbox, agent), - }); - await 
loopCtx.step("init-expose-sandbox", async () => initExposeSandboxActivity(loopCtx, body, sandbox, sandboxInstanceReady)); - const session = await loopCtx.step({ - name: "init-create-session", - timeout: 180_000, - run: async () => initCreateSessionActivity(loopCtx, body, sandbox, sandboxInstanceReady), - }); - - await loopCtx.step("init-write-db", async () => initWriteDbActivity(loopCtx, body, sandbox, session, sandboxInstanceReady)); - await loopCtx.step("init-start-status-sync", async () => initStartStatusSyncActivity(loopCtx, body, sandbox, session)); - await loopCtx.step("init-complete", async () => initCompleteActivity(loopCtx, body, sandbox, session)); + await loopCtx.removed("init-create-sandbox", "step"); + await loopCtx.removed("init-ensure-agent", "step"); + await loopCtx.removed("init-start-sandbox-instance", "step"); + await loopCtx.removed("init-expose-sandbox", "step"); + await loopCtx.removed("init-create-session", "step"); + await loopCtx.removed("init-write-db", "step"); + await loopCtx.removed("init-start-status-sync", "step"); + await loopCtx.step("init-complete", async () => initCompleteActivity(loopCtx, msg.body)); await msg.complete({ ok: true }); } catch (error) { - await loopCtx.step("init-failed-v2", async () => initFailedActivity(loopCtx, error)); + await loopCtx.step("init-failed-v3", async () => initFailedActivity(loopCtx, error)); await msg.complete({ ok: false, error: resolveErrorMessage(error), @@ -171,7 +147,7 @@ const commandHandlers: Record = { try { const created = await loopCtx.step({ name: "workbench-create-session", - timeout: 30_000, + timeout: 5 * 60_000, run: async () => createWorkbenchSession(loopCtx, msg.body?.model), }); await msg.complete(created); @@ -276,18 +252,6 @@ const commandHandlers: Record = { }); await msg.complete({ ok: true }); }, - - "task.status_sync.result": async (loopCtx, msg) => { - const transitionedToIdle = await loopCtx.step("status-update", async () => statusUpdateActivity(loopCtx, msg.body)); - - 
if (transitionedToIdle) { - const { config } = getActorRuntimeContext(); - if (config.auto_submit) { - await loopCtx.step("idle-submit-pr", async () => idleSubmitPrActivity(loopCtx)); - } - await loopCtx.step("idle-notify", async () => idleNotifyActivity(loopCtx)); - } - }, }; export async function runTaskWorkflow(ctx: any): Promise { diff --git a/foundry/packages/backend/src/actors/task/workflow/init.ts b/foundry/packages/backend/src/actors/task/workflow/init.ts index 4e6fbb5..ec0b699 100644 --- a/foundry/packages/backend/src/actors/task/workflow/init.ts +++ b/foundry/packages/backend/src/actors/task/workflow/init.ts @@ -1,39 +1,14 @@ // @ts-nocheck -import { desc, eq } from "drizzle-orm"; +import { eq } from "drizzle-orm"; import { resolveCreateFlowDecision } from "../../../services/create-flow.js"; import { resolveWorkspaceGithubAuth } from "../../../services/github-auth.js"; import { getActorRuntimeContext } from "../../context.js"; -import { getOrCreateTaskStatusSync, getOrCreateHistory, getOrCreateProject, getOrCreateSandboxInstance, getSandboxInstance, selfTask } from "../../handles.js"; +import { getOrCreateHistory, getOrCreateProject, selfTask } from "../../handles.js"; import { logActorWarning, resolveErrorMessage } from "../../logging.js"; -import { task as taskTable, taskRuntime, taskSandboxes } from "../db/schema.js"; -import { TASK_ROW_ID, appendHistory, buildAgentPrompt, collectErrorMessages, resolveErrorDetail, setTaskState } from "./common.js"; +import { defaultSandboxProviderId } from "../../../sandbox-config.js"; +import { task as taskTable, taskRuntime } from "../db/schema.js"; +import { TASK_ROW_ID, appendHistory, collectErrorMessages, resolveErrorDetail, setTaskState } from "./common.js"; import { taskWorkflowQueueName } from "./queue.js"; -import { enqueuePendingWorkbenchSessions } from "../workbench.js"; - -const DEFAULT_INIT_CREATE_SANDBOX_ACTIVITY_TIMEOUT_MS = 180_000; - -function getInitCreateSandboxActivityTimeoutMs(): number { - const 
raw = process.env.HF_INIT_CREATE_SANDBOX_ACTIVITY_TIMEOUT_MS; - if (!raw) { - return DEFAULT_INIT_CREATE_SANDBOX_ACTIVITY_TIMEOUT_MS; - } - const parsed = Number(raw); - if (!Number.isFinite(parsed) || parsed <= 0) { - return DEFAULT_INIT_CREATE_SANDBOX_ACTIVITY_TIMEOUT_MS; - } - return Math.floor(parsed); -} - -function debugInit(loopCtx: any, message: string, context?: Record): void { - loopCtx.log.debug({ - msg: message, - scope: "task.init", - workspaceId: loopCtx.state.workspaceId, - repoId: loopCtx.state.repoId, - taskId: loopCtx.state.taskId, - ...(context ?? {}), - }); -} async function ensureTaskRuntimeCacheColumns(db: any): Promise { await db.execute(`ALTER TABLE task_runtime ADD COLUMN git_state_json text`).catch(() => {}); @@ -42,94 +17,70 @@ async function ensureTaskRuntimeCacheColumns(db: any): Promise { await db.execute(`ALTER TABLE task_runtime ADD COLUMN provision_stage_updated_at integer`).catch(() => {}); } -async function withActivityTimeout(timeoutMs: number, label: string, run: () => Promise): Promise { - let timer: ReturnType | null = null; - try { - return await Promise.race([ - run(), - new Promise((_, reject) => { - timer = setTimeout(() => { - reject(new Error(`${label} timed out after ${timeoutMs}ms`)); - }, timeoutMs); - }), - ]); - } finally { - if (timer) { - clearTimeout(timer); - } - } -} - export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise { - const providerId = body?.providerId ?? loopCtx.state.providerId; const { config } = getActorRuntimeContext(); + const providerId = body?.providerId ?? loopCtx.state.providerId ?? defaultSandboxProviderId(config); const now = Date.now(); - const db = loopCtx.db; const initialStatusMessage = loopCtx.state.branchName && loopCtx.state.title ? 
"provisioning" : "naming"; - try { - await ensureTaskRuntimeCacheColumns(db); + await ensureTaskRuntimeCacheColumns(loopCtx.db); - await db - .insert(taskTable) - .values({ - id: TASK_ROW_ID, + await loopCtx.db + .insert(taskTable) + .values({ + id: TASK_ROW_ID, + branchName: loopCtx.state.branchName, + title: loopCtx.state.title, + task: loopCtx.state.task, + providerId, + status: "init_bootstrap_db", + agentType: loopCtx.state.agentType ?? config.default_agent, + createdAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: taskTable.id, + set: { branchName: loopCtx.state.branchName, title: loopCtx.state.title, task: loopCtx.state.task, providerId, status: "init_bootstrap_db", agentType: loopCtx.state.agentType ?? config.default_agent, - createdAt: now, updatedAt: now, - }) - .onConflictDoUpdate({ - target: taskTable.id, - set: { - branchName: loopCtx.state.branchName, - title: loopCtx.state.title, - task: loopCtx.state.task, - providerId, - status: "init_bootstrap_db", - agentType: loopCtx.state.agentType ?? 
config.default_agent, - updatedAt: now, - }, - }) - .run(); + }, + }) + .run(); - await db - .insert(taskRuntime) - .values({ - id: TASK_ROW_ID, + await loopCtx.db + .insert(taskRuntime) + .values({ + id: TASK_ROW_ID, + activeSandboxId: null, + activeSessionId: null, + activeSwitchTarget: null, + activeCwd: null, + statusMessage: initialStatusMessage, + gitStateJson: null, + gitStateUpdatedAt: null, + provisionStage: "queued", + provisionStageUpdatedAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: taskRuntime.id, + set: { activeSandboxId: null, activeSessionId: null, activeSwitchTarget: null, activeCwd: null, statusMessage: initialStatusMessage, - gitStateJson: null, - gitStateUpdatedAt: null, provisionStage: "queued", provisionStageUpdatedAt: now, updatedAt: now, - }) - .onConflictDoUpdate({ - target: taskRuntime.id, - set: { - activeSandboxId: null, - activeSessionId: null, - activeSwitchTarget: null, - activeCwd: null, - statusMessage: initialStatusMessage, - provisionStage: "queued", - provisionStageUpdatedAt: now, - updatedAt: now, - }, - }) - .run(); - } catch (error) { - const detail = resolveErrorMessage(error); - throw new Error(`task init bootstrap db failed: ${detail}`); - } + }, + }) + .run(); } export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Promise { @@ -143,12 +94,13 @@ export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Pro }) .where(eq(taskRuntime.id, TASK_ROW_ID)) .run(); + const self = selfTask(loopCtx); try { await self.send(taskWorkflowQueueName("task.command.provision"), body, { wait: false, }); - } catch (error: unknown) { + } catch (error) { logActorWarning("task.init", "background provision command failed", { workspaceId: loopCtx.state.workspaceId, repoId: loopCtx.state.repoId, @@ -178,8 +130,16 @@ export async function initEnsureNameActivity(loopCtx: any): Promise { const { driver } = getActorRuntimeContext(); const auth = await resolveWorkspaceGithubAuth(loopCtx, 
loopCtx.state.workspaceId); + let repoLocalPath = loopCtx.state.repoLocalPath; + if (!repoLocalPath) { + const project = await getOrCreateProject(loopCtx, loopCtx.state.workspaceId, loopCtx.state.repoId, loopCtx.state.repoRemote); + const result = await project.ensure({ remoteUrl: loopCtx.state.repoRemote }); + repoLocalPath = result.localPath; + loopCtx.state.repoLocalPath = repoLocalPath; + } + try { - await driver.git.fetch(loopCtx.state.repoLocalPath, { githubToken: auth?.githubToken ?? null }); + await driver.git.fetch(repoLocalPath, { githubToken: auth?.githubToken ?? null }); } catch (error) { logActorWarning("task.init", "fetch before naming failed", { workspaceId: loopCtx.state.workspaceId, @@ -188,13 +148,12 @@ export async function initEnsureNameActivity(loopCtx: any): Promise { error: resolveErrorMessage(error), }); } - const remoteBranches = (await driver.git.listRemoteBranches(loopCtx.state.repoLocalPath, { githubToken: auth?.githubToken ?? null })).map( + + const remoteBranches = (await driver.git.listRemoteBranches(repoLocalPath, { githubToken: auth?.githubToken ?? null })).map( (branch: any) => branch.branchName, ); - const project = await getOrCreateProject(loopCtx, loopCtx.state.workspaceId, loopCtx.state.repoId, loopCtx.state.repoRemote); const reservedBranches = await project.listReservedBranches({}); - const resolved = resolveCreateFlowDecision({ task: loopCtx.state.task, explicitTitle: loopCtx.state.explicitTitle ?? 
undefined, @@ -248,388 +207,42 @@ export async function initAssertNameActivity(loopCtx: any): Promise { } } -export async function initCreateSandboxActivity(loopCtx: any, body: any): Promise { - await setTaskState(loopCtx, "init_create_sandbox", "creating sandbox"); - await loopCtx.db - .update(taskRuntime) - .set({ - provisionStage: "sandbox_allocated", - provisionStageUpdatedAt: Date.now(), - updatedAt: Date.now(), - }) - .where(eq(taskRuntime.id, TASK_ROW_ID)) - .run(); - const { providers } = getActorRuntimeContext(); - const providerId = body?.providerId ?? loopCtx.state.providerId; - const provider = providers.get(providerId); - const timeoutMs = getInitCreateSandboxActivityTimeoutMs(); - const startedAt = Date.now(); - - debugInit(loopCtx, "init_create_sandbox started", { - providerId, - timeoutMs, - supportsSessionReuse: provider.capabilities().supportsSessionReuse, - }); - - if (provider.capabilities().supportsSessionReuse) { - const runtime = await loopCtx.db.select({ activeSandboxId: taskRuntime.activeSandboxId }).from(taskRuntime).where(eq(taskRuntime.id, TASK_ROW_ID)).get(); - - const existing = await loopCtx.db - .select({ sandboxId: taskSandboxes.sandboxId }) - .from(taskSandboxes) - .where(eq(taskSandboxes.providerId, providerId)) - .orderBy(desc(taskSandboxes.updatedAt)) - .limit(1) - .get(); - - const sandboxId = runtime?.activeSandboxId ?? existing?.sandboxId ?? 
null; - if (sandboxId) { - debugInit(loopCtx, "init_create_sandbox attempting resume", { sandboxId }); - try { - const resumed = await withActivityTimeout(timeoutMs, "resumeSandbox", async () => - provider.resumeSandbox({ - workspaceId: loopCtx.state.workspaceId, - sandboxId, - }), - ); - - debugInit(loopCtx, "init_create_sandbox resume succeeded", { - sandboxId: resumed.sandboxId, - durationMs: Date.now() - startedAt, - }); - return resumed; - } catch (error) { - logActorWarning("task.init", "resume sandbox failed; creating a new sandbox", { - workspaceId: loopCtx.state.workspaceId, - repoId: loopCtx.state.repoId, - taskId: loopCtx.state.taskId, - sandboxId, - error: resolveErrorMessage(error), - }); - } - } - } - - debugInit(loopCtx, "init_create_sandbox creating fresh sandbox", { - branchName: loopCtx.state.branchName, - }); - - try { - const auth = await resolveWorkspaceGithubAuth(loopCtx, loopCtx.state.workspaceId); - const sandbox = await withActivityTimeout(timeoutMs, "createSandbox", async () => - provider.createSandbox({ - workspaceId: loopCtx.state.workspaceId, - repoId: loopCtx.state.repoId, - repoRemote: loopCtx.state.repoRemote, - branchName: loopCtx.state.branchName, - taskId: loopCtx.state.taskId, - githubToken: auth?.githubToken ?? 
null, - debug: (message, context) => debugInit(loopCtx, message, context), - }), - ); - - debugInit(loopCtx, "init_create_sandbox create succeeded", { - sandboxId: sandbox.sandboxId, - durationMs: Date.now() - startedAt, - }); - return sandbox; - } catch (error) { - debugInit(loopCtx, "init_create_sandbox failed", { - durationMs: Date.now() - startedAt, - error: resolveErrorMessage(error), - }); - throw error; - } -} - -export async function initEnsureAgentActivity(loopCtx: any, body: any, sandbox: any): Promise { - await setTaskState(loopCtx, "init_ensure_agent", "ensuring sandbox agent"); - await loopCtx.db - .update(taskRuntime) - .set({ - provisionStage: "agent_installing", - provisionStageUpdatedAt: Date.now(), - updatedAt: Date.now(), - }) - .where(eq(taskRuntime.id, TASK_ROW_ID)) - .run(); - const { providers } = getActorRuntimeContext(); - const providerId = body?.providerId ?? loopCtx.state.providerId; - const provider = providers.get(providerId); - return await provider.ensureSandboxAgent({ - workspaceId: loopCtx.state.workspaceId, - sandboxId: sandbox.sandboxId, - }); -} - -export async function initStartSandboxInstanceActivity(loopCtx: any, body: any, sandbox: any, agent: any): Promise { - await setTaskState(loopCtx, "init_start_sandbox_instance", "starting sandbox runtime"); - await loopCtx.db - .update(taskRuntime) - .set({ - provisionStage: "agent_starting", - provisionStageUpdatedAt: Date.now(), - updatedAt: Date.now(), - }) - .where(eq(taskRuntime.id, TASK_ROW_ID)) - .run(); - try { - const providerId = body?.providerId ?? 
loopCtx.state.providerId; - const sandboxInstance = await getOrCreateSandboxInstance(loopCtx, loopCtx.state.workspaceId, providerId, sandbox.sandboxId, { - workspaceId: loopCtx.state.workspaceId, - providerId, - sandboxId: sandbox.sandboxId, - }); - - await sandboxInstance.ensure({ - metadata: sandbox.metadata, - status: "ready", - agentEndpoint: agent.endpoint, - agentToken: agent.token, - }); - - const actorId = typeof (sandboxInstance as any).resolve === "function" ? await (sandboxInstance as any).resolve() : null; - - return { - ok: true as const, - actorId: typeof actorId === "string" ? actorId : null, - }; - } catch (error) { - const detail = error instanceof Error ? error.message : String(error); - return { - ok: false as const, - error: `sandbox-instance ensure failed: ${detail}`, - }; - } -} - -export async function initCreateSessionActivity(loopCtx: any, body: any, sandbox: any, sandboxInstanceReady: any): Promise { - await setTaskState(loopCtx, "init_create_session", "creating agent session"); - await loopCtx.db - .update(taskRuntime) - .set({ - provisionStage: "session_creating", - provisionStageUpdatedAt: Date.now(), - updatedAt: Date.now(), - }) - .where(eq(taskRuntime.id, TASK_ROW_ID)) - .run(); - if (!sandboxInstanceReady.ok) { - return { - id: null, - status: "error", - error: sandboxInstanceReady.error ?? "sandbox instance is not ready", - } as const; - } - - const { config } = getActorRuntimeContext(); - const providerId = body?.providerId ?? loopCtx.state.providerId; - const sandboxInstance = getSandboxInstance(loopCtx, loopCtx.state.workspaceId, providerId, sandbox.sandboxId); - - const cwd = sandbox.metadata && typeof (sandbox.metadata as any).cwd === "string" ? ((sandbox.metadata as any).cwd as string) : undefined; - - return await sandboxInstance.createSession({ - prompt: typeof loopCtx.state.initialPrompt === "string" ? loopCtx.state.initialPrompt : buildAgentPrompt(loopCtx.state.task), - cwd, - agent: (loopCtx.state.agentType ?? 
config.default_agent) as any, - }); -} - -export async function initExposeSandboxActivity(loopCtx: any, body: any, sandbox: any, sandboxInstanceReady?: { actorId?: string | null }): Promise { - const providerId = body?.providerId ?? loopCtx.state.providerId; +export async function initCompleteActivity(loopCtx: any, body: any): Promise { const now = Date.now(); - const db = loopCtx.db; - const activeCwd = sandbox.metadata && typeof (sandbox.metadata as any).cwd === "string" ? ((sandbox.metadata as any).cwd as string) : null; - const sandboxActorId = typeof sandboxInstanceReady?.actorId === "string" && sandboxInstanceReady.actorId.length > 0 ? sandboxInstanceReady.actorId : null; + const { config } = getActorRuntimeContext(); + const providerId = body?.providerId ?? loopCtx.state.providerId ?? defaultSandboxProviderId(config); - await db - .insert(taskSandboxes) - .values({ - sandboxId: sandbox.sandboxId, - providerId, - sandboxActorId, - switchTarget: sandbox.switchTarget, - cwd: activeCwd, - statusMessage: "sandbox ready", - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: taskSandboxes.sandboxId, - set: { - providerId, - sandboxActorId, - switchTarget: sandbox.switchTarget, - cwd: activeCwd, - statusMessage: "sandbox ready", - updatedAt: now, - }, - }) - .run(); - - await db + await setTaskState(loopCtx, "init_complete", "task initialized"); + await loopCtx.db .update(taskRuntime) .set({ - activeSandboxId: sandbox.sandboxId, - activeSwitchTarget: sandbox.switchTarget, - activeCwd, - statusMessage: "sandbox ready", - updatedAt: now, - }) - .where(eq(taskRuntime.id, TASK_ROW_ID)) - .run(); -} - -export async function initWriteDbActivity( - loopCtx: any, - body: any, - sandbox: any, - session: any, - sandboxInstanceReady?: { actorId?: string | null }, -): Promise { - await setTaskState(loopCtx, "init_write_db", "persisting task runtime"); - const providerId = body?.providerId ?? 
loopCtx.state.providerId; - const { config } = getActorRuntimeContext(); - const now = Date.now(); - const db = loopCtx.db; - const sessionId = session?.id ?? null; - const sessionHealthy = Boolean(sessionId) && session?.status !== "error"; - const activeSessionId = sessionHealthy ? sessionId : null; - const statusMessage = sessionHealthy ? "session created" : session?.status === "error" ? (session.error ?? "session create failed") : "session unavailable"; - - const activeCwd = sandbox.metadata && typeof (sandbox.metadata as any).cwd === "string" ? ((sandbox.metadata as any).cwd as string) : null; - const sandboxActorId = typeof sandboxInstanceReady?.actorId === "string" && sandboxInstanceReady.actorId.length > 0 ? sandboxInstanceReady.actorId : null; - - await db - .update(taskTable) - .set({ - providerId, - status: sessionHealthy ? "running" : "error", - agentType: loopCtx.state.agentType ?? config.default_agent, - updatedAt: now, - }) - .where(eq(taskTable.id, TASK_ROW_ID)) - .run(); - - await db - .insert(taskSandboxes) - .values({ - sandboxId: sandbox.sandboxId, - providerId, - sandboxActorId, - switchTarget: sandbox.switchTarget, - cwd: activeCwd, - statusMessage, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: taskSandboxes.sandboxId, - set: { - providerId, - sandboxActorId, - switchTarget: sandbox.switchTarget, - cwd: activeCwd, - statusMessage, - updatedAt: now, - }, - }) - .run(); - - await db - .insert(taskRuntime) - .values({ - id: TASK_ROW_ID, - activeSandboxId: sandbox.sandboxId, - activeSessionId, - activeSwitchTarget: sandbox.switchTarget, - activeCwd, - statusMessage, - provisionStage: sessionHealthy ? 
"ready" : "error", + statusMessage: "ready", + provisionStage: "ready", provisionStageUpdatedAt: now, updatedAt: now, }) - .onConflictDoUpdate({ - target: taskRuntime.id, - set: { - activeSandboxId: sandbox.sandboxId, - activeSessionId, - activeSwitchTarget: sandbox.switchTarget, - activeCwd, - statusMessage, - provisionStage: sessionHealthy ? "ready" : "error", - provisionStageUpdatedAt: now, - updatedAt: now, - }, - }) + .where(eq(taskRuntime.id, TASK_ROW_ID)) .run(); -} -export async function initStartStatusSyncActivity(loopCtx: any, body: any, sandbox: any, session: any): Promise { - const sessionId = session?.id ?? null; - if (!sessionId || session?.status === "error") { - return; - } - - await setTaskState(loopCtx, "init_start_status_sync", "starting session status sync"); - const providerId = body?.providerId ?? loopCtx.state.providerId; - const sync = await getOrCreateTaskStatusSync(loopCtx, loopCtx.state.workspaceId, loopCtx.state.repoId, loopCtx.state.taskId, sandbox.sandboxId, sessionId, { - workspaceId: loopCtx.state.workspaceId, - repoId: loopCtx.state.repoId, + const history = await getOrCreateHistory(loopCtx, loopCtx.state.workspaceId, loopCtx.state.repoId); + await history.append({ + kind: "task.initialized", taskId: loopCtx.state.taskId, - providerId, - sandboxId: sandbox.sandboxId, - sessionId, - intervalMs: 2_000, + branchName: loopCtx.state.branchName, + payload: { providerId }, }); - await sync.start(); - await sync.force(); -} - -export async function initCompleteActivity(loopCtx: any, body: any, sandbox: any, session: any): Promise { - const providerId = body?.providerId ?? loopCtx.state.providerId; - const sessionId = session?.id ?? 
null; - const sessionHealthy = Boolean(sessionId) && session?.status !== "error"; - if (sessionHealthy) { - await setTaskState(loopCtx, "init_complete", "task initialized"); - - const history = await getOrCreateHistory(loopCtx, loopCtx.state.workspaceId, loopCtx.state.repoId); - await history.append({ - kind: "task.initialized", - taskId: loopCtx.state.taskId, - branchName: loopCtx.state.branchName, - payload: { providerId, sandboxId: sandbox.sandboxId, sessionId }, - }); - - loopCtx.state.initialized = true; - await enqueuePendingWorkbenchSessions(loopCtx); - const self = selfTask(loopCtx); - await self.send(taskWorkflowQueueName("task.command.workbench.refresh_derived"), {}, { wait: false }); - if (sessionId) { - await self.send(taskWorkflowQueueName("task.command.workbench.refresh_session_transcript"), { sessionId }, { wait: false }); - } - return; - } - - const detail = session?.status === "error" ? (session.error ?? "session create failed") : "session unavailable"; - await setTaskState(loopCtx, "error", detail); - await appendHistory(loopCtx, "task.error", { - detail, - messages: [detail], - }); - loopCtx.state.initialized = false; + loopCtx.state.initialized = true; } export async function initFailedActivity(loopCtx: any, error: unknown): Promise { const now = Date.now(); const detail = resolveErrorDetail(error); const messages = collectErrorMessages(error); - const db = loopCtx.db; - const { config, providers } = getActorRuntimeContext(); - const providerId = loopCtx.state.providerId ?? providers.defaultProviderId(); + const { config } = getActorRuntimeContext(); + const providerId = loopCtx.state.providerId ?? 
defaultSandboxProviderId(config); - await db + await loopCtx.db .insert(taskTable) .values({ id: TASK_ROW_ID, @@ -656,7 +269,7 @@ export async function initFailedActivity(loopCtx: any, error: unknown): Promise< }) .run(); - await db + await loopCtx.db .insert(taskRuntime) .values({ id: TASK_ROW_ID, diff --git a/foundry/packages/backend/src/actors/task/workflow/push.ts b/foundry/packages/backend/src/actors/task/workflow/push.ts index 7ee929d..eda79e1 100644 --- a/foundry/packages/backend/src/actors/task/workflow/push.ts +++ b/foundry/packages/backend/src/actors/task/workflow/push.ts @@ -1,6 +1,7 @@ // @ts-nocheck import { eq } from "drizzle-orm"; -import { getActorRuntimeContext } from "../../context.js"; +import { getTaskSandbox } from "../../handles.js"; +import { resolveWorkspaceGithubAuth } from "../../../services/github-auth.js"; import { taskRuntime, taskSandboxes } from "../db/schema.js"; import { TASK_ROW_ID, appendHistory, getCurrentRecord } from "./common.js"; @@ -22,15 +23,11 @@ export async function pushActiveBranchActivity(loopCtx: any, options: PushActive } const activeSandbox = record.sandboxes.find((sandbox: any) => sandbox.sandboxId === activeSandboxId) ?? null; - const providerId = activeSandbox?.providerId ?? record.providerId; const cwd = activeSandbox?.cwd ?? 
null; if (!cwd) { throw new Error("cannot push: active sandbox cwd is not set"); } - const { providers } = getActorRuntimeContext(); - const provider = providers.get(providerId); - const now = Date.now(); await loopCtx.db .update(taskRuntime) @@ -52,15 +49,23 @@ export async function pushActiveBranchActivity(loopCtx: any, options: PushActive `git push -u origin ${JSON.stringify(branchName)}`, ].join("; "); - const result = await provider.executeCommand({ - workspaceId: loopCtx.state.workspaceId, - sandboxId: activeSandboxId, - command: ["bash", "-lc", JSON.stringify(script)].join(" "), - label: `git push ${branchName}`, + const sandbox = getTaskSandbox(loopCtx, loopCtx.state.workspaceId, activeSandboxId); + const auth = await resolveWorkspaceGithubAuth(loopCtx, loopCtx.state.workspaceId); + const result = await sandbox.runProcess({ + command: "bash", + args: ["-lc", script], + cwd: "/", + env: auth?.githubToken + ? { + GH_TOKEN: auth.githubToken, + GITHUB_TOKEN: auth.githubToken, + } + : undefined, + timeoutMs: 5 * 60_000, }); - if (result.exitCode !== 0) { - throw new Error(`git push failed (${result.exitCode}): ${result.result}`); + if ((result.exitCode ?? 0) !== 0) { + throw new Error(`git push failed (${result.exitCode ?? 
1}): ${[result.stdout, result.stderr].filter(Boolean).join("")}`); } const updatedAt = Date.now(); diff --git a/foundry/packages/backend/src/actors/task/workflow/queue.ts b/foundry/packages/backend/src/actors/task/workflow/queue.ts index db5c0a3..6210468 100644 --- a/foundry/packages/backend/src/actors/task/workflow/queue.ts +++ b/foundry/packages/backend/src/actors/task/workflow/queue.ts @@ -26,7 +26,6 @@ export const TASK_QUEUE_NAMES = [ "task.command.workbench.close_session", "task.command.workbench.publish_pr", "task.command.workbench.revert_file", - "task.status_sync.result", ] as const; export function taskWorkflowQueueName(name: string): string { diff --git a/foundry/packages/backend/src/actors/task/workflow/status-sync.ts b/foundry/packages/backend/src/actors/task/workflow/status-sync.ts deleted file mode 100644 index ea3b0c8..0000000 --- a/foundry/packages/backend/src/actors/task/workflow/status-sync.ts +++ /dev/null @@ -1,148 +0,0 @@ -// @ts-nocheck -import { eq } from "drizzle-orm"; -import { getActorRuntimeContext } from "../../context.js"; -import { logActorWarning, resolveErrorMessage } from "../../logging.js"; -import { resolveWorkspaceGithubAuth } from "../../../services/github-auth.js"; -import { task as taskTable, taskRuntime, taskSandboxes } from "../db/schema.js"; -import { TASK_ROW_ID, appendHistory, resolveErrorDetail } from "./common.js"; -import { pushActiveBranchActivity } from "./push.js"; - -function mapSessionStatus(status: "running" | "idle" | "error") { - if (status === "idle") return "idle"; - if (status === "error") return "error"; - return "running"; -} - -export async function statusUpdateActivity(loopCtx: any, body: any): Promise { - const newStatus = mapSessionStatus(body.status); - const wasIdle = loopCtx.state.previousStatus === "idle"; - const didTransition = newStatus === "idle" && !wasIdle; - const isDuplicateStatus = loopCtx.state.previousStatus === newStatus; - - if (isDuplicateStatus) { - return false; - } - - const db = 
loopCtx.db; - const runtime = await db - .select({ - activeSandboxId: taskRuntime.activeSandboxId, - activeSessionId: taskRuntime.activeSessionId, - }) - .from(taskRuntime) - .where(eq(taskRuntime.id, TASK_ROW_ID)) - .get(); - - const isActive = runtime?.activeSandboxId === body.sandboxId && runtime?.activeSessionId === body.sessionId; - - if (isActive) { - await db.update(taskTable).set({ status: newStatus, updatedAt: body.at }).where(eq(taskTable.id, TASK_ROW_ID)).run(); - - await db - .update(taskRuntime) - .set({ statusMessage: `session:${body.status}`, updatedAt: body.at }) - .where(eq(taskRuntime.id, TASK_ROW_ID)) - .run(); - } - - await db - .update(taskSandboxes) - .set({ statusMessage: `session:${body.status}`, updatedAt: body.at }) - .where(eq(taskSandboxes.sandboxId, body.sandboxId)) - .run(); - - await appendHistory(loopCtx, "task.status", { - status: body.status, - sessionId: body.sessionId, - sandboxId: body.sandboxId, - }); - - if (isActive) { - loopCtx.state.previousStatus = newStatus; - - const { driver } = getActorRuntimeContext(); - if (loopCtx.state.branchName) { - driver.tmux.setWindowStatus(loopCtx.state.branchName, newStatus); - } - return didTransition; - } - - return false; -} - -export async function idleSubmitPrActivity(loopCtx: any): Promise { - const { driver } = getActorRuntimeContext(); - const db = loopCtx.db; - - const self = await db.select({ prSubmitted: taskTable.prSubmitted }).from(taskTable).where(eq(taskTable.id, TASK_ROW_ID)).get(); - - if (self && self.prSubmitted) return; - - const auth = await resolveWorkspaceGithubAuth(loopCtx, loopCtx.state.workspaceId); - - try { - await driver.git.fetch(loopCtx.state.repoLocalPath, { githubToken: auth?.githubToken ?? 
null }); - } catch (error) { - logActorWarning("task.status-sync", "fetch before PR submit failed", { - workspaceId: loopCtx.state.workspaceId, - repoId: loopCtx.state.repoId, - taskId: loopCtx.state.taskId, - error: resolveErrorMessage(error), - }); - } - - if (!loopCtx.state.branchName || !loopCtx.state.title) { - throw new Error("cannot submit PR before task has a branch and title"); - } - - try { - await pushActiveBranchActivity(loopCtx, { - reason: "auto_submit_idle", - historyKind: "task.push.auto", - }); - - const pr = await driver.github.createPr(loopCtx.state.repoLocalPath, loopCtx.state.branchName, loopCtx.state.title, undefined, { - githubToken: auth?.githubToken ?? null, - }); - - await db.update(taskTable).set({ prSubmitted: 1, updatedAt: Date.now() }).where(eq(taskTable.id, TASK_ROW_ID)).run(); - - await appendHistory(loopCtx, "task.step", { - step: "pr_submit", - taskId: loopCtx.state.taskId, - branchName: loopCtx.state.branchName, - prUrl: pr.url, - prNumber: pr.number, - }); - - await appendHistory(loopCtx, "task.pr_created", { - taskId: loopCtx.state.taskId, - branchName: loopCtx.state.branchName, - prUrl: pr.url, - prNumber: pr.number, - }); - } catch (error) { - const detail = resolveErrorDetail(error); - await db - .update(taskRuntime) - .set({ - statusMessage: `pr submit failed: ${detail}`, - updatedAt: Date.now(), - }) - .where(eq(taskRuntime.id, TASK_ROW_ID)) - .run(); - - await appendHistory(loopCtx, "task.pr_create_failed", { - taskId: loopCtx.state.taskId, - branchName: loopCtx.state.branchName, - error: detail, - }); - } -} - -export async function idleNotifyActivity(loopCtx: any): Promise { - const { notifications } = getActorRuntimeContext(); - if (notifications && loopCtx.state.branchName) { - await notifications.agentIdle(loopCtx.state.branchName); - } -} diff --git a/foundry/packages/backend/src/actors/workspace/actions.ts b/foundry/packages/backend/src/actors/workspace/actions.ts index 0ba55f8..0dd6d6e 100644 --- 
a/foundry/packages/backend/src/actors/workspace/actions.ts +++ b/foundry/packages/backend/src/actors/workspace/actions.ts @@ -1,4 +1,5 @@ // @ts-nocheck +import { setTimeout as delay } from "node:timers/promises"; import { desc, eq } from "drizzle-orm"; import { Loop } from "rivetkit/workflow"; import type { @@ -37,6 +38,7 @@ import type { import { getActorRuntimeContext } from "../context.js"; import { getTask, getOrCreateHistory, getOrCreateProject, selfWorkspace } from "../handles.js"; import { logActorWarning, resolveErrorMessage } from "../logging.js"; +import { availableSandboxProviderIds, defaultSandboxProviderId } from "../../sandbox-config.js"; import { normalizeRemoteUrl, repoIdFromRemote } from "../../services/repo.js"; import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js"; import { taskLookup, repos, providerProfiles, taskSummaries } from "./db/schema.js"; @@ -258,6 +260,24 @@ async function requireWorkbenchTask(c: any, taskId: string) { return getTask(c, c.state.workspaceId, repoId, taskId); } +async function waitForWorkbenchTaskReady(task: any, timeoutMs = 5 * 60_000): Promise { + const startedAt = Date.now(); + + for (;;) { + const record = await task.get(); + if (record?.branchName && record?.title) { + return record; + } + if (record?.status === "error") { + throw new Error("task initialization failed before the workbench session was ready"); + } + if (Date.now() - startedAt > timeoutMs) { + throw new Error("timed out waiting for task initialization"); + } + await delay(1_000); + } +} + /** * Reads the workspace sidebar snapshot from the workspace actor's local SQLite * only. 
Task actors push summary updates into `task_summaries`, so clients do @@ -343,8 +363,8 @@ async function addRepoMutation(c: any, input: AddRepoInput): Promise async function createTaskMutation(c: any, input: CreateTaskInput): Promise { assertWorkspace(c, input.workspaceId); - const { providers } = getActorRuntimeContext(); - const providerId = input.providerId ?? providers.defaultProviderId(); + const { config } = getActorRuntimeContext(); + const providerId = input.providerId ?? defaultSandboxProviderId(config); const repoId = input.repoId; const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, repoId)).get(); @@ -370,7 +390,6 @@ async function createTaskMutation(c: any, input: CreateTaskInput): Promise { const body = command ?? {}; - const { providers } = getActorRuntimeContext(); - const providerIds: ProviderId[] = body.providerId ? [body.providerId] : providers.availableProviderIds(); + const { config } = getActorRuntimeContext(); + const providerIds: ProviderId[] = body.providerId ? [body.providerId] : availableSandboxProviderIds(config); for (const providerId of providerIds) { await c.db @@ -457,7 +476,7 @@ export async function runWorkspaceWorkflow(ctx: any): Promise { if (msg.name === "workspace.command.createTask") { const result = await loopCtx.step({ name: "workspace-create-task", - timeout: 12 * 60_000, + timeout: 5 * 60_000, run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskInput), }); await msg.complete(result); @@ -547,7 +566,7 @@ export const workspaceActions = { return expectQueueResponse( await self.send(workspaceWorkflowQueueName("workspace.command.createTask"), input, { wait: true, - timeout: 12 * 60_000, + timeout: 5 * 60_000, }), ); }, @@ -604,8 +623,21 @@ export const workspaceActions = { ...(input.branch ? { explicitBranchName: input.branch } : {}), ...(input.model ? 
{ agentType: agentTypeForModel(input.model) } : {}), }); + const task = await requireWorkbenchTask(c, created.taskId); + await waitForWorkbenchTaskReady(task); + const session = await task.createWorkbenchSession({ + taskId: created.taskId, + ...(input.model ? { model: input.model } : {}), + }); + await task.sendWorkbenchMessage({ + taskId: created.taskId, + tabId: session.tabId, + text: input.task, + attachments: [], + }); return { taskId: created.taskId, + tabId: session.tabId, }; }, diff --git a/foundry/packages/backend/src/driver.ts b/foundry/packages/backend/src/driver.ts index 4e1d248..e96fea8 100644 --- a/foundry/packages/backend/src/driver.ts +++ b/foundry/packages/backend/src/driver.ts @@ -1,19 +1,5 @@ import type { BranchSnapshot } from "./integrations/git/index.js"; import type { PullRequestSnapshot } from "./integrations/github/index.js"; -import type { SandboxSession, SandboxAgentClientOptions, SandboxSessionCreateRequest } from "./integrations/sandbox-agent/client.js"; -import type { - ListEventsRequest, - ListPage, - ListPageRequest, - ProcessCreateRequest, - ProcessInfo, - ProcessLogFollowQuery, - ProcessLogsResponse, - ProcessSignalQuery, - SessionEvent, - SessionRecord, -} from "sandbox-agent"; -import type { DaytonaClientOptions, DaytonaCreateSandboxOptions, DaytonaPreviewEndpoint, DaytonaSandbox } from "./integrations/daytona/client.js"; import { validateRemote, ensureCloned, @@ -36,8 +22,6 @@ import { gitSpiceTrackBranch, } from "./integrations/git-spice/index.js"; import { listPullRequests, createPr, starRepository } from "./integrations/github/index.js"; -import { SandboxAgentClient } from "./integrations/sandbox-agent/client.js"; -import { DaytonaClient } from "./integrations/daytona/client.js"; export interface GitDriver { validateRemote(remoteUrl: string, options?: { githubToken?: string | null }): Promise; @@ -79,40 +63,6 @@ export interface GithubDriver { starRepository(repoFullName: string, options?: { githubToken?: string | null }): 
Promise; } -export interface SandboxAgentClientLike { - createSession(request: string | SandboxSessionCreateRequest): Promise; - sessionStatus(sessionId: string): Promise; - listSessions(request?: ListPageRequest): Promise>; - listEvents(request: ListEventsRequest): Promise>; - createProcess(request: ProcessCreateRequest): Promise; - listProcesses(): Promise<{ processes: ProcessInfo[] }>; - getProcessLogs(processId: string, query?: ProcessLogFollowQuery): Promise; - stopProcess(processId: string, query?: ProcessSignalQuery): Promise; - killProcess(processId: string, query?: ProcessSignalQuery): Promise; - deleteProcess(processId: string): Promise; - sendPrompt(request: { sessionId: string; prompt: string; notification?: boolean }): Promise; - cancelSession(sessionId: string): Promise; - destroySession(sessionId: string): Promise; -} - -export interface SandboxAgentDriver { - createClient(options: SandboxAgentClientOptions): SandboxAgentClientLike; -} - -export interface DaytonaClientLike { - createSandbox(options: DaytonaCreateSandboxOptions): Promise; - getSandbox(sandboxId: string): Promise; - startSandbox(sandboxId: string, timeoutSeconds?: number): Promise; - stopSandbox(sandboxId: string, timeoutSeconds?: number): Promise; - deleteSandbox(sandboxId: string): Promise; - executeCommand(sandboxId: string, command: string): Promise<{ exitCode: number; result: string }>; - getPreviewEndpoint(sandboxId: string, port: number): Promise; -} - -export interface DaytonaDriver { - createClient(options: DaytonaClientOptions): DaytonaClientLike; -} - export interface TmuxDriver { setWindowStatus(branchName: string, status: string): number; } @@ -121,15 +71,10 @@ export interface BackendDriver { git: GitDriver; stack: StackDriver; github: GithubDriver; - sandboxAgent: SandboxAgentDriver; - daytona: DaytonaDriver; tmux: TmuxDriver; } export function createDefaultDriver(): BackendDriver { - const sandboxAgentClients = new Map(); - const daytonaClients = new Map(); - return { 
git: { validateRemote, @@ -157,33 +102,6 @@ export function createDefaultDriver(): BackendDriver { createPr, starRepository, }, - sandboxAgent: { - createClient: (opts) => { - if (opts.persist) { - return new SandboxAgentClient(opts); - } - const key = `${opts.endpoint}|${opts.token ?? ""}|${opts.agent ?? ""}`; - const cached = sandboxAgentClients.get(key); - if (cached) { - return cached; - } - const created = new SandboxAgentClient(opts); - sandboxAgentClients.set(key, created); - return created; - }, - }, - daytona: { - createClient: (opts) => { - const key = `${opts.apiUrl ?? ""}|${opts.apiKey ?? ""}|${opts.target ?? ""}`; - const cached = daytonaClients.get(key); - if (cached) { - return cached; - } - const created = new DaytonaClient(opts); - daytonaClients.set(key, created); - return created; - }, - }, tmux: { setWindowStatus: () => 0, }, diff --git a/foundry/packages/backend/src/index.ts b/foundry/packages/backend/src/index.ts index cf1e6e7..fb75b94 100644 --- a/foundry/packages/backend/src/index.ts +++ b/foundry/packages/backend/src/index.ts @@ -7,7 +7,6 @@ import { workspaceKey } from "./actors/keys.js"; import { loadConfig } from "./config/backend.js"; import { createBackends, createNotificationService } from "./notifications/index.js"; import { createDefaultDriver } from "./driver.js"; -import { createProviderRegistry } from "./providers/index.js"; import { createClient } from "rivetkit/client"; import { initBetterAuthService } from "./services/better-auth.js"; import { createDefaultAppShellServices } from "./services/app-shell-runtime.js"; @@ -69,15 +68,14 @@ export async function startBackend(options: BackendStartOptions = {}): Promise; -} - -export interface DaytonaCreateSandboxOptions { - image: string | Image; - envVars?: Record; - labels?: Record; - autoStopInterval?: number; -} - -export interface DaytonaPreviewEndpoint { - url: string; - token?: string; -} - -export interface DaytonaClientOptions { - apiUrl?: string; - apiKey?: string; - 
target?: string; -} - -function normalizeApiUrl(input?: string): string | undefined { - if (!input) return undefined; - const trimmed = input.replace(/\/+$/, ""); - if (trimmed.endsWith("/api")) { - return trimmed; - } - return `${trimmed}/api`; -} - -export class DaytonaClient { - private readonly daytona: Daytona; - - constructor(options: DaytonaClientOptions) { - const apiUrl = normalizeApiUrl(options.apiUrl); - this.daytona = new Daytona({ - _experimental: {}, - ...(apiUrl ? { apiUrl } : {}), - ...(options.apiKey ? { apiKey: options.apiKey } : {}), - ...(options.target ? { target: options.target } : {}), - }); - } - - async createSandbox(options: DaytonaCreateSandboxOptions): Promise { - const sandbox = await this.daytona.create({ - image: options.image, - envVars: options.envVars, - labels: options.labels, - ...(options.autoStopInterval !== undefined ? { autoStopInterval: options.autoStopInterval } : {}), - }); - - return { - id: sandbox.id, - state: sandbox.state, - snapshot: sandbox.snapshot, - labels: (sandbox as any).labels, - }; - } - - async getSandbox(sandboxId: string): Promise { - const sandbox = await this.daytona.get(sandboxId); - return { - id: sandbox.id, - state: sandbox.state, - snapshot: sandbox.snapshot, - labels: (sandbox as any).labels, - }; - } - - async startSandbox(sandboxId: string, timeoutSeconds?: number): Promise { - const sandbox = await this.daytona.get(sandboxId); - await sandbox.start(timeoutSeconds); - } - - async stopSandbox(sandboxId: string, timeoutSeconds?: number): Promise { - const sandbox = await this.daytona.get(sandboxId); - await sandbox.stop(timeoutSeconds); - } - - async deleteSandbox(sandboxId: string): Promise { - const sandbox = await this.daytona.get(sandboxId); - await this.daytona.delete(sandbox); - } - - async executeCommand(sandboxId: string, command: string): Promise<{ exitCode: number; result: string }> { - const sandbox = await this.daytona.get(sandboxId); - const response = await 
sandbox.process.executeCommand(command); - return { - exitCode: response.exitCode, - result: response.result, - }; - } - - async getPreviewEndpoint(sandboxId: string, port: number): Promise { - const sandbox = await this.daytona.get(sandboxId); - // Use signed preview URLs for server-to-sandbox communication. - // The standard preview link may redirect to an interactive Auth0 flow from non-browser clients. - // Signed preview URLs work for direct HTTP access. - // - // Request a longer-lived URL so sessions can run for several minutes without refresh. - const preview = await sandbox.getSignedPreviewUrl(port, 6 * 60 * 60); - return { - url: preview.url, - token: preview.token, - }; - } -} diff --git a/foundry/packages/backend/src/integrations/git/index.ts b/foundry/packages/backend/src/integrations/git/index.ts index 1b478c4..880e0f5 100644 --- a/foundry/packages/backend/src/integrations/git/index.ts +++ b/foundry/packages/backend/src/integrations/git/index.ts @@ -87,7 +87,7 @@ export interface BranchSnapshot { } export async function fetch(repoPath: string, options?: GitAuthOptions): Promise { - await execFileAsync("git", ["-C", repoPath, "fetch", "--prune"], { + await execFileAsync("git", ["-C", repoPath, "fetch", "--prune", "--no-auto-gc"], { timeout: DEFAULT_GIT_FETCH_TIMEOUT_MS, env: gitEnv(options), }); diff --git a/foundry/packages/backend/src/providers/daytona/index.ts b/foundry/packages/backend/src/providers/daytona/index.ts deleted file mode 100644 index 8166668..0000000 --- a/foundry/packages/backend/src/providers/daytona/index.ts +++ /dev/null @@ -1,485 +0,0 @@ -import type { - AgentEndpoint, - AttachTarget, - AttachTargetRequest, - CreateSandboxRequest, - DestroySandboxRequest, - EnsureAgentRequest, - ExecuteSandboxCommandRequest, - ExecuteSandboxCommandResult, - ProviderCapabilities, - ReleaseSandboxRequest, - ResumeSandboxRequest, - SandboxHandle, - SandboxHealth, - SandboxHealthRequest, - SandboxProvider, -} from "../provider-api/index.js"; -import 
type { DaytonaDriver } from "../../driver.js"; -import { Image } from "@daytonaio/sdk"; - -export interface DaytonaProviderConfig { - endpoint?: string; - apiKey?: string; - image: string; - target?: string; - /** - * Auto-stop interval in minutes. If omitted, Daytona's default applies. - * Set to `0` to disable auto-stop. - */ - autoStopInterval?: number; -} - -export class DaytonaProvider implements SandboxProvider { - constructor( - private readonly config: DaytonaProviderConfig, - private readonly daytona?: DaytonaDriver, - ) {} - - private static readonly SANDBOX_AGENT_PORT = 2468; - private static readonly SANDBOX_AGENT_VERSION = "0.3.0"; - private static readonly DEFAULT_ACP_REQUEST_TIMEOUT_MS = 120_000; - private static readonly AGENT_IDS = ["codex", "claude"] as const; - private static readonly PASSTHROUGH_ENV_KEYS = [ - "ANTHROPIC_API_KEY", - "CLAUDE_API_KEY", - "OPENAI_API_KEY", - "CODEX_API_KEY", - "OPENCODE_API_KEY", - "CEREBRAS_API_KEY", - "GH_TOKEN", - "GITHUB_TOKEN", - ] as const; - - private getRequestTimeoutMs(): number { - const parsed = Number(process.env.HF_DAYTONA_REQUEST_TIMEOUT_MS ?? "120000"); - if (!Number.isFinite(parsed) || parsed <= 0) { - return 120_000; - } - return Math.floor(parsed); - } - - private getAcpRequestTimeoutMs(): number { - const parsed = Number(process.env.HF_SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS ?? 
DaytonaProvider.DEFAULT_ACP_REQUEST_TIMEOUT_MS.toString()); - if (!Number.isFinite(parsed) || parsed <= 0) { - return DaytonaProvider.DEFAULT_ACP_REQUEST_TIMEOUT_MS; - } - return Math.floor(parsed); - } - - private async withTimeout(label: string, fn: () => Promise): Promise { - const timeoutMs = this.getRequestTimeoutMs(); - let timer: ReturnType | null = null; - - try { - return await Promise.race([ - fn(), - new Promise((_, reject) => { - timer = setTimeout(() => { - reject(new Error(`daytona ${label} timed out after ${timeoutMs}ms`)); - }, timeoutMs); - }), - ]); - } finally { - if (timer) { - clearTimeout(timer); - } - } - } - - private getClient() { - const apiKey = this.config.apiKey?.trim(); - if (!apiKey) { - return undefined; - } - const endpoint = this.config.endpoint?.trim(); - - return this.daytona?.createClient({ - ...(endpoint ? { apiUrl: endpoint } : {}), - apiKey, - target: this.config.target, - }); - } - - private requireClient() { - const client = this.getClient(); - if (client) { - return client; - } - - if (!this.daytona) { - throw new Error("daytona provider requires backend daytona driver"); - } - - throw new Error( - "daytona provider is not configured: missing apiKey. " + - "Set HF_DAYTONA_API_KEY (or DAYTONA_API_KEY). " + - "Optionally set HF_DAYTONA_ENDPOINT (or DAYTONA_ENDPOINT).", - ); - } - - private async ensureStarted(sandboxId: string): Promise { - const client = this.requireClient(); - - const sandbox = await this.withTimeout("get sandbox", () => client.getSandbox(sandboxId)); - const state = String(sandbox.state ?? "unknown").toLowerCase(); - if (state === "started" || state === "running") { - return; - } - - // If the sandbox is stopped (or any non-started state), try starting it. - // Daytona preserves the filesystem across stop/start, which is what we rely on for faster git setup. 
- await this.withTimeout("start sandbox", () => client.startSandbox(sandboxId, 60)); - } - - private buildEnvVars(): Record { - const envVars: Record = {}; - - for (const key of DaytonaProvider.PASSTHROUGH_ENV_KEYS) { - const value = process.env[key]; - if (value) { - envVars[key] = value; - } - } - - return envVars; - } - - private buildShellExports(extra: Record = {}): string[] { - const merged = { - ...this.buildEnvVars(), - ...extra, - }; - - return Object.entries(merged).map(([key, value]) => { - const encoded = Buffer.from(value, "utf8").toString("base64"); - return `export ${key}="$(printf %s ${JSON.stringify(encoded)} | base64 -d)"`; - }); - } - - private buildSnapshotImage() { - // Use Daytona image build + snapshot caching so base tooling (git + sandbox-agent) - // is prepared once and reused for subsequent sandboxes. - return Image.base(this.config.image).runCommands( - "apt-get update && apt-get install -y curl ca-certificates git openssh-client nodejs npm", - `curl -fsSL https://releases.rivet.dev/sandbox-agent/${DaytonaProvider.SANDBOX_AGENT_VERSION}/install.sh | sh`, - `bash -lc 'export PATH="$HOME/.local/bin:$PATH"; sandbox-agent install-agent codex || true; sandbox-agent install-agent claude || true'`, - ); - } - - private async runCheckedCommand(sandboxId: string, command: string, label: string): Promise { - const client = this.requireClient(); - - const result = await this.withTimeout(`execute command (${label})`, () => client.executeCommand(sandboxId, command)); - if (result.exitCode !== 0) { - throw new Error(`daytona ${label} failed (${result.exitCode}): ${result.result}`); - } - } - - id() { - return "daytona" as const; - } - - capabilities(): ProviderCapabilities { - return { - remote: true, - supportsSessionReuse: true, - }; - } - - async validateConfig(input: unknown): Promise> { - return (input as Record | undefined) ?? 
{}; - } - - async createSandbox(req: CreateSandboxRequest): Promise { - const client = this.requireClient(); - const emitDebug = req.debug ?? (() => {}); - - emitDebug("daytona.createSandbox.start", { - workspaceId: req.workspaceId, - repoId: req.repoId, - taskId: req.taskId, - branchName: req.branchName, - }); - - const createStartedAt = Date.now(); - const sandbox = await this.withTimeout("create sandbox", () => - client.createSandbox({ - image: this.buildSnapshotImage(), - envVars: this.buildEnvVars(), - labels: { - "foundry.workspace": req.workspaceId, - "foundry.task": req.taskId, - "foundry.repo_id": req.repoId, - "foundry.repo_remote": req.repoRemote, - "foundry.branch": req.branchName, - }, - autoStopInterval: this.config.autoStopInterval, - }), - ); - emitDebug("daytona.createSandbox.created", { - sandboxId: sandbox.id, - durationMs: Date.now() - createStartedAt, - state: sandbox.state ?? null, - }); - - const repoDir = `/home/daytona/foundry/${req.workspaceId}/${req.repoId}/${req.taskId}/repo`; - - // Prepare a working directory for the agent. This must succeed for the task to work. - const installStartedAt = Date.now(); - await this.runCheckedCommand( - sandbox.id, - [ - "bash", - "-lc", - `'set -euo pipefail; export DEBIAN_FRONTEND=noninteractive; if command -v git >/dev/null 2>&1 && command -v npx >/dev/null 2>&1; then exit 0; fi; apt-get update -y >/tmp/apt-update.log 2>&1; apt-get install -y git openssh-client ca-certificates nodejs npm >/tmp/apt-install.log 2>&1'`, - ].join(" "), - "install git + node toolchain", - ); - emitDebug("daytona.createSandbox.install_toolchain.done", { - sandboxId: sandbox.id, - durationMs: Date.now() - installStartedAt, - }); - - const cloneStartedAt = Date.now(); - await this.runCheckedCommand( - sandbox.id, - [ - "bash", - "-lc", - `${JSON.stringify( - [ - "set -euo pipefail", - "export GIT_TERMINAL_PROMPT=0", - "export GIT_ASKPASS=/bin/echo", - `TOKEN=${JSON.stringify(req.githubToken ?? 
"")}`, - 'if [ -z "$TOKEN" ]; then TOKEN="${GH_TOKEN:-${GITHUB_TOKEN:-}}"; fi', - "GIT_AUTH_ARGS=()", - `if [ -n "$TOKEN" ] && [[ "${req.repoRemote}" == https://github.com/* ]]; then AUTH_HEADER="$(printf 'x-access-token:%s' "$TOKEN" | base64 | tr -d '\\n')"; GIT_AUTH_ARGS=(-c "http.https://github.com/.extraheader=AUTHORIZATION: basic $AUTH_HEADER"); fi`, - `rm -rf "${repoDir}"`, - `mkdir -p "${repoDir}"`, - `rmdir "${repoDir}"`, - // Foundry test repos can be private, so clone/fetch must use the sandbox's GitHub token when available. - `git "\${GIT_AUTH_ARGS[@]}" clone "${req.repoRemote}" "${repoDir}"`, - `cd "${repoDir}"`, - `if [ -n "$TOKEN" ] && [[ "${req.repoRemote}" == https://github.com/* ]]; then git config --local credential.helper ""; git config --local http.https://github.com/.extraheader "AUTHORIZATION: basic $AUTH_HEADER"; fi`, - `git "\${GIT_AUTH_ARGS[@]}" fetch origin --prune`, - // The task branch may not exist remotely yet (agent push creates it). Base off current branch (default branch). - `if git show-ref --verify --quiet "refs/remotes/origin/${req.branchName}"; then git checkout -B "${req.branchName}" "origin/${req.branchName}"; else git checkout -B "${req.branchName}" "$(git branch --show-current 2>/dev/null || echo main)"; fi`, - `git config user.email "foundry@local" >/dev/null 2>&1 || true`, - `git config user.name "Foundry" >/dev/null 2>&1 || true`, - ].join("; "), - )}`, - ].join(" "), - "clone repo", - ); - emitDebug("daytona.createSandbox.clone_repo.done", { - sandboxId: sandbox.id, - durationMs: Date.now() - cloneStartedAt, - }); - - return { - sandboxId: sandbox.id, - switchTarget: `daytona://${sandbox.id}`, - metadata: { - endpoint: this.config.endpoint ?? null, - image: this.config.image, - snapshot: sandbox.snapshot ?? null, - remote: true, - state: sandbox.state ?? 
null, - cwd: repoDir, - }, - }; - } - - async resumeSandbox(req: ResumeSandboxRequest): Promise { - const client = this.requireClient(); - - await this.ensureStarted(req.sandboxId); - - // Reconstruct cwd from sandbox labels written at create time. - const info = await this.withTimeout("resume get sandbox", () => client.getSandbox(req.sandboxId)); - const labels = info.labels ?? {}; - const workspaceId = labels["foundry.workspace"] ?? req.workspaceId; - const repoId = labels["foundry.repo_id"] ?? ""; - const taskId = labels["foundry.task"] ?? ""; - const cwd = repoId && taskId ? `/home/daytona/foundry/${workspaceId}/${repoId}/${taskId}/repo` : null; - - return { - sandboxId: req.sandboxId, - switchTarget: `daytona://${req.sandboxId}`, - metadata: { - resumed: true, - endpoint: this.config.endpoint ?? null, - ...(cwd ? { cwd } : {}), - }, - }; - } - - async destroySandbox(_req: DestroySandboxRequest): Promise { - const client = this.getClient(); - if (!client) { - return; - } - - try { - await this.withTimeout("delete sandbox", () => client.deleteSandbox(_req.sandboxId)); - } catch (error) { - // Ignore not-found style cleanup failures. - const text = error instanceof Error ? error.message : String(error); - if (text.toLowerCase().includes("not found")) { - return; - } - throw error; - } - } - - async releaseSandbox(req: ReleaseSandboxRequest): Promise { - const client = this.getClient(); - if (!client) { - return; - } - - try { - await this.withTimeout("stop sandbox", () => client.stopSandbox(req.sandboxId, 60)); - } catch (error) { - const text = error instanceof Error ? 
error.message : String(error); - if (text.toLowerCase().includes("not found")) { - return; - } - throw error; - } - } - - async ensureSandboxAgent(req: EnsureAgentRequest): Promise { - const client = this.requireClient(); - const acpRequestTimeoutMs = this.getAcpRequestTimeoutMs(); - const sandboxAgentExports = this.buildShellExports({ - SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS: acpRequestTimeoutMs.toString(), - }); - - await this.ensureStarted(req.sandboxId); - - await this.runCheckedCommand( - req.sandboxId, - [ - "bash", - "-lc", - `'set -euo pipefail; if command -v curl >/dev/null 2>&1; then exit 0; fi; export DEBIAN_FRONTEND=noninteractive; apt-get update -y >/tmp/apt-update.log 2>&1; apt-get install -y curl ca-certificates >/tmp/apt-install.log 2>&1'`, - ].join(" "), - "install curl", - ); - - await this.runCheckedCommand( - req.sandboxId, - [ - "bash", - "-lc", - `'set -euo pipefail; if command -v npx >/dev/null 2>&1; then exit 0; fi; export DEBIAN_FRONTEND=noninteractive; apt-get update -y >/tmp/apt-update.log 2>&1; apt-get install -y nodejs npm >/tmp/apt-install.log 2>&1'`, - ].join(" "), - "install node toolchain", - ); - - await this.runCheckedCommand( - req.sandboxId, - [ - "bash", - "-lc", - `'set -euo pipefail; export PATH="$HOME/.local/bin:$PATH"; if sandbox-agent --version 2>/dev/null | grep -q "${DaytonaProvider.SANDBOX_AGENT_VERSION}"; then exit 0; fi; curl -fsSL https://releases.rivet.dev/sandbox-agent/${DaytonaProvider.SANDBOX_AGENT_VERSION}/install.sh | sh'`, - ].join(" "), - "install sandbox-agent", - ); - - for (const agentId of DaytonaProvider.AGENT_IDS) { - try { - await this.runCheckedCommand( - req.sandboxId, - ["bash", "-lc", `'export PATH="$HOME/.local/bin:$PATH"; sandbox-agent install-agent ${agentId}'`].join(" "), - `install agent ${agentId}`, - ); - } catch { - // Some sandbox-agent builds may not ship every agent plugin; treat this as best-effort. 
- } - } - - await this.runCheckedCommand( - req.sandboxId, - [ - "bash", - "-lc", - JSON.stringify( - [ - "set -euo pipefail", - 'export PATH="$HOME/.local/bin:$PATH"', - ...sandboxAgentExports, - "command -v sandbox-agent >/dev/null 2>&1", - "if pgrep -x sandbox-agent >/dev/null; then exit 0; fi", - 'rm -f "$HOME/.codex/auth.json" "$HOME/.config/codex/auth.json"', - `nohup sandbox-agent server --no-token --host 0.0.0.0 --port ${DaytonaProvider.SANDBOX_AGENT_PORT} >/tmp/sandbox-agent.log 2>&1 &`, - ].join("; "), - ), - ].join(" "), - "start sandbox-agent", - ); - - await this.runCheckedCommand( - req.sandboxId, - [ - "bash", - "-lc", - `'for i in $(seq 1 45); do curl -fsS "http://127.0.0.1:${DaytonaProvider.SANDBOX_AGENT_PORT}/v1/health" >/dev/null && exit 0; sleep 1; done; echo "sandbox-agent failed to become healthy" >&2; tail -n 80 /tmp/sandbox-agent.log >&2; exit 1'`, - ].join(" "), - "wait for sandbox-agent health", - ); - - const preview = await this.withTimeout("get preview endpoint", () => client.getPreviewEndpoint(req.sandboxId, DaytonaProvider.SANDBOX_AGENT_PORT)); - - return { - endpoint: preview.url, - token: preview.token, - }; - } - - async health(req: SandboxHealthRequest): Promise { - const client = this.getClient(); - if (!client) { - return { - status: "degraded", - message: "daytona driver not configured", - }; - } - - try { - const sandbox = await this.withTimeout("health get sandbox", () => client.getSandbox(req.sandboxId)); - const state = String(sandbox.state ?? "unknown"); - if (state.toLowerCase().includes("error")) { - return { - status: "down", - message: `daytona sandbox in error state: ${state}`, - }; - } - return { - status: "healthy", - message: `daytona sandbox state: ${state}`, - }; - } catch (error) { - const text = error instanceof Error ? 
error.message : String(error); - return { - status: "down", - message: `daytona sandbox health check failed: ${text}`, - }; - } - } - - async attachTarget(req: AttachTargetRequest): Promise { - return { - target: `daytona://${req.sandboxId}`, - }; - } - - async executeCommand(req: ExecuteSandboxCommandRequest): Promise { - const client = this.requireClient(); - await this.ensureStarted(req.sandboxId); - return await this.withTimeout(`execute command (${req.label ?? "command"})`, () => client.executeCommand(req.sandboxId, req.command)); - } -} diff --git a/foundry/packages/backend/src/providers/index.ts b/foundry/packages/backend/src/providers/index.ts deleted file mode 100644 index 1f3af94..0000000 --- a/foundry/packages/backend/src/providers/index.ts +++ /dev/null @@ -1,77 +0,0 @@ -import type { ProviderId } from "@sandbox-agent/foundry-shared"; -import type { AppConfig } from "@sandbox-agent/foundry-shared"; -import type { BackendDriver } from "../driver.js"; -import { DaytonaProvider } from "./daytona/index.js"; -import { LocalProvider } from "./local/index.js"; -import type { SandboxProvider } from "./provider-api/index.js"; - -export interface ProviderRegistry { - get(providerId: ProviderId): SandboxProvider; - availableProviderIds(): ProviderId[]; - defaultProviderId(): ProviderId; -} - -export function createProviderRegistry(config: AppConfig, driver?: BackendDriver): ProviderRegistry { - const gitDriver = driver?.git ?? 
{ - validateRemote: async () => { - throw new Error("local provider requires backend git driver"); - }, - ensureCloned: async () => { - throw new Error("local provider requires backend git driver"); - }, - fetch: async () => { - throw new Error("local provider requires backend git driver"); - }, - listRemoteBranches: async () => { - throw new Error("local provider requires backend git driver"); - }, - remoteDefaultBaseRef: async () => { - throw new Error("local provider requires backend git driver"); - }, - revParse: async () => { - throw new Error("local provider requires backend git driver"); - }, - ensureRemoteBranch: async () => { - throw new Error("local provider requires backend git driver"); - }, - diffStatForBranch: async () => { - throw new Error("local provider requires backend git driver"); - }, - conflictsWithMain: async () => { - throw new Error("local provider requires backend git driver"); - }, - }; - - const local = new LocalProvider( - { - rootDir: config.providers.local.rootDir, - sandboxAgentPort: config.providers.local.sandboxAgentPort, - }, - gitDriver, - ); - const daytona = new DaytonaProvider( - { - endpoint: config.providers.daytona.endpoint, - apiKey: config.providers.daytona.apiKey, - image: config.providers.daytona.image, - }, - driver?.daytona, - ); - - const map: Record = { - local, - daytona, - }; - - return { - get(providerId: ProviderId): SandboxProvider { - return map[providerId]; - }, - availableProviderIds(): ProviderId[] { - return Object.keys(map) as ProviderId[]; - }, - defaultProviderId(): ProviderId { - return config.providers.daytona.apiKey ? 
"daytona" : "local"; - }, - }; -} diff --git a/foundry/packages/backend/src/providers/local/index.ts b/foundry/packages/backend/src/providers/local/index.ts deleted file mode 100644 index f18313a..0000000 --- a/foundry/packages/backend/src/providers/local/index.ts +++ /dev/null @@ -1,235 +0,0 @@ -import { randomUUID } from "node:crypto"; -import { execFile } from "node:child_process"; -import { existsSync, mkdirSync, rmSync } from "node:fs"; -import { homedir } from "node:os"; -import { dirname, resolve } from "node:path"; -import { promisify } from "node:util"; -import { InMemorySessionPersistDriver, SandboxAgent } from "sandbox-agent"; -import type { - AgentEndpoint, - AttachTarget, - AttachTargetRequest, - CreateSandboxRequest, - DestroySandboxRequest, - EnsureAgentRequest, - ExecuteSandboxCommandRequest, - ExecuteSandboxCommandResult, - ProviderCapabilities, - ReleaseSandboxRequest, - ResumeSandboxRequest, - SandboxHandle, - SandboxHealth, - SandboxHealthRequest, - SandboxProvider, -} from "../provider-api/index.js"; -import type { GitDriver } from "../../driver.js"; - -const execFileAsync = promisify(execFile); -const DEFAULT_SANDBOX_AGENT_PORT = 2468; - -export interface LocalProviderConfig { - rootDir?: string; - sandboxAgentPort?: number; -} - -function expandHome(value: string): string { - if (value === "~") { - return homedir(); - } - if (value.startsWith("~/")) { - return resolve(homedir(), value.slice(2)); - } - return value; -} - -async function branchExists(repoPath: string, branchName: string): Promise { - try { - await execFileAsync("git", ["-C", repoPath, "show-ref", "--verify", `refs/remotes/origin/${branchName}`]); - return true; - } catch { - return false; - } -} - -async function checkoutBranch(repoPath: string, branchName: string, git: GitDriver): Promise { - await git.fetch(repoPath); - const targetRef = (await branchExists(repoPath, branchName)) ? 
`origin/${branchName}` : await git.remoteDefaultBaseRef(repoPath); - await execFileAsync("git", ["-C", repoPath, "checkout", "-B", branchName, targetRef], { - env: process.env as Record, - }); -} - -export class LocalProvider implements SandboxProvider { - private sdkPromise: Promise | null = null; - - constructor( - private readonly config: LocalProviderConfig, - private readonly git: GitDriver, - ) {} - - private rootDir(): string { - return expandHome(this.config.rootDir?.trim() || "~/.local/share/foundry/local-sandboxes"); - } - - private sandboxRoot(workspaceId: string, sandboxId: string): string { - return resolve(this.rootDir(), workspaceId, sandboxId); - } - - private repoDir(workspaceId: string, sandboxId: string): string { - return resolve(this.sandboxRoot(workspaceId, sandboxId), "repo"); - } - - private sandboxHandle(workspaceId: string, sandboxId: string, repoDir: string): SandboxHandle { - return { - sandboxId, - switchTarget: `local://${repoDir}`, - metadata: { - cwd: repoDir, - repoDir, - }, - }; - } - - private async sandboxAgent(): Promise { - if (!this.sdkPromise) { - const sandboxAgentHome = resolve(this.rootDir(), ".sandbox-agent-home"); - mkdirSync(sandboxAgentHome, { recursive: true }); - const spawnHome = process.env.HOME?.trim() || sandboxAgentHome; - this.sdkPromise = SandboxAgent.start({ - persist: new InMemorySessionPersistDriver(), - spawn: { - enabled: true, - host: "127.0.0.1", - port: this.config.sandboxAgentPort ?? DEFAULT_SANDBOX_AGENT_PORT, - log: "silent", - env: { - HOME: spawnHome, - ...(process.env.ANTHROPIC_API_KEY ? { ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY } : {}), - ...(process.env.CLAUDE_API_KEY ? { CLAUDE_API_KEY: process.env.CLAUDE_API_KEY } : {}), - ...(process.env.OPENAI_API_KEY ? { OPENAI_API_KEY: process.env.OPENAI_API_KEY } : {}), - ...(process.env.CODEX_API_KEY ? { CODEX_API_KEY: process.env.CODEX_API_KEY } : {}), - ...(process.env.GH_TOKEN ? 
{ GH_TOKEN: process.env.GH_TOKEN } : {}), - ...(process.env.GITHUB_TOKEN ? { GITHUB_TOKEN: process.env.GITHUB_TOKEN } : {}), - }, - }, - }).then(async (sdk) => { - for (const agentName of ["claude", "codex"] as const) { - try { - const agent = await sdk.getAgent(agentName, { config: true }); - if (!agent.installed) { - await sdk.installAgent(agentName); - } - } catch { - // The local provider can still function if the agent is already available - // through the user's PATH or the install check is unsupported. - } - } - return sdk; - }); - } - return this.sdkPromise; - } - - id() { - return "local" as const; - } - - capabilities(): ProviderCapabilities { - return { - remote: false, - supportsSessionReuse: true, - }; - } - - async validateConfig(input: unknown): Promise> { - return (input as Record | undefined) ?? {}; - } - - async createSandbox(req: CreateSandboxRequest): Promise { - const sandboxId = req.taskId || `local-${randomUUID()}`; - const repoDir = this.repoDir(req.workspaceId, sandboxId); - mkdirSync(dirname(repoDir), { recursive: true }); - await this.git.ensureCloned(req.repoRemote, repoDir, { githubToken: req.githubToken }); - await checkoutBranch(repoDir, req.branchName, this.git); - return this.sandboxHandle(req.workspaceId, sandboxId, repoDir); - } - - async resumeSandbox(req: ResumeSandboxRequest): Promise { - const repoDir = this.repoDir(req.workspaceId, req.sandboxId); - if (!existsSync(repoDir)) { - throw new Error(`local sandbox repo is missing: ${repoDir}`); - } - return this.sandboxHandle(req.workspaceId, req.sandboxId, repoDir); - } - - async destroySandbox(req: DestroySandboxRequest): Promise { - rmSync(this.sandboxRoot(req.workspaceId, req.sandboxId), { - force: true, - recursive: true, - }); - } - - async releaseSandbox(_req: ReleaseSandboxRequest): Promise { - // Local sandboxes stay warm on disk to preserve session state and repo context. 
- } - - async ensureSandboxAgent(_req: EnsureAgentRequest): Promise { - const sdk = await this.sandboxAgent(); - const { baseUrl, token } = sdk as unknown as { - baseUrl?: string; - token?: string; - }; - if (!baseUrl) { - throw new Error("sandbox-agent baseUrl is unavailable"); - } - return token ? { endpoint: baseUrl, token } : { endpoint: baseUrl }; - } - - async health(req: SandboxHealthRequest): Promise { - try { - const repoDir = this.repoDir(req.workspaceId, req.sandboxId); - if (!existsSync(repoDir)) { - return { - status: "down", - message: "local sandbox repo is missing", - }; - } - const sdk = await this.sandboxAgent(); - const health = await sdk.getHealth(); - return { - status: health.status === "ok" ? "healthy" : "degraded", - message: health.status, - }; - } catch (error) { - return { - status: "down", - message: error instanceof Error ? error.message : String(error), - }; - } - } - - async attachTarget(req: AttachTargetRequest): Promise { - return { target: this.repoDir(req.workspaceId, req.sandboxId) }; - } - - async executeCommand(req: ExecuteSandboxCommandRequest): Promise { - const cwd = this.repoDir(req.workspaceId, req.sandboxId); - try { - const { stdout, stderr } = await execFileAsync("bash", ["-lc", req.command], { - cwd, - env: process.env as Record, - maxBuffer: 1024 * 1024 * 16, - }); - return { - exitCode: 0, - result: [stdout, stderr].filter(Boolean).join(""), - }; - } catch (error) { - const detail = error as { stdout?: string; stderr?: string; code?: number }; - return { - exitCode: typeof detail.code === "number" ? detail.code : 1, - result: [detail.stdout, detail.stderr, error instanceof Error ? 
error.message : String(error)].filter(Boolean).join(""), - }; - } - } -} diff --git a/foundry/packages/backend/src/providers/provider-api/index.ts b/foundry/packages/backend/src/providers/provider-api/index.ts deleted file mode 100644 index a15109d..0000000 --- a/foundry/packages/backend/src/providers/provider-api/index.ts +++ /dev/null @@ -1,100 +0,0 @@ -import type { ProviderId } from "@sandbox-agent/foundry-shared"; - -export interface ProviderCapabilities { - remote: boolean; - supportsSessionReuse: boolean; -} - -export interface CreateSandboxRequest { - workspaceId: string; - repoId: string; - repoRemote: string; - branchName: string; - taskId: string; - githubToken?: string | null; - debug?: (message: string, context?: Record) => void; - options?: Record; -} - -export interface ResumeSandboxRequest { - workspaceId: string; - sandboxId: string; - options?: Record; -} - -export interface DestroySandboxRequest { - workspaceId: string; - sandboxId: string; -} - -export interface ReleaseSandboxRequest { - workspaceId: string; - sandboxId: string; -} - -export interface EnsureAgentRequest { - workspaceId: string; - sandboxId: string; -} - -export interface SandboxHealthRequest { - workspaceId: string; - sandboxId: string; -} - -export interface AttachTargetRequest { - workspaceId: string; - sandboxId: string; -} - -export interface ExecuteSandboxCommandRequest { - workspaceId: string; - sandboxId: string; - command: string; - label?: string; -} - -export interface SandboxHandle { - sandboxId: string; - switchTarget: string; - metadata: Record; -} - -export interface AgentEndpoint { - endpoint: string; - token?: string; -} - -export interface SandboxHealth { - status: "healthy" | "degraded" | "down"; - message: string; -} - -export interface AttachTarget { - target: string; -} - -export interface ExecuteSandboxCommandResult { - exitCode: number; - result: string; -} - -export interface SandboxProvider { - id(): ProviderId; - capabilities(): ProviderCapabilities; - 
validateConfig(input: unknown): Promise>; - - createSandbox(req: CreateSandboxRequest): Promise; - resumeSandbox(req: ResumeSandboxRequest): Promise; - destroySandbox(req: DestroySandboxRequest): Promise; - /** - * Release resources for a sandbox without deleting its filesystem/state. - * For remote providers, this typically maps to "stop"/"suspend". - */ - releaseSandbox(req: ReleaseSandboxRequest): Promise; - - ensureSandboxAgent(req: EnsureAgentRequest): Promise; - health(req: SandboxHealthRequest): Promise; - attachTarget(req: AttachTargetRequest): Promise; - executeCommand(req: ExecuteSandboxCommandRequest): Promise; -} diff --git a/foundry/packages/backend/src/sandbox-config.ts b/foundry/packages/backend/src/sandbox-config.ts new file mode 100644 index 0000000..4fa388f --- /dev/null +++ b/foundry/packages/backend/src/sandbox-config.ts @@ -0,0 +1,39 @@ +import type { AppConfig, ProviderId } from "@sandbox-agent/foundry-shared"; + +function hasE2BApiKey(config: AppConfig): boolean { + return Boolean(config.providers.e2b.apiKey?.trim()); +} + +function forcedSandboxProviderId(): ProviderId | null { + const raw = process.env.FOUNDRY_SANDBOX_PROVIDER?.trim() ?? process.env.HF_SANDBOX_PROVIDER?.trim() ?? null; + if (raw === "local" || raw === "e2b") { + return raw; + } + return null; +} + +export function defaultSandboxProviderId(config: AppConfig): ProviderId { + const forced = forcedSandboxProviderId(); + if (forced === "local") { + return "local"; + } + if (forced === "e2b") { + if (!hasE2BApiKey(config)) { + throw new Error("FOUNDRY_SANDBOX_PROVIDER=e2b requires E2B_API_KEY to be configured."); + } + return "e2b"; + } + return hasE2BApiKey(config) ? "e2b" : "local"; +} + +export function availableSandboxProviderIds(config: AppConfig): ProviderId[] { + return hasE2BApiKey(config) ? 
["e2b", "local"] : ["local"]; +} + +export function resolveSandboxProviderId(config: AppConfig, requested?: ProviderId | null): ProviderId { + if (requested === "e2b" && !hasE2BApiKey(config)) { + throw new Error("E2B provider is not configured. Set E2B_API_KEY before selecting the e2b provider."); + } + + return requested ?? defaultSandboxProviderId(config); +} diff --git a/foundry/packages/backend/test/daytona-provider.test.ts b/foundry/packages/backend/test/daytona-provider.test.ts deleted file mode 100644 index 363b405..0000000 --- a/foundry/packages/backend/test/daytona-provider.test.ts +++ /dev/null @@ -1,184 +0,0 @@ -import { describe, expect, it } from "vitest"; -import type { DaytonaClientLike, DaytonaDriver } from "../src/driver.js"; -import type { DaytonaCreateSandboxOptions } from "../src/integrations/daytona/client.js"; -import { DaytonaProvider } from "../src/providers/daytona/index.js"; - -class RecordingDaytonaClient implements DaytonaClientLike { - createSandboxCalls: DaytonaCreateSandboxOptions[] = []; - executedCommands: string[] = []; - - async createSandbox(options: DaytonaCreateSandboxOptions) { - this.createSandboxCalls.push(options); - return { - id: "sandbox-1", - state: "started", - snapshot: "snapshot-foundry", - labels: {}, - }; - } - - async getSandbox(sandboxId: string) { - return { - id: sandboxId, - state: "started", - snapshot: "snapshot-foundry", - labels: {}, - }; - } - - async startSandbox(_sandboxId: string, _timeoutSeconds?: number) {} - - async stopSandbox(_sandboxId: string, _timeoutSeconds?: number) {} - - async deleteSandbox(_sandboxId: string) {} - - async executeCommand(_sandboxId: string, command: string) { - this.executedCommands.push(command); - return { exitCode: 0, result: "" }; - } - - async getPreviewEndpoint(sandboxId: string, port: number) { - return { - url: `https://preview.example/sandbox/${sandboxId}/port/${port}`, - token: "preview-token", - }; - } -} - -function createProviderWithClient(client: 
DaytonaClientLike): DaytonaProvider { - const daytonaDriver: DaytonaDriver = { - createClient: () => client, - }; - - return new DaytonaProvider( - { - apiKey: "test-key", - image: "ubuntu:24.04", - }, - daytonaDriver, - ); -} - -describe("daytona provider snapshot image behavior", () => { - it("creates sandboxes using a snapshot-capable image recipe", async () => { - const client = new RecordingDaytonaClient(); - const provider = createProviderWithClient(client); - - const handle = await provider.createSandbox({ - workspaceId: "default", - repoId: "repo-1", - repoRemote: "https://github.com/acme/repo.git", - branchName: "feature/test", - taskId: "task-1", - }); - - expect(client.createSandboxCalls).toHaveLength(1); - const createCall = client.createSandboxCalls[0]; - if (!createCall) { - throw new Error("expected create sandbox call"); - } - - expect(typeof createCall.image).not.toBe("string"); - if (typeof createCall.image === "string") { - throw new Error("expected daytona image recipe object"); - } - - const dockerfile = createCall.image.dockerfile; - expect(dockerfile).toContain("apt-get install -y curl ca-certificates git openssh-client nodejs npm"); - expect(dockerfile).toContain("sandbox-agent/0.3.0/install.sh"); - const installAgentLines = dockerfile.match(/sandbox-agent install-agent [a-z0-9-]+/gi) ?? 
[]; - expect(installAgentLines.length).toBeGreaterThanOrEqual(2); - const commands = client.executedCommands.join("\n"); - expect(commands).toContain("GIT_TERMINAL_PROMPT=0"); - expect(commands).toContain("GIT_ASKPASS=/bin/echo"); - - expect(handle.metadata.snapshot).toBe("snapshot-foundry"); - expect(handle.metadata.image).toBe("ubuntu:24.04"); - expect(handle.metadata.cwd).toBe("/home/daytona/foundry/default/repo-1/task-1/repo"); - expect(client.executedCommands.length).toBeGreaterThan(0); - }); - - it("starts sandbox-agent with ACP timeout env override", async () => { - const previous = process.env.HF_SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS; - process.env.HF_SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS = "240000"; - - try { - const client = new RecordingDaytonaClient(); - const provider = createProviderWithClient(client); - - await provider.ensureSandboxAgent({ - workspaceId: "default", - sandboxId: "sandbox-1", - }); - - const startCommand = client.executedCommands.find((command) => - command.includes("nohup env SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS=240000 sandbox-agent server"), - ); - - const joined = client.executedCommands.join("\n"); - expect(joined).toContain("sandbox-agent/0.3.0/install.sh"); - expect(joined).toContain("SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS=240000"); - expect(joined).toContain("apt-get install -y nodejs npm"); - expect(joined).toContain("sandbox-agent server --no-token --host 0.0.0.0 --port 2468"); - expect(startCommand).toBeTruthy(); - } finally { - if (previous === undefined) { - delete process.env.HF_SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS; - } else { - process.env.HF_SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS = previous; - } - } - }); - - it("fails with explicit timeout when daytona createSandbox hangs", async () => { - const previous = process.env.HF_DAYTONA_REQUEST_TIMEOUT_MS; - process.env.HF_DAYTONA_REQUEST_TIMEOUT_MS = "120"; - - const hangingClient: DaytonaClientLike = { - createSandbox: async () => await new Promise(() => {}), - getSandbox: async 
(sandboxId) => ({ id: sandboxId, state: "started" }), - startSandbox: async () => {}, - stopSandbox: async () => {}, - deleteSandbox: async () => {}, - executeCommand: async () => ({ exitCode: 0, result: "" }), - getPreviewEndpoint: async (sandboxId, port) => ({ - url: `https://preview.example/sandbox/${sandboxId}/port/${port}`, - token: "preview-token", - }), - }; - - try { - const provider = createProviderWithClient(hangingClient); - await expect( - provider.createSandbox({ - workspaceId: "default", - repoId: "repo-1", - repoRemote: "https://github.com/acme/repo.git", - branchName: "feature/test", - taskId: "task-timeout", - }), - ).rejects.toThrow("daytona create sandbox timed out after 120ms"); - } finally { - if (previous === undefined) { - delete process.env.HF_DAYTONA_REQUEST_TIMEOUT_MS; - } else { - process.env.HF_DAYTONA_REQUEST_TIMEOUT_MS = previous; - } - } - }); - - it("executes backend-managed sandbox commands through provider API", async () => { - const client = new RecordingDaytonaClient(); - const provider = createProviderWithClient(client); - - const result = await provider.executeCommand({ - workspaceId: "default", - sandboxId: "sandbox-1", - command: "echo backend-push", - label: "manual push", - }); - - expect(result.exitCode).toBe(0); - expect(client.executedCommands).toContain("echo backend-push"); - }); -}); diff --git a/foundry/packages/backend/test/helpers/test-context.ts b/foundry/packages/backend/test/helpers/test-context.ts index 07107ac..31b1965 100644 --- a/foundry/packages/backend/test/helpers/test-context.ts +++ b/foundry/packages/backend/test/helpers/test-context.ts @@ -3,7 +3,6 @@ import { join } from "node:path"; import { ConfigSchema, type AppConfig } from "@sandbox-agent/foundry-shared"; import type { BackendDriver } from "../../src/driver.js"; import { initActorRuntimeContext } from "../../src/actors/context.js"; -import { createProviderRegistry } from "../../src/providers/index.js"; import { createDefaultAppShellServices } 
from "../../src/services/app-shell-runtime.js"; export function createTestConfig(overrides?: Partial): AppConfig { @@ -21,7 +20,8 @@ export function createTestConfig(overrides?: Partial): AppConfig { backup_retention_days: 7, }, providers: { - daytona: { image: "ubuntu:24.04" }, + local: {}, + e2b: {}, }, ...overrides, }); @@ -29,7 +29,6 @@ export function createTestConfig(overrides?: Partial): AppConfig { export function createTestRuntimeContext(driver: BackendDriver, configOverrides?: Partial): { config: AppConfig } { const config = createTestConfig(configOverrides); - const providers = createProviderRegistry(config, driver); - initActorRuntimeContext(config, providers, undefined, driver, createDefaultAppShellServices()); + initActorRuntimeContext(config, undefined, driver, createDefaultAppShellServices()); return { config }; } diff --git a/foundry/packages/backend/test/helpers/test-driver.ts b/foundry/packages/backend/test/helpers/test-driver.ts index c5b8bc4..505bcc4 100644 --- a/foundry/packages/backend/test/helpers/test-driver.ts +++ b/foundry/packages/backend/test/helpers/test-driver.ts @@ -1,23 +1,10 @@ -import type { - BackendDriver, - DaytonaClientLike, - DaytonaDriver, - GitDriver, - GithubDriver, - StackDriver, - SandboxAgentDriver, - SandboxAgentClientLike, - TmuxDriver, -} from "../../src/driver.js"; -import type { ListEventsRequest, ListPage, ListPageRequest, ProcessInfo, ProcessLogsResponse, SessionEvent, SessionRecord } from "sandbox-agent"; +import type { BackendDriver, GitDriver, GithubDriver, StackDriver, TmuxDriver } from "../../src/driver.js"; export function createTestDriver(overrides?: Partial): BackendDriver { return { git: overrides?.git ?? createTestGitDriver(), stack: overrides?.stack ?? createTestStackDriver(), github: overrides?.github ?? createTestGithubDriver(), - sandboxAgent: overrides?.sandboxAgent ?? createTestSandboxAgentDriver(), - daytona: overrides?.daytona ?? createTestDaytonaDriver(), tmux: overrides?.tmux ?? 
createTestTmuxDriver(), }; } @@ -63,79 +50,6 @@ export function createTestGithubDriver(overrides?: Partial): Githu }; } -export function createTestSandboxAgentDriver(overrides?: Partial): SandboxAgentDriver { - return { - createClient: (_opts) => createTestSandboxAgentClient(), - ...overrides, - }; -} - -export function createTestSandboxAgentClient(overrides?: Partial): SandboxAgentClientLike { - const defaultProcess: ProcessInfo = { - id: "process-1", - command: "bash", - args: ["-lc", "echo test"], - createdAtMs: Date.now(), - cwd: "/workspace", - exitCode: null, - exitedAtMs: null, - interactive: true, - pid: 123, - status: "running", - tty: true, - }; - const defaultLogs: ProcessLogsResponse = { - processId: defaultProcess.id, - stream: "combined", - entries: [], - }; - return { - createSession: async (_prompt) => ({ id: "test-session-1", status: "running" }), - sessionStatus: async (sessionId) => ({ id: sessionId, status: "running" }), - listSessions: async (_request?: ListPageRequest): Promise> => ({ - items: [], - nextCursor: undefined, - }), - listEvents: async (_request: ListEventsRequest): Promise> => ({ - items: [], - nextCursor: undefined, - }), - createProcess: async () => defaultProcess, - listProcesses: async () => ({ processes: [defaultProcess] }), - getProcessLogs: async () => defaultLogs, - stopProcess: async () => ({ ...defaultProcess, status: "exited", exitCode: 0, exitedAtMs: Date.now() }), - killProcess: async () => ({ ...defaultProcess, status: "exited", exitCode: 137, exitedAtMs: Date.now() }), - deleteProcess: async () => {}, - sendPrompt: async (_request) => {}, - cancelSession: async (_sessionId) => {}, - destroySession: async (_sessionId) => {}, - ...overrides, - }; -} - -export function createTestDaytonaDriver(overrides?: Partial): DaytonaDriver { - return { - createClient: (_opts) => createTestDaytonaClient(), - ...overrides, - }; -} - -export function createTestDaytonaClient(overrides?: Partial): DaytonaClientLike { - return { - 
createSandbox: async () => ({ id: "sandbox-test-1", state: "started" }), - getSandbox: async (sandboxId) => ({ id: sandboxId, state: "started" }), - startSandbox: async () => {}, - stopSandbox: async () => {}, - deleteSandbox: async () => {}, - executeCommand: async () => ({ exitCode: 0, result: "" }), - getPreviewEndpoint: async (sandboxId, port) => ({ - url: `https://preview.example/sandbox/${sandboxId}/port/${port}`, - token: "preview-token", - }), - ...overrides, - }; -} - export function createTestTmuxDriver(overrides?: Partial): TmuxDriver { return { setWindowStatus: () => 0, diff --git a/foundry/packages/backend/test/keys.test.ts b/foundry/packages/backend/test/keys.test.ts index b00a54d..d0886d2 100644 --- a/foundry/packages/backend/test/keys.test.ts +++ b/foundry/packages/backend/test/keys.test.ts @@ -1,14 +1,5 @@ import { describe, expect, it } from "vitest"; -import { - taskKey, - taskStatusSyncKey, - historyKey, - projectBranchSyncKey, - projectKey, - projectPrSyncKey, - sandboxInstanceKey, - workspaceKey, -} from "../src/actors/keys.js"; +import { taskKey, historyKey, projectBranchSyncKey, projectKey, projectPrSyncKey, taskSandboxKey, workspaceKey } from "../src/actors/keys.js"; describe("actor keys", () => { it("prefixes every key with workspace namespace", () => { @@ -16,11 +7,10 @@ describe("actor keys", () => { workspaceKey("default"), projectKey("default", "repo"), taskKey("default", "repo", "task"), - sandboxInstanceKey("default", "daytona", "sbx"), + taskSandboxKey("default", "sbx"), historyKey("default", "repo"), projectPrSyncKey("default", "repo"), projectBranchSyncKey("default", "repo"), - taskStatusSyncKey("default", "repo", "task", "sandbox-1", "session-1"), ]; for (const key of keys) { diff --git a/foundry/packages/backend/test/providers.test.ts b/foundry/packages/backend/test/providers.test.ts deleted file mode 100644 index f659e27..0000000 --- a/foundry/packages/backend/test/providers.test.ts +++ /dev/null @@ -1,52 +0,0 @@ -import { 
describe, expect, it } from "vitest"; -import { ConfigSchema, type AppConfig } from "@sandbox-agent/foundry-shared"; -import { createProviderRegistry } from "../src/providers/index.js"; - -function makeConfig(): AppConfig { - return ConfigSchema.parse({ - auto_submit: true, - notify: ["terminal"], - workspace: { default: "default" }, - backend: { - host: "127.0.0.1", - port: 7741, - dbPath: "~/.local/share/foundry/task.db", - opencode_poll_interval: 2, - github_poll_interval: 30, - backup_interval_secs: 3600, - backup_retention_days: 7, - }, - providers: { - local: {}, - daytona: { image: "ubuntu:24.04" }, - }, - }); -} - -describe("provider registry", () => { - it("defaults to local when daytona is not configured", () => { - const registry = createProviderRegistry(makeConfig()); - expect(registry.defaultProviderId()).toBe("local"); - }); - - it("prefers daytona when an api key is configured", () => { - const registry = createProviderRegistry( - ConfigSchema.parse({ - ...makeConfig(), - providers: { - ...makeConfig().providers, - daytona: { - ...makeConfig().providers.daytona, - apiKey: "test-token", - }, - }, - }), - ); - expect(registry.defaultProviderId()).toBe("daytona"); - }); - - it("returns the built-in provider", () => { - const registry = createProviderRegistry(makeConfig()); - expect(registry.get("daytona").id()).toBe("daytona"); - }); -}); diff --git a/foundry/packages/backend/test/sandbox-config.test.ts b/foundry/packages/backend/test/sandbox-config.test.ts new file mode 100644 index 0000000..0b53f03 --- /dev/null +++ b/foundry/packages/backend/test/sandbox-config.test.ts @@ -0,0 +1,50 @@ +import { describe, expect, it } from "vitest"; +import { ConfigSchema, type AppConfig } from "@sandbox-agent/foundry-shared"; +import { availableSandboxProviderIds, defaultSandboxProviderId, resolveSandboxProviderId } from "../src/sandbox-config.js"; + +function makeConfig(overrides?: Partial): AppConfig { + return ConfigSchema.parse({ + auto_submit: true, + notify: 
["terminal"], + workspace: { default: "default" }, + backend: { + host: "127.0.0.1", + port: 7741, + dbPath: "~/.local/share/foundry/task.db", + opencode_poll_interval: 2, + github_poll_interval: 30, + backup_interval_secs: 3600, + backup_retention_days: 7, + }, + providers: { + local: {}, + e2b: {}, + }, + ...overrides, + }); +} + +describe("sandbox config", () => { + it("defaults to local when e2b is not configured", () => { + const config = makeConfig(); + expect(defaultSandboxProviderId(config)).toBe("local"); + expect(availableSandboxProviderIds(config)).toEqual(["local"]); + }); + + it("prefers e2b when an api key is configured", () => { + const config = makeConfig({ + providers: { + local: {}, + e2b: { apiKey: "test-token" }, + }, + }); + expect(defaultSandboxProviderId(config)).toBe("e2b"); + expect(availableSandboxProviderIds(config)).toEqual(["e2b", "local"]); + expect(resolveSandboxProviderId(config, "e2b")).toBe("e2b"); + }); + + it("rejects selecting e2b without an api key", () => { + const config = makeConfig(); + expect(() => resolveSandboxProviderId(config, "e2b")).toThrow("E2B provider is not configured"); + }); +}); diff --git a/foundry/packages/backend/test/sandbox-instance-persist.test.ts b/foundry/packages/backend/test/sandbox-instance-persist.test.ts deleted file mode 100644 index a3692ea..0000000 --- a/foundry/packages/backend/test/sandbox-instance-persist.test.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { describe, expect, it } from "vitest"; -import { resolveEventListOffset } from "../src/actors/sandbox-instance/persist.js"; - -describe("sandbox-instance persist event offset", () => { - it("returns newest tail when cursor is omitted", () => { - expect(resolveEventListOffset({ total: 180, limit: 50 })).toBe(130); - }); - - it("returns zero when total rows are below page size", () => { - expect(resolveEventListOffset({ total: 20, limit: 50 })).toBe(0); - }); - - it("uses explicit cursor when provided", () => { - expect(resolveEventListOffset({ 
cursor: "7", total: 180, limit: 50 })).toBe(7); - }); - - it("normalizes invalid cursors to zero", () => { - expect(resolveEventListOffset({ cursor: "-3", total: 180, limit: 50 })).toBe(0); - expect(resolveEventListOffset({ cursor: "not-a-number", total: 180, limit: 50 })).toBe(0); - }); -}); diff --git a/foundry/packages/backend/test/workspace-isolation.test.ts b/foundry/packages/backend/test/workspace-isolation.test.ts index fd0689d..fa004c7 100644 --- a/foundry/packages/backend/test/workspace-isolation.test.ts +++ b/foundry/packages/backend/test/workspace-isolation.test.ts @@ -56,7 +56,7 @@ describe("workspace isolation", () => { workspaceId: "alpha", repoId: repoA.repoId, task: "task A", - providerId: "daytona", + providerId: "local", explicitBranchName: "feature/a", explicitTitle: "A", }); @@ -65,7 +65,7 @@ describe("workspace isolation", () => { workspaceId: "beta", repoId: repoB.repoId, task: "task B", - providerId: "daytona", + providerId: "local", explicitBranchName: "feature/b", explicitTitle: "B", }); diff --git a/foundry/packages/cli/src/index.ts b/foundry/packages/cli/src/index.ts index 3e77291..4043f32 100644 --- a/foundry/packages/cli/src/index.ts +++ b/foundry/packages/cli/src/index.ts @@ -140,7 +140,7 @@ JSON Output: "tasks": { "total": 4, "byStatus": { "queued": 0, "running": 1, "idle": 2, "archived": 1, "killed": 0, "error": 0 }, - "byProvider": { "daytona": 4 } + "byProvider": { "local": 4 } } } `); @@ -169,7 +169,7 @@ JSON Output: "taskId": "...", "repoId": "...", "branchName": "feature/foo", - "payloadJson": "{\\"providerId\\":\\"daytona\\"}", + "payloadJson": "{\\"providerId\\":\\"local\\"}", "createdAt": 1770607522229 } ] diff --git a/foundry/packages/cli/test/backend-manager.test.ts b/foundry/packages/cli/test/backend-manager.test.ts index ab1892e..53529ab 100644 --- a/foundry/packages/cli/test/backend-manager.test.ts +++ b/foundry/packages/cli/test/backend-manager.test.ts @@ -69,7 +69,8 @@ describe("backend manager", () => { 
backup_retention_days: 7, }, providers: { - daytona: { image: "ubuntu:24.04" }, + local: {}, + e2b: {}, }, }); diff --git a/foundry/packages/cli/test/theme.test.ts b/foundry/packages/cli/test/theme.test.ts index 6b49c75..a492a63 100644 --- a/foundry/packages/cli/test/theme.test.ts +++ b/foundry/packages/cli/test/theme.test.ts @@ -32,7 +32,8 @@ describe("resolveTuiTheme", () => { backup_retention_days: 7, }, providers: { - daytona: { image: "ubuntu:24.04" }, + local: {}, + e2b: {}, }, }); diff --git a/foundry/packages/cli/test/tui-format.test.ts b/foundry/packages/cli/test/tui-format.test.ts index e60c839..fe80182 100644 --- a/foundry/packages/cli/test/tui-format.test.ts +++ b/foundry/packages/cli/test/tui-format.test.ts @@ -11,7 +11,7 @@ const sample: TaskRecord = { branchName: "feature/test", title: "Test Title", task: "Do test", - providerId: "daytona", + providerId: "local", status: "running", statusMessage: null, activeSandboxId: "sandbox-1", @@ -19,8 +19,8 @@ const sample: TaskRecord = { sandboxes: [ { sandboxId: "sandbox-1", - providerId: "daytona", - switchTarget: "daytona://sandbox-1", + providerId: "local", + switchTarget: "sandbox://local/sandbox-1", cwd: null, createdAt: 1, updatedAt: 1, diff --git a/foundry/packages/cli/test/workspace-config.test.ts b/foundry/packages/cli/test/workspace-config.test.ts index 1f2e33a..94145be 100644 --- a/foundry/packages/cli/test/workspace-config.test.ts +++ b/foundry/packages/cli/test/workspace-config.test.ts @@ -18,7 +18,8 @@ describe("cli workspace resolution", () => { backup_retention_days: 7, }, providers: { - daytona: { image: "ubuntu:24.04" }, + local: {}, + e2b: {}, }, }); diff --git a/foundry/packages/client/src/backend-client.ts b/foundry/packages/client/src/backend-client.ts index 05047bb..2c34442 100644 --- a/foundry/packages/client/src/backend-client.ts +++ b/foundry/packages/client/src/backend-client.ts @@ -43,7 +43,7 @@ import type { } from "@sandbox-agent/foundry-shared"; import type { 
ProcessCreateRequest, ProcessInfo, ProcessLogFollowQuery, ProcessLogsResponse, ProcessSignalQuery } from "sandbox-agent"; import { createMockBackendClient } from "./mock/backend-client.js"; -import { sandboxInstanceKey, taskKey, workspaceKey } from "./keys.js"; +import { taskKey, taskSandboxKey, workspaceKey } from "./keys.js"; export type TaskAction = "push" | "sync" | "merge" | "archive" | "kill"; @@ -54,7 +54,7 @@ export interface SandboxSessionRecord { lastConnectionId: string; createdAt: number; destroyedAt?: number; - status?: "running" | "idle" | "error"; + status?: "pending_provision" | "pending_session_create" | "ready" | "running" | "idle" | "error"; } export interface SandboxSessionEventRecord { @@ -137,23 +137,26 @@ interface TaskHandle { connect(): ActorConn; } -interface SandboxInstanceHandle { +interface TaskSandboxHandle { connect(): ActorConn; createSession(input: { - prompt: string; - cwd?: string; - agent?: AgentType | "opencode"; - }): Promise<{ id: string | null; status: "running" | "idle" | "error"; error?: string }>; + id?: string; + agent: string; + model?: string; + sessionInit?: { + cwd?: string; + }; + }): Promise<{ id: string }>; listSessions(input?: { cursor?: string; limit?: number }): Promise<{ items: SandboxSessionRecord[]; nextCursor?: string }>; - listSessionEvents(input: { sessionId: string; cursor?: string; limit?: number }): Promise<{ items: SandboxSessionEventRecord[]; nextCursor?: string }>; + getEvents(input: { sessionId: string; cursor?: string; limit?: number }): Promise<{ items: SandboxSessionEventRecord[]; nextCursor?: string }>; createProcess(input: ProcessCreateRequest): Promise; listProcesses(): Promise<{ processes: SandboxProcessRecord[] }>; - getProcessLogs(input: { processId: string; query?: ProcessLogFollowQuery }): Promise; - stopProcess(input: { processId: string; query?: ProcessSignalQuery }): Promise; - killProcess(input: { processId: string; query?: ProcessSignalQuery }): Promise; - deleteProcess(input: { 
processId: string }): Promise; - sendPrompt(input: { sessionId: string; prompt: string; notification?: boolean }): Promise; - sessionStatus(input: { sessionId: string }): Promise<{ id: string; status: "running" | "idle" | "error" }>; + getProcessLogs(processId: string, query?: ProcessLogFollowQuery): Promise; + stopProcess(processId: string, query?: ProcessSignalQuery): Promise; + killProcess(processId: string, query?: ProcessSignalQuery): Promise; + deleteProcess(processId: string): Promise; + rawSendSessionMethod(sessionId: string, method: string, params: Record): Promise; + destroySession(sessionId: string): Promise; sandboxAgentConnection(): Promise<{ endpoint: string; token?: string }>; providerState(): Promise<{ providerId: ProviderId; sandboxId: string; state: string; at: number }>; } @@ -166,8 +169,10 @@ interface RivetClient { get(key?: string | string[]): TaskHandle; getOrCreate(key?: string | string[], opts?: { createWithInput?: unknown }): TaskHandle; }; - sandboxInstance: { - getOrCreate(key?: string | string[], opts?: { createWithInput?: unknown }): SandboxInstanceHandle; + taskSandbox: { + get(key?: string | string[]): TaskSandboxHandle; + getOrCreate(key?: string | string[], opts?: { createWithInput?: unknown }): TaskSandboxHandle; + getForId(actorId: string): TaskSandboxHandle; }; } @@ -423,8 +428,8 @@ export function createBackendClient(options: BackendClientOptions): BackendClien const task = async (workspaceId: string, repoId: string, taskId: string): Promise => client.task.get(taskKey(workspaceId, repoId, taskId)); - const sandboxByKey = async (workspaceId: string, providerId: ProviderId, sandboxId: string): Promise => { - return (client as any).sandboxInstance.get(sandboxInstanceKey(workspaceId, providerId, sandboxId)); + const sandboxByKey = async (workspaceId: string, _providerId: ProviderId, sandboxId: string): Promise => { + return (client as any).taskSandbox.get(taskSandboxKey(workspaceId, sandboxId)); }; function 
isActorNotFoundError(error: unknown): boolean { @@ -432,7 +437,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien return message.includes("Actor not found"); } - const sandboxByActorIdFromTask = async (workspaceId: string, providerId: ProviderId, sandboxId: string): Promise => { + const sandboxByActorIdFromTask = async (workspaceId: string, providerId: ProviderId, sandboxId: string): Promise => { const ws = await workspace(workspaceId); const rows = await ws.listTasks({ workspaceId }); const candidates = [...rows].sort((a, b) => b.updatedAt - a.updatedAt); @@ -451,7 +456,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien (sb as any).sandboxActorId.length > 0, ) as { sandboxActorId?: string } | undefined; if (sandbox?.sandboxActorId) { - return (client as any).sandboxInstance.getForId(sandbox.sandboxActorId); + return (client as any).taskSandbox.getForId(sandbox.sandboxActorId); } } catch (error) { const message = error instanceof Error ? 
error.message : String(error); @@ -469,7 +474,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien workspaceId: string, providerId: ProviderId, sandboxId: string, - run: (handle: SandboxInstanceHandle) => Promise, + run: (handle: TaskSandboxHandle) => Promise, ): Promise => { const handle = await sandboxByKey(workspaceId, providerId, sandboxId); try { @@ -511,48 +516,65 @@ export function createBackendClient(options: BackendClientOptions): BackendClien const getWorkbenchCompat = async (workspaceId: string): Promise => { const summary = await (await workspace(workspaceId)).getWorkspaceSummary({ workspaceId }); - const tasks = await Promise.all( - summary.taskSummaries.map(async (taskSummary) => { - const detail = await (await task(workspaceId, taskSummary.repoId, taskSummary.id)).getTaskDetail(); - const sessionDetails = await Promise.all( - detail.sessionsSummary.map(async (session) => { - const full = await (await task(workspaceId, detail.repoId, detail.id)).getSessionDetail({ sessionId: session.id }); - return [session.id, full] as const; - }), - ); - const sessionDetailsById = new Map(sessionDetails); - return { - id: detail.id, - repoId: detail.repoId, - title: detail.title, - status: detail.status, - repoName: detail.repoName, - updatedAtMs: detail.updatedAtMs, - branch: detail.branch, - pullRequest: detail.pullRequest, - tabs: detail.sessionsSummary.map((session) => { - const full = sessionDetailsById.get(session.id); - return { - id: session.id, - sessionId: session.sessionId, - sessionName: session.sessionName, - agent: session.agent, - model: session.model, - status: session.status, - thinkingSinceMs: session.thinkingSinceMs, - unread: session.unread, - created: session.created, - draft: full?.draft ?? { text: "", attachments: [], updatedAtMs: null }, - transcript: full?.transcript ?? 
[], - }; - }), - fileChanges: detail.fileChanges, - diffs: detail.diffs, - fileTree: detail.fileTree, - minutesUsed: detail.minutesUsed, - }; - }), - ); + const tasks = ( + await Promise.all( + summary.taskSummaries.map(async (taskSummary) => { + let detail; + try { + detail = await (await task(workspaceId, taskSummary.repoId, taskSummary.id)).getTaskDetail(); + } catch (error) { + if (isActorNotFoundError(error)) { + return null; + } + throw error; + } + const sessionDetails = await Promise.all( + detail.sessionsSummary.map(async (session) => { + try { + const full = await (await task(workspaceId, detail.repoId, detail.id)).getSessionDetail({ sessionId: session.id }); + return [session.id, full] as const; + } catch (error) { + if (isActorNotFoundError(error)) { + return null; + } + throw error; + } + }), + ); + const sessionDetailsById = new Map(sessionDetails.filter((entry): entry is readonly [string, WorkbenchSessionDetail] => entry !== null)); + return { + id: detail.id, + repoId: detail.repoId, + title: detail.title, + status: detail.status, + repoName: detail.repoName, + updatedAtMs: detail.updatedAtMs, + branch: detail.branch, + pullRequest: detail.pullRequest, + tabs: detail.sessionsSummary.map((session) => { + const full = sessionDetailsById.get(session.id); + return { + id: session.id, + sessionId: session.sessionId, + sessionName: session.sessionName, + agent: session.agent, + model: session.model, + status: session.status, + thinkingSinceMs: session.thinkingSinceMs, + unread: session.unread, + created: session.created, + draft: full?.draft ?? { text: "", attachments: [], updatedAtMs: null }, + transcript: full?.transcript ?? 
[], + }; + }), + fileChanges: detail.fileChanges, + diffs: detail.diffs, + fileTree: detail.fileTree, + minutesUsed: detail.minutesUsed, + }; + }), + ) + ).filter((task): task is TaskWorkbenchSnapshot["tasks"][number] => task !== null); const projects = summary.repos .map((repo) => ({ @@ -639,8 +661,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!entry.disposeConnPromise) { entry.disposeConnPromise = (async () => { - const handle = await sandboxByKey(workspaceId, providerId, sandboxId); - const conn = (handle as any).connect(); + const conn = await connectSandbox(workspaceId, providerId, sandboxId); const unsubscribeEvent = conn.on("processesUpdated", () => { const current = sandboxProcessSubscriptions.get(key); if (!current) { @@ -958,17 +979,22 @@ export function createBackendClient(options: BackendClientOptions): BackendClien }): Promise<{ id: string; status: "running" | "idle" | "error" }> { const created = await withSandboxHandle(input.workspaceId, input.providerId, input.sandboxId, async (handle) => handle.createSession({ - prompt: input.prompt, - cwd: input.cwd, - agent: input.agent, + agent: input.agent ?? "claude", + sessionInit: { + cwd: input.cwd, + }, }), ); - if (!created.id) { - throw new Error(created.error ?? 
"sandbox session creation failed"); + if (input.prompt.trim().length > 0) { + await withSandboxHandle(input.workspaceId, input.providerId, input.sandboxId, async (handle) => + handle.rawSendSessionMethod(created.id, "session/prompt", { + prompt: [{ type: "text", text: input.prompt }], + }), + ); } return { id: created.id, - status: created.status, + status: "idle", }; }, @@ -987,7 +1013,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien sandboxId: string, input: { sessionId: string; cursor?: string; limit?: number }, ): Promise<{ items: SandboxSessionEventRecord[]; nextCursor?: string }> { - return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.listSessionEvents(input)); + return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.getEvents(input)); }, async createSandboxProcess(input: { @@ -1010,7 +1036,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien processId: string, query?: ProcessLogFollowQuery, ): Promise { - return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.getProcessLogs({ processId, query })); + return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.getProcessLogs(processId, query)); }, async stopSandboxProcess( @@ -1020,7 +1046,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien processId: string, query?: ProcessSignalQuery, ): Promise { - return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.stopProcess({ processId, query })); + return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.stopProcess(processId, query)); }, async killSandboxProcess( @@ -1030,11 +1056,11 @@ export function createBackendClient(options: BackendClientOptions): BackendClien processId: string, query?: ProcessSignalQuery, ): Promise { - return await 
withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.killProcess({ processId, query })); + return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.killProcess(processId, query)); }, async deleteSandboxProcess(workspaceId: string, providerId: ProviderId, sandboxId: string, processId: string): Promise { - await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.deleteProcess({ processId })); + await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.deleteProcess(processId)); }, subscribeSandboxProcesses(workspaceId: string, providerId: ProviderId, sandboxId: string, listener: () => void): () => void { @@ -1050,10 +1076,8 @@ export function createBackendClient(options: BackendClientOptions): BackendClien notification?: boolean; }): Promise { await withSandboxHandle(input.workspaceId, input.providerId, input.sandboxId, async (handle) => - handle.sendPrompt({ - sessionId: input.sessionId, - prompt: input.prompt, - notification: input.notification, + handle.rawSendSessionMethod(input.sessionId, "session/prompt", { + prompt: [{ type: "text", text: input.prompt }], }), ); }, @@ -1064,7 +1088,10 @@ export function createBackendClient(options: BackendClientOptions): BackendClien sandboxId: string, sessionId: string, ): Promise<{ id: string; status: "running" | "idle" | "error" }> { - return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.sessionStatus({ sessionId })); + return { + id: sessionId, + status: "idle", + }; }, async sandboxProviderState( diff --git a/foundry/packages/client/src/interest/manager.ts b/foundry/packages/client/src/interest/manager.ts index b2aab57..4b02230 100644 --- a/foundry/packages/client/src/interest/manager.ts +++ b/foundry/packages/client/src/interest/manager.ts @@ -2,6 +2,14 @@ import type { TopicData, TopicKey, TopicParams } from "./topics.js"; export type TopicStatus = "loading" | 
"connected" | "error"; +export interface DebugInterestTopic { + topicKey: TopicKey; + cacheKey: string; + listenerCount: number; + status: TopicStatus; + lastRefreshAt: number | null; +} + export interface TopicState { data: TopicData | undefined; status: TopicStatus; @@ -20,5 +28,6 @@ export interface InterestManager { getSnapshot(topicKey: K, params: TopicParams): TopicData | undefined; getStatus(topicKey: K, params: TopicParams): TopicStatus; getError(topicKey: K, params: TopicParams): Error | null; + listDebugTopics(): DebugInterestTopic[]; dispose(): void; } diff --git a/foundry/packages/client/src/interest/remote-manager.ts b/foundry/packages/client/src/interest/remote-manager.ts index 3016ad0..f857975 100644 --- a/foundry/packages/client/src/interest/remote-manager.ts +++ b/foundry/packages/client/src/interest/remote-manager.ts @@ -1,5 +1,5 @@ import type { BackendClient } from "../backend-client.js"; -import type { InterestManager, TopicStatus } from "./manager.js"; +import type { DebugInterestTopic, InterestManager, TopicStatus } from "./manager.js"; import { topicDefinitions, type TopicData, type TopicDefinition, type TopicKey, type TopicParams } from "./topics.js"; const GRACE_PERIOD_MS = 30_000; @@ -19,7 +19,7 @@ export class RemoteInterestManager implements InterestManager { let entry = this.entries.get(cacheKey); if (!entry) { - entry = new TopicEntry(definition, this.backend, params as any); + entry = new TopicEntry(topicKey, cacheKey, definition, this.backend, params as any); this.entries.set(cacheKey, entry); } @@ -53,6 +53,13 @@ export class RemoteInterestManager implements InterestManager { return this.entries.get((topicDefinitions[topicKey] as any).key(params))?.error ?? 
null; } + listDebugTopics(): DebugInterestTopic[] { + return [...this.entries.values()] + .filter((entry) => entry.listenerCount > 0) + .map((entry) => entry.getDebugTopic()) + .sort((left, right) => left.cacheKey.localeCompare(right.cacheKey)); + } + dispose(): void { for (const entry of this.entries.values()) { entry.dispose(); @@ -66,6 +73,7 @@ class TopicEntry { status: TopicStatus = "loading"; error: Error | null = null; listenerCount = 0; + lastRefreshAt: number | null = null; private readonly listeners = new Set<() => void>(); private conn: Awaited["connect"]>> | null = null; @@ -76,11 +84,23 @@ class TopicEntry { private started = false; constructor( + private readonly topicKey: TopicKey, + private readonly cacheKey: string, private readonly definition: TopicDefinition, private readonly backend: BackendClient, private readonly params: TParams, ) {} + getDebugTopic(): DebugInterestTopic { + return { + topicKey: this.topicKey, + cacheKey: this.cacheKey, + listenerCount: this.listenerCount, + status: this.status, + lastRefreshAt: this.lastRefreshAt, + }; + } + addListener(listener: () => void): void { this.listeners.add(listener); this.listenerCount = this.listeners.size; @@ -125,6 +145,7 @@ class TopicEntry { this.data = undefined; this.status = "loading"; this.error = null; + this.lastRefreshAt = null; this.started = false; } @@ -140,6 +161,7 @@ class TopicEntry { return; } this.data = this.definition.applyEvent(this.data, event); + this.lastRefreshAt = Date.now(); this.notify(); }); this.unsubscribeError = this.conn.onError((error: unknown) => { @@ -149,6 +171,7 @@ class TopicEntry { }); this.data = await this.definition.fetchInitial(this.backend, this.params); this.status = "connected"; + this.lastRefreshAt = Date.now(); this.started = true; this.notify(); } catch (error) { diff --git a/foundry/packages/client/src/keys.ts b/foundry/packages/client/src/keys.ts index f6b210e..54fdcc1 100644 --- a/foundry/packages/client/src/keys.ts +++ 
b/foundry/packages/client/src/keys.ts @@ -12,8 +12,8 @@ export function taskKey(workspaceId: string, repoId: string, taskId: string): Ac return ["ws", workspaceId, "project", repoId, "task", taskId]; } -export function sandboxInstanceKey(workspaceId: string, providerId: string, sandboxId: string): ActorKey { - return ["ws", workspaceId, "provider", providerId, "sandbox", sandboxId]; +export function taskSandboxKey(workspaceId: string, sandboxId: string): ActorKey { + return ["ws", workspaceId, "sandbox", sandboxId]; } export function historyKey(workspaceId: string, repoId: string): ActorKey { @@ -27,8 +27,3 @@ export function projectPrSyncKey(workspaceId: string, repoId: string): ActorKey export function projectBranchSyncKey(workspaceId: string, repoId: string): ActorKey { return ["ws", workspaceId, "project", repoId, "branch-sync"]; } - -export function taskStatusSyncKey(workspaceId: string, repoId: string, taskId: string, sandboxId: string, sessionId: string): ActorKey { - // Include sandbox + session so multiple sandboxes/sessions can be tracked per task. 
- return ["ws", workspaceId, "project", repoId, "task", taskId, "status-sync", sandboxId, sessionId]; -} diff --git a/foundry/packages/client/src/mock-app.ts b/foundry/packages/client/src/mock-app.ts index 0cf499d..1f1ed39 100644 --- a/foundry/packages/client/src/mock-app.ts +++ b/foundry/packages/client/src/mock-app.ts @@ -1,3 +1,4 @@ +import type { WorkbenchModelId } from "@sandbox-agent/foundry-shared"; import { injectMockLatency } from "./mock/latency.js"; import rivetDevFixture from "../../../scripts/data/rivet-dev.json" with { type: "json" }; @@ -58,7 +59,7 @@ export interface MockFoundryOrganizationSettings { slug: string; primaryDomain: string; seatAccrualMode: "first_prompt"; - defaultModel: "claude-sonnet-4" | "claude-opus-4" | "gpt-4o" | "o3"; + defaultModel: WorkbenchModelId; autoImportRepos: boolean; } @@ -177,7 +178,7 @@ function buildRivetOrganization(): MockFoundryOrganization { slug: "rivet", primaryDomain: "rivet.dev", seatAccrualMode: "first_prompt", - defaultModel: "o3", + defaultModel: "gpt-5.3-codex", autoImportRepos: true, }, github: { diff --git a/foundry/packages/client/src/view-model.ts b/foundry/packages/client/src/view-model.ts index 4764bac..239b8a5 100644 --- a/foundry/packages/client/src/view-model.ts +++ b/foundry/packages/client/src/view-model.ts @@ -9,12 +9,6 @@ const QUEUED_STATUSES = new Set([ "init_enqueue_provision", "init_ensure_name", "init_assert_name", - "init_create_sandbox", - "init_ensure_agent", - "init_start_sandbox_instance", - "init_create_session", - "init_write_db", - "init_start_status_sync", "init_complete", "archive_stop_status_sync", "archive_release_sandbox", diff --git a/foundry/packages/client/src/workbench-model.ts b/foundry/packages/client/src/workbench-model.ts index b99f588..206d08a 100644 --- a/foundry/packages/client/src/workbench-model.ts +++ b/foundry/packages/client/src/workbench-model.ts @@ -26,8 +26,12 @@ export const MODEL_GROUPS: ModelGroup[] = [ { provider: "OpenAI", models: [ - { id: "gpt-4o", 
label: "GPT-4o" }, - { id: "o3", label: "o3" }, + { id: "gpt-5.3-codex", label: "GPT-5.3 Codex" }, + { id: "gpt-5.4", label: "GPT-5.4" }, + { id: "gpt-5.2-codex", label: "GPT-5.2 Codex" }, + { id: "gpt-5.1-codex-max", label: "GPT-5.1 Codex Max" }, + { id: "gpt-5.2", label: "GPT-5.2" }, + { id: "gpt-5.1-codex-mini", label: "GPT-5.1 Codex Mini" }, ], }, ]; @@ -334,7 +338,7 @@ export function buildInitialTasks(): Task[] { sessionId: "t2", sessionName: "Test coverage", agent: "Codex", - model: "gpt-4o", + model: "gpt-5.3-codex", status: "idle", thinkingSinceMs: null, unread: true, @@ -1083,7 +1087,7 @@ export function buildInitialTasks(): Task[] { sessionId: "t10", sessionName: "Namespace fix", agent: "Codex", - model: "gpt-4o", + model: "gpt-5.3-codex", status: "idle", thinkingSinceMs: null, unread: true, @@ -1120,6 +1124,109 @@ export function buildInitialTasks(): Task[] { fileTree: [], minutesUsed: 3, }, + + // ── Status demo tasks ────────────────────────────────────────────── + { + id: "status-error", + repoId: "sandbox-agent", + title: "Fix broken auth middleware (error demo)", + status: "error", + runtimeStatus: "error", + statusMessage: "session:error", + repoName: "rivet-dev/sandbox-agent", + updatedAtMs: minutesAgo(2), + branch: "fix/auth-middleware", + pullRequest: null, + tabs: [ + { + id: "status-error-tab", + sessionId: "status-error-session", + sessionName: "Auth fix", + agent: "Claude", + model: "claude-sonnet-4", + status: "error", + thinkingSinceMs: null, + unread: false, + created: true, + errorMessage: "Sandbox process exited unexpectedly (exit code 137). 
The sandbox may have run out of memory.", + draft: { text: "", attachments: [], updatedAtMs: null }, + transcript: [], + }, + ], + fileChanges: [], + diffs: {}, + fileTree: [], + minutesUsed: 1, + }, + { + id: "status-provisioning", + repoId: "sandbox-agent", + title: "Add rate limiting to API gateway (provisioning demo)", + status: "new", + runtimeStatus: "init_enqueue_provision", + statusMessage: "Queueing sandbox provisioning.", + repoName: "rivet-dev/sandbox-agent", + updatedAtMs: minutesAgo(0), + branch: null, + pullRequest: null, + tabs: [ + { + id: "status-prov-tab", + sessionId: null, + sessionName: "Session 1", + agent: "Claude", + model: "claude-sonnet-4", + status: "pending_provision", + thinkingSinceMs: null, + unread: false, + created: false, + draft: { text: "", attachments: [], updatedAtMs: null }, + transcript: [], + }, + ], + fileChanges: [], + diffs: {}, + fileTree: [], + minutesUsed: 0, + }, + { + id: "status-running", + repoId: "sandbox-agent", + title: "Refactor WebSocket handler (running demo)", + status: "running", + runtimeStatus: "running", + repoName: "rivet-dev/sandbox-agent", + updatedAtMs: minutesAgo(1), + branch: "refactor/ws-handler", + pullRequest: null, + tabs: [ + { + id: "status-run-tab", + sessionId: "status-run-session", + sessionName: "WS refactor", + agent: "Codex", + model: "gpt-5.3-codex", + status: "running", + thinkingSinceMs: Date.now() - 12_000, + unread: false, + created: true, + draft: { text: "", attachments: [], updatedAtMs: null }, + transcript: transcriptFromLegacyMessages("status-run-tab", [ + { + id: "sr1", + role: "user", + agent: null, + createdAtMs: minutesAgo(3), + lines: ["Refactor the WebSocket handler to use a connection pool pattern."], + }, + ]), + }, + ], + fileChanges: [], + diffs: {}, + fileTree: [], + minutesUsed: 2, + }, ]; } diff --git a/foundry/packages/client/test/e2e/github-pr-e2e.test.ts b/foundry/packages/client/test/e2e/github-pr-e2e.test.ts index c468717..8a8b0d3 100644 --- 
a/foundry/packages/client/test/e2e/github-pr-e2e.test.ts +++ b/foundry/packages/client/test/e2e/github-pr-e2e.test.ts @@ -171,7 +171,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { "4. git push the branch to origin", "5. Stop when done (agent should go idle).", ].join("\n"), - providerId: "daytona", + providerId: "local", explicitTitle: `test(e2e): ${runId}`, explicitBranchName: `e2e/${runId}`, }); @@ -185,7 +185,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { try { const namedAndProvisioned = await poll( "task naming + sandbox provisioning", - // Cold Daytona snapshot/image preparation can exceed 5 minutes on first run. + // Cold local sandbox startup can exceed a few minutes on first run. 8 * 60_000, 1_000, async () => client.getTask(workspaceId, created.taskId), @@ -301,17 +301,17 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { if (sandboxId) { await poll<{ providerId: string; sandboxId: string; state: string; at: number }>( - "daytona sandbox to stop", + "sandbox to stop", 2 * 60_000, 2_000, - async () => client.sandboxProviderState(workspaceId, "daytona", sandboxId!), + async () => client.sandboxProviderState(workspaceId, "local", sandboxId!), (s) => { const st = String(s.state).toLowerCase(); - return st.includes("stopped") || st.includes("suspended") || st.includes("paused"); + return st.includes("destroyed") || st.includes("stopped") || st.includes("suspended") || st.includes("paused"); }, ).catch(async (err) => { const dump = await debugDump(client, workspaceId, created.taskId); - const state = await client.sandboxProviderState(workspaceId, "daytona", sandboxId!).catch(() => null); + const state = await client.sandboxProviderState(workspaceId, "local", sandboxId!).catch(() => null); throw new Error(`${err instanceof Error ? err.message : String(err)}\n` + `sandbox state: ${state ? 
state.state : "unknown"}\n` + `${dump}`); }); } diff --git a/foundry/packages/client/test/e2e/workbench-e2e.test.ts b/foundry/packages/client/test/e2e/workbench-e2e.test.ts index 5d85125..11f092b 100644 --- a/foundry/packages/client/test/e2e/workbench-e2e.test.ts +++ b/foundry/packages/client/test/e2e/workbench-e2e.test.ts @@ -1,11 +1,8 @@ -import { execFile } from "node:child_process"; -import { promisify } from "node:util"; import { describe, expect, it } from "vitest"; import type { TaskWorkbenchSnapshot, WorkbenchAgentTab, WorkbenchTask, WorkbenchModelId, WorkbenchTranscriptEvent } from "@sandbox-agent/foundry-shared"; import { createBackendClient } from "../../src/backend-client.js"; const RUN_WORKBENCH_E2E = process.env.HF_ENABLE_DAEMON_WORKBENCH_E2E === "1"; -const execFileAsync = promisify(execFile); function requiredEnv(name: string): string { const value = process.env[name]?.trim(); @@ -20,8 +17,12 @@ function workbenchModelEnv(name: string, fallback: WorkbenchModelId): WorkbenchM switch (value) { case "claude-sonnet-4": case "claude-opus-4": - case "gpt-4o": - case "o3": + case "gpt-5.3-codex": + case "gpt-5.4": + case "gpt-5.2-codex": + case "gpt-5.1-codex-max": + case "gpt-5.2": + case "gpt-5.1-codex-mini": return value; default: return fallback; @@ -32,16 +33,6 @@ async function sleep(ms: number): Promise { await new Promise((resolve) => setTimeout(resolve, ms)); } -async function seedSandboxFile(workspaceId: string, taskId: string, filePath: string, content: string): Promise { - const repoPath = `/root/.local/share/foundry/local-sandboxes/${workspaceId}/${taskId}/repo`; - const script = [ - `cd ${JSON.stringify(repoPath)}`, - `mkdir -p ${JSON.stringify(filePath.includes("/") ? 
filePath.slice(0, filePath.lastIndexOf("/")) : ".")}`, - `printf '%s\\n' ${JSON.stringify(content)} > ${JSON.stringify(filePath)}`, - ].join(" && "); - await execFileAsync("docker", ["exec", "foundry-backend-1", "bash", "-lc", script]); -} - async function poll(label: string, timeoutMs: number, intervalMs: number, fn: () => Promise, isDone: (value: T) => boolean): Promise { const startedAt = Date.now(); let lastValue: T; @@ -148,7 +139,7 @@ describe("e2e(client): workbench flows", () => { const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet"; const workspaceId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO"); - const model = workbenchModelEnv("HF_E2E_MODEL", "gpt-4o"); + const model = workbenchModelEnv("HF_E2E_MODEL", "gpt-5.3-codex"); const runId = `wb-${Date.now().toString(36)}`; const expectedFile = `${runId}.txt`; const expectedInitialReply = `WORKBENCH_READY_${runId}`; @@ -192,17 +183,6 @@ describe("e2e(client): workbench flows", () => { expect(findTab(initialCompleted, primaryTab.id).sessionId).toBeTruthy(); expect(transcriptIncludesAgentText(findTab(initialCompleted, primaryTab.id).transcript, expectedInitialReply)).toBe(true); - await seedSandboxFile(workspaceId, created.taskId, expectedFile, runId); - - const fileSeeded = await poll( - "seeded sandbox file reflected in workbench", - 30_000, - 1_000, - async () => findTask(await client.getWorkbench(workspaceId), created.taskId), - (task) => task.fileChanges.some((file) => file.path === expectedFile), - ); - expect(fileSeeded.fileChanges.some((file) => file.path === expectedFile)).toBe(true); - await client.renameWorkbenchTask(workspaceId, { taskId: created.taskId, value: `Workbench E2E ${runId} Renamed`, @@ -227,7 +207,11 @@ describe("e2e(client): workbench flows", () => { await client.updateWorkbenchDraft(workspaceId, { taskId: created.taskId, tabId: secondTab.tabId, - text: `Reply with exactly: 
${expectedReply}`, + text: [ + `Create a file named ${expectedFile} in the repo root.`, + `Write exactly this single line into the file: ${runId}`, + `Then reply with exactly: ${expectedReply}`, + ].join("\n"), attachments: [ { id: `${expectedFile}:1`, @@ -245,8 +229,19 @@ describe("e2e(client): workbench flows", () => { await client.sendWorkbenchMessage(workspaceId, { taskId: created.taskId, tabId: secondTab.tabId, - text: `Reply with exactly: ${expectedReply}`, - attachments: [], + text: [ + `Create a file named ${expectedFile} in the repo root.`, + `Write exactly this single line into the file: ${runId}`, + `Then reply with exactly: ${expectedReply}`, + ].join("\n"), + attachments: [ + { + id: `${expectedFile}:1`, + filePath: expectedFile, + lineNumber: 1, + lineContent: runId, + }, + ], }); const withSecondReply = await poll( @@ -256,12 +251,15 @@ describe("e2e(client): workbench flows", () => { async () => findTask(await client.getWorkbench(workspaceId), created.taskId), (task) => { const tab = findTab(task, secondTab.tabId); - return tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, expectedReply); + return ( + tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, expectedReply) && task.fileChanges.some((file) => file.path === expectedFile) + ); }, ); const secondTranscript = findTab(withSecondReply, secondTab.tabId).transcript; expect(transcriptIncludesAgentText(secondTranscript, expectedReply)).toBe(true); + expect(withSecondReply.fileChanges.some((file) => file.path === expectedFile)).toBe(true); await client.setWorkbenchSessionUnread(workspaceId, { taskId: created.taskId, diff --git a/foundry/packages/client/test/e2e/workbench-load-e2e.test.ts b/foundry/packages/client/test/e2e/workbench-load-e2e.test.ts index 3eba239..363fa66 100644 --- a/foundry/packages/client/test/e2e/workbench-load-e2e.test.ts +++ b/foundry/packages/client/test/e2e/workbench-load-e2e.test.ts @@ -30,8 +30,12 @@ function workbenchModelEnv(name: string, 
fallback: WorkbenchModelId): WorkbenchM switch (value) { case "claude-sonnet-4": case "claude-opus-4": - case "gpt-4o": - case "o3": + case "gpt-5.3-codex": + case "gpt-5.4": + case "gpt-5.2-codex": + case "gpt-5.1-codex-max": + case "gpt-5.2": + case "gpt-5.1-codex-mini": return value; default: return fallback; @@ -191,7 +195,7 @@ describe("e2e(client): workbench load", () => { const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet"; const workspaceId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO"); - const model = workbenchModelEnv("HF_E2E_MODEL", "gpt-4o"); + const model = workbenchModelEnv("HF_E2E_MODEL", "gpt-5.3-codex"); const taskCount = intEnv("HF_LOAD_TASK_COUNT", 3); const extraSessionCount = intEnv("HF_LOAD_EXTRA_SESSION_COUNT", 2); const pollIntervalMs = intEnv("HF_LOAD_POLL_INTERVAL_MS", 2_000); diff --git a/foundry/packages/client/test/interest-manager.test.ts b/foundry/packages/client/test/interest-manager.test.ts index 188195c..db86b0d 100644 --- a/foundry/packages/client/test/interest-manager.test.ts +++ b/foundry/packages/client/test/interest-manager.test.ts @@ -104,6 +104,14 @@ describe("RemoteInterestManager", () => { expect(backend.getWorkspaceSummary).toHaveBeenCalledTimes(1); expect(manager.getStatus("workspace", params)).toBe("connected"); expect(manager.getSnapshot("workspace", params)?.taskSummaries[0]?.title).toBe("Initial task"); + expect(manager.listDebugTopics()).toEqual([ + expect.objectContaining({ + topicKey: "workspace", + cacheKey: "workspace:ws-1", + listenerCount: 2, + status: "connected", + }), + ]); conn.emit("workspaceUpdated", { type: "taskSummaryUpdated", @@ -123,6 +131,7 @@ describe("RemoteInterestManager", () => { expect(manager.getSnapshot("workspace", params)?.taskSummaries[0]?.title).toBe("Updated task"); expect(listenerA).toHaveBeenCalled(); expect(listenerB).toHaveBeenCalled(); + 
expect(manager.listDebugTopics()[0]?.lastRefreshAt).toEqual(expect.any(Number)); unsubscribeA(); unsubscribeB(); @@ -140,6 +149,7 @@ describe("RemoteInterestManager", () => { unsubscribeA(); vi.advanceTimersByTime(29_000); + expect(manager.listDebugTopics()).toEqual([]); const unsubscribeB = manager.subscribe("workspace", params, () => {}); await flushAsyncWork(); @@ -148,6 +158,7 @@ describe("RemoteInterestManager", () => { expect(conn.disposeCount).toBe(0); unsubscribeB(); + expect(manager.listDebugTopics()).toEqual([]); vi.advanceTimersByTime(30_000); expect(conn.disposeCount).toBe(1); diff --git a/foundry/packages/client/test/keys.test.ts b/foundry/packages/client/test/keys.test.ts index 281d0a9..8f1f735 100644 --- a/foundry/packages/client/test/keys.test.ts +++ b/foundry/packages/client/test/keys.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from "vitest"; -import { taskKey, taskStatusSyncKey, historyKey, projectBranchSyncKey, projectKey, projectPrSyncKey, sandboxInstanceKey, workspaceKey } from "../src/keys.js"; +import { taskKey, historyKey, projectBranchSyncKey, projectKey, projectPrSyncKey, taskSandboxKey, workspaceKey } from "../src/keys.js"; describe("actor keys", () => { it("prefixes every key with workspace namespace", () => { @@ -7,11 +7,10 @@ describe("actor keys", () => { workspaceKey("default"), projectKey("default", "repo"), taskKey("default", "repo", "task"), - sandboxInstanceKey("default", "daytona", "sbx"), + taskSandboxKey("default", "sbx"), historyKey("default", "repo"), projectPrSyncKey("default", "repo"), projectBranchSyncKey("default", "repo"), - taskStatusSyncKey("default", "repo", "task", "sandbox-1", "session-1"), ]; for (const key of keys) { diff --git a/foundry/packages/client/test/view-model.test.ts b/foundry/packages/client/test/view-model.test.ts index d80b5f1..4767b8f 100644 --- a/foundry/packages/client/test/view-model.test.ts +++ b/foundry/packages/client/test/view-model.test.ts @@ -10,7 +10,7 @@ const sample: 
TaskRecord = { branchName: "feature/test", title: "Test Title", task: "Do test", - providerId: "daytona", + providerId: "local", status: "running", statusMessage: null, activeSandboxId: "sandbox-1", @@ -18,9 +18,9 @@ const sample: TaskRecord = { sandboxes: [ { sandboxId: "sandbox-1", - providerId: "daytona", + providerId: "local", sandboxActorId: null, - switchTarget: "daytona://sandbox-1", + switchTarget: "sandbox://local/sandbox-1", cwd: null, createdAt: 1, updatedAt: 1, @@ -73,8 +73,8 @@ describe("summary helpers", () => { it("summarizes by status and provider", () => { const rows: TaskRecord[] = [ sample, - { ...sample, taskId: "task-2", status: "idle", providerId: "daytona" }, - { ...sample, taskId: "task-3", status: "error", providerId: "daytona" }, + { ...sample, taskId: "task-2", status: "idle", providerId: "local" }, + { ...sample, taskId: "task-3", status: "error", providerId: "local" }, ]; const summary = summarizeTasks(rows); @@ -82,6 +82,6 @@ describe("summary helpers", () => { expect(summary.byStatus.running).toBe(1); expect(summary.byStatus.idle).toBe(1); expect(summary.byStatus.error).toBe(1); - expect(summary.byProvider.daytona).toBe(3); + expect(summary.byProvider.local).toBe(3); }); }); diff --git a/foundry/packages/frontend/src/components/dev-panel.tsx b/foundry/packages/frontend/src/components/dev-panel.tsx index f0a176c..68d5cd9 100644 --- a/foundry/packages/frontend/src/components/dev-panel.tsx +++ b/foundry/packages/frontend/src/components/dev-panel.tsx @@ -1,23 +1,72 @@ -import { memo, useCallback, useEffect, useMemo, useState } from "react"; +import { memo, useEffect, useMemo, useState } from "react"; import { useStyletron } from "baseui"; import { useFoundryTokens } from "../app/theme"; import { isMockFrontendClient } from "../lib/env"; -import type { FoundryOrganization, TaskWorkbenchSnapshot, WorkbenchTask } from "@sandbox-agent/foundry-shared"; +import { interestManager } from "../lib/interest"; +import type { + FoundryOrganization, + 
TaskStatus, + TaskWorkbenchSnapshot, + WorkbenchSandboxSummary, + WorkbenchSessionSummary, + WorkbenchTaskStatus, +} from "@sandbox-agent/foundry-shared"; +import type { DebugInterestTopic } from "@sandbox-agent/foundry-client"; +import { describeTaskState } from "../features/tasks/status"; interface DevPanelProps { workspaceId: string; snapshot: TaskWorkbenchSnapshot; organization?: FoundryOrganization | null; + focusedTask?: DevPanelFocusedTask | null; +} + +export interface DevPanelFocusedTask { + id: string; + repoId: string; + title: string | null; + status: WorkbenchTaskStatus; + runtimeStatus?: TaskStatus | null; + statusMessage?: string | null; + branch?: string | null; + activeSandboxId?: string | null; + activeSessionId?: string | null; + sandboxes?: WorkbenchSandboxSummary[]; + sessions?: WorkbenchSessionSummary[]; } interface TopicInfo { label: string; key: string; + /** Parsed params portion of the cache key, or empty if none. */ + params: string; listenerCount: number; hasConnection: boolean; + status: "loading" | "connected" | "error"; lastRefresh: number | null; } +function topicLabel(topic: DebugInterestTopic): string { + switch (topic.topicKey) { + case "app": + return "App"; + case "workspace": + return "Workspace"; + case "task": + return "Task"; + case "session": + return "Session"; + case "sandboxProcesses": + return "Sandbox"; + } +} + +/** Extract the params portion of a cache key (everything after the first `:`) */ +function topicParams(topic: DebugInterestTopic): string { + const idx = topic.cacheKey.indexOf(":"); + return idx >= 0 ? 
topic.cacheKey.slice(idx + 1) : ""; +} + function timeAgo(ts: number | null): string { if (!ts) return "never"; const seconds = Math.floor((Date.now() - ts) / 1000); @@ -28,17 +77,17 @@ function timeAgo(ts: number | null): string { return `${Math.floor(minutes / 60)}h`; } -function taskStatusLabel(task: WorkbenchTask): string { - if (task.status === "archived") return "archived"; - const hasRunning = task.tabs?.some((tab) => tab.status === "running"); - if (hasRunning) return "running"; - return task.status ?? "idle"; -} - function statusColor(status: string, t: ReturnType): string { + if (status === "new" || status.startsWith("init_") || status.startsWith("archive_") || status.startsWith("kill_") || status.startsWith("pending_")) { + return t.statusWarning; + } switch (status) { + case "connected": case "running": + case "ready": return t.statusSuccess; + case "loading": + return t.statusWarning; case "archived": return t.textMuted; case "error": @@ -76,7 +125,15 @@ function installStatusColor(status: string, t: ReturnType { - const items: TopicInfo[] = []; + return interestManager.listDebugTopics().map((topic) => ({ + label: topicLabel(topic), + key: topic.cacheKey, + params: topicParams(topic), + listenerCount: topic.listenerCount, + hasConnection: topic.status === "connected", + status: topic.status, + lastRefresh: topic.lastRefreshAt, + })); + }, [now]); - // Workbench subscription topic - items.push({ - label: "Workbench", - key: `ws:${workspaceId}`, - listenerCount: 1, - hasConnection: true, - lastRefresh: now, - }); - - // Per-task tab subscriptions - for (const task of snapshot.tasks ?? []) { - if (task.status === "archived") continue; - for (const tab of task.tabs ?? []) { - items.push({ - label: `Tab/${task.title?.slice(0, 16) || task.id.slice(0, 8)}/${tab.sessionName.slice(0, 10)}`, - key: `${workspaceId}:${task.id}:${tab.id}`, - listenerCount: 1, - hasConnection: tab.status === "running", - lastRefresh: tab.status === "running" ? 
now : null, - }); - } - } - - return items; - }, [workspaceId, snapshot, now]); - - const tasks = snapshot.tasks ?? []; const repos = snapshot.repos ?? []; - const projects = snapshot.projects ?? []; + const focusedTaskStatus = focusedTask?.runtimeStatus ?? focusedTask?.status ?? null; + const focusedTaskState = describeTaskState(focusedTaskStatus, focusedTask?.statusMessage ?? null); const mono = css({ fontFamily: "ui-monospace, SFMono-Regular, 'SF Mono', Consolas, monospace", @@ -199,7 +239,14 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza {topic.label} - {topic.key.length > 24 ? `...${topic.key.slice(-20)}` : topic.key} + {topic.status} + {topic.params && ( + + {topic.params} + + )} {timeAgo(topic.lastRefresh)} ))} @@ -210,44 +257,150 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza
- - +
- {/* Tasks */} - {tasks.length > 0 && ( -
- {tasks.slice(0, 10).map((task) => { - const status = taskStatusLabel(task); - return ( -
+ {focusedTask ? ( +
+
+ - + + {focusedTask.title || focusedTask.id.slice(0, 12)} + + + {focusedTaskStatus ?? focusedTask.status} + +
+
{focusedTaskState.detail}
+
task: {focusedTask.id}
+
repo: {focusedTask.repoId}
+
branch: {focusedTask.branch ?? "-"}
+
+ ) : ( + No task focused + )} +
+ + {/* Session — only when a task is focused */} + {focusedTask && ( +
+ {(focusedTask.sessions?.length ?? 0) > 0 ? ( + focusedTask.sessions!.map((session) => { + const isActive = session.id === focusedTask.activeSessionId; + const thinking = thinkingLabel(session.thinkingSinceMs, now); + return ( +
- - {task.title || task.id.slice(0, 12)} - - {status} - {task.tabs?.length ?? 0} tabs -
- ); - })} + > +
+ + + {session.sessionName || session.id.slice(0, 12)} + {isActive ? " *" : ""} + + {session.status} +
+
+ {session.agent} + {session.model} + {!session.created && not created} + {session.unread && unread} + {thinking && {thinking}} +
+ {session.errorMessage && ( +
{session.errorMessage}
+ )} + {session.sessionId &&
sid: {session.sessionId}
} + + ); + }) + ) : ( + No sessions + )} +
+ )} + + {/* Sandbox — only when a task is focused */} + {focusedTask && ( +
+ {(focusedTask.sandboxes?.length ?? 0) > 0 ? ( + focusedTask.sandboxes!.map((sandbox) => { + const isActive = sandbox.sandboxId === focusedTask.activeSandboxId; + return ( +
+
+ + + {sandbox.sandboxId.slice(0, 16)} + {isActive ? " *" : ""} + + {sandbox.providerId} +
+ {sandbox.cwd &&
cwd: {sandbox.cwd}
} +
+ ); + }) + ) : ( + No sandboxes + )}
)} diff --git a/foundry/packages/frontend/src/components/mock-layout.tsx b/foundry/packages/frontend/src/components/mock-layout.tsx index 8bb3d5d..6988f23 100644 --- a/foundry/packages/frontend/src/components/mock-layout.tsx +++ b/foundry/packages/frontend/src/components/mock-layout.tsx @@ -22,7 +22,7 @@ import { Sidebar } from "./mock-layout/sidebar"; import { TabStrip } from "./mock-layout/tab-strip"; import { TerminalPane } from "./mock-layout/terminal-pane"; import { TranscriptHeader } from "./mock-layout/transcript-header"; -import { PROMPT_TEXTAREA_MAX_HEIGHT, PROMPT_TEXTAREA_MIN_HEIGHT, SPanel, ScrollBody, Shell } from "./mock-layout/ui"; +import { PROMPT_TEXTAREA_MAX_HEIGHT, PROMPT_TEXTAREA_MIN_HEIGHT, SPanel, ScrollBody, Shell, SpinnerDot } from "./mock-layout/ui"; import { DevPanel, useDevPanel } from "./dev-panel"; import { buildDisplayMessages, @@ -40,6 +40,7 @@ import { import { activeMockOrganization, useMockAppSnapshot } from "../lib/mock-app"; import { backendClient } from "../lib/backend"; import { interestManager } from "../lib/interest"; +import { describeTaskState, isProvisioningTaskStatus } from "../features/tasks/status"; function firstAgentTabId(task: Task): string | null { return task.tabs[0]?.id ?? null; @@ -88,6 +89,7 @@ function toLegacyTab( thinkingSinceMs: summary.thinkingSinceMs, unread: summary.unread, created: summary.created, + errorMessage: summary.errorMessage ?? null, draft: sessionDetail?.draft ?? { text: "", attachments: [], @@ -107,7 +109,9 @@ function toLegacyTask( id: summary.id, repoId: summary.repoId, title: detail?.title ?? summary.title, - status: detail?.status ?? summary.status, + status: detail?.runtimeStatus ?? detail?.status ?? summary.status, + runtimeStatus: detail?.runtimeStatus, + statusMessage: detail?.statusMessage ?? null, repoName: detail?.repoName ?? summary.repoName, updatedAtMs: detail?.updatedAtMs ?? summary.updatedAtMs, branch: detail?.branch ?? 
summary.branch, @@ -117,9 +121,26 @@ function toLegacyTask( diffs: detail?.diffs ?? {}, fileTree: detail?.fileTree ?? [], minutesUsed: detail?.minutesUsed ?? 0, + activeSandboxId: detail?.activeSandboxId ?? null, }; } +function sessionStateMessage(tab: Task["tabs"][number] | null | undefined): string | null { + if (!tab) { + return null; + } + if (tab.status === "pending_provision") { + return "Provisioning sandbox..."; + } + if (tab.status === "pending_session_create") { + return "Creating session..."; + } + if (tab.status === "error") { + return tab.errorMessage ?? "Session failed to start."; + } + return null; +} + function groupProjects(repos: Array<{ id: string; label: string }>, tasks: Task[]) { return repos .map((repo) => ({ @@ -152,6 +173,7 @@ interface WorkbenchActions { const TranscriptPanel = memo(function TranscriptPanel({ taskWorkbenchClient, task, + hasSandbox, activeTabId, lastAgentTabId, openDiffs, @@ -169,6 +191,7 @@ const TranscriptPanel = memo(function TranscriptPanel({ }: { taskWorkbenchClient: WorkbenchActions; task: Task; + hasSandbox: boolean; activeTabId: string | null; lastAgentTabId: string | null; openDiffs: string[]; @@ -202,6 +225,16 @@ const TranscriptPanel = memo(function TranscriptPanel({ const isTerminal = task.status === "archived"; const historyEvents = useMemo(() => buildHistoryEvents(task.tabs), [task.tabs]); const activeMessages = useMemo(() => buildDisplayMessages(activeAgentTab), [activeAgentTab]); + const taskRuntimeStatus = task.runtimeStatus ?? task.status; + const taskState = describeTaskState(taskRuntimeStatus, task.statusMessage ?? 
null); + const taskProvisioning = isProvisioningTaskStatus(taskRuntimeStatus); + const taskProvisioningMessage = taskState.detail; + const activeSessionMessage = sessionStateMessage(activeAgentTab); + const showPendingSessionState = + !activeDiff && + !!activeAgentTab && + (activeAgentTab.status === "pending_provision" || activeAgentTab.status === "pending_session_create" || activeAgentTab.status === "error") && + activeMessages.length === 0; const draft = promptTab?.draft.text ?? ""; const attachments = promptTab?.draft.attachments ?? []; @@ -542,6 +575,7 @@ const TranscriptPanel = memo(function TranscriptPanel({ -

Create the first session

-

Sessions are where you chat with the agent. Start one now to send the first prompt on this task.

- + {taskProvisioning ? ( + <> + +

{taskState.title}

+

{taskProvisioningMessage}

+ + ) : ( + <> +

Create the first session

+

Sessions are where you chat with the agent. Start one now to send the first prompt on this task.

+ + + )} + + + + ) : showPendingSessionState ? ( + +
+
+ {activeAgentTab?.status === "error" ? null : } +

+ {activeAgentTab?.status === "pending_provision" + ? "Provisioning sandbox" + : activeAgentTab?.status === "pending_session_create" + ? "Creating session" + : "Session unavailable"} +

+

{activeSessionMessage}

+ {activeAgentTab?.status === "error" ? ( + + ) : null}
@@ -658,7 +754,7 @@ const TranscriptPanel = memo(function TranscriptPanel({ /> )} - {!isTerminal && promptTab ? ( + {!isTerminal && promptTab && (promptTab.status === "ready" || promptTab.status === "running" || promptTab.status === "idle") ? ( { + if (!taskState.data?.activeSandboxId) return null; + return taskState.data.sandboxes?.find((s) => s.sandboxId === taskState.data!.activeSandboxId) ?? null; + }, [taskState.data?.activeSandboxId, taskState.data?.sandboxes]); + const sandboxState = useInterest( + interestManager, + "sandboxProcesses", + activeSandbox + ? { + workspaceId, + providerId: activeSandbox.providerId, + sandboxId: activeSandbox.sandboxId, + } + : null, + ); + const hasSandbox = Boolean(activeSandbox) && sandboxState.status !== "error"; const tasks = useMemo(() => { const sessionCache = new Map(); if (selectedTaskSummary && taskState.data) { @@ -1293,7 +1405,7 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M const { taskId, tabId } = await taskWorkbenchClient.createTask({ repoId, task: "New task", - model: "gpt-4o", + model: "gpt-5.3-codex", title: "New task", }); await navigate({ @@ -1693,6 +1805,7 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M workspaceId={workspaceId} snapshot={{ workspaceId, repos: workspaceRepos, projects: rawProjects, tasks } as TaskWorkbenchSnapshot} organization={activeOrg} + focusedTask={null} /> )} @@ -1794,6 +1907,7 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M ({ + id: tab.id, + sessionId: tab.sessionId ?? null, + sessionName: tab.sessionName ?? tab.id, + agent: tab.agent, + model: tab.model, + status: tab.status, + thinkingSinceMs: tab.thinkingSinceMs ?? null, + unread: tab.unread ?? false, + created: tab.created ?? false, + })) ?? 
[], + }} /> )} diff --git a/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx b/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx index 0f8f688..44c78ba 100644 --- a/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx @@ -521,6 +521,10 @@ export const Sidebar = memo(function Sidebar({ const isActive = task.id === activeId; const isDim = task.status === "archived"; const isRunning = task.tabs.some((tab) => tab.status === "running"); + const isProvisioning = + String(task.status).startsWith("init_") || + task.status === "new" || + task.tabs.some((tab) => tab.status === "pending_provision" || tab.status === "pending_session_create"); const hasUnread = task.tabs.some((tab) => tab.unread); const isDraft = task.pullRequest == null || task.pullRequest.status === "draft"; const totalAdded = task.fileChanges.reduce((sum, file) => sum + file.added, 0); @@ -592,7 +596,7 @@ export const Sidebar = memo(function Sidebar({ flexShrink: 0, })} > - + deriveHeaderStatus(taskStatus, task.statusMessage ?? null, activeTab?.status ?? null, activeTab?.errorMessage ?? null, hasSandbox), + [taskStatus, task.statusMessage, activeTab?.status, activeTab?.errorMessage, hasSandbox], + ); return ( @@ -161,6 +169,7 @@ export const TranscriptHeader = memo(function TranscriptHeader({ ) ) : null} +
; + if (isProvisioning) return ; if (hasUnread) return ; if (isDraft) return ; return ; @@ -173,8 +184,75 @@ export const AgentIcon = memo(function AgentIcon({ agent, size = 14 }: { agent: } }); +export type HeaderStatusVariant = "error" | "warning" | "success" | "neutral"; + +export interface HeaderStatusInfo { + variant: HeaderStatusVariant; + label: string; + spinning: boolean; + tooltip?: string; +} + +export const HeaderStatusPill = memo(function HeaderStatusPill({ status }: { status: HeaderStatusInfo }) { + const [css] = useStyletron(); + const t = useFoundryTokens(); + + const colorMap: Record = { + error: { bg: `${t.statusError}18`, text: t.statusError, dot: t.statusError }, + warning: { bg: `${t.statusWarning}18`, text: t.statusWarning, dot: t.statusWarning }, + success: { bg: `${t.statusSuccess}18`, text: t.statusSuccess, dot: t.statusSuccess }, + neutral: { bg: t.interactiveSubtle, text: t.textTertiary, dot: t.textTertiary }, + }; + const colors = colorMap[status.variant]; + + return ( +
+ {status.spinning ? ( +
+ ) : ( +
+ )} + {status.label} +
+ ); +}); + export const TabAvatar = memo(function TabAvatar({ tab }: { tab: AgentTab }) { - if (tab.status === "running") return ; + if (tab.status === "running" || tab.status === "pending_provision" || tab.status === "pending_session_create") return ; if (tab.unread) return ; return ; }); diff --git a/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts b/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts index f3362dc..810b065 100644 --- a/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts +++ b/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts @@ -8,7 +8,7 @@ function makeTab(transcript: WorkbenchAgentTab["transcript"]): WorkbenchAgentTab sessionId: "session-1", sessionName: "Session 1", agent: "Codex", - model: "gpt-4o", + model: "gpt-5.3-codex", status: "idle", thinkingSinceMs: null, unread: false, diff --git a/foundry/packages/frontend/src/components/mock-layout/view-model.ts b/foundry/packages/frontend/src/components/mock-layout/view-model.ts index d22ea5c..bb5e72b 100644 --- a/foundry/packages/frontend/src/components/mock-layout/view-model.ts +++ b/foundry/packages/frontend/src/components/mock-layout/view-model.ts @@ -28,8 +28,12 @@ export const MODEL_GROUPS: ModelGroup[] = [ { provider: "OpenAI", models: [ - { id: "gpt-4o", label: "GPT-4o" }, - { id: "o3", label: "o3" }, + { id: "gpt-5.3-codex", label: "GPT-5.3 Codex" }, + { id: "gpt-5.4", label: "GPT-5.4" }, + { id: "gpt-5.2-codex", label: "GPT-5.2 Codex" }, + { id: "gpt-5.1-codex-max", label: "GPT-5.1 Codex Max" }, + { id: "gpt-5.2", label: "GPT-5.2" }, + { id: "gpt-5.1-codex-mini", label: "GPT-5.1 Codex Mini" }, ], }, ]; diff --git a/foundry/packages/frontend/src/components/workspace-dashboard.tsx b/foundry/packages/frontend/src/components/workspace-dashboard.tsx index fca4279..569c4bf 100644 --- a/foundry/packages/frontend/src/components/workspace-dashboard.tsx +++ 
b/foundry/packages/frontend/src/components/workspace-dashboard.tsx @@ -1,5 +1,5 @@ import { useEffect, useMemo, useState, type ReactNode } from "react"; -import type { AgentType, RepoBranchRecord, RepoOverview, RepoStackAction, WorkbenchTaskStatus } from "@sandbox-agent/foundry-shared"; +import type { AgentType, RepoBranchRecord, RepoOverview, RepoStackAction, TaskWorkbenchSnapshot, WorkbenchTaskStatus } from "@sandbox-agent/foundry-shared"; import { useInterest } from "@sandbox-agent/foundry-client"; import { useMutation, useQuery } from "@tanstack/react-query"; import { Link, useNavigate } from "@tanstack/react-router"; @@ -15,9 +15,12 @@ import { styled, useStyletron } from "baseui"; import { HeadingSmall, HeadingXSmall, LabelSmall, LabelXSmall, MonoLabelSmall, ParagraphSmall } from "baseui/typography"; import { Bot, CircleAlert, FolderGit2, GitBranch, MessageSquareText, SendHorizontal, Shuffle } from "lucide-react"; import { formatDiffStat } from "../features/tasks/model"; +import { deriveHeaderStatus, describeTaskState } from "../features/tasks/status"; +import { HeaderStatusPill } from "./mock-layout/ui"; import { buildTranscript, resolveSessionSelection } from "../features/sessions/model"; import { backendClient } from "../lib/backend"; import { interestManager } from "../lib/interest"; +import { DevPanel, useDevPanel } from "./dev-panel"; interface WorkspaceDashboardProps { workspaceId: string; @@ -99,7 +102,8 @@ const AGENT_OPTIONS: SelectItem[] = [ function statusKind(status: WorkbenchTaskStatus): StatusTagKind { if (status === "running") return "positive"; - if (status === "new") return "warning"; + if (status === "error") return "negative"; + if (status === "new" || String(status).startsWith("init_")) return "warning"; return "neutral"; } @@ -332,6 +336,7 @@ function MetaRow({ label, value, mono = false }: { label: string; value: string; export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId }: WorkspaceDashboardProps) { const 
[css, theme] = useStyletron(); const navigate = useNavigate(); + const showDevPanel = useDevPanel(); const repoOverviewMode = typeof selectedRepoId === "string" && selectedRepoId.length > 0; const [draft, setDraft] = useState(""); @@ -467,6 +472,10 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId }, [selectedForSession?.id]); const sessionRows = selectedForSession?.sessionsSummary ?? []; + const taskRuntimeStatus = selectedForSession?.runtimeStatus ?? selectedForSession?.status ?? null; + const taskStatusState = describeTaskState(taskRuntimeStatus, selectedForSession?.statusMessage ?? null); + const taskStateSummary = `${taskStatusState.title}. ${taskStatusState.detail}`; + const shouldUseTaskStateEmptyState = Boolean(selectedForSession && taskRuntimeStatus && taskRuntimeStatus !== "running" && taskRuntimeStatus !== "idle"); const sessionSelection = useMemo( () => resolveSessionSelection({ @@ -497,7 +506,69 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId } : null, ); + const selectedSessionSummary = useMemo(() => sessionRows.find((session) => session.id === resolvedSessionId) ?? null, [resolvedSessionId, sessionRows]); + const isPendingProvision = selectedSessionSummary?.status === "pending_provision"; + const isPendingSessionCreate = selectedSessionSummary?.status === "pending_session_create"; + const isSessionError = selectedSessionSummary?.status === "error"; const canStartSession = Boolean(selectedForSession && activeSandbox?.sandboxId); + const devPanelFocusedTask = useMemo(() => { + if (repoOverviewMode) { + return null; + } + + const task = selectedForSession ?? selectedSummary; + if (!task) { + return null; + } + + return { + id: task.id, + repoId: task.repoId, + title: task.title, + status: task.status, + runtimeStatus: selectedForSession?.runtimeStatus ?? null, + statusMessage: selectedForSession?.statusMessage ?? null, + branch: task.branch ?? 
null, + activeSandboxId: selectedForSession?.activeSandboxId ?? null, + activeSessionId: selectedForSession?.activeSessionId ?? null, + sandboxes: selectedForSession?.sandboxes ?? [], + sessions: selectedForSession?.sessionsSummary ?? [], + }; + }, [repoOverviewMode, selectedForSession, selectedSummary]); + const devPanelSnapshot = useMemo( + (): TaskWorkbenchSnapshot => ({ + workspaceId, + repos: repos.map((repo) => ({ id: repo.id, label: repo.label })), + projects: [], + tasks: rows.map((task) => ({ + id: task.id, + repoId: task.repoId, + title: task.title, + status: task.status, + runtimeStatus: selectedForSession?.id === task.id ? selectedForSession.runtimeStatus : undefined, + statusMessage: selectedForSession?.id === task.id ? selectedForSession.statusMessage : null, + repoName: task.repoName, + updatedAtMs: task.updatedAtMs, + branch: task.branch ?? null, + pullRequest: task.pullRequest, + tabs: task.sessionsSummary.map((session) => ({ + ...session, + draft: { + text: "", + attachments: [], + updatedAtMs: null, + }, + transcript: [], + })), + fileChanges: [], + diffs: {}, + fileTree: [], + minutesUsed: 0, + activeSandboxId: selectedForSession?.id === task.id ? selectedForSession.activeSandboxId : null, + })), + }), + [repos, rows, selectedForSession, workspaceId], + ); const startSessionFromTask = async (): Promise<{ id: string; status: "running" | "idle" | "error" }> => { if (!selectedForSession || !activeSandbox?.sandboxId) { @@ -1265,7 +1336,17 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId {selectedForSession ? (selectedForSession.title ?? "Determining title...") : "No task selected"} - {selectedForSession ? {selectedForSession.status} : null} + {selectedForSession ? ( + + ) : null}
{selectedForSession && !resolvedSessionId ? ( @@ -1280,6 +1361,11 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId ) : null}
+ {selectedForSession ? ( + + {taskStateSummary} + + ) : null}
{resolvedSessionId && sessionState.status === "loading" ? : null} + {selectedSessionSummary && (isPendingProvision || isPendingSessionCreate) ? ( +
+ + {shouldUseTaskStateEmptyState ? taskStatusState.title : isPendingProvision ? "Provisioning sandbox..." : "Creating session..."} + + + + {shouldUseTaskStateEmptyState + ? taskStateSummary + : (selectedForSession?.statusMessage ?? + (isPendingProvision ? "The task is still provisioning." : "The session is being created."))} + +
+ ) : null} + {transcript.length === 0 && !(resolvedSessionId && sessionState.status === "loading") ? ( - {selectedForSession.runtimeStatus === "error" && selectedForSession.statusMessage - ? `Session failed: ${selectedForSession.statusMessage}` - : !activeSandbox?.sandboxId - ? selectedForSession.statusMessage - ? `Sandbox unavailable: ${selectedForSession.statusMessage}` - : "This task is still provisioning its sandbox." - : staleSessionId - ? `Session ${staleSessionId} is unavailable. Start a new session to continue.` - : resolvedSessionId - ? "No transcript events yet. Send a prompt to start this session." - : "No active session for this task."} + {shouldUseTaskStateEmptyState + ? taskStateSummary + : isPendingProvision + ? (selectedForSession.statusMessage ?? "Provisioning sandbox...") + : isPendingSessionCreate + ? "Creating session..." + : isSessionError + ? (selectedSessionSummary?.errorMessage ?? "Session failed to start.") + : !activeSandbox?.sandboxId + ? selectedForSession.statusMessage + ? `Sandbox unavailable: ${selectedForSession.statusMessage}` + : "This task is still provisioning its sandbox." + : staleSessionId + ? `Session ${staleSessionId} is unavailable. Start a new session to continue.` + : resolvedSessionId + ? "No transcript events yet. Send a prompt to start this session." + : "No active session for this task."} ) : null} @@ -1442,7 +1559,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId onChange={(event) => setDraft(event.target.value)} placeholder="Send a follow-up prompt to this session" rows={5} - disabled={!activeSandbox?.sandboxId} + disabled={!activeSandbox?.sandboxId || isPendingProvision || isPendingSessionCreate || isSessionError} overrides={textareaTestIdOverrides("task-session-prompt")} />
+ + @@ -1606,7 +1732,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
- {selectedForSession.runtimeStatus === "error" ? ( + {taskRuntimeStatus === "error" ? (
- Session reported an error state + Task reported an error state
- {selectedForSession.statusMessage ? selectedForSession.statusMessage : "Open transcript in the center panel for details."} + {taskStatusState.detail}
) : null} @@ -1837,7 +1963,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId }} data-testid="task-create-submit" > - Create Task + {createTask.isPending ? "Creating..." : "Create Task"} @@ -1886,6 +2012,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId + {showDevPanel ? : null} ); } diff --git a/foundry/packages/frontend/src/features/tasks/model.test.ts b/foundry/packages/frontend/src/features/tasks/model.test.ts index 08f7b76..dc62f30 100644 --- a/foundry/packages/frontend/src/features/tasks/model.test.ts +++ b/foundry/packages/frontend/src/features/tasks/model.test.ts @@ -10,7 +10,7 @@ const base: TaskRecord = { branchName: "feature/one", title: "Feature one", task: "Ship one", - providerId: "daytona", + providerId: "local", status: "running", statusMessage: null, activeSandboxId: "sandbox-1", @@ -18,9 +18,9 @@ const base: TaskRecord = { sandboxes: [ { sandboxId: "sandbox-1", - providerId: "daytona", + providerId: "local", sandboxActorId: null, - switchTarget: "daytona://sandbox-1", + switchTarget: "sandbox://local/sandbox-1", cwd: null, createdAt: 10, updatedAt: 10, diff --git a/foundry/packages/frontend/src/features/tasks/status.test.ts b/foundry/packages/frontend/src/features/tasks/status.test.ts new file mode 100644 index 0000000..408b85c --- /dev/null +++ b/foundry/packages/frontend/src/features/tasks/status.test.ts @@ -0,0 +1,133 @@ +import { describe, expect, it } from "vitest"; +import { TaskStatusSchema } from "@sandbox-agent/foundry-shared"; +import { defaultTaskStatusMessage, deriveHeaderStatus, describeTaskState, isProvisioningTaskStatus, resolveTaskStateDetail } from "./status"; + +describe("defaultTaskStatusMessage", () => { + it("covers every backend task status", () => { + for (const status of [...TaskStatusSchema.options, "new"] as const) { + expect(defaultTaskStatusMessage(status)).toMatch(/\S/); + } + }); + + it("returns the expected copy for init_ensure_name", () => { + 
expect(defaultTaskStatusMessage("init_ensure_name")).toBe("Determining title and branch."); + }); +}); + +describe("resolveTaskStateDetail", () => { + it("prefers the backend status message when present", () => { + expect(resolveTaskStateDetail("init_ensure_name", "determining title and branch")).toBe("determining title and branch"); + }); + + it("falls back to the default copy when the backend message is empty", () => { + expect(resolveTaskStateDetail("init_complete", " ")).toBe("Finalizing task initialization."); + }); +}); + +describe("describeTaskState", () => { + it("includes the raw backend status code in the title", () => { + expect(describeTaskState("kill_destroy_sandbox", null)).toEqual({ + title: "Task state: kill_destroy_sandbox", + detail: "Destroying sandbox resources.", + }); + }); +}); + +describe("isProvisioningTaskStatus", () => { + it("treats all init states as provisioning", () => { + expect(isProvisioningTaskStatus("init_bootstrap_db")).toBe(true); + expect(isProvisioningTaskStatus("init_ensure_name")).toBe(true); + expect(isProvisioningTaskStatus("init_complete")).toBe(true); + }); + + it("does not treat steady-state or terminal states as provisioning", () => { + expect(isProvisioningTaskStatus("running")).toBe(false); + expect(isProvisioningTaskStatus("archived")).toBe(false); + expect(isProvisioningTaskStatus("killed")).toBe(false); + }); +}); + +describe("deriveHeaderStatus", () => { + it("returns error variant when session has error", () => { + const result = deriveHeaderStatus("running", null, "error", "Sandbox crashed"); + expect(result.variant).toBe("error"); + expect(result.label).toBe("Session error"); + expect(result.tooltip).toBe("Sandbox crashed"); + expect(result.spinning).toBe(false); + }); + + it("returns error variant when task has error", () => { + const result = deriveHeaderStatus("error", "session:error", null, null); + expect(result.variant).toBe("error"); + expect(result.label).toBe("Error"); + 
expect(result.spinning).toBe(false); + }); + + it("returns warning variant with spinner for provisioning task", () => { + const result = deriveHeaderStatus("init_enqueue_provision", null, null, null); + expect(result.variant).toBe("warning"); + expect(result.label).toBe("Provisioning"); + expect(result.spinning).toBe(true); + }); + + it("returns warning variant for pending_provision session", () => { + const result = deriveHeaderStatus("running", null, "pending_provision", null); + expect(result.variant).toBe("warning"); + expect(result.label).toBe("Provisioning"); + expect(result.spinning).toBe(true); + }); + + it("returns warning variant for pending_session_create session", () => { + const result = deriveHeaderStatus("running", null, "pending_session_create", null); + expect(result.variant).toBe("warning"); + expect(result.label).toBe("Creating session"); + expect(result.spinning).toBe(true); + }); + + it("returns success variant with spinner for running session", () => { + const result = deriveHeaderStatus("running", null, "running", null); + expect(result.variant).toBe("success"); + expect(result.label).toBe("Running"); + expect(result.spinning).toBe(true); + }); + + it("returns success variant for idle/ready state", () => { + const result = deriveHeaderStatus("idle", null, "idle", null); + expect(result.variant).toBe("success"); + expect(result.label).toBe("Ready"); + expect(result.spinning).toBe(false); + }); + + it("returns neutral variant for archived task", () => { + const result = deriveHeaderStatus("archived", null, null, null); + expect(result.variant).toBe("neutral"); + expect(result.label).toBe("Archived"); + }); + + it("session error takes priority over task error", () => { + const result = deriveHeaderStatus("error", "session:error", "error", "Sandbox OOM"); + expect(result.variant).toBe("error"); + expect(result.label).toBe("Session error"); + expect(result.tooltip).toBe("Sandbox OOM"); + }); + + it("returns warning when no sandbox is available", 
() => { + const result = deriveHeaderStatus("idle", null, "idle", null, false); + expect(result.variant).toBe("warning"); + expect(result.label).toBe("No sandbox"); + expect(result.spinning).toBe(false); + }); + + it("still shows provisioning when no sandbox but task is provisioning", () => { + const result = deriveHeaderStatus("init_enqueue_provision", null, null, null, false); + expect(result.variant).toBe("warning"); + expect(result.label).toBe("Provisioning"); + expect(result.spinning).toBe(true); + }); + + it("shows error over no-sandbox when session has error", () => { + const result = deriveHeaderStatus("idle", null, "error", "Connection lost", false); + expect(result.variant).toBe("error"); + expect(result.label).toBe("Session error"); + }); +}); diff --git a/foundry/packages/frontend/src/features/tasks/status.ts b/foundry/packages/frontend/src/features/tasks/status.ts new file mode 100644 index 0000000..3e8ec82 --- /dev/null +++ b/foundry/packages/frontend/src/features/tasks/status.ts @@ -0,0 +1,179 @@ +import type { TaskStatus, WorkbenchSessionStatus } from "@sandbox-agent/foundry-shared"; +import type { HeaderStatusInfo } from "../../components/mock-layout/ui"; + +export type TaskDisplayStatus = TaskStatus | "new"; + +export interface TaskStateDescriptor { + title: string; + detail: string; +} + +export function isProvisioningTaskStatus(status: TaskDisplayStatus | null | undefined): boolean { + return ( + status === "new" || + status === "init_bootstrap_db" || + status === "init_enqueue_provision" || + status === "init_ensure_name" || + status === "init_assert_name" || + status === "init_complete" + ); +} + +export function defaultTaskStatusMessage(status: TaskDisplayStatus | null | undefined): string { + switch (status) { + case "new": + return "Task created. 
Waiting to initialize."; + case "init_bootstrap_db": + return "Creating task records."; + case "init_enqueue_provision": + return "Queueing sandbox provisioning."; + case "init_ensure_name": + return "Determining title and branch."; + case "init_assert_name": + return "Validating title and branch."; + case "init_complete": + return "Finalizing task initialization."; + case "running": + return "Agent session is actively running."; + case "idle": + return "Sandbox is ready for the next prompt."; + case "archive_stop_status_sync": + return "Stopping sandbox status sync before archiving."; + case "archive_release_sandbox": + return "Releasing sandbox resources."; + case "archive_finalize": + return "Finalizing archive."; + case "archived": + return "Task has been archived."; + case "kill_destroy_sandbox": + return "Destroying sandbox resources."; + case "kill_finalize": + return "Finalizing task termination."; + case "killed": + return "Task has been terminated."; + case "error": + return "Task entered an error state."; + case null: + case undefined: + return "Task state unavailable."; + } +} + +export function resolveTaskStateDetail(status: TaskDisplayStatus | null | undefined, statusMessage: string | null | undefined): string { + const normalized = statusMessage?.trim(); + return normalized && normalized.length > 0 ? normalized : defaultTaskStatusMessage(status); +} + +export function describeTaskState(status: TaskDisplayStatus | null | undefined, statusMessage: string | null | undefined): TaskStateDescriptor { + return { + title: status ? `Task state: ${status}` : "Task state unavailable", + detail: resolveTaskStateDetail(status, statusMessage), + }; +} + +/** + * Derives the header status pill state from the combined task + active session + sandbox state. + * Priority: session error > task error > task provisioning > no sandbox > session pending > running > ready/idle > neutral. 
+ */ +export function deriveHeaderStatus( + taskStatus: TaskDisplayStatus | null | undefined, + taskStatusMessage: string | null | undefined, + sessionStatus: WorkbenchSessionStatus | null | undefined, + sessionErrorMessage: string | null | undefined, + hasSandbox?: boolean, +): HeaderStatusInfo { + // Session error takes priority + if (sessionStatus === "error") { + return { + variant: "error", + label: "Session error", + spinning: false, + tooltip: sessionErrorMessage ?? "Session failed to start.", + }; + } + + // Task error + if (taskStatus === "error") { + return { + variant: "error", + label: "Error", + spinning: false, + tooltip: taskStatusMessage ?? "Task entered an error state.", + }; + } + + // No sandbox available (not provisioning, not errored — just missing) + if (hasSandbox === false && !isProvisioningTaskStatus(taskStatus)) { + return { + variant: "warning", + label: "No sandbox", + spinning: false, + tooltip: taskStatusMessage ?? "Sandbox is not available for this task.", + }; + } + + // Task provisioning (init_* states) + if (isProvisioningTaskStatus(taskStatus)) { + return { + variant: "warning", + label: "Provisioning", + spinning: true, + tooltip: resolveTaskStateDetail(taskStatus, taskStatusMessage), + }; + } + + // Session pending states + if (sessionStatus === "pending_provision") { + return { + variant: "warning", + label: "Provisioning", + spinning: true, + tooltip: "Provisioning sandbox...", + }; + } + + if (sessionStatus === "pending_session_create") { + return { + variant: "warning", + label: "Creating session", + spinning: true, + tooltip: "Creating agent session...", + }; + } + + // Running + if (sessionStatus === "running") { + return { + variant: "success", + label: "Running", + spinning: true, + tooltip: "Agent is actively running.", + }; + } + + // Ready / idle + if (sessionStatus === "ready" || sessionStatus === "idle" || taskStatus === "idle" || taskStatus === "running") { + return { + variant: "success", + label: "Ready", + 
spinning: false, + tooltip: "Sandbox is ready.", + }; + } + + // Terminal states + if (taskStatus === "archived" || taskStatus === "killed") { + return { + variant: "neutral", + label: taskStatus === "archived" ? "Archived" : "Terminated", + spinning: false, + }; + } + + // Fallback + return { + variant: "neutral", + label: taskStatus ?? "Unknown", + spinning: false, + }; +} diff --git a/foundry/packages/shared/src/app-shell.ts b/foundry/packages/shared/src/app-shell.ts index 8e757c5..d21370f 100644 --- a/foundry/packages/shared/src/app-shell.ts +++ b/foundry/packages/shared/src/app-shell.ts @@ -1,3 +1,5 @@ +import type { WorkbenchModelId } from "./workbench.js"; + export type FoundryBillingPlanId = "free" | "team"; export type FoundryBillingStatus = "active" | "trialing" | "past_due" | "scheduled_cancel"; export type FoundryGithubInstallationStatus = "connected" | "install_required" | "reconnect_required"; @@ -55,7 +57,7 @@ export interface FoundryOrganizationSettings { slug: string; primaryDomain: string; seatAccrualMode: "first_prompt"; - defaultModel: "claude-sonnet-4" | "claude-opus-4" | "gpt-4o" | "o3"; + defaultModel: WorkbenchModelId; autoImportRepos: boolean; } diff --git a/foundry/packages/shared/src/config.ts b/foundry/packages/shared/src/config.ts index 8fd31df..3f7e0b0 100644 --- a/foundry/packages/shared/src/config.ts +++ b/foundry/packages/shared/src/config.ts @@ -43,19 +43,17 @@ export const ConfigSchema = z.object({ .object({ local: z .object({ - rootDir: z.string().optional(), - sandboxAgentPort: z.number().int().min(1).max(65535).optional(), + image: z.string().optional(), }) .default({}), - daytona: z + e2b: z .object({ - endpoint: z.string().optional(), apiKey: z.string().optional(), - image: z.string().default("ubuntu:24.04"), + template: z.string().optional(), }) - .default({ image: "ubuntu:24.04" }), + .default({}), }) - .default({ local: {}, daytona: { image: "ubuntu:24.04" } }), + .default({ local: {}, e2b: {} }), }); export type AppConfig 
= z.infer; diff --git a/foundry/packages/shared/src/contracts.ts b/foundry/packages/shared/src/contracts.ts index 6c99d4e..40c4f53 100644 --- a/foundry/packages/shared/src/contracts.ts +++ b/foundry/packages/shared/src/contracts.ts @@ -7,7 +7,7 @@ export const WorkspaceIdSchema = z .regex(/^[a-zA-Z0-9._-]+$/); export type WorkspaceId = z.infer; -export const ProviderIdSchema = z.enum(["daytona", "local"]); +export const ProviderIdSchema = z.enum(["e2b", "local"]); export type ProviderId = z.infer; export const AgentTypeSchema = z.enum(["claude", "codex"]); @@ -24,12 +24,6 @@ export const TaskStatusSchema = z.enum([ "init_enqueue_provision", "init_ensure_name", "init_assert_name", - "init_create_sandbox", - "init_ensure_agent", - "init_start_sandbox_instance", - "init_create_session", - "init_write_db", - "init_start_status_sync", "init_complete", "running", "idle", diff --git a/foundry/packages/shared/src/workbench.ts b/foundry/packages/shared/src/workbench.ts index 2aa6a6e..e549c6e 100644 --- a/foundry/packages/shared/src/workbench.ts +++ b/foundry/packages/shared/src/workbench.ts @@ -1,8 +1,17 @@ import type { AgentType, ProviderId, TaskStatus } from "./contracts.js"; -export type WorkbenchTaskStatus = "running" | "idle" | "new" | "archived"; +export type WorkbenchTaskStatus = TaskStatus | "new"; export type WorkbenchAgentKind = "Claude" | "Codex" | "Cursor"; -export type WorkbenchModelId = "claude-sonnet-4" | "claude-opus-4" | "gpt-4o" | "o3"; +export type WorkbenchModelId = + | "claude-sonnet-4" + | "claude-opus-4" + | "gpt-5.3-codex" + | "gpt-5.4" + | "gpt-5.2-codex" + | "gpt-5.1-codex-max" + | "gpt-5.2" + | "gpt-5.1-codex-mini"; +export type WorkbenchSessionStatus = "pending_provision" | "pending_session_create" | "ready" | "running" | "idle" | "error"; export interface WorkbenchTranscriptEvent { id: string; @@ -27,10 +36,11 @@ export interface WorkbenchSessionSummary { sessionName: string; agent: WorkbenchAgentKind; model: WorkbenchModelId; - status: 
"running" | "idle" | "error"; + status: WorkbenchSessionStatus; thinkingSinceMs: number | null; unread: boolean; created: boolean; + errorMessage?: string | null; } /** Full session content — only fetched when viewing a specific session tab. */ @@ -42,10 +52,11 @@ export interface WorkbenchSessionDetail { sessionName: string; agent: WorkbenchAgentKind; model: WorkbenchModelId; - status: "running" | "idle" | "error"; + status: WorkbenchSessionStatus; thinkingSinceMs: number | null; unread: boolean; created: boolean; + errorMessage?: string | null; draft: WorkbenchComposerDraft; transcript: WorkbenchTranscriptEvent[]; } @@ -166,6 +177,8 @@ export interface WorkbenchTask { repoId: string; title: string; status: WorkbenchTaskStatus; + runtimeStatus?: TaskStatus; + statusMessage?: string | null; repoName: string; updatedAtMs: number; branch: string | null; @@ -175,6 +188,7 @@ export interface WorkbenchTask { diffs: Record; fileTree: WorkbenchFileTreeNode[]; minutesUsed: number; + activeSandboxId?: string | null; } export interface WorkbenchRepo { diff --git a/foundry/packages/shared/test/workspace.test.ts b/foundry/packages/shared/test/workspace.test.ts index ab596ac..c57173a 100644 --- a/foundry/packages/shared/test/workspace.test.ts +++ b/foundry/packages/shared/test/workspace.test.ts @@ -15,7 +15,8 @@ const cfg: AppConfig = ConfigSchema.parse({ backup_retention_days: 7, }, providers: { - daytona: { image: "ubuntu:24.04" }, + local: {}, + e2b: {}, }, }); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index d69182e..b4a4540 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -452,9 +452,9 @@ importers: foundry/packages/backend: dependencies: - '@daytonaio/sdk': - specifier: 0.141.0 - version: 0.141.0(ws@8.19.0) + '@e2b/code-interpreter': + specifier: ^2.3.3 + version: 2.3.3 '@hono/node-server': specifier: ^1.19.7 version: 1.19.9(hono@4.12.2) @@ -473,6 +473,9 @@ importers: better-auth: specifier: ^1.5.5 version: 
1.5.5(@cloudflare/workers-types@4.20260313.1)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(pg@8.20.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(solid-js@1.9.11)(vitest@3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) + dockerode: + specifier: ^4.0.9 + version: 4.0.9 drizzle-kit: specifier: ^0.31.8 version: 0.31.9 @@ -486,8 +489,8 @@ importers: specifier: ^10.3.1 version: 10.3.1 rivetkit: - specifier: 2.1.6 - version: 2.1.6(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0) + specifier: https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a + version: https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a(@e2b/code-interpreter@2.3.3)(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(dockerode@4.0.9)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0) sandbox-agent: specifier: workspace:* version: link:../../../sdks/typescript @@ -1667,21 +1670,12 @@ packages: moment: optional: true - '@daytonaio/api-client@0.141.0': - resolution: {integrity: sha512-DSPCurIEjfFyXCd07jkDgfsoFppVhTLyIJdvfb0LgG1EgV75BPqqzk2WM4ragBFJUuK2URF5CK7qkaHW0AXKMA==} - '@daytonaio/api-client@0.151.0': resolution: {integrity: sha512-Ahu7bjunHbJEEAEkcEFjjdazN+1hML/lLZwOyul2WFaCTh9q5dmufhr0qKAKCIs3ccTY+Is0fO5UtPpo/fig+A==} - '@daytonaio/sdk@0.141.0': - 
resolution: {integrity: sha512-JUopkS9SkO7h4WN8CjparOrP9k954euOF5KG//PeCEFOxUWTPFOME70GrmHXQKa1qkdZiF/4tz9jtZ744B1I2w==} - '@daytonaio/sdk@0.151.0': resolution: {integrity: sha512-wd4x9Bipt1KmTD+0GXTVEQtgXBmyy/gAmCjdOJllwo5Ya5RbGu/CZeitBCIEKhDM8TnkxefVxdpxBCfi/Wg9xA==} - '@daytonaio/toolbox-api-client@0.141.0': - resolution: {integrity: sha512-KGkCLDLAltd9FCic3PhSJGrTp3RwGsUwWEGp5vyWZFQGWpJV8CVp08CH5SBdo4YhuqFUVlyQcwha1HpzpVH++A==} - '@daytonaio/toolbox-api-client@0.151.0': resolution: {integrity: sha512-63n/wBNnZh1r8dUypzwNeenoA4okWNEWzsE6kZ8b047y5zBYT0cI63cGRn25nSrepLlGKpX4MJnVjjz50+bVqA==} @@ -3146,9 +3140,17 @@ packages: '@rivetkit/engine-runner-protocol@2.1.6': resolution: {integrity: sha512-QwaWvAJN2KGae+UHKZbLiEWaWj9ycmwtrRtUq728CU+lidkaGv5yHxXb4gkXSD7rhGQcR98+XWZLb0F0BM/vAg==} + '@rivetkit/engine-runner-protocol@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/engine-runner-protocol@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd': + resolution: {tarball: https://pkg.pr.new/rivet-dev/rivet/@rivetkit/engine-runner-protocol@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd} + version: 2.1.6 + '@rivetkit/engine-runner@2.1.6': resolution: {integrity: sha512-WpiEmi/SxAVED0N/M0kvPZwq/MxMuuz/Y89ut1sTP7syPzpCauGxafOdqkTqiX1ef+N1ZlrtX+v/LwDF/jIgFw==} + '@rivetkit/engine-runner@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/engine-runner@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd': + resolution: {tarball: https://pkg.pr.new/rivet-dev/rivet/@rivetkit/engine-runner@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd} + version: 2.1.6 + '@rivetkit/fast-json-patch@3.1.2': resolution: {integrity: sha512-CtA50xgsSSzICQduF/NDShPRzvucnNvsW/lQO0WgMTT1XAj9Lfae4pm7r3llFwilgG+9iq76Hv1LUqNy72v6yw==} @@ -3160,6 +3162,11 @@ packages: resolution: {integrity: sha512-jbCrigzqoygZTYdZu7izaQjr77Q4BFX1HwhW4Mf0UFIaKT72AteH/w4PcktzrKcw4Utmo0zX0C6zNBRKo0IpOA==} engines: {node: '>=20.0.0'} + '@rivetkit/sqlite-vfs@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/sqlite-vfs@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd': + resolution: 
{tarball: https://pkg.pr.new/rivet-dev/rivet/@rivetkit/sqlite-vfs@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd} + version: 2.1.6 + engines: {node: '>=20.0.0'} + '@rivetkit/sqlite@0.1.1': resolution: {integrity: sha512-NE7ZBy/hQhOrWzMZFjkHX9SoXxf+ILcDvVV+mNbUYPgiy/fsDzlXdK0+JDTGnko5f4Xl6/KVCoCozz9gkwkq8A==} @@ -3167,13 +3174,27 @@ packages: resolution: {integrity: sha512-wuuGWoWWdUPbqs5u+31YodSUOsYMydaa+/cxZ7I5KaUe26fK0i1E+0ytqC1JGQm6utWeuYp8cLUX3WSEfVKJhQ==} engines: {node: '>=18.0.0'} + '@rivetkit/traces@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/traces@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd': + resolution: {tarball: https://pkg.pr.new/rivet-dev/rivet/@rivetkit/traces@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd} + version: 2.1.6 + engines: {node: '>=18.0.0'} + '@rivetkit/virtual-websocket@2.0.33': resolution: {integrity: sha512-sMoHZgBy9WDW76pv+ML3LPgf7TWk5vXdu3ZpPO20j6n+rB3fLacnnmzjt5xD6tZcJ/x5qINyEywGgcxA7MTMuQ==} + '@rivetkit/virtual-websocket@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/virtual-websocket@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd': + resolution: {tarball: https://pkg.pr.new/rivet-dev/rivet/@rivetkit/virtual-websocket@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd} + version: 2.0.33 + '@rivetkit/workflow-engine@2.1.6': resolution: {integrity: sha512-eLVFBbhOlBQKzO5lu032tOo0OEAFFp7uNcGwvB1mBFmYsm7aKBgnJl214IV39a6fRtCL2meVxiMU1GKb006zYw==} engines: {node: '>=18.0.0'} + '@rivetkit/workflow-engine@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/workflow-engine@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd': + resolution: {tarball: https://pkg.pr.new/rivet-dev/rivet/@rivetkit/workflow-engine@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd} + version: 2.1.6 + engines: {node: '>=18.0.0'} + '@rolldown/pluginutils@1.0.0-beta.27': resolution: {integrity: sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==} @@ -3317,6 +3338,38 @@ packages: '@rtsao/csstype@2.6.5-forked.0': resolution: {integrity: 
sha512-0HwnY8uPWcCloTgdbbaJG3MbDUfNf6yKWZfCKxFv9yj2Sbp4mSKaIjC7Cr/5L4hMxvrrk85CU3wlAg7EtBBJ1Q==} + '@sandbox-agent/cli-darwin-arm64@0.3.2': + resolution: {integrity: sha512-w4aAr7vPj4m6Lwr25bIoXRVTJDoybP/tcGhB+unzsRUSQqCtrZpRCg7opJqds5vIOQzVxFlmVwquKWcPlDjYRA==} + cpu: [arm64] + os: [darwin] + + '@sandbox-agent/cli-darwin-x64@0.3.2': + resolution: {integrity: sha512-OJkaeNGjvPNWHVnNgUXxN4H5nv75nWwRnxOVRf1UP8VvLhd3xoL6uWGgmVoRSF+Wwg3dMHbkrmMwAdcXyaSV9w==} + cpu: [x64] + os: [darwin] + + '@sandbox-agent/cli-linux-arm64@0.3.2': + resolution: {integrity: sha512-WTreRie8wrvGijuqBaK7/78sfizCcex1CNBZjws7a6/tn4Ar+thQ0jbY8alrpdtBLuwzk/pBIQ2oRU9vidHtvQ==} + cpu: [arm64] + os: [linux] + + '@sandbox-agent/cli-linux-x64@0.3.2': + resolution: {integrity: sha512-aas9+UdW0+j2aWOCp+EV5GA8JkmwsIg0lSRkrRijzrnewsNxlSFQ4dIsSpTkqyMWO18STqjqFtmkZI/dIASEyQ==} + cpu: [x64] + os: [linux] + + '@sandbox-agent/cli-shared@0.3.2': + resolution: {integrity: sha512-4UQKczwfY+Bf83kQ9rdXv3U5Z9PqfWm2wp6EK2rzKh6iuMAtuNI8PSUWodi1LBUjch8mJv4rx7Gs/XCO92dRRw==} + + '@sandbox-agent/cli-win32-x64@0.3.2': + resolution: {integrity: sha512-mJejOEp9czrMzbpN0VzC/y6UfKU/RyAJjTWHBNcCVfHP2zDuIOwT8y3gErM+q4CHt0lHR616wQYIqNpY8QqyDA==} + cpu: [x64] + os: [win32] + + '@sandbox-agent/cli@0.3.2': + resolution: {integrity: sha512-nQ3bxbrr0QMdm0eK/MvBNd6Npvt1VCxQrrkpf747dVCHmKxCmcRc3t/jeeye1Lof6sP8l01mHgvSFNiez/KNHQ==} + hasBin: true + '@shikijs/core@3.21.0': resolution: {integrity: sha512-AXSQu/2n1UIQekY8euBJlvFYZIw0PHY63jUzGbrOma4wPxzznJXTXkri+QcHeBNaFxiiOljKxxJkVSoB3PjbyA==} @@ -3770,6 +3823,9 @@ packages: engines: {node: '>=0.4.0'} hasBin: true + acp-http-client@0.3.2: + resolution: {integrity: sha512-btRUDXAA9BlcTQURsJogdWthoXsKOnMeFhtYlEYQxgt0vq7H6xMfMrewlIgFjRXgRTbru4Fre2T6wS/amTTyjQ==} + aggregate-error@5.0.0: resolution: {integrity: sha512-gOsf2YwSlleG6IjRYG2A7k0HmBMEo6qVNk9Bp/EaLgAJT5ngH6PXbqa4ItvnEwCm/velL5jAnQgsHsWnjhGmvw==} engines: {node: '>=18'} @@ -6407,6 +6463,40 @@ packages: ws: optional: true + 
rivetkit@https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a: + resolution: {tarball: https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a} + version: 2.1.6 + engines: {node: '>=22.0.0'} + peerDependencies: + '@daytonaio/sdk': ^0.150.0 + '@e2b/code-interpreter': ^2.3.3 + '@hono/node-server': ^1.14.0 + '@hono/node-ws': ^1.1.1 + dockerode: ^4.0.9 + drizzle-kit: ^0.31.2 + drizzle-orm: ^0.44.2 + eventsource: ^4.0.0 + ws: ^8.0.0 + peerDependenciesMeta: + '@daytonaio/sdk': + optional: true + '@e2b/code-interpreter': + optional: true + '@hono/node-server': + optional: true + '@hono/node-ws': + optional: true + dockerode: + optional: true + drizzle-kit: + optional: true + drizzle-orm: + optional: true + eventsource: + optional: true + ws: + optional: true + robust-predicates@3.0.2: resolution: {integrity: sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==} @@ -6438,6 +6528,9 @@ packages: safer-buffer@2.1.2: resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + sandbox-agent@0.3.2: + resolution: {integrity: sha512-ic1UPLMKExjXIw4ViL0Wo07YsSqGtS25g6b6BechOX5CBC4d130tCR1xrhJ8Fuy6jlzx6I+f7gIxDedTnRUWSA==} + sax@1.4.4: resolution: {integrity: sha512-1n3r/tGXO6b6VXMdFT54SHzT9ytu9yr7TaELowdYpMqY/Ao7EnlQGmAQ1+RatX7Tkkdm6hONI2owqNx2aZj5Sw==} engines: {node: '>=11.0.0'} @@ -8424,49 +8517,12 @@ snapshots: optionalDependencies: moment: 2.30.1 - '@daytonaio/api-client@0.141.0': - dependencies: - axios: 1.13.5 - transitivePeerDependencies: - - debug - '@daytonaio/api-client@0.151.0': dependencies: axios: 1.13.5 transitivePeerDependencies: - debug - '@daytonaio/sdk@0.141.0(ws@8.19.0)': - dependencies: - '@aws-sdk/client-s3': 3.975.0 - '@aws-sdk/lib-storage': 3.975.0(@aws-sdk/client-s3@3.975.0) - '@daytonaio/api-client': 0.141.0 - '@daytonaio/toolbox-api-client': 0.141.0 - '@iarna/toml': 2.2.5 - '@opentelemetry/api': 1.9.0 - '@opentelemetry/exporter-trace-otlp-http': 
0.207.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-http': 0.207.0(@opentelemetry/api@1.9.0) - '@opentelemetry/otlp-exporter-base': 0.207.0(@opentelemetry/api@1.9.0) - '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.0) - '@opentelemetry/sdk-node': 0.207.0(@opentelemetry/api@1.9.0) - '@opentelemetry/sdk-trace-base': 2.5.0(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.39.0 - axios: 1.13.5 - busboy: 1.6.0 - dotenv: 17.2.3 - expand-tilde: 2.0.2 - fast-glob: 3.3.3 - form-data: 4.0.5 - isomorphic-ws: 5.0.0(ws@8.19.0) - pathe: 2.0.3 - shell-quote: 1.8.3 - tar: 7.5.7 - transitivePeerDependencies: - - aws-crt - - debug - - supports-color - - ws - '@daytonaio/sdk@0.151.0(ws@8.19.0)': dependencies: '@aws-sdk/client-s3': 3.975.0 @@ -8498,12 +8554,6 @@ snapshots: - supports-color - ws - '@daytonaio/toolbox-api-client@0.141.0': - dependencies: - axios: 1.13.5 - transitivePeerDependencies: - - debug - '@daytonaio/toolbox-api-client@0.151.0': dependencies: axios: 1.13.5 @@ -9597,6 +9647,10 @@ snapshots: dependencies: '@rivetkit/bare-ts': 0.6.2 + '@rivetkit/engine-runner-protocol@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/engine-runner-protocol@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd': + dependencies: + '@rivetkit/bare-ts': 0.6.2 + '@rivetkit/engine-runner@2.1.6': dependencies: '@rivetkit/engine-runner-protocol': 2.1.6 @@ -9608,6 +9662,17 @@ snapshots: - bufferutil - utf-8-validate + '@rivetkit/engine-runner@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/engine-runner@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd': + dependencies: + '@rivetkit/engine-runner-protocol': https://pkg.pr.new/rivet-dev/rivet/@rivetkit/engine-runner-protocol@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd + '@rivetkit/virtual-websocket': https://pkg.pr.new/rivet-dev/rivet/@rivetkit/virtual-websocket@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd + pino: 9.14.0 + uuid: 12.0.0 + ws: 8.19.0 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + 
'@rivetkit/fast-json-patch@3.1.2': {} '@rivetkit/on-change@6.0.2-rc.1': {} @@ -9618,6 +9683,12 @@ snapshots: '@rivetkit/sqlite': 0.1.1 vbare: 0.0.4 + '@rivetkit/sqlite-vfs@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/sqlite-vfs@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd': + dependencies: + '@rivetkit/bare-ts': 0.6.2 + '@rivetkit/sqlite': 0.1.1 + vbare: 0.0.4 + '@rivetkit/sqlite@0.1.1': {} '@rivetkit/traces@2.1.6': @@ -9627,8 +9698,17 @@ snapshots: fdb-tuple: 1.0.0 vbare: 0.0.4 + '@rivetkit/traces@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/traces@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd': + dependencies: + '@rivetkit/bare-ts': 0.6.2 + cbor-x: 1.6.3 + fdb-tuple: 1.0.0 + vbare: 0.0.4 + '@rivetkit/virtual-websocket@2.0.33': {} + '@rivetkit/virtual-websocket@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/virtual-websocket@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd': {} + '@rivetkit/workflow-engine@2.1.6': dependencies: '@rivetkit/bare-ts': 0.6.2 @@ -9637,6 +9717,14 @@ snapshots: pino: 9.14.0 vbare: 0.0.4 + '@rivetkit/workflow-engine@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/workflow-engine@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd': + dependencies: + '@rivetkit/bare-ts': 0.6.2 + cbor-x: 1.6.3 + fdb-tuple: 1.0.0 + pino: 9.14.0 + vbare: 0.0.4 + '@rolldown/pluginutils@1.0.0-beta.27': {} '@rolldown/pluginutils@1.0.0-rc.3': {} @@ -9726,6 +9814,34 @@ snapshots: '@rtsao/csstype@2.6.5-forked.0': {} + '@sandbox-agent/cli-darwin-arm64@0.3.2': + optional: true + + '@sandbox-agent/cli-darwin-x64@0.3.2': + optional: true + + '@sandbox-agent/cli-linux-arm64@0.3.2': + optional: true + + '@sandbox-agent/cli-linux-x64@0.3.2': + optional: true + + '@sandbox-agent/cli-shared@0.3.2': {} + + '@sandbox-agent/cli-win32-x64@0.3.2': + optional: true + + '@sandbox-agent/cli@0.3.2': + dependencies: + '@sandbox-agent/cli-shared': 0.3.2 + optionalDependencies: + '@sandbox-agent/cli-darwin-arm64': 0.3.2 + '@sandbox-agent/cli-darwin-x64': 0.3.2 + '@sandbox-agent/cli-linux-arm64': 0.3.2 + 
'@sandbox-agent/cli-linux-x64': 0.3.2 + '@sandbox-agent/cli-win32-x64': 0.3.2 + optional: true + '@shikijs/core@3.21.0': dependencies: '@shikijs/types': 3.21.0 @@ -10400,6 +10516,12 @@ snapshots: acorn@8.15.0: {} + acp-http-client@0.3.2(zod@4.3.6): + dependencies: + '@agentclientprotocol/sdk': 0.14.1(zod@4.3.6) + transitivePeerDependencies: + - zod + aggregate-error@5.0.0: dependencies: clean-stack: 5.3.0 @@ -13427,6 +13549,44 @@ snapshots: - bufferutil - utf-8-validate + rivetkit@https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a(@e2b/code-interpreter@2.3.3)(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(dockerode@4.0.9)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0): + dependencies: + '@hono/standard-validator': 0.1.5(@standard-schema/spec@1.1.0)(hono@4.12.2) + '@hono/zod-openapi': 1.2.2(hono@4.12.2)(zod@4.3.6) + '@rivetkit/bare-ts': 0.6.2 + '@rivetkit/engine-runner': https://pkg.pr.new/rivet-dev/rivet/@rivetkit/engine-runner@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd + '@rivetkit/fast-json-patch': 3.1.2 + '@rivetkit/on-change': 6.0.2-rc.1 + '@rivetkit/sqlite': 0.1.1 + '@rivetkit/sqlite-vfs': https://pkg.pr.new/rivet-dev/rivet/@rivetkit/sqlite-vfs@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd + '@rivetkit/traces': https://pkg.pr.new/rivet-dev/rivet/@rivetkit/traces@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd + '@rivetkit/virtual-websocket': https://pkg.pr.new/rivet-dev/rivet/@rivetkit/virtual-websocket@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd + '@rivetkit/workflow-engine': https://pkg.pr.new/rivet-dev/rivet/@rivetkit/workflow-engine@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd + cbor-x: 1.6.3 + get-port: 7.1.0 + hono: 4.12.2 + invariant: 2.2.4 + nanoevents: 9.1.0 + p-retry: 6.2.1 + pino: 9.14.0 + sandbox-agent: 
0.3.2(zod@4.3.6) + tar: 7.5.7 + uuid: 12.0.0 + vbare: 0.0.4 + zod: 4.3.6 + optionalDependencies: + '@e2b/code-interpreter': 2.3.3 + '@hono/node-server': 1.19.9(hono@4.12.2) + '@hono/node-ws': 1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2) + dockerode: 4.0.9 + drizzle-kit: 0.31.9 + drizzle-orm: 0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) + ws: 8.19.0 + transitivePeerDependencies: + - '@standard-schema/spec' + - bufferutil + - utf-8-validate + robust-predicates@3.0.2: {} rollup@4.56.0: @@ -13484,6 +13644,15 @@ snapshots: safer-buffer@2.1.2: {} + sandbox-agent@0.3.2(zod@4.3.6): + dependencies: + '@sandbox-agent/cli-shared': 0.3.2 + acp-http-client: 0.3.2(zod@4.3.6) + optionalDependencies: + '@sandbox-agent/cli': 0.3.2 + transitivePeerDependencies: + - zod + sax@1.4.4: {} scheduler@0.23.2: diff --git a/sdks/typescript/src/client.ts b/sdks/typescript/src/client.ts index 6b11490..9945c0a 100644 --- a/sdks/typescript/src/client.ts +++ b/sdks/typescript/src/client.ts @@ -82,6 +82,7 @@ const DEFAULT_BASE_URL = "http://sandbox-agent"; const DEFAULT_REPLAY_MAX_EVENTS = 50; const DEFAULT_REPLAY_MAX_CHARS = 12_000; const EVENT_INDEX_SCAN_EVENTS_LIMIT = 500; +const MAX_EVENT_INDEX_INSERT_RETRIES = 3; const SESSION_CANCEL_METHOD = "session/cancel"; const MANUAL_CANCEL_ERROR = "Manual session/cancel calls are not allowed. 
Use destroySession(sessionId) instead."; const HEALTH_WAIT_MIN_DELAY_MS = 500; @@ -841,6 +842,7 @@ export class SandboxAgent { private readonly pendingPermissionRequests = new Map(); private readonly nextSessionEventIndexBySession = new Map(); private readonly seedSessionEventIndexBySession = new Map>(); + private readonly pendingObservedEnvelopePersistenceBySession = new Map>(); constructor(options: SandboxAgentConnectOptions) { const baseUrl = options.baseUrl?.trim(); @@ -906,6 +908,7 @@ export class SandboxAgent { this.liveConnections.clear(); const pending = [...this.pendingLiveConnections.values()]; this.pendingLiveConnections.clear(); + this.pendingObservedEnvelopePersistenceBySession.clear(); const pendingSettled = await Promise.allSettled(pending); for (const item of pendingSettled) { @@ -969,7 +972,6 @@ export class SandboxAgent { }; await this.persist.updateSession(record); - this.nextSessionEventIndexBySession.set(record.id, 1); live.bindSession(record.id, record.agentSessionId); let session = this.upsertSessionHandle(record); @@ -1639,7 +1641,9 @@ export class SandboxAgent { agent, serverId, onObservedEnvelope: (connection, envelope, direction, localSessionId) => { - void this.persistObservedEnvelope(connection, envelope, direction, localSessionId); + void this.enqueueObservedEnvelopePersistence(connection, envelope, direction, localSessionId).catch((error) => { + console.error("Failed to persist observed sandbox-agent envelope", error); + }); }, onPermissionRequest: async (connection, localSessionId, agentSessionId, request) => this.enqueuePermissionRequest(connection, localSessionId, agentSessionId, request), @@ -1675,17 +1679,32 @@ export class SandboxAgent { return; } - const event: SessionEvent = { - id: randomId(), - eventIndex: await this.allocateSessionEventIndex(localSessionId), - sessionId: localSessionId, - createdAt: nowMs(), - connectionId: connection.connectionId, - sender: direction === "outbound" ? 
"client" : "agent", - payload: cloneEnvelope(envelope), - }; + let event: SessionEvent | null = null; + for (let attempt = 0; attempt < MAX_EVENT_INDEX_INSERT_RETRIES; attempt += 1) { + event = { + id: randomId(), + eventIndex: await this.allocateSessionEventIndex(localSessionId), + sessionId: localSessionId, + createdAt: nowMs(), + connectionId: connection.connectionId, + sender: direction === "outbound" ? "client" : "agent", + payload: cloneEnvelope(envelope), + }; + + try { + await this.persist.insertEvent(event); + break; + } catch (error) { + if (!isSessionEventIndexConflict(error) || attempt === MAX_EVENT_INDEX_INSERT_RETRIES - 1) { + throw error; + } + } + } + + if (!event) { + return; + } - await this.persist.insertEvent(event); await this.persistSessionStateFromEvent(localSessionId, envelope, direction); const listeners = this.eventListeners.get(localSessionId); @@ -1698,6 +1717,34 @@ export class SandboxAgent { } } + private async enqueueObservedEnvelopePersistence( + connection: LiveAcpConnection, + envelope: AnyMessage, + direction: AcpEnvelopeDirection, + localSessionId: string | null, + ): Promise { + if (!localSessionId) { + return; + } + + const previous = this.pendingObservedEnvelopePersistenceBySession.get(localSessionId) ?? Promise.resolve(); + const current = previous + .catch(() => { + // Keep later envelope persistence moving even if an earlier write failed. 
+ }) + .then(() => this.persistObservedEnvelope(connection, envelope, direction, localSessionId)); + + this.pendingObservedEnvelopePersistenceBySession.set(localSessionId, current); + + try { + await current; + } finally { + if (this.pendingObservedEnvelopePersistenceBySession.get(localSessionId) === current) { + this.pendingObservedEnvelopePersistenceBySession.delete(localSessionId); + } + } + } + private async persistSessionStateFromEvent(sessionId: string, envelope: AnyMessage, direction: AcpEnvelopeDirection): Promise { if (direction !== "inbound") { return; @@ -2066,6 +2113,14 @@ export class SandboxAgent { } } +function isSessionEventIndexConflict(error: unknown): boolean { + if (!(error instanceof Error)) { + return false; + } + + return /UNIQUE constraint failed: .*session_id, .*event_index/.test(error.message); +} + type PendingPermissionRequestState = { id: string; sessionId: string; diff --git a/sdks/typescript/tests/integration.test.ts b/sdks/typescript/tests/integration.test.ts index 36e53d4..003b0dd 100644 --- a/sdks/typescript/tests/integration.test.ts +++ b/sdks/typescript/tests/integration.test.ts @@ -5,7 +5,15 @@ import { dirname, resolve } from "node:path"; import { join } from "node:path"; import { fileURLToPath } from "node:url"; import { tmpdir } from "node:os"; -import { InMemorySessionPersistDriver, SandboxAgent, type SessionEvent } from "../src/index.ts"; +import { + InMemorySessionPersistDriver, + SandboxAgent, + type ListEventsRequest, + type ListPage, + type SessionEvent, + type SessionPersistDriver, + type SessionRecord, +} from "../src/index.ts"; import { spawnSandboxAgent, isNodeRuntime, type SandboxAgentSpawnHandle } from "../src/spawn.ts"; import { prepareMockAgentDataHome } from "./helpers/mock-agent.ts"; import WebSocket from "ws"; @@ -40,6 +48,44 @@ function sleep(ms: number): Promise { return new Promise((resolve) => setTimeout(resolve, ms)); } +class StrictUniqueSessionPersistDriver implements SessionPersistDriver { + private 
readonly events = new InMemorySessionPersistDriver({ + maxEventsPerSession: 500, + }); + private readonly eventIndexesBySession = new Map>(); + + async getSession(id: string): Promise { + return this.events.getSession(id); + } + + async listSessions(request?: { cursor?: string; limit?: number }): Promise> { + return this.events.listSessions(request); + } + + async updateSession(session: SessionRecord): Promise { + await this.events.updateSession(session); + } + + async listEvents(request: ListEventsRequest): Promise> { + return this.events.listEvents(request); + } + + async insertEvent(event: SessionEvent): Promise { + await sleep(5); + + const indexes = this.eventIndexesBySession.get(event.sessionId) ?? new Set(); + if (indexes.has(event.eventIndex)) { + throw new Error("UNIQUE constraint failed: sandbox_agent_events.session_id, sandbox_agent_events.event_index"); + } + + indexes.add(event.eventIndex); + this.eventIndexesBySession.set(event.sessionId, indexes); + + await sleep(5); + await this.events.insertEvent(event); + } +} + async function waitFor(fn: () => T | undefined | null, timeoutMs = 6000, stepMs = 30): Promise { const started = Date.now(); while (Date.now() - started < timeoutMs) { @@ -207,6 +253,27 @@ describe("Integration: TypeScript SDK flat session API", () => { await sdk.dispose(); }); + it("preserves observed event indexes across session creation follow-up calls", async () => { + const persist = new StrictUniqueSessionPersistDriver(); + const sdk = await SandboxAgent.connect({ + baseUrl, + token, + persist, + }); + + const session = await sdk.createSession({ agent: "mock" }); + const prompt = await session.prompt([{ type: "text", text: "preserve event indexes" }]); + expect(prompt.stopReason).toBe("end_turn"); + + const events = await waitForAsync(async () => { + const page = await sdk.getEvents({ sessionId: session.id, limit: 200 }); + return page.items.length >= 4 ? 
page : null; + }); + expect(new Set(events.items.map((event) => event.eventIndex)).size).toBe(events.items.length); + + await sdk.dispose(); + }); + it("covers agent query flags and filesystem HTTP helpers", async () => { const sdk = await SandboxAgent.connect({ baseUrl, From 5ea9ec5e2f0766cd51400edfc30e52292d024842 Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Sat, 14 Mar 2026 14:38:29 -0700 Subject: [PATCH 02/48] wip (#253) --- .env.development.example | 3 + docs/deploy/foundry-self-hosting.mdx | 2 + foundry/CLAUDE.md | 31 +- foundry/compose.dev.yaml | 21 + .../backend/src/actors/github-data/db/db.ts | 5 + .../src/actors/github-data/db/migrations.ts | 61 ++ .../src/actors/github-data/db/schema.ts | 46 ++ .../backend/src/actors/github-data/index.ts | 775 ++++++++++++++++++ .../packages/backend/src/actors/handles.ts | 21 +- foundry/packages/backend/src/actors/index.ts | 6 +- foundry/packages/backend/src/actors/keys.ts | 4 +- .../src/actors/project-pr-sync/index.ts | 98 --- .../backend/src/actors/project/actions.ts | 324 +++----- .../src/actors/project/db/migrations.ts | 15 - .../backend/src/actors/project/db/schema.ts | 15 - .../packages/backend/src/actors/task/index.ts | 14 +- .../backend/src/actors/task/workbench.ts | 110 ++- .../backend/src/actors/task/workflow/index.ts | 16 +- .../backend/src/actors/workspace/actions.ts | 322 ++++++-- .../backend/src/actors/workspace/app-shell.ts | 129 +-- .../db/drizzle/0000_melted_viper.sql | 2 + .../db/drizzle/meta/0000_snapshot.json | 14 + .../src/actors/workspace/db/migrations.ts | 2 + .../backend/src/actors/workspace/db/schema.ts | 2 + .../backend/src/services/app-github.ts | 192 ++++- .../packages/backend/src/services/queue.ts | 8 + foundry/packages/backend/test/keys.test.ts | 4 +- .../backend/test/workbench-unread.test.ts | 45 +- foundry/packages/client/src/backend-client.ts | 24 + .../packages/client/src/interest/topics.ts | 15 + foundry/packages/client/src/mock-app.ts | 14 + .../client/src/mock/backend-client.ts 
| 9 + .../client/src/remote/workbench-client.ts | 3 +- .../client/test/interest-manager.test.ts | 1 + .../frontend/src/components/dev-panel.tsx | 31 +- .../frontend/src/components/mock-layout.tsx | 420 ++++++++-- .../mock-layout/history-minimap.tsx | 4 +- .../components/mock-layout/model-picker.tsx | 1 - .../components/mock-layout/right-sidebar.tsx | 27 +- .../src/components/mock-layout/sidebar.tsx | 233 ++++-- .../components/mock-layout/terminal-pane.tsx | 5 +- .../mock-layout/transcript-header.tsx | 1 - .../src/components/mock-layout/ui.tsx | 5 +- foundry/packages/shared/src/app-shell.ts | 2 + .../packages/shared/src/realtime-events.ts | 6 +- foundry/packages/shared/src/workbench.ts | 17 + foundry/research/specs/github-data-actor.md | 169 ++++ 47 files changed, 2605 insertions(+), 669 deletions(-) create mode 100644 foundry/packages/backend/src/actors/github-data/db/db.ts create mode 100644 foundry/packages/backend/src/actors/github-data/db/migrations.ts create mode 100644 foundry/packages/backend/src/actors/github-data/db/schema.ts create mode 100644 foundry/packages/backend/src/actors/github-data/index.ts delete mode 100644 foundry/packages/backend/src/actors/project-pr-sync/index.ts create mode 100644 foundry/research/specs/github-data-actor.md diff --git a/.env.development.example b/.env.development.example index c4132f4..0ae0f58 100644 --- a/.env.development.example +++ b/.env.development.example @@ -23,6 +23,9 @@ GITHUB_APP_PRIVATE_KEY= # Webhook secret for verifying GitHub webhook payloads. # Use smee.io for local development: https://smee.io/new GITHUB_WEBHOOK_SECRET= +# Required for local GitHub webhook forwarding in compose.dev. +SMEE_URL= +SMEE_TARGET=http://backend:7741/v1/webhooks/github # Fill these in when enabling live Stripe billing. 
STRIPE_SECRET_KEY= diff --git a/docs/deploy/foundry-self-hosting.mdx b/docs/deploy/foundry-self-hosting.mdx index 172d680..8fd43ae 100644 --- a/docs/deploy/foundry-self-hosting.mdx +++ b/docs/deploy/foundry-self-hosting.mdx @@ -92,6 +92,8 @@ Recommended GitHub App permissions: Set the webhook URL to `https:///v1/webhooks/github` and generate a webhook secret. Store the secret as `GITHUB_WEBHOOK_SECRET`. +This is required, not optional. Foundry depends on GitHub App webhook delivery for installation lifecycle changes, repo access changes, and ongoing repo / pull request sync. If the GitHub App is not installed for the workspace, or webhook delivery is misconfigured, Foundry will remain in an install / reconnect state and core GitHub-backed functionality will not work correctly. + Recommended webhook subscriptions: - `installation` diff --git a/foundry/CLAUDE.md b/foundry/CLAUDE.md index e83c532..aae89c3 100644 --- a/foundry/CLAUDE.md +++ b/foundry/CLAUDE.md @@ -143,6 +143,7 @@ The client subscribes to `app` always, `workspace` when entering a workspace, `t - If a requested UI cannot be implemented cleanly with an existing `BaseUI` component, stop and ask the user whether they are sure they want to diverge from the system. - In that case, recommend the closest existing `BaseUI` components or compositions that could satisfy the need before proposing custom UI work. - Only introduce custom UI primitives when `BaseUI` and existing Foundry patterns are not sufficient, or when the user explicitly confirms they want the divergence. +- **Styletron atomic CSS rule:** Never mix CSS shorthand properties with their longhand equivalents in the same style object (including nested pseudo-selectors like `:hover`), or in a base styled component whose consumers override with longhand via `$style`. This includes `padding`/`paddingLeft`, `margin`/`marginTop`, `background`/`backgroundColor`, `border`/`borderLeft`, etc. 
Styletron generates independent atomic classes for shorthand and longhand, so they conflict unpredictably. Use `backgroundColor: "transparent"` instead of `background: "none"` for button resets. Always use longhand properties when any side may be overridden individually. ## Runtime Policy @@ -201,15 +202,37 @@ For all Rivet/RivetKit implementation: - Do not build blocking flows that wait on external systems to become ready or complete. Prefer push-based progression driven by actor messages, events, webhooks, or queue/workflow state changes. - Use workflows/background commands for any repo sync, sandbox provisioning, agent install, branch restack/rebase, or other multi-step external work. Do not keep user-facing actions/requests open while that work runs. - `send` policy: always `await` the `send(...)` call itself so enqueue failures surface immediately, but default to `wait: false`. -- Only use `send(..., { wait: true })` for short, bounded local mutations (e.g. a DB write that returns a result the caller needs). Never use `wait: true` for operations that depend on external readiness, polling actors, provider setup, repo/network I/O, sandbox sessions, GitHub API calls, or long-running queue drains. - Never self-send with `wait: true` from inside a workflow handler — the workflow processes one message at a time, so the handler would deadlock waiting for the new message to be dequeued. -- When an action is void-returning and triggers external work, use `wait: false` and let the UI react to state changes pushed by the workflow. -- Request/action contract: wait only until the minimum resource needed for the client's next step exists. Example: task creation may wait for task actor creation/identity, but not for sandbox provisioning or session bootstrap. - Read paths must not force refresh/sync work inline. Serve the latest cached projection, mark staleness explicitly, and trigger background refresh separately when needed. 
- If a workflow needs to resume after some external work completes, model that as workflow state plus follow-up messages/events instead of holding the original request open. - No retries: never add retry loops (`withRetries`, `setTimeout` retry, exponential backoff) anywhere in the codebase. If an operation fails, surface the error immediately. If a dependency is not ready yet, model that explicitly with workflow state and resume from a push/event instead of polling or retry loops. - Never throw errors that expect the caller to retry (e.g. `throw new Error("... retry shortly")`). If a dependency is not ready, write the current state to the DB with an appropriate pending status, enqueue the async work, and return successfully. Let the client observe the pending → ready transition via push events. - Action return contract: every action that creates a resource must write the resource record to the DB before returning, so the client can immediately query/render it. The record may have a pending status, but it must exist. Never return an ID that doesn't yet have a corresponding DB row. + +### Action handler responsiveness + +Action handlers must return fast. The pattern: + +1. **Creating an entity** — `wait: true` is fine. Do the DB write, return the ID/record. The caller needs the ID to proceed. The record may have a pending status; that's expected. +2. **Enqueuing work** (sending a message, triggering a sandbox operation, starting a sync) — `wait: false`. Write any precondition state to the DB synchronously, enqueue the work, and return. The client observes progress via push events on the relevant topic (session status, task status, etc.). +3. **Validating preconditions** — check state synchronously in the action handler *before* enqueuing. If a precondition isn't met (e.g. session not ready, task not initialized), throw an error immediately. Do not implicitly provision missing dependencies or poll for readiness inside the action handler. 
It is the client's responsibility to ensure preconditions are met before calling the action. + +Examples: +- `createTask` → `wait: true` (returns `{ taskId }`), then enqueue provisioning with `wait: false`. Client sees task appear immediately with pending status, observes `ready` via workspace events. +- `sendWorkbenchMessage` → validate session is `ready` (throw if not), enqueue with `wait: false`. Client observes session transition to `running` → `idle` via session events. +- `createWorkbenchSession` → `wait: true` (returns `{ tabId }`), enqueue sandbox provisioning with `wait: false`. Client observes `pending_provision` → `ready` via task events. + +Never use `wait: true` for operations that depend on external readiness, sandbox I/O, agent responses, git network operations, polling loops, or long-running queue drains. Never hold an action open while waiting for an external system to become ready — that is a polling/retry loop in disguise. + +### Task creation: resolve metadata before creating the actor + +When creating a task, all deterministic metadata (title, branch name) must be resolved synchronously in the parent actor (project) *before* the task actor is created. The task actor must never be created with null `branchName` or `title`. + +- Title is derived from the task description via `deriveFallbackTitle()` — pure string manipulation, no external I/O. +- Branch name is derived from the title via `sanitizeBranchName()` + conflict checking against remote branches and the project's task index. +- The project actor already has the repo clone and task index. Do the git fetch + name resolution there. +- Do not defer naming to a background provision workflow. Do not poll for names to become available. +- The `onBranch` path (attaching to an existing branch) and the new-task path should both produce a fully-named task record on return. - Actor handle policy: - Prefer explicit `get` or explicit `create` based on workflow intent; do not default to `getOrCreate`. 
- Use `get`/`getForId` when the actor is expected to already exist; if missing, surface an explicit `Actor not found` error with recovery context. @@ -235,6 +258,8 @@ For all Rivet/RivetKit implementation: - For Foundry live verification, use `rivet-dev/sandbox-agent-testing` as the default testing repo unless the task explicitly says otherwise. - Secrets (e.g. `OPENAI_API_KEY`, `GITHUB_TOKEN`/`GH_TOKEN`) must be provided via environment variables, never hardcoded in the repo. - `~/misc/env.txt` and `~/misc/the-foundry.env` contain the expected local OpenAI + GitHub OAuth/App config for dev. + - For local GitHub webhook development, use the configured Smee proxy (`SMEE_URL`) to forward deliveries into `POST /v1/webhooks/github`. Check `.env` / `foundry/.env` if you need the current channel URL. + - If GitHub repos, PRs, or install state are not showing up, verify that the GitHub App is installed for the workspace and that webhook delivery is enabled and healthy. Foundry depends on webhook events for GitHub-backed state; missing webhooks means the product will appear broken. - Do not assume `gh auth token` is sufficient for Foundry task provisioning against private repos. Sandbox/bootstrap git clone, push, and PR flows require a repo-capable `GITHUB_TOKEN`/`GH_TOKEN` in the backend container. - Preferred product behavior for org workspaces is to mint a GitHub App installation token from the workspace installation and inject it into backend/sandbox git operations. Do not rely on an operator's ambient CLI auth as the long-term solution. - Treat client E2E tests in `packages/client/test` as the primary end-to-end source of truth for product behavior. 
diff --git a/foundry/compose.dev.yaml b/foundry/compose.dev.yaml index e412ac9..8dd9f97 100644 --- a/foundry/compose.dev.yaml +++ b/foundry/compose.dev.yaml @@ -93,6 +93,27 @@ services: - "foundry_shared_node_modules:/app/foundry/packages/shared/node_modules" - "foundry_pnpm_store:/tmp/.local/share/pnpm/store" + smee: + image: node:20-alpine + depends_on: + - backend + env_file: + - path: .env + required: false + environment: + SMEE_URL: "${SMEE_URL:-}" + SMEE_TARGET: "${SMEE_TARGET:-http://backend:7741/v1/webhooks/github}" + command: + - /bin/sh + - -lc + - | + if [ -z "$SMEE_URL" ]; then + echo "SMEE_URL is required for local GitHub webhook forwarding" >&2 + exit 1 + fi + exec npx --yes smee-client --url "$SMEE_URL" --target "$SMEE_TARGET" + restart: unless-stopped + volumes: foundry_backend_root_node_modules: {} foundry_backend_backend_node_modules: {} diff --git a/foundry/packages/backend/src/actors/github-data/db/db.ts b/foundry/packages/backend/src/actors/github-data/db/db.ts new file mode 100644 index 0000000..00e5a11 --- /dev/null +++ b/foundry/packages/backend/src/actors/github-data/db/db.ts @@ -0,0 +1,5 @@ +import { db } from "rivetkit/db/drizzle"; +import * as schema from "./schema.js"; +import migrations from "./migrations.js"; + +export const githubDataDb = db({ schema, migrations }); diff --git a/foundry/packages/backend/src/actors/github-data/db/migrations.ts b/foundry/packages/backend/src/actors/github-data/db/migrations.ts new file mode 100644 index 0000000..528e8fa --- /dev/null +++ b/foundry/packages/backend/src/actors/github-data/db/migrations.ts @@ -0,0 +1,61 @@ +const journal = { + entries: [ + { + idx: 0, + when: 1773446400000, + tag: "0000_github_data", + breakpoints: true, + }, + ], +} as const; + +export default { + journal, + migrations: { + m0000: `CREATE TABLE \`github_meta\` ( + \`id\` integer PRIMARY KEY NOT NULL, + \`connected_account\` text NOT NULL, + \`installation_status\` text NOT NULL, + \`sync_status\` text NOT NULL, + 
\`installation_id\` integer, + \`last_sync_label\` text NOT NULL, + \`last_sync_at\` integer, + \`updated_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE \`github_repositories\` ( + \`repo_id\` text PRIMARY KEY NOT NULL, + \`full_name\` text NOT NULL, + \`clone_url\` text NOT NULL, + \`private\` integer NOT NULL, + \`updated_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE \`github_members\` ( + \`member_id\` text PRIMARY KEY NOT NULL, + \`login\` text NOT NULL, + \`display_name\` text NOT NULL, + \`email\` text, + \`role\` text, + \`state\` text NOT NULL, + \`updated_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE \`github_pull_requests\` ( + \`pr_id\` text PRIMARY KEY NOT NULL, + \`repo_id\` text NOT NULL, + \`repo_full_name\` text NOT NULL, + \`number\` integer NOT NULL, + \`title\` text NOT NULL, + \`body\` text, + \`state\` text NOT NULL, + \`url\` text NOT NULL, + \`head_ref_name\` text NOT NULL, + \`base_ref_name\` text NOT NULL, + \`author_login\` text, + \`is_draft\` integer NOT NULL, + \`updated_at\` integer NOT NULL +); +`, + } as const, +}; diff --git a/foundry/packages/backend/src/actors/github-data/db/schema.ts b/foundry/packages/backend/src/actors/github-data/db/schema.ts new file mode 100644 index 0000000..9527fc6 --- /dev/null +++ b/foundry/packages/backend/src/actors/github-data/db/schema.ts @@ -0,0 +1,46 @@ +import { integer, sqliteTable, text } from "rivetkit/db/drizzle"; + +export const githubMeta = sqliteTable("github_meta", { + id: integer("id").primaryKey(), + connectedAccount: text("connected_account").notNull(), + installationStatus: text("installation_status").notNull(), + syncStatus: text("sync_status").notNull(), + installationId: integer("installation_id"), + lastSyncLabel: text("last_sync_label").notNull(), + lastSyncAt: integer("last_sync_at"), + updatedAt: integer("updated_at").notNull(), +}); + +export const githubRepositories = sqliteTable("github_repositories", { + repoId: 
text("repo_id").notNull().primaryKey(), + fullName: text("full_name").notNull(), + cloneUrl: text("clone_url").notNull(), + private: integer("private").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +export const githubMembers = sqliteTable("github_members", { + memberId: text("member_id").notNull().primaryKey(), + login: text("login").notNull(), + displayName: text("display_name").notNull(), + email: text("email"), + role: text("role"), + state: text("state").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +export const githubPullRequests = sqliteTable("github_pull_requests", { + prId: text("pr_id").notNull().primaryKey(), + repoId: text("repo_id").notNull(), + repoFullName: text("repo_full_name").notNull(), + number: integer("number").notNull(), + title: text("title").notNull(), + body: text("body"), + state: text("state").notNull(), + url: text("url").notNull(), + headRefName: text("head_ref_name").notNull(), + baseRefName: text("base_ref_name").notNull(), + authorLogin: text("author_login"), + isDraft: integer("is_draft").notNull(), + updatedAt: integer("updated_at").notNull(), +}); diff --git a/foundry/packages/backend/src/actors/github-data/index.ts b/foundry/packages/backend/src/actors/github-data/index.ts new file mode 100644 index 0000000..6b7d1f8 --- /dev/null +++ b/foundry/packages/backend/src/actors/github-data/index.ts @@ -0,0 +1,775 @@ +// @ts-nocheck +import { eq } from "drizzle-orm"; +import { actor } from "rivetkit"; +import type { FoundryOrganization } from "@sandbox-agent/foundry-shared"; +import { getActorRuntimeContext } from "../context.js"; +import { getOrCreateWorkspace, getTask } from "../handles.js"; +import { repoIdFromRemote } from "../../services/repo.js"; +import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js"; +import { githubDataDb } from "./db/db.js"; +import { githubMembers, githubMeta, githubPullRequests, githubRepositories } from "./db/schema.js"; + +const META_ROW_ID = 1; + 
+interface GithubDataInput { + workspaceId: string; +} + +interface GithubMemberRecord { + id: string; + login: string; + name: string; + email?: string | null; + role?: string | null; + state?: string | null; +} + +interface GithubRepositoryRecord { + fullName: string; + cloneUrl: string; + private: boolean; +} + +interface GithubPullRequestRecord { + repoId: string; + repoFullName: string; + number: number; + title: string; + body: string | null; + state: string; + url: string; + headRefName: string; + baseRefName: string; + authorLogin: string | null; + isDraft: boolean; + updatedAt: number; +} + +interface FullSyncInput { + connectedAccount?: string | null; + installationStatus?: FoundryOrganization["github"]["installationStatus"]; + installationId?: number | null; + githubLogin?: string | null; + kind?: FoundryOrganization["kind"] | null; + accessToken?: string | null; + label?: string | null; +} + +interface ClearStateInput { + connectedAccount: string; + installationStatus: FoundryOrganization["github"]["installationStatus"]; + installationId: number | null; + label: string; +} + +interface PullRequestWebhookInput { + connectedAccount: string; + installationStatus: FoundryOrganization["github"]["installationStatus"]; + installationId: number | null; + repository: { + fullName: string; + cloneUrl: string; + private: boolean; + }; + pullRequest: { + number: number; + title: string; + body: string | null; + state: string; + url: string; + headRefName: string; + baseRefName: string; + authorLogin: string | null; + isDraft: boolean; + merged?: boolean; + }; +} + +function normalizePrStatus(input: { state: string; isDraft?: boolean; merged?: boolean }): "OPEN" | "DRAFT" | "CLOSED" | "MERGED" { + const state = input.state.trim().toUpperCase(); + if (input.merged || state === "MERGED") return "MERGED"; + if (state === "CLOSED") return "CLOSED"; + return input.isDraft ? 
"DRAFT" : "OPEN"; +} + +function pullRequestSummaryFromRow(row: any) { + return { + prId: row.prId, + repoId: row.repoId, + repoFullName: row.repoFullName, + number: row.number, + title: row.title, + state: row.state, + url: row.url, + headRefName: row.headRefName, + baseRefName: row.baseRefName, + authorLogin: row.authorLogin ?? null, + isDraft: Boolean(row.isDraft), + updatedAtMs: row.updatedAt, + }; +} + +async function readMeta(c: any) { + const row = await c.db.select().from(githubMeta).where(eq(githubMeta.id, META_ROW_ID)).get(); + return { + connectedAccount: row?.connectedAccount ?? "", + installationStatus: (row?.installationStatus ?? "install_required") as FoundryOrganization["github"]["installationStatus"], + syncStatus: (row?.syncStatus ?? "pending") as FoundryOrganization["github"]["syncStatus"], + installationId: row?.installationId ?? null, + lastSyncLabel: row?.lastSyncLabel ?? "Waiting for first import", + lastSyncAt: row?.lastSyncAt ?? null, + }; +} + +async function writeMeta(c: any, patch: Partial>>) { + const current = await readMeta(c); + const next = { + ...current, + ...patch, + }; + await c.db + .insert(githubMeta) + .values({ + id: META_ROW_ID, + connectedAccount: next.connectedAccount, + installationStatus: next.installationStatus, + syncStatus: next.syncStatus, + installationId: next.installationId, + lastSyncLabel: next.lastSyncLabel, + lastSyncAt: next.lastSyncAt, + updatedAt: Date.now(), + }) + .onConflictDoUpdate({ + target: githubMeta.id, + set: { + connectedAccount: next.connectedAccount, + installationStatus: next.installationStatus, + syncStatus: next.syncStatus, + installationId: next.installationId, + lastSyncLabel: next.lastSyncLabel, + lastSyncAt: next.lastSyncAt, + updatedAt: Date.now(), + }, + }) + .run(); + return next; +} + +async function getOrganizationContext(c: any, overrides?: FullSyncInput) { + const workspace = await getOrCreateWorkspace(c, c.state.workspaceId); + const organization = await 
workspace.getOrganizationShellStateIfInitialized({}); + if (!organization) { + throw new Error(`Workspace ${c.state.workspaceId} is not initialized`); + } + const auth = await resolveWorkspaceGithubAuth(c, c.state.workspaceId); + return { + kind: overrides?.kind ?? organization.snapshot.kind, + githubLogin: overrides?.githubLogin ?? organization.githubLogin, + connectedAccount: overrides?.connectedAccount ?? organization.snapshot.github.connectedAccount ?? organization.githubLogin, + installationId: overrides?.installationId ?? organization.githubInstallationId ?? null, + installationStatus: + overrides?.installationStatus ?? + organization.snapshot.github.installationStatus ?? + (organization.snapshot.kind === "personal" ? "connected" : "reconnect_required"), + accessToken: overrides?.accessToken ?? auth?.githubToken ?? null, + }; +} + +async function replaceRepositories(c: any, repositories: GithubRepositoryRecord[], updatedAt: number) { + await c.db.delete(githubRepositories).run(); + for (const repository of repositories) { + await c.db + .insert(githubRepositories) + .values({ + repoId: repoIdFromRemote(repository.cloneUrl), + fullName: repository.fullName, + cloneUrl: repository.cloneUrl, + private: repository.private ? 1 : 0, + updatedAt, + }) + .run(); + } +} + +async function replaceMembers(c: any, members: GithubMemberRecord[], updatedAt: number) { + await c.db.delete(githubMembers).run(); + for (const member of members) { + await c.db + .insert(githubMembers) + .values({ + memberId: member.id, + login: member.login, + displayName: member.name || member.login, + email: member.email ?? null, + role: member.role ?? null, + state: member.state ?? 
"active", + updatedAt, + }) + .run(); + } +} + +async function replacePullRequests(c: any, pullRequests: GithubPullRequestRecord[]) { + await c.db.delete(githubPullRequests).run(); + for (const pullRequest of pullRequests) { + await c.db + .insert(githubPullRequests) + .values({ + prId: `${pullRequest.repoId}#${pullRequest.number}`, + repoId: pullRequest.repoId, + repoFullName: pullRequest.repoFullName, + number: pullRequest.number, + title: pullRequest.title, + body: pullRequest.body ?? null, + state: pullRequest.state, + url: pullRequest.url, + headRefName: pullRequest.headRefName, + baseRefName: pullRequest.baseRefName, + authorLogin: pullRequest.authorLogin ?? null, + isDraft: pullRequest.isDraft ? 1 : 0, + updatedAt: pullRequest.updatedAt, + }) + .run(); + } +} + +async function refreshTaskSummaryForBranch(c: any, repoId: string, branchName: string) { + const workspace = await getOrCreateWorkspace(c, c.state.workspaceId); + await workspace.refreshTaskSummaryForGithubBranch({ repoId, branchName }); +} + +async function emitPullRequestChangeEvents(c: any, beforeRows: any[], afterRows: any[]) { + const workspace = await getOrCreateWorkspace(c, c.state.workspaceId); + const beforeById = new Map(beforeRows.map((row) => [row.prId, row])); + const afterById = new Map(afterRows.map((row) => [row.prId, row])); + + for (const [prId, row] of afterById) { + const previous = beforeById.get(prId); + const changed = + !previous || + previous.title !== row.title || + previous.state !== row.state || + previous.url !== row.url || + previous.headRefName !== row.headRefName || + previous.baseRefName !== row.baseRefName || + previous.authorLogin !== row.authorLogin || + previous.isDraft !== row.isDraft || + previous.updatedAt !== row.updatedAt; + if (!changed) { + continue; + } + await workspace.applyOpenPullRequestUpdate({ + pullRequest: pullRequestSummaryFromRow(row), + }); + await refreshTaskSummaryForBranch(c, row.repoId, row.headRefName); + } + + for (const [prId, row] of 
beforeById) { + if (afterById.has(prId)) { + continue; + } + await workspace.removeOpenPullRequest({ prId }); + await refreshTaskSummaryForBranch(c, row.repoId, row.headRefName); + } +} + +async function autoArchiveTaskForClosedPullRequest(c: any, row: any) { + const workspace = await getOrCreateWorkspace(c, c.state.workspaceId); + const match = await workspace.findTaskForGithubBranch({ + repoId: row.repoId, + branchName: row.headRefName, + }); + if (!match?.taskId) { + return; + } + try { + const task = getTask(c, c.state.workspaceId, row.repoId, match.taskId); + await task.archive({ reason: `PR ${String(row.state).toLowerCase()}` }); + } catch { + // Best-effort only. Task summary refresh will still clear the PR state. + } +} + +async function resolveRepositories(c: any, context: Awaited>): Promise { + const { appShell } = getActorRuntimeContext(); + if (context.kind === "personal") { + if (!context.accessToken) { + return []; + } + return await appShell.github.listUserRepositories(context.accessToken); + } + + if (context.installationId != null) { + try { + return await appShell.github.listInstallationRepositories(context.installationId); + } catch (error) { + if (!context.accessToken) { + throw error; + } + } + } + + if (!context.accessToken) { + return []; + } + + return (await appShell.github.listUserRepositories(context.accessToken)).filter((repository) => repository.fullName.startsWith(`${context.githubLogin}/`)); +} + +async function resolveMembers(c: any, context: Awaited>): Promise { + const { appShell } = getActorRuntimeContext(); + if (context.kind === "personal") { + return []; + } + if (context.installationId != null) { + try { + return await appShell.github.listInstallationMembers(context.installationId, context.githubLogin); + } catch (error) { + if (!context.accessToken) { + throw error; + } + } + } + if (!context.accessToken) { + return []; + } + return await appShell.github.listOrganizationMembers(context.accessToken, context.githubLogin); +} + 
+async function resolvePullRequests( + c: any, + context: Awaited>, + repositories: GithubRepositoryRecord[], +): Promise { + const { appShell } = getActorRuntimeContext(); + if (repositories.length === 0) { + return []; + } + + let pullRequests: Array<{ + repoFullName: string; + cloneUrl: string; + number: number; + title: string; + body?: string | null; + state: string; + url: string; + headRefName: string; + baseRefName: string; + authorLogin?: string | null; + isDraft?: boolean; + merged?: boolean; + }> = []; + + if (context.installationId != null) { + try { + pullRequests = await appShell.github.listInstallationPullRequestsForRepositories(context.installationId, repositories); + } catch (error) { + if (!context.accessToken) { + throw error; + } + } + } + + if (pullRequests.length === 0 && context.accessToken) { + pullRequests = await appShell.github.listPullRequestsForUserRepositories(context.accessToken, repositories); + } + + return pullRequests.map((pullRequest) => ({ + repoId: repoIdFromRemote(pullRequest.cloneUrl), + repoFullName: pullRequest.repoFullName, + number: pullRequest.number, + title: pullRequest.title, + body: pullRequest.body ?? null, + state: normalizePrStatus(pullRequest), + url: pullRequest.url, + headRefName: pullRequest.headRefName, + baseRefName: pullRequest.baseRefName, + authorLogin: pullRequest.authorLogin ?? 
null, + isDraft: Boolean(pullRequest.isDraft), + updatedAt: Date.now(), + })); +} + +async function readAllPullRequestRows(c: any) { + return await c.db.select().from(githubPullRequests).all(); +} + +async function runFullSync(c: any, input: FullSyncInput = {}) { + const startedAt = Date.now(); + const beforeRows = await readAllPullRequestRows(c); + const context = await getOrganizationContext(c, input); + + await writeMeta(c, { + connectedAccount: context.connectedAccount, + installationStatus: context.installationStatus, + installationId: context.installationId, + syncStatus: "syncing", + lastSyncLabel: input.label?.trim() || "Syncing GitHub data...", + }); + + const repositories = await resolveRepositories(c, context); + const members = await resolveMembers(c, context); + const pullRequests = await resolvePullRequests(c, context, repositories); + + await replaceRepositories(c, repositories, startedAt); + await replaceMembers(c, members, startedAt); + await replacePullRequests(c, pullRequests); + + const workspace = await getOrCreateWorkspace(c, c.state.workspaceId); + await workspace.applyGithubDataProjection({ + connectedAccount: context.connectedAccount, + installationStatus: context.installationStatus, + installationId: context.installationId, + syncStatus: "synced", + lastSyncLabel: repositories.length > 0 ? `Synced ${repositories.length} repositories` : "No repositories available", + lastSyncAt: startedAt, + repositories, + }); + + const meta = await writeMeta(c, { + connectedAccount: context.connectedAccount, + installationStatus: context.installationStatus, + installationId: context.installationId, + syncStatus: "synced", + lastSyncLabel: repositories.length > 0 ? 
`Synced ${repositories.length} repositories` : "No repositories available", + lastSyncAt: startedAt, + }); + + const afterRows = await readAllPullRequestRows(c); + await emitPullRequestChangeEvents(c, beforeRows, afterRows); + + return { + ...meta, + repositoryCount: repositories.length, + memberCount: members.length, + pullRequestCount: afterRows.length, + }; +} + +export const githubData = actor({ + db: githubDataDb, + options: { + name: "GitHub Data", + icon: "github", + actionTimeout: 5 * 60_000, + }, + createState: (_c, input: GithubDataInput) => ({ + workspaceId: input.workspaceId, + }), + actions: { + async getSummary(c) { + const repositories = await c.db.select().from(githubRepositories).all(); + const members = await c.db.select().from(githubMembers).all(); + const pullRequests = await c.db.select().from(githubPullRequests).all(); + return { + ...(await readMeta(c)), + repositoryCount: repositories.length, + memberCount: members.length, + pullRequestCount: pullRequests.length, + }; + }, + + async listRepositories(c) { + const rows = await c.db.select().from(githubRepositories).all(); + return rows.map((row) => ({ + repoId: row.repoId, + fullName: row.fullName, + cloneUrl: row.cloneUrl, + private: Boolean(row.private), + })); + }, + + async listPullRequestsForRepository(c, input: { repoId: string }) { + const rows = await c.db.select().from(githubPullRequests).where(eq(githubPullRequests.repoId, input.repoId)).all(); + return rows.map(pullRequestSummaryFromRow); + }, + + async listOpenPullRequests(c) { + const rows = await c.db.select().from(githubPullRequests).all(); + return rows.map(pullRequestSummaryFromRow).sort((left, right) => right.updatedAtMs - left.updatedAtMs); + }, + + async getPullRequestForBranch(c, input: { repoId: string; branchName: string }) { + const rows = await c.db.select().from(githubPullRequests).where(eq(githubPullRequests.repoId, input.repoId)).all(); + const match = rows.find((candidate) => candidate.headRefName === 
input.branchName) ?? null; + if (!match) { + return null; + } + return { + number: match.number, + status: match.isDraft ? ("draft" as const) : ("ready" as const), + }; + }, + + async fullSync(c, input: FullSyncInput = {}) { + return await runFullSync(c, input); + }, + + async reloadOrganization(c) { + return await runFullSync(c, { label: "Reloading GitHub organization..." }); + }, + + async reloadAllPullRequests(c) { + return await runFullSync(c, { label: "Reloading GitHub pull requests..." }); + }, + + async reloadRepository(c, input: { repoId: string }) { + const context = await getOrganizationContext(c); + const current = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, input.repoId)).get(); + if (!current) { + throw new Error(`Unknown GitHub repository: ${input.repoId}`); + } + const { appShell } = getActorRuntimeContext(); + const repository = + context.installationId != null + ? await appShell.github.getInstallationRepository(context.installationId, current.fullName) + : context.accessToken + ? await appShell.github.getUserRepository(context.accessToken, current.fullName) + : null; + if (!repository) { + throw new Error(`Unable to reload repository: ${current.fullName}`); + } + + const updatedAt = Date.now(); + await c.db + .insert(githubRepositories) + .values({ + repoId: input.repoId, + fullName: repository.fullName, + cloneUrl: repository.cloneUrl, + private: repository.private ? 1 : 0, + updatedAt, + }) + .onConflictDoUpdate({ + target: githubRepositories.repoId, + set: { + fullName: repository.fullName, + cloneUrl: repository.cloneUrl, + private: repository.private ? 
1 : 0, + updatedAt, + }, + }) + .run(); + + const workspace = await getOrCreateWorkspace(c, c.state.workspaceId); + await workspace.applyGithubRepositoryProjection({ + repoId: input.repoId, + remoteUrl: repository.cloneUrl, + }); + return { + repoId: input.repoId, + fullName: repository.fullName, + cloneUrl: repository.cloneUrl, + private: repository.private, + }; + }, + + async reloadPullRequest(c, input: { repoId: string; prNumber: number }) { + const repository = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, input.repoId)).get(); + if (!repository) { + throw new Error(`Unknown GitHub repository: ${input.repoId}`); + } + const context = await getOrganizationContext(c); + const { appShell } = getActorRuntimeContext(); + const pullRequest = + context.installationId != null + ? await appShell.github.getInstallationPullRequest(context.installationId, repository.fullName, input.prNumber) + : context.accessToken + ? await appShell.github.getUserPullRequest(context.accessToken, repository.fullName, input.prNumber) + : null; + if (!pullRequest) { + throw new Error(`Unable to reload pull request #${input.prNumber} for ${repository.fullName}`); + } + + const beforeRows = await readAllPullRequestRows(c); + const updatedAt = Date.now(); + const nextState = normalizePrStatus(pullRequest); + const prId = `${input.repoId}#${input.prNumber}`; + if (nextState === "CLOSED" || nextState === "MERGED") { + await c.db.delete(githubPullRequests).where(eq(githubPullRequests.prId, prId)).run(); + } else { + await c.db + .insert(githubPullRequests) + .values({ + prId, + repoId: input.repoId, + repoFullName: repository.fullName, + number: pullRequest.number, + title: pullRequest.title, + body: pullRequest.body ?? null, + state: nextState, + url: pullRequest.url, + headRefName: pullRequest.headRefName, + baseRefName: pullRequest.baseRefName, + authorLogin: pullRequest.authorLogin ?? null, + isDraft: pullRequest.isDraft ? 
1 : 0, + updatedAt, + }) + .onConflictDoUpdate({ + target: githubPullRequests.prId, + set: { + title: pullRequest.title, + body: pullRequest.body ?? null, + state: nextState, + url: pullRequest.url, + headRefName: pullRequest.headRefName, + baseRefName: pullRequest.baseRefName, + authorLogin: pullRequest.authorLogin ?? null, + isDraft: pullRequest.isDraft ? 1 : 0, + updatedAt, + }, + }) + .run(); + } + + const afterRows = await readAllPullRequestRows(c); + await emitPullRequestChangeEvents(c, beforeRows, afterRows); + const closed = afterRows.find((row) => row.prId === prId); + if (!closed && (nextState === "CLOSED" || nextState === "MERGED")) { + const previous = beforeRows.find((row) => row.prId === prId); + if (previous) { + await autoArchiveTaskForClosedPullRequest(c, { + ...previous, + state: nextState, + }); + } + } + return pullRequestSummaryFromRow( + afterRows.find((row) => row.prId === prId) ?? { + prId, + repoId: input.repoId, + repoFullName: repository.fullName, + number: input.prNumber, + title: pullRequest.title, + state: nextState, + url: pullRequest.url, + headRefName: pullRequest.headRefName, + baseRefName: pullRequest.baseRefName, + authorLogin: pullRequest.authorLogin ?? null, + isDraft: pullRequest.isDraft ? 
1 : 0, + updatedAt, + }, + ); + }, + + async clearState(c, input: ClearStateInput) { + const beforeRows = await readAllPullRequestRows(c); + await c.db.delete(githubPullRequests).run(); + await c.db.delete(githubRepositories).run(); + await c.db.delete(githubMembers).run(); + await writeMeta(c, { + connectedAccount: input.connectedAccount, + installationStatus: input.installationStatus, + installationId: input.installationId, + syncStatus: "pending", + lastSyncLabel: input.label, + lastSyncAt: null, + }); + + const workspace = await getOrCreateWorkspace(c, c.state.workspaceId); + await workspace.applyGithubDataProjection({ + connectedAccount: input.connectedAccount, + installationStatus: input.installationStatus, + installationId: input.installationId, + syncStatus: "pending", + lastSyncLabel: input.label, + lastSyncAt: null, + repositories: [], + }); + await emitPullRequestChangeEvents(c, beforeRows, []); + }, + + async handlePullRequestWebhook(c, input: PullRequestWebhookInput) { + const beforeRows = await readAllPullRequestRows(c); + const repoId = repoIdFromRemote(input.repository.cloneUrl); + const updatedAt = Date.now(); + const state = normalizePrStatus(input.pullRequest); + const prId = `${repoId}#${input.pullRequest.number}`; + + await c.db + .insert(githubRepositories) + .values({ + repoId, + fullName: input.repository.fullName, + cloneUrl: input.repository.cloneUrl, + private: input.repository.private ? 1 : 0, + updatedAt, + }) + .onConflictDoUpdate({ + target: githubRepositories.repoId, + set: { + fullName: input.repository.fullName, + cloneUrl: input.repository.cloneUrl, + private: input.repository.private ? 
1 : 0, + updatedAt, + }, + }) + .run(); + + if (state === "CLOSED" || state === "MERGED") { + await c.db.delete(githubPullRequests).where(eq(githubPullRequests.prId, prId)).run(); + } else { + await c.db + .insert(githubPullRequests) + .values({ + prId, + repoId, + repoFullName: input.repository.fullName, + number: input.pullRequest.number, + title: input.pullRequest.title, + body: input.pullRequest.body ?? null, + state, + url: input.pullRequest.url, + headRefName: input.pullRequest.headRefName, + baseRefName: input.pullRequest.baseRefName, + authorLogin: input.pullRequest.authorLogin ?? null, + isDraft: input.pullRequest.isDraft ? 1 : 0, + updatedAt, + }) + .onConflictDoUpdate({ + target: githubPullRequests.prId, + set: { + title: input.pullRequest.title, + body: input.pullRequest.body ?? null, + state, + url: input.pullRequest.url, + headRefName: input.pullRequest.headRefName, + baseRefName: input.pullRequest.baseRefName, + authorLogin: input.pullRequest.authorLogin ?? null, + isDraft: input.pullRequest.isDraft ? 
1 : 0, + updatedAt, + }, + }) + .run(); + } + + await writeMeta(c, { + connectedAccount: input.connectedAccount, + installationStatus: input.installationStatus, + installationId: input.installationId, + syncStatus: "synced", + lastSyncLabel: "GitHub webhook received", + lastSyncAt: updatedAt, + }); + + const workspace = await getOrCreateWorkspace(c, c.state.workspaceId); + await workspace.applyGithubRepositoryProjection({ + repoId, + remoteUrl: input.repository.cloneUrl, + }); + + const afterRows = await readAllPullRequestRows(c); + await emitPullRequestChangeEvents(c, beforeRows, afterRows); + if (state === "CLOSED" || state === "MERGED") { + const previous = beforeRows.find((row) => row.prId === prId); + if (previous) { + await autoArchiveTaskForClosedPullRequest(c, { + ...previous, + state, + }); + } + } + }, + }, +}); diff --git a/foundry/packages/backend/src/actors/handles.ts b/foundry/packages/backend/src/actors/handles.ts index 58f8cd7..b488bf7 100644 --- a/foundry/packages/backend/src/actors/handles.ts +++ b/foundry/packages/backend/src/actors/handles.ts @@ -1,4 +1,4 @@ -import { authUserKey, taskKey, historyKey, projectBranchSyncKey, projectKey, projectPrSyncKey, taskSandboxKey, workspaceKey } from "./keys.js"; +import { authUserKey, githubDataKey, taskKey, historyKey, projectBranchSyncKey, projectKey, taskSandboxKey, workspaceKey } from "./keys.js"; export function actorClient(c: any) { return c.client(); @@ -53,17 +53,18 @@ export async function getOrCreateHistory(c: any, workspaceId: string, repoId: st }); } -export async function getOrCreateProjectPrSync(c: any, workspaceId: string, repoId: string, repoPath: string, intervalMs: number) { - return await actorClient(c).projectPrSync.getOrCreate(projectPrSyncKey(workspaceId, repoId), { +export async function getOrCreateGithubData(c: any, workspaceId: string) { + return await actorClient(c).githubData.getOrCreate(githubDataKey(workspaceId), { createWithInput: { workspaceId, - repoId, - repoPath, - 
intervalMs, }, }); } +export function getGithubData(c: any, workspaceId: string) { + return actorClient(c).githubData.get(githubDataKey(workspaceId)); +} + export async function getOrCreateProjectBranchSync(c: any, workspaceId: string, repoId: string, repoPath: string, intervalMs: number) { return await actorClient(c).projectBranchSync.getOrCreate(projectBranchSyncKey(workspaceId, repoId), { createWithInput: { @@ -85,10 +86,6 @@ export async function getOrCreateTaskSandbox(c: any, workspaceId: string, sandbo }); } -export function selfProjectPrSync(c: any) { - return actorClient(c).projectPrSync.getForId(c.actorId); -} - export function selfProjectBranchSync(c: any) { return actorClient(c).projectBranchSync.getForId(c.actorId); } @@ -112,3 +109,7 @@ export function selfProject(c: any) { export function selfAuthUser(c: any) { return actorClient(c).authUser.getForId(c.actorId); } + +export function selfGithubData(c: any) { + return actorClient(c).githubData.getForId(c.actorId); +} diff --git a/foundry/packages/backend/src/actors/index.ts b/foundry/packages/backend/src/actors/index.ts index 3c7a04a..4f67459 100644 --- a/foundry/packages/backend/src/actors/index.ts +++ b/foundry/packages/backend/src/actors/index.ts @@ -1,9 +1,9 @@ import { authUser } from "./auth-user/index.js"; import { setup } from "rivetkit"; +import { githubData } from "./github-data/index.js"; import { task } from "./task/index.js"; import { history } from "./history/index.js"; import { projectBranchSync } from "./project-branch-sync/index.js"; -import { projectPrSync } from "./project-pr-sync/index.js"; import { project } from "./project/index.js"; import { taskSandbox } from "./sandbox/index.js"; import { workspace } from "./workspace/index.js"; @@ -28,7 +28,7 @@ export const registry = setup({ task, taskSandbox, history, - projectPrSync, + githubData, projectBranchSync, }, }); @@ -36,11 +36,11 @@ export const registry = setup({ export * from "./context.js"; export * from "./events.js"; export * 
from "./auth-user/index.js"; +export * from "./github-data/index.js"; export * from "./task/index.js"; export * from "./history/index.js"; export * from "./keys.js"; export * from "./project-branch-sync/index.js"; -export * from "./project-pr-sync/index.js"; export * from "./project/index.js"; export * from "./sandbox/index.js"; export * from "./workspace/index.js"; diff --git a/foundry/packages/backend/src/actors/keys.ts b/foundry/packages/backend/src/actors/keys.ts index 4e49ea0..1dfaa48 100644 --- a/foundry/packages/backend/src/actors/keys.ts +++ b/foundry/packages/backend/src/actors/keys.ts @@ -24,8 +24,8 @@ export function historyKey(workspaceId: string, repoId: string): ActorKey { return ["ws", workspaceId, "project", repoId, "history"]; } -export function projectPrSyncKey(workspaceId: string, repoId: string): ActorKey { - return ["ws", workspaceId, "project", repoId, "pr-sync"]; +export function githubDataKey(workspaceId: string): ActorKey { + return ["ws", workspaceId, "github-data"]; } export function projectBranchSyncKey(workspaceId: string, repoId: string): ActorKey { diff --git a/foundry/packages/backend/src/actors/project-pr-sync/index.ts b/foundry/packages/backend/src/actors/project-pr-sync/index.ts deleted file mode 100644 index f525d64..0000000 --- a/foundry/packages/backend/src/actors/project-pr-sync/index.ts +++ /dev/null @@ -1,98 +0,0 @@ -import { actor, queue } from "rivetkit"; -import { workflow } from "rivetkit/workflow"; -import { getActorRuntimeContext } from "../context.js"; -import { getProject, selfProjectPrSync } from "../handles.js"; -import { logActorWarning, resolveErrorMessage, resolveErrorStack } from "../logging.js"; -import { type PollingControlState, runWorkflowPollingLoop } from "../polling.js"; -import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js"; - -export interface ProjectPrSyncInput { - workspaceId: string; - repoId: string; - repoPath: string; - intervalMs: number; -} - -interface SetIntervalCommand { 
- intervalMs: number; -} - -interface ProjectPrSyncState extends PollingControlState { - workspaceId: string; - repoId: string; - repoPath: string; -} - -const CONTROL = { - start: "project.pr_sync.control.start", - stop: "project.pr_sync.control.stop", - setInterval: "project.pr_sync.control.set_interval", - force: "project.pr_sync.control.force", -} as const; - -async function pollPrs(c: { state: ProjectPrSyncState }): Promise { - const { driver } = getActorRuntimeContext(); - const auth = await resolveWorkspaceGithubAuth(c, c.state.workspaceId); - const items = await driver.github.listPullRequests(c.state.repoPath, { githubToken: auth?.githubToken ?? null }); - const parent = getProject(c, c.state.workspaceId, c.state.repoId); - await parent.applyPrSyncResult({ items, at: Date.now() }); -} - -export const projectPrSync = actor({ - queues: { - [CONTROL.start]: queue(), - [CONTROL.stop]: queue(), - [CONTROL.setInterval]: queue(), - [CONTROL.force]: queue(), - }, - options: { - name: "Project PR Sync", - icon: "code-merge", - // Polling actors rely on timer-based wakeups; sleeping would pause the timer and stop polling. 
- noSleep: true, - }, - createState: (_c, input: ProjectPrSyncInput): ProjectPrSyncState => ({ - workspaceId: input.workspaceId, - repoId: input.repoId, - repoPath: input.repoPath, - intervalMs: input.intervalMs, - running: true, - }), - actions: { - async start(c): Promise { - const self = selfProjectPrSync(c); - await self.send(CONTROL.start, {}, { wait: true, timeout: 15_000 }); - }, - - async stop(c): Promise { - const self = selfProjectPrSync(c); - await self.send(CONTROL.stop, {}, { wait: true, timeout: 15_000 }); - }, - - async setIntervalMs(c, payload: SetIntervalCommand): Promise { - const self = selfProjectPrSync(c); - await self.send(CONTROL.setInterval, payload, { wait: true, timeout: 15_000 }); - }, - - async force(c): Promise { - const self = selfProjectPrSync(c); - await self.send(CONTROL.force, {}, { wait: true, timeout: 5 * 60_000 }); - }, - }, - run: workflow(async (ctx) => { - await runWorkflowPollingLoop(ctx, { - loopName: "project-pr-sync-loop", - control: CONTROL, - onPoll: async (loopCtx) => { - try { - await pollPrs(loopCtx); - } catch (error) { - logActorWarning("project-pr-sync", "poll failed", { - error: resolveErrorMessage(error), - stack: resolveErrorStack(error), - }); - } - }, - }); - }), -}); diff --git a/foundry/packages/backend/src/actors/project/actions.ts b/foundry/packages/backend/src/actors/project/actions.ts index 4b2b245..8f9090d 100644 --- a/foundry/packages/backend/src/actors/project/actions.ts +++ b/foundry/packages/backend/src/actors/project/actions.ts @@ -4,13 +4,13 @@ import { and, desc, eq, isNotNull, ne } from "drizzle-orm"; import { Loop } from "rivetkit/workflow"; import type { AgentType, TaskRecord, TaskSummary, ProviderId, RepoOverview, RepoStackAction, RepoStackActionResult } from "@sandbox-agent/foundry-shared"; import { getActorRuntimeContext } from "../context.js"; -import { getTask, getOrCreateTask, getOrCreateHistory, getOrCreateProjectBranchSync, getOrCreateProjectPrSync, selfProject } from "../handles.js"; 
+import { getGithubData, getTask, getOrCreateTask, getOrCreateHistory, getOrCreateProjectBranchSync, selfProject } from "../handles.js"; import { isActorNotFoundError, logActorWarning, resolveErrorMessage } from "../logging.js"; import { foundryRepoClonePath } from "../../services/foundry-paths.js"; import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js"; import { expectQueueResponse } from "../../services/queue.js"; import { withRepoGitLock } from "../../services/repo-git-lock.js"; -import { branches, taskIndex, prCache, repoActionJobs, repoMeta } from "./db/schema.js"; +import { branches, taskIndex, repoActionJobs, repoMeta } from "./db/schema.js"; import { deriveFallbackTitle } from "../../services/create-flow.js"; import { normalizeBaseBranchName } from "../../integrations/git-spice/index.js"; import { sortBranchesForOverview } from "./stack-model.js"; @@ -55,22 +55,6 @@ interface GetPullRequestForBranchCommand { branchName: string; } -interface PrSyncResult { - items: Array<{ - number: number; - headRefName: string; - state: string; - title: string; - url?: string; - author?: string; - isDraft?: boolean; - ciStatus?: string | null; - reviewStatus?: string | null; - reviewer?: string | null; - }>; - at: number; -} - interface BranchSyncResult { items: Array<{ branchName: string; @@ -99,7 +83,6 @@ const PROJECT_QUEUE_NAMES = [ "project.command.createTask", "project.command.registerTaskBranch", "project.command.runRepoStackAction", - "project.command.applyPrSyncResult", "project.command.applyBranchSyncResult", ] as const; @@ -125,18 +108,9 @@ async function ensureProjectSyncActors(c: any, localPath: string): Promise return; } - const prSync = await getOrCreateProjectPrSync(c, c.state.workspaceId, c.state.repoId, localPath, 30_000); const branchSync = await getOrCreateProjectBranchSync(c, c.state.workspaceId, c.state.repoId, localPath, 5_000); c.state.syncActorsStarted = true; - void prSync.start().catch((error: unknown) => { - 
logActorWarning("project.sync", "starting pr sync actor failed", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - error: resolveErrorMessage(error), - }); - }); - void branchSync.start().catch((error: unknown) => { logActorWarning("project.sync", "starting branch sync actor failed", { workspaceId: c.state.workspaceId, @@ -352,9 +326,6 @@ async function ensureTaskIndexHydratedForRead(c: any): Promise { } async function forceProjectSync(c: any, localPath: string): Promise { - const prSync = await getOrCreateProjectPrSync(c, c.state.workspaceId, c.state.repoId, localPath, 30_000); - await prSync.force(); - const branchSync = await getOrCreateProjectBranchSync(c, c.state.workspaceId, c.state.repoId, localPath, 5_000); await branchSync.force(); } @@ -377,17 +348,10 @@ async function enrichTaskRecord(c: any, record: TaskRecord): Promise const pr = branchName != null - ? await c.db - .select({ - prUrl: prCache.prUrl, - prAuthor: prCache.prAuthor, - ciStatus: prCache.ciStatus, - reviewStatus: prCache.reviewStatus, - reviewer: prCache.reviewer, - }) - .from(prCache) - .where(eq(prCache.branchName, branchName)) - .get() + ? await getGithubData(c, c.state.workspaceId) + .listPullRequestsForRepository({ repoId: c.state.repoId }) + .then((rows: any[]) => rows.find((row) => row.headRefName === branchName) ?? null) + .catch(() => null) : null; return { @@ -396,11 +360,11 @@ async function enrichTaskRecord(c: any, record: TaskRecord): Promise hasUnpushed: br?.hasUnpushed != null ? String(br.hasUnpushed) : null, conflictsWithMain: br?.conflictsWithMain != null ? String(br.conflictsWithMain) : null, parentBranch: br?.parentBranch ?? null, - prUrl: pr?.prUrl ?? null, - prAuthor: pr?.prAuthor ?? null, - ciStatus: pr?.ciStatus ?? null, - reviewStatus: pr?.reviewStatus ?? null, - reviewer: pr?.reviewer ?? null, + prUrl: pr?.url ?? null, + prAuthor: pr?.authorLogin ?? null, + ciStatus: null, + reviewStatus: null, + reviewer: pr?.authorLogin ?? 
null, }; } @@ -458,11 +422,6 @@ async function createTaskMutation(c: any, cmd: CreateTaskCommand): Promise { - await c.db.delete(prCache).run(); - - for (const item of body.items) { - await c.db - .insert(prCache) - .values({ - branchName: item.headRefName, - prNumber: item.number, - state: item.state, - title: item.title, - prUrl: item.url ?? null, - prAuthor: item.author ?? null, - isDraft: item.isDraft ? 1 : 0, - ciStatus: item.ciStatus ?? null, - reviewStatus: item.reviewStatus ?? null, - reviewer: item.reviewer ?? null, - fetchedAt: body.at, - updatedAt: body.at, - }) - .onConflictDoUpdate({ - target: prCache.branchName, - set: { - prNumber: item.number, - state: item.state, - title: item.title, - prUrl: item.url ?? null, - prAuthor: item.author ?? null, - isDraft: item.isDraft ? 1 : 0, - ciStatus: item.ciStatus ?? null, - reviewStatus: item.reviewStatus ?? null, - reviewer: item.reviewer ?? null, - fetchedAt: body.at, - updatedAt: body.at, - }, - }) - .run(); - } - - for (const item of body.items) { - if (item.state !== "MERGED" && item.state !== "CLOSED") { - continue; - } - - const row = await c.db.select({ taskId: taskIndex.taskId }).from(taskIndex).where(eq(taskIndex.branchName, item.headRefName)).get(); - if (!row) { - continue; - } - - try { - const h = getTask(c, c.state.workspaceId, c.state.repoId, row.taskId); - await h.archive({ reason: `PR ${item.state.toLowerCase()}` }); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - await deleteStaleTaskIndexRow(c, row.taskId); - logActorWarning("project", "pruned stale task index row during PR close archive", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - taskId: row.taskId, - branchName: item.headRefName, - prState: item.state, - }); - continue; - } - logActorWarning("project", "failed to auto-archive task after PR close", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - taskId: row.taskId, - branchName: item.headRefName, - prState: item.state, - error: 
resolveErrorMessage(error), - }); - } - } -} - async function applyBranchSyncResultMutation(c: any, body: BranchSyncResult): Promise { const incoming = new Set(body.items.map((item) => item.branchName)); const reservedRows = await c.db.select({ branchName: taskIndex.branchName }).from(taskIndex).where(isNotNull(taskIndex.branchName)).all(); @@ -953,69 +836,77 @@ export async function runProjectWorkflow(ctx: any): Promise { return Loop.continue(undefined); } - if (msg.name === "project.command.ensure") { - const result = await loopCtx.step({ - name: "project-ensure", - timeout: 5 * 60_000, - run: async () => ensureProjectMutation(loopCtx, msg.body as EnsureProjectCommand), - }); - await msg.complete(result); - return Loop.continue(undefined); - } + try { + if (msg.name === "project.command.ensure") { + const result = await loopCtx.step({ + name: "project-ensure", + timeout: 5 * 60_000, + run: async () => ensureProjectMutation(loopCtx, msg.body as EnsureProjectCommand), + }); + await msg.complete(result); + return Loop.continue(undefined); + } - if (msg.name === "project.command.hydrateTaskIndex") { - await loopCtx.step("project-hydrate-task-index", async () => hydrateTaskIndexMutation(loopCtx, msg.body as HydrateTaskIndexCommand)); - await msg.complete({ ok: true }); - return Loop.continue(undefined); - } + if (msg.name === "project.command.hydrateTaskIndex") { + await loopCtx.step("project-hydrate-task-index", async () => hydrateTaskIndexMutation(loopCtx, msg.body as HydrateTaskIndexCommand)); + await msg.complete({ ok: true }); + return Loop.continue(undefined); + } - if (msg.name === "project.command.createTask") { - const result = await loopCtx.step({ - name: "project-create-task", - timeout: 5 * 60_000, - run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskCommand), - }); - await msg.complete(result); - return Loop.continue(undefined); - } + if (msg.name === "project.command.createTask") { + const result = await loopCtx.step({ + name: 
"project-create-task", + timeout: 5 * 60_000, + run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskCommand), + }); + await msg.complete(result); + return Loop.continue(undefined); + } - if (msg.name === "project.command.registerTaskBranch") { - const result = await loopCtx.step({ - name: "project-register-task-branch", - timeout: 5 * 60_000, - run: async () => registerTaskBranchMutation(loopCtx, msg.body as RegisterTaskBranchCommand), - }); - await msg.complete(result); - return Loop.continue(undefined); - } + if (msg.name === "project.command.registerTaskBranch") { + const result = await loopCtx.step({ + name: "project-register-task-branch", + timeout: 5 * 60_000, + run: async () => registerTaskBranchMutation(loopCtx, msg.body as RegisterTaskBranchCommand), + }); + await msg.complete(result); + return Loop.continue(undefined); + } - if (msg.name === "project.command.runRepoStackAction") { - const result = await loopCtx.step({ - name: "project-run-repo-stack-action", - timeout: 12 * 60_000, - run: async () => runRepoStackActionMutation(loopCtx, msg.body as RunRepoStackActionCommand), - }); - await msg.complete(result); - return Loop.continue(undefined); - } + if (msg.name === "project.command.runRepoStackAction") { + const result = await loopCtx.step({ + name: "project-run-repo-stack-action", + timeout: 12 * 60_000, + run: async () => runRepoStackActionMutation(loopCtx, msg.body as RunRepoStackActionCommand), + }); + await msg.complete(result); + return Loop.continue(undefined); + } - if (msg.name === "project.command.applyPrSyncResult") { - await loopCtx.step({ - name: "project-apply-pr-sync-result", - timeout: 60_000, - run: async () => applyPrSyncResultMutation(loopCtx, msg.body as PrSyncResult), + if (msg.name === "project.command.applyBranchSyncResult") { + await loopCtx.step({ + name: "project-apply-branch-sync-result", + timeout: 60_000, + run: async () => applyBranchSyncResultMutation(loopCtx, msg.body as BranchSyncResult), + }); + await 
msg.complete({ ok: true }); + } + } catch (error) { + const message = resolveErrorMessage(error); + logActorWarning("project", "project workflow command failed", { + workspaceId: loopCtx.state.workspaceId, + repoId: loopCtx.state.repoId, + queueName: msg.name, + error: message, }); - await msg.complete({ ok: true }); - return Loop.continue(undefined); - } - - if (msg.name === "project.command.applyBranchSyncResult") { - await loopCtx.step({ - name: "project-apply-branch-sync-result", - timeout: 60_000, - run: async () => applyBranchSyncResultMutation(loopCtx, msg.body as BranchSyncResult), + await msg.complete({ error: message }).catch((completeError: unknown) => { + logActorWarning("project", "project workflow failed completing error response", { + workspaceId: loopCtx.state.workspaceId, + repoId: loopCtx.state.repoId, + queueName: msg.name, + error: resolveErrorMessage(completeError), + }); }); - await msg.complete({ ok: true }); } return Loop.continue(undefined); @@ -1219,19 +1110,9 @@ export const projectActions = { } } - const prRows = await c.db - .select({ - branchName: prCache.branchName, - prNumber: prCache.prNumber, - prState: prCache.state, - prUrl: prCache.prUrl, - ciStatus: prCache.ciStatus, - reviewStatus: prCache.reviewStatus, - reviewer: prCache.reviewer, - }) - .from(prCache) - .all(); - const prByBranch = new Map(prRows.map((row) => [row.branchName, row])); + const githubData = getGithubData(c, c.state.workspaceId); + const prRows = await githubData.listPullRequestsForRepository({ repoId: c.state.repoId }).catch(() => []); + const prByBranch = new Map(prRows.map((row) => [row.headRefName, row])); const combinedRows = sortBranchesForOverview( branchRowsRaw.map((row) => ({ @@ -1258,12 +1139,12 @@ export const projectActions = { taskId: taskMeta?.taskId ?? null, taskTitle: taskMeta?.title ?? null, taskStatus: taskMeta?.status ?? null, - prNumber: pr?.prNumber ?? null, - prState: pr?.prState ?? null, - prUrl: pr?.prUrl ?? 
null, - ciStatus: pr?.ciStatus ?? null, - reviewStatus: pr?.reviewStatus ?? null, - reviewer: pr?.reviewer ?? null, + prNumber: pr?.number ?? null, + prState: pr?.state ?? null, + prUrl: pr?.url ?? null, + ciStatus: null, + reviewStatus: null, + reviewer: pr?.authorLogin ?? null, firstSeenAt: row.firstSeenAt ?? null, lastSeenAt: row.lastSeenAt ?? null, updatedAt: Math.max(row.updatedAt, taskMeta?.updatedAt ?? 0), @@ -1271,7 +1152,7 @@ export const projectActions = { }); const latestBranchSync = await c.db.select({ updatedAt: branches.updatedAt }).from(branches).orderBy(desc(branches.updatedAt)).limit(1).get(); - const latestPrSync = await c.db.select({ updatedAt: prCache.updatedAt }).from(prCache).orderBy(desc(prCache.updatedAt)).limit(1).get(); + const githubSummary = await githubData.getSummary().catch(() => null); return { workspaceId: c.state.workspaceId, @@ -1281,9 +1162,9 @@ export const projectActions = { stackAvailable, fetchedAt: now, branchSyncAt: latestBranchSync?.updatedAt ?? null, - prSyncAt: latestPrSync?.updatedAt ?? null, + prSyncAt: githubSummary?.lastSyncAt ?? null, branchSyncStatus: latestBranchSync ? "synced" : "pending", - prSyncStatus: latestPrSync ? "synced" : "pending", + prSyncStatus: githubSummary?.syncStatus ?? "pending", repoActionJobs: await listRepoActionJobRows(c), branches: branchRows, }; @@ -1294,24 +1175,11 @@ export const projectActions = { if (!branchName) { return null; } - - const pr = await c.db - .select({ - prNumber: prCache.prNumber, - prState: prCache.state, - }) - .from(prCache) - .where(eq(prCache.branchName, branchName)) - .get(); - - if (!pr?.prNumber) { - return null; - } - - return { - number: pr.prNumber, - status: pr.prState === "draft" ? 
"draft" : "ready", - }; + const githubData = getGithubData(c, c.state.workspaceId); + return await githubData.getPullRequestForBranch({ + repoId: c.state.repoId, + branchName, + }); }, async runRepoStackAction(c: any, cmd: RunRepoStackActionCommand): Promise { @@ -1353,14 +1221,6 @@ export const projectActions = { }; }, - async applyPrSyncResult(c: any, body: PrSyncResult): Promise { - const self = selfProject(c); - await self.send(projectWorkflowQueueName("project.command.applyPrSyncResult"), body, { - wait: true, - timeout: 5 * 60_000, - }); - }, - async applyBranchSyncResult(c: any, body: BranchSyncResult): Promise { const self = selfProject(c); await self.send(projectWorkflowQueueName("project.command.applyBranchSyncResult"), body, { diff --git a/foundry/packages/backend/src/actors/project/db/migrations.ts b/foundry/packages/backend/src/actors/project/db/migrations.ts index aa49fba..fd705b7 100644 --- a/foundry/packages/backend/src/actors/project/db/migrations.ts +++ b/foundry/packages/backend/src/actors/project/db/migrations.ts @@ -29,21 +29,6 @@ export default { \`updated_at\` integer NOT NULL ); --> statement-breakpoint -CREATE TABLE \`pr_cache\` ( - \`branch_name\` text PRIMARY KEY NOT NULL, - \`pr_number\` integer NOT NULL, - \`state\` text NOT NULL, - \`title\` text NOT NULL, - \`pr_url\` text, - \`pr_author\` text, - \`is_draft\` integer DEFAULT 0 NOT NULL, - \`ci_status\` text, - \`review_status\` text, - \`reviewer\` text, - \`fetched_at\` integer, - \`updated_at\` integer NOT NULL -); ---> statement-breakpoint CREATE TABLE \`repo_meta\` ( \`id\` integer PRIMARY KEY NOT NULL, \`remote_url\` text NOT NULL, diff --git a/foundry/packages/backend/src/actors/project/db/schema.ts b/foundry/packages/backend/src/actors/project/db/schema.ts index 1ef4cee..bb61d75 100644 --- a/foundry/packages/backend/src/actors/project/db/schema.ts +++ b/foundry/packages/backend/src/actors/project/db/schema.ts @@ -21,21 +21,6 @@ export const repoMeta = sqliteTable("repo_meta", 
{ updatedAt: integer("updated_at").notNull(), }); -export const prCache = sqliteTable("pr_cache", { - branchName: text("branch_name").notNull().primaryKey(), - prNumber: integer("pr_number").notNull(), - state: text("state").notNull(), - title: text("title").notNull(), - prUrl: text("pr_url"), - prAuthor: text("pr_author"), - isDraft: integer("is_draft").notNull().default(0), - ciStatus: text("ci_status"), - reviewStatus: text("review_status"), - reviewer: text("reviewer"), - fetchedAt: integer("fetched_at"), - updatedAt: integer("updated_at").notNull(), -}); - export const taskIndex = sqliteTable("task_index", { taskId: text("task_id").notNull().primaryKey(), branchName: text("branch_name"), diff --git a/foundry/packages/backend/src/actors/task/index.ts b/foundry/packages/backend/src/actors/task/index.ts index 6ea2e07..cac007a 100644 --- a/foundry/packages/backend/src/actors/task/index.ts +++ b/foundry/packages/backend/src/actors/task/index.ts @@ -101,6 +101,10 @@ interface TaskWorkbenchSendMessageCommand { attachments: Array; } +interface TaskWorkbenchSendMessageActionInput extends TaskWorkbenchSendMessageInput { + waitForCompletion?: boolean; +} + interface TaskWorkbenchCreateSessionCommand { model?: string; } @@ -317,9 +321,9 @@ export const task = actor({ ); }, - async sendWorkbenchMessage(c, input: TaskWorkbenchSendMessageInput): Promise { + async sendWorkbenchMessage(c, input: TaskWorkbenchSendMessageActionInput): Promise { const self = selfTask(c); - await self.send( + const result = await self.send( taskWorkflowQueueName("task.command.workbench.send_message"), { sessionId: input.tabId, @@ -327,9 +331,13 @@ export const task = actor({ attachments: input.attachments, } satisfies TaskWorkbenchSendMessageCommand, { - wait: false, + wait: input.waitForCompletion === true, + ...(input.waitForCompletion === true ? 
{ timeout: 10 * 60_000 } : {}), }, ); + if (input.waitForCompletion === true) { + expectQueueResponse(result); + } }, async stopWorkbenchSession(c, input: TaskTabCommand): Promise { diff --git a/foundry/packages/backend/src/actors/task/workbench.ts b/foundry/packages/backend/src/actors/task/workbench.ts index 12aed4e..1da7f2f 100644 --- a/foundry/packages/backend/src/actors/task/workbench.ts +++ b/foundry/packages/backend/src/actors/task/workbench.ts @@ -149,6 +149,23 @@ export function shouldMarkSessionUnreadForStatus(meta: { thinkingSinceMs?: numbe return Boolean(meta.thinkingSinceMs); } +export function shouldRecreateSessionForModelChange(meta: { + status: "pending_provision" | "pending_session_create" | "ready" | "error"; + sandboxSessionId?: string | null; + created?: boolean; + transcript?: Array; +}): boolean { + if (meta.status !== "ready" || !meta.sandboxSessionId) { + return false; + } + + if (meta.created) { + return false; + } + + return !Array.isArray(meta.transcript) || meta.transcript.length === 0; +} + async function listSessionMetaRows(c: any, options?: { includeClosed?: boolean }): Promise> { await ensureWorkbenchSessionTable(c); const rows = await c.db.select().from(taskWorkbenchSessions).orderBy(asc(taskWorkbenchSessions.createdAt)).all(); @@ -290,6 +307,24 @@ async function requireReadySessionMeta(c: any, tabId: string): Promise { return meta; } +async function ensureReadySessionMeta(c: any, tabId: string): Promise { + const meta = await readSessionMeta(c, tabId); + if (!meta) { + throw new Error(`Unknown workbench tab: ${tabId}`); + } + + if (meta.status === "ready" && meta.sandboxSessionId) { + return meta; + } + + if (meta.status === "error") { + throw new Error(meta.errorMessage ?? 
"This workbench tab failed to prepare"); + } + + await ensureWorkbenchSession(c, tabId); + return await requireReadySessionMeta(c, tabId); +} + function shellFragment(parts: string[]): string { return parts.join(" && "); } @@ -662,6 +697,23 @@ async function enqueueWorkbenchRefresh( await self.send(command, body, { wait: false }); } +async function enqueueWorkbenchEnsureSession(c: any, tabId: string): Promise { + const self = selfTask(c); + await self.send( + "task.command.workbench.ensure_session", + { + tabId, + }, + { + wait: false, + }, + ); +} + +function pendingWorkbenchSessionStatus(record: any): "pending_provision" | "pending_session_create" { + return record.activeSandboxId ? "pending_session_create" : "pending_provision"; +} + async function maybeScheduleWorkbenchRefreshes(c: any, record: any, sessions: Array): Promise { const gitState = await readCachedGitState(c); if (record.activeSandboxId && !gitState.updatedAt) { @@ -721,7 +773,7 @@ export async function ensureWorkbenchSeeded(c: any): Promise { } function buildSessionSummary(record: any, meta: any): any { - const derivedSandboxSessionId = meta.sandboxSessionId ?? (meta.status === "pending_provision" && record.activeSessionId ? record.activeSessionId : null); + const derivedSandboxSessionId = meta.status === "ready" ? (meta.sandboxSessionId ?? null) : null; const sessionStatus = meta.status === "pending_provision" || meta.status === "pending_session_create" ? meta.status @@ -991,12 +1043,12 @@ export async function createWorkbenchSession(c: any, model?: string): Promise<{ await ensureSessionMeta(c, { tabId, model: model ?? defaultModelForAgent(record.agentType), - sandboxSessionId: tabId, - status: record.activeSandboxId ? 
"pending_session_create" : "pending_provision", + sandboxSessionId: null, + status: pendingWorkbenchSessionStatus(record), created: false, }); - await ensureWorkbenchSession(c, tabId, model); await broadcastTaskUpdate(c, { sessionId: tabId }); + await enqueueWorkbenchEnsureSession(c, tabId); return { tabId }; } @@ -1099,14 +1151,60 @@ export async function updateWorkbenchDraft(c: any, sessionId: string, text: stri } export async function changeWorkbenchModel(c: any, sessionId: string, model: string): Promise { - await updateSessionMeta(c, sessionId, { + const meta = await readSessionMeta(c, sessionId); + if (!meta || meta.closed) { + return; + } + + if (meta.model === model) { + return; + } + + const record = await ensureWorkbenchSeeded(c); + let nextMeta = await updateSessionMeta(c, sessionId, { model, }); + let shouldEnsure = nextMeta.status === "pending_provision" || nextMeta.status === "pending_session_create" || nextMeta.status === "error"; + + if (shouldRecreateSessionForModelChange(nextMeta)) { + const sandbox = getTaskSandbox(c, c.state.workspaceId, stableSandboxId(c)); + await sandbox.destroySession(nextMeta.sandboxSessionId); + nextMeta = await updateSessionMeta(c, sessionId, { + sandboxSessionId: null, + status: pendingWorkbenchSessionStatus(record), + errorMessage: null, + transcriptJson: "[]", + transcriptUpdatedAt: null, + thinkingSinceMs: null, + }); + shouldEnsure = true; + } else if (nextMeta.status === "ready" && nextMeta.sandboxSessionId) { + const sandbox = getTaskSandbox(c, c.state.workspaceId, stableSandboxId(c)); + if (typeof sandbox.rawSendSessionMethod === "function") { + try { + await sandbox.rawSendSessionMethod(nextMeta.sandboxSessionId, "session/set_config_option", { + configId: "model", + value: model, + }); + } catch { + // Some agents do not allow live model updates. Preserve the new preference in metadata. 
+ } + } + } else if (nextMeta.status !== "ready") { + nextMeta = await updateSessionMeta(c, sessionId, { + status: pendingWorkbenchSessionStatus(record), + errorMessage: null, + }); + } + + if (shouldEnsure) { + await enqueueWorkbenchEnsureSession(c, sessionId); + } await broadcastTaskUpdate(c, { sessionId }); } export async function sendWorkbenchMessage(c: any, sessionId: string, text: string, attachments: Array): Promise { - const meta = await requireReadySessionMeta(c, sessionId); + const meta = await ensureReadySessionMeta(c, sessionId); const record = await ensureWorkbenchSeeded(c); const runtime = await getTaskSandboxRuntime(c, record); await ensureSandboxRepo(c, runtime.sandbox, record); diff --git a/foundry/packages/backend/src/actors/task/workflow/index.ts b/foundry/packages/backend/src/actors/task/workflow/index.ts index 7461c24..f9049a7 100644 --- a/foundry/packages/backend/src/actors/task/workflow/index.ts +++ b/foundry/packages/backend/src/actors/task/workflow/index.ts @@ -186,12 +186,16 @@ const commandHandlers: Record = { }, "task.command.workbench.send_message": async (loopCtx, msg) => { - await loopCtx.step({ - name: "workbench-send-message", - timeout: 10 * 60_000, - run: async () => sendWorkbenchMessage(loopCtx, msg.body.sessionId, msg.body.text, msg.body.attachments), - }); - await msg.complete({ ok: true }); + try { + await loopCtx.step({ + name: "workbench-send-message", + timeout: 10 * 60_000, + run: async () => sendWorkbenchMessage(loopCtx, msg.body.sessionId, msg.body.text, msg.body.attachments), + }); + await msg.complete({ ok: true }); + } catch (error) { + await msg.complete({ error: resolveErrorMessage(error) }); + } }, "task.command.workbench.stop_session": async (loopCtx, msg) => { diff --git a/foundry/packages/backend/src/actors/workspace/actions.ts b/foundry/packages/backend/src/actors/workspace/actions.ts index 0dd6d6e..8782a77 100644 --- a/foundry/packages/backend/src/actors/workspace/actions.ts +++ 
b/foundry/packages/backend/src/actors/workspace/actions.ts @@ -28,6 +28,7 @@ import type { TaskWorkbenchSendMessageInput, TaskWorkbenchTabInput, TaskWorkbenchUpdateDraftInput, + WorkbenchOpenPrSummary, WorkbenchRepoSummary, WorkbenchSessionSummary, WorkbenchTaskSummary, @@ -36,12 +37,12 @@ import type { WorkspaceUseInput, } from "@sandbox-agent/foundry-shared"; import { getActorRuntimeContext } from "../context.js"; -import { getTask, getOrCreateHistory, getOrCreateProject, selfWorkspace } from "../handles.js"; +import { getGithubData, getOrCreateGithubData, getTask, getOrCreateHistory, getOrCreateProject, selfWorkspace } from "../handles.js"; import { logActorWarning, resolveErrorMessage } from "../logging.js"; import { availableSandboxProviderIds, defaultSandboxProviderId } from "../../sandbox-config.js"; import { normalizeRemoteUrl, repoIdFromRemote } from "../../services/repo.js"; import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js"; -import { taskLookup, repos, providerProfiles, taskSummaries } from "./db/schema.js"; +import { organizationProfile, taskLookup, repos, providerProfiles, taskSummaries } from "./db/schema.js"; import { agentTypeForModel } from "../task/workbench.js"; import { expectQueueResponse } from "../../services/queue.js"; import { workspaceAppActions } from "./app-shell.js"; @@ -85,6 +86,8 @@ export function workspaceWorkflowQueueName(name: WorkspaceQueueName): WorkspaceQ return name; } +const ORGANIZATION_PROFILE_ROW_ID = "profile"; + function assertWorkspace(c: { state: WorkspaceState }, workspaceId: string): void { if (workspaceId !== c.state.workspaceId) { throw new Error(`Workspace actor mismatch: actor=${c.state.workspaceId} command=${workspaceId}`); @@ -203,6 +206,14 @@ function taskSummaryFromRow(row: any): WorkbenchTaskSummary { }; } +async function listOpenPullRequestsSnapshot(c: any, taskRows: WorkbenchTaskSummary[]): Promise { + const githubData = getGithubData(c, c.state.workspaceId); + const 
openPullRequests = await githubData.listOpenPullRequests({}).catch(() => []); + const claimedBranches = new Set(taskRows.filter((task) => task.branch).map((task) => `${task.repoId}:${task.branch}`)); + + return openPullRequests.filter((pullRequest: WorkbenchOpenPrSummary) => !claimedBranches.has(`${pullRequest.repoId}:${pullRequest.headRefName}`)); +} + async function reconcileWorkbenchProjection(c: any): Promise { const repoRows = await c.db .select({ repoId: repos.repoId, remoteUrl: repos.remoteUrl, updatedAt: repos.updatedAt }) @@ -252,6 +263,7 @@ async function reconcileWorkbenchProjection(c: any): Promise buildRepoSummary(row, taskRows)).sort((left, right) => right.latestActivityMs - left.latestActivityMs), taskSummaries: taskRows, + openPullRequests: await listOpenPullRequestsSnapshot(c, taskRows), }; } @@ -280,8 +292,8 @@ async function waitForWorkbenchTaskReady(task: any, timeoutMs = 5 * 60_000): Pro /** * Reads the workspace sidebar snapshot from the workspace actor's local SQLite - * only. Task actors push summary updates into `task_summaries`, so clients do - * not need this action to fan out to every child actor on the hot read path. + * plus the org-scoped GitHub actor for open PRs. Task actors still push + * summary updates into `task_summaries`, so the hot read path stays bounded. 
*/ async function getWorkspaceSummarySnapshot(c: any): Promise { const repoRows = await c.db @@ -300,6 +312,7 @@ async function getWorkspaceSummarySnapshot(c: any): Promise buildRepoSummary(row, summaries)).sort((left, right) => right.latestActivityMs - left.latestActivityMs), taskSummaries: summaries, + openPullRequests: await listOpenPullRequestsSnapshot(c, summaries), }; } @@ -463,58 +476,74 @@ export async function runWorkspaceWorkflow(ctx: any): Promise { return Loop.continue(undefined); } - if (msg.name === "workspace.command.addRepo") { - const result = await loopCtx.step({ - name: "workspace-add-repo", - timeout: 60_000, - run: async () => addRepoMutation(loopCtx, msg.body as AddRepoInput), - }); - await msg.complete(result); - return Loop.continue(undefined); - } + try { + if (msg.name === "workspace.command.addRepo") { + const result = await loopCtx.step({ + name: "workspace-add-repo", + timeout: 60_000, + run: async () => addRepoMutation(loopCtx, msg.body as AddRepoInput), + }); + await msg.complete(result); + return Loop.continue(undefined); + } - if (msg.name === "workspace.command.createTask") { - const result = await loopCtx.step({ - name: "workspace-create-task", - timeout: 5 * 60_000, - run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskInput), - }); - await msg.complete(result); - return Loop.continue(undefined); - } + if (msg.name === "workspace.command.createTask") { + const result = await loopCtx.step({ + name: "workspace-create-task", + timeout: 5 * 60_000, + run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskInput), + }); + await msg.complete(result); + return Loop.continue(undefined); + } - if (msg.name === "workspace.command.refreshProviderProfiles") { - await loopCtx.step("workspace-refresh-provider-profiles", async () => - refreshProviderProfilesMutation(loopCtx, msg.body as RefreshProviderProfilesCommand), - ); - await msg.complete({ ok: true }); - return Loop.continue(undefined); - } + if (msg.name === 
"workspace.command.refreshProviderProfiles") { + await loopCtx.step("workspace-refresh-provider-profiles", async () => + refreshProviderProfilesMutation(loopCtx, msg.body as RefreshProviderProfilesCommand), + ); + await msg.complete({ ok: true }); + return Loop.continue(undefined); + } - if (msg.name === "workspace.command.syncGithubSession") { - await loopCtx.step({ - name: "workspace-sync-github-session", - timeout: 60_000, - run: async () => { - const { syncGithubOrganizations } = await import("./app-shell.js"); - await syncGithubOrganizations(loopCtx, msg.body as { sessionId: string; accessToken: string }); - }, - }); - await msg.complete({ ok: true }); - return Loop.continue(undefined); - } + if (msg.name === "workspace.command.syncGithubSession") { + await loopCtx.step({ + name: "workspace-sync-github-session", + timeout: 60_000, + run: async () => { + const { syncGithubOrganizations } = await import("./app-shell.js"); + await syncGithubOrganizations(loopCtx, msg.body as { sessionId: string; accessToken: string }); + }, + }); + await msg.complete({ ok: true }); + return Loop.continue(undefined); + } - if (msg.name === "workspace.command.syncGithubOrganizationRepos") { - await loopCtx.step({ - name: "workspace-sync-github-organization-repos", - timeout: 60_000, - run: async () => { - const { syncGithubOrganizationRepos } = await import("./app-shell.js"); - await syncGithubOrganizationRepos(loopCtx, msg.body as { sessionId: string; organizationId: string }); - }, + if (msg.name === "workspace.command.syncGithubOrganizationRepos") { + await loopCtx.step({ + name: "workspace-sync-github-organization-repos", + timeout: 60_000, + run: async () => { + const { syncGithubOrganizationRepos } = await import("./app-shell.js"); + await syncGithubOrganizationRepos(loopCtx, msg.body as { sessionId: string; organizationId: string }); + }, + }); + await msg.complete({ ok: true }); + return Loop.continue(undefined); + } + } catch (error) { + const message = 
resolveErrorMessage(error); + logActorWarning("workspace", "workspace workflow command failed", { + workspaceId: loopCtx.state.workspaceId, + queueName: msg.name, + error: message, + }); + await msg.complete({ error: message }).catch((completeError: unknown) => { + logActorWarning("workspace", "workspace workflow failed completing error response", { + workspaceId: loopCtx.state.workspaceId, + queueName: msg.name, + error: resolveErrorMessage(completeError), + }); }); - await msg.complete({ ok: true }); - return Loop.continue(undefined); } return Loop.continue(undefined); @@ -604,6 +633,175 @@ export const workspaceActions = { c.broadcast("workspaceUpdated", { type: "taskRemoved", taskId: input.taskId } satisfies WorkspaceEvent); }, + async findTaskForGithubBranch(c: any, input: { repoId: string; branchName: string }): Promise<{ taskId: string | null }> { + const summaries = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, input.repoId)).all(); + const existing = summaries.find((summary) => summary.branch === input.branchName); + return { taskId: existing?.taskId ?? 
null }; + }, + + async refreshTaskSummaryForGithubBranch(c: any, input: { repoId: string; branchName: string }): Promise { + const summaries = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, input.repoId)).all(); + const matches = summaries.filter((summary) => summary.branch === input.branchName); + + for (const summary of matches) { + try { + const task = getTask(c, c.state.workspaceId, input.repoId, summary.taskId); + await workspaceActions.applyTaskSummaryUpdate(c, { + taskSummary: await task.getTaskSummary({}), + }); + } catch (error) { + logActorWarning("workspace", "failed refreshing task summary for GitHub branch", { + workspaceId: c.state.workspaceId, + repoId: input.repoId, + branchName: input.branchName, + taskId: summary.taskId, + error: resolveErrorMessage(error), + }); + } + } + }, + + async applyOpenPullRequestUpdate(c: any, input: { pullRequest: WorkbenchOpenPrSummary }): Promise { + const summaries = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, input.pullRequest.repoId)).all(); + if (summaries.some((summary) => summary.branch === input.pullRequest.headRefName)) { + return; + } + c.broadcast("workspaceUpdated", { type: "pullRequestUpdated", pullRequest: input.pullRequest } satisfies WorkspaceEvent); + }, + + async removeOpenPullRequest(c: any, input: { prId: string }): Promise { + c.broadcast("workspaceUpdated", { type: "pullRequestRemoved", prId: input.prId } satisfies WorkspaceEvent); + }, + + async applyGithubRepositoryProjection(c: any, input: { repoId: string; remoteUrl: string }): Promise { + const now = Date.now(); + const existing = await c.db.select({ repoId: repos.repoId }).from(repos).where(eq(repos.repoId, input.repoId)).get(); + await c.db + .insert(repos) + .values({ + repoId: input.repoId, + remoteUrl: input.remoteUrl, + createdAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: repos.repoId, + set: { + remoteUrl: input.remoteUrl, + updatedAt: now, + }, + }) + .run(); + 
await broadcastRepoSummary(c, existing ? "repoUpdated" : "repoAdded", { + repoId: input.repoId, + remoteUrl: input.remoteUrl, + updatedAt: now, + }); + }, + + async applyGithubDataProjection( + c: any, + input: { + connectedAccount: string; + installationStatus: string; + installationId: number | null; + syncStatus: string; + lastSyncLabel: string; + lastSyncAt: number | null; + repositories: Array<{ fullName: string; cloneUrl: string; private: boolean }>; + }, + ): Promise { + const existingRepos = await c.db.select({ repoId: repos.repoId, remoteUrl: repos.remoteUrl, updatedAt: repos.updatedAt }).from(repos).all(); + const existingById = new Map(existingRepos.map((repo) => [repo.repoId, repo])); + const nextRepoIds = new Set(); + const now = Date.now(); + + for (const repository of input.repositories) { + const repoId = repoIdFromRemote(repository.cloneUrl); + nextRepoIds.add(repoId); + await c.db + .insert(repos) + .values({ + repoId, + remoteUrl: repository.cloneUrl, + createdAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: repos.repoId, + set: { + remoteUrl: repository.cloneUrl, + updatedAt: now, + }, + }) + .run(); + await broadcastRepoSummary(c, existingById.has(repoId) ? 
"repoUpdated" : "repoAdded", { + repoId, + remoteUrl: repository.cloneUrl, + updatedAt: now, + }); + } + + for (const repo of existingRepos) { + if (nextRepoIds.has(repo.repoId)) { + continue; + } + await c.db.delete(repos).where(eq(repos.repoId, repo.repoId)).run(); + c.broadcast("workspaceUpdated", { type: "repoRemoved", repoId: repo.repoId } satisfies WorkspaceEvent); + } + + const profile = await c.db + .select({ id: organizationProfile.id }) + .from(organizationProfile) + .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) + .get(); + if (profile) { + await c.db + .update(organizationProfile) + .set({ + githubConnectedAccount: input.connectedAccount, + githubInstallationStatus: input.installationStatus, + githubSyncStatus: input.syncStatus, + githubInstallationId: input.installationId, + githubLastSyncLabel: input.lastSyncLabel, + githubLastSyncAt: input.lastSyncAt, + updatedAt: now, + }) + .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) + .run(); + } + }, + + async recordGithubWebhookReceipt( + c: any, + input: { + workspaceId: string; + event: string; + action?: string | null; + receivedAt?: number; + }, + ): Promise { + assertWorkspace(c, input.workspaceId); + + const profile = await c.db + .select({ id: organizationProfile.id }) + .from(organizationProfile) + .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) + .get(); + if (!profile) { + return; + } + + await c.db + .update(organizationProfile) + .set({ + githubLastWebhookAt: input.receivedAt ?? Date.now(), + githubLastWebhookEvent: input.action ? `${input.event}.${input.action}` : input.event, + }) + .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) + .run(); + }, + async getWorkspaceSummary(c: any, input: WorkspaceUseInput): Promise { assertWorkspace(c, input.workspaceId); return await getWorkspaceSummarySnapshot(c); @@ -620,7 +818,7 @@ export const workspaceActions = { repoId: input.repoId, task: input.task, ...(input.title ? 
{ explicitTitle: input.title } : {}), - ...(input.branch ? { explicitBranchName: input.branch } : {}), + ...(input.onBranch ? { onBranch: input.onBranch } : input.branch ? { explicitBranchName: input.branch } : {}), ...(input.model ? { agentType: agentTypeForModel(input.model) } : {}), }); const task = await requireWorkbenchTask(c, created.taskId); @@ -634,6 +832,10 @@ export const workspaceActions = { tabId: session.tabId, text: input.task, attachments: [], + waitForCompletion: true, + }); + await task.getSessionDetail({ + sessionId: session.tabId, }); return { taskId: created.taskId, @@ -706,6 +908,22 @@ export const workspaceActions = { await task.revertWorkbenchFile(input); }, + async reloadGithubOrganization(c: any): Promise { + await getOrCreateGithubData(c, c.state.workspaceId).reloadOrganization({}); + }, + + async reloadGithubPullRequests(c: any): Promise { + await getOrCreateGithubData(c, c.state.workspaceId).reloadAllPullRequests({}); + }, + + async reloadGithubRepository(c: any, input: { repoId: string }): Promise { + await getOrCreateGithubData(c, c.state.workspaceId).reloadRepository(input); + }, + + async reloadGithubPullRequest(c: any, input: { repoId: string; prNumber: number }): Promise { + await getOrCreateGithubData(c, c.state.workspaceId).reloadPullRequest(input); + }, + async listTasks(c: any, input: ListTasksInput): Promise { assertWorkspace(c, input.workspaceId); diff --git a/foundry/packages/backend/src/actors/workspace/app-shell.ts b/foundry/packages/backend/src/actors/workspace/app-shell.ts index 7f6e73f..d9a5dfa 100644 --- a/foundry/packages/backend/src/actors/workspace/app-shell.ts +++ b/foundry/packages/backend/src/actors/workspace/app-shell.ts @@ -10,7 +10,7 @@ import type { UpdateFoundryOrganizationProfileInput, } from "@sandbox-agent/foundry-shared"; import { getActorRuntimeContext } from "../context.js"; -import { getOrCreateWorkspace, selfWorkspace } from "../handles.js"; +import { getOrCreateGithubData, getOrCreateWorkspace, 
selfWorkspace } from "../handles.js"; import { GitHubAppError } from "../../services/app-github.js"; import { getBetterAuthService } from "../../services/better-auth.js"; import { repoIdFromRemote, repoLabelFromRemote } from "../../services/repo.js"; @@ -601,40 +601,19 @@ export async function syncGithubOrganizationRepos(c: any, input: { sessionId: st const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); - const { appShell } = getActorRuntimeContext(); const workspace = await getOrCreateWorkspace(c, input.organizationId); const organization = await getOrganizationState(workspace); + const githubData = await getOrCreateGithubData(c, input.organizationId); try { - let repositories; - let installationStatus = organization.snapshot.github.installationStatus; - - if (organization.snapshot.kind === "personal") { - repositories = await appShell.github.listUserRepositories(session.githubAccessToken); - installationStatus = "connected"; - } else if (organization.githubInstallationId) { - try { - repositories = await appShell.github.listInstallationRepositories(organization.githubInstallationId); - } catch (error) { - if (!(error instanceof GitHubAppError) || (error.status !== 403 && error.status !== 404)) { - throw error; - } - repositories = (await appShell.github.listUserRepositories(session.githubAccessToken)).filter((repository) => - repository.fullName.startsWith(`${organization.githubLogin}/`), - ); - installationStatus = "reconnect_required"; - } - } else { - repositories = (await appShell.github.listUserRepositories(session.githubAccessToken)).filter((repository) => - repository.fullName.startsWith(`${organization.githubLogin}/`), - ); - installationStatus = "reconnect_required"; - } - - await workspace.applyOrganizationSyncCompleted({ - repositories, - installationStatus, - lastSyncLabel: repositories.length > 0 ? 
"Synced just now" : "No repositories available", + await githubData.fullSync({ + accessToken: session.githubAccessToken, + connectedAccount: organization.snapshot.github.connectedAccount, + installationId: organization.githubInstallationId, + installationStatus: organization.snapshot.github.installationStatus, + githubLogin: organization.githubLogin, + kind: organization.snapshot.kind, + label: "Importing repository catalog...", }); // Broadcast updated app snapshot so connected clients see the new repos @@ -759,6 +738,8 @@ async function buildOrganizationStateFromRow(c: any, row: any, startedAt: number importedRepoCount: repoCatalog.length, lastSyncLabel: row.githubLastSyncLabel, lastSyncAt: row.githubLastSyncAt ?? null, + lastWebhookAt: row.githubLastWebhookAt ?? null, + lastWebhookEvent: row.githubLastWebhookEvent ?? "", }, billing: { planId: row.billingPlanId, @@ -1433,8 +1414,8 @@ export const workspaceAppActions = { const { appShell } = getActorRuntimeContext(); const { event, body } = appShell.github.verifyWebhookEvent(input.payload, input.signatureHeader, input.eventHeader); - const accountLogin = body.installation?.account?.login; - const accountType = body.installation?.account?.type; + const accountLogin = body.installation?.account?.login ?? body.repository?.owner?.login ?? body.organization?.login ?? null; + const accountType = body.installation?.account?.type ?? (body.organization?.login ? "Organization" : null); if (!accountLogin) { githubWebhookLogger.info( { @@ -1449,6 +1430,15 @@ export const workspaceAppActions = { const kind: FoundryOrganization["kind"] = accountType === "User" ? "personal" : "organization"; const organizationId = organizationWorkspaceId(kind, accountLogin); + const receivedAt = Date.now(); + const workspace = await getOrCreateWorkspace(c, organizationId); + await workspace.recordGithubWebhookReceipt({ + workspaceId: organizationId, + event, + action: body.action ?? 
null, + receivedAt, + }); + const githubData = await getOrCreateGithubData(c, organizationId); if (event === "installation" && (body.action === "created" || body.action === "deleted" || body.action === "suspend" || body.action === "unsuspend")) { githubWebhookLogger.info( @@ -1461,12 +1451,36 @@ export const workspaceAppActions = { "installation_event", ); if (body.action === "deleted") { - const workspace = await getOrCreateWorkspace(c, organizationId); - await workspace.applyGithubInstallationRemoved({}); + await githubData.clearState({ + connectedAccount: accountLogin, + installationStatus: "install_required", + installationId: null, + label: "GitHub App installation removed", + }); } else if (body.action === "created") { - const workspace = await getOrCreateWorkspace(c, organizationId); - await workspace.applyGithubInstallationCreated({ - installationId: body.installation?.id ?? 0, + await githubData.fullSync({ + connectedAccount: accountLogin, + installationStatus: "connected", + installationId: body.installation?.id ?? null, + githubLogin: accountLogin, + kind, + label: "Syncing GitHub data from installation webhook...", + }); + } else if (body.action === "suspend") { + await githubData.clearState({ + connectedAccount: accountLogin, + installationStatus: "reconnect_required", + installationId: body.installation?.id ?? null, + label: "GitHub App installation suspended", + }); + } else if (body.action === "unsuspend") { + await githubData.fullSync({ + connectedAccount: accountLogin, + installationStatus: "connected", + installationId: body.installation?.id ?? null, + githubLogin: accountLogin, + kind, + label: "Resyncing GitHub data after unsuspend...", }); } return { ok: true }; @@ -1484,13 +1498,13 @@ export const workspaceAppActions = { }, "repository_membership_changed", ); - const workspace = await getOrCreateWorkspace(c, organizationId); - await workspace.applyGithubRepositoryChanges({ - added: (body.repositories_added ?? 
[]).map((r) => ({ - fullName: r.full_name, - private: r.private, - })), - removed: (body.repositories_removed ?? []).map((r) => r.full_name), + await githubData.fullSync({ + connectedAccount: accountLogin, + installationStatus: "connected", + installationId: body.installation?.id ?? null, + githubLogin: accountLogin, + kind, + label: "Resyncing GitHub data after repository access change...", }); return { ok: true }; } @@ -1518,7 +1532,30 @@ export const workspaceAppActions = { }, "repository_event", ); - // TODO: Dispatch to GitHubStateActor / downstream actors + if (event === "pull_request" && body.repository?.clone_url && body.pull_request) { + await githubData.handlePullRequestWebhook({ + connectedAccount: accountLogin, + installationStatus: "connected", + installationId: body.installation?.id ?? null, + repository: { + fullName: body.repository.full_name, + cloneUrl: body.repository.clone_url, + private: Boolean(body.repository.private), + }, + pullRequest: { + number: body.pull_request.number, + title: body.pull_request.title ?? "", + body: body.pull_request.body ?? null, + state: body.pull_request.state ?? "open", + url: body.pull_request.html_url ?? `https://github.com/${body.repository.full_name}/pull/${body.pull_request.number}`, + headRefName: body.pull_request.head?.ref ?? "", + baseRefName: body.pull_request.base?.ref ?? "", + authorLogin: body.pull_request.user?.login ?? 
null, + isDraft: Boolean(body.pull_request.draft), + merged: Boolean(body.pull_request.merged), + }, + }); + } } return { ok: true }; } diff --git a/foundry/packages/backend/src/actors/workspace/db/drizzle/0000_melted_viper.sql b/foundry/packages/backend/src/actors/workspace/db/drizzle/0000_melted_viper.sql index 508cc74..7410e3b 100644 --- a/foundry/packages/backend/src/actors/workspace/db/drizzle/0000_melted_viper.sql +++ b/foundry/packages/backend/src/actors/workspace/db/drizzle/0000_melted_viper.sql @@ -54,6 +54,8 @@ CREATE TABLE `organization_profile` ( `github_installation_id` integer, `github_last_sync_label` text NOT NULL, `github_last_sync_at` integer, + `github_last_webhook_at` integer, + `github_last_webhook_event` text, `stripe_customer_id` text, `stripe_subscription_id` text, `stripe_price_id` text, diff --git a/foundry/packages/backend/src/actors/workspace/db/drizzle/meta/0000_snapshot.json b/foundry/packages/backend/src/actors/workspace/db/drizzle/meta/0000_snapshot.json index 08a47e5..0ae9736 100644 --- a/foundry/packages/backend/src/actors/workspace/db/drizzle/meta/0000_snapshot.json +++ b/foundry/packages/backend/src/actors/workspace/db/drizzle/meta/0000_snapshot.json @@ -359,6 +359,20 @@ "notNull": false, "autoincrement": false }, + "github_last_webhook_at": { + "name": "github_last_webhook_at", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "github_last_webhook_event": { + "name": "github_last_webhook_event", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, "stripe_customer_id": { "name": "stripe_customer_id", "type": "text", diff --git a/foundry/packages/backend/src/actors/workspace/db/migrations.ts b/foundry/packages/backend/src/actors/workspace/db/migrations.ts index 607eb19..a86578d 100644 --- a/foundry/packages/backend/src/actors/workspace/db/migrations.ts +++ b/foundry/packages/backend/src/actors/workspace/db/migrations.ts @@ -84,6 +84,8 @@ CREATE 
TABLE \`organization_profile\` ( \`github_installation_id\` integer, \`github_last_sync_label\` text NOT NULL, \`github_last_sync_at\` integer, + \`github_last_webhook_at\` integer, + \`github_last_webhook_event\` text, \`stripe_customer_id\` text, \`stripe_subscription_id\` text, \`stripe_price_id\` text, diff --git a/foundry/packages/backend/src/actors/workspace/db/schema.ts b/foundry/packages/backend/src/actors/workspace/db/schema.ts index 93082af..6571b62 100644 --- a/foundry/packages/backend/src/actors/workspace/db/schema.ts +++ b/foundry/packages/backend/src/actors/workspace/db/schema.ts @@ -55,6 +55,8 @@ export const organizationProfile = sqliteTable("organization_profile", { githubInstallationId: integer("github_installation_id"), githubLastSyncLabel: text("github_last_sync_label").notNull(), githubLastSyncAt: integer("github_last_sync_at"), + githubLastWebhookAt: integer("github_last_webhook_at"), + githubLastWebhookEvent: text("github_last_webhook_event"), stripeCustomerId: text("stripe_customer_id"), stripeSubscriptionId: text("stripe_subscription_id"), stripePriceId: text("stripe_price_id"), diff --git a/foundry/packages/backend/src/services/app-github.ts b/foundry/packages/backend/src/services/app-github.ts index 1f04fe3..065c382 100644 --- a/foundry/packages/backend/src/services/app-github.ts +++ b/foundry/packages/backend/src/services/app-github.ts @@ -40,6 +40,30 @@ export interface GitHubRepositoryRecord { private: boolean; } +export interface GitHubMemberRecord { + id: string; + login: string; + name: string; + email: string | null; + role: string | null; + state: string; +} + +export interface GitHubPullRequestRecord { + repoFullName: string; + cloneUrl: string; + number: number; + title: string; + body: string | null; + state: string; + url: string; + headRefName: string; + baseRefName: string; + authorLogin: string | null; + isDraft: boolean; + merged: boolean; +} + interface GitHubTokenResponse { access_token?: string; scope?: string; @@ 
-58,11 +82,23 @@ const githubOAuthLogger = logger.child({ export interface GitHubWebhookEvent { action?: string; + organization?: { login?: string; id?: number }; installation?: { id: number; account?: { login?: string; type?: string; id?: number } | null }; repositories_added?: Array<{ id: number; full_name: string; private: boolean }>; repositories_removed?: Array<{ id: number; full_name: string }>; repository?: { id: number; full_name: string; clone_url?: string; private?: boolean; owner?: { login?: string } }; - pull_request?: { number: number; title?: string; state?: string; head?: { ref?: string }; base?: { ref?: string } }; + pull_request?: { + number: number; + title?: string; + body?: string | null; + state?: string; + html_url?: string; + draft?: boolean; + merged?: boolean; + user?: { login?: string } | null; + head?: { ref?: string }; + base?: { ref?: string }; + }; sender?: { login?: string; id?: number }; [key: string]: unknown; } @@ -329,6 +365,130 @@ export class GitHubAppClient { })); } + async getUserRepository(accessToken: string, fullName: string): Promise { + try { + const repository = await this.requestJson<{ + full_name: string; + clone_url: string; + private: boolean; + }>(`/repos/${fullName}`, accessToken); + return { + fullName: repository.full_name, + cloneUrl: repository.clone_url, + private: repository.private, + }; + } catch (error) { + if (error instanceof GitHubAppError && error.status === 404) { + return null; + } + throw error; + } + } + + async getInstallationRepository(installationId: number, fullName: string): Promise { + const accessToken = await this.createInstallationAccessToken(installationId); + return await this.getUserRepository(accessToken, fullName); + } + + async listOrganizationMembers(accessToken: string, organizationLogin: string): Promise { + const members = await this.paginate<{ + id: number; + login: string; + role?: string | null; + }>(`/orgs/${organizationLogin}/members?per_page=100&role=all`, accessToken); + + 
const detailedMembers = await Promise.all( + members.map(async (member) => { + try { + const detail = await this.requestJson<{ + id: number; + login: string; + name?: string | null; + email?: string | null; + }>(`/users/${member.login}`, accessToken); + return { + id: String(detail.id), + login: detail.login, + name: detail.name?.trim() || detail.login, + email: detail.email ?? null, + role: member.role ?? null, + state: "active", + }; + } catch { + return { + id: String(member.id), + login: member.login, + name: member.login, + email: null, + role: member.role ?? null, + state: "active", + }; + } + }), + ); + + return detailedMembers; + } + + async listInstallationMembers(installationId: number, organizationLogin: string): Promise { + const accessToken = await this.createInstallationAccessToken(installationId); + return await this.listOrganizationMembers(accessToken, organizationLogin); + } + + async listPullRequestsForUserRepositories(accessToken: string, repositories: GitHubRepositoryRecord[]): Promise { + return (await Promise.all(repositories.map((repository) => this.listRepositoryPullRequests(accessToken, repository.fullName, repository.cloneUrl)))).flat(); + } + + async listInstallationPullRequestsForRepositories(installationId: number, repositories: GitHubRepositoryRecord[]): Promise { + const accessToken = await this.createInstallationAccessToken(installationId); + return await this.listPullRequestsForUserRepositories(accessToken, repositories); + } + + async getUserPullRequest(accessToken: string, fullName: string, prNumber: number): Promise { + try { + const pullRequest = await this.requestJson<{ + number: number; + title: string; + body?: string | null; + state: string; + html_url: string; + draft?: boolean; + merged?: boolean; + user?: { login?: string } | null; + head?: { ref?: string } | null; + base?: { ref?: string } | null; + }>(`/repos/${fullName}/pulls/${prNumber}`, accessToken); + const repository = await this.getUserRepository(accessToken, 
fullName); + if (!repository) { + return null; + } + return { + repoFullName: fullName, + cloneUrl: repository.cloneUrl, + number: pullRequest.number, + title: pullRequest.title, + body: pullRequest.body ?? null, + state: pullRequest.state, + url: pullRequest.html_url, + headRefName: pullRequest.head?.ref?.trim() ?? "", + baseRefName: pullRequest.base?.ref?.trim() ?? "", + authorLogin: pullRequest.user?.login?.trim() ?? null, + isDraft: Boolean(pullRequest.draft), + merged: Boolean(pullRequest.merged), + }; + } catch (error) { + if (error instanceof GitHubAppError && error.status === 404) { + return null; + } + throw error; + } + } + + async getInstallationPullRequest(installationId: number, fullName: string, prNumber: number): Promise { + const accessToken = await this.createInstallationAccessToken(installationId); + return await this.getUserPullRequest(accessToken, fullName, prNumber); + } + async buildInstallationUrl(organizationLogin: string, state: string): Promise { if (!this.isAppConfigured()) { throw new GitHubAppError("GitHub App is not configured", 500); @@ -437,6 +597,36 @@ export class GitHubAppClient { return payload as T; } + private async listRepositoryPullRequests(accessToken: string, fullName: string, cloneUrl: string): Promise { + const pullRequests = await this.paginate<{ + number: number; + title: string; + body?: string | null; + state: string; + html_url: string; + draft?: boolean; + merged?: boolean; + user?: { login?: string } | null; + head?: { ref?: string } | null; + base?: { ref?: string } | null; + }>(`/repos/${fullName}/pulls?state=open&per_page=100&sort=updated&direction=desc`, accessToken); + + return pullRequests.map((pullRequest) => ({ + repoFullName: fullName, + cloneUrl, + number: pullRequest.number, + title: pullRequest.title, + body: pullRequest.body ?? null, + state: pullRequest.state, + url: pullRequest.html_url, + headRefName: pullRequest.head?.ref?.trim() ?? "", + baseRefName: pullRequest.base?.ref?.trim() ?? 
"", + authorLogin: pullRequest.user?.login?.trim() ?? null, + isDraft: Boolean(pullRequest.draft), + merged: Boolean(pullRequest.merged), + })); + } + private async paginate(path: string, accessToken: string): Promise { let nextUrl = `${this.apiBaseUrl}${path.startsWith("/") ? path : `/${path}`}`; const items: T[] = []; diff --git a/foundry/packages/backend/src/services/queue.ts b/foundry/packages/backend/src/services/queue.ts index b366375..34e697c 100644 --- a/foundry/packages/backend/src/services/queue.ts +++ b/foundry/packages/backend/src/services/queue.ts @@ -7,6 +7,14 @@ export function expectQueueResponse(result: QueueSendResult | void): T { if (!result || result.status === "timedOut") { throw new Error("Queue command timed out"); } + if ( + result.response && + typeof result.response === "object" && + "error" in result.response && + typeof (result.response as { error?: unknown }).error === "string" + ) { + throw new Error((result.response as { error: string }).error); + } return result.response as T; } diff --git a/foundry/packages/backend/test/keys.test.ts b/foundry/packages/backend/test/keys.test.ts index d0886d2..28bf1dc 100644 --- a/foundry/packages/backend/test/keys.test.ts +++ b/foundry/packages/backend/test/keys.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from "vitest"; -import { taskKey, historyKey, projectBranchSyncKey, projectKey, projectPrSyncKey, taskSandboxKey, workspaceKey } from "../src/actors/keys.js"; +import { githubDataKey, historyKey, projectBranchSyncKey, projectKey, taskKey, taskSandboxKey, workspaceKey } from "../src/actors/keys.js"; describe("actor keys", () => { it("prefixes every key with workspace namespace", () => { @@ -9,7 +9,7 @@ describe("actor keys", () => { taskKey("default", "repo", "task"), taskSandboxKey("default", "sbx"), historyKey("default", "repo"), - projectPrSyncKey("default", "repo"), + githubDataKey("default"), projectBranchSyncKey("default", "repo"), ]; diff --git 
a/foundry/packages/backend/test/workbench-unread.test.ts b/foundry/packages/backend/test/workbench-unread.test.ts index f7ed201..aafc178 100644 --- a/foundry/packages/backend/test/workbench-unread.test.ts +++ b/foundry/packages/backend/test/workbench-unread.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from "vitest"; -import { shouldMarkSessionUnreadForStatus } from "../src/actors/task/workbench.js"; +import { shouldMarkSessionUnreadForStatus, shouldRecreateSessionForModelChange } from "../src/actors/task/workbench.js"; describe("workbench unread status transitions", () => { it("marks unread when a running session first becomes idle", () => { @@ -14,3 +14,46 @@ describe("workbench unread status transitions", () => { expect(shouldMarkSessionUnreadForStatus({ thinkingSinceMs: Date.now() - 1_000 }, "running")).toBe(false); }); }); + +describe("workbench model changes", () => { + it("recreates an unused ready session so the selected model takes effect", () => { + expect( + shouldRecreateSessionForModelChange({ + status: "ready", + sandboxSessionId: "session-1", + created: false, + transcript: [], + }), + ).toBe(true); + }); + + it("does not recreate a session once the conversation has started", () => { + expect( + shouldRecreateSessionForModelChange({ + status: "ready", + sandboxSessionId: "session-1", + created: true, + transcript: [], + }), + ).toBe(false); + }); + + it("does not recreate pending or anonymous sessions", () => { + expect( + shouldRecreateSessionForModelChange({ + status: "pending_session_create", + sandboxSessionId: "session-1", + created: false, + transcript: [], + }), + ).toBe(false); + expect( + shouldRecreateSessionForModelChange({ + status: "ready", + sandboxSessionId: null, + created: false, + transcript: [], + }), + ).toBe(false); + }); +}); diff --git a/foundry/packages/client/src/backend-client.ts b/foundry/packages/client/src/backend-client.ts index 2c34442..ccb0657 100644 --- a/foundry/packages/client/src/backend-client.ts +++ 
b/foundry/packages/client/src/backend-client.ts @@ -112,6 +112,10 @@ interface WorkspaceHandle { closeWorkbenchSession(input: TaskWorkbenchTabInput): Promise; publishWorkbenchPr(input: TaskWorkbenchSelectInput): Promise; revertWorkbenchFile(input: TaskWorkbenchDiffInput): Promise; + reloadGithubOrganization(): Promise; + reloadGithubPullRequests(): Promise; + reloadGithubRepository(input: { repoId: string }): Promise; + reloadGithubPullRequest(input: { repoId: string; prNumber: number }): Promise; } interface AppWorkspaceHandle { @@ -296,6 +300,10 @@ export interface BackendClient { closeWorkbenchSession(workspaceId: string, input: TaskWorkbenchTabInput): Promise; publishWorkbenchPr(workspaceId: string, input: TaskWorkbenchSelectInput): Promise; revertWorkbenchFile(workspaceId: string, input: TaskWorkbenchDiffInput): Promise; + reloadGithubOrganization(workspaceId: string): Promise; + reloadGithubPullRequests(workspaceId: string): Promise; + reloadGithubRepository(workspaceId: string, repoId: string): Promise; + reloadGithubPullRequest(workspaceId: string, repoId: string, prNumber: number): Promise; health(): Promise<{ ok: true }>; useWorkspace(workspaceId: string): Promise<{ workspaceId: string }>; starSandboxAgentRepo(workspaceId: string): Promise; @@ -1182,6 +1190,22 @@ export function createBackendClient(options: BackendClientOptions): BackendClien await (await workspace(workspaceId)).revertWorkbenchFile(input); }, + async reloadGithubOrganization(workspaceId: string): Promise { + await (await workspace(workspaceId)).reloadGithubOrganization(); + }, + + async reloadGithubPullRequests(workspaceId: string): Promise { + await (await workspace(workspaceId)).reloadGithubPullRequests(); + }, + + async reloadGithubRepository(workspaceId: string, repoId: string): Promise { + await (await workspace(workspaceId)).reloadGithubRepository({ repoId }); + }, + + async reloadGithubPullRequest(workspaceId: string, repoId: string, prNumber: number): Promise { + await (await 
workspace(workspaceId)).reloadGithubPullRequest({ repoId, prNumber }); + }, + async health(): Promise<{ ok: true }> { const workspaceId = options.defaultWorkspaceId; if (!workspaceId) { diff --git a/foundry/packages/client/src/interest/topics.ts b/foundry/packages/client/src/interest/topics.ts index a111248..2e38bf0 100644 --- a/foundry/packages/client/src/interest/topics.ts +++ b/foundry/packages/client/src/interest/topics.ts @@ -53,6 +53,11 @@ function upsertById(items: T[], nextItem: T, sort: (le return [...filtered, nextItem].sort(sort); } +function upsertByPrId(items: T[], nextItem: T, sort: (left: T, right: T) => number): T[] { + const filtered = items.filter((item) => item.prId !== nextItem.prId); + return [...filtered, nextItem].sort(sort); +} + export const topicDefinitions = { app: { key: () => "app", @@ -90,6 +95,16 @@ export const topicDefinitions = { ...current, repos: current.repos.filter((repo) => repo.id !== event.repoId), }; + case "pullRequestUpdated": + return { + ...current, + openPullRequests: upsertByPrId(current.openPullRequests, event.pullRequest, (left, right) => right.updatedAtMs - left.updatedAtMs), + }; + case "pullRequestRemoved": + return { + ...current, + openPullRequests: current.openPullRequests.filter((pullRequest) => pullRequest.prId !== event.prId), + }; } }, } satisfies TopicDefinition, diff --git a/foundry/packages/client/src/mock-app.ts b/foundry/packages/client/src/mock-app.ts index 1f1ed39..1cec853 100644 --- a/foundry/packages/client/src/mock-app.ts +++ b/foundry/packages/client/src/mock-app.ts @@ -52,6 +52,8 @@ export interface MockFoundryGithubState { importedRepoCount: number; lastSyncLabel: string; lastSyncAt: number | null; + lastWebhookAt: number | null; + lastWebhookEvent: string; } export interface MockFoundryOrganizationSettings { @@ -188,6 +190,8 @@ function buildRivetOrganization(): MockFoundryOrganization { importedRepoCount: repos.length, lastSyncLabel: "Synced just now", lastSyncAt: Date.now() - 60_000, + 
lastWebhookAt: Date.now() - 30_000, + lastWebhookEvent: "push", }, billing: { planId: "team", @@ -267,6 +271,8 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { importedRepoCount: 1, lastSyncLabel: "Synced just now", lastSyncAt: Date.now() - 60_000, + lastWebhookAt: Date.now() - 120_000, + lastWebhookEvent: "pull_request.opened", }, billing: { planId: "free", @@ -301,6 +307,8 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { importedRepoCount: 3, lastSyncLabel: "Waiting for first import", lastSyncAt: null, + lastWebhookAt: null, + lastWebhookEvent: "", }, billing: { planId: "team", @@ -344,6 +352,8 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { importedRepoCount: 1, lastSyncLabel: "Synced yesterday", lastSyncAt: Date.now() - 24 * 60 * 60_000, + lastWebhookAt: Date.now() - 3_600_000, + lastWebhookEvent: "check_run.completed", }, billing: { planId: "free", @@ -397,6 +407,8 @@ function parseStoredSnapshot(): MockFoundryAppSnapshot | null { ...organization.github, syncStatus: syncStatusFromLegacy(organization.github?.syncStatus ?? organization.repoImportStatus), lastSyncAt: organization.github?.lastSyncAt ?? null, + lastWebhookAt: organization.github?.lastWebhookAt ?? null, + lastWebhookEvent: organization.github?.lastWebhookEvent ?? 
"", }, })), }; @@ -567,6 +579,8 @@ class MockFoundryAppStore implements MockFoundryAppClient { syncStatus: "synced", lastSyncLabel: "Synced just now", lastSyncAt: Date.now(), + lastWebhookAt: Date.now(), + lastWebhookEvent: "installation_repositories.added", }, })); this.importTimers.delete(organizationId); diff --git a/foundry/packages/client/src/mock/backend-client.ts b/foundry/packages/client/src/mock/backend-client.ts index 2048a60..b87c8c4 100644 --- a/foundry/packages/client/src/mock/backend-client.ts +++ b/foundry/packages/client/src/mock/backend-client.ts @@ -249,6 +249,7 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend }; }), taskSummaries, + openPullRequests: [], }; }; @@ -763,6 +764,14 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend emitTaskUpdate(input.taskId); }, + async reloadGithubOrganization(): Promise {}, + + async reloadGithubPullRequests(): Promise {}, + + async reloadGithubRepository(): Promise {}, + + async reloadGithubPullRequest(): Promise {}, + async health(): Promise<{ ok: true }> { return { ok: true }; }, diff --git a/foundry/packages/client/src/remote/workbench-client.ts b/foundry/packages/client/src/remote/workbench-client.ts index 4b25193..480921c 100644 --- a/foundry/packages/client/src/remote/workbench-client.ts +++ b/foundry/packages/client/src/remote/workbench-client.ts @@ -100,7 +100,8 @@ class RemoteWorkbenchStore implements TaskWorkbenchClient { async updateDraft(input: TaskWorkbenchUpdateDraftInput): Promise { await this.backend.updateWorkbenchDraft(this.workspaceId, input); - await this.refresh(); + // Skip refresh — the server broadcast will trigger it, and the frontend + // holds local draft state to avoid the round-trip overwriting user input. 
} async sendMessage(input: TaskWorkbenchSendMessageInput): Promise { diff --git a/foundry/packages/client/test/interest-manager.test.ts b/foundry/packages/client/test/interest-manager.test.ts index db86b0d..eb39f6c 100644 --- a/foundry/packages/client/test/interest-manager.test.ts +++ b/foundry/packages/client/test/interest-manager.test.ts @@ -64,6 +64,7 @@ function workspaceSnapshot(): WorkspaceSummarySnapshot { sessionsSummary: [], }, ], + openPullRequests: [], }; } diff --git a/foundry/packages/frontend/src/components/dev-panel.tsx b/foundry/packages/frontend/src/components/dev-panel.tsx index 68d5cd9..061eff1 100644 --- a/foundry/packages/frontend/src/components/dev-panel.tsx +++ b/foundry/packages/frontend/src/components/dev-panel.tsx @@ -71,10 +71,10 @@ function timeAgo(ts: number | null): string { if (!ts) return "never"; const seconds = Math.floor((Date.now() - ts) / 1000); if (seconds < 5) return "now"; - if (seconds < 60) return `${seconds}s`; + if (seconds < 60) return `${seconds}s ago`; const minutes = Math.floor(seconds / 60); - if (minutes < 60) return `${minutes}m`; - return `${Math.floor(minutes / 60)}h`; + if (minutes < 60) return `${minutes}m ago`; + return `${Math.floor(minutes / 60)}h ago`; } function statusColor(status: string, t: ReturnType): string { @@ -157,8 +157,11 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza }, [now]); const repos = snapshot.repos ?? []; + const prCount = (snapshot.tasks ?? []).filter((task) => task.pullRequest != null).length; const focusedTaskStatus = focusedTask?.runtimeStatus ?? focusedTask?.status ?? null; const focusedTaskState = describeTaskState(focusedTaskStatus, focusedTask?.statusMessage ?? null); + const lastWebhookAt = organization?.github.lastWebhookAt ?? 
null; + const hasRecentWebhook = lastWebhookAt != null && now - lastWebhookAt < 5 * 60_000; const mono = css({ fontFamily: "ui-monospace, SFMono-Regular, 'SF Mono', Consolas, monospace", @@ -436,8 +439,28 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza Sync {organization.github.syncStatus}
+
+ + Webhook + {lastWebhookAt != null ? ( + + {organization.github.lastWebhookEvent} · {timeAgo(lastWebhookAt)} + + ) : ( + never received + )} +
- + +
{organization.github.connectedAccount && (
@{organization.github.connectedAccount}
diff --git a/foundry/packages/frontend/src/components/mock-layout.tsx b/foundry/packages/frontend/src/components/mock-layout.tsx index 6988f23..be995c0 100644 --- a/foundry/packages/frontend/src/components/mock-layout.tsx +++ b/foundry/packages/frontend/src/components/mock-layout.tsx @@ -3,14 +3,16 @@ import { useNavigate } from "@tanstack/react-router"; import { useStyletron } from "baseui"; import { createErrorContext, + type FoundryOrganization, type TaskWorkbenchSnapshot, + type WorkbenchOpenPrSummary, type WorkbenchSessionSummary, type WorkbenchTaskDetail, type WorkbenchTaskSummary, } from "@sandbox-agent/foundry-shared"; import { useInterest } from "@sandbox-agent/foundry-client"; -import { PanelLeft, PanelRight } from "lucide-react"; +import { CircleAlert, PanelLeft, PanelRight } from "lucide-react"; import { useFoundryTokens } from "../app/theme"; import { logger } from "../logging.js"; @@ -75,6 +77,59 @@ function sanitizeActiveTabId(task: Task, tabId: string | null | undefined, openD return openDiffs.length > 0 ? diffTabId(openDiffs[openDiffs.length - 1]!) : lastAgentTabId; } +function githubInstallationWarningTitle(organization: FoundryOrganization): string { + return organization.github.installationStatus === "install_required" ? "GitHub App not installed" : "GitHub App needs reconnection"; +} + +function githubInstallationWarningDetail(organization: FoundryOrganization): string { + const statusDetail = organization.github.lastSyncLabel.trim(); + const requirementDetail = + organization.github.installationStatus === "install_required" + ? "Webhooks are required for Foundry to function. Repo sync and PR updates will not work until the GitHub App is installed for this workspace." + : "Webhook delivery is unavailable. Repo sync and PR updates will not work until the GitHub App is reconnected."; + return statusDetail ? 
`${requirementDetail} ${statusDetail}.` : requirementDetail; +} + +function GithubInstallationWarning({ + organization, + css, + t, +}: { + organization: FoundryOrganization; + css: ReturnType[0]; + t: ReturnType; +}) { + if (organization.github.installationStatus === "connected") { + return null; + } + + return ( +
+ +
+
{githubInstallationWarningTitle(organization)}
+
{githubInstallationWarningDetail(organization)}
+
+
+ ); +} + function toLegacyTab( summary: WorkbenchSessionSummary, sessionDetail?: { draft: Task["tabs"][number]["draft"]; transcript: Task["tabs"][number]["transcript"] }, @@ -125,6 +180,40 @@ function toLegacyTask( }; } +const OPEN_PR_TASK_PREFIX = "pr:"; + +function openPrTaskId(prId: string): string { + return `${OPEN_PR_TASK_PREFIX}${prId}`; +} + +function isOpenPrTaskId(taskId: string): boolean { + return taskId.startsWith(OPEN_PR_TASK_PREFIX); +} + +function toLegacyOpenPrTask(pullRequest: WorkbenchOpenPrSummary): Task { + return { + id: openPrTaskId(pullRequest.prId), + repoId: pullRequest.repoId, + title: pullRequest.title, + status: "new", + runtimeStatus: undefined, + statusMessage: pullRequest.authorLogin ? `@${pullRequest.authorLogin}` : null, + repoName: pullRequest.repoFullName, + updatedAtMs: pullRequest.updatedAtMs, + branch: pullRequest.headRefName, + pullRequest: { + number: pullRequest.number, + status: pullRequest.isDraft ? "draft" : "ready", + }, + tabs: [], + fileChanges: [], + diffs: {}, + fileTree: [], + minutesUsed: 0, + activeSandboxId: null, + }; +} + function sessionStateMessage(tab: Task["tabs"][number] | null | undefined): string | null { if (!tab) { return null; @@ -153,7 +242,14 @@ function groupProjects(repos: Array<{ id: string; label: string }>, tasks: Task[ } interface WorkbenchActions { - createTask(input: { repoId: string; task: string; title?: string; branch?: string; model?: ModelId }): Promise<{ taskId: string; tabId?: string }>; + createTask(input: { + repoId: string; + task: string; + title?: string; + branch?: string; + onBranch?: string; + model?: ModelId; + }): Promise<{ taskId: string; tabId?: string }>; markTaskUnread(input: { taskId: string }): Promise; renameTask(input: { taskId: string; value: string }): Promise; renameBranch(input: { taskId: string; value: string }): Promise; @@ -168,6 +264,10 @@ interface WorkbenchActions { closeTab(input: { taskId: string; tabId: string }): Promise; addTab(input: { taskId: 
string; model?: string }): Promise<{ tabId: string }>; changeModel(input: { taskId: string; tabId: string; model: ModelId }): Promise; + reloadGithubOrganization(): Promise; + reloadGithubPullRequests(): Promise; + reloadGithubRepository(repoId: string): Promise; + reloadGithubPullRequest(repoId: string, prNumber: number): Promise; } const TranscriptPanel = memo(function TranscriptPanel({ @@ -187,6 +287,7 @@ const TranscriptPanel = memo(function TranscriptPanel({ onSidebarPeekEnd, rightSidebarCollapsed, onToggleRightSidebar, + selectedSessionHydrating = false, onNavigateToUsage, }: { taskWorkbenchClient: WorkbenchActions; @@ -205,6 +306,7 @@ const TranscriptPanel = memo(function TranscriptPanel({ onSidebarPeekEnd?: () => void; rightSidebarCollapsed?: boolean; onToggleRightSidebar?: () => void; + selectedSessionHydrating?: boolean; onNavigateToUsage?: () => void; }) { const t = useFoundryTokens(); @@ -216,6 +318,11 @@ const TranscriptPanel = memo(function TranscriptPanel({ const [pendingHistoryTarget, setPendingHistoryTarget] = useState<{ messageId: string; tabId: string } | null>(null); const [copiedMessageId, setCopiedMessageId] = useState(null); const [timerNowMs, setTimerNowMs] = useState(() => Date.now()); + const [localDraft, setLocalDraft] = useState(""); + const [localAttachments, setLocalAttachments] = useState([]); + const lastEditTimeRef = useRef(0); + const throttleTimerRef = useRef | null>(null); + const pendingDraftRef = useRef<{ text: string; attachments: LineAttachment[] } | null>(null); const scrollRef = useRef(null); const textareaRef = useRef(null); const messageRefs = useRef(new Map()); @@ -235,8 +342,27 @@ const TranscriptPanel = memo(function TranscriptPanel({ !!activeAgentTab && (activeAgentTab.status === "pending_provision" || activeAgentTab.status === "pending_session_create" || activeAgentTab.status === "error") && activeMessages.length === 0; - const draft = promptTab?.draft.text ?? ""; - const attachments = promptTab?.draft.attachments ?? 
[]; + const serverDraft = promptTab?.draft.text ?? ""; + const serverAttachments = promptTab?.draft.attachments ?? []; + + // Sync server → local only when user hasn't typed recently (3s cooldown) + const DRAFT_SYNC_COOLDOWN_MS = 3_000; + useEffect(() => { + if (Date.now() - lastEditTimeRef.current > DRAFT_SYNC_COOLDOWN_MS) { + setLocalDraft(serverDraft); + setLocalAttachments(serverAttachments); + } + }, [serverDraft, serverAttachments]); + + // Reset local draft immediately on tab/task switch + useEffect(() => { + lastEditTimeRef.current = 0; + setLocalDraft(promptTab?.draft.text ?? ""); + setLocalAttachments(promptTab?.draft.attachments ?? []); + }, [promptTab?.id, task.id]); + + const draft = localDraft; + const attachments = localAttachments; useEffect(() => { if (scrollRef.current) { @@ -343,20 +469,53 @@ const TranscriptPanel = memo(function TranscriptPanel({ [editValue, task.id], ); + const DRAFT_THROTTLE_MS = 500; + + const flushDraft = useCallback( + (text: string, nextAttachments: LineAttachment[], tabId: string) => { + void taskWorkbenchClient.updateDraft({ + taskId: task.id, + tabId, + text, + attachments: nextAttachments, + }); + }, + [task.id], + ); + + // Clean up throttle timer on unmount + useEffect(() => { + return () => { + if (throttleTimerRef.current) { + clearTimeout(throttleTimerRef.current); + } + }; + }, []); + const updateDraft = useCallback( (nextText: string, nextAttachments: LineAttachment[]) => { if (!promptTab) { return; } - void taskWorkbenchClient.updateDraft({ - taskId: task.id, - tabId: promptTab.id, - text: nextText, - attachments: nextAttachments, - }); + // Update local state immediately for responsive typing + lastEditTimeRef.current = Date.now(); + setLocalDraft(nextText); + setLocalAttachments(nextAttachments); + + // Throttle the network call + pendingDraftRef.current = { text: nextText, attachments: nextAttachments }; + if (!throttleTimerRef.current) { + throttleTimerRef.current = setTimeout(() => { + 
throttleTimerRef.current = null; + if (pendingDraftRef.current) { + flushDraft(pendingDraftRef.current.text, pendingDraftRef.current.attachments, promptTab.id); + pendingDraftRef.current = null; + } + }, DRAFT_THROTTLE_MS); + } }, - [task.id, promptTab], + [promptTab, flushDraft], ); const sendMessage = useCallback(() => { @@ -687,6 +846,33 @@ const TranscriptPanel = memo(function TranscriptPanel({
+ ) : selectedSessionHydrating ? ( + +
+
+ +

Loading session

+

Fetching the latest transcript for this session.

+
+
+
) : showPendingSessionState ? (
backendClient.closeWorkbenchSession(workspaceId, input), addTab: (input) => backendClient.createWorkbenchSession(workspaceId, input), changeModel: (input) => backendClient.changeWorkbenchModel(workspaceId, input), + reloadGithubOrganization: () => backendClient.reloadGithubOrganization(workspaceId), + reloadGithubPullRequests: () => backendClient.reloadGithubPullRequests(workspaceId), + reloadGithubRepository: (repoId) => backendClient.reloadGithubRepository(workspaceId, repoId), + reloadGithubPullRequest: (repoId, prNumber) => backendClient.reloadGithubPullRequest(workspaceId, repoId, prNumber), }), [workspaceId], ); const workspaceState = useInterest(interestManager, "workspace", { workspaceId }); const workspaceRepos = workspaceState.data?.repos ?? []; const taskSummaries = workspaceState.data?.taskSummaries ?? []; + const openPullRequests = workspaceState.data?.openPullRequests ?? []; + const openPullRequestsByTaskId = useMemo( + () => new Map(openPullRequests.map((pullRequest) => [openPrTaskId(pullRequest.prId), pullRequest])), + [openPullRequests], + ); + const selectedOpenPullRequest = useMemo( + () => (selectedTaskId ? (openPullRequestsByTaskId.get(selectedTaskId) ?? null) : null), + [openPullRequestsByTaskId, selectedTaskId], + ); const selectedTaskSummary = useMemo( () => taskSummaries.find((task) => task.id === selectedTaskId) ?? taskSummaries[0] ?? null, [selectedTaskId, taskSummaries], @@ -1169,10 +1368,12 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M } } - return taskSummaries.map((summary) => + const legacyTasks = taskSummaries.map((summary) => summary.id === selectedTaskSummary?.id ? 
toLegacyTask(summary, taskState.data, sessionCache) : toLegacyTask(summary), ); - }, [selectedTaskSummary, selectedSessionId, sessionState.data, taskState.data, taskSummaries, workspaceId]); + const legacyOpenPrs = openPullRequests.map((pullRequest) => toLegacyOpenPrTask(pullRequest)); + return [...legacyTasks, ...legacyOpenPrs].sort((left, right) => right.updatedAtMs - left.updatedAtMs); + }, [openPullRequests, selectedTaskSummary, selectedSessionId, sessionState.data, taskState.data, taskSummaries, workspaceId]); const rawProjects = useMemo(() => groupProjects(workspaceRepos, tasks), [tasks, workspaceRepos]); const appSnapshot = useMockAppSnapshot(); const activeOrg = activeMockOrganization(appSnapshot); @@ -1200,9 +1401,11 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M const leftWidthRef = useRef(leftWidth); const rightWidthRef = useRef(rightWidth); const autoCreatingSessionForTaskRef = useRef>(new Set()); + const resolvingOpenPullRequestsRef = useRef>(new Set()); const [leftSidebarOpen, setLeftSidebarOpen] = useState(true); const [rightSidebarOpen, setRightSidebarOpen] = useState(true); const [leftSidebarPeeking, setLeftSidebarPeeking] = useState(false); + const [materializingOpenPrId, setMaterializingOpenPrId] = useState(null); const showDevPanel = useDevPanel(); const peekTimeoutRef = useRef | null>(null); @@ -1268,13 +1471,81 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M startRightRef.current = rightWidthRef.current; }, []); - const activeTask = useMemo(() => tasks.find((task) => task.id === selectedTaskId) ?? tasks[0] ?? null, [tasks, selectedTaskId]); + const activeTask = useMemo(() => { + const realTasks = tasks.filter((task) => !isOpenPrTaskId(task.id)); + if (selectedOpenPullRequest) { + return null; + } + if (selectedTaskId) { + return realTasks.find((task) => task.id === selectedTaskId) ?? realTasks[0] ?? null; + } + return realTasks[0] ?? 
null; + }, [selectedOpenPullRequest, selectedTaskId, tasks]); + + const materializeOpenPullRequest = useCallback( + async (pullRequest: WorkbenchOpenPrSummary) => { + if (resolvingOpenPullRequestsRef.current.has(pullRequest.prId)) { + return; + } + + resolvingOpenPullRequestsRef.current.add(pullRequest.prId); + setMaterializingOpenPrId(pullRequest.prId); + + try { + const { taskId, tabId } = await taskWorkbenchClient.createTask({ + repoId: pullRequest.repoId, + task: `Continue work on GitHub PR #${pullRequest.number}: ${pullRequest.title}`, + model: "gpt-5.3-codex", + title: pullRequest.title, + onBranch: pullRequest.headRefName, + }); + await navigate({ + to: "/workspaces/$workspaceId/tasks/$taskId", + params: { + workspaceId, + taskId, + }, + search: { sessionId: tabId ?? undefined }, + replace: true, + }); + } catch (error) { + setMaterializingOpenPrId((current) => (current === pullRequest.prId ? null : current)); + resolvingOpenPullRequestsRef.current.delete(pullRequest.prId); + logger.error( + { + prId: pullRequest.prId, + repoId: pullRequest.repoId, + branchName: pullRequest.headRefName, + ...createErrorContext(error), + }, + "failed_to_materialize_open_pull_request_task", + ); + } + }, + [navigate, taskWorkbenchClient, workspaceId], + ); + + useEffect(() => { + if (!selectedOpenPullRequest) { + if (materializingOpenPrId) { + resolvingOpenPullRequestsRef.current.delete(materializingOpenPrId); + } + setMaterializingOpenPrId(null); + return; + } + + void materializeOpenPullRequest(selectedOpenPullRequest); + }, [materializeOpenPullRequest, materializingOpenPrId, selectedOpenPullRequest]); useEffect(() => { if (activeTask) { return; } + if (selectedOpenPullRequest || materializingOpenPrId) { + return; + } + const fallbackTaskId = tasks[0]?.id; if (!fallbackTaskId) { return; @@ -1291,11 +1562,12 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M search: { sessionId: fallbackTask?.tabs[0]?.id ?? 
undefined }, replace: true, }); - }, [activeTask, tasks, navigate, workspaceId]); + }, [activeTask, materializingOpenPrId, navigate, selectedOpenPullRequest, tasks, workspaceId]); const openDiffs = activeTask ? sanitizeOpenDiffs(activeTask, openDiffsByTask[activeTask.id]) : []; const lastAgentTabId = activeTask ? sanitizeLastAgentTabId(activeTask, lastAgentTabIdByTask[activeTask.id]) : null; const activeTabId = activeTask ? sanitizeActiveTabId(activeTask, activeTabIdByTask[activeTask.id], openDiffs, lastAgentTabId) : null; + const selectedSessionHydrating = Boolean(selectedSessionId && activeTabId === selectedSessionId && sessionState.status === "loading" && !sessionState.data); const syncRouteSession = useCallback( (taskId: string, sessionId: string | null, replace = false) => { @@ -1395,7 +1667,7 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M }, [activeTask, selectedSessionId, syncRouteSession, taskWorkbenchClient]); const createTask = useCallback( - (overrideRepoId?: string) => { + (overrideRepoId?: string, options?: { title?: string; task?: string; branch?: string; onBranch?: string }) => { void (async () => { const repoId = overrideRepoId || selectedNewTaskRepoId; if (!repoId) { @@ -1404,9 +1676,11 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M const { taskId, tabId } = await taskWorkbenchClient.createTask({ repoId, - task: "New task", + task: options?.task ?? "New task", model: "gpt-5.3-codex", - title: "New task", + title: options?.title ?? "New task", + ...(options?.branch ? { branch: options.branch } : {}), + ...(options?.onBranch ? 
{ onBranch: options.onBranch } : {}), }); await navigate({ to: "/workspaces/$workspaceId/tasks/$taskId", @@ -1418,7 +1692,7 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M }); })(); }, - [navigate, selectedNewTaskRepoId, workspaceId], + [navigate, selectedNewTaskRepoId, taskWorkbenchClient, workspaceId], ); const openDiffTab = useCallback( @@ -1447,6 +1721,14 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M const selectTask = useCallback( (id: string) => { + if (isOpenPrTaskId(id)) { + const pullRequest = openPullRequestsByTaskId.get(id); + if (!pullRequest) { + return; + } + void materializeOpenPullRequest(pullRequest); + return; + } const task = tasks.find((candidate) => candidate.id === id) ?? null; void navigate({ to: "/workspaces/$workspaceId/tasks/$taskId", @@ -1457,7 +1739,7 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M search: { sessionId: task?.tabs[0]?.id ?? undefined }, }); }, - [tasks, navigate, workspaceId], + [materializeOpenPullRequest, navigate, openPullRequestsByTaskId, tasks, workspaceId], ); const markTaskUnread = useCallback((id: string) => { @@ -1616,6 +1898,7 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M }; if (!activeTask) { + const isMaterializingSelectedOpenPr = Boolean(selectedOpenPullRequest) || materializingOpenPrId != null; return ( <> {dragRegion} @@ -1636,7 +1919,7 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M projects={projects} newTaskRepos={workspaceRepos} selectedNewTaskRepoId={selectedNewTaskRepoId} - activeId="" + activeId={selectedTaskId ?? 
""} onSelect={selectTask} onCreate={createTask} onSelectNewTaskRepo={setSelectedNewTaskRepoId} @@ -1646,6 +1929,10 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M onReorderProjects={reorderProjects} taskOrderByProject={taskOrderByProject} onReorderTasks={reorderTasks} + onReloadOrganization={() => void taskWorkbenchClient.reloadGithubOrganization()} + onReloadPullRequests={() => void taskWorkbenchClient.reloadGithubPullRequests()} + onReloadRepository={(repoId) => void taskWorkbenchClient.reloadGithubRepository(repoId)} + onReloadPullRequest={(repoId, prNumber) => void taskWorkbenchClient.reloadGithubPullRequest(repoId, prNumber)} onToggleSidebar={() => setLeftSidebarOpen(false)} />
@@ -1712,6 +1999,14 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M {activeOrg.github.importedRepoCount > 0 && <> {activeOrg.github.importedRepoCount} repos imported so far.}

+ ) : isMaterializingSelectedOpenPr && selectedOpenPullRequest ? ( + <> + +

Creating task from pull request

+

+ Preparing a task for {selectedOpenPullRequest.title} on {selectedOpenPullRequest.headRefName}. +

+ ) : activeOrg?.github.syncStatus === "error" ? ( <>

GitHub sync failed

@@ -1766,40 +2061,7 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M - {activeOrg && (activeOrg.github.installationStatus === "install_required" || activeOrg.github.installationStatus === "reconnect_required") && ( -
- - - GitHub App {activeOrg.github.installationStatus === "install_required" ? "not installed" : "needs reconnection"} — repo sync is unavailable - -
- )} + {activeOrg && } {showDevPanel && ( void taskWorkbenchClient.reloadGithubOrganization()} + onReloadPullRequests={() => void taskWorkbenchClient.reloadGithubPullRequests()} + onReloadRepository={(repoId) => void taskWorkbenchClient.reloadGithubRepository(repoId)} + onReloadPullRequest={(repoId, prNumber) => void taskWorkbenchClient.reloadGithubPullRequest(repoId, prNumber)} onToggleSidebar={() => setLeftSidebarOpen(false)} /> @@ -1880,7 +2146,7 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M projects={projects} newTaskRepos={workspaceRepos} selectedNewTaskRepoId={selectedNewTaskRepoId} - activeId={activeTask.id} + activeId={selectedTaskId ?? activeTask.id} onSelect={(id) => { selectTask(id); setLeftSidebarPeeking(false); @@ -1893,6 +2159,10 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M onReorderProjects={reorderProjects} taskOrderByProject={taskOrderByProject} onReorderTasks={reorderTasks} + onReloadOrganization={() => void taskWorkbenchClient.reloadGithubOrganization()} + onReloadPullRequests={() => void taskWorkbenchClient.reloadGithubPullRequests()} + onReloadRepository={(repoId) => void taskWorkbenchClient.reloadGithubRepository(repoId)} + onReloadPullRequest={(repoId, prNumber) => void taskWorkbenchClient.reloadGithubPullRequest(repoId, prNumber)} onToggleSidebar={() => { setLeftSidebarPeeking(false); setLeftSidebarOpen(true); @@ -1930,6 +2200,7 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M onSidebarPeekEnd={endPeek} rightSidebarCollapsed={!rightSidebarOpen} onToggleRightSidebar={() => setRightSidebarOpen(true)} + selectedSessionHydrating={selectedSessionHydrating} onNavigateToUsage={navigateToUsage} /> @@ -1959,40 +2230,7 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M - {activeOrg && (activeOrg.github.installationStatus === "install_required" || activeOrg.github.installationStatus === "reconnect_required") 
&& ( -
- - - GitHub App {activeOrg.github.installationStatus === "install_required" ? "not installed" : "needs reconnection"} — repo sync is unavailable - -
- )} + {activeOrg && } {showDevPanel && ( void; taskOrderByProject: Record; onReorderTasks: (projectId: string, fromIndex: number, toIndex: number) => void; + onReloadOrganization: () => void; + onReloadPullRequests: () => void; + onReloadRepository: (repoId: string) => void; + onReloadPullRequest: (repoId: string, prNumber: number) => void; onToggleSidebar?: () => void; }) { const [css] = useStyletron(); @@ -88,6 +101,8 @@ export const Sidebar = memo(function Sidebar({ const contextMenu = useContextMenu(); const [collapsedProjects, setCollapsedProjects] = useState>({}); const [hoveredProjectId, setHoveredProjectId] = useState(null); + const [headerMenuOpen, setHeaderMenuOpen] = useState(false); + const headerMenuRef = useRef(null); // Mouse-based drag and drop state type DragState = @@ -149,6 +164,20 @@ export const Sidebar = memo(function Sidebar({ }; }, [drag, onReorderProjects, onReorderTasks]); + useEffect(() => { + if (!headerMenuOpen) { + return; + } + const onMouseDown = (event: MouseEvent) => { + if (headerMenuRef.current?.contains(event.target as Node)) { + return; + } + setHeaderMenuOpen(false); + }; + document.addEventListener("mousedown", onMouseDown); + return () => document.removeEventListener("mousedown", onMouseDown); + }, [headerMenuOpen]); + const [createSelectOpen, setCreateSelectOpen] = useState(false); const selectOptions = useMemo(() => newTaskRepos.map((repo) => ({ id: repo.id, label: stripCommonOrgPrefix(repo.label, newTaskRepos) })), [newTaskRepos]); @@ -326,47 +355,111 @@ export const Sidebar = memo(function Sidebar({ /> ) : ( -
{ - if (newTaskRepos.length === 0) return; - if (newTaskRepos.length === 1) { - onSelectNewTaskRepo(newTaskRepos[0]!.id); - onCreate(newTaskRepos[0]!.id); - } else { - setCreateSelectOpen(true); - } - }} - onKeyDown={(event) => { - if (newTaskRepos.length === 0) return; - if (event.key === "Enter" || event.key === " ") { +
+ + {headerMenuOpen ? ( +
+ + +
+ ) : null} +
{ + if (newTaskRepos.length === 0) return; if (newTaskRepos.length === 1) { onSelectNewTaskRepo(newTaskRepos[0]!.id); onCreate(newTaskRepos[0]!.id); } else { setCreateSelectOpen(true); } - } - }} - className={css({ - width: "26px", - height: "26px", - borderRadius: "8px", - backgroundColor: newTaskRepos.length > 0 ? t.borderMedium : t.interactiveHover, - color: t.textPrimary, - cursor: newTaskRepos.length > 0 ? "pointer" : "not-allowed", - display: "flex", - alignItems: "center", - justifyContent: "center", - transition: "background 200ms ease", - flexShrink: 0, - opacity: newTaskRepos.length > 0 ? 1 : 0.6, - ":hover": newTaskRepos.length > 0 ? { backgroundColor: "rgba(255, 255, 255, 0.20)" } : undefined, - })} - > - + }} + onKeyDown={(event) => { + if (newTaskRepos.length === 0) return; + if (event.key === "Enter" || event.key === " ") { + if (newTaskRepos.length === 1) { + onSelectNewTaskRepo(newTaskRepos[0]!.id); + onCreate(newTaskRepos[0]!.id); + } else { + setCreateSelectOpen(true); + } + } + }} + className={css({ + width: "26px", + height: "26px", + borderRadius: "8px", + backgroundColor: newTaskRepos.length > 0 ? t.borderMedium : t.interactiveHover, + color: t.textPrimary, + cursor: newTaskRepos.length > 0 ? "pointer" : "not-allowed", + display: "flex", + alignItems: "center", + justifyContent: "center", + transition: "background 200ms ease", + flexShrink: 0, + opacity: newTaskRepos.length > 0 ? 1 : 0.6, + ":hover": newTaskRepos.length > 0 ? { backgroundColor: "rgba(255, 255, 255, 0.20)" } : undefined, + })} + > + +
)} @@ -431,6 +524,12 @@ export const Sidebar = memo(function Sidebar({ })); } }} + onContextMenu={(event) => + contextMenu.open(event, [ + { label: "Reload repository", onClick: () => onReloadRepository(project.id) }, + { label: "New task", onClick: () => onCreate(project.id) }, + ]) + } data-project-header className={css({ display: "flex", @@ -499,13 +598,13 @@ export const Sidebar = memo(function Sidebar({ height: "26px", borderRadius: "6px", border: "none", - background: "none", + backgroundColor: "transparent", padding: 0, margin: 0, cursor: "pointer", color: t.textTertiary, opacity: hoveredProjectId === project.id ? 1 : 0, - transition: "opacity 150ms ease, background 200ms ease, color 200ms ease", + transition: "opacity 150ms ease, background-color 200ms ease, color 200ms ease", pointerEvents: hoveredProjectId === project.id ? "auto" : "none", ":hover": { backgroundColor: t.interactiveHover, color: t.textSecondary }, })} @@ -519,12 +618,14 @@ export const Sidebar = memo(function Sidebar({ {!isCollapsed && orderedTasks.map((task, taskIndex) => { const isActive = task.id === activeId; + const isPullRequestItem = isPullRequestSidebarItem(task); const isDim = task.status === "archived"; const isRunning = task.tabs.some((tab) => tab.status === "running"); const isProvisioning = - String(task.status).startsWith("init_") || - task.status === "new" || - task.tabs.some((tab) => tab.status === "pending_provision" || tab.status === "pending_session_create"); + !isPullRequestItem && + (String(task.status).startsWith("init_") || + task.status === "new" || + task.tabs.some((tab) => tab.status === "pending_provision" || tab.status === "pending_session_create")); const hasUnread = task.tabs.some((tab) => tab.unread); const isDraft = task.pullRequest == null || task.pullRequest.status === "draft"; const totalAdded = task.fileChanges.reduce((sum, file) => sum + file.added, 0); @@ -554,13 +655,20 @@ export const Sidebar = memo(function Sidebar({ onSelect(task.id); } }} - 
onContextMenu={(event) => + onContextMenu={(event) => { + if (isPullRequestItem && task.pullRequest) { + contextMenu.open(event, [ + { label: "Reload pull request", onClick: () => onReloadPullRequest(task.repoId, task.pullRequest!.number) }, + { label: "Create task", onClick: () => onSelect(task.id) }, + ]); + return; + } contextMenu.open(event, [ { label: "Rename task", onClick: () => onRenameTask(task.id) }, { label: "Rename branch", onClick: () => onRenameBranch(task.id) }, { label: "Mark as unread", onClick: () => onMarkUnread(task.id) }, - ]) - } + ]); + }} className={css({ padding: "8px 12px", borderRadius: "8px", @@ -596,21 +704,32 @@ export const Sidebar = memo(function Sidebar({ flexShrink: 0, })} > - + {isPullRequestItem ? ( + + ) : ( + + )} +
+
+ + {task.title} + + {isPullRequestItem && task.statusMessage ? ( + + {task.statusMessage} + + ) : null}
- - {task.title} - {task.pullRequest != null ? ( diff --git a/foundry/packages/frontend/src/components/mock-layout/terminal-pane.tsx b/foundry/packages/frontend/src/components/mock-layout/terminal-pane.tsx index bc10cad..ca9326a 100644 --- a/foundry/packages/frontend/src/components/mock-layout/terminal-pane.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/terminal-pane.tsx @@ -543,7 +543,10 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl gap: "6px", minHeight: "39px", maxHeight: "39px", - padding: "0 14px", + paddingTop: "0", + paddingRight: "14px", + paddingBottom: "0", + paddingLeft: "14px", borderTop: `1px solid ${t.borderDefault}`, backgroundColor: t.surfacePrimary, flexShrink: 0, diff --git a/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx b/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx index d5b4b2f..808c4a6 100644 --- a/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx @@ -134,7 +134,6 @@ export const TranscriptHeader = memo(function TranscriptHeader({ className={css({ appearance: "none", WebkitAppearance: "none", - background: "none", margin: "0", outline: "none", padding: "2px 8px", diff --git a/foundry/packages/frontend/src/components/mock-layout/ui.tsx b/foundry/packages/frontend/src/components/mock-layout/ui.tsx index 8ede752..a036030 100644 --- a/foundry/packages/frontend/src/components/mock-layout/ui.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/ui.tsx @@ -299,7 +299,10 @@ export const PanelHeaderBar = styled("div", ({ $theme }) => { alignItems: "center", minHeight: HEADER_HEIGHT, maxHeight: HEADER_HEIGHT, - padding: "0 14px", + paddingTop: "0", + paddingRight: "14px", + paddingBottom: "0", + paddingLeft: "14px", borderBottom: `1px solid ${t.borderDefault}`, backgroundColor: t.surfaceTertiary, gap: "8px", diff --git 
a/foundry/packages/shared/src/app-shell.ts b/foundry/packages/shared/src/app-shell.ts index d21370f..31ee235 100644 --- a/foundry/packages/shared/src/app-shell.ts +++ b/foundry/packages/shared/src/app-shell.ts @@ -50,6 +50,8 @@ export interface FoundryGithubState { importedRepoCount: number; lastSyncLabel: string; lastSyncAt: number | null; + lastWebhookAt: number | null; + lastWebhookEvent: string; } export interface FoundryOrganizationSettings { diff --git a/foundry/packages/shared/src/realtime-events.ts b/foundry/packages/shared/src/realtime-events.ts index c539adc..739dd51 100644 --- a/foundry/packages/shared/src/realtime-events.ts +++ b/foundry/packages/shared/src/realtime-events.ts @@ -1,5 +1,5 @@ import type { FoundryAppSnapshot } from "./app-shell.js"; -import type { WorkbenchRepoSummary, WorkbenchSessionDetail, WorkbenchTaskDetail, WorkbenchTaskSummary } from "./workbench.js"; +import type { WorkbenchOpenPrSummary, WorkbenchRepoSummary, WorkbenchSessionDetail, WorkbenchTaskDetail, WorkbenchTaskSummary } from "./workbench.js"; export interface SandboxProcessSnapshot { id: string; @@ -21,7 +21,9 @@ export type WorkspaceEvent = | { type: "taskRemoved"; taskId: string } | { type: "repoAdded"; repo: WorkbenchRepoSummary } | { type: "repoUpdated"; repo: WorkbenchRepoSummary } - | { type: "repoRemoved"; repoId: string }; + | { type: "repoRemoved"; repoId: string } + | { type: "pullRequestUpdated"; pullRequest: WorkbenchOpenPrSummary } + | { type: "pullRequestRemoved"; prId: string }; /** Task-level events broadcast by the task actor. 
*/ export type TaskEvent = { type: "taskDetailUpdated"; detail: WorkbenchTaskDetail }; diff --git a/foundry/packages/shared/src/workbench.ts b/foundry/packages/shared/src/workbench.ts index e549c6e..078ed31 100644 --- a/foundry/packages/shared/src/workbench.ts +++ b/foundry/packages/shared/src/workbench.ts @@ -105,6 +105,21 @@ export interface WorkbenchPullRequestSummary { status: "draft" | "ready"; } +export interface WorkbenchOpenPrSummary { + prId: string; + repoId: string; + repoFullName: string; + number: number; + title: string; + state: string; + url: string; + headRefName: string; + baseRefName: string; + authorLogin: string | null; + isDraft: boolean; + updatedAtMs: number; +} + export interface WorkbenchSandboxSummary { providerId: ProviderId; sandboxId: string; @@ -161,6 +176,7 @@ export interface WorkspaceSummarySnapshot { workspaceId: string; repos: WorkbenchRepoSummary[]; taskSummaries: WorkbenchTaskSummary[]; + openPullRequests: WorkbenchOpenPrSummary[]; } /** @@ -229,6 +245,7 @@ export interface TaskWorkbenchCreateTaskInput { task: string; title?: string; branch?: string; + onBranch?: string; model?: WorkbenchModelId; } diff --git a/foundry/research/specs/github-data-actor.md b/foundry/research/specs/github-data-actor.md new file mode 100644 index 0000000..d3af6ab --- /dev/null +++ b/foundry/research/specs/github-data-actor.md @@ -0,0 +1,169 @@ +# Spec: GitHub Data Actor & Webhook-Driven State + +## Summary + +Replace the per-repo polling PR sync actor (`ProjectPrSyncActor`) and per-repo PR cache (`prCache` table) with a single organization-scoped `github-state` actor that owns all GitHub data (repos, PRs, members). All GitHub state updates flow exclusively through webhooks, with a one-shot full sync on initial connection. Manual reload actions are exposed per-entity (org, repo, PR) for recovery from missed webhooks. 
+ +Open PRs are surfaced in the left sidebar alongside tasks via a unified workspace interest topic, with lazy task/sandbox creation when a user clicks on a PR. + +## Reference Implementation + +A prior implementation of the `github-state` actor exists in git checkpoint `0aca2c7` (from PR #247 "Refactor Foundry GitHub state and sandbox runtime"). This was never merged to a branch but contains working code for: + +- `foundry/packages/backend/src/actors/github-state/index.ts` — full actor with DB, sync workflow, webhook handler, PR CRUD +- `foundry/packages/backend/src/actors/github-state/db/schema.ts` — `github_meta`, `github_repositories`, `github_members`, `github_pull_requests` tables +- `foundry/packages/backend/src/actors/organization/app-shell.ts` lines 1056-1180 — webhook dispatch to `githubState.handlePullRequestWebhook()` and `githubState.fullSync()` + +Use `git show 0aca2c7:` to read the reference files. Adapt (don't copy blindly) — the current branch structure has diverged. + +## Constraints + +1. **No polling.** Delete `ProjectPrSyncActor` (`actors/project-pr-sync/`), all references to it in handles/keys/index, and the `prCache` table in `ProjectActor`'s DB schema. Remove `prSyncStatus`/`prSyncAt` from `getRepoOverview`. +2. **Keep `ProjectBranchSyncActor`.** This polls the local git clone (not GitHub API) and is the sandbox git status mechanism. It stays. +3. **Webhooks are the sole live update path.** The only GitHub API calls happen during: + - Initial full sync on org connection/installation + - Manual reload actions (per-entity) +4. **GitHub does not auto-retry failed webhook deliveries** ([docs](https://docs.github.com/en/webhooks/using-webhooks/handling-failed-webhook-deliveries)). Manual reload is the recovery mechanism. +5. **No `user-github-data` actor in this spec.** OAuth/auth is already handled correctly on the current branch. Only the org-scoped `github-state` actor is in scope. 
+ +## Architecture + +### Actor: `github-state` (one per organization) + +**Key:** `["org", organizationId, "github"]` + +**DB tables:** +- `github_meta` — sync status, installation info, connected account +- `github_repositories` — repos accessible via the GitHub App installation +- `github_pull_requests` — all open PRs across all repos in the org +- `github_members` — org members (existing from checkpoint, keep for completeness) + +**Actions (from checkpoint, to adapt):** +- `fullSync(input)` — one-shot fetch of repos + PRs via installation token. Enqueues as a workflow step. Used on initial connection and `installation.created`/`unsuspend` webhooks. +- `handlePullRequestWebhook(input)` — upserts a single PR from webhook payload, notifies downstream. +- `getSummary()` — returns sync meta + row counts. +- `listRepositories()` — returns all known repos. +- `listPullRequestsForRepository({ repoId })` — returns PRs for a repo. +- `getPullRequestForBranch({ repoId, branchName })` — returns PR info for a branch. +- `createPullRequest({ repoId, repoPath, branchName, title, body })` — creates PR via GitHub API, stores locally. +- `clearState(input)` — wipes all data (on `installation.deleted`, `suspend`). + +**New actions (not in checkpoint):** +- `reloadOrganization()` — re-fetches repos + members from GitHub API (not PRs). Updates `github_repositories` and `github_members`. Notifies downstream. +- `reloadRepository({ repoId })` — re-fetches metadata for a single repo from GitHub API. Updates the `github_repositories` row. Does NOT re-fetch PRs. +- `reloadPullRequest({ repoId, prNumber })` — re-fetches a single PR from GitHub API by number. Updates the `github_pull_requests` row. Notifies downstream. 
+ +### Webhook Dispatch (in app-shell) + +Replace the current TODO at `app-shell.ts:1521` with dispatch logic adapted from checkpoint `0aca2c7:foundry/packages/backend/src/actors/organization/app-shell.ts` lines 1056-1180: + +| Webhook event | Action | +|---|---| +| `installation.created` | `githubState.fullSync({ force: true })` | +| `installation.deleted` | `githubState.clearState(...)` | +| `installation.suspend` | `githubState.clearState(...)` | +| `installation.unsuspend` | `githubState.fullSync({ force: true })` | +| `installation_repositories` | `githubState.fullSync({ force: true })` | +| `pull_request` (any action) | `githubState.handlePullRequestWebhook(...)` | +| `push`, `create`, `delete`, `check_run`, `check_suite`, `status`, `pull_request_review`, `pull_request_review_comment` | Log for now, extend later | + +### Downstream Notifications + +When `github-state` receives a PR update (webhook or manual reload), it should: + +1. Update its own `github_pull_requests` table +2. Call `notifyOrganizationUpdated()` → which broadcasts `workspaceUpdated` to connected clients +3. If the PR branch matches an existing task's branch, update that task's `pullRequest` summary in the workspace actor + +### Workspace Summary Changes + +Extend `WorkspaceSummarySnapshot` to include open PRs: + +```typescript +export interface WorkspaceSummarySnapshot { + workspaceId: string; + repos: WorkbenchRepoSummary[]; + taskSummaries: WorkbenchTaskSummary[]; + openPullRequests: WorkbenchOpenPrSummary[]; // NEW +} + +export interface WorkbenchOpenPrSummary { + prId: string; // "repoId#number" + repoId: string; + repoFullName: string; + number: number; + title: string; + state: string; + url: string; + headRefName: string; + baseRefName: string; + authorLogin: string | null; + isDraft: boolean; + updatedAtMs: number; +} +``` + +The workspace actor fetches open PRs from the `github-state` actor when building the summary snapshot. 
PRs that already have an associated task (matched by branch name) should be excluded from `openPullRequests` (they already appear in `taskSummaries` with their `pullRequest` field populated). + +### Interest Manager + +The `workspace` interest topic already returns `WorkspaceSummarySnapshot`. Adding `openPullRequests` to that type means the sidebar automatically gets PR data without a new topic. + +`workspaceUpdated` events should include a new variant for PR changes: +```typescript +{ type: "pullRequestUpdated", pullRequest: WorkbenchOpenPrSummary } +{ type: "pullRequestRemoved", prId: string } +``` + +### Sidebar Changes + +The left sidebar currently renders `projects: ProjectSection[]` where each project has `tasks: Task[]`. Extend this to include open PRs as lightweight entries within each project section: + +- Open PRs appear in the same list as tasks, sorted by `updatedAtMs` +- PRs should be visually distinct: show PR icon instead of task indicator, display `#number` and author +- Clicking a PR creates a task lazily (creates the task + sandbox on demand), then navigates to it +- PRs that already have a task are filtered out (they show as the task instead) + +This is similar to what `buildPrTasks()` does in the mock data (`workbench-model.ts:1154-1182`), but driven by real data from the `github-state` actor. + +### Frontend: Manual Reload + +Add a "three dots" menu button in the top-right of the sidebar header. Dropdown options: + +- **Reload organization** — calls `githubState.reloadOrganization()` via backend API +- **Reload all PRs** — calls `githubState.fullSync({ force: true })` (convenience shortcut) + +For per-repo and per-PR reload, add context menu options: +- Right-click a project header → "Reload repository" +- Right-click a PR entry → "Reload pull request" + +These call the corresponding `reloadRepository`/`reloadPullRequest` actions on the `github-state` actor. + +## Deletions + +Files/code to remove: + +1. 
`foundry/packages/backend/src/actors/project-pr-sync/` — entire directory +2. `foundry/packages/backend/src/actors/project/db/schema.ts` — `prCache` table +3. `foundry/packages/backend/src/actors/project/actions.ts` — `applyPrSyncResultMutation`, `getPullRequestForBranch` (moves to github-state), `prSyncStatus`/`prSyncAt` from `getRepoOverview` +4. `foundry/packages/backend/src/actors/handles.ts` — `getOrCreateProjectPrSync`, `selfProjectPrSync` +5. `foundry/packages/backend/src/actors/keys.ts` — any PR sync key helper +6. `foundry/packages/backend/src/actors/index.ts` — `projectPrSync` import and registration +7. All call sites in `ProjectActor` that spawn or call the PR sync actor (`initProject`, `refreshProject`) + +## Migration Path + +The `prCache` table in `ProjectActor`'s DB can simply be dropped — no data migration needed since the `github-state` actor will re-fetch everything on its first `fullSync`. Existing task `pullRequest` fields are populated from the github-state actor going forward. + +## Implementation Order + +1. Create `github-state` actor (adapt from checkpoint `0aca2c7`) +2. Wire up actor in registry, handles, keys +3. Implement webhook dispatch in app-shell (replace TODO) +4. Delete `ProjectPrSyncActor` and `prCache` from project actor +5. Add manual reload actions to github-state +6. Extend `WorkspaceSummarySnapshot` with `openPullRequests` +7. Wire through interest manager + workspace events +8. Update sidebar to render open PRs +9. Add three-dots menu with reload options +10. 
Update task creation flow for lazy PR→task conversion From 400f9a214e0740bd8172ba12813d8159f49a9397 Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Sat, 14 Mar 2026 17:55:05 -0700 Subject: [PATCH 03/48] Add transcript virtualization to Foundry UI (#255) --- docs/react-components.mdx | 10 + .../packages/client/src/workbench-model.ts | 64 ++ foundry/packages/frontend/package.json | 3 +- .../frontend/src/components/mock-layout.tsx | 67 +- .../components/mock-layout/message-list.tsx | 84 ++- .../src/components/mock-layout/sidebar.tsx | 652 +++++++----------- frontend/packages/inspector/src/App.tsx | 8 +- .../src/components/chat/ChatPanel.tsx | 8 +- .../components/chat/InspectorConversation.tsx | 9 +- pnpm-lock.yaml | 58 +- sdks/react/package.json | 1 + sdks/react/src/AgentConversation.tsx | 24 +- sdks/react/src/AgentTranscript.tsx | 309 ++++++--- sdks/react/src/index.ts | 1 + sdks/react/src/useTranscriptVirtualizer.ts | 58 ++ 15 files changed, 780 insertions(+), 576 deletions(-) create mode 100644 sdks/react/src/useTranscriptVirtualizer.ts diff --git a/docs/react-components.mdx b/docs/react-components.mdx index 0fa41b0..93183b2 100644 --- a/docs/react-components.mdx +++ b/docs/react-components.mdx @@ -12,6 +12,7 @@ Current exports: - `ProcessTerminal` for attaching to a running tty process - `AgentTranscript` for rendering session/message timelines without bundling any styles - `ChatComposer` for a reusable prompt input/send surface +- `useTranscriptVirtualizer` for wiring large transcript lists to a scroll container ## Install @@ -184,11 +185,20 @@ Useful props: - `className`: root class hook - `classNames`: slot-level class hooks for styling from outside the package +- `scrollRef` + `virtualize`: opt into TanStack Virtual against an external scroll container - `renderMessageText`: custom text or markdown renderer - `renderToolItemIcon`, `renderToolGroupIcon`, `renderChevron`, `renderEventLinkContent`: presentation overrides - `renderInlinePendingIndicator`, 
`renderThinkingState`: loading/thinking UI overrides - `isDividerEntry`, `canOpenEvent`, `getToolGroupSummary`: behavior overrides for grouping and labels +## Transcript virtualization hook + +`useTranscriptVirtualizer` exposes the same TanStack Virtual behavior used by `AgentTranscript` when `virtualize` is enabled. + +- Pass the grouped transcript rows you want to virtualize +- Pass a `scrollRef` that points at the actual scrollable element +- Use it when you need transcript-aware virtualization outside the stock `AgentTranscript` renderer + ## Composer and conversation `ChatComposer` is the headless message input. `AgentConversation` composes `AgentTranscript` and `ChatComposer` so apps can reuse the transcript/composer pairing without pulling in Inspector session chrome. diff --git a/foundry/packages/client/src/workbench-model.ts b/foundry/packages/client/src/workbench-model.ts index 206d08a..2affb4d 100644 --- a/foundry/packages/client/src/workbench-model.ts +++ b/foundry/packages/client/src/workbench-model.ts @@ -235,6 +235,41 @@ function minutesAgo(minutes: number): number { return NOW_MS - minutes * 60_000; } +function buildTranscriptStressMessages(pairCount: number): LegacyMessage[] { + const startedAtMs = NOW_MS - pairCount * 8_000; + const messages: LegacyMessage[] = []; + + for (let index = 0; index < pairCount; index++) { + const sequence = index + 1; + const createdAtMs = startedAtMs + index * 8_000; + + messages.push({ + id: `stress-user-${sequence}`, + role: "user", + agent: null, + createdAtMs, + lines: [ + `Stress prompt ${sequence}: summarize the current state of the transcript virtualizer.`, + `Keep the answer focused on scroll position, render cost, and preserved expansion state.`, + ], + }); + + messages.push({ + id: `stress-agent-${sequence}`, + role: "agent", + agent: "codex", + createdAtMs: createdAtMs + 3_000, + lines: [ + `Stress reply ${sequence}: the list should only render visible rows plus overscan while preserving scroll anchoring 
near the bottom.`, + `Grouping, minimap navigation, and per-row UI should remain stable even as older rows unmount.`, + ], + durationMs: 2_500, + }); + } + + return messages; +} + export function parseDiffLines(diff: string): ParsedDiffLine[] { return diff.split("\n").map((text, index) => { if (text.startsWith("@@")) { @@ -1189,6 +1224,35 @@ export function buildInitialTasks(): Task[] { fileTree: [], minutesUsed: 0, }, + { + id: "stress-transcript", + repoId: "sandbox-agent", + title: "Transcript virtualization stress test", + status: "idle", + repoName: "rivet-dev/sandbox-agent", + updatedAtMs: minutesAgo(40), + branch: "perf/transcript-virtualizer", + pullRequest: null, + tabs: [ + { + id: "stress-transcript-tab", + sessionId: "stress-transcript-session", + sessionName: "Virtualizer stress session", + agent: "Codex", + model: "gpt-5.3-codex", + status: "idle", + thinkingSinceMs: null, + unread: false, + created: true, + draft: { text: "", attachments: [], updatedAtMs: null }, + transcript: transcriptFromLegacyMessages("stress-transcript-tab", buildTranscriptStressMessages(1600)), + }, + ], + fileChanges: [], + diffs: {}, + fileTree: [], + minutesUsed: 18, + }, { id: "status-running", repoId: "sandbox-agent", diff --git a/foundry/packages/frontend/package.json b/foundry/packages/frontend/package.json index 6a2e3c4..793a12d 100644 --- a/foundry/packages/frontend/package.json +++ b/foundry/packages/frontend/package.json @@ -10,11 +10,12 @@ "test": "vitest run" }, "dependencies": { - "@sandbox-agent/react": "workspace:*", "@sandbox-agent/foundry-client": "workspace:*", "@sandbox-agent/foundry-shared": "workspace:*", + "@sandbox-agent/react": "workspace:*", "@tanstack/react-query": "^5.85.5", "@tanstack/react-router": "^1.132.23", + "@tanstack/react-virtual": "^3.13.22", "baseui": "^16.1.1", "lucide-react": "^0.542.0", "react": "^19.1.1", diff --git a/foundry/packages/frontend/src/components/mock-layout.tsx b/foundry/packages/frontend/src/components/mock-layout.tsx 
index be995c0..e0f6803 100644 --- a/foundry/packages/frontend/src/components/mock-layout.tsx +++ b/foundry/packages/frontend/src/components/mock-layout.tsx @@ -391,20 +391,6 @@ const TranscriptPanel = memo(function TranscriptPanel({ textarea.style.overflowY = textarea.scrollHeight > PROMPT_TEXTAREA_MAX_HEIGHT ? "auto" : "hidden"; }, [draft, activeTabId, task.id]); - useEffect(() => { - if (!pendingHistoryTarget || activeTabId !== pendingHistoryTarget.tabId) { - return; - } - - const targetNode = messageRefs.current.get(pendingHistoryTarget.messageId); - if (!targetNode) { - return; - } - - targetNode.scrollIntoView({ behavior: "smooth", block: "center" }); - setPendingHistoryTarget(null); - }, [activeMessages.length, activeTabId, pendingHistoryTarget]); - useEffect(() => { if (!copiedMessageId) { return; @@ -694,13 +680,6 @@ const TranscriptPanel = memo(function TranscriptPanel({ if (activeTabId !== event.tabId) { switchTab(event.tabId); - return; - } - - const targetNode = messageRefs.current.get(event.messageId); - if (targetNode) { - targetNode.scrollIntoView({ behavior: "smooth", block: "center" }); - setPendingHistoryTarget(null); } }, [activeTabId, switchTab], @@ -932,6 +911,8 @@ const TranscriptPanel = memo(function TranscriptPanel({ messageRefs={messageRefs} historyEvents={historyEvents} onSelectHistoryEvent={jumpToHistoryEvent} + targetMessageId={pendingHistoryTarget && activeTabId === pendingHistoryTarget.tabId ? 
pendingHistoryTarget.messageId : null} + onTargetMessageResolved={() => setPendingHistoryTarget(null)} copiedMessageId={copiedMessageId} onCopyMessage={(message) => { void copyMessage(message); @@ -1382,16 +1363,7 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M void navigate({ to: "/organizations/$organizationId/billing" as never, params: { organizationId: activeOrg.id } as never }); } }, [activeOrg, navigate]); - const [projectOrder, setProjectOrder] = useState(null); - const projects = useMemo(() => { - if (!projectOrder) return rawProjects; - const byId = new Map(rawProjects.map((p) => [p.id, p])); - const ordered = projectOrder.map((id) => byId.get(id)).filter(Boolean) as typeof rawProjects; - for (const p of rawProjects) { - if (!projectOrder.includes(p.id)) ordered.push(p); - } - return ordered; - }, [rawProjects, projectOrder]); + const projects = rawProjects; const [activeTabIdByTask, setActiveTabIdByTask] = useState>({}); const [lastAgentTabIdByTask, setLastAgentTabIdByTask] = useState>({}); const [openDiffsByTask, setOpenDiffsByTask] = useState>({}); @@ -1418,30 +1390,6 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M peekTimeoutRef.current = setTimeout(() => setLeftSidebarPeeking(false), 200); }, []); - const reorderProjects = useCallback( - (fromIndex: number, toIndex: number) => { - const ids = projects.map((p) => p.id); - const [moved] = ids.splice(fromIndex, 1); - ids.splice(toIndex, 0, moved!); - setProjectOrder(ids); - }, - [projects], - ); - - const [taskOrderByProject, setTaskOrderByProject] = useState>({}); - const reorderTasks = useCallback( - (projectId: string, fromIndex: number, toIndex: number) => { - const project = projects.find((p) => p.id === projectId); - if (!project) return; - const currentOrder = taskOrderByProject[projectId] ?? 
project.tasks.map((t) => t.id); - const ids = [...currentOrder]; - const [moved] = ids.splice(fromIndex, 1); - ids.splice(toIndex, 0, moved!); - setTaskOrderByProject((prev) => ({ ...prev, [projectId]: ids })); - }, - [projects, taskOrderByProject], - ); - useEffect(() => { leftWidthRef.current = leftWidth; window.localStorage.setItem(LEFT_WIDTH_STORAGE_KEY, String(leftWidth)); @@ -1926,9 +1874,6 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M onMarkUnread={markTaskUnread} onRenameTask={renameTask} onRenameBranch={renameBranch} - onReorderProjects={reorderProjects} - taskOrderByProject={taskOrderByProject} - onReorderTasks={reorderTasks} onReloadOrganization={() => void taskWorkbenchClient.reloadGithubOrganization()} onReloadPullRequests={() => void taskWorkbenchClient.reloadGithubPullRequests()} onReloadRepository={(repoId) => void taskWorkbenchClient.reloadGithubRepository(repoId)} @@ -2101,9 +2046,6 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M onMarkUnread={markTaskUnread} onRenameTask={renameTask} onRenameBranch={renameBranch} - onReorderProjects={reorderProjects} - taskOrderByProject={taskOrderByProject} - onReorderTasks={reorderTasks} onReloadOrganization={() => void taskWorkbenchClient.reloadGithubOrganization()} onReloadPullRequests={() => void taskWorkbenchClient.reloadGithubPullRequests()} onReloadRepository={(repoId) => void taskWorkbenchClient.reloadGithubRepository(repoId)} @@ -2156,9 +2098,6 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M onMarkUnread={markTaskUnread} onRenameTask={renameTask} onRenameBranch={renameBranch} - onReorderProjects={reorderProjects} - taskOrderByProject={taskOrderByProject} - onReorderTasks={reorderTasks} onReloadOrganization={() => void taskWorkbenchClient.reloadGithubOrganization()} onReloadPullRequests={() => void taskWorkbenchClient.reloadGithubPullRequests()} onReloadRepository={(repoId) => void 
taskWorkbenchClient.reloadGithubRepository(repoId)} diff --git a/foundry/packages/frontend/src/components/mock-layout/message-list.tsx b/foundry/packages/frontend/src/components/mock-layout/message-list.tsx index 7068268..5fcb4f9 100644 --- a/foundry/packages/frontend/src/components/mock-layout/message-list.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/message-list.tsx @@ -1,5 +1,5 @@ import { AgentTranscript, type AgentTranscriptClassNames, type TranscriptEntry } from "@sandbox-agent/react"; -import { memo, useMemo, type MutableRefObject, type Ref } from "react"; +import { memo, useEffect, useMemo, type MutableRefObject, type RefObject } from "react"; import { useStyletron } from "baseui"; import { LabelSmall, LabelXSmall } from "baseui/typography"; import { Copy } from "lucide-react"; @@ -14,11 +14,15 @@ const TranscriptMessageBody = memo(function TranscriptMessageBody({ messageRefs, copiedMessageId, onCopyMessage, + isTarget, + onTargetRendered, }: { message: Message; messageRefs: MutableRefObject>; copiedMessageId: string | null; onCopyMessage: (message: Message) => void; + isTarget?: boolean; + onTargetRendered?: () => void; }) { const [css] = useStyletron(); const t = useFoundryTokens(); @@ -27,6 +31,20 @@ const TranscriptMessageBody = memo(function TranscriptMessageBody({ const messageTimestamp = formatMessageTimestamp(message.createdAtMs); const displayFooter = isUser ? messageTimestamp : message.durationMs ? `${messageTimestamp} • Took ${formatMessageDuration(message.durationMs)}` : null; + useEffect(() => { + if (!isTarget) { + return; + } + + const targetNode = messageRefs.current.get(message.id); + if (!targetNode) { + return; + } + + targetNode.scrollIntoView({ behavior: "smooth", block: "center" }); + onTargetRendered?.(); + }, [isTarget, message.id, messageRefs, onTargetRendered]); + return (
{ @@ -127,15 +145,19 @@ export const MessageList = memo(function MessageList({ messageRefs, historyEvents, onSelectHistoryEvent, + targetMessageId, + onTargetMessageResolved, copiedMessageId, onCopyMessage, thinkingTimerLabel, }: { tab: AgentTab | null | undefined; - scrollRef: Ref; + scrollRef: RefObject; messageRefs: MutableRefObject>; historyEvents: HistoryEvent[]; onSelectHistoryEvent: (event: HistoryEvent) => void; + targetMessageId?: string | null; + onTargetMessageResolved?: () => void; copiedMessageId: string | null; onCopyMessage: (message: Message) => void; thinkingTimerLabel: string | null; @@ -144,6 +166,7 @@ export const MessageList = memo(function MessageList({ const t = useFoundryTokens(); const messages = useMemo(() => buildDisplayMessages(tab), [tab]); const messagesById = useMemo(() => new Map(messages.map((message) => [message.id, message])), [messages]); + const messageIndexById = useMemo(() => new Map(messages.map((message, index) => [message.id, index])), [messages]); const transcriptEntries = useMemo( () => messages.map((message) => ({ @@ -192,6 +215,37 @@ export const MessageList = memo(function MessageList({ letterSpacing: "0.01em", }), }; + const scrollContainerClass = css({ + padding: "16px 52px 16px 20px", + display: "flex", + flexDirection: "column", + flex: 1, + minHeight: 0, + overflowY: "auto", + }); + + useEffect(() => { + if (!targetMessageId) { + return; + } + + const targetNode = messageRefs.current.get(targetMessageId); + if (targetNode) { + targetNode.scrollIntoView({ behavior: "smooth", block: "center" }); + onTargetMessageResolved?.(); + return; + } + + const targetIndex = messageIndexById.get(targetMessageId); + if (targetIndex == null) { + return; + } + + scrollRef.current?.scrollTo({ + top: Math.max(0, targetIndex * 88), + behavior: "smooth", + }); + }, [messageIndexById, messageRefs, onTargetMessageResolved, scrollRef, targetMessageId]); return ( <> @@ -201,17 +255,7 @@ export const MessageList = memo(function 
MessageList({ } `} {historyEvents.length > 0 ? : null} -
+
{tab && transcriptEntries.length === 0 ? (
{ const message = messagesById.get(entry.id); if (!message) { return null; } - return ; + return ( + + ); }} isThinking={Boolean(tab && tab.status === "running" && transcriptEntries.length > 0)} renderThinkingState={() => ( diff --git a/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx b/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx index 484619a..12f7ab9 100644 --- a/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx @@ -1,6 +1,7 @@ import { memo, useCallback, useEffect, useLayoutEffect, useMemo, useRef, useState } from "react"; import { createPortal } from "react-dom"; import { useNavigate } from "@tanstack/react-router"; +import { useVirtualizer } from "@tanstack/react-virtual"; import { useStyletron } from "baseui"; import { LabelSmall, LabelXSmall } from "baseui/typography"; import { Select, type Value } from "baseui/select"; @@ -68,9 +69,6 @@ export const Sidebar = memo(function Sidebar({ onMarkUnread, onRenameTask, onRenameBranch, - onReorderProjects, - taskOrderByProject, - onReorderTasks, onReloadOrganization, onReloadPullRequests, onReloadRepository, @@ -87,9 +85,6 @@ export const Sidebar = memo(function Sidebar({ onMarkUnread: (id: string) => void; onRenameTask: (id: string) => void; onRenameBranch: (id: string) => void; - onReorderProjects: (fromIndex: number, toIndex: number) => void; - taskOrderByProject: Record; - onReorderTasks: (projectId: string, fromIndex: number, toIndex: number) => void; onReloadOrganization: () => void; onReloadPullRequests: () => void; onReloadRepository: (repoId: string) => void; @@ -103,66 +98,7 @@ export const Sidebar = memo(function Sidebar({ const [hoveredProjectId, setHoveredProjectId] = useState(null); const [headerMenuOpen, setHeaderMenuOpen] = useState(false); const headerMenuRef = useRef(null); - - // Mouse-based drag and drop state - type DragState = - | { type: "project"; fromIdx: number; overIdx: 
number | null } - | { type: "task"; projectId: string; fromIdx: number; overIdx: number | null } - | null; - const [drag, setDrag] = useState(null); - const dragRef = useRef(null); - const startYRef = useRef(0); - const didDragRef = useRef(false); - - // Attach global mousemove/mouseup when dragging - useEffect(() => { - if (!drag) return; - const onMove = (e: MouseEvent) => { - // Detect which element is under the cursor using data attributes - const el = document.elementFromPoint(e.clientX, e.clientY); - if (!el) return; - const projectEl = (el as HTMLElement).closest?.("[data-project-idx]") as HTMLElement | null; - const taskEl = (el as HTMLElement).closest?.("[data-task-idx]") as HTMLElement | null; - - if (drag.type === "project" && projectEl) { - const overIdx = Number(projectEl.dataset.projectIdx); - if (overIdx !== drag.overIdx) { - setDrag({ ...drag, overIdx }); - dragRef.current = { ...drag, overIdx }; - } - } else if (drag.type === "task" && taskEl) { - const overProjectId = taskEl.dataset.taskProjectId ?? 
""; - const overIdx = Number(taskEl.dataset.taskIdx); - if (overProjectId === drag.projectId && overIdx !== drag.overIdx) { - setDrag({ ...drag, overIdx }); - dragRef.current = { ...drag, overIdx }; - } - } - // Mark that we actually moved (to distinguish from clicks) - if (Math.abs(e.clientY - startYRef.current) > 4) { - didDragRef.current = true; - } - }; - const onUp = () => { - const d = dragRef.current; - if (d && didDragRef.current && d.overIdx !== null && d.fromIdx !== d.overIdx) { - if (d.type === "project") { - onReorderProjects(d.fromIdx, d.overIdx); - } else { - onReorderTasks(d.projectId, d.fromIdx, d.overIdx); - } - } - dragRef.current = null; - didDragRef.current = false; - setDrag(null); - }; - document.addEventListener("mousemove", onMove); - document.addEventListener("mouseup", onUp); - return () => { - document.removeEventListener("mousemove", onMove); - document.removeEventListener("mouseup", onUp); - }; - }, [drag, onReorderProjects, onReorderTasks]); + const scrollRef = useRef(null); useEffect(() => { if (!headerMenuOpen) { @@ -180,6 +116,26 @@ export const Sidebar = memo(function Sidebar({ const [createSelectOpen, setCreateSelectOpen] = useState(false); const selectOptions = useMemo(() => newTaskRepos.map((repo) => ({ id: repo.id, label: stripCommonOrgPrefix(repo.label, newTaskRepos) })), [newTaskRepos]); + type FlatItem = { key: string; type: "project-header"; project: ProjectSection } | { key: string; type: "task"; project: ProjectSection; task: Task }; + const flatItems = useMemo( + () => + projects.flatMap((project) => { + const items: FlatItem[] = [{ key: `project:${project.id}`, type: "project-header", project }]; + if (!collapsedProjects[project.id]) { + items.push(...project.tasks.map((task) => ({ key: `task:${task.id}`, type: "task" as const, project, task }))); + } + return items; + }), + [collapsedProjects, projects], + ); + const virtualizer = useVirtualizer({ + count: flatItems.length, + getItemKey: (index) => 
flatItems[index]?.key ?? index, + getScrollElement: () => scrollRef.current, + estimateSize: () => 40, + overscan: 12, + measureElement: (element) => element.getBoundingClientRect().height, + }); return ( @@ -463,342 +419,270 @@ export const Sidebar = memo(function Sidebar({
)} - -
- {projects.map((project, projectIndex) => { - const isCollapsed = collapsedProjects[project.id] === true; - const isProjectDropTarget = drag?.type === "project" && drag.overIdx === projectIndex && drag.fromIdx !== projectIndex; - const isBeingDragged = drag?.type === "project" && drag.fromIdx === projectIndex && didDragRef.current; - const orderedTaskIds = taskOrderByProject[project.id]; - const orderedTasks = orderedTaskIds - ? (() => { - const byId = new Map(project.tasks.map((t) => [t.id, t])); - const sorted = orderedTaskIds.map((id) => byId.get(id)).filter(Boolean) as typeof project.tasks; - for (const t of project.tasks) { - if (!orderedTaskIds.includes(t.id)) sorted.push(t); - } - return sorted; - })() - : project.tasks; + +
+
+ {virtualizer.getVirtualItems().map((virtualItem) => { + const item = flatItems[virtualItem.index]; + if (!item) { + return null; + } - return ( -
-
setHoveredProjectId(project.id)} - onMouseLeave={() => setHoveredProjectId((cur) => (cur === project.id ? null : cur))} - onMouseDown={(event) => { - if (event.button !== 0) return; - startYRef.current = event.clientY; - didDragRef.current = false; - setHoveredProjectId(null); - const state: DragState = { type: "project", fromIdx: projectIndex, overIdx: null }; - dragRef.current = state; - setDrag(state); - }} - onClick={() => { - if (!didDragRef.current) { - setCollapsedProjects((current) => ({ - ...current, - [project.id]: !current[project.id], - })); - } - }} - onContextMenu={(event) => - contextMenu.open(event, [ - { label: "Reload repository", onClick: () => onReloadRepository(project.id) }, - { label: "New task", onClick: () => onCreate(project.id) }, - ]) - } - data-project-header - className={css({ - display: "flex", - alignItems: "center", - justifyContent: "space-between", - padding: "10px 8px 4px", - gap: "8px", - cursor: "grab", - userSelect: "none", - })} - > -
-
- - {projectInitial(project.label)} - - - {isCollapsed ? : } - -
- - {stripCommonOrgPrefix(project.label, projects)} - -
-
- {isCollapsed ? {formatRelativeAge(project.updatedAtMs)} : null} - -
-
+ if (item.type === "project-header") { + const project = item.project; + const isCollapsed = collapsedProjects[project.id] === true; - {!isCollapsed && - orderedTasks.map((task, taskIndex) => { - const isActive = task.id === activeId; - const isPullRequestItem = isPullRequestSidebarItem(task); - const isDim = task.status === "archived"; - const isRunning = task.tabs.some((tab) => tab.status === "running"); - const isProvisioning = - !isPullRequestItem && - (String(task.status).startsWith("init_") || - task.status === "new" || - task.tabs.some((tab) => tab.status === "pending_provision" || tab.status === "pending_session_create")); - const hasUnread = task.tabs.some((tab) => tab.unread); - const isDraft = task.pullRequest == null || task.pullRequest.status === "draft"; - const totalAdded = task.fileChanges.reduce((sum, file) => sum + file.added, 0); - const totalRemoved = task.fileChanges.reduce((sum, file) => sum + file.removed, 0); - const hasDiffs = totalAdded > 0 || totalRemoved > 0; - const isTaskDropTarget = drag?.type === "task" && drag.projectId === project.id && drag.overIdx === taskIndex && drag.fromIdx !== taskIndex; - const isTaskBeingDragged = drag?.type === "task" && drag.projectId === project.id && drag.fromIdx === taskIndex && didDragRef.current; - - return ( + return ( +
{ + if (node) { + virtualizer.measureElement(node); + } + }} + style={{ + left: 0, + position: "absolute", + top: 0, + transform: `translateY(${virtualItem.start}px)`, + width: "100%", + }} + > +
{ - if (event.button !== 0) return; - // Only start task drag if not already in a project drag - if (dragRef.current) return; - event.stopPropagation(); - startYRef.current = event.clientY; - didDragRef.current = false; - const state: DragState = { type: "task", projectId: project.id, fromIdx: taskIndex, overIdx: null }; - dragRef.current = state; - setDrag(state); - }} + onMouseEnter={() => setHoveredProjectId(project.id)} + onMouseLeave={() => setHoveredProjectId((cur) => (cur === project.id ? null : cur))} onClick={() => { - if (!didDragRef.current) { - onSelect(task.id); - } + setCollapsedProjects((current) => ({ + ...current, + [project.id]: !current[project.id], + })); }} - onContextMenu={(event) => { - if (isPullRequestItem && task.pullRequest) { - contextMenu.open(event, [ - { label: "Reload pull request", onClick: () => onReloadPullRequest(task.repoId, task.pullRequest!.number) }, - { label: "Create task", onClick: () => onSelect(task.id) }, - ]); - return; - } + onContextMenu={(event) => contextMenu.open(event, [ - { label: "Rename task", onClick: () => onRenameTask(task.id) }, - { label: "Rename branch", onClick: () => onRenameBranch(task.id) }, - { label: "Mark as unread", onClick: () => onMarkUnread(task.id) }, - ]); - }} + { label: "Reload repository", onClick: () => onReloadRepository(project.id) }, + { label: "New task", onClick: () => onCreate(project.id) }, + ]) + } + data-project-header className={css({ - padding: "8px 12px", - borderRadius: "8px", - position: "relative", - backgroundColor: isActive ? t.interactiveHover : "transparent", - opacity: isTaskBeingDragged ? 0.4 : 1, + display: "flex", + alignItems: "center", + justifyContent: "space-between", + padding: "10px 8px 4px", + gap: "8px", cursor: "pointer", - transition: "all 150ms ease", - "::before": { - content: '""', - position: "absolute", - top: "-2px", - left: 0, - right: 0, - height: "2px", - backgroundColor: isTaskDropTarget ? 
t.textPrimary : "transparent", - transition: "background-color 100ms ease", - }, - ":hover": { - backgroundColor: t.interactiveHover, - }, + userSelect: "none", })} > -
-
+
+ + {projectInitial(project.label)} + + + {isCollapsed ? : } + +
+ + {stripCommonOrgPrefix(project.label, projects)} + +
+
+ {isCollapsed ? {formatRelativeAge(project.updatedAtMs)} : null} +
-
- - {task.title} - - {isPullRequestItem && task.statusMessage ? ( - - {task.statusMessage} - - ) : null} -
- {task.pullRequest != null ? ( - - - #{task.pullRequest.number} - - {task.pullRequest.status === "draft" ? : null} - - ) : ( - - )} - {hasDiffs ? ( -
- +{totalAdded} - -{totalRemoved} -
- ) : null} - - {formatRelativeAge(task.updatedAtMs)} - + +
- ); - })} - {/* Bottom drop zone for dragging to end of task list */} - {!isCollapsed && ( -
- )} -
- ); - })} - {/* Bottom drop zone for dragging project to end of list */} -
+
+ ); + } + + const { project, task } = item; + const isActive = task.id === activeId; + const isPullRequestItem = isPullRequestSidebarItem(task); + const isRunning = task.tabs.some((tab) => tab.status === "running"); + const isProvisioning = + !isPullRequestItem && + (String(task.status).startsWith("init_") || + task.status === "new" || + task.tabs.some((tab) => tab.status === "pending_provision" || tab.status === "pending_session_create")); + const hasUnread = task.tabs.some((tab) => tab.unread); + const isDraft = task.pullRequest == null || task.pullRequest.status === "draft"; + const totalAdded = task.fileChanges.reduce((sum, file) => sum + file.added, 0); + const totalRemoved = task.fileChanges.reduce((sum, file) => sum + file.removed, 0); + const hasDiffs = totalAdded > 0 || totalRemoved > 0; + + return ( +
{ + if (node) { + virtualizer.measureElement(node); + } + }} + style={{ + left: 0, + position: "absolute", + top: 0, + transform: `translateY(${virtualItem.start}px)`, + width: "100%", + }} + > +
+
onSelect(task.id)} + onContextMenu={(event) => { + if (isPullRequestItem && task.pullRequest) { + contextMenu.open(event, [ + { label: "Reload pull request", onClick: () => onReloadPullRequest(task.repoId, task.pullRequest!.number) }, + { label: "Create task", onClick: () => onSelect(task.id) }, + ]); + return; + } + contextMenu.open(event, [ + { label: "Rename task", onClick: () => onRenameTask(task.id) }, + { label: "Rename branch", onClick: () => onRenameBranch(task.id) }, + { label: "Mark as unread", onClick: () => onMarkUnread(task.id) }, + ]); + }} + className={css({ + padding: "8px 12px", + borderRadius: "8px", + backgroundColor: isActive ? t.interactiveHover : "transparent", + cursor: "pointer", + transition: "all 150ms ease", + ":hover": { + backgroundColor: t.interactiveHover, + }, + })} + > +
+
+ {isPullRequestItem ? ( + + ) : ( + + )} +
+
+ + {task.title} + + {isPullRequestItem && task.statusMessage ? ( + + {task.statusMessage} + + ) : null} +
+ {task.pullRequest != null ? ( + + + #{task.pullRequest.number} + + {task.pullRequest.status === "draft" ? : null} + + ) : ( + + )} + {hasDiffs ? ( +
+ +{totalAdded} + -{totalRemoved} +
+ ) : null} + + {formatRelativeAge(task.updatedAtMs)} + +
+
+
+
+ ); })} - /> +
diff --git a/frontend/packages/inspector/src/App.tsx b/frontend/packages/inspector/src/App.tsx index a829ae6..ac06904 100644 --- a/frontend/packages/inspector/src/App.tsx +++ b/frontend/packages/inspector/src/App.tsx @@ -286,7 +286,7 @@ export default function App() { const [highlightedEventId, setHighlightedEventId] = useState(null); const [debugPanelCollapsed, setDebugPanelCollapsed] = useState(false); - const messagesEndRef = useRef(null); + const transcriptScrollRef = useRef(null); const clientRef = useRef(null); const activeSessionRef = useRef(null); @@ -1434,10 +1434,6 @@ export default function App() { }); }, [connected, sessionId, sessions, getClient, subscribeToSession]); - useEffect(() => { - messagesEndRef.current?.scrollIntoView({ behavior: "smooth" }); - }, [transcriptEntries]); - const currentAgent = agents.find((agent) => agent.id === agentId); const agentLabel = agentDisplayNames[agentId] ?? agentId; const selectedSession = sessions.find((s) => s.sessionId === sessionId); @@ -1743,7 +1739,7 @@ export default function App() { } agentsLoading={agentsLoading} agentsError={agentsError} - messagesEndRef={messagesEndRef} + scrollRef={transcriptScrollRef} agentLabel={agentLabel} modelLabel={modelPillLabel} currentAgentVersion={currentAgent?.version ?? 
null} diff --git a/frontend/packages/inspector/src/components/chat/ChatPanel.tsx b/frontend/packages/inspector/src/components/chat/ChatPanel.tsx index 5d77a93..5afc259 100644 --- a/frontend/packages/inspector/src/components/chat/ChatPanel.tsx +++ b/frontend/packages/inspector/src/components/chat/ChatPanel.tsx @@ -1,6 +1,6 @@ import type { TranscriptEntry } from "@sandbox-agent/react"; import { AlertTriangle, Archive, CheckSquare, MessageSquare, Plus, Square, Terminal } from "lucide-react"; -import { useEffect, useRef, useState } from "react"; +import { useEffect, useRef, useState, type RefObject } from "react"; import type { AgentInfo } from "sandbox-agent"; import { formatShortId } from "../../utils/format"; @@ -40,7 +40,7 @@ const ChatPanel = ({ agents, agentsLoading, agentsError, - messagesEndRef, + scrollRef, agentLabel, modelLabel, currentAgentVersion, @@ -71,7 +71,7 @@ const ChatPanel = ({ agents: AgentInfo[]; agentsLoading: boolean; agentsError: string | null; - messagesEndRef: React.RefObject; + scrollRef: RefObject; agentLabel: string; modelLabel?: string | null; currentAgentVersion?: string | null; @@ -233,7 +233,7 @@ const ChatPanel = ({ entries={transcriptEntries} sessionError={sessionError} eventError={null} - messagesEndRef={messagesEndRef} + scrollRef={scrollRef} onEventClick={onEventClick} isThinking={isThinking} agentId={agentId} diff --git a/frontend/packages/inspector/src/components/chat/InspectorConversation.tsx b/frontend/packages/inspector/src/components/chat/InspectorConversation.tsx index cb3c1af..5d3c007 100644 --- a/frontend/packages/inspector/src/components/chat/InspectorConversation.tsx +++ b/frontend/packages/inspector/src/components/chat/InspectorConversation.tsx @@ -7,7 +7,7 @@ import { type TranscriptEntry, } from "@sandbox-agent/react"; import { AlertTriangle, Brain, Check, ChevronDown, ChevronRight, ExternalLink, Info, PlayCircle, Send, Shield, Wrench, X } from "lucide-react"; -import type { ReactNode } from "react"; +import type { 
ReactNode, RefObject } from "react"; import MarkdownText from "./MarkdownText"; const agentLogos: Record = { @@ -84,7 +84,7 @@ export interface InspectorConversationProps { entries: TranscriptEntry[]; sessionError: string | null; eventError?: string | null; - messagesEndRef: React.RefObject; + scrollRef: RefObject; onEventClick?: (eventId: string) => void; isThinking?: boolean; agentId?: string; @@ -102,7 +102,7 @@ const InspectorConversation = ({ entries, sessionError, eventError, - messagesEndRef, + scrollRef, onEventClick, isThinking, agentId, @@ -119,12 +119,13 @@ const InspectorConversation = ({ =18.0.0'} cpu: [arm64] os: [darwin] - '@boxlite-ai/boxlite-linux-x64-gnu@0.4.2': - resolution: {integrity: sha512-UIRiTKl1L0cx2igDiikEiBfpNbTZ0W3lft5ow7I2mkDnjtBVIQYSm+PmVXBupTYivAuPh38g9WhqJH44C1RJdQ==} + '@boxlite-ai/boxlite-linux-x64-gnu@0.4.3': + resolution: {integrity: sha512-e5Ukl2pyqFe046cA+VcDUL9iso1OseHS13BEDnr/ADKsG+P//bYZHnE0JZPJL1ai4+fHg6d6BOe113rOxba1eQ==} engines: {node: '>=18.0.0'} cpu: [x64] os: [linux] - '@boxlite-ai/boxlite@0.4.2': - resolution: {integrity: sha512-LVxG0feP1sBGbYz/VOm11VsU8PyUv7rvXOQJqKrfBgI9oRVyqycpY39PCJ1oC+FFho7w7d61q8VCVDlDdj8i6Q==} + '@boxlite-ai/boxlite@0.4.3': + resolution: {integrity: sha512-bCYSrJH8mAlz+JoyVkCUSfYuCp2IwqaLrvOu4m1vstq6LNwkLcpmJzs9gLXrHnYb+YitYko3pQiK8uTieG4BJw==} engines: {node: '>=18.0.0'} peerDependencies: playwright-core: '>=1.58.0' @@ -3642,6 +3648,12 @@ packages: react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + '@tanstack/react-virtual@3.13.22': + resolution: {integrity: sha512-EaOrBBJLi3M0bTMQRjGkxLXRw7Gizwntoy5E2Q2UnSbML7Mo2a1P/Hfkw5tw9FLzK62bj34Jl6VNbQfRV6eJcA==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + '@tanstack/router-core@1.166.4': resolution: {integrity: sha512-T/RrsAvznqNJqfT7nrj3S+/RiQmW4U/i4Vii8KdOQdhahPzAQnmRzZB+SUwR4quqRYql5o2zmCA6Brg1961hHg==} engines: 
{node: '>=20.19'} @@ -3649,6 +3661,9 @@ packages: '@tanstack/store@0.9.2': resolution: {integrity: sha512-K013lUJEFJK2ofFQ/hZKJUmCnpcV00ebLyOyFOWQvyQHUOZp/iYO84BM6aOGiV81JzwbX0APTVmW8YI7yiG5oA==} + '@tanstack/virtual-core@3.13.22': + resolution: {integrity: sha512-isuUGKsc5TAPDoHSbWTbl1SCil54zOS2MiWz/9GCWHPUQOvNTQx8qJEWC7UWR0lShhbK0Lmkcf0SZYxvch7G3g==} + '@types/babel__core@7.20.5': resolution: {integrity: sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==} @@ -8419,16 +8434,16 @@ snapshots: '@biomejs/cli-win32-x64@2.4.6': optional: true - '@boxlite-ai/boxlite-darwin-arm64@0.4.2': + '@boxlite-ai/boxlite-darwin-arm64@0.4.3': optional: true - '@boxlite-ai/boxlite-linux-x64-gnu@0.4.2': + '@boxlite-ai/boxlite-linux-x64-gnu@0.4.3': optional: true - '@boxlite-ai/boxlite@0.4.2': + '@boxlite-ai/boxlite@0.4.3': optionalDependencies: - '@boxlite-ai/boxlite-darwin-arm64': 0.4.2 - '@boxlite-ai/boxlite-linux-x64-gnu': 0.4.2 + '@boxlite-ai/boxlite-darwin-arm64': 0.4.3 + '@boxlite-ai/boxlite-linux-x64-gnu': 0.4.3 '@bufbuild/protobuf@2.11.0': {} @@ -10246,6 +10261,18 @@ snapshots: react-dom: 19.2.4(react@19.2.4) use-sync-external-store: 1.6.0(react@19.2.4) + '@tanstack/react-virtual@3.13.22(react-dom@19.2.4(react@18.3.1))(react@18.3.1)': + dependencies: + '@tanstack/virtual-core': 3.13.22 + react: 18.3.1 + react-dom: 19.2.4(react@18.3.1) + + '@tanstack/react-virtual@3.13.22(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@tanstack/virtual-core': 3.13.22 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + '@tanstack/router-core@1.166.4': dependencies: '@tanstack/history': 1.161.4 @@ -10258,6 +10285,8 @@ snapshots: '@tanstack/store@0.9.2': {} + '@tanstack/virtual-core@3.13.22': {} + '@types/babel__core@7.20.5': dependencies: '@babel/parser': 7.28.6 @@ -13238,6 +13267,11 @@ snapshots: react: 18.3.1 scheduler: 0.23.2 + react-dom@19.2.4(react@18.3.1): + dependencies: + react: 18.3.1 + scheduler: 0.27.0 + 
react-dom@19.2.4(react@19.2.4): dependencies: react: 19.2.4 diff --git a/sdks/react/package.json b/sdks/react/package.json index 18bede0..8b2e1d4 100644 --- a/sdks/react/package.json +++ b/sdks/react/package.json @@ -28,6 +28,7 @@ "sandbox-agent": "^0.2.2" }, "dependencies": { + "@tanstack/react-virtual": "^3.13.22", "ghostty-web": "^0.4.0" }, "devDependencies": { diff --git a/sdks/react/src/AgentConversation.tsx b/sdks/react/src/AgentConversation.tsx index 632fac4..acc9466 100644 --- a/sdks/react/src/AgentConversation.tsx +++ b/sdks/react/src/AgentConversation.tsx @@ -1,6 +1,6 @@ "use client"; -import type { ReactNode } from "react"; +import type { ReactNode, RefObject } from "react"; import { AgentTranscript, type AgentTranscriptClassNames, type AgentTranscriptProps, type TranscriptEntry } from "./AgentTranscript.tsx"; import { ChatComposer, type ChatComposerClassNames, type ChatComposerProps } from "./ChatComposer.tsx"; @@ -18,9 +18,10 @@ export interface AgentConversationProps { emptyState?: ReactNode; transcriptClassName?: string; transcriptClassNames?: Partial; + scrollRef?: RefObject; composerClassName?: string; composerClassNames?: Partial; - transcriptProps?: Omit; + transcriptProps?: Omit; composerProps?: Omit; } @@ -47,6 +48,7 @@ export const AgentConversation = ({ emptyState, transcriptClassName, transcriptClassNames, + scrollRef, composerClassName, composerClassNames, transcriptProps, @@ -58,12 +60,18 @@ export const AgentConversation = ({ return (
{hasTranscriptContent ? ( - + scrollRef ? ( +
+ +
+ ) : ( + + ) ) : emptyState ? (
{emptyState} diff --git a/sdks/react/src/AgentTranscript.tsx b/sdks/react/src/AgentTranscript.tsx index f884dd6..b565081 100644 --- a/sdks/react/src/AgentTranscript.tsx +++ b/sdks/react/src/AgentTranscript.tsx @@ -1,7 +1,8 @@ "use client"; import type { ReactNode, RefObject } from "react"; -import { useMemo, useState } from "react"; +import { useEffect, useMemo, useRef, useState } from "react"; +import { useTranscriptVirtualizer } from "./useTranscriptVirtualizer.ts"; export type PermissionReply = "once" | "always" | "reject"; @@ -98,10 +99,14 @@ export interface AgentTranscriptProps { className?: string; classNames?: Partial; endRef?: RefObject; + scrollRef?: RefObject; + scrollToEntryId?: string | null; sessionError?: string | null; eventError?: string | null; isThinking?: boolean; agentId?: string; + virtualize?: boolean; + onAtBottomChange?: (atBottom: boolean) => void; onEventClick?: (eventId: string) => void; onPermissionReply?: (permissionId: string, reply: PermissionReply) => void; isDividerEntry?: (entry: TranscriptEntry) => boolean; @@ -124,6 +129,8 @@ type GroupedEntries = | { type: "divider"; entries: TranscriptEntry[] } | { type: "permission"; entries: TranscriptEntry[] }; +const VIRTUAL_GROUP_GAP_PX = 12; + const DEFAULT_CLASS_NAMES: AgentTranscriptClassNames = { root: "sa-agent-transcript", divider: "sa-agent-transcript-divider", @@ -324,9 +331,21 @@ const buildGroupedEntries = (entries: TranscriptEntry[], isDividerEntry: (entry: return groupedEntries; }; +const getGroupedEntryKey = (group: GroupedEntries, index: number): string => { + const firstEntry = group.entries[0]; + + if (group.type === "tool-group") { + return `tool-group:${firstEntry?.id ?? index}`; + } + + return firstEntry?.id ?? 
`${group.type}:${index}`; +}; + const ToolItem = ({ entry, isLast, + expanded, + onExpandedChange, classNames, onEventClick, canOpenEvent, @@ -337,6 +356,8 @@ const ToolItem = ({ }: { entry: TranscriptEntry; isLast: boolean; + expanded: boolean; + onExpandedChange: (expanded: boolean) => void; classNames: AgentTranscriptClassNames; onEventClick?: (eventId: string) => void; canOpenEvent: (entry: TranscriptEntry) => boolean; @@ -345,7 +366,6 @@ const ToolItem = ({ renderChevron: (expanded: boolean) => ReactNode; renderEventLinkContent: (entry: TranscriptEntry) => ReactNode; }) => { - const [expanded, setExpanded] = useState(false); const isTool = entry.kind === "tool"; const isReasoning = entry.kind === "reasoning"; const isMeta = entry.kind === "meta"; @@ -382,7 +402,7 @@ const ToolItem = ({ disabled={!hasContent} onClick={() => { if (hasContent) { - setExpanded((value) => !value); + onExpandedChange(!expanded); } }} > @@ -469,6 +489,10 @@ const ToolItem = ({ const ToolGroup = ({ entries, + expanded, + onExpandedChange, + expandedItemIds, + onToolItemExpandedChange, classNames, onEventClick, canOpenEvent, @@ -480,6 +504,10 @@ const ToolGroup = ({ renderEventLinkContent, }: { entries: TranscriptEntry[]; + expanded: boolean; + onExpandedChange: (expanded: boolean) => void; + expandedItemIds: Record; + onToolItemExpandedChange: (entryId: string, expanded: boolean) => void; classNames: AgentTranscriptClassNames; onEventClick?: (eventId: string) => void; canOpenEvent: (entry: TranscriptEntry) => boolean; @@ -490,7 +518,6 @@ const ToolGroup = ({ renderChevron: (expanded: boolean) => ReactNode; renderEventLinkContent: (entry: TranscriptEntry) => ReactNode; }) => { - const [expanded, setExpanded] = useState(false); const hasFailed = entries.some((entry) => entry.kind === "tool" && entry.toolStatus === "failed"); if (entries.length === 1) { @@ -499,6 +526,8 @@ const ToolGroup = ({ onToolItemExpandedChange(entries[0]!.id, nextExpanded)} classNames={classNames} 
onEventClick={onEventClick} canOpenEvent={canOpenEvent} @@ -518,7 +547,7 @@ const ToolGroup = ({ className={cx(classNames.toolGroupHeader, expanded && "expanded")} data-slot="tool-group-header" data-expanded={expanded ? "true" : undefined} - onClick={() => setExpanded((value) => !value)} + onClick={() => onExpandedChange(!expanded)} > {renderToolGroupIcon(entries, expanded)} @@ -537,6 +566,8 @@ const ToolGroup = ({ key={entry.id} entry={entry} isLast={index === entries.length - 1} + expanded={Boolean(expandedItemIds[entry.id])} + onExpandedChange={(nextExpanded) => onToolItemExpandedChange(entry.id, nextExpanded)} classNames={classNames} onEventClick={onEventClick} canOpenEvent={canOpenEvent} @@ -636,10 +667,14 @@ export const AgentTranscript = ({ className, classNames: classNameOverrides, endRef, + scrollRef, + scrollToEntryId, sessionError, eventError, isThinking, agentId, + virtualize = false, + onAtBottomChange, onEventClick, onPermissionReply, isDividerEntry = defaultIsDividerEntry, @@ -657,83 +692,199 @@ export const AgentTranscript = ({ }: AgentTranscriptProps) => { const resolvedClassNames = useMemo(() => mergeClassNames(DEFAULT_CLASS_NAMES, classNameOverrides), [classNameOverrides]); const groupedEntries = useMemo(() => buildGroupedEntries(entries, isDividerEntry), [entries, isDividerEntry]); + const [expandedToolGroups, setExpandedToolGroups] = useState>({}); + const [expandedToolItems, setExpandedToolItems] = useState>({}); + const lastScrollTargetRef = useRef(null); + const isVirtualized = virtualize && Boolean(scrollRef); + const { virtualizer, isFollowingRef } = useTranscriptVirtualizer(groupedEntries, isVirtualized ? 
scrollRef : undefined, onAtBottomChange); + + useEffect(() => { + if (!scrollToEntryId) { + lastScrollTargetRef.current = null; + return; + } + + if (!isVirtualized || scrollToEntryId === lastScrollTargetRef.current) { + return; + } + + const targetIndex = groupedEntries.findIndex((group) => group.entries.some((entry) => entry.id === scrollToEntryId)); + if (targetIndex < 0) { + return; + } + + lastScrollTargetRef.current = scrollToEntryId; + + const frameId = requestAnimationFrame(() => { + virtualizer.scrollToIndex(targetIndex, { + align: "center", + behavior: "smooth", + }); + }); + + return () => { + cancelAnimationFrame(frameId); + }; + }, [groupedEntries, isVirtualized, scrollToEntryId, virtualizer]); + + useEffect(() => { + if (!isVirtualized || !scrollRef?.current || !isFollowingRef.current) { + return; + } + + const scrollElement = scrollRef.current; + const frameId = requestAnimationFrame(() => { + scrollElement.scrollTo({ top: scrollElement.scrollHeight }); + }); + + return () => { + cancelAnimationFrame(frameId); + }; + }, [eventError, isFollowingRef, isThinking, isVirtualized, scrollRef, sessionError]); + + const setToolGroupExpanded = (groupKey: string, expanded: boolean) => { + setExpandedToolGroups((current) => { + if (current[groupKey] === expanded) { + return current; + } + return { ...current, [groupKey]: expanded }; + }); + }; + + const setToolItemExpanded = (entryId: string, expanded: boolean) => { + setExpandedToolItems((current) => { + if (current[entryId] === expanded) { + return current; + } + return { ...current, [entryId]: expanded }; + }); + }; + + const renderGroup = (group: GroupedEntries, index: number) => { + if (group.type === "divider") { + const entry = group.entries[0]; + const title = entry.meta?.title ?? "Status"; + return ( +
+
+ + {title} + +
+
+ ); + } + + if (group.type === "tool-group") { + const groupKey = getGroupedEntryKey(group, index); + + return ( + setToolGroupExpanded(groupKey, expanded)} + expandedItemIds={expandedToolItems} + onToolItemExpandedChange={setToolItemExpanded} + classNames={resolvedClassNames} + onEventClick={onEventClick} + canOpenEvent={canOpenEvent} + getToolGroupSummary={getToolGroupSummary} + renderInlinePendingIndicator={renderInlinePendingIndicator} + renderToolItemIcon={renderToolItemIcon} + renderToolGroupIcon={renderToolGroupIcon} + renderChevron={renderChevron} + renderEventLinkContent={renderEventLinkContent} + /> + ); + } + + if (group.type === "permission") { + const entry = group.entries[0]; + return ( + + ); + } + + const entry = group.entries[0]; + const messageVariant = getMessageVariant(entry); + + return ( +
+
+ {entry.text ? ( +
+ {renderMessageText(entry)} +
+ ) : ( + + {renderInlinePendingIndicator()} + + )} +
+
+ ); + }; return ( -
- {groupedEntries.map((group, index) => { - if (group.type === "divider") { - const entry = group.entries[0]; - const title = entry.meta?.title ?? "Status"; - return ( -
-
- - {title} - -
-
- ); - } +
+ {isVirtualized ? ( +
+ {virtualizer.getVirtualItems().map((virtualItem) => { + const group = groupedEntries[virtualItem.index]; + if (!group) { + return null; + } - if (group.type === "tool-group") { - return ( - - ); - } - - if (group.type === "permission") { - const entry = group.entries[0]; - return ( - - ); - } - - const entry = group.entries[0]; - const messageVariant = getMessageVariant(entry); - - return ( -
-
- {entry.text ? ( -
- {renderMessageText(entry)} + return ( +
{ + if (node) { + virtualizer.measureElement(node); + } + }} + style={{ + left: 0, + position: "absolute", + top: 0, + transform: `translateY(${virtualItem.start}px)`, + width: "100%", + }} + > +
+ {renderGroup(group, virtualItem.index)}
- ) : ( - - {renderInlinePendingIndicator()} - - )} -
-
- ); - })} +
+ ); + })} +
+ ) : ( + groupedEntries.map((group, index) => renderGroup(group, index)) + )} {sessionError ? (
{sessionError} @@ -753,7 +904,7 @@ export const AgentTranscript = ({
)) : null} -
+ {!isVirtualized ?
: null}
); }; diff --git a/sdks/react/src/index.ts b/sdks/react/src/index.ts index f76f2a3..55d4a91 100644 --- a/sdks/react/src/index.ts +++ b/sdks/react/src/index.ts @@ -2,6 +2,7 @@ export { AgentConversation } from "./AgentConversation.tsx"; export { AgentTranscript } from "./AgentTranscript.tsx"; export { ChatComposer } from "./ChatComposer.tsx"; export { ProcessTerminal } from "./ProcessTerminal.tsx"; +export { useTranscriptVirtualizer } from "./useTranscriptVirtualizer.ts"; export type { AgentConversationClassNames, diff --git a/sdks/react/src/useTranscriptVirtualizer.ts b/sdks/react/src/useTranscriptVirtualizer.ts new file mode 100644 index 0000000..dc52717 --- /dev/null +++ b/sdks/react/src/useTranscriptVirtualizer.ts @@ -0,0 +1,58 @@ +"use client"; + +import type { RefObject } from "react"; +import { useEffect, useRef } from "react"; +import { useVirtualizer } from "@tanstack/react-virtual"; + +export function useTranscriptVirtualizer(items: T[], scrollElementRef?: RefObject, onAtBottomChange?: (atBottom: boolean) => void) { + const isFollowingRef = useRef(true); + + const virtualizer = useVirtualizer({ + count: items.length, + getScrollElement: () => scrollElementRef?.current ?? 
null, + estimateSize: () => 80, + measureElement: (element) => element.getBoundingClientRect().height, + overscan: 10, + }); + + virtualizer.shouldAdjustScrollPositionOnItemSizeChange = () => isFollowingRef.current; + + useEffect(() => { + const scrollElement = scrollElementRef?.current; + if (!scrollElement) { + return; + } + + const updateFollowState = () => { + const atBottom = scrollElement.scrollHeight - scrollElement.scrollTop - scrollElement.clientHeight < 50; + isFollowingRef.current = atBottom; + onAtBottomChange?.(atBottom); + }; + + updateFollowState(); + scrollElement.addEventListener("scroll", updateFollowState, { passive: true }); + + return () => { + scrollElement.removeEventListener("scroll", updateFollowState); + }; + }, [onAtBottomChange, scrollElementRef]); + + useEffect(() => { + if (!isFollowingRef.current || items.length === 0) { + return; + } + + const frameId = requestAnimationFrame(() => { + virtualizer.scrollToIndex(items.length - 1, { + align: "end", + behavior: "smooth", + }); + }); + + return () => { + cancelAnimationFrame(frameId); + }; + }, [items.length, virtualizer]); + + return { virtualizer, isFollowingRef }; +} From 99abb9d42ec666b8ac04035440d28fcc7b13ede6 Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Sat, 14 Mar 2026 20:42:18 -0700 Subject: [PATCH 04/48] chore(foundry): workbench action responsiveness (#254) * wip * wip --- CLAUDE.md | 98 +- README.md | 2 +- foundry/CLAUDE.md | 93 +- foundry/README.md | 6 +- foundry/compose.dev.yaml | 9 +- foundry/compose.preview.yaml | 2 - foundry/docker/backend.dev.Dockerfile | 15 - foundry/docker/backend.preview.Dockerfile | 15 - foundry/packages/backend/CLAUDE.md | 19 +- foundry/packages/backend/src/actors/events.ts | 40 +- .../src/actors/github-data/db/migrations.ts | 22 + .../src/actors/github-data/db/schema.ts | 9 + .../backend/src/actors/github-data/index.ts | 198 +- .../packages/backend/src/actors/handles.ts | 73 +- .../backend/src/actors/history/index.ts | 6 +- 
foundry/packages/backend/src/actors/index.ts | 15 +- foundry/packages/backend/src/actors/keys.ts | 30 +- .../packages/backend/src/actors/logging.ts | 6 +- .../{workspace => organization}/actions.ts | 471 ++-- .../{workspace => organization}/app-shell.ts | 401 ++-- .../actors/{project => organization}/db/db.ts | 2 +- .../actors/organization/db/drizzle.config.ts | 6 + .../db/drizzle/0000_melted_viper.sql | 6 - .../db/drizzle/meta/0000_snapshot.json | 31 - .../db/drizzle/meta/_journal.json | 0 .../db/migrations.ts | 14 +- .../{workspace => organization}/db/schema.ts | 10 +- .../backend/src/actors/organization/index.ts | 19 + .../src/actors/project-branch-sync/index.ts | 178 -- .../backend/src/actors/project/actions.ts | 1231 ----------- .../src/actors/project/db/drizzle.config.ts | 6 - .../db/drizzle/0000_useful_la_nuit.sql | 40 - .../db/drizzle/meta/0000_snapshot.json | 265 --- .../src/actors/project/db/migrations.ts | 46 - .../backend/src/actors/project/db/schema.ts | 41 - .../backend/src/actors/project/index.ts | 30 - .../backend/src/actors/project/stack-model.ts | 69 - .../backend/src/actors/repository/actions.ts | 557 +++++ .../actors/{workspace => repository}/db/db.ts | 2 +- .../actors/repository/db/drizzle.config.ts | 6 + .../db/drizzle/0000_useful_la_nuit.sql | 12 + .../db/drizzle/meta/0000_snapshot.json | 87 + .../db/drizzle/meta/_journal.json | 0 .../src/actors/repository/db/migrations.ts | 43 + .../src/actors/repository/db/schema.ts | 16 + .../backend/src/actors/repository/index.ts | 27 + .../backend/src/actors/sandbox/index.ts | 54 +- .../backend/src/actors/task/db/migrations.ts | 12 +- .../backend/src/actors/task/db/schema.ts | 4 +- .../packages/backend/src/actors/task/index.ts | 91 +- .../backend/src/actors/task/workbench.ts | 214 +- .../src/actors/task/workflow/commands.ts | 8 +- .../src/actors/task/workflow/common.ts | 14 +- .../backend/src/actors/task/workflow/index.ts | 50 +- .../backend/src/actors/task/workflow/init.ts | 127 +- 
.../backend/src/actors/task/workflow/push.ts | 6 +- .../backend/src/actors/task/workflow/queue.ts | 1 + .../src/actors/workspace/db/drizzle.config.ts | 6 - .../backend/src/actors/workspace/index.ts | 19 - .../backend/src/config/organization.ts | 13 + .../packages/backend/src/config/workspace.ts | 13 - foundry/packages/backend/src/driver.ts | 82 +- foundry/packages/backend/src/index.ts | 40 +- .../src/integrations/git-spice/index.ts | 223 -- .../backend/src/integrations/git/index.ts | 313 --- .../backend/src/integrations/github/index.ts | 292 +-- .../src/integrations/graphite/index.ts | 140 -- .../packages/backend/src/sandbox-config.ts | 12 +- .../backend/src/services/app-github.ts | 35 + .../backend/src/services/better-auth.ts | 96 +- .../backend/src/services/foundry-paths.ts | 20 - .../backend/src/services/github-auth.ts | 14 +- .../backend/src/services/repo-git-lock.ts | 45 - foundry/packages/backend/src/services/repo.ts | 27 + .../packages/backend/test/git-spice.test.ts | 129 -- .../backend/test/git-validate-remote.test.ts | 40 - .../backend/test/helpers/test-context.ts | 4 +- .../backend/test/helpers/test-driver.ts | 36 +- foundry/packages/backend/test/keys.test.ts | 11 +- ...test.ts => organization-isolation.test.ts} | 42 +- ...anization-star-sandbox-agent-repo.test.ts} | 8 +- .../backend/test/sandbox-config.test.ts | 6 +- .../packages/backend/test/stack-model.test.ts | 34 - .../backend/test/workbench-unread.test.ts | 29 +- foundry/packages/cli/src/index.ts | 257 ++- .../src/{workspace => organization}/config.ts | 6 +- foundry/packages/cli/src/theme.ts | 8 +- foundry/packages/cli/src/tui.ts | 27 +- .../packages/cli/test/backend-manager.test.ts | 6 +- ...ig.test.ts => organization-config.test.ts} | 14 +- foundry/packages/cli/test/theme.test.ts | 4 +- foundry/packages/cli/test/tui-format.test.ts | 6 +- foundry/packages/client/src/app-client.ts | 2 +- foundry/packages/client/src/backend-client.ts | 624 +++--- foundry/packages/client/src/index.ts | 10 +- 
.../client/src/interest/mock-manager.ts | 12 - foundry/packages/client/src/keys.ts | 28 +- foundry/packages/client/src/mock-app.ts | 16 +- .../client/src/mock/backend-client.ts | 237 +- .../client/src/mock/workbench-client.ts | 126 +- .../packages/client/src/remote/app-client.ts | 4 +- .../client/src/remote/workbench-client.ts | 58 +- .../src/{interest => subscription}/manager.ts | 8 +- .../client/src/subscription/mock-manager.ts | 12 + .../remote-manager.ts | 10 +- .../src/{interest => subscription}/topics.ts | 55 +- .../use-subscription.ts} | 8 +- foundry/packages/client/src/view-model.ts | 2 +- .../packages/client/src/workbench-client.ts | 18 +- .../packages/client/src/workbench-model.ts | 77 +- .../test/e2e/full-integration-e2e.test.ts | 44 +- .../client/test/e2e/github-pr-e2e.test.ts | 53 +- foundry/packages/client/test/e2e/helpers.ts | 84 + .../client/test/e2e/workbench-e2e.test.ts | 313 +-- .../test/e2e/workbench-load-e2e.test.ts | 53 +- foundry/packages/client/test/keys.test.ts | 12 +- ...r.test.ts => subscription-manager.test.ts} | 70 +- .../packages/client/test/view-model.test.ts | 12 +- .../src-tauri/gen/schemas/acl-manifests.json | 1923 ++++++++++++++++- .../src-tauri/gen/schemas/desktop-schema.json | 90 +- .../src-tauri/gen/schemas/macOS-schema.json | 90 +- foundry/packages/frontend/src/app/router.tsx | 84 +- .../frontend/src/components/dev-panel.tsx | 90 +- .../frontend/src/components/mock-layout.tsx | 653 +++--- .../components/mock-layout/message-list.tsx | 70 +- .../components/mock-layout/right-sidebar.tsx | 6 +- .../{tab-strip.tsx => session-strip.tsx} | 80 +- .../src/components/mock-layout/sidebar.tsx | 567 +++-- .../components/mock-layout/terminal-pane.tsx | 54 +- .../mock-layout/transcript-header.tsx | 14 +- .../src/components/mock-layout/ui.tsx | 10 +- .../components/mock-layout/view-model.test.ts | 10 +- .../src/components/mock-layout/view-model.ts | 28 +- .../src/components/mock-onboarding.tsx | 22 +- ...shboard.tsx => 
organization-dashboard.tsx} | 445 +--- .../frontend/src/features/tasks/model.test.ts | 6 +- foundry/packages/frontend/src/lib/backend.ts | 4 +- foundry/packages/frontend/src/lib/env.ts | 4 +- foundry/packages/frontend/src/lib/interest.ts | 5 - foundry/packages/frontend/src/lib/mock-app.ts | 14 +- .../packages/frontend/src/lib/subscription.ts | 5 + foundry/packages/shared/src/app-shell.ts | 2 +- foundry/packages/shared/src/config.ts | 4 +- foundry/packages/shared/src/contracts.ts | 105 +- foundry/packages/shared/src/index.ts | 2 +- foundry/packages/shared/src/organization.ts | 13 + .../packages/shared/src/realtime-events.ts | 12 +- foundry/packages/shared/src/workbench.ts | 64 +- foundry/packages/shared/src/workspace.ts | 13 - ...workspace.test.ts => organization.test.ts} | 16 +- foundry/research/friction/general.mdx | 40 +- foundry/research/friction/rivet.mdx | 46 +- foundry/research/friction/sandboxes.mdx | 6 +- .../realtime-interest-manager-spec.md | 248 +-- .../00-end-to-end-async-realtime-plan.md | 34 +- .../01-task-creation-bootstrap-only.md | 14 +- ...02-repo-overview-from-cached-projection.md | 30 +- ...03-repo-actions-via-background-workflow.md | 24 +- ...on-creation-without-inline-provisioning.md | 4 +- ...5-workbench-snapshot-from-derived-state.md | 4 +- .../07-auth-identity-simplification.md | 48 +- .../specs/async-action-fixes/README.md | 6 +- foundry/research/specs/frontend.md | 4 +- foundry/research/specs/github-data-actor.md | 44 +- .../research/specs/remove-local-git-clone.md | 381 ++++ .../specs/rivetkit-opentui-migration-plan.md | 174 +- foundry/scripts/build-test-image.sh | 2 +- foundry/scripts/data/rivet-dev.json | 12 +- foundry/scripts/pull-org-data.ts | 8 +- sdks/CLAUDE.md | 37 + server/CLAUDE.md | 71 +- 171 files changed, 7260 insertions(+), 7342 deletions(-) rename foundry/packages/backend/src/actors/{workspace => organization}/actions.ts (62%) rename foundry/packages/backend/src/actors/{workspace => organization}/app-shell.ts (84%) rename 
foundry/packages/backend/src/actors/{project => organization}/db/db.ts (68%) create mode 100644 foundry/packages/backend/src/actors/organization/db/drizzle.config.ts rename foundry/packages/backend/src/actors/{workspace => organization}/db/drizzle/0000_melted_viper.sql (94%) rename foundry/packages/backend/src/actors/{workspace => organization}/db/drizzle/meta/0000_snapshot.json (95%) rename foundry/packages/backend/src/actors/{workspace => organization}/db/drizzle/meta/_journal.json (100%) rename foundry/packages/backend/src/actors/{workspace => organization}/db/migrations.ts (96%) rename foundry/packages/backend/src/actors/{workspace => organization}/db/schema.ts (91%) create mode 100644 foundry/packages/backend/src/actors/organization/index.ts delete mode 100644 foundry/packages/backend/src/actors/project-branch-sync/index.ts delete mode 100644 foundry/packages/backend/src/actors/project/actions.ts delete mode 100644 foundry/packages/backend/src/actors/project/db/drizzle.config.ts delete mode 100644 foundry/packages/backend/src/actors/project/db/drizzle/0000_useful_la_nuit.sql delete mode 100644 foundry/packages/backend/src/actors/project/db/drizzle/meta/0000_snapshot.json delete mode 100644 foundry/packages/backend/src/actors/project/db/migrations.ts delete mode 100644 foundry/packages/backend/src/actors/project/db/schema.ts delete mode 100644 foundry/packages/backend/src/actors/project/index.ts delete mode 100644 foundry/packages/backend/src/actors/project/stack-model.ts create mode 100644 foundry/packages/backend/src/actors/repository/actions.ts rename foundry/packages/backend/src/actors/{workspace => repository}/db/db.ts (68%) create mode 100644 foundry/packages/backend/src/actors/repository/db/drizzle.config.ts create mode 100644 foundry/packages/backend/src/actors/repository/db/drizzle/0000_useful_la_nuit.sql create mode 100644 foundry/packages/backend/src/actors/repository/db/drizzle/meta/0000_snapshot.json rename 
foundry/packages/backend/src/actors/{project => repository}/db/drizzle/meta/_journal.json (100%) create mode 100644 foundry/packages/backend/src/actors/repository/db/migrations.ts create mode 100644 foundry/packages/backend/src/actors/repository/db/schema.ts create mode 100644 foundry/packages/backend/src/actors/repository/index.ts delete mode 100644 foundry/packages/backend/src/actors/workspace/db/drizzle.config.ts delete mode 100644 foundry/packages/backend/src/actors/workspace/index.ts create mode 100644 foundry/packages/backend/src/config/organization.ts delete mode 100644 foundry/packages/backend/src/config/workspace.ts delete mode 100644 foundry/packages/backend/src/integrations/git-spice/index.ts delete mode 100644 foundry/packages/backend/src/integrations/git/index.ts delete mode 100644 foundry/packages/backend/src/integrations/graphite/index.ts delete mode 100644 foundry/packages/backend/src/services/foundry-paths.ts delete mode 100644 foundry/packages/backend/src/services/repo-git-lock.ts delete mode 100644 foundry/packages/backend/test/git-spice.test.ts delete mode 100644 foundry/packages/backend/test/git-validate-remote.test.ts rename foundry/packages/backend/test/{workspace-isolation.test.ts => organization-isolation.test.ts} (63%) rename foundry/packages/backend/test/{workspace-star-sandbox-agent-repo.test.ts => organization-star-sandbox-agent-repo.test.ts} (80%) delete mode 100644 foundry/packages/backend/test/stack-model.test.ts rename foundry/packages/cli/src/{workspace => organization}/config.ts (71%) rename foundry/packages/cli/test/{workspace-config.test.ts => organization-config.test.ts} (59%) delete mode 100644 foundry/packages/client/src/interest/mock-manager.ts rename foundry/packages/client/src/{interest => subscription}/manager.ts (82%) create mode 100644 foundry/packages/client/src/subscription/mock-manager.ts rename foundry/packages/client/src/{interest => subscription}/remote-manager.ts (94%) rename foundry/packages/client/src/{interest 
=> subscription}/topics.ts (73%) rename foundry/packages/client/src/{interest/use-interest.ts => subscription/use-subscription.ts} (85%) create mode 100644 foundry/packages/client/test/e2e/helpers.ts rename foundry/packages/client/test/{interest-manager.test.ts => subscription-manager.test.ts} (60%) rename foundry/packages/frontend/src/components/mock-layout/{tab-strip.tsx => session-strip.tsx} (78%) rename foundry/packages/frontend/src/components/{workspace-dashboard.tsx => organization-dashboard.tsx} (79%) delete mode 100644 foundry/packages/frontend/src/lib/interest.ts create mode 100644 foundry/packages/frontend/src/lib/subscription.ts create mode 100644 foundry/packages/shared/src/organization.ts delete mode 100644 foundry/packages/shared/src/workspace.ts rename foundry/packages/shared/test/{workspace.test.ts => organization.test.ts} (59%) create mode 100644 foundry/research/specs/remove-local-git-clone.md create mode 100644 sdks/CLAUDE.md diff --git a/CLAUDE.md b/CLAUDE.md index 26dfa28..f8771fb 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1,40 +1,5 @@ # Instructions -## ACP v1 Baseline - -- v1 is ACP-native. -- `/v1/*` is removed and returns `410 Gone` (`application/problem+json`). -- `/opencode/*` is disabled during ACP core phases and returns `503`. -- Prompt/session traffic is ACP JSON-RPC over streamable HTTP on `/v1/rpc`: - - `POST /v1/rpc` - - `GET /v1/rpc` (SSE) - - `DELETE /v1/rpc` -- Control-plane endpoints: - - `GET /v1/health` - - `GET /v1/agents` - - `POST /v1/agents/{agent}/install` -- Binary filesystem transfer endpoints (intentionally HTTP, not ACP extension methods): - - `GET /v1/fs/file` - - `PUT /v1/fs/file` - - `POST /v1/fs/upload-batch` -- Sandbox Agent ACP extension method naming: - - Custom ACP methods use `_sandboxagent/...` (not `_sandboxagent/v1/...`). - - Session detach method is `_sandboxagent/session/detach`. 
- -## API Scope - -- ACP is the primary protocol for agent/session behavior and all functionality that talks directly to the agent. -- ACP extensions may be used for gaps (for example `skills`, `models`, and related metadata), but the default is that agent-facing behavior is implemented by the agent through ACP. -- Custom HTTP APIs are for non-agent/session platform services (for example filesystem, terminals, and other host/runtime capabilities). -- Filesystem and terminal APIs remain Sandbox Agent-specific HTTP contracts and are not ACP. - - Do not make Sandbox Agent core flows depend on ACP client implementations of `fs/*` or `terminal/*`; in practice those client-side capabilities are often incomplete or inconsistent. - - ACP-native filesystem and terminal methods are also too limited for Sandbox Agent host/runtime needs, so prefer the native HTTP APIs for richer behavior. -- Keep `GET /v1/fs/file`, `PUT /v1/fs/file`, and `POST /v1/fs/upload-batch` on HTTP: - - These are Sandbox Agent host/runtime operations with cross-agent-consistent behavior. - - They may involve very large binary transfers that ACP JSON-RPC envelopes are not suited to stream. - - This is intentionally separate from ACP native `fs/read_text_file` and `fs/write_text_file`. - - ACP extension variants may exist in parallel, but SDK defaults should prefer HTTP for these binary transfer operations. - ## Naming and Ownership - This repository/product is **Sandbox Agent**. @@ -49,66 +14,13 @@ - Never expose underlying protocol method names (e.g. `session/request_permission`, `session/create`, `_sandboxagent/session/detach`) in non-ACP docs. Describe the behavior in user-facing terms instead. - Do not describe the underlying protocol implementation in docs. Only document the SDK surface (methods, types, options). ACP protocol details belong exclusively in ACP-specific pages. 
-## Architecture (Brief) +### Docs Source Of Truth (HTTP/CLI) -- HTTP contract and problem/error mapping: `server/packages/sandbox-agent/src/router.rs` -- ACP client runtime and agent process bridge: `server/packages/sandbox-agent/src/acp_runtime/mod.rs` -- Agent/native + ACP agent process install and lazy install: `server/packages/agent-management/` -- Inspector UI served at `/ui/` and bound to ACP over HTTP from `frontend/packages/inspector/` - -## TypeScript SDK Architecture - -- TypeScript clients are split into: - - `acp-http-client`: protocol-pure ACP-over-HTTP (`/v1/acp`) with no Sandbox-specific HTTP helpers. - - `sandbox-agent`: `SandboxAgent` SDK wrapper that combines ACP session operations with Sandbox control-plane and filesystem helpers. -- `SandboxAgent` entry points are `SandboxAgent.connect(...)` and `SandboxAgent.start(...)`. -- Stable Sandbox session methods are `createSession`, `resumeSession`, `resumeOrCreateSession`, `destroySession`, `rawSendSessionMethod`, `onSessionEvent`, `setSessionMode`, `setSessionModel`, `setSessionThoughtLevel`, `setSessionConfigOption`, `getSessionConfigOptions`, `getSessionModes`, `respondPermission`, `rawRespondPermission`, and `onPermissionRequest`. -- `Session` helpers are `prompt(...)`, `rawSend(...)`, `onEvent(...)`, `setMode(...)`, `setModel(...)`, `setThoughtLevel(...)`, `setConfigOption(...)`, `getConfigOptions()`, `getModes()`, `respondPermission(...)`, `rawRespondPermission(...)`, and `onPermissionRequest(...)`. -- Cleanup is `sdk.dispose()`. - -### React Component Methodology - -- Shared React UI belongs in `sdks/react` only when it is reusable outside the Inspector. -- If the same UI pattern is shared between the Sandbox Agent Inspector and Foundry, prefer extracting it into `sdks/react` instead of maintaining parallel implementations. 
-- Keep shared components unstyled by default: behavior in the package, styling in the consumer via `className`, slot-level `classNames`, render overrides, and `data-*` hooks. -- Prefer extracting reusable pieces such as transcript, composer, and conversation surfaces. Keep Inspector-specific shells such as session selection, session headers, and control-plane actions in `frontend/packages/inspector/`. -- Document all shared React components in `docs/react-components.mdx`, and keep that page aligned with the exported surface in `sdks/react/src/index.ts`. - -### TypeScript SDK Naming Conventions - -- Use `respond(id, reply)` for SDK methods that reply to an agent-initiated request (e.g. `respondPermission`). This is the standard pattern for answering any inbound JSON-RPC request from the agent. -- Prefix raw/low-level escape hatches with `raw` (e.g. `rawRespondPermission`, `rawSend`). These accept protocol-level types directly and bypass SDK abstractions. - -### Docs Source Of Truth - -- For TypeScript docs/examples, source of truth is implementation in: - - `sdks/typescript/src/client.ts` - - `sdks/typescript/src/index.ts` - - `sdks/acp-http-client/src/index.ts` -- Do not document TypeScript APIs unless they are exported and implemented in those files. - For HTTP/CLI docs/examples, source of truth is: - `server/packages/sandbox-agent/src/router.rs` - `server/packages/sandbox-agent/src/cli.rs` - Keep docs aligned to implemented endpoints/commands only (for example ACP under `/v1/acp`, not legacy `/v1/sessions` APIs). -## ACP Protocol Compliance - -- Before adding any new ACP method, property, or config option category to the SDK, verify it exists in the ACP spec at `https://agentclientprotocol.com/llms-full.txt`. -- Valid `SessionConfigOptionCategory` values are: `mode`, `model`, `thought_level`, `other`, or custom categories prefixed with `_` (e.g. `_permission_mode`). -- Do not invent ACP properties or categories (e.g. 
`permission_mode` is not a valid ACP category — use `_permission_mode` if it's a custom extension, or use existing ACP mechanisms like `session/set_mode`). -- `NewSessionRequest` only has `_meta`, `cwd`, and `mcpServers`. Do not add non-ACP fields to it. -- Sandbox Agent SDK abstractions (like `SessionCreateRequest`) may add convenience properties, but must clearly map to real ACP methods internally and not send fabricated fields over the wire. - -## Source Documents - -- ACP protocol specification (full LLM-readable reference): `https://agentclientprotocol.com/llms-full.txt` -- `~/misc/acp-docs/schema/schema.json` -- `~/misc/acp-docs/schema/meta.json` -- `research/acp/spec.md` -- `research/acp/v1-schema-to-acp-mapping.md` -- `research/acp/friction.md` -- `research/acp/todo.md` - ## Change Tracking - If the user asks to "push" changes, treat that as permission to commit and push all current workspace changes, not a hand-picked subset, unless the user explicitly scopes the push. @@ -119,14 +31,6 @@ - Append blockers/decisions to `research/acp/friction.md` during ACP work. - `docs/agent-capabilities.mdx` lists models/modes/thought levels per agent. Update it when adding a new agent or changing `fallback_config_options`. If its "Last updated" date is >2 weeks old, re-run `cd scripts/agent-configs && npx tsx dump.ts` and update the doc to match. Source data: `scripts/agent-configs/resources/*.json` and hardcoded entries in `server/packages/sandbox-agent/src/router/support.rs` (`fallback_config_options`). - Some agent models are gated by subscription (e.g. Claude `opus`). The live report only shows models available to the current credentials. The static doc and JSON resource files should list all known models regardless of subscription tier. -- TypeScript SDK tests should run against a real running server/runtime over real `/v1` HTTP APIs, typically using the real `mock` agent for deterministic behavior. 
-- Do not use Vitest fetch/transport mocks to simulate server functionality in TypeScript SDK tests. - -## Docker Examples (Dev Testing) - -- When manually testing bleeding-edge (unreleased) versions of sandbox-agent in `examples/`, use `SANDBOX_AGENT_DEV=1` with the Docker-based examples. -- This triggers a local build of `docker/runtime/Dockerfile.full` which builds the server binary from local source and packages it into the Docker image. -- Example: `SANDBOX_AGENT_DEV=1 pnpm --filter @sandbox-agent/example-mcp start` ## Install Version References diff --git a/README.md b/README.md index d4bfc61..eb427d7 100644 --- a/README.md +++ b/README.md @@ -277,7 +277,7 @@ Coding agents expect interactive terminals with proper TTY handling. SSH with pi - **Storage of sessions on disk**: Sessions are already stored by the respective coding agents on disk. It's assumed that the consumer is streaming data from this machine to an external storage, such as Postgres, ClickHouse, or Rivet. - **Direct LLM wrappers**: Use the [Vercel AI SDK](https://ai-sdk.dev/docs/introduction) if you want to implement your own agent from scratch. - **Git Repo Management**: Just use git commands or the features provided by your sandbox provider of choice. -- **Sandbox Provider API**: Sandbox providers have many nuanced differences in their API, it does not make sense for us to try to provide a custom layer. Instead, we opt to provide guides that let you integrate this project with sandbox providers. +- **Sandbox Provider API**: Sandbox providers have many nuanced differences in their API, it does not make sense for us to try to provide a custom layer. Instead, we opt to provide guides that let you integrate this repository with sandbox providers. ## Roadmap diff --git a/foundry/CLAUDE.md b/foundry/CLAUDE.md index aae89c3..e347a60 100644 --- a/foundry/CLAUDE.md +++ b/foundry/CLAUDE.md @@ -12,10 +12,10 @@ Use TypeScript for all source code. Use `pnpm` workspaces and Turborepo. 
-- Workspace root uses `pnpm-workspace.yaml` and `turbo.json`. +- Repository root uses `pnpm-workspace.yaml` and `turbo.json`. - Packages live in `packages/*`. - `core` is renamed to `shared`. -- `packages/cli` is disabled and excluded from active workspace validation. +- `packages/cli` is disabled and excluded from active monorepo validation. - Integrations and providers live under `packages/backend/src/{integrations,providers}`. ## CLI Status @@ -23,14 +23,14 @@ Use `pnpm` workspaces and Turborepo. - `packages/cli` is fully disabled for active development. - Do not implement new behavior in `packages/cli` unless explicitly requested. - Frontend is the primary product surface; prioritize `packages/frontend` + supporting `packages/client`/`packages/backend`. -- Workspace `build`, `typecheck`, and `test` intentionally exclude `@sandbox-agent/foundry-cli`. -- `pnpm-workspace.yaml` excludes `packages/cli` from workspace package resolution. +- Monorepo `build`, `typecheck`, and `test` intentionally exclude `@sandbox-agent/foundry-cli`. +- `pnpm-workspace.yaml` excludes `packages/cli` from monorepo package resolution. ## Common Commands - Foundry is the canonical name for this product tree. Do not introduce or preserve legacy pre-Foundry naming in code, docs, commands, or runtime paths. - Install deps: `pnpm install` -- Full active-workspace validation: `pnpm -w typecheck`, `pnpm -w build`, `pnpm -w test` +- Full active-monorepo validation: `pnpm -w typecheck`, `pnpm -w build`, `pnpm -w test` - Start the full dev stack (real backend + frontend): `just foundry-dev` — frontend on **port 4173**, backend on **port 7741** (Docker via `compose.dev.yaml`) - Start the mock frontend stack (no backend): `just foundry-mock` — mock frontend on **port 4174** (Docker via `compose.mock.yaml`) - Start the local production-build preview stack: `just foundry-preview` @@ -59,9 +59,9 @@ Use `pnpm` workspaces and Turborepo. 
## Railway Logs -- Production Foundry Railway logs can be read from a linked workspace with `railway logs --deployment --lines 200` or `railway logs --deployment --lines 200`. +- Production Foundry Railway logs can be read from a linked checkout with `railway logs --deployment --lines 200`. - Production deploys should go through `git push` to the deployment branch/workflow. Do not use `railway up` for Foundry deploys. -- If Railway logs fail because the workspace is not linked to the correct project/service/environment, run: +- If Railway logs fail because the checkout is not linked to the correct Railway project/service/environment, run: `railway link --project 33e3e2df-32c5-41c5-a4af-dca8654acb1d --environment cf387142-61fd-4668-8cf7-b3559e0983cb --service 91c7e450-d6d2-481a-b2a4-0a916f4160fc` - That links this directory to the `sandbox-agent` project, `production` environment, and `foundry-api` service. - Production proxy chain: `api.sandboxagent.dev` routes through Cloudflare → Fastly/Varnish → Railway. When debugging request duplication, timeouts, or retry behavior, check headers like `cf-ray`, `x-varnish`, `x-railway-edge`, and `cdn-loop` to identify which layer is involved. @@ -96,19 +96,19 @@ Do not use polling (`refetchInterval`), empty "go re-fetch" broadcast events, or ### Materialized state in coordinator actors -- **Workspace actor** materializes sidebar-level data in its own SQLite: repo catalog, task summaries (title, status, branch, PR, updatedAt), repo summaries (overview/branch state), and session summaries (id, name, status, unread, model — no transcript). Task actors push summary changes to the workspace actor when they mutate. The workspace actor broadcasts the updated entity to connected clients. `getWorkspaceSummary` reads from local tables only — no fan-out to child actors. 
+- **Organization actor** materializes sidebar-level data in its own SQLite: repo catalog, task summaries (title, status, branch, PR, updatedAt), repo summaries (overview/branch state), and session summaries (id, name, status, unread, model — no transcript). Task actors push summary changes to the organization actor when they mutate. The organization actor broadcasts the updated entity to connected clients. `getOrganizationSummary` reads from local tables only — no fan-out to child actors. - **Task actor** materializes its own detail state (session summaries, sandbox info, diffs, file tree). `getTaskDetail` reads from the task actor's own SQLite. The task actor broadcasts updates directly to clients connected to it. -- **Session data** lives on the task actor but is a separate subscription topic. The task topic includes `sessions_summary` (list without content). The `session` topic provides full transcript and draft state. Clients subscribe to the `session` topic for whichever session tab is active, and filter `sessionUpdated` events by session ID (ignoring events for other sessions on the same actor). -- The expensive fan-out (querying every project/task actor) only exists as a background reconciliation/rebuild path, never on the hot read path. +- **Session data** lives on the task actor but is a separate subscription topic. The task topic includes `sessions_summary` (list without content). The `session` topic provides full transcript and draft state. Clients subscribe to the `session` topic for whichever session is active, and filter `sessionUpdated` events by session ID (ignoring events for other sessions on the same actor). +- The expensive fan-out (querying every repository/task actor) only exists as a background reconciliation/rebuild path, never on the hot read path. 
-### Interest manager +### Subscription manager -The interest manager (`packages/client`) is a global singleton that manages WebSocket connections, cached state, and subscriptions for all topics. It: +The subscription manager (`packages/client`) is a global singleton that manages WebSocket connections, cached state, and subscriptions for all topics. It: - **Deduplicates** — multiple subscribers to the same topic share one connection and one cached state. - **Grace period (30s)** — when the last subscriber leaves, the connection and state stay alive for 30 seconds before teardown. This keeps data warm for back-navigation and prevents thrashing. -- **Exposes a single hook** — `useInterest(topicKey, params)` returns `{ data, status, error }`. Null params = no subscription (conditional interest). -- **Shared harness, separate implementations** — the `InterestManager` interface is shared between mock and remote implementations. The mock implementation uses in-memory state. The remote implementation uses WebSocket connections. The API/client exposure is identical for both. +- **Exposes a single hook** — `useSubscription(topicKey, params)` returns `{ data, status, error }`. Null params = no subscription (conditional subscription). +- **Shared harness, separate implementations** — the `SubscriptionManager` interface is shared between mock and remote implementations. The mock implementation uses in-memory state. The remote implementation uses WebSocket connections. The API/client exposure is identical for both. 
### Topics @@ -116,23 +116,31 @@ Each topic maps to one actor connection and one event stream: | Topic | Actor | Event | Data | |---|---|---|---| -| `app` | Workspace `"app"` | `appUpdated` | Auth, orgs, onboarding | -| `workspace` | Workspace `{workspaceId}` | `workspaceUpdated` | Repo catalog, task summaries, repo summaries | -| `task` | Task `{workspaceId, repoId, taskId}` | `taskUpdated` | Session summaries, sandbox info, diffs, file tree | -| `session` | Task `{workspaceId, repoId, taskId}` (filtered by sessionId) | `sessionUpdated` | Transcript, draft state | +| `app` | Organization `"app"` | `appUpdated` | Auth, orgs, onboarding | +| `organization` | Organization `{organizationId}` | `organizationUpdated` | Repo catalog, task summaries, repo summaries | +| `task` | Task `{organizationId, repoId, taskId}` | `taskUpdated` | Session summaries, sandbox info, diffs, file tree | +| `session` | Task `{organizationId, repoId, taskId}` (filtered by sessionId) | `sessionUpdated` | Transcript, draft state | | `sandboxProcesses` | SandboxInstance | `processesUpdated` | Process list | -The client subscribes to `app` always, `workspace` when entering a workspace, `task` when viewing a task, and `session` when viewing a specific session tab. At most 4 actor connections at a time (app + workspace + task + sandbox if terminal is open). The `session` topic reuses the task actor connection and filters by session ID. +The client subscribes to `app` always, `organization` when entering an organization, `task` when viewing a task, and `session` when viewing a specific session. At most 4 actor connections at a time (app + organization + task + sandbox if terminal is open). The `session` topic reuses the task actor connection and filters by session ID. ### Rules - Do not add `useQuery` with `refetchInterval` for data that should be push-based. - Do not broadcast empty notification events. Events must carry the full new state of the changed entity. 
- Do not re-fetch full snapshots after mutations. The mutation triggers a server-side broadcast with the new entity state; the client replaces it in local state. -- All event subscriptions go through the interest manager. Do not create ad-hoc `handle.connect()` + `conn.on()` patterns. -- Backend mutations that affect sidebar data (task title, status, branch, PR state) must push the updated summary to the parent workspace actor, which broadcasts to workspace subscribers. +- All event subscriptions go through the subscription manager. Do not create ad-hoc `handle.connect()` + `conn.on()` patterns. +- Backend mutations that affect sidebar data (task title, status, branch, PR state) must push the updated summary to the parent organization actor, which broadcasts to organization subscribers. - Comment architecture-related code: add doc comments explaining the materialized state pattern, why deltas flow the way they do, and the relationship between parent/child actor broadcasts. New contributors should understand the data flow from comments alone. +## Git State Policy + +- The backend stores zero git state. No local clones, no refs, no working trees, and no git-spice. +- Repository metadata (branches, default branch, pull requests) comes from GitHub API data and webhook events already flowing into the system. +- All git operations that require a working tree run inside the task's sandbox via `executeInSandbox()`. +- Do not add backend git clone paths, `git fetch`, `git for-each-ref`, or direct backend git CLI calls. If you need git data, either read stored GitHub metadata or run the command inside a sandbox. +- The `BackendDriver` has no `GitDriver` or `StackDriver`. Only `GithubDriver` and `TmuxDriver` remain. + ## UI System - Foundry's base UI system is `BaseUI` with `Styletron`, plus Foundry-specific theme/tokens on top. Treat that as the default UI foundation. @@ -166,14 +174,14 @@ For all Rivet/RivetKit implementation: 2. 
SQLite is **per actor instance** (per actor key), not a shared backend-global database: - Each actor instance gets its own SQLite DB. - Schema design should assume a single actor instance owns the entire DB. - - Do not add `workspaceId`/`repoId`/`taskId` columns just to "namespace" rows for a given actor instance; use actor state and/or the actor key instead. - - Example: the `task` actor instance already represents `(workspaceId, repoId, taskId)`, so its SQLite tables should not need those columns for primary keys. + - Do not add `organizationId`/`repoId`/`taskId` columns just to "namespace" rows for a given actor instance; use actor state and/or the actor key instead. + - Example: the `task` actor instance already represents `(organizationId, repoId, taskId)`, so its SQLite tables should not need those columns for primary keys. 3. Do not use backend-global SQLite singletons; database access must go through actor `db` providers (`c.db`). -4. The default dependency source for RivetKit is the published `rivetkit` package so workspace installs and CI remain self-contained. +4. The default dependency source for RivetKit is the published `rivetkit` package so monorepo installs and CI remain self-contained. 5. When working on coordinated RivetKit changes, you may temporarily relink to a local checkout instead of the published package. - - Dedicated local checkout for this workspace: `/Users/nathan/conductor/workspaces/task/rivet-checkout` + - Dedicated local checkout for this repo: `/Users/nathan/conductor/workspaces/task/rivet-checkout` - Preferred local link target: `../rivet-checkout/rivetkit-typescript/packages/rivetkit` - - Sub-packages (`@rivetkit/sqlite-vfs`, etc.) resolve transitively from the RivetKit workspace when using the local checkout. + - Sub-packages (`@rivetkit/sqlite-vfs`, etc.) resolve transitively from the RivetKit monorepo when using the local checkout. 6. 
Before using a local checkout, build RivetKit in the rivet repo: ```bash cd ../rivet-checkout/rivetkit-typescript @@ -187,17 +195,17 @@ For all Rivet/RivetKit implementation: - Do not add an extra proxy or manager-specific route layer in the backend. - Let RivetKit own metadata/public endpoint behavior for `/v1/rivet`. -## Workspace + Actor Rules +## Organization + Actor Rules -- Everything is scoped to a workspace. -- Workspace resolution order: `--workspace` flag -> config default -> `"default"`. -- `ControlPlaneActor` is replaced by `WorkspaceActor` (workspace coordinator). -- Every actor key must be prefixed with workspace namespace (`["ws", workspaceId, ...]`). +- Everything is scoped to an organization. +- Organization resolution order: `--organization` flag -> config default -> `"default"`. +- `ControlPlaneActor` is replaced by `OrganizationActor` (organization coordinator). +- Every actor key must be prefixed with organization namespace (`["org", organizationId, ...]`). - CLI/TUI/GUI must use `@sandbox-agent/foundry-client` (`packages/client`) for backend access; `rivetkit/client` imports are only allowed inside `packages/client`. - Do not add custom backend REST endpoints (no `/v1/*` shim layer). - We own the sandbox-agent project; treat sandbox-agent defects as first-party bugs and fix them instead of working around them. - Keep strict single-writer ownership: each table/row has exactly one actor writer. -- Parent actors (`workspace`, `project`, `task`, `history`, `sandbox-instance`) use command-only loops with no timeout. +- Parent actors (`organization`, `repository`, `task`, `history`, `sandbox-instance`) use command-only loops with no timeout. - Periodic syncing lives in dedicated child actors with one timeout cadence each. - Do not build blocking flows that wait on external systems to become ready or complete. Prefer push-based progression driven by actor messages, events, webhooks, or queue/workflow state changes. 
- Use workflows/background commands for any repo sync, sandbox provisioning, agent install, branch restack/rebase, or other multi-step external work. Do not keep user-facing actions/requests open while that work runs. @@ -218,19 +226,25 @@ Action handlers must return fast. The pattern: 3. **Validating preconditions** — check state synchronously in the action handler *before* enqueuing. If a precondition isn't met (e.g. session not ready, task not initialized), throw an error immediately. Do not implicitly provision missing dependencies or poll for readiness inside the action handler. It is the client's responsibility to ensure preconditions are met before calling the action. Examples: -- `createTask` → `wait: true` (returns `{ taskId }`), then enqueue provisioning with `wait: false`. Client sees task appear immediately with pending status, observes `ready` via workspace events. +- `createTask` → `wait: true` (returns `{ taskId }`), then enqueue provisioning with `wait: false`. Client sees task appear immediately with pending status, observes `ready` via organization events. - `sendWorkbenchMessage` → validate session is `ready` (throw if not), enqueue with `wait: false`. Client observes session transition to `running` → `idle` via session events. - `createWorkbenchSession` → `wait: true` (returns `{ tabId }`), enqueue sandbox provisioning with `wait: false`. Client observes `pending_provision` → `ready` via task events. Never use `wait: true` for operations that depend on external readiness, sandbox I/O, agent responses, git network operations, polling loops, or long-running queue drains. Never hold an action open while waiting for an external system to become ready — that is a polling/retry loop in disguise. +### Timeout policy + +All `wait: true` sends must have an explicit `timeout`. Maximum timeout for any `wait: true` send is **10 seconds** (`10_000`). 
If an operation cannot reliably complete within 10 seconds, it must be restructured: write the initial record to the DB, return it to the caller, and continue the work asynchronously with `wait: false`. The client observes completion via push events. + +`wait: false` sends do not need a timeout (the enqueue is instant; the work runs in the workflow loop with its own step-level timeouts). + ### Task creation: resolve metadata before creating the actor -When creating a task, all deterministic metadata (title, branch name) must be resolved synchronously in the parent actor (project) *before* the task actor is created. The task actor must never be created with null `branchName` or `title`. +When creating a task, all deterministic metadata (title, branch name) must be resolved synchronously in the parent actor (repository) *before* the task actor is created. The task actor must never be created with null `branchName` or `title`. - Title is derived from the task description via `deriveFallbackTitle()` — pure string manipulation, no external I/O. -- Branch name is derived from the title via `sanitizeBranchName()` + conflict checking against remote branches and the project's task index. -- The project actor already has the repo clone and task index. Do the git fetch + name resolution there. +- Branch name is derived from the title via `sanitizeBranchName()` + conflict checking against the repository's task index. +- The repository actor already has the task index and GitHub-backed default branch metadata. Resolve the branch name there without local git fetches. - Do not defer naming to a background provision workflow. Do not poll for names to become available. - The `onBranch` path (attaching to an existing branch) and the new-task path should both produce a fully-named task record on return. 
- Actor handle policy: @@ -239,8 +253,7 @@ When creating a task, all deterministic metadata (title, branch name) must be re - Use create semantics only on explicit provisioning/create paths where creating a new actor instance is intended. - `getOrCreate` is a last resort for create paths when an explicit create API is unavailable; never use it in read/command paths. - For long-lived cross-actor links (for example sandbox/session runtime access), persist actor identity (`actorId`) and keep a fallback lookup path by actor id. -- Docker dev: `compose.dev.yaml` mounts a named volume at `/root/.local/share/foundry/repos` to persist backend-managed git clones across restarts. Code must still work if this volume is not present (create directories as needed). -- RivetKit actor `c.state` is durable, but in Docker it is stored under `/root/.local/share/rivetkit`. If that path is not persisted, actor state-derived indexes (for example, in `project` actor state) can be lost after container recreation even when other data still exists. +- RivetKit actor `c.state` is durable, but in Docker it is stored under `/root/.local/share/rivetkit`. If that path is not persisted, actor state-derived indexes can be lost after container recreation even when other data still exists. - Workflow history divergence policy: - Production: never auto-delete actor state to resolve `HistoryDivergedError`; ship explicit workflow migrations (`ctx.removed(...)`, step compatibility). - Development: manual local state reset is allowed as an operator recovery path when migrations are not yet available. @@ -259,9 +272,9 @@ When creating a task, all deterministic metadata (title, branch name) must be re - Secrets (e.g. `OPENAI_API_KEY`, `GITHUB_TOKEN`/`GH_TOKEN`) must be provided via environment variables, never hardcoded in the repo. - `~/misc/env.txt` and `~/misc/the-foundry.env` contain the expected local OpenAI + GitHub OAuth/App config for dev. 
- For local GitHub webhook development, use the configured Smee proxy (`SMEE_URL`) to forward deliveries into `POST /v1/webhooks/github`. Check `.env` / `foundry/.env` if you need the current channel URL. - - If GitHub repos, PRs, or install state are not showing up, verify that the GitHub App is installed for the workspace and that webhook delivery is enabled and healthy. Foundry depends on webhook events for GitHub-backed state; missing webhooks means the product will appear broken. + - If GitHub repos, PRs, or install state are not showing up, verify that the GitHub App is installed for the organization and that webhook delivery is enabled and healthy. Foundry depends on webhook events for GitHub-backed state; missing webhooks means the product will appear broken. - Do not assume `gh auth token` is sufficient for Foundry task provisioning against private repos. Sandbox/bootstrap git clone, push, and PR flows require a repo-capable `GITHUB_TOKEN`/`GH_TOKEN` in the backend container. - - Preferred product behavior for org workspaces is to mint a GitHub App installation token from the workspace installation and inject it into backend/sandbox git operations. Do not rely on an operator's ambient CLI auth as the long-term solution. + - Preferred product behavior for organizations is to mint a GitHub App installation token from the organization installation and inject it into backend/sandbox git operations. Do not rely on an operator's ambient CLI auth as the long-term solution. - Treat client E2E tests in `packages/client/test` as the primary end-to-end source of truth for product behavior. - Keep backend tests small and targeted. Only retain backend-only tests for invariants or persistence rules that are not well-covered through client E2E. - Do not keep large browser E2E suites around in a broken state. If a frontend browser E2E is not maintained and producing signal, remove it until it can be replaced with a reliable test. 
diff --git a/foundry/README.md b/foundry/README.md index f65d93e..47501ef 100644 --- a/foundry/README.md +++ b/foundry/README.md @@ -1,6 +1,6 @@ # Foundry -TypeScript workspace task system powered by RivetKit actors, SQLite/Drizzle state, and OpenTUI. +TypeScript organization task system powered by RivetKit actors, SQLite/Drizzle state, and OpenTUI. **Documentation**: see `../docs/` in the repository root @@ -12,12 +12,12 @@ pnpm install pnpm -w build ``` -## Project Goals +## Repository Goals - **Simple**: There's one screen. It has everything you need. You can use it blindfolded. - **Fast**: No waiting around. - **Collaborative**: Built for fast moving teams that need code reviewed & shipped fast. -- **Pluggable**: Works for small side projects to enterprise teams. +- **Pluggable**: Works for small side repositories to enterprise teams. ## License diff --git a/foundry/compose.dev.yaml b/foundry/compose.dev.yaml index 8dd9f97..b96805e 100644 --- a/foundry/compose.dev.yaml +++ b/foundry/compose.dev.yaml @@ -14,6 +14,10 @@ services: HF_BACKEND_HOST: "0.0.0.0" HF_BACKEND_PORT: "7741" RIVETKIT_STORAGE_PATH: "/root/.local/share/foundry/rivetkit" + RIVET_LOG_ERROR_STACK: "${RIVET_LOG_ERROR_STACK:-1}" + RIVET_LOG_LEVEL: "${RIVET_LOG_LEVEL:-debug}" + RIVET_LOG_TIMESTAMP: "${RIVET_LOG_TIMESTAMP:-1}" + FOUNDRY_LOG_LEVEL: "${FOUNDRY_LOG_LEVEL:-debug}" # Pass through credentials needed for agent execution + PR creation in dev/e2e. # Do not hardcode secrets; set these in your environment when starting compose. ANTHROPIC_API_KEY: "${ANTHROPIC_API_KEY:-}" @@ -64,8 +68,6 @@ services: - "foundry_backend_persist_rivet_node_modules:/app/sdks/persist-rivet/node_modules" - "foundry_backend_typescript_node_modules:/app/sdks/typescript/node_modules" - "foundry_backend_pnpm_store:/root/.local/share/pnpm/store" - # Persist backend-managed local git clones across container restarts. 
- - "foundry_git_repos:/root/.local/share/foundry/repos" # Persist RivetKit local storage across container restarts. - "foundry_rivetkit_storage:/root/.local/share/foundry/rivetkit" @@ -86,7 +88,7 @@ services: # Ensure logs in .foundry/ persist on the host even if we change source mounts later. - "./.foundry:/app/foundry/.foundry" - "../../../task/rivet-checkout:/task/rivet-checkout:ro" - # Use Linux-native workspace dependencies inside the container instead of host node_modules. + # Use Linux-native repo dependencies inside the container instead of host node_modules. - "foundry_node_modules:/app/node_modules" - "foundry_client_node_modules:/app/foundry/packages/client/node_modules" - "foundry_frontend_node_modules:/app/foundry/packages/frontend/node_modules" @@ -121,7 +123,6 @@ volumes: foundry_backend_persist_rivet_node_modules: {} foundry_backend_typescript_node_modules: {} foundry_backend_pnpm_store: {} - foundry_git_repos: {} foundry_rivetkit_storage: {} foundry_node_modules: {} foundry_client_node_modules: {} diff --git a/foundry/compose.preview.yaml b/foundry/compose.preview.yaml index 6213885..aa43b52 100644 --- a/foundry/compose.preview.yaml +++ b/foundry/compose.preview.yaml @@ -24,7 +24,6 @@ services: - "7841:7841" volumes: - "${HOME}/.codex:/root/.codex" - - "foundry_preview_git_repos:/root/.local/share/foundry/repos" - "foundry_preview_rivetkit_storage:/root/.local/share/foundry/rivetkit" frontend: @@ -38,5 +37,4 @@ services: - "4273:4273" volumes: - foundry_preview_git_repos: {} foundry_preview_rivetkit_storage: {} diff --git a/foundry/docker/backend.dev.Dockerfile b/foundry/docker/backend.dev.Dockerfile index 0182aa5..46177c3 100644 --- a/foundry/docker/backend.dev.Dockerfile +++ b/foundry/docker/backend.dev.Dockerfile @@ -2,7 +2,6 @@ FROM oven/bun:1.3 -ARG GIT_SPICE_VERSION=v0.23.0 ARG SANDBOX_AGENT_VERSION=0.3.0 RUN apt-get update \ @@ -18,20 +17,6 @@ RUN apt-get update \ RUN npm install -g pnpm@10.28.2 -RUN set -eux; \ - arch="$(dpkg 
--print-architecture)"; \ - case "$arch" in \ - amd64) spice_arch="x86_64" ;; \ - arm64) spice_arch="aarch64" ;; \ - *) echo "Unsupported architecture for git-spice: $arch" >&2; exit 1 ;; \ - esac; \ - tmpdir="$(mktemp -d)"; \ - curl -fsSL "https://github.com/abhinav/git-spice/releases/download/${GIT_SPICE_VERSION}/git-spice.Linux-${spice_arch}.tar.gz" -o "${tmpdir}/git-spice.tgz"; \ - tar -xzf "${tmpdir}/git-spice.tgz" -C "${tmpdir}"; \ - install -m 0755 "${tmpdir}/gs" /usr/local/bin/gs; \ - ln -sf /usr/local/bin/gs /usr/local/bin/git-spice; \ - rm -rf "${tmpdir}" - RUN curl -fsSL "https://releases.rivet.dev/sandbox-agent/${SANDBOX_AGENT_VERSION}/install.sh" | sh ENV PATH="/root/.local/bin:${PATH}" diff --git a/foundry/docker/backend.preview.Dockerfile b/foundry/docker/backend.preview.Dockerfile index 8c30ae0..00774f2 100644 --- a/foundry/docker/backend.preview.Dockerfile +++ b/foundry/docker/backend.preview.Dockerfile @@ -2,7 +2,6 @@ FROM oven/bun:1.3 -ARG GIT_SPICE_VERSION=v0.23.0 ARG SANDBOX_AGENT_VERSION=0.3.0 RUN apt-get update \ @@ -17,20 +16,6 @@ RUN apt-get update \ && npm install -g pnpm@10.28.2 \ && rm -rf /var/lib/apt/lists/* -RUN set -eux; \ - arch="$(dpkg --print-architecture)"; \ - case "$arch" in \ - amd64) spice_arch="x86_64" ;; \ - arm64) spice_arch="aarch64" ;; \ - *) echo "Unsupported architecture for git-spice: $arch" >&2; exit 1 ;; \ - esac; \ - tmpdir="$(mktemp -d)"; \ - curl -fsSL "https://github.com/abhinav/git-spice/releases/download/${GIT_SPICE_VERSION}/git-spice.Linux-${spice_arch}.tar.gz" -o "${tmpdir}/git-spice.tgz"; \ - tar -xzf "${tmpdir}/git-spice.tgz" -C "${tmpdir}"; \ - install -m 0755 "${tmpdir}/gs" /usr/local/bin/gs; \ - ln -sf /usr/local/bin/gs /usr/local/bin/git-spice; \ - rm -rf "${tmpdir}" - RUN curl -fsSL "https://releases.rivet.dev/sandbox-agent/${SANDBOX_AGENT_VERSION}/install.sh" | sh ENV PATH="/root/.local/bin:${PATH}" diff --git a/foundry/packages/backend/CLAUDE.md b/foundry/packages/backend/CLAUDE.md index 
949db90..aef6cfd 100644 --- a/foundry/packages/backend/CLAUDE.md +++ b/foundry/packages/backend/CLAUDE.md @@ -5,30 +5,29 @@ Keep the backend actor tree aligned with this shape unless we explicitly decide to change it: ```text -WorkspaceActor -├─ HistoryActor(workspace-scoped global feed) -├─ ProjectActor(repo) -│ ├─ ProjectBranchSyncActor -│ ├─ ProjectPrSyncActor +OrganizationActor +├─ HistoryActor(organization-scoped global feed) +├─ GithubDataActor +├─ RepositoryActor(repo) │ └─ TaskActor(task) │ ├─ TaskSessionActor(session) × N │ │ └─ SessionStatusSyncActor(session) × 0..1 │ └─ Task-local workbench state -└─ SandboxInstanceActor(providerId, sandboxId) × N +└─ SandboxInstanceActor(sandboxProviderId, sandboxId) × N ``` ## Ownership Rules -- `WorkspaceActor` is the workspace coordinator and lookup/index owner. -- `HistoryActor` is workspace-scoped. There is one workspace-level history feed. -- `ProjectActor` is the repo coordinator and owns repo-local caches/indexes. +- `OrganizationActor` is the organization coordinator and lookup/index owner. +- `HistoryActor` is organization-scoped. There is one organization-level history feed. +- `RepositoryActor` is the repo coordinator and owns repo-local caches/indexes. - `TaskActor` is one branch. Treat `1 task = 1 branch` once branch assignment is finalized. - `TaskActor` can have many sessions. - `TaskActor` can reference many sandbox instances historically, but should have only one active sandbox/session at a time. - Session unread state and draft prompts are backend-owned workbench state, not frontend-local state. - Branch rename is a real git operation, not just metadata. - `SandboxInstanceActor` stays separate from `TaskActor`; tasks/sessions reference it by identity. -- Sync actors are polling workers only. They feed parent actors and should not become the source of truth. +- The backend stores no local git state. No clones, no refs, no working trees, and no git-spice. 
Repository metadata comes from GitHub API data and webhook events. Any working-tree git operation runs inside a sandbox via `executeInSandbox()`. - When a backend request path must aggregate multiple independent actor calls or reads, prefer bounded parallelism over sequential fan-out when correctness permits. Do not serialize independent work by default. ## Maintenance diff --git a/foundry/packages/backend/src/actors/events.ts b/foundry/packages/backend/src/actors/events.ts index 8872dfa..4a514ad 100644 --- a/foundry/packages/backend/src/actors/events.ts +++ b/foundry/packages/backend/src/actors/events.ts @@ -1,51 +1,51 @@ -import type { TaskStatus, ProviderId } from "@sandbox-agent/foundry-shared"; +import type { TaskStatus, SandboxProviderId } from "@sandbox-agent/foundry-shared"; export interface TaskCreatedEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; - providerId: ProviderId; + sandboxProviderId: SandboxProviderId; branchName: string; title: string; } export interface TaskStatusEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; status: TaskStatus; message: string; } -export interface ProjectSnapshotEvent { - workspaceId: string; +export interface RepositorySnapshotEvent { + organizationId: string; repoId: string; updatedAt: number; } export interface AgentStartedEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; sessionId: string; } export interface AgentIdleEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; sessionId: string; } export interface AgentErrorEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; message: string; } export interface PrCreatedEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; prNumber: number; @@ -53,7 +53,7 @@ export interface PrCreatedEvent { } export interface PrClosedEvent { - workspaceId: string; + 
organizationId: string; repoId: string; taskId: string; prNumber: number; @@ -61,7 +61,7 @@ export interface PrClosedEvent { } export interface PrReviewEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; prNumber: number; @@ -70,7 +70,7 @@ export interface PrReviewEvent { } export interface CiStatusChangedEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; prNumber: number; @@ -81,7 +81,7 @@ export type TaskStepName = "auto_commit" | "push" | "pr_submit"; export type TaskStepStatus = "started" | "completed" | "skipped" | "failed"; export interface TaskStepEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; step: TaskStepName; @@ -90,23 +90,15 @@ export interface TaskStepEvent { } export interface BranchSwitchedEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; branchName: string; } export interface SessionAttachedEvent { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; sessionId: string; } - -export interface BranchSyncedEvent { - workspaceId: string; - repoId: string; - taskId: string; - branchName: string; - strategy: string; -} diff --git a/foundry/packages/backend/src/actors/github-data/db/migrations.ts b/foundry/packages/backend/src/actors/github-data/db/migrations.ts index 528e8fa..87cc76f 100644 --- a/foundry/packages/backend/src/actors/github-data/db/migrations.ts +++ b/foundry/packages/backend/src/actors/github-data/db/migrations.ts @@ -6,6 +6,18 @@ const journal = { tag: "0000_github_data", breakpoints: true, }, + { + idx: 1, + when: 1773810002000, + tag: "0001_default_branch", + breakpoints: true, + }, + { + idx: 2, + when: 1773810300000, + tag: "0002_github_branches", + breakpoints: true, + }, ], } as const; @@ -56,6 +68,16 @@ CREATE TABLE \`github_pull_requests\` ( \`is_draft\` integer NOT NULL, \`updated_at\` integer NOT NULL ); +`, + m0001: `ALTER TABLE 
\`github_repositories\` ADD \`default_branch\` text NOT NULL DEFAULT 'main'; +`, + m0002: `CREATE TABLE \`github_branches\` ( + \`branch_id\` text PRIMARY KEY NOT NULL, + \`repo_id\` text NOT NULL, + \`branch_name\` text NOT NULL, + \`commit_sha\` text NOT NULL, + \`updated_at\` integer NOT NULL +); `, } as const, }; diff --git a/foundry/packages/backend/src/actors/github-data/db/schema.ts b/foundry/packages/backend/src/actors/github-data/db/schema.ts index 9527fc6..fe37863 100644 --- a/foundry/packages/backend/src/actors/github-data/db/schema.ts +++ b/foundry/packages/backend/src/actors/github-data/db/schema.ts @@ -16,6 +16,15 @@ export const githubRepositories = sqliteTable("github_repositories", { fullName: text("full_name").notNull(), cloneUrl: text("clone_url").notNull(), private: integer("private").notNull(), + defaultBranch: text("default_branch").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +export const githubBranches = sqliteTable("github_branches", { + branchId: text("branch_id").notNull().primaryKey(), + repoId: text("repo_id").notNull(), + branchName: text("branch_name").notNull(), + commitSha: text("commit_sha").notNull(), updatedAt: integer("updated_at").notNull(), }); diff --git a/foundry/packages/backend/src/actors/github-data/index.ts b/foundry/packages/backend/src/actors/github-data/index.ts index 6b7d1f8..accfb5d 100644 --- a/foundry/packages/backend/src/actors/github-data/index.ts +++ b/foundry/packages/backend/src/actors/github-data/index.ts @@ -3,16 +3,16 @@ import { eq } from "drizzle-orm"; import { actor } from "rivetkit"; import type { FoundryOrganization } from "@sandbox-agent/foundry-shared"; import { getActorRuntimeContext } from "../context.js"; -import { getOrCreateWorkspace, getTask } from "../handles.js"; +import { getOrCreateOrganization, getTask } from "../handles.js"; import { repoIdFromRemote } from "../../services/repo.js"; -import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js"; +import { 
resolveOrganizationGithubAuth } from "../../services/github-auth.js"; import { githubDataDb } from "./db/db.js"; -import { githubMembers, githubMeta, githubPullRequests, githubRepositories } from "./db/schema.js"; +import { githubBranches, githubMembers, githubMeta, githubPullRequests, githubRepositories } from "./db/schema.js"; const META_ROW_ID = 1; interface GithubDataInput { - workspaceId: string; + organizationId: string; } interface GithubMemberRecord { @@ -28,6 +28,13 @@ interface GithubRepositoryRecord { fullName: string; cloneUrl: string; private: boolean; + defaultBranch: string; +} + +interface GithubBranchRecord { + repoId: string; + branchName: string; + commitSha: string; } interface GithubPullRequestRecord { @@ -156,21 +163,21 @@ async function writeMeta(c: any, patch: Partial [row.prId, row])); const afterById = new Map(afterRows.map((row) => [row.prId, row])); @@ -258,7 +282,7 @@ async function emitPullRequestChangeEvents(c: any, beforeRows: any[], afterRows: if (!changed) { continue; } - await workspace.applyOpenPullRequestUpdate({ + await organization.applyOpenPullRequestUpdate({ pullRequest: pullRequestSummaryFromRow(row), }); await refreshTaskSummaryForBranch(c, row.repoId, row.headRefName); @@ -268,14 +292,14 @@ async function emitPullRequestChangeEvents(c: any, beforeRows: any[], afterRows: if (afterById.has(prId)) { continue; } - await workspace.removeOpenPullRequest({ prId }); + await organization.removeOpenPullRequest({ prId }); await refreshTaskSummaryForBranch(c, row.repoId, row.headRefName); } } async function autoArchiveTaskForClosedPullRequest(c: any, row: any) { - const workspace = await getOrCreateWorkspace(c, c.state.workspaceId); - const match = await workspace.findTaskForGithubBranch({ + const organization = await getOrCreateOrganization(c, c.state.organizationId); + const match = await organization.findTaskForGithubBranch({ repoId: row.repoId, branchName: row.headRefName, }); @@ -283,7 +307,7 @@ async function 
autoArchiveTaskForClosedPullRequest(c: any, row: any) { return; } try { - const task = getTask(c, c.state.workspaceId, row.repoId, match.taskId); + const task = getTask(c, c.state.organizationId, row.repoId, match.taskId); await task.archive({ reason: `PR ${String(row.state).toLowerCase()}` }); } catch { // Best-effort only. Task summary refresh will still clear the PR state. @@ -391,6 +415,69 @@ async function resolvePullRequests( })); } +async function listRepositoryBranchesForContext( + context: Awaited>, + repository: GithubRepositoryRecord, +): Promise { + const { appShell } = getActorRuntimeContext(); + let branches: Array<{ name: string; commitSha: string }> = []; + + if (context.installationId != null) { + try { + branches = await appShell.github.listInstallationRepositoryBranches(context.installationId, repository.fullName); + } catch (error) { + if (!context.accessToken) { + throw error; + } + } + } + + if (branches.length === 0 && context.accessToken) { + branches = await appShell.github.listUserRepositoryBranches(context.accessToken, repository.fullName); + } + + const repoId = repoIdFromRemote(repository.cloneUrl); + return branches.map((branch) => ({ + repoId, + branchName: branch.name, + commitSha: branch.commitSha, + })); +} + +async function resolveBranches( + _c: any, + context: Awaited>, + repositories: GithubRepositoryRecord[], +): Promise { + return (await Promise.all(repositories.map((repository) => listRepositoryBranchesForContext(context, repository)))).flat(); +} + +async function refreshRepositoryBranches( + c: any, + context: Awaited>, + repository: GithubRepositoryRecord, + updatedAt: number, +): Promise { + const nextBranches = await listRepositoryBranchesForContext(context, repository); + await c.db + .delete(githubBranches) + .where(eq(githubBranches.repoId, repoIdFromRemote(repository.cloneUrl))) + .run(); + + for (const branch of nextBranches) { + await c.db + .insert(githubBranches) + .values({ + branchId: 
`${branch.repoId}:${branch.branchName}`, + repoId: branch.repoId, + branchName: branch.branchName, + commitSha: branch.commitSha, + updatedAt, + }) + .run(); + } +} + async function readAllPullRequestRows(c: any) { return await c.db.select().from(githubPullRequests).all(); } @@ -409,15 +496,17 @@ async function runFullSync(c: any, input: FullSyncInput = {}) { }); const repositories = await resolveRepositories(c, context); + const branches = await resolveBranches(c, context, repositories); const members = await resolveMembers(c, context); const pullRequests = await resolvePullRequests(c, context, repositories); await replaceRepositories(c, repositories, startedAt); + await replaceBranches(c, branches, startedAt); await replaceMembers(c, members, startedAt); await replacePullRequests(c, pullRequests); - const workspace = await getOrCreateWorkspace(c, c.state.workspaceId); - await workspace.applyGithubDataProjection({ + const organization = await getOrCreateOrganization(c, c.state.organizationId); + await organization.applyGithubDataProjection({ connectedAccount: context.connectedAccount, installationStatus: context.installationStatus, installationId: context.installationId, @@ -455,16 +544,18 @@ export const githubData = actor({ actionTimeout: 5 * 60_000, }, createState: (_c, input: GithubDataInput) => ({ - workspaceId: input.workspaceId, + organizationId: input.organizationId, }), actions: { async getSummary(c) { const repositories = await c.db.select().from(githubRepositories).all(); + const branches = await c.db.select().from(githubBranches).all(); const members = await c.db.select().from(githubMembers).all(); const pullRequests = await c.db.select().from(githubPullRequests).all(); return { ...(await readMeta(c)), repositoryCount: repositories.length, + branchCount: branches.length, memberCount: members.length, pullRequestCount: pullRequests.length, }; @@ -477,14 +568,39 @@ export const githubData = actor({ fullName: row.fullName, cloneUrl: row.cloneUrl, private: 
Boolean(row.private), + defaultBranch: row.defaultBranch, })); }, + async getRepository(c, input: { repoId: string }) { + const row = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, input.repoId)).get(); + if (!row) { + return null; + } + return { + repoId: row.repoId, + fullName: row.fullName, + cloneUrl: row.cloneUrl, + private: Boolean(row.private), + defaultBranch: row.defaultBranch, + }; + }, + async listPullRequestsForRepository(c, input: { repoId: string }) { const rows = await c.db.select().from(githubPullRequests).where(eq(githubPullRequests.repoId, input.repoId)).all(); return rows.map(pullRequestSummaryFromRow); }, + async listBranchesForRepository(c, input: { repoId: string }) { + const rows = await c.db.select().from(githubBranches).where(eq(githubBranches.repoId, input.repoId)).all(); + return rows + .map((row) => ({ + branchName: row.branchName, + commitSha: row.commitSha, + })) + .sort((left, right) => left.branchName.localeCompare(right.branchName)); + }, + async listOpenPullRequests(c) { const rows = await c.db.select().from(githubPullRequests).all(); return rows.map(pullRequestSummaryFromRow).sort((left, right) => right.updatedAtMs - left.updatedAtMs); @@ -539,6 +655,7 @@ export const githubData = actor({ fullName: repository.fullName, cloneUrl: repository.cloneUrl, private: repository.private ? 1 : 0, + defaultBranch: repository.defaultBranch, updatedAt, }) .onConflictDoUpdate({ @@ -547,13 +664,25 @@ export const githubData = actor({ fullName: repository.fullName, cloneUrl: repository.cloneUrl, private: repository.private ? 
1 : 0, + defaultBranch: repository.defaultBranch, updatedAt, }, }) .run(); + await refreshRepositoryBranches( + c, + context, + { + fullName: repository.fullName, + cloneUrl: repository.cloneUrl, + private: repository.private, + defaultBranch: repository.defaultBranch, + }, + updatedAt, + ); - const workspace = await getOrCreateWorkspace(c, c.state.workspaceId); - await workspace.applyGithubRepositoryProjection({ + const organization = await getOrCreateOrganization(c, c.state.organizationId); + await organization.applyGithubRepositoryProjection({ repoId: input.repoId, remoteUrl: repository.cloneUrl, }); @@ -562,6 +691,7 @@ export const githubData = actor({ fullName: repository.fullName, cloneUrl: repository.cloneUrl, private: repository.private, + defaultBranch: repository.defaultBranch, }; }, @@ -656,6 +786,7 @@ export const githubData = actor({ async clearState(c, input: ClearStateInput) { const beforeRows = await readAllPullRequestRows(c); await c.db.delete(githubPullRequests).run(); + await c.db.delete(githubBranches).run(); await c.db.delete(githubRepositories).run(); await c.db.delete(githubMembers).run(); await writeMeta(c, { @@ -667,8 +798,8 @@ export const githubData = actor({ lastSyncAt: null, }); - const workspace = await getOrCreateWorkspace(c, c.state.workspaceId); - await workspace.applyGithubDataProjection({ + const organization = await getOrCreateOrganization(c, c.state.organizationId); + await organization.applyGithubDataProjection({ connectedAccount: input.connectedAccount, installationStatus: input.installationStatus, installationId: input.installationId, @@ -683,6 +814,7 @@ export const githubData = actor({ async handlePullRequestWebhook(c, input: PullRequestWebhookInput) { const beforeRows = await readAllPullRequestRows(c); const repoId = repoIdFromRemote(input.repository.cloneUrl); + const currentRepository = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, repoId)).get(); const updatedAt = Date.now(); const 
state = normalizePrStatus(input.pullRequest); const prId = `${repoId}#${input.pullRequest.number}`; @@ -694,6 +826,7 @@ export const githubData = actor({ fullName: input.repository.fullName, cloneUrl: input.repository.cloneUrl, private: input.repository.private ? 1 : 0, + defaultBranch: currentRepository?.defaultBranch ?? input.pullRequest.baseRefName ?? "main", updatedAt, }) .onConflictDoUpdate({ @@ -702,6 +835,7 @@ export const githubData = actor({ fullName: input.repository.fullName, cloneUrl: input.repository.cloneUrl, private: input.repository.private ? 1 : 0, + defaultBranch: currentRepository?.defaultBranch ?? input.pullRequest.baseRefName ?? "main", updatedAt, }, }) @@ -753,8 +887,8 @@ export const githubData = actor({ lastSyncAt: updatedAt, }); - const workspace = await getOrCreateWorkspace(c, c.state.workspaceId); - await workspace.applyGithubRepositoryProjection({ + const organization = await getOrCreateOrganization(c, c.state.organizationId); + await organization.applyGithubRepositoryProjection({ repoId, remoteUrl: input.repository.cloneUrl, }); diff --git a/foundry/packages/backend/src/actors/handles.ts b/foundry/packages/backend/src/actors/handles.ts index b488bf7..bd17fb0 100644 --- a/foundry/packages/backend/src/actors/handles.ts +++ b/foundry/packages/backend/src/actors/handles.ts @@ -1,12 +1,12 @@ -import { authUserKey, githubDataKey, taskKey, historyKey, projectBranchSyncKey, projectKey, taskSandboxKey, workspaceKey } from "./keys.js"; +import { authUserKey, githubDataKey, historyKey, organizationKey, repositoryKey, taskKey, taskSandboxKey } from "./keys.js"; export function actorClient(c: any) { return c.client(); } -export async function getOrCreateWorkspace(c: any, workspaceId: string) { - return await actorClient(c).workspace.getOrCreate(workspaceKey(workspaceId), { - createWithInput: workspaceId, +export async function getOrCreateOrganization(c: any, organizationId: string) { + return await 
actorClient(c).organization.getOrCreate(organizationKey(organizationId), { + createWithInput: organizationId, }); } @@ -20,76 +20,61 @@ export function getAuthUser(c: any, userId: string) { return actorClient(c).authUser.get(authUserKey(userId)); } -export async function getOrCreateProject(c: any, workspaceId: string, repoId: string, remoteUrl: string) { - return await actorClient(c).project.getOrCreate(projectKey(workspaceId, repoId), { +export async function getOrCreateRepository(c: any, organizationId: string, repoId: string, remoteUrl: string) { + return await actorClient(c).repository.getOrCreate(repositoryKey(organizationId, repoId), { createWithInput: { - workspaceId, + organizationId, repoId, remoteUrl, }, }); } -export function getProject(c: any, workspaceId: string, repoId: string) { - return actorClient(c).project.get(projectKey(workspaceId, repoId)); +export function getRepository(c: any, organizationId: string, repoId: string) { + return actorClient(c).repository.get(repositoryKey(organizationId, repoId)); } -export function getTask(c: any, workspaceId: string, repoId: string, taskId: string) { - return actorClient(c).task.get(taskKey(workspaceId, repoId, taskId)); +export function getTask(c: any, organizationId: string, repoId: string, taskId: string) { + return actorClient(c).task.get(taskKey(organizationId, repoId, taskId)); } -export async function getOrCreateTask(c: any, workspaceId: string, repoId: string, taskId: string, createWithInput: Record) { - return await actorClient(c).task.getOrCreate(taskKey(workspaceId, repoId, taskId), { +export async function getOrCreateTask(c: any, organizationId: string, repoId: string, taskId: string, createWithInput: Record) { + return await actorClient(c).task.getOrCreate(taskKey(organizationId, repoId, taskId), { createWithInput, }); } -export async function getOrCreateHistory(c: any, workspaceId: string, repoId: string) { - return await actorClient(c).history.getOrCreate(historyKey(workspaceId, repoId), { 
+export async function getOrCreateHistory(c: any, organizationId: string, repoId: string) { + return await actorClient(c).history.getOrCreate(historyKey(organizationId, repoId), { createWithInput: { - workspaceId, + organizationId, repoId, }, }); } -export async function getOrCreateGithubData(c: any, workspaceId: string) { - return await actorClient(c).githubData.getOrCreate(githubDataKey(workspaceId), { +export async function getOrCreateGithubData(c: any, organizationId: string) { + return await actorClient(c).githubData.getOrCreate(githubDataKey(organizationId), { createWithInput: { - workspaceId, + organizationId, }, }); } -export function getGithubData(c: any, workspaceId: string) { - return actorClient(c).githubData.get(githubDataKey(workspaceId)); +export function getGithubData(c: any, organizationId: string) { + return actorClient(c).githubData.get(githubDataKey(organizationId)); } -export async function getOrCreateProjectBranchSync(c: any, workspaceId: string, repoId: string, repoPath: string, intervalMs: number) { - return await actorClient(c).projectBranchSync.getOrCreate(projectBranchSyncKey(workspaceId, repoId), { - createWithInput: { - workspaceId, - repoId, - repoPath, - intervalMs, - }, - }); +export function getTaskSandbox(c: any, organizationId: string, sandboxId: string) { + return actorClient(c).taskSandbox.get(taskSandboxKey(organizationId, sandboxId)); } -export function getTaskSandbox(c: any, workspaceId: string, sandboxId: string) { - return actorClient(c).taskSandbox.get(taskSandboxKey(workspaceId, sandboxId)); -} - -export async function getOrCreateTaskSandbox(c: any, workspaceId: string, sandboxId: string, createWithInput?: Record) { - return await actorClient(c).taskSandbox.getOrCreate(taskSandboxKey(workspaceId, sandboxId), { +export async function getOrCreateTaskSandbox(c: any, organizationId: string, sandboxId: string, createWithInput?: Record) { + return await actorClient(c).taskSandbox.getOrCreate(taskSandboxKey(organizationId, 
sandboxId), { createWithInput, }); } -export function selfProjectBranchSync(c: any) { - return actorClient(c).projectBranchSync.getForId(c.actorId); -} - export function selfHistory(c: any) { return actorClient(c).history.getForId(c.actorId); } @@ -98,12 +83,12 @@ export function selfTask(c: any) { return actorClient(c).task.getForId(c.actorId); } -export function selfWorkspace(c: any) { - return actorClient(c).workspace.getForId(c.actorId); +export function selfOrganization(c: any) { + return actorClient(c).organization.getForId(c.actorId); } -export function selfProject(c: any) { - return actorClient(c).project.getForId(c.actorId); +export function selfRepository(c: any) { + return actorClient(c).repository.getForId(c.actorId); } export function selfAuthUser(c: any) { diff --git a/foundry/packages/backend/src/actors/history/index.ts b/foundry/packages/backend/src/actors/history/index.ts index d2caa12..fa1373b 100644 --- a/foundry/packages/backend/src/actors/history/index.ts +++ b/foundry/packages/backend/src/actors/history/index.ts @@ -8,7 +8,7 @@ import { historyDb } from "./db/db.js"; import { events } from "./db/schema.js"; export interface HistoryInput { - workspaceId: string; + organizationId: string; repoId: string; } @@ -70,7 +70,7 @@ export const history = actor({ icon: "database", }, createState: (_c, input: HistoryInput) => ({ - workspaceId: input.workspaceId, + organizationId: input.organizationId, repoId: input.repoId, }), actions: { @@ -106,7 +106,7 @@ export const history = actor({ return rows.map((row) => ({ ...row, - workspaceId: c.state.workspaceId, + organizationId: c.state.organizationId, repoId: c.state.repoId, })); }, diff --git a/foundry/packages/backend/src/actors/index.ts b/foundry/packages/backend/src/actors/index.ts index 4f67459..2f9e566 100644 --- a/foundry/packages/backend/src/actors/index.ts +++ b/foundry/packages/backend/src/actors/index.ts @@ -3,10 +3,9 @@ import { setup } from "rivetkit"; import { githubData } from 
"./github-data/index.js"; import { task } from "./task/index.js"; import { history } from "./history/index.js"; -import { projectBranchSync } from "./project-branch-sync/index.js"; -import { project } from "./project/index.js"; +import { repository } from "./repository/index.js"; import { taskSandbox } from "./sandbox/index.js"; -import { workspace } from "./workspace/index.js"; +import { organization } from "./organization/index.js"; import { logger } from "../logging.js"; const RUNNER_VERSION = Math.floor(Date.now() / 1000); @@ -23,13 +22,12 @@ export const registry = setup({ }, use: { authUser, - workspace, - project, + organization, + repository, task, taskSandbox, history, githubData, - projectBranchSync, }, }); @@ -40,7 +38,6 @@ export * from "./github-data/index.js"; export * from "./task/index.js"; export * from "./history/index.js"; export * from "./keys.js"; -export * from "./project-branch-sync/index.js"; -export * from "./project/index.js"; +export * from "./repository/index.js"; export * from "./sandbox/index.js"; -export * from "./workspace/index.js"; +export * from "./organization/index.js"; diff --git a/foundry/packages/backend/src/actors/keys.ts b/foundry/packages/backend/src/actors/keys.ts index 1dfaa48..59e669e 100644 --- a/foundry/packages/backend/src/actors/keys.ts +++ b/foundry/packages/backend/src/actors/keys.ts @@ -1,33 +1,29 @@ export type ActorKey = string[]; -export function workspaceKey(workspaceId: string): ActorKey { - return ["ws", workspaceId]; +export function organizationKey(organizationId: string): ActorKey { + return ["org", organizationId]; } export function authUserKey(userId: string): ActorKey { - return ["ws", "app", "user", userId]; + return ["org", "app", "user", userId]; } -export function projectKey(workspaceId: string, repoId: string): ActorKey { - return ["ws", workspaceId, "project", repoId]; +export function repositoryKey(organizationId: string, repoId: string): ActorKey { + return ["org", organizationId, 
"repository", repoId]; } -export function taskKey(workspaceId: string, repoId: string, taskId: string): ActorKey { - return ["ws", workspaceId, "project", repoId, "task", taskId]; +export function taskKey(organizationId: string, repoId: string, taskId: string): ActorKey { + return ["org", organizationId, "repository", repoId, "task", taskId]; } -export function taskSandboxKey(workspaceId: string, sandboxId: string): ActorKey { - return ["ws", workspaceId, "sandbox", sandboxId]; +export function taskSandboxKey(organizationId: string, sandboxId: string): ActorKey { + return ["org", organizationId, "sandbox", sandboxId]; } -export function historyKey(workspaceId: string, repoId: string): ActorKey { - return ["ws", workspaceId, "project", repoId, "history"]; +export function historyKey(organizationId: string, repoId: string): ActorKey { + return ["org", organizationId, "repository", repoId, "history"]; } -export function githubDataKey(workspaceId: string): ActorKey { - return ["ws", workspaceId, "github-data"]; -} - -export function projectBranchSyncKey(workspaceId: string, repoId: string): ActorKey { - return ["ws", workspaceId, "project", repoId, "branch-sync"]; +export function githubDataKey(organizationId: string): ActorKey { + return ["org", organizationId, "github-data"]; } diff --git a/foundry/packages/backend/src/actors/logging.ts b/foundry/packages/backend/src/actors/logging.ts index 6a4616a..afc7d37 100644 --- a/foundry/packages/backend/src/actors/logging.ts +++ b/foundry/packages/backend/src/actors/logging.ts @@ -2,7 +2,11 @@ import { logger } from "../logging.js"; export function resolveErrorMessage(error: unknown): string { if (error instanceof Error) { - return error.message; + let msg = error.message; + if (error.cause) { + msg += ` [cause: ${resolveErrorMessage(error.cause)}]`; + } + return msg; } return String(error); } diff --git a/foundry/packages/backend/src/actors/workspace/actions.ts b/foundry/packages/backend/src/actors/organization/actions.ts 
similarity index 62% rename from foundry/packages/backend/src/actors/workspace/actions.ts rename to foundry/packages/backend/src/actors/organization/actions.ts index 8782a77..d83e776 100644 --- a/foundry/packages/backend/src/actors/workspace/actions.ts +++ b/foundry/packages/backend/src/actors/organization/actions.ts @@ -1,18 +1,14 @@ // @ts-nocheck -import { setTimeout as delay } from "node:timers/promises"; import { desc, eq } from "drizzle-orm"; import { Loop } from "rivetkit/workflow"; import type { - AddRepoInput, CreateTaskInput, HistoryEvent, HistoryQueryInput, ListTasksInput, - ProviderId, + SandboxProviderId, RepoOverview, RepoRecord, - RepoStackActionInput, - RepoStackActionResult, StarSandboxAgentRepoInput, StarSandboxAgentRepoResult, SwitchResult, @@ -26,37 +22,33 @@ import type { TaskWorkbenchSelectInput, TaskWorkbenchSetSessionUnreadInput, TaskWorkbenchSendMessageInput, - TaskWorkbenchTabInput, + TaskWorkbenchSessionInput, TaskWorkbenchUpdateDraftInput, WorkbenchOpenPrSummary, - WorkbenchRepoSummary, + WorkbenchRepositorySummary, WorkbenchSessionSummary, WorkbenchTaskSummary, - WorkspaceEvent, - WorkspaceSummarySnapshot, - WorkspaceUseInput, + OrganizationEvent, + OrganizationSummarySnapshot, + OrganizationUseInput, } from "@sandbox-agent/foundry-shared"; import { getActorRuntimeContext } from "../context.js"; -import { getGithubData, getOrCreateGithubData, getTask, getOrCreateHistory, getOrCreateProject, selfWorkspace } from "../handles.js"; +import { getGithubData, getOrCreateGithubData, getTask, getOrCreateHistory, getOrCreateRepository, selfOrganization } from "../handles.js"; import { logActorWarning, resolveErrorMessage } from "../logging.js"; -import { availableSandboxProviderIds, defaultSandboxProviderId } from "../../sandbox-config.js"; -import { normalizeRemoteUrl, repoIdFromRemote } from "../../services/repo.js"; -import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js"; -import { organizationProfile, taskLookup, repos, 
providerProfiles, taskSummaries } from "./db/schema.js"; +import { defaultSandboxProviderId } from "../../sandbox-config.js"; +import { repoIdFromRemote } from "../../services/repo.js"; +import { resolveOrganizationGithubAuth } from "../../services/github-auth.js"; +import { organizationProfile, taskLookup, repos, taskSummaries } from "./db/schema.js"; import { agentTypeForModel } from "../task/workbench.js"; import { expectQueueResponse } from "../../services/queue.js"; -import { workspaceAppActions } from "./app-shell.js"; +import { organizationAppActions } from "./app-shell.js"; -interface WorkspaceState { - workspaceId: string; -} - -interface RefreshProviderProfilesCommand { - providerId?: ProviderId; +interface OrganizationState { + organizationId: string; } interface GetTaskInput { - workspaceId: string; + organizationId: string; taskId: string; } @@ -65,32 +57,30 @@ interface TaskProxyActionInput extends GetTaskInput { } interface RepoOverviewInput { - workspaceId: string; + organizationId: string; repoId: string; } -const WORKSPACE_QUEUE_NAMES = [ - "workspace.command.addRepo", - "workspace.command.createTask", - "workspace.command.refreshProviderProfiles", - "workspace.command.syncGithubOrganizationRepos", - "workspace.command.syncGithubSession", +const ORGANIZATION_QUEUE_NAMES = [ + "organization.command.createTask", + "organization.command.syncGithubOrganizationRepos", + "organization.command.syncGithubSession", ] as const; const SANDBOX_AGENT_REPO = "rivet-dev/sandbox-agent"; -type WorkspaceQueueName = (typeof WORKSPACE_QUEUE_NAMES)[number]; +type OrganizationQueueName = (typeof ORGANIZATION_QUEUE_NAMES)[number]; -export { WORKSPACE_QUEUE_NAMES }; +export { ORGANIZATION_QUEUE_NAMES }; -export function workspaceWorkflowQueueName(name: WorkspaceQueueName): WorkspaceQueueName { +export function organizationWorkflowQueueName(name: OrganizationQueueName): OrganizationQueueName { return name; } const ORGANIZATION_PROFILE_ROW_ID = "profile"; -function 
assertWorkspace(c: { state: WorkspaceState }, workspaceId: string): void { - if (workspaceId !== c.state.workspaceId) { - throw new Error(`Workspace actor mismatch: actor=${c.state.workspaceId} command=${workspaceId}`); +function assertOrganization(c: { state: OrganizationState }, organizationId: string): void { + if (organizationId !== c.state.organizationId) { + throw new Error(`Organization actor mismatch: actor=${c.state.organizationId} command=${organizationId}`); } } @@ -136,12 +126,12 @@ async function collectAllTaskSummaries(c: any): Promise { const all: TaskSummary[] = []; for (const row of repoRows) { try { - const project = await getOrCreateProject(c, c.state.workspaceId, row.repoId, row.remoteUrl); - const snapshot = await project.listTaskSummaries({ includeArchived: true }); + const repository = await getOrCreateRepository(c, c.state.organizationId, row.repoId, row.remoteUrl); + const snapshot = await repository.listTaskSummaries({ includeArchived: true }); all.push(...snapshot); } catch (error) { - logActorWarning("workspace", "failed collecting tasks for repo", { - workspaceId: c.state.workspaceId, + logActorWarning("organization", "failed collecting tasks for repo", { + organizationId: c.state.organizationId, repoId: row.repoId, error: resolveErrorMessage(error), }); @@ -166,7 +156,7 @@ function repoLabelFromRemote(remoteUrl: string): string { return remoteUrl; } -function buildRepoSummary(repoRow: { repoId: string; remoteUrl: string; updatedAt: number }, taskRows: WorkbenchTaskSummary[]): WorkbenchRepoSummary { +function buildRepoSummary(repoRow: { repoId: string; remoteUrl: string; updatedAt: number }, taskRows: WorkbenchTaskSummary[]): WorkbenchRepositorySummary { const repoTasks = taskRows.filter((task) => task.repoId === repoRow.repoId); const latestActivityMs = repoTasks.reduce((latest, task) => Math.max(latest, task.updatedAtMs), repoRow.updatedAt); @@ -207,14 +197,14 @@ function taskSummaryFromRow(row: any): WorkbenchTaskSummary { } async 
function listOpenPullRequestsSnapshot(c: any, taskRows: WorkbenchTaskSummary[]): Promise { - const githubData = getGithubData(c, c.state.workspaceId); + const githubData = getGithubData(c, c.state.organizationId); const openPullRequests = await githubData.listOpenPullRequests({}).catch(() => []); const claimedBranches = new Set(taskRows.filter((task) => task.branch).map((task) => `${task.repoId}:${task.branch}`)); return openPullRequests.filter((pullRequest: WorkbenchOpenPrSummary) => !claimedBranches.has(`${pullRequest.repoId}:${pullRequest.headRefName}`)); } -async function reconcileWorkbenchProjection(c: any): Promise { +async function reconcileWorkbenchProjection(c: any): Promise { const repoRows = await c.db .select({ repoId: repos.repoId, remoteUrl: repos.remoteUrl, updatedAt: repos.updatedAt }) .from(repos) @@ -224,12 +214,12 @@ async function reconcileWorkbenchProjection(c: any): Promise right.updatedAtMs - left.updatedAtMs); return { - workspaceId: c.state.workspaceId, + organizationId: c.state.organizationId, repos: repoRows.map((row) => buildRepoSummary(row, taskRows)).sort((left, right) => right.latestActivityMs - left.latestActivityMs), taskSummaries: taskRows, openPullRequests: await listOpenPullRequestsSnapshot(c, taskRows), @@ -269,33 +259,15 @@ async function reconcileWorkbenchProjection(c: any): Promise { - const startedAt = Date.now(); - - for (;;) { - const record = await task.get(); - if (record?.branchName && record?.title) { - return record; - } - if (record?.status === "error") { - throw new Error("task initialization failed before the workbench session was ready"); - } - if (Date.now() - startedAt > timeoutMs) { - throw new Error("timed out waiting for task initialization"); - } - await delay(1_000); - } + return getTask(c, c.state.organizationId, repoId, taskId); } /** - * Reads the workspace sidebar snapshot from the workspace actor's local SQLite + * Reads the organization sidebar snapshot from the organization actor's local SQLite * 
plus the org-scoped GitHub actor for open PRs. Task actors still push * summary updates into `task_summaries`, so the hot read path stays bounded. */ -async function getWorkspaceSummarySnapshot(c: any): Promise { +async function getOrganizationSummarySnapshot(c: any): Promise { const repoRows = await c.db .select({ repoId: repos.repoId, @@ -309,7 +281,7 @@ async function getWorkspaceSummarySnapshot(c: any): Promise buildRepoSummary(row, summaries)).sort((left, right) => right.latestActivityMs - left.latestActivityMs), taskSummaries: summaries, openPullRequests: await listOpenPullRequestsSnapshot(c, summaries), @@ -323,61 +295,14 @@ async function broadcastRepoSummary( ): Promise { const matchingTaskRows = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, repoRow.repoId)).all(); const repo = buildRepoSummary(repoRow, matchingTaskRows.map(taskSummaryFromRow)); - c.broadcast("workspaceUpdated", { type, repo } satisfies WorkspaceEvent); -} - -async function addRepoMutation(c: any, input: AddRepoInput): Promise { - assertWorkspace(c, input.workspaceId); - - const remoteUrl = normalizeRemoteUrl(input.remoteUrl); - if (!remoteUrl) { - throw new Error("remoteUrl is required"); - } - - const { driver } = getActorRuntimeContext(); - const auth = await resolveWorkspaceGithubAuth(c, c.state.workspaceId); - await driver.git.validateRemote(remoteUrl, { githubToken: auth?.githubToken ?? null }); - - const repoId = repoIdFromRemote(remoteUrl); - const now = Date.now(); - const existing = await c.db.select({ repoId: repos.repoId }).from(repos).where(eq(repos.repoId, repoId)).get(); - - await c.db - .insert(repos) - .values({ - repoId, - remoteUrl, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: repos.repoId, - set: { - remoteUrl, - updatedAt: now, - }, - }) - .run(); - - await broadcastRepoSummary(c, existing ? 
"repoUpdated" : "repoAdded", { - repoId, - remoteUrl, - updatedAt: now, - }); - return { - workspaceId: c.state.workspaceId, - repoId, - remoteUrl, - createdAt: now, - updatedAt: now, - }; + c.broadcast("organizationUpdated", { type, repo } satisfies OrganizationEvent); } async function createTaskMutation(c: any, input: CreateTaskInput): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const { config } = getActorRuntimeContext(); - const providerId = input.providerId ?? defaultSandboxProviderId(config); + const sandboxProviderId = input.sandboxProviderId ?? defaultSandboxProviderId(config); const repoId = input.repoId; const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, repoId)).get(); @@ -386,27 +311,11 @@ async function createTaskMutation(c: any, input: CreateTaskInput): Promise { - const body = command ?? {}; - const { config } = getActorRuntimeContext(); - const providerIds: ProviderId[] = body.providerId ? 
[body.providerId] : availableSandboxProviderIds(config); - - for (const providerId of providerIds) { - await c.db - .insert(providerProfiles) - .values({ - providerId, - profileJson: JSON.stringify({ providerId }), - updatedAt: Date.now(), - }) - .onConflictDoUpdate({ - target: providerProfiles.providerId, - set: { - profileJson: JSON.stringify({ providerId }), - updatedAt: Date.now(), - }, - }) - .run(); - } -} - -export async function runWorkspaceWorkflow(ctx: any): Promise { - await ctx.loop("workspace-command-loop", async (loopCtx: any) => { - const msg = await loopCtx.queue.next("next-workspace-command", { - names: [...WORKSPACE_QUEUE_NAMES], +export async function runOrganizationWorkflow(ctx: any): Promise { + await ctx.loop("organization-command-loop", async (loopCtx: any) => { + const msg = await loopCtx.queue.next("next-organization-command", { + names: [...ORGANIZATION_QUEUE_NAMES], completable: true, }); if (!msg) { @@ -477,19 +362,9 @@ export async function runWorkspaceWorkflow(ctx: any): Promise { } try { - if (msg.name === "workspace.command.addRepo") { + if (msg.name === "organization.command.createTask") { const result = await loopCtx.step({ - name: "workspace-add-repo", - timeout: 60_000, - run: async () => addRepoMutation(loopCtx, msg.body as AddRepoInput), - }); - await msg.complete(result); - return Loop.continue(undefined); - } - - if (msg.name === "workspace.command.createTask") { - const result = await loopCtx.step({ - name: "workspace-create-task", + name: "organization-create-task", timeout: 5 * 60_000, run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskInput), }); @@ -497,17 +372,9 @@ export async function runWorkspaceWorkflow(ctx: any): Promise { return Loop.continue(undefined); } - if (msg.name === "workspace.command.refreshProviderProfiles") { - await loopCtx.step("workspace-refresh-provider-profiles", async () => - refreshProviderProfilesMutation(loopCtx, msg.body as RefreshProviderProfilesCommand), - ); - await 
msg.complete({ ok: true }); - return Loop.continue(undefined); - } - - if (msg.name === "workspace.command.syncGithubSession") { + if (msg.name === "organization.command.syncGithubSession") { await loopCtx.step({ - name: "workspace-sync-github-session", + name: "organization-sync-github-session", timeout: 60_000, run: async () => { const { syncGithubOrganizations } = await import("./app-shell.js"); @@ -518,9 +385,9 @@ export async function runWorkspaceWorkflow(ctx: any): Promise { return Loop.continue(undefined); } - if (msg.name === "workspace.command.syncGithubOrganizationRepos") { + if (msg.name === "organization.command.syncGithubOrganizationRepos") { await loopCtx.step({ - name: "workspace-sync-github-organization-repos", + name: "organization-sync-github-organization-repos", timeout: 60_000, run: async () => { const { syncGithubOrganizationRepos } = await import("./app-shell.js"); @@ -532,14 +399,12 @@ export async function runWorkspaceWorkflow(ctx: any): Promise { } } catch (error) { const message = resolveErrorMessage(error); - logActorWarning("workspace", "workspace workflow command failed", { - workspaceId: loopCtx.state.workspaceId, + logActorWarning("organization", "organization workflow command failed", { queueName: msg.name, error: message, }); await msg.complete({ error: message }).catch((completeError: unknown) => { - logActorWarning("workspace", "workspace workflow failed completing error response", { - workspaceId: loopCtx.state.workspaceId, + logActorWarning("organization", "organization workflow failed completing error response", { queueName: msg.name, error: resolveErrorMessage(completeError), }); @@ -550,25 +415,15 @@ export async function runWorkspaceWorkflow(ctx: any): Promise { }); } -export const workspaceActions = { - ...workspaceAppActions, - async useWorkspace(c: any, input: WorkspaceUseInput): Promise<{ workspaceId: string }> { - assertWorkspace(c, input.workspaceId); - return { workspaceId: c.state.workspaceId }; +export const 
organizationActions = { + ...organizationAppActions, + async useOrganization(c: any, input: OrganizationUseInput): Promise<{ organizationId: string }> { + assertOrganization(c, input.organizationId); + return { organizationId: c.state.organizationId }; }, - async addRepo(c: any, input: AddRepoInput): Promise { - const self = selfWorkspace(c); - return expectQueueResponse( - await self.send(workspaceWorkflowQueueName("workspace.command.addRepo"), input, { - wait: true, - timeout: 60_000, - }), - ); - }, - - async listRepos(c: any, input: WorkspaceUseInput): Promise { - assertWorkspace(c, input.workspaceId); + async listRepos(c: any, input: OrganizationUseInput): Promise { + assertOrganization(c, input.organizationId); const rows = await c.db .select({ @@ -582,7 +437,7 @@ export const workspaceActions = { .all(); return rows.map((row) => ({ - workspaceId: c.state.workspaceId, + organizationId: c.state.organizationId, repoId: row.repoId, remoteUrl: row.remoteUrl, createdAt: row.createdAt, @@ -591,19 +446,22 @@ export const workspaceActions = { }, async createTask(c: any, input: CreateTaskInput): Promise { - const self = selfWorkspace(c); + const self = selfOrganization(c); return expectQueueResponse( - await self.send(workspaceWorkflowQueueName("workspace.command.createTask"), input, { + await self.send(organizationWorkflowQueueName("organization.command.createTask"), input, { wait: true, - timeout: 5 * 60_000, + timeout: 10_000, }), ); }, async starSandboxAgentRepo(c: any, input: StarSandboxAgentRepoInput): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const { driver } = getActorRuntimeContext(); - await driver.github.starRepository(SANDBOX_AGENT_REPO); + const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId); + await driver.github.starRepository(SANDBOX_AGENT_REPO, { + githubToken: auth?.githubToken ?? 
null, + }); return { repo: SANDBOX_AGENT_REPO, starredAt: Date.now(), @@ -613,7 +471,7 @@ export const workspaceActions = { /** * Called by task actors when their summary-level state changes. * This is the write path for the local materialized projection; clients read - * the projection via `getWorkspaceSummary`, but only task actors should push + * the projection via `getOrganizationSummary`, but only task actors should push * rows into it. */ async applyTaskSummaryUpdate(c: any, input: { taskSummary: WorkbenchTaskSummary }): Promise { @@ -625,12 +483,12 @@ export const workspaceActions = { set: taskSummaryRowFromSummary(input.taskSummary), }) .run(); - c.broadcast("workspaceUpdated", { type: "taskSummaryUpdated", taskSummary: input.taskSummary } satisfies WorkspaceEvent); + c.broadcast("organizationUpdated", { type: "taskSummaryUpdated", taskSummary: input.taskSummary } satisfies OrganizationEvent); }, async removeTaskSummary(c: any, input: { taskId: string }): Promise { await c.db.delete(taskSummaries).where(eq(taskSummaries.taskId, input.taskId)).run(); - c.broadcast("workspaceUpdated", { type: "taskRemoved", taskId: input.taskId } satisfies WorkspaceEvent); + c.broadcast("organizationUpdated", { type: "taskRemoved", taskId: input.taskId } satisfies OrganizationEvent); }, async findTaskForGithubBranch(c: any, input: { repoId: string; branchName: string }): Promise<{ taskId: string | null }> { @@ -645,13 +503,13 @@ export const workspaceActions = { for (const summary of matches) { try { - const task = getTask(c, c.state.workspaceId, input.repoId, summary.taskId); - await workspaceActions.applyTaskSummaryUpdate(c, { + const task = getTask(c, c.state.organizationId, input.repoId, summary.taskId); + await organizationActions.applyTaskSummaryUpdate(c, { taskSummary: await task.getTaskSummary({}), }); } catch (error) { - logActorWarning("workspace", "failed refreshing task summary for GitHub branch", { - workspaceId: c.state.workspaceId, + 
logActorWarning("organization", "failed refreshing task summary for GitHub branch", { + organizationId: c.state.organizationId, repoId: input.repoId, branchName: input.branchName, taskId: summary.taskId, @@ -666,11 +524,11 @@ export const workspaceActions = { if (summaries.some((summary) => summary.branch === input.pullRequest.headRefName)) { return; } - c.broadcast("workspaceUpdated", { type: "pullRequestUpdated", pullRequest: input.pullRequest } satisfies WorkspaceEvent); + c.broadcast("organizationUpdated", { type: "pullRequestUpdated", pullRequest: input.pullRequest } satisfies OrganizationEvent); }, async removeOpenPullRequest(c: any, input: { prId: string }): Promise { - c.broadcast("workspaceUpdated", { type: "pullRequestRemoved", prId: input.prId } satisfies WorkspaceEvent); + c.broadcast("organizationUpdated", { type: "pullRequestRemoved", prId: input.prId } satisfies OrganizationEvent); }, async applyGithubRepositoryProjection(c: any, input: { repoId: string; remoteUrl: string }): Promise { @@ -747,7 +605,7 @@ export const workspaceActions = { continue; } await c.db.delete(repos).where(eq(repos.repoId, repo.repoId)).run(); - c.broadcast("workspaceUpdated", { type: "repoRemoved", repoId: repo.repoId } satisfies WorkspaceEvent); + c.broadcast("organizationUpdated", { type: "repoRemoved", repoId: repo.repoId } satisfies OrganizationEvent); } const profile = await c.db @@ -775,13 +633,13 @@ export const workspaceActions = { async recordGithubWebhookReceipt( c: any, input: { - workspaceId: string; + organizationId: string; event: string; action?: string | null; receivedAt?: number; }, ): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const profile = await c.db .select({ id: organizationProfile.id }) @@ -802,45 +660,38 @@ export const workspaceActions = { .run(); }, - async getWorkspaceSummary(c: any, input: WorkspaceUseInput): Promise { - assertWorkspace(c, input.workspaceId); - return await 
getWorkspaceSummarySnapshot(c); + async getOrganizationSummary(c: any, input: OrganizationUseInput): Promise { + assertOrganization(c, input.organizationId); + return await getOrganizationSummarySnapshot(c); }, - async reconcileWorkbenchState(c: any, input: WorkspaceUseInput): Promise { - assertWorkspace(c, input.workspaceId); + async reconcileWorkbenchState(c: any, input: OrganizationUseInput): Promise { + assertOrganization(c, input.organizationId); return await reconcileWorkbenchProjection(c); }, - async createWorkbenchTask(c: any, input: TaskWorkbenchCreateTaskInput): Promise<{ taskId: string; tabId?: string }> { - const created = await workspaceActions.createTask(c, { - workspaceId: c.state.workspaceId, + async createWorkbenchTask(c: any, input: TaskWorkbenchCreateTaskInput): Promise<{ taskId: string; sessionId?: string }> { + // Step 1: Create the task record (wait: true — local state mutations only). + const created = await organizationActions.createTask(c, { + organizationId: c.state.organizationId, repoId: input.repoId, task: input.task, ...(input.title ? { explicitTitle: input.title } : {}), ...(input.onBranch ? { onBranch: input.onBranch } : input.branch ? { explicitBranchName: input.branch } : {}), ...(input.model ? { agentType: agentTypeForModel(input.model) } : {}), }); + + // Step 2: Enqueue session creation + initial message (wait: false). + // The task workflow creates the session record and sends the message in + // the background. The client observes progress via push events on the + // task subscription topic. const task = await requireWorkbenchTask(c, created.taskId); - await waitForWorkbenchTaskReady(task); - const session = await task.createWorkbenchSession({ - taskId: created.taskId, - ...(input.model ? 
{ model: input.model } : {}), - }); - await task.sendWorkbenchMessage({ - taskId: created.taskId, - tabId: session.tabId, + await task.createWorkbenchSessionAndSend({ + model: input.model, text: input.task, - attachments: [], - waitForCompletion: true, }); - await task.getSessionDetail({ - sessionId: session.tabId, - }); - return { - taskId: created.taskId, - tabId: session.tabId, - }; + + return { taskId: created.taskId }; }, async markWorkbenchUnread(c: any, input: TaskWorkbenchSelectInput): Promise { @@ -858,7 +709,7 @@ export const workspaceActions = { await task.renameWorkbenchBranch(input); }, - async createWorkbenchSession(c: any, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ tabId: string }> { + async createWorkbenchSession(c: any, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }> { const task = await requireWorkbenchTask(c, input.taskId); return await task.createWorkbenchSession({ ...(input.model ? { model: input.model } : {}) }); }, @@ -888,12 +739,12 @@ export const workspaceActions = { await task.sendWorkbenchMessage(input); }, - async stopWorkbenchSession(c: any, input: TaskWorkbenchTabInput): Promise { + async stopWorkbenchSession(c: any, input: TaskWorkbenchSessionInput): Promise { const task = await requireWorkbenchTask(c, input.taskId); await task.stopWorkbenchSession(input); }, - async closeWorkbenchSession(c: any, input: TaskWorkbenchTabInput): Promise { + async closeWorkbenchSession(c: any, input: TaskWorkbenchSessionInput): Promise { const task = await requireWorkbenchTask(c, input.taskId); await task.closeWorkbenchSession(input); }, @@ -909,23 +760,23 @@ export const workspaceActions = { }, async reloadGithubOrganization(c: any): Promise { - await getOrCreateGithubData(c, c.state.workspaceId).reloadOrganization({}); + await getOrCreateGithubData(c, c.state.organizationId).reloadOrganization({}); }, async reloadGithubPullRequests(c: any): Promise { - await getOrCreateGithubData(c, 
c.state.workspaceId).reloadAllPullRequests({}); + await getOrCreateGithubData(c, c.state.organizationId).reloadAllPullRequests({}); }, async reloadGithubRepository(c: any, input: { repoId: string }): Promise { - await getOrCreateGithubData(c, c.state.workspaceId).reloadRepository(input); + await getOrCreateGithubData(c, c.state.organizationId).reloadRepository(input); }, async reloadGithubPullRequest(c: any, input: { repoId: string; prNumber: number }): Promise { - await getOrCreateGithubData(c, c.state.workspaceId).reloadPullRequest(input); + await getOrCreateGithubData(c, c.state.organizationId).reloadPullRequest(input); }, async listTasks(c: any, input: ListTasksInput): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); if (input.repoId) { const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, input.repoId)).get(); @@ -933,67 +784,41 @@ export const workspaceActions = { throw new Error(`Unknown repo: ${input.repoId}`); } - const project = await getOrCreateProject(c, c.state.workspaceId, input.repoId, repoRow.remoteUrl); - return await project.listTaskSummaries({ includeArchived: true }); + const repository = await getOrCreateRepository(c, c.state.organizationId, input.repoId, repoRow.remoteUrl); + return await repository.listTaskSummaries({ includeArchived: true }); } return await collectAllTaskSummaries(c); }, async getRepoOverview(c: any, input: RepoOverviewInput): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, input.repoId)).get(); if (!repoRow) { throw new Error(`Unknown repo: ${input.repoId}`); } - const project = await getOrCreateProject(c, c.state.workspaceId, input.repoId, repoRow.remoteUrl); - await project.ensure({ remoteUrl: repoRow.remoteUrl }); - return await project.getRepoOverview({}); - }, - - async 
runRepoStackAction(c: any, input: RepoStackActionInput): Promise { - assertWorkspace(c, input.workspaceId); - - const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, input.repoId)).get(); - if (!repoRow) { - throw new Error(`Unknown repo: ${input.repoId}`); - } - - const project = await getOrCreateProject(c, c.state.workspaceId, input.repoId, repoRow.remoteUrl); - await project.ensure({ remoteUrl: repoRow.remoteUrl }); - return await project.runRepoStackAction({ - action: input.action, - branchName: input.branchName, - parentBranch: input.parentBranch, - }); + const repository = await getOrCreateRepository(c, c.state.organizationId, input.repoId, repoRow.remoteUrl); + return await repository.getRepoOverview({}); }, async switchTask(c: any, taskId: string): Promise { const repoId = await resolveRepoId(c, taskId); - const h = getTask(c, c.state.workspaceId, repoId, taskId); + const h = getTask(c, c.state.organizationId, repoId, taskId); const record = await h.get(); const switched = await h.switch(); return { - workspaceId: c.state.workspaceId, + organizationId: c.state.organizationId, taskId, - providerId: record.providerId, + sandboxProviderId: record.sandboxProviderId, switchTarget: switched.switchTarget, }; }, - async refreshProviderProfiles(c: any, command?: RefreshProviderProfilesCommand): Promise { - const self = selfWorkspace(c); - await self.send(workspaceWorkflowQueueName("workspace.command.refreshProviderProfiles"), command ?? {}, { - wait: true, - timeout: 60_000, - }); - }, - async history(c: any, input: HistoryQueryInput): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const limit = input.limit ?? 
20; const repoRows = await c.db.select({ repoId: repos.repoId }).from(repos).all(); @@ -1002,7 +827,7 @@ export const workspaceActions = { for (const row of repoRows) { try { - const hist = await getOrCreateHistory(c, c.state.workspaceId, row.repoId); + const hist = await getOrCreateHistory(c, c.state.organizationId, row.repoId); const items = await hist.list({ branch: input.branch, taskId: input.taskId, @@ -1010,8 +835,8 @@ export const workspaceActions = { }); allEvents.push(...items); } catch (error) { - logActorWarning("workspace", "history lookup failed for repo", { - workspaceId: c.state.workspaceId, + logActorWarning("organization", "history lookup failed for repo", { + organizationId: c.state.organizationId, repoId: row.repoId, error: resolveErrorMessage(error), }); @@ -1023,7 +848,7 @@ export const workspaceActions = { }, async getTask(c: any, input: GetTaskInput): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const repoId = await resolveRepoId(c, input.taskId); @@ -1032,49 +857,49 @@ export const workspaceActions = { throw new Error(`Unknown repo: ${repoId}`); } - const project = await getOrCreateProject(c, c.state.workspaceId, repoId, repoRow.remoteUrl); - return await project.getTaskEnriched({ taskId: input.taskId }); + const repository = await getOrCreateRepository(c, c.state.organizationId, repoId, repoRow.remoteUrl); + return await repository.getTaskEnriched({ taskId: input.taskId }); }, async attachTask(c: any, input: TaskProxyActionInput): Promise<{ target: string; sessionId: string | null }> { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.workspaceId, repoId, input.taskId); + const h = getTask(c, c.state.organizationId, repoId, input.taskId); return await h.attach({ reason: input.reason }); }, async pushTask(c: any, input: TaskProxyActionInput): Promise { - 
assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.workspaceId, repoId, input.taskId); + const h = getTask(c, c.state.organizationId, repoId, input.taskId); await h.push({ reason: input.reason }); }, async syncTask(c: any, input: TaskProxyActionInput): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.workspaceId, repoId, input.taskId); + const h = getTask(c, c.state.organizationId, repoId, input.taskId); await h.sync({ reason: input.reason }); }, async mergeTask(c: any, input: TaskProxyActionInput): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.workspaceId, repoId, input.taskId); + const h = getTask(c, c.state.organizationId, repoId, input.taskId); await h.merge({ reason: input.reason }); }, async archiveTask(c: any, input: TaskProxyActionInput): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.workspaceId, repoId, input.taskId); + const h = getTask(c, c.state.organizationId, repoId, input.taskId); await h.archive({ reason: input.reason }); }, async killTask(c: any, input: TaskProxyActionInput): Promise { - assertWorkspace(c, input.workspaceId); + assertOrganization(c, input.organizationId); const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.workspaceId, repoId, input.taskId); + const h = getTask(c, c.state.organizationId, repoId, input.taskId); await h.kill({ reason: input.reason }); }, }; diff --git a/foundry/packages/backend/src/actors/workspace/app-shell.ts 
b/foundry/packages/backend/src/actors/organization/app-shell.ts similarity index 84% rename from foundry/packages/backend/src/actors/workspace/app-shell.ts rename to foundry/packages/backend/src/actors/organization/app-shell.ts index d9a5dfa..20febfd 100644 --- a/foundry/packages/backend/src/actors/workspace/app-shell.ts +++ b/foundry/packages/backend/src/actors/organization/app-shell.ts @@ -10,7 +10,7 @@ import type { UpdateFoundryOrganizationProfileInput, } from "@sandbox-agent/foundry-shared"; import { getActorRuntimeContext } from "../context.js"; -import { getOrCreateGithubData, getOrCreateWorkspace, selfWorkspace } from "../handles.js"; +import { getOrCreateGithubData, getOrCreateOrganization, selfOrganization } from "../handles.js"; import { GitHubAppError } from "../../services/app-github.js"; import { getBetterAuthService } from "../../services/better-auth.js"; import { repoIdFromRemote, repoLabelFromRemote } from "../../services/repo.js"; @@ -28,13 +28,13 @@ import { stripeLookup, } from "./db/schema.js"; -export const APP_SHELL_WORKSPACE_ID = "app"; +export const APP_SHELL_ORGANIZATION_ID = "app"; // ── Better Auth adapter where-clause helpers ── // These convert the adapter's `{ field, value, operator }` clause arrays into -// Drizzle predicates for workspace-level auth index / verification tables. +// Drizzle predicates for organization-level auth index / verification tables. 
-function workspaceAuthColumn(table: any, field: string): any { +function organizationAuthColumn(table: any, field: string): any { const column = table[field]; if (!column) { throw new Error(`Unknown auth table field: ${field}`); @@ -52,8 +52,8 @@ function normalizeAuthValue(value: unknown): unknown { return value; } -function workspaceAuthClause(table: any, clause: { field: string; value: unknown; operator?: string }): any { - const column = workspaceAuthColumn(table, clause.field); +function organizationAuthClause(table: any, clause: { field: string; value: unknown; operator?: string }): any { + const column = organizationAuthColumn(table, clause.field); const value = normalizeAuthValue(clause.value); switch (clause.operator) { case "ne": @@ -82,13 +82,13 @@ function workspaceAuthClause(table: any, clause: { field: string; value: unknown } } -function workspaceAuthWhere(table: any, clauses: any[] | undefined): any { +function organizationAuthWhere(table: any, clauses: any[] | undefined): any { if (!clauses || clauses.length === 0) { return undefined; } - let expr = workspaceAuthClause(table, clauses[0]); + let expr = organizationAuthClause(table, clauses[0]); for (const clause of clauses.slice(1)) { - const next = workspaceAuthClause(table, clause); + const next = organizationAuthClause(table, clause); expr = clause.connector === "OR" ? 
or(expr, next) : and(expr, next); } return expr; @@ -104,15 +104,15 @@ function roundDurationMs(start: number): number { return Math.round((performance.now() - start) * 100) / 100; } -function assertAppWorkspace(c: any): void { - if (c.state.workspaceId !== APP_SHELL_WORKSPACE_ID) { - throw new Error(`App shell action requires workspace ${APP_SHELL_WORKSPACE_ID}, got ${c.state.workspaceId}`); +function assertAppOrganization(c: any): void { + if (c.state.organizationId !== APP_SHELL_ORGANIZATION_ID) { + throw new Error(`App shell action requires organization ${APP_SHELL_ORGANIZATION_ID}, got ${c.state.organizationId}`); } } -function assertOrganizationWorkspace(c: any): void { - if (c.state.workspaceId === APP_SHELL_WORKSPACE_ID) { - throw new Error("Organization action cannot run on the reserved app workspace"); +function assertOrganizationShell(c: any): void { + if (c.state.organizationId === APP_SHELL_ORGANIZATION_ID) { + throw new Error("Organization action cannot run on the reserved app organization"); } } @@ -124,12 +124,12 @@ function slugify(value: string): string { .replace(/^-+|-+$/g, ""); } -function personalWorkspaceId(login: string): string { +function personalOrganizationId(login: string): string { return `personal-${slugify(login)}`; } -function organizationWorkspaceId(kind: FoundryOrganization["kind"], login: string): string { - return kind === "personal" ? personalWorkspaceId(login) : slugify(login); +function organizationOrganizationId(kind: FoundryOrganization["kind"], login: string): string { + return kind === "personal" ? 
personalOrganizationId(login) : slugify(login); } function hasRepoScope(scopes: string[]): boolean { @@ -217,12 +217,12 @@ function stripeWebhookSubscription(event: any) { }; } -async function getOrganizationState(workspace: any) { - return await workspace.getOrganizationShellState({}); +async function getOrganizationState(organization: any) { + return await organization.getOrganizationShellState({}); } -async function getOrganizationStateIfInitialized(workspace: any) { - return await workspace.getOrganizationShellStateIfInitialized({}); +async function getOrganizationStateIfInitialized(organization: any) { + return await organization.getOrganizationShellStateIfInitialized({}); } async function listSnapshotOrganizations(c: any, sessionId: string, organizationIds: string[]) { @@ -230,13 +230,13 @@ async function listSnapshotOrganizations(c: any, sessionId: string, organization organizationIds.map(async (organizationId) => { const organizationStartedAt = performance.now(); try { - const workspace = await getOrCreateWorkspace(c, organizationId); - const organizationState = await getOrganizationStateIfInitialized(workspace); + const organization = await getOrCreateOrganization(c, organizationId); + const organizationState = await getOrganizationStateIfInitialized(organization); if (!organizationState) { logger.warn( { sessionId, - workspaceId: c.state.workspaceId, + actorOrganizationId: c.state.organizationId, organizationId, durationMs: roundDurationMs(organizationStartedAt), }, @@ -247,7 +247,7 @@ async function listSnapshotOrganizations(c: any, sessionId: string, organization logger.info( { sessionId, - workspaceId: c.state.workspaceId, + actorOrganizationId: c.state.organizationId, organizationId, durationMs: roundDurationMs(organizationStartedAt), }, @@ -260,7 +260,7 @@ async function listSnapshotOrganizations(c: any, sessionId: string, organization logger.error( { sessionId, - workspaceId: c.state.workspaceId, + actorOrganizationId: c.state.organizationId, 
organizationId, durationMs: roundDurationMs(organizationStartedAt), errorMessage: message, @@ -273,7 +273,7 @@ async function listSnapshotOrganizations(c: any, sessionId: string, organization logger.info( { sessionId, - workspaceId: c.state.workspaceId, + actorOrganizationId: c.state.organizationId, organizationId, durationMs: roundDurationMs(organizationStartedAt), }, @@ -291,7 +291,7 @@ async function listSnapshotOrganizations(c: any, sessionId: string, organization } async function buildAppSnapshot(c: any, sessionId: string, allowOrganizationRepair = true): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const startedAt = performance.now(); const auth = getBetterAuthService(); let authState = await auth.getAuthState(sessionId); @@ -318,7 +318,7 @@ async function buildAppSnapshot(c: any, sessionId: string, allowOrganizationRepa logger.info( { sessionId, - workspaceId: c.state.workspaceId, + organizationId: c.state.organizationId, eligibleOrganizationCount: eligibleOrganizationIds.length, eligibleOrganizationIds, }, @@ -333,7 +333,7 @@ async function buildAppSnapshot(c: any, sessionId: string, allowOrganizationRepa logger.info( { sessionId, - workspaceId: c.state.workspaceId, + organizationId: c.state.organizationId, organizationIds: uninitializedOrganizationIds, }, "build_app_snapshot_repairing_organizations", @@ -344,7 +344,7 @@ async function buildAppSnapshot(c: any, sessionId: string, allowOrganizationRepa logger.warn( { sessionId, - workspaceId: c.state.workspaceId, + organizationId: c.state.organizationId, organizationIds: uninitializedOrganizationIds, }, "build_app_snapshot_repair_skipped_no_access_token", @@ -393,7 +393,7 @@ async function buildAppSnapshot(c: any, sessionId: string, allowOrganizationRepa logger.info( { sessionId, - workspaceId: c.state.workspaceId, + organizationId: c.state.organizationId, eligibleOrganizationCount: eligibleOrganizationIds.length, organizationCount: organizations.length, durationMs: 
roundDurationMs(startedAt), @@ -439,7 +439,7 @@ function requireEligibleOrganization(session: any, organizationId: string): void } async function upsertStripeLookupEntries(c: any, organizationId: string, customerId: string | null, subscriptionId: string | null): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const now = Date.now(); for (const lookupKey of [customerId ? `customer:${customerId}` : null, subscriptionId ? `subscription:${subscriptionId}` : null]) { if (!lookupKey) { @@ -464,7 +464,7 @@ async function upsertStripeLookupEntries(c: any, organizationId: string, custome } async function findOrganizationIdForStripeEvent(c: any, customerId: string | null, subscriptionId: string | null): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const customerLookup = customerId ? await c.db .select({ organizationId: stripeLookup.organizationId }) @@ -511,7 +511,7 @@ async function safeListInstallations(accessToken: string): Promise { } /** - * Slow path: list GitHub orgs + installations, sync each org workspace, + * Slow path: list GitHub orgs + installations, sync each org organization, * and update the session's eligible organization list. Called from the * workflow queue so it runs in the background after the callback has * already returned a redirect to the browser. 
@@ -521,7 +521,7 @@ export async function syncGithubOrganizations(c: any, input: { sessionId: string } async function syncGithubOrganizationsInternal(c: any, input: { sessionId: string; accessToken: string }, options: { broadcast: boolean }): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const auth = getBetterAuthService(); const { appShell } = getActorRuntimeContext(); const { sessionId, accessToken } = input; @@ -554,10 +554,10 @@ async function syncGithubOrganizationsInternal(c: any, input: { sessionId: strin ]; for (const account of accounts) { - const organizationId = organizationWorkspaceId(account.kind, account.githubLogin); + const organizationId = organizationOrganizationId(account.kind, account.githubLogin); const installation = installations.find((candidate) => candidate.accountLogin === account.githubLogin) ?? null; - const workspace = await getOrCreateWorkspace(c, organizationId); - await workspace.syncOrganizationShellFromGithub({ + const organization = await getOrCreateOrganization(c, organizationId); + await organization.syncOrganizationShellFromGithub({ userId: githubUserId, userName: viewer.name || viewer.login, userEmail: viewer.email ?? 
`${viewer.login}@users.noreply.github.com`, @@ -597,22 +597,22 @@ async function syncGithubOrganizationsInternal(c: any, input: { sessionId: strin } export async function syncGithubOrganizationRepos(c: any, input: { sessionId: string; organizationId: string }): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - const organization = await getOrganizationState(workspace); + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); const githubData = await getOrCreateGithubData(c, input.organizationId); try { await githubData.fullSync({ accessToken: session.githubAccessToken, - connectedAccount: organization.snapshot.github.connectedAccount, - installationId: organization.githubInstallationId, - installationStatus: organization.snapshot.github.installationStatus, - githubLogin: organization.githubLogin, - kind: organization.snapshot.kind, + connectedAccount: organizationState.snapshot.github.connectedAccount, + installationId: organizationState.githubInstallationId, + installationStatus: organizationState.snapshot.github.installationStatus, + githubLogin: organizationState.githubLogin, + kind: organizationState.snapshot.kind, label: "Importing repository catalog...", }); @@ -625,8 +625,8 @@ export async function syncGithubOrganizationRepos(c: any, input: { sessionId: st const installationStatus = error instanceof GitHubAppError && (error.status === 403 || error.status === 404) ? "reconnect_required" - : organization.snapshot.github.installationStatus; - await workspace.markOrganizationSyncFailed({ + : organizationState.snapshot.github.installationStatus; + await organizationHandle.markOrganizationSyncFailed({ message: error instanceof Error ? 
error.message : "GitHub import failed", installationStatus, }); @@ -640,20 +640,20 @@ export async function syncGithubOrganizationRepos(c: any, input: { sessionId: st } async function readOrganizationProfileRow(c: any) { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); return await c.db.select().from(organizationProfile).where(eq(organizationProfile.id, PROFILE_ROW_ID)).get(); } async function requireOrganizationProfileRow(c: any) { const row = await readOrganizationProfileRow(c); if (!row) { - throw new Error(`Organization profile is not initialized for workspace ${c.state.workspaceId}`); + throw new Error(`Organization profile is not initialized for organization ${c.state.organizationId}`); } return row; } async function listOrganizationMembers(c: any): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); const rows = await c.db.select().from(organizationMembers).orderBy(organizationMembers.role, organizationMembers.name).all(); return rows.map((row) => ({ id: row.id, @@ -665,13 +665,13 @@ async function listOrganizationMembers(c: any): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); const rows = await c.db.select({ email: seatAssignments.email }).from(seatAssignments).orderBy(seatAssignments.email).all(); return rows.map((row) => row.email); } async function listOrganizationInvoices(c: any): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); const rows = await c.db.select().from(invoices).orderBy(desc(invoices.issuedAt), desc(invoices.createdAt)).all(); return rows.map((row) => ({ id: row.id, @@ -683,7 +683,7 @@ async function listOrganizationInvoices(c: any): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); const rows = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).orderBy(desc(repos.updatedAt)).all(); return rows.map((row) => repoLabelFromRemote(row.remoteUrl)).sort((left, right) => left.localeCompare(right)); } @@ -710,8 +710,8 @@ 
async function buildOrganizationStateFromRow(c: any, row: any, startedAt: number const invoiceRows = await listOrganizationInvoices(c); const state = { - id: c.state.workspaceId, - workspaceId: c.state.workspaceId, + id: c.state.organizationId, + organizationId: c.state.organizationId, kind: row.kind, githubLogin: row.githubLogin, githubInstallationId: row.githubInstallationId ?? null, @@ -720,8 +720,8 @@ async function buildOrganizationStateFromRow(c: any, row: any, startedAt: number stripePriceId: row.stripePriceId ?? null, billingPlanId: row.billingPlanId, snapshot: { - id: c.state.workspaceId, - workspaceId: c.state.workspaceId, + id: c.state.organizationId, + organizationId: c.state.organizationId, kind: row.kind, settings: { displayName: row.displayName, @@ -759,7 +759,7 @@ async function buildOrganizationStateFromRow(c: any, row: any, startedAt: number logger.info( { - workspaceId: c.state.workspaceId, + organizationId: c.state.organizationId, githubLogin: row.githubLogin, repoCount: repoCatalog.length, memberCount: members.length, @@ -774,7 +774,7 @@ async function buildOrganizationStateFromRow(c: any, row: any, startedAt: number } async function applySubscriptionState( - workspace: any, + organization: any, subscription: { id: string; customerId: string; @@ -787,15 +787,15 @@ async function applySubscriptionState( }, fallbackPlanId: FoundryBillingPlanId, ): Promise { - await workspace.applyOrganizationStripeSubscription({ + await organization.applyOrganizationStripeSubscription({ subscription, fallbackPlanId, }); } -export const workspaceAppActions = { +export const organizationAppActions = { async authFindSessionIndex(c: any, input: { sessionId?: string; sessionToken?: string }) { - assertAppWorkspace(c); + assertAppOrganization(c); const clauses = [ ...(input.sessionId ? 
[{ field: "sessionId", value: input.sessionId }] : []), @@ -804,12 +804,12 @@ export const workspaceAppActions = { if (clauses.length === 0) { return null; } - const predicate = workspaceAuthWhere(authSessionIndex, clauses); + const predicate = organizationAuthWhere(authSessionIndex, clauses); return await c.db.select().from(authSessionIndex).where(predicate!).get(); }, async authUpsertSessionIndex(c: any, input: { sessionId: string; sessionToken: string; userId: string }) { - assertAppWorkspace(c); + assertAppOrganization(c); const now = Date.now(); await c.db @@ -834,7 +834,7 @@ export const workspaceAppActions = { }, async authDeleteSessionIndex(c: any, input: { sessionId?: string; sessionToken?: string }) { - assertAppWorkspace(c); + assertAppOrganization(c); const clauses = [ ...(input.sessionId ? [{ field: "sessionId", value: input.sessionId }] : []), @@ -843,18 +843,18 @@ export const workspaceAppActions = { if (clauses.length === 0) { return; } - const predicate = workspaceAuthWhere(authSessionIndex, clauses); + const predicate = organizationAuthWhere(authSessionIndex, clauses); await c.db.delete(authSessionIndex).where(predicate!).run(); }, async authFindEmailIndex(c: any, input: { email: string }) { - assertAppWorkspace(c); + assertAppOrganization(c); return await c.db.select().from(authEmailIndex).where(eq(authEmailIndex.email, input.email)).get(); }, async authUpsertEmailIndex(c: any, input: { email: string; userId: string }) { - assertAppWorkspace(c); + assertAppOrganization(c); const now = Date.now(); await c.db @@ -876,13 +876,13 @@ export const workspaceAppActions = { }, async authDeleteEmailIndex(c: any, input: { email: string }) { - assertAppWorkspace(c); + assertAppOrganization(c); await c.db.delete(authEmailIndex).where(eq(authEmailIndex.email, input.email)).run(); }, async authFindAccountIndex(c: any, input: { id?: string; providerId?: string; accountId?: string }) { - assertAppWorkspace(c); + assertAppOrganization(c); if (input.id) { return 
await c.db.select().from(authAccountIndex).where(eq(authAccountIndex.id, input.id)).get(); @@ -898,7 +898,7 @@ export const workspaceAppActions = { }, async authUpsertAccountIndex(c: any, input: { id: string; providerId: string; accountId: string; userId: string }) { - assertAppWorkspace(c); + assertAppOrganization(c); const now = Date.now(); await c.db @@ -924,7 +924,7 @@ export const workspaceAppActions = { }, async authDeleteAccountIndex(c: any, input: { id?: string; providerId?: string; accountId?: string }) { - assertAppWorkspace(c); + assertAppOrganization(c); if (input.id) { await c.db.delete(authAccountIndex).where(eq(authAccountIndex.id, input.id)).run(); @@ -939,7 +939,7 @@ export const workspaceAppActions = { }, async authCreateVerification(c: any, input: { data: Record }) { - assertAppWorkspace(c); + assertAppOrganization(c); await c.db .insert(authVerification) @@ -953,22 +953,22 @@ export const workspaceAppActions = { }, async authFindOneVerification(c: any, input: { where: any[] }) { - assertAppWorkspace(c); + assertAppOrganization(c); - const predicate = workspaceAuthWhere(authVerification, input.where); + const predicate = organizationAuthWhere(authVerification, input.where); return predicate ? await c.db.select().from(authVerification).where(predicate).get() : null; }, async authFindManyVerification(c: any, input: { where?: any[]; limit?: number; sortBy?: any; offset?: number }) { - assertAppWorkspace(c); + assertAppOrganization(c); - const predicate = workspaceAuthWhere(authVerification, input.where); + const predicate = organizationAuthWhere(authVerification, input.where); let query = c.db.select().from(authVerification); if (predicate) { query = query.where(predicate); } if (input.sortBy?.field) { - const column = workspaceAuthColumn(authVerification, input.sortBy.field); + const column = organizationAuthColumn(authVerification, input.sortBy.field); query = query.orderBy(input.sortBy.direction === "asc" ? 
asc(column) : desc(column)); } if (typeof input.limit === "number") { @@ -981,9 +981,9 @@ export const workspaceAppActions = { }, async authUpdateVerification(c: any, input: { where: any[]; update: Record }) { - assertAppWorkspace(c); + assertAppOrganization(c); - const predicate = workspaceAuthWhere(authVerification, input.where); + const predicate = organizationAuthWhere(authVerification, input.where); if (!predicate) { return null; } @@ -996,9 +996,9 @@ export const workspaceAppActions = { }, async authUpdateManyVerification(c: any, input: { where: any[]; update: Record }) { - assertAppWorkspace(c); + assertAppOrganization(c); - const predicate = workspaceAuthWhere(authVerification, input.where); + const predicate = organizationAuthWhere(authVerification, input.where); if (!predicate) { return 0; } @@ -1012,9 +1012,9 @@ export const workspaceAppActions = { }, async authDeleteVerification(c: any, input: { where: any[] }) { - assertAppWorkspace(c); + assertAppOrganization(c); - const predicate = workspaceAuthWhere(authVerification, input.where); + const predicate = organizationAuthWhere(authVerification, input.where); if (!predicate) { return; } @@ -1022,9 +1022,9 @@ export const workspaceAppActions = { }, async authDeleteManyVerification(c: any, input: { where: any[] }) { - assertAppWorkspace(c); + assertAppOrganization(c); - const predicate = workspaceAuthWhere(authVerification, input.where); + const predicate = organizationAuthWhere(authVerification, input.where); if (!predicate) { return 0; } @@ -1034,9 +1034,9 @@ export const workspaceAppActions = { }, async authCountVerification(c: any, input: { where?: any[] }) { - assertAppWorkspace(c); + assertAppOrganization(c); - const predicate = workspaceAuthWhere(authVerification, input.where); + const predicate = organizationAuthWhere(authVerification, input.where); const row = predicate ? 
await c.db.select({ value: sqlCount() }).from(authVerification).where(predicate).get() : await c.db.select({ value: sqlCount() }).from(authVerification).get(); @@ -1051,7 +1051,7 @@ export const workspaceAppActions = { c: any, input: { organizationId: string; requireRepoScope?: boolean }, ): Promise<{ accessToken: string; scopes: string[] } | null> { - assertAppWorkspace(c); + assertAppOrganization(c); const auth = getBetterAuthService(); const rows = await c.db.select().from(authSessionIndex).orderBy(desc(authSessionIndex.updatedAt)).all(); @@ -1081,7 +1081,7 @@ export const workspaceAppActions = { }, async skipAppStarterRepo(c: any, input: { sessionId: string }): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); await getBetterAuthService().upsertUserProfile(session.authUserId, { starterRepoStatus: "skipped", @@ -1092,12 +1092,12 @@ export const workspaceAppActions = { }, async starAppStarterRepo(c: any, input: { sessionId: string; organizationId: string }): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - await workspace.starSandboxAgentRepo({ - workspaceId: input.organizationId, + const organization = await getOrCreateOrganization(c, input.organizationId); + await organization.starSandboxAgentRepo({ + organizationId: input.organizationId, }); await getBetterAuthService().upsertUserProfile(session.authUserId, { starterRepoStatus: "starred", @@ -1108,22 +1108,22 @@ export const workspaceAppActions = { }, async selectAppOrganization(c: any, input: { sessionId: string; organizationId: string }): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, 
input.organizationId); await getBetterAuthService().setActiveOrganization(input.sessionId, input.organizationId); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - const organization = await getOrganizationState(workspace); - if (organization.snapshot.github.syncStatus !== "synced") { - if (organization.snapshot.github.syncStatus !== "syncing") { - await workspace.markOrganizationSyncStarted({ + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); + if (organizationState.snapshot.github.syncStatus !== "synced") { + if (organizationState.snapshot.github.syncStatus !== "syncing") { + await organizationHandle.markOrganizationSyncStarted({ label: "Importing repository catalog...", }); - const self = selfWorkspace(c); + const self = selfOrganization(c); await self.send( - "workspace.command.syncGithubOrganizationRepos", + "organization.command.syncGithubOrganizationRepos", { sessionId: input.sessionId, organizationId: input.organizationId }, { wait: false, @@ -1140,11 +1140,11 @@ export const workspaceAppActions = { c: any, input: { sessionId: string; organizationId: string } & UpdateFoundryOrganizationProfileInput, ): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - await workspace.updateOrganizationShellProfile({ + const organization = await getOrCreateOrganization(c, input.organizationId); + await organization.updateOrganizationShellProfile({ displayName: input.displayName, slug: input.slug, primaryDomain: input.primaryDomain, @@ -1153,23 +1153,23 @@ export const workspaceAppActions = { }, async triggerAppRepoImport(c: any, input: { sessionId: string; organizationId: string }): Promise { - assertAppWorkspace(c); + 
assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - const organization = await getOrganizationState(workspace); - if (organization.snapshot.github.syncStatus === "syncing") { + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); + if (organizationState.snapshot.github.syncStatus === "syncing") { return await buildAppSnapshot(c, input.sessionId); } - await workspace.markOrganizationSyncStarted({ + await organizationHandle.markOrganizationSyncStarted({ label: "Importing repository catalog...", }); - const self = selfWorkspace(c); + const self = selfOrganization(c); await self.send( - "workspace.command.syncGithubOrganizationRepos", + "organization.command.syncGithubOrganizationRepos", { sessionId: input.sessionId, organizationId: input.organizationId }, { wait: false, @@ -1180,32 +1180,32 @@ export const workspaceAppActions = { }, async beginAppGithubInstall(c: any, input: { sessionId: string; organizationId: string }): Promise<{ url: string }> { - assertAppWorkspace(c); + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); const { appShell } = getActorRuntimeContext(); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - const organization = await getOrganizationState(workspace); - if (organization.snapshot.kind !== "organization") { + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); + if (organizationState.snapshot.kind !== "organization") { return { - url: `${appShell.appUrl}/workspaces/${input.organizationId}`, + url: 
`${appShell.appUrl}/organizations/${input.organizationId}`, }; } return { - url: await appShell.github.buildInstallationUrl(organization.githubLogin, randomUUID()), + url: await appShell.github.buildInstallationUrl(organizationState.githubLogin, randomUUID()), }; }, async createAppCheckoutSession(c: any, input: { sessionId: string; organizationId: string; planId: FoundryBillingPlanId }): Promise<{ url: string }> { - assertAppWorkspace(c); + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); const { appShell } = getActorRuntimeContext(); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - const organization = await getOrganizationState(workspace); + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); if (input.planId === "free") { - await workspace.applyOrganizationFreePlan({ clearSubscription: false }); + await organizationHandle.applyOrganizationFreePlan({ clearSubscription: false }); return { url: `${appShell.appUrl}/organizations/${input.organizationId}/billing`, }; @@ -1215,16 +1215,16 @@ export const workspaceAppActions = { throw new Error("Stripe is not configured"); } - let customerId = organization.stripeCustomerId; + let customerId = organizationState.stripeCustomerId; if (!customerId) { customerId = ( await appShell.stripe.createCustomer({ organizationId: input.organizationId, - displayName: organization.snapshot.settings.displayName, + displayName: organizationState.snapshot.settings.displayName, email: session.currentUserEmail, }) ).id; - await workspace.applyOrganizationStripeCustomer({ customerId }); + await organizationHandle.applyOrganizationStripeCustomer({ customerId }); await upsertStripeLookupEntries(c, input.organizationId, customerId, null); } @@ -1245,24 +1245,24 @@ export const workspaceAppActions = { }, async 
finalizeAppCheckoutSession(c: any, input: { sessionId: string; organizationId: string; checkoutSessionId: string }): Promise<{ redirectTo: string }> { - assertAppWorkspace(c); + assertAppOrganization(c); const { appShell } = getActorRuntimeContext(); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - const organization = await getOrganizationState(workspace); + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); const completion = await appShell.stripe.retrieveCheckoutCompletion(input.checkoutSessionId); if (completion.customerId) { - await workspace.applyOrganizationStripeCustomer({ customerId: completion.customerId }); + await organizationHandle.applyOrganizationStripeCustomer({ customerId: completion.customerId }); } await upsertStripeLookupEntries(c, input.organizationId, completion.customerId, completion.subscriptionId); if (completion.subscriptionId) { const subscription = await appShell.stripe.retrieveSubscription(completion.subscriptionId); - await applySubscriptionState(workspace, subscription, completion.planId ?? organization.billingPlanId); + await applySubscriptionState(organizationHandle, subscription, completion.planId ?? 
organizationState.billingPlanId); } if (completion.paymentMethodLabel) { - await workspace.setOrganizationBillingPaymentMethod({ + await organizationHandle.setOrganizationBillingPaymentMethod({ label: completion.paymentMethodLabel, }); } @@ -1273,73 +1273,73 @@ export const workspaceAppActions = { }, async createAppBillingPortalSession(c: any, input: { sessionId: string; organizationId: string }): Promise<{ url: string }> { - assertAppWorkspace(c); + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); const { appShell } = getActorRuntimeContext(); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - const organization = await getOrganizationState(workspace); - if (!organization.stripeCustomerId) { + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); + if (!organizationState.stripeCustomerId) { throw new Error("Stripe customer is not available for this organization"); } const portal = await appShell.stripe.createPortalSession({ - customerId: organization.stripeCustomerId, + customerId: organizationState.stripeCustomerId, returnUrl: `${appShell.appUrl}/organizations/${input.organizationId}/billing`, }); return { url: portal.url }; }, async cancelAppScheduledRenewal(c: any, input: { sessionId: string; organizationId: string }): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); const { appShell } = getActorRuntimeContext(); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - const organization = await getOrganizationState(workspace); + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await 
getOrganizationState(organizationHandle); - if (organization.stripeSubscriptionId && appShell.stripe.isConfigured()) { - const subscription = await appShell.stripe.updateSubscriptionCancellation(organization.stripeSubscriptionId, true); - await applySubscriptionState(workspace, subscription, organization.billingPlanId); - await upsertStripeLookupEntries(c, input.organizationId, subscription.customerId ?? organization.stripeCustomerId, subscription.id); + if (organizationState.stripeSubscriptionId && appShell.stripe.isConfigured()) { + const subscription = await appShell.stripe.updateSubscriptionCancellation(organizationState.stripeSubscriptionId, true); + await applySubscriptionState(organizationHandle, subscription, organizationState.billingPlanId); + await upsertStripeLookupEntries(c, input.organizationId, subscription.customerId ?? organizationState.stripeCustomerId, subscription.id); } else { - await workspace.setOrganizationBillingStatus({ status: "scheduled_cancel" }); + await organizationHandle.setOrganizationBillingStatus({ status: "scheduled_cancel" }); } return await buildAppSnapshot(c, input.sessionId); }, async resumeAppSubscription(c: any, input: { sessionId: string; organizationId: string }): Promise { - assertAppWorkspace(c); + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); const { appShell } = getActorRuntimeContext(); - const workspace = await getOrCreateWorkspace(c, input.organizationId); - const organization = await getOrganizationState(workspace); + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); - if (organization.stripeSubscriptionId && appShell.stripe.isConfigured()) { - const subscription = await appShell.stripe.updateSubscriptionCancellation(organization.stripeSubscriptionId, false); - await applySubscriptionState(workspace, 
subscription, organization.billingPlanId); - await upsertStripeLookupEntries(c, input.organizationId, subscription.customerId ?? organization.stripeCustomerId, subscription.id); + if (organizationState.stripeSubscriptionId && appShell.stripe.isConfigured()) { + const subscription = await appShell.stripe.updateSubscriptionCancellation(organizationState.stripeSubscriptionId, false); + await applySubscriptionState(organizationHandle, subscription, organizationState.billingPlanId); + await upsertStripeLookupEntries(c, input.organizationId, subscription.customerId ?? organizationState.stripeCustomerId, subscription.id); } else { - await workspace.setOrganizationBillingStatus({ status: "active" }); + await organizationHandle.setOrganizationBillingStatus({ status: "active" }); } return await buildAppSnapshot(c, input.sessionId); }, - async recordAppSeatUsage(c: any, input: { sessionId: string; workspaceId: string }): Promise { - assertAppWorkspace(c); + async recordAppSeatUsage(c: any, input: { sessionId: string; organizationId: string }): Promise { + assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); - requireEligibleOrganization(session, input.workspaceId); - const workspace = await getOrCreateWorkspace(c, input.workspaceId); - await workspace.recordOrganizationSeatUsage({ + requireEligibleOrganization(session, input.organizationId); + const organization = await getOrCreateOrganization(c, input.organizationId); + await organization.recordOrganizationSeatUsage({ email: session.currentUserEmail, }); return await buildAppSnapshot(c, input.sessionId); }, async handleAppStripeWebhook(c: any, input: { payload: string; signatureHeader: string | null }): Promise<{ ok: true }> { - assertAppWorkspace(c); + assertAppOrganization(c); const { appShell } = getActorRuntimeContext(); const event = appShell.stripe.verifyWebhookEvent(input.payload, input.signatureHeader); @@ -1353,9 +1353,9 @@ export const workspaceAppActions = { typeof 
object.subscription === "string" ? object.subscription : null, )); if (organizationId) { - const workspace = await getOrCreateWorkspace(c, organizationId); + const organization = await getOrCreateOrganization(c, organizationId); if (typeof object.customer === "string") { - await workspace.applyOrganizationStripeCustomer({ customerId: object.customer }); + await organization.applyOrganizationStripeCustomer({ customerId: object.customer }); } await upsertStripeLookupEntries( c, @@ -1371,9 +1371,13 @@ export const workspaceAppActions = { const subscription = stripeWebhookSubscription(event); const organizationId = await findOrganizationIdForStripeEvent(c, subscription.customerId, subscription.id); if (organizationId) { - const workspace = await getOrCreateWorkspace(c, organizationId); - const organization = await getOrganizationState(workspace); - await applySubscriptionState(workspace, subscription, appShell.stripe.planIdForPriceId(subscription.priceId ?? "") ?? organization.billingPlanId); + const organizationHandle = await getOrCreateOrganization(c, organizationId); + const organizationState = await getOrganizationState(organizationHandle); + await applySubscriptionState( + organizationHandle, + subscription, + appShell.stripe.planIdForPriceId(subscription.priceId ?? "") ?? 
organizationState.billingPlanId, + ); await upsertStripeLookupEntries(c, organizationId, subscription.customerId, subscription.id); } return { ok: true }; @@ -1383,8 +1387,8 @@ export const workspaceAppActions = { const subscription = stripeWebhookSubscription(event); const organizationId = await findOrganizationIdForStripeEvent(c, subscription.customerId, subscription.id); if (organizationId) { - const workspace = await getOrCreateWorkspace(c, organizationId); - await workspace.applyOrganizationFreePlan({ clearSubscription: true }); + const organization = await getOrCreateOrganization(c, organizationId); + await organization.applyOrganizationFreePlan({ clearSubscription: true }); } return { ok: true }; } @@ -1393,10 +1397,10 @@ export const workspaceAppActions = { const invoice = event.data.object as Record; const organizationId = await findOrganizationIdForStripeEvent(c, typeof invoice.customer === "string" ? invoice.customer : null, null); if (organizationId) { - const workspace = await getOrCreateWorkspace(c, organizationId); + const organization = await getOrCreateOrganization(c, organizationId); const rawAmount = typeof invoice.amount_paid === "number" ? invoice.amount_paid : invoice.amount_due; const amountUsd = Math.round((typeof rawAmount === "number" ? rawAmount : 0) / 100); - await workspace.upsertOrganizationInvoice({ + await organization.upsertOrganizationInvoice({ id: String(invoice.id), label: typeof invoice.number === "string" ? `Invoice ${invoice.number}` : "Stripe invoice", issuedAt: formatUnixDate(typeof invoice.created === "number" ? 
invoice.created : Math.floor(Date.now() / 1000)), @@ -1410,7 +1414,7 @@ export const workspaceAppActions = { }, async handleAppGithubWebhook(c: any, input: { payload: string; signatureHeader: string | null; eventHeader: string | null }): Promise<{ ok: true }> { - assertAppWorkspace(c); + assertAppOrganization(c); const { appShell } = getActorRuntimeContext(); const { event, body } = appShell.github.verifyWebhookEvent(input.payload, input.signatureHeader, input.eventHeader); @@ -1429,11 +1433,11 @@ export const workspaceAppActions = { } const kind: FoundryOrganization["kind"] = accountType === "User" ? "personal" : "organization"; - const organizationId = organizationWorkspaceId(kind, accountLogin); + const organizationId = organizationOrganizationId(kind, accountLogin); const receivedAt = Date.now(); - const workspace = await getOrCreateWorkspace(c, organizationId); - await workspace.recordGithubWebhookReceipt({ - workspaceId: organizationId, + const organization = await getOrCreateOrganization(c, organizationId); + await organization.recordGithubWebhookReceipt({ + organizationId: organizationId, event, action: body.action ?? null, receivedAt, @@ -1556,6 +1560,13 @@ export const workspaceAppActions = { }, }); } + if ((event === "push" || event === "create" || event === "delete") && body.repository?.clone_url) { + const repoId = repoIdFromRemote(body.repository.clone_url); + const knownRepository = await githubData.getRepository({ repoId }); + if (knownRepository) { + await githubData.reloadRepository({ repoId }); + } + } } return { ok: true }; } @@ -1588,13 +1599,13 @@ export const workspaceAppActions = { appConfigured: boolean; }, ): Promise<{ organizationId: string }> { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); const now = Date.now(); const existing = await readOrganizationProfileRow(c); const slug = existing?.slug ?? 
slugify(input.githubLogin); - const organizationId = organizationWorkspaceId(input.kind, input.githubLogin); - if (organizationId !== c.state.workspaceId) { - throw new Error(`Workspace actor mismatch: actor=${c.state.workspaceId} github=${organizationId}`); + const organizationId = organizationOrganizationId(input.kind, input.githubLogin); + if (organizationId !== c.state.organizationId) { + throw new Error(`Organization actor mismatch: actor=${c.state.organizationId} github=${organizationId}`); } const installationStatus = @@ -1698,17 +1709,17 @@ export const workspaceAppActions = { }, async getOrganizationShellState(c: any): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); return await buildOrganizationState(c); }, async getOrganizationShellStateIfInitialized(c: any): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); return await buildOrganizationStateIfInitialized(c); }, async updateOrganizationShellProfile(c: any, input: Pick): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); const existing = await requireOrganizationProfileRow(c); await c.db .update(organizationProfile) @@ -1723,7 +1734,7 @@ export const workspaceAppActions = { }, async markOrganizationSyncStarted(c: any, input: { label: string }): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); await c.db .update(organizationProfile) .set({ @@ -1743,7 +1754,7 @@ export const workspaceAppActions = { lastSyncLabel: string; }, ): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); const now = Date.now(); for (const repository of input.repositories) { const remoteUrl = repository.cloneUrl; @@ -1778,7 +1789,7 @@ export const workspaceAppActions = { }, async markOrganizationSyncFailed(c: any, input: { message: string; installationStatus: FoundryOrganization["github"]["installationStatus"] }): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); await c.db 
.update(organizationProfile) .set({ @@ -1792,7 +1803,7 @@ export const workspaceAppActions = { }, async applyOrganizationStripeCustomer(c: any, input: { customerId: string }): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); await c.db .update(organizationProfile) .set({ @@ -1819,7 +1830,7 @@ export const workspaceAppActions = { fallbackPlanId: FoundryBillingPlanId; }, ): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); const { appShell } = getActorRuntimeContext(); const planId = appShell.stripe.planIdForPriceId(input.subscription.priceId ?? "") ?? input.fallbackPlanId; await c.db @@ -1841,7 +1852,7 @@ export const workspaceAppActions = { }, async applyOrganizationFreePlan(c: any, input: { clearSubscription: boolean }): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); const patch: Record = { billingPlanId: "free", billingStatus: "active", @@ -1859,7 +1870,7 @@ export const workspaceAppActions = { }, async setOrganizationBillingPaymentMethod(c: any, input: { label: string }): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); await c.db .update(organizationProfile) .set({ @@ -1871,7 +1882,7 @@ export const workspaceAppActions = { }, async setOrganizationBillingStatus(c: any, input: { status: FoundryBillingState["status"] }): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); await c.db .update(organizationProfile) .set({ @@ -1883,7 +1894,7 @@ export const workspaceAppActions = { }, async upsertOrganizationInvoice(c: any, input: { id: string; label: string; issuedAt: string; amountUsd: number; status: "paid" | "open" }): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); await c.db .insert(invoices) .values({ @@ -1907,7 +1918,7 @@ export const workspaceAppActions = { }, async recordOrganizationSeatUsage(c: any, input: { email: string }): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); await c.db 
.insert(seatAssignments) .values({ @@ -1919,7 +1930,7 @@ export const workspaceAppActions = { }, async applyGithubInstallationCreated(c: any, input: { installationId: number }): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); await c.db .update(organizationProfile) .set({ @@ -1932,7 +1943,7 @@ export const workspaceAppActions = { }, async applyGithubInstallationRemoved(c: any, _input: {}): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); await c.db .update(organizationProfile) .set({ @@ -1947,7 +1958,7 @@ export const workspaceAppActions = { }, async applyGithubRepositoryChanges(c: any, input: { added: Array<{ fullName: string; private: boolean }>; removed: string[] }): Promise { - assertOrganizationWorkspace(c); + assertOrganizationShell(c); const now = Date.now(); for (const repo of input.added) { diff --git a/foundry/packages/backend/src/actors/project/db/db.ts b/foundry/packages/backend/src/actors/organization/db/db.ts similarity index 68% rename from foundry/packages/backend/src/actors/project/db/db.ts rename to foundry/packages/backend/src/actors/organization/db/db.ts index 49b5b72..f7eb392 100644 --- a/foundry/packages/backend/src/actors/project/db/db.ts +++ b/foundry/packages/backend/src/actors/organization/db/db.ts @@ -2,4 +2,4 @@ import { db } from "rivetkit/db/drizzle"; import * as schema from "./schema.js"; import migrations from "./migrations.js"; -export const projectDb = db({ schema, migrations }); +export const organizationDb = db({ schema, migrations }); diff --git a/foundry/packages/backend/src/actors/organization/db/drizzle.config.ts b/foundry/packages/backend/src/actors/organization/db/drizzle.config.ts new file mode 100644 index 0000000..eb43667 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/db/drizzle.config.ts @@ -0,0 +1,6 @@ +import { defineConfig } from "rivetkit/db/drizzle"; + +export default defineConfig({ + out: "./src/actors/organization/db/drizzle", + schema: 
"./src/actors/organization/db/schema.ts", +}); diff --git a/foundry/packages/backend/src/actors/workspace/db/drizzle/0000_melted_viper.sql b/foundry/packages/backend/src/actors/organization/db/drizzle/0000_melted_viper.sql similarity index 94% rename from foundry/packages/backend/src/actors/workspace/db/drizzle/0000_melted_viper.sql rename to foundry/packages/backend/src/actors/organization/db/drizzle/0000_melted_viper.sql index 7410e3b..09b77f9 100644 --- a/foundry/packages/backend/src/actors/workspace/db/drizzle/0000_melted_viper.sql +++ b/foundry/packages/backend/src/actors/organization/db/drizzle/0000_melted_viper.sql @@ -69,12 +69,6 @@ CREATE TABLE `organization_profile` ( `updated_at` integer NOT NULL ); --> statement-breakpoint -CREATE TABLE `provider_profiles` ( - `provider_id` text PRIMARY KEY NOT NULL, - `profile_json` text NOT NULL, - `updated_at` integer NOT NULL -); ---> statement-breakpoint CREATE TABLE `repos` ( `repo_id` text PRIMARY KEY NOT NULL, `remote_url` text NOT NULL, diff --git a/foundry/packages/backend/src/actors/workspace/db/drizzle/meta/0000_snapshot.json b/foundry/packages/backend/src/actors/organization/db/drizzle/meta/0000_snapshot.json similarity index 95% rename from foundry/packages/backend/src/actors/workspace/db/drizzle/meta/0000_snapshot.json rename to foundry/packages/backend/src/actors/organization/db/drizzle/meta/0000_snapshot.json index 0ae9736..cdcc44c 100644 --- a/foundry/packages/backend/src/actors/workspace/db/drizzle/meta/0000_snapshot.json +++ b/foundry/packages/backend/src/actors/organization/db/drizzle/meta/0000_snapshot.json @@ -457,37 +457,6 @@ "uniqueConstraints": {}, "checkConstraints": {} }, - "provider_profiles": { - "name": "provider_profiles", - "columns": { - "provider_id": { - "name": "provider_id", - "type": "text", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "profile_json": { - "name": "profile_json", - "type": "text", - "primaryKey": false, - "notNull": true, - 
"autoincrement": false - }, - "updated_at": { - "name": "updated_at", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - }, "repos": { "name": "repos", "columns": { diff --git a/foundry/packages/backend/src/actors/workspace/db/drizzle/meta/_journal.json b/foundry/packages/backend/src/actors/organization/db/drizzle/meta/_journal.json similarity index 100% rename from foundry/packages/backend/src/actors/workspace/db/drizzle/meta/_journal.json rename to foundry/packages/backend/src/actors/organization/db/drizzle/meta/_journal.json diff --git a/foundry/packages/backend/src/actors/workspace/db/migrations.ts b/foundry/packages/backend/src/actors/organization/db/migrations.ts similarity index 96% rename from foundry/packages/backend/src/actors/workspace/db/migrations.ts rename to foundry/packages/backend/src/actors/organization/db/migrations.ts index a86578d..b3e09f1 100644 --- a/foundry/packages/backend/src/actors/workspace/db/migrations.ts +++ b/foundry/packages/backend/src/actors/organization/db/migrations.ts @@ -22,6 +22,12 @@ const journal = { tag: "0002_task_summaries", breakpoints: true, }, + { + idx: 3, + when: 1773810001000, + tag: "0003_drop_provider_profiles", + breakpoints: true, + }, ], } as const; @@ -99,12 +105,6 @@ CREATE TABLE \`organization_profile\` ( \`updated_at\` integer NOT NULL ); --> statement-breakpoint -CREATE TABLE \`provider_profiles\` ( - \`provider_id\` text PRIMARY KEY NOT NULL, - \`profile_json\` text NOT NULL, - \`updated_at\` integer NOT NULL -); ---> statement-breakpoint CREATE TABLE \`repos\` ( \`repo_id\` text PRIMARY KEY NOT NULL, \`remote_url\` text NOT NULL, @@ -170,6 +170,8 @@ CREATE TABLE IF NOT EXISTS \`auth_verification\` ( \`pull_request_json\` text, \`sessions_summary_json\` text DEFAULT '[]' NOT NULL ); +`, + m0003: `DROP TABLE IF EXISTS \`provider_profiles\`; 
`, } as const, }; diff --git a/foundry/packages/backend/src/actors/workspace/db/schema.ts b/foundry/packages/backend/src/actors/organization/db/schema.ts similarity index 91% rename from foundry/packages/backend/src/actors/workspace/db/schema.ts rename to foundry/packages/backend/src/actors/organization/db/schema.ts index 6571b62..f1e069e 100644 --- a/foundry/packages/backend/src/actors/workspace/db/schema.ts +++ b/foundry/packages/backend/src/actors/organization/db/schema.ts @@ -1,12 +1,6 @@ import { integer, sqliteTable, text } from "rivetkit/db/drizzle"; -// SQLite is per workspace actor instance, so no workspaceId column needed. -export const providerProfiles = sqliteTable("provider_profiles", { - providerId: text("provider_id").notNull().primaryKey(), - // Structured by the provider profile snapshot returned by provider integrations. - profileJson: text("profile_json").notNull(), - updatedAt: integer("updated_at").notNull(), -}); +// SQLite is per organization actor instance, so no organizationId column needed. export const repos = sqliteTable("repos", { repoId: text("repo_id").notNull().primaryKey(), @@ -23,7 +17,7 @@ export const taskLookup = sqliteTable("task_lookup", { /** * Materialized sidebar projection maintained by task actors. * The source of truth still lives on each task actor; this table exists so - * workspace reads can stay local and avoid fan-out across child actors. + * organization reads can stay local and avoid fan-out across child actors. 
*/ export const taskSummaries = sqliteTable("task_summaries", { taskId: text("task_id").notNull().primaryKey(), diff --git a/foundry/packages/backend/src/actors/organization/index.ts b/foundry/packages/backend/src/actors/organization/index.ts new file mode 100644 index 0000000..1ea0196 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/index.ts @@ -0,0 +1,19 @@ +import { actor, queue } from "rivetkit"; +import { workflow } from "rivetkit/workflow"; +import { organizationDb } from "./db/db.js"; +import { runOrganizationWorkflow, ORGANIZATION_QUEUE_NAMES, organizationActions } from "./actions.js"; + +export const organization = actor({ + db: organizationDb, + queues: Object.fromEntries(ORGANIZATION_QUEUE_NAMES.map((name) => [name, queue()])), + options: { + name: "Organization", + icon: "compass", + actionTimeout: 5 * 60_000, + }, + createState: (_c, organizationId: string) => ({ + organizationId, + }), + actions: organizationActions, + run: workflow(runOrganizationWorkflow), +}); diff --git a/foundry/packages/backend/src/actors/project-branch-sync/index.ts b/foundry/packages/backend/src/actors/project-branch-sync/index.ts deleted file mode 100644 index 3b20941..0000000 --- a/foundry/packages/backend/src/actors/project-branch-sync/index.ts +++ /dev/null @@ -1,178 +0,0 @@ -import { actor, queue } from "rivetkit"; -import { workflow } from "rivetkit/workflow"; -import type { GitDriver } from "../../driver.js"; -import { getActorRuntimeContext } from "../context.js"; -import { getProject, selfProjectBranchSync } from "../handles.js"; -import { logActorWarning, resolveErrorMessage, resolveErrorStack } from "../logging.js"; -import { type PollingControlState, runWorkflowPollingLoop } from "../polling.js"; -import { parentLookupFromStack } from "../project/stack-model.js"; -import { withRepoGitLock } from "../../services/repo-git-lock.js"; - -export interface ProjectBranchSyncInput { - workspaceId: string; - repoId: string; - repoPath: string; - 
intervalMs: number; -} - -interface SetIntervalCommand { - intervalMs: number; -} - -interface EnrichedBranchSnapshot { - branchName: string; - commitSha: string; - parentBranch: string | null; - trackedInStack: boolean; - diffStat: string | null; - hasUnpushed: boolean; - conflictsWithMain: boolean; -} - -interface ProjectBranchSyncState extends PollingControlState { - workspaceId: string; - repoId: string; - repoPath: string; -} - -const CONTROL = { - start: "project.branch_sync.control.start", - stop: "project.branch_sync.control.stop", - setInterval: "project.branch_sync.control.set_interval", - force: "project.branch_sync.control.force", -} as const; - -async function enrichBranches(workspaceId: string, repoId: string, repoPath: string, git: GitDriver): Promise { - return await withRepoGitLock(repoPath, async () => { - await git.fetch(repoPath); - const branches = await git.listRemoteBranches(repoPath); - const { driver } = getActorRuntimeContext(); - const stackEntries = await driver.stack.listStack(repoPath).catch(() => []); - const parentByBranch = parentLookupFromStack(stackEntries); - const enriched: EnrichedBranchSnapshot[] = []; - - const baseRef = await git.remoteDefaultBaseRef(repoPath); - const baseSha = await git.revParse(repoPath, baseRef).catch(() => ""); - - for (const branch of branches) { - let branchDiffStat: string | null = null; - let branchHasUnpushed = false; - let branchConflicts = false; - - try { - branchDiffStat = await git.diffStatForBranch(repoPath, branch.branchName); - } catch (error) { - logActorWarning("project-branch-sync", "diffStatForBranch failed", { - workspaceId, - repoId, - branchName: branch.branchName, - error: resolveErrorMessage(error), - }); - branchDiffStat = null; - } - - try { - const headSha = await git.revParse(repoPath, `origin/${branch.branchName}`); - branchHasUnpushed = Boolean(baseSha && headSha && headSha !== baseSha); - } catch (error) { - logActorWarning("project-branch-sync", "revParse failed", { - 
workspaceId, - repoId, - branchName: branch.branchName, - error: resolveErrorMessage(error), - }); - branchHasUnpushed = false; - } - - try { - branchConflicts = await git.conflictsWithMain(repoPath, branch.branchName); - } catch (error) { - logActorWarning("project-branch-sync", "conflictsWithMain failed", { - workspaceId, - repoId, - branchName: branch.branchName, - error: resolveErrorMessage(error), - }); - branchConflicts = false; - } - - enriched.push({ - branchName: branch.branchName, - commitSha: branch.commitSha, - parentBranch: parentByBranch.get(branch.branchName) ?? null, - trackedInStack: parentByBranch.has(branch.branchName), - diffStat: branchDiffStat, - hasUnpushed: branchHasUnpushed, - conflictsWithMain: branchConflicts, - }); - } - - return enriched; - }); -} - -async function pollBranches(c: { state: ProjectBranchSyncState }): Promise { - const { driver } = getActorRuntimeContext(); - const enrichedItems = await enrichBranches(c.state.workspaceId, c.state.repoId, c.state.repoPath, driver.git); - const parent = getProject(c, c.state.workspaceId, c.state.repoId); - await parent.applyBranchSyncResult({ items: enrichedItems, at: Date.now() }); -} - -export const projectBranchSync = actor({ - queues: { - [CONTROL.start]: queue(), - [CONTROL.stop]: queue(), - [CONTROL.setInterval]: queue(), - [CONTROL.force]: queue(), - }, - options: { - name: "Project Branch Sync", - icon: "code-branch", - // Polling actors rely on timer-based wakeups; sleeping would pause the timer and stop polling. 
- noSleep: true, - }, - createState: (_c, input: ProjectBranchSyncInput): ProjectBranchSyncState => ({ - workspaceId: input.workspaceId, - repoId: input.repoId, - repoPath: input.repoPath, - intervalMs: input.intervalMs, - running: true, - }), - actions: { - async start(c): Promise { - const self = selfProjectBranchSync(c); - await self.send(CONTROL.start, {}, { wait: true, timeout: 15_000 }); - }, - - async stop(c): Promise { - const self = selfProjectBranchSync(c); - await self.send(CONTROL.stop, {}, { wait: true, timeout: 15_000 }); - }, - - async setIntervalMs(c, payload: SetIntervalCommand): Promise { - const self = selfProjectBranchSync(c); - await self.send(CONTROL.setInterval, payload, { wait: true, timeout: 15_000 }); - }, - - async force(c): Promise { - const self = selfProjectBranchSync(c); - await self.send(CONTROL.force, {}, { wait: true, timeout: 5 * 60_000 }); - }, - }, - run: workflow(async (ctx) => { - await runWorkflowPollingLoop(ctx, { - loopName: "project-branch-sync-loop", - control: CONTROL, - onPoll: async (loopCtx) => { - try { - await pollBranches(loopCtx); - } catch (error) { - logActorWarning("project-branch-sync", "poll failed", { - error: resolveErrorMessage(error), - stack: resolveErrorStack(error), - }); - } - }, - }); - }), -}); diff --git a/foundry/packages/backend/src/actors/project/actions.ts b/foundry/packages/backend/src/actors/project/actions.ts deleted file mode 100644 index 8f9090d..0000000 --- a/foundry/packages/backend/src/actors/project/actions.ts +++ /dev/null @@ -1,1231 +0,0 @@ -// @ts-nocheck -import { randomUUID } from "node:crypto"; -import { and, desc, eq, isNotNull, ne } from "drizzle-orm"; -import { Loop } from "rivetkit/workflow"; -import type { AgentType, TaskRecord, TaskSummary, ProviderId, RepoOverview, RepoStackAction, RepoStackActionResult } from "@sandbox-agent/foundry-shared"; -import { getActorRuntimeContext } from "../context.js"; -import { getGithubData, getTask, getOrCreateTask, getOrCreateHistory, 
getOrCreateProjectBranchSync, selfProject } from "../handles.js"; -import { isActorNotFoundError, logActorWarning, resolveErrorMessage } from "../logging.js"; -import { foundryRepoClonePath } from "../../services/foundry-paths.js"; -import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js"; -import { expectQueueResponse } from "../../services/queue.js"; -import { withRepoGitLock } from "../../services/repo-git-lock.js"; -import { branches, taskIndex, repoActionJobs, repoMeta } from "./db/schema.js"; -import { deriveFallbackTitle } from "../../services/create-flow.js"; -import { normalizeBaseBranchName } from "../../integrations/git-spice/index.js"; -import { sortBranchesForOverview } from "./stack-model.js"; - -interface EnsureProjectCommand { - remoteUrl: string; -} - -interface EnsureProjectResult { - localPath: string; -} - -interface CreateTaskCommand { - task: string; - providerId: ProviderId; - agentType: AgentType | null; - explicitTitle: string | null; - explicitBranchName: string | null; - initialPrompt: string | null; - onBranch: string | null; -} - -interface HydrateTaskIndexCommand {} - -interface ListReservedBranchesCommand {} - -interface RegisterTaskBranchCommand { - taskId: string; - branchName: string; - requireExistingRemote?: boolean; -} - -interface ListTaskSummariesCommand { - includeArchived?: boolean; -} - -interface GetTaskEnrichedCommand { - taskId: string; -} - -interface GetPullRequestForBranchCommand { - branchName: string; -} - -interface BranchSyncResult { - items: Array<{ - branchName: string; - commitSha: string; - parentBranch?: string | null; - trackedInStack?: boolean; - diffStat?: string | null; - hasUnpushed?: boolean; - conflictsWithMain?: boolean; - }>; - at: number; -} - -interface RepoOverviewCommand {} - -interface RunRepoStackActionCommand { - jobId?: string; - action: RepoStackAction; - branchName?: string; - parentBranch?: string; -} - -const PROJECT_QUEUE_NAMES = [ - "project.command.ensure", - 
"project.command.hydrateTaskIndex", - "project.command.createTask", - "project.command.registerTaskBranch", - "project.command.runRepoStackAction", - "project.command.applyBranchSyncResult", -] as const; - -type ProjectQueueName = (typeof PROJECT_QUEUE_NAMES)[number]; - -export { PROJECT_QUEUE_NAMES }; - -export function projectWorkflowQueueName(name: ProjectQueueName): ProjectQueueName { - return name; -} - -async function ensureLocalClone(c: any, remoteUrl: string): Promise { - const { config, driver } = getActorRuntimeContext(); - const localPath = foundryRepoClonePath(config, c.state.workspaceId, c.state.repoId); - const auth = await resolveWorkspaceGithubAuth(c, c.state.workspaceId); - await driver.git.ensureCloned(remoteUrl, localPath, { githubToken: auth?.githubToken ?? null }); - c.state.localPath = localPath; - return localPath; -} - -async function ensureProjectSyncActors(c: any, localPath: string): Promise { - if (c.state.syncActorsStarted) { - return; - } - - const branchSync = await getOrCreateProjectBranchSync(c, c.state.workspaceId, c.state.repoId, localPath, 5_000); - c.state.syncActorsStarted = true; - - void branchSync.start().catch((error: unknown) => { - logActorWarning("project.sync", "starting branch sync actor failed", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - error: resolveErrorMessage(error), - }); - }); -} - -async function ensureRepoActionJobsTable(c: any): Promise { - await c.db.execute(` - CREATE TABLE IF NOT EXISTS repo_action_jobs ( - job_id text PRIMARY KEY NOT NULL, - action text NOT NULL, - branch_name text, - parent_branch text, - status text NOT NULL, - message text NOT NULL, - created_at integer NOT NULL, - updated_at integer NOT NULL, - completed_at integer - ) - `); -} - -async function writeRepoActionJob( - c: any, - input: { - jobId: string; - action: RepoStackAction; - branchName: string | null; - parentBranch: string | null; - status: "queued" | "running" | "completed" | "error"; - message: string; 
- createdAt?: number; - completedAt?: number | null; - }, -): Promise { - await ensureRepoActionJobsTable(c); - const now = Date.now(); - await c.db - .insert(repoActionJobs) - .values({ - jobId: input.jobId, - action: input.action, - branchName: input.branchName, - parentBranch: input.parentBranch, - status: input.status, - message: input.message, - createdAt: input.createdAt ?? now, - updatedAt: now, - completedAt: input.completedAt ?? null, - }) - .onConflictDoUpdate({ - target: repoActionJobs.jobId, - set: { - status: input.status, - message: input.message, - updatedAt: now, - completedAt: input.completedAt ?? null, - }, - }) - .run(); -} - -async function listRepoActionJobRows(c: any): Promise< - Array<{ - jobId: string; - action: RepoStackAction; - branchName: string | null; - parentBranch: string | null; - status: "queued" | "running" | "completed" | "error"; - message: string; - createdAt: number; - updatedAt: number; - completedAt: number | null; - }> -> { - await ensureRepoActionJobsTable(c); - const rows = await c.db.select().from(repoActionJobs).orderBy(desc(repoActionJobs.updatedAt)).limit(20).all(); - return rows.map((row: any) => ({ - jobId: row.jobId, - action: row.action, - branchName: row.branchName ?? null, - parentBranch: row.parentBranch ?? null, - status: row.status, - message: row.message, - createdAt: row.createdAt, - updatedAt: row.updatedAt, - completedAt: row.completedAt ?? null, - })); -} - -async function deleteStaleTaskIndexRow(c: any, taskId: string): Promise { - try { - await c.db.delete(taskIndex).where(eq(taskIndex.taskId, taskId)).run(); - } catch { - // Best-effort cleanup only; preserve the original caller flow. 
- } -} - -function isStaleTaskReferenceError(error: unknown): boolean { - const message = resolveErrorMessage(error); - return isActorNotFoundError(error) || message.startsWith("Task not found:"); -} - -async function ensureTaskIndexHydrated(c: any): Promise { - if (c.state.taskIndexHydrated) { - return; - } - - const existing = await c.db.select({ taskId: taskIndex.taskId }).from(taskIndex).limit(1).get(); - - if (existing) { - c.state.taskIndexHydrated = true; - return; - } - - // Migration path for old project actors that only tracked tasks in history. - try { - const history = await getOrCreateHistory(c, c.state.workspaceId, c.state.repoId); - const rows = await history.list({ limit: 5_000 }); - const seen = new Set(); - let skippedMissingTaskActors = 0; - - for (const row of rows) { - if (!row.taskId || seen.has(row.taskId)) { - continue; - } - seen.add(row.taskId); - - try { - const h = getTask(c, c.state.workspaceId, c.state.repoId, row.taskId); - await h.get(); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - skippedMissingTaskActors += 1; - continue; - } - throw error; - } - - await c.db - .insert(taskIndex) - .values({ - taskId: row.taskId, - branchName: row.branchName, - createdAt: row.createdAt, - updatedAt: row.createdAt, - }) - .onConflictDoNothing() - .run(); - } - - if (skippedMissingTaskActors > 0) { - logActorWarning("project", "skipped missing tasks while hydrating index", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - skippedMissingTaskActors, - }); - } - } catch (error) { - logActorWarning("project", "task index hydration from history failed", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - error: resolveErrorMessage(error), - }); - } - - c.state.taskIndexHydrated = true; -} - -async function ensureProjectReady(c: any): Promise { - if (!c.state.remoteUrl) { - throw new Error("project remoteUrl is not initialized"); - } - if (!c.state.localPath) { - await ensureLocalClone(c, c.state.remoteUrl); 
- } - if (!c.state.localPath) { - throw new Error("project local repo is not initialized"); - } - await ensureProjectSyncActors(c, c.state.localPath); - return c.state.localPath; -} - -async function ensureProjectReadyForRead(c: any): Promise { - if (!c.state.remoteUrl) { - throw new Error("project remoteUrl is not initialized"); - } - - if (!c.state.localPath) { - const result = await projectActions.ensure(c, { remoteUrl: c.state.remoteUrl }); - c.state.localPath = result?.localPath ?? c.state.localPath; - } - - if (!c.state.localPath) { - throw new Error("project local repo is not initialized"); - } - - if (!c.state.syncActorsStarted) { - await ensureProjectSyncActors(c, c.state.localPath); - } - - return c.state.localPath; -} - -async function ensureTaskIndexHydratedForRead(c: any): Promise { - if (c.state.taskIndexHydrated) { - return; - } - await projectActions.hydrateTaskIndex(c, {}); -} - -async function forceProjectSync(c: any, localPath: string): Promise { - const branchSync = await getOrCreateProjectBranchSync(c, c.state.workspaceId, c.state.repoId, localPath, 5_000); - await branchSync.force(); -} - -async function enrichTaskRecord(c: any, record: TaskRecord): Promise { - const branchName = record.branchName; - const br = - branchName != null - ? await c.db - .select({ - diffStat: branches.diffStat, - hasUnpushed: branches.hasUnpushed, - conflictsWithMain: branches.conflictsWithMain, - parentBranch: branches.parentBranch, - }) - .from(branches) - .where(eq(branches.branchName, branchName)) - .get() - : null; - - const pr = - branchName != null - ? await getGithubData(c, c.state.workspaceId) - .listPullRequestsForRepository({ repoId: c.state.repoId }) - .then((rows: any[]) => rows.find((row) => row.headRefName === branchName) ?? null) - .catch(() => null) - : null; - - return { - ...record, - diffStat: br?.diffStat ?? null, - hasUnpushed: br?.hasUnpushed != null ? String(br.hasUnpushed) : null, - conflictsWithMain: br?.conflictsWithMain != null ? 
String(br.conflictsWithMain) : null, - parentBranch: br?.parentBranch ?? null, - prUrl: pr?.url ?? null, - prAuthor: pr?.authorLogin ?? null, - ciStatus: null, - reviewStatus: null, - reviewer: pr?.authorLogin ?? null, - }; -} - -async function reinsertTaskIndexRow(c: any, taskId: string, branchName: string | null, updatedAt: number): Promise { - const now = Date.now(); - await c.db - .insert(taskIndex) - .values({ - taskId, - branchName, - createdAt: updatedAt || now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: taskIndex.taskId, - set: { - branchName, - updatedAt: now, - }, - }) - .run(); -} - -async function ensureProjectMutation(c: any, cmd: EnsureProjectCommand): Promise { - c.state.remoteUrl = cmd.remoteUrl; - const localPath = await ensureLocalClone(c, cmd.remoteUrl); - - await c.db - .insert(repoMeta) - .values({ - id: 1, - remoteUrl: cmd.remoteUrl, - updatedAt: Date.now(), - }) - .onConflictDoUpdate({ - target: repoMeta.id, - set: { - remoteUrl: cmd.remoteUrl, - updatedAt: Date.now(), - }, - }) - .run(); - - return { localPath }; -} - -async function hydrateTaskIndexMutation(c: any, _cmd?: HydrateTaskIndexCommand): Promise { - await ensureTaskIndexHydrated(c); -} - -async function createTaskMutation(c: any, cmd: CreateTaskCommand): Promise { - const onBranch = cmd.onBranch?.trim() || null; - const initialBranchName = onBranch; - const initialTitle = onBranch ? deriveFallbackTitle(cmd.task, cmd.explicitTitle ?? 
undefined) : null; - const taskId = randomUUID(); - - if (onBranch) { - await registerTaskBranchMutation(c, { - taskId, - branchName: onBranch, - requireExistingRemote: true, - }); - } - - let task: Awaited>; - try { - task = await getOrCreateTask(c, c.state.workspaceId, c.state.repoId, taskId, { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - taskId, - repoRemote: c.state.remoteUrl, - branchName: initialBranchName, - title: initialTitle, - task: cmd.task, - providerId: cmd.providerId, - agentType: cmd.agentType, - explicitTitle: onBranch ? null : cmd.explicitTitle, - explicitBranchName: onBranch ? null : cmd.explicitBranchName, - initialPrompt: cmd.initialPrompt, - }); - } catch (error) { - if (onBranch) { - await c.db - .delete(taskIndex) - .where(eq(taskIndex.taskId, taskId)) - .run() - .catch(() => {}); - } - throw error; - } - - if (!onBranch) { - const now = Date.now(); - await c.db - .insert(taskIndex) - .values({ - taskId, - branchName: initialBranchName, - createdAt: now, - updatedAt: now, - }) - .onConflictDoNothing() - .run(); - } - - const created = await task.initialize({ providerId: cmd.providerId }); - - const history = await getOrCreateHistory(c, c.state.workspaceId, c.state.repoId); - await history.append({ - kind: "task.created", - taskId, - payload: { - repoId: c.state.repoId, - providerId: cmd.providerId, - }, - }); - - return created; -} - -async function registerTaskBranchMutation(c: any, cmd: RegisterTaskBranchCommand): Promise<{ branchName: string; headSha: string }> { - const localPath = await ensureProjectReady(c); - - const branchName = cmd.branchName.trim(); - const requireExistingRemote = cmd.requireExistingRemote === true; - if (!branchName) { - throw new Error("branchName is required"); - } - - await ensureTaskIndexHydrated(c); - - const existingOwner = await c.db - .select({ taskId: taskIndex.taskId }) - .from(taskIndex) - .where(and(eq(taskIndex.branchName, branchName), ne(taskIndex.taskId, cmd.taskId))) - .get(); - 
- if (existingOwner) { - let ownerMissing = false; - try { - const h = getTask(c, c.state.workspaceId, c.state.repoId, existingOwner.taskId); - await h.get(); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - ownerMissing = true; - await deleteStaleTaskIndexRow(c, existingOwner.taskId); - logActorWarning("project", "pruned stale task index row during branch registration", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - taskId: existingOwner.taskId, - branchName, - }); - } else { - throw error; - } - } - if (!ownerMissing) { - throw new Error(`branch is already assigned to a different task: ${branchName}`); - } - } - - const { driver } = getActorRuntimeContext(); - - let headSha = ""; - let trackedInStack = false; - let parentBranch: string | null = null; - const auth = await resolveWorkspaceGithubAuth(c, c.state.workspaceId); - - await withRepoGitLock(localPath, async () => { - await driver.git.fetch(localPath, { githubToken: auth?.githubToken ?? null }); - const baseRef = await driver.git.remoteDefaultBaseRef(localPath); - const normalizedBase = normalizeBaseBranchName(baseRef); - let branchAvailableInRepo = false; - - if (requireExistingRemote) { - try { - headSha = await driver.git.revParse(localPath, `origin/${branchName}`); - branchAvailableInRepo = true; - } catch { - throw new Error(`Remote branch not found: ${branchName}`); - } - } else { - try { - headSha = await driver.git.revParse(localPath, `origin/${branchName}`); - branchAvailableInRepo = true; - } catch { - headSha = await driver.git.revParse(localPath, baseRef); - } - } - - if (branchAvailableInRepo && (await driver.stack.available(localPath).catch(() => false))) { - let stackRows = await driver.stack.listStack(localPath).catch(() => []); - let stackRow = stackRows.find((entry) => entry.branchName === branchName); - - if (!stackRow) { - try { - await driver.stack.trackBranch(localPath, branchName, normalizedBase); - } catch (error) { - logActorWarning("project", 
"stack track failed while registering branch", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - branchName, - error: resolveErrorMessage(error), - }); - } - stackRows = await driver.stack.listStack(localPath).catch(() => []); - stackRow = stackRows.find((entry) => entry.branchName === branchName); - } - - trackedInStack = Boolean(stackRow); - parentBranch = stackRow?.parentBranch ?? null; - } - }); - - const now = Date.now(); - await c.db - .insert(branches) - .values({ - branchName, - commitSha: headSha, - parentBranch, - trackedInStack: trackedInStack ? 1 : 0, - firstSeenAt: now, - lastSeenAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: branches.branchName, - set: { - commitSha: headSha, - parentBranch, - trackedInStack: trackedInStack ? 1 : 0, - lastSeenAt: now, - updatedAt: now, - }, - }) - .run(); - - await c.db - .insert(taskIndex) - .values({ - taskId: cmd.taskId, - branchName, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: taskIndex.taskId, - set: { - branchName, - updatedAt: now, - }, - }) - .run(); - - return { branchName, headSha }; -} - -async function runRepoStackActionMutation(c: any, cmd: RunRepoStackActionCommand): Promise { - const localPath = await ensureProjectReady(c); - await ensureTaskIndexHydrated(c); - - const { driver } = getActorRuntimeContext(); - const at = Date.now(); - const jobId = cmd.jobId ?? 
randomUUID(); - const action = cmd.action; - const branchName = cmd.branchName?.trim() || null; - const parentBranch = cmd.parentBranch?.trim() || null; - - await writeRepoActionJob(c, { - jobId, - action, - branchName, - parentBranch, - status: "running", - message: `Running ${action}`, - createdAt: at, - }); - - if (!(await driver.stack.available(localPath).catch(() => false))) { - await writeRepoActionJob(c, { - jobId, - action, - branchName, - parentBranch, - status: "error", - message: "git-spice is not available for this repo", - createdAt: at, - completedAt: Date.now(), - }); - return { - jobId, - action, - executed: false, - status: "error", - message: "git-spice is not available for this repo", - at, - }; - } - - if ((action === "restack_subtree" || action === "rebase_branch" || action === "reparent_branch") && !branchName) { - throw new Error(`branchName is required for action: ${action}`); - } - if (action === "reparent_branch" && !parentBranch) { - throw new Error("parentBranch is required for action: reparent_branch"); - } - - await forceProjectSync(c, localPath); - - if (branchName) { - const row = await c.db.select({ branchName: branches.branchName }).from(branches).where(eq(branches.branchName, branchName)).get(); - if (!row) { - throw new Error(`Branch not found in repo snapshot: ${branchName}`); - } - } - - if (action === "reparent_branch") { - if (!parentBranch) { - throw new Error("parentBranch is required for action: reparent_branch"); - } - if (parentBranch === branchName) { - throw new Error("parentBranch must be different from branchName"); - } - const parentRow = await c.db.select({ branchName: branches.branchName }).from(branches).where(eq(branches.branchName, parentBranch)).get(); - if (!parentRow) { - throw new Error(`Parent branch not found in repo snapshot: ${parentBranch}`); - } - } - - try { - await withRepoGitLock(localPath, async () => { - if (action === "sync_repo") { - await driver.stack.syncRepo(localPath); - } else if (action 
=== "restack_repo") { - await driver.stack.restackRepo(localPath); - } else if (action === "restack_subtree") { - await driver.stack.restackSubtree(localPath, branchName!); - } else if (action === "rebase_branch") { - await driver.stack.rebaseBranch(localPath, branchName!); - } else if (action === "reparent_branch") { - await driver.stack.reparentBranch(localPath, branchName!, parentBranch!); - } else { - throw new Error(`Unsupported repo stack action: ${action}`); - } - }); - - try { - const history = await getOrCreateHistory(c, c.state.workspaceId, c.state.repoId); - await history.append({ - kind: "repo.stack_action", - branchName: branchName ?? null, - payload: { - action, - branchName: branchName ?? null, - parentBranch: parentBranch ?? null, - jobId, - }, - }); - } catch (error) { - logActorWarning("project", "failed appending repo stack history event", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - action, - error: resolveErrorMessage(error), - }); - } - - await forceProjectSync(c, localPath); - - await writeRepoActionJob(c, { - jobId, - action, - branchName, - parentBranch, - status: "completed", - message: `Completed ${action}`, - createdAt: at, - completedAt: Date.now(), - }); - } catch (error) { - const message = resolveErrorMessage(error); - await writeRepoActionJob(c, { - jobId, - action, - branchName, - parentBranch, - status: "error", - message, - createdAt: at, - completedAt: Date.now(), - }); - throw error; - } - - return { - jobId, - action, - executed: true, - status: "completed", - message: `Completed ${action}`, - at, - }; -} - -async function applyBranchSyncResultMutation(c: any, body: BranchSyncResult): Promise { - const incoming = new Set(body.items.map((item) => item.branchName)); - const reservedRows = await c.db.select({ branchName: taskIndex.branchName }).from(taskIndex).where(isNotNull(taskIndex.branchName)).all(); - const reservedBranches = new Set( - reservedRows.map((row) => row.branchName).filter((branchName): 
branchName is string => typeof branchName === "string" && branchName.length > 0), - ); - - for (const item of body.items) { - const existing = await c.db - .select({ - firstSeenAt: branches.firstSeenAt, - }) - .from(branches) - .where(eq(branches.branchName, item.branchName)) - .get(); - - await c.db - .insert(branches) - .values({ - branchName: item.branchName, - commitSha: item.commitSha, - parentBranch: item.parentBranch ?? null, - trackedInStack: item.trackedInStack ? 1 : 0, - diffStat: item.diffStat ?? null, - hasUnpushed: item.hasUnpushed ? 1 : 0, - conflictsWithMain: item.conflictsWithMain ? 1 : 0, - firstSeenAt: existing?.firstSeenAt ?? body.at, - lastSeenAt: body.at, - updatedAt: body.at, - }) - .onConflictDoUpdate({ - target: branches.branchName, - set: { - commitSha: item.commitSha, - parentBranch: item.parentBranch ?? null, - trackedInStack: item.trackedInStack ? 1 : 0, - diffStat: item.diffStat ?? null, - hasUnpushed: item.hasUnpushed ? 1 : 0, - conflictsWithMain: item.conflictsWithMain ? 1 : 0, - firstSeenAt: existing?.firstSeenAt ?? 
body.at, - lastSeenAt: body.at, - updatedAt: body.at, - }, - }) - .run(); - } - - const existingRows = await c.db.select({ branchName: branches.branchName }).from(branches).all(); - - for (const row of existingRows) { - if (incoming.has(row.branchName) || reservedBranches.has(row.branchName)) { - continue; - } - await c.db.delete(branches).where(eq(branches.branchName, row.branchName)).run(); - } -} - -export async function runProjectWorkflow(ctx: any): Promise { - await ctx.loop("project-command-loop", async (loopCtx: any) => { - const msg = await loopCtx.queue.next("next-project-command", { - names: [...PROJECT_QUEUE_NAMES], - completable: true, - }); - if (!msg) { - return Loop.continue(undefined); - } - - try { - if (msg.name === "project.command.ensure") { - const result = await loopCtx.step({ - name: "project-ensure", - timeout: 5 * 60_000, - run: async () => ensureProjectMutation(loopCtx, msg.body as EnsureProjectCommand), - }); - await msg.complete(result); - return Loop.continue(undefined); - } - - if (msg.name === "project.command.hydrateTaskIndex") { - await loopCtx.step("project-hydrate-task-index", async () => hydrateTaskIndexMutation(loopCtx, msg.body as HydrateTaskIndexCommand)); - await msg.complete({ ok: true }); - return Loop.continue(undefined); - } - - if (msg.name === "project.command.createTask") { - const result = await loopCtx.step({ - name: "project-create-task", - timeout: 5 * 60_000, - run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskCommand), - }); - await msg.complete(result); - return Loop.continue(undefined); - } - - if (msg.name === "project.command.registerTaskBranch") { - const result = await loopCtx.step({ - name: "project-register-task-branch", - timeout: 5 * 60_000, - run: async () => registerTaskBranchMutation(loopCtx, msg.body as RegisterTaskBranchCommand), - }); - await msg.complete(result); - return Loop.continue(undefined); - } - - if (msg.name === "project.command.runRepoStackAction") { - const result = 
await loopCtx.step({ - name: "project-run-repo-stack-action", - timeout: 12 * 60_000, - run: async () => runRepoStackActionMutation(loopCtx, msg.body as RunRepoStackActionCommand), - }); - await msg.complete(result); - return Loop.continue(undefined); - } - - if (msg.name === "project.command.applyBranchSyncResult") { - await loopCtx.step({ - name: "project-apply-branch-sync-result", - timeout: 60_000, - run: async () => applyBranchSyncResultMutation(loopCtx, msg.body as BranchSyncResult), - }); - await msg.complete({ ok: true }); - } - } catch (error) { - const message = resolveErrorMessage(error); - logActorWarning("project", "project workflow command failed", { - workspaceId: loopCtx.state.workspaceId, - repoId: loopCtx.state.repoId, - queueName: msg.name, - error: message, - }); - await msg.complete({ error: message }).catch((completeError: unknown) => { - logActorWarning("project", "project workflow failed completing error response", { - workspaceId: loopCtx.state.workspaceId, - repoId: loopCtx.state.repoId, - queueName: msg.name, - error: resolveErrorMessage(completeError), - }); - }); - } - - return Loop.continue(undefined); - }); -} - -export const projectActions = { - async ensure(c: any, cmd: EnsureProjectCommand): Promise { - const self = selfProject(c); - return expectQueueResponse( - await self.send(projectWorkflowQueueName("project.command.ensure"), cmd, { - wait: true, - timeout: 5 * 60_000, - }), - ); - }, - - async createTask(c: any, cmd: CreateTaskCommand): Promise { - const self = selfProject(c); - return expectQueueResponse( - await self.send(projectWorkflowQueueName("project.command.createTask"), cmd, { - wait: true, - timeout: 5 * 60_000, - }), - ); - }, - - async listReservedBranches(c: any, _cmd?: ListReservedBranchesCommand): Promise { - await ensureTaskIndexHydratedForRead(c); - - const rows = await c.db.select({ branchName: taskIndex.branchName }).from(taskIndex).where(isNotNull(taskIndex.branchName)).all(); - - return rows.map((row) => 
row.branchName).filter((name): name is string => typeof name === "string" && name.trim().length > 0); - }, - - async registerTaskBranch(c: any, cmd: RegisterTaskBranchCommand): Promise<{ branchName: string; headSha: string }> { - const self = selfProject(c); - return expectQueueResponse<{ branchName: string; headSha: string }>( - await self.send(projectWorkflowQueueName("project.command.registerTaskBranch"), cmd, { - wait: true, - timeout: 5 * 60_000, - }), - ); - }, - - async hydrateTaskIndex(c: any, cmd?: HydrateTaskIndexCommand): Promise { - const self = selfProject(c); - await self.send(projectWorkflowQueueName("project.command.hydrateTaskIndex"), cmd ?? {}, { - wait: true, - timeout: 60_000, - }); - }, - - async listTaskSummaries(c: any, cmd?: ListTaskSummariesCommand): Promise { - const body = cmd ?? {}; - const records: TaskSummary[] = []; - - await ensureTaskIndexHydratedForRead(c); - - const taskRows = await c.db.select({ taskId: taskIndex.taskId }).from(taskIndex).orderBy(desc(taskIndex.updatedAt)).all(); - - for (const row of taskRows) { - try { - const h = getTask(c, c.state.workspaceId, c.state.repoId, row.taskId); - const record = await h.get(); - - if (!body.includeArchived && record.status === "archived") { - continue; - } - - records.push({ - workspaceId: record.workspaceId, - repoId: record.repoId, - taskId: record.taskId, - branchName: record.branchName, - title: record.title, - status: record.status, - updatedAt: record.updatedAt, - }); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - await deleteStaleTaskIndexRow(c, row.taskId); - logActorWarning("project", "pruned stale task index row during summary listing", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - taskId: row.taskId, - }); - continue; - } - logActorWarning("project", "failed loading task summary row", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - taskId: row.taskId, - error: resolveErrorMessage(error), - }); - } - } - - 
records.sort((a, b) => b.updatedAt - a.updatedAt); - return records; - }, - - async getTaskEnriched(c: any, cmd: GetTaskEnrichedCommand): Promise { - await ensureTaskIndexHydratedForRead(c); - - const row = await c.db.select({ taskId: taskIndex.taskId }).from(taskIndex).where(eq(taskIndex.taskId, cmd.taskId)).get(); - if (!row) { - try { - const h = getTask(c, c.state.workspaceId, c.state.repoId, cmd.taskId); - const record = await h.get(); - await reinsertTaskIndexRow(c, cmd.taskId, record.branchName ?? null, record.updatedAt ?? Date.now()); - return await enrichTaskRecord(c, record); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - throw new Error(`Unknown task in repo ${c.state.repoId}: ${cmd.taskId}`); - } - throw error; - } - } - - try { - const h = getTask(c, c.state.workspaceId, c.state.repoId, cmd.taskId); - const record = await h.get(); - return await enrichTaskRecord(c, record); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - await deleteStaleTaskIndexRow(c, cmd.taskId); - throw new Error(`Unknown task in repo ${c.state.repoId}: ${cmd.taskId}`); - } - throw error; - } - }, - - async getRepoOverview(c: any, _cmd?: RepoOverviewCommand): Promise { - const localPath = await ensureProjectReadyForRead(c); - await ensureTaskIndexHydratedForRead(c); - - const { driver } = getActorRuntimeContext(); - const now = Date.now(); - const baseRef = await driver.git.remoteDefaultBaseRef(localPath).catch(() => null); - const stackAvailable = await driver.stack.available(localPath).catch(() => false); - - const branchRowsRaw = await c.db - .select({ - branchName: branches.branchName, - commitSha: branches.commitSha, - parentBranch: branches.parentBranch, - trackedInStack: branches.trackedInStack, - diffStat: branches.diffStat, - hasUnpushed: branches.hasUnpushed, - conflictsWithMain: branches.conflictsWithMain, - firstSeenAt: branches.firstSeenAt, - lastSeenAt: branches.lastSeenAt, - updatedAt: branches.updatedAt, - }) - .from(branches) - 
.all(); - - const taskRows = await c.db - .select({ - taskId: taskIndex.taskId, - branchName: taskIndex.branchName, - updatedAt: taskIndex.updatedAt, - }) - .from(taskIndex) - .all(); - - const taskMetaByBranch = new Map(); - - for (const row of taskRows) { - if (!row.branchName) { - continue; - } - try { - const h = getTask(c, c.state.workspaceId, c.state.repoId, row.taskId); - const record = await h.get(); - taskMetaByBranch.set(row.branchName, { - taskId: row.taskId, - title: record.title ?? null, - status: record.status, - updatedAt: record.updatedAt, - }); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - await deleteStaleTaskIndexRow(c, row.taskId); - logActorWarning("project", "pruned stale task index row during repo overview", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - taskId: row.taskId, - branchName: row.branchName, - }); - continue; - } - logActorWarning("project", "failed loading task while building repo overview", { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - taskId: row.taskId, - branchName: row.branchName, - error: resolveErrorMessage(error), - }); - } - } - - const githubData = getGithubData(c, c.state.workspaceId); - const prRows = await githubData.listPullRequestsForRepository({ repoId: c.state.repoId }).catch(() => []); - const prByBranch = new Map(prRows.map((row) => [row.headRefName, row])); - - const combinedRows = sortBranchesForOverview( - branchRowsRaw.map((row) => ({ - branchName: row.branchName, - parentBranch: row.parentBranch ?? null, - updatedAt: row.updatedAt, - })), - ); - - const detailByBranch = new Map(branchRowsRaw.map((row) => [row.branchName, row])); - - const branchRows = combinedRows.map((ordering) => { - const row = detailByBranch.get(ordering.branchName)!; - const taskMeta = taskMetaByBranch.get(row.branchName); - const pr = prByBranch.get(row.branchName); - return { - branchName: row.branchName, - commitSha: row.commitSha, - parentBranch: row.parentBranch ?? 
null, - trackedInStack: Boolean(row.trackedInStack), - diffStat: row.diffStat ?? null, - hasUnpushed: Boolean(row.hasUnpushed), - conflictsWithMain: Boolean(row.conflictsWithMain), - taskId: taskMeta?.taskId ?? null, - taskTitle: taskMeta?.title ?? null, - taskStatus: taskMeta?.status ?? null, - prNumber: pr?.number ?? null, - prState: pr?.state ?? null, - prUrl: pr?.url ?? null, - ciStatus: null, - reviewStatus: null, - reviewer: pr?.authorLogin ?? null, - firstSeenAt: row.firstSeenAt ?? null, - lastSeenAt: row.lastSeenAt ?? null, - updatedAt: Math.max(row.updatedAt, taskMeta?.updatedAt ?? 0), - }; - }); - - const latestBranchSync = await c.db.select({ updatedAt: branches.updatedAt }).from(branches).orderBy(desc(branches.updatedAt)).limit(1).get(); - const githubSummary = await githubData.getSummary().catch(() => null); - - return { - workspaceId: c.state.workspaceId, - repoId: c.state.repoId, - remoteUrl: c.state.remoteUrl, - baseRef, - stackAvailable, - fetchedAt: now, - branchSyncAt: latestBranchSync?.updatedAt ?? null, - prSyncAt: githubSummary?.lastSyncAt ?? null, - branchSyncStatus: latestBranchSync ? "synced" : "pending", - prSyncStatus: githubSummary?.syncStatus ?? 
"pending", - repoActionJobs: await listRepoActionJobRows(c), - branches: branchRows, - }; - }, - - async getPullRequestForBranch(c: any, cmd: GetPullRequestForBranchCommand): Promise<{ number: number; status: "draft" | "ready" } | null> { - const branchName = cmd.branchName?.trim(); - if (!branchName) { - return null; - } - const githubData = getGithubData(c, c.state.workspaceId); - return await githubData.getPullRequestForBranch({ - repoId: c.state.repoId, - branchName, - }); - }, - - async runRepoStackAction(c: any, cmd: RunRepoStackActionCommand): Promise { - const self = selfProject(c); - const jobId = randomUUID(); - const at = Date.now(); - const action = cmd.action; - const branchName = cmd.branchName?.trim() || null; - const parentBranch = cmd.parentBranch?.trim() || null; - - await writeRepoActionJob(c, { - jobId, - action, - branchName, - parentBranch, - status: "queued", - message: `Queued ${action}`, - createdAt: at, - }); - - await self.send( - projectWorkflowQueueName("project.command.runRepoStackAction"), - { - ...cmd, - jobId, - }, - { - wait: false, - }, - ); - - return { - jobId, - action, - executed: true, - status: "queued", - message: `Queued ${action}`, - at, - }; - }, - - async applyBranchSyncResult(c: any, body: BranchSyncResult): Promise { - const self = selfProject(c); - await self.send(projectWorkflowQueueName("project.command.applyBranchSyncResult"), body, { - wait: true, - timeout: 5 * 60_000, - }); - }, -}; diff --git a/foundry/packages/backend/src/actors/project/db/drizzle.config.ts b/foundry/packages/backend/src/actors/project/db/drizzle.config.ts deleted file mode 100644 index 5f53fc9..0000000 --- a/foundry/packages/backend/src/actors/project/db/drizzle.config.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { defineConfig } from "rivetkit/db/drizzle"; - -export default defineConfig({ - out: "./src/actors/project/db/drizzle", - schema: "./src/actors/project/db/schema.ts", -}); diff --git 
a/foundry/packages/backend/src/actors/project/db/drizzle/0000_useful_la_nuit.sql b/foundry/packages/backend/src/actors/project/db/drizzle/0000_useful_la_nuit.sql deleted file mode 100644 index f4f23ff..0000000 --- a/foundry/packages/backend/src/actors/project/db/drizzle/0000_useful_la_nuit.sql +++ /dev/null @@ -1,40 +0,0 @@ -CREATE TABLE `branches` ( - `branch_name` text PRIMARY KEY NOT NULL, - `commit_sha` text NOT NULL, - `parent_branch` text, - `tracked_in_stack` integer DEFAULT 0 NOT NULL, - `diff_stat` text, - `has_unpushed` integer DEFAULT 0 NOT NULL, - `conflicts_with_main` integer DEFAULT 0 NOT NULL, - `first_seen_at` integer, - `last_seen_at` integer, - `updated_at` integer NOT NULL -); ---> statement-breakpoint -CREATE TABLE `pr_cache` ( - `branch_name` text PRIMARY KEY NOT NULL, - `pr_number` integer NOT NULL, - `state` text NOT NULL, - `title` text NOT NULL, - `pr_url` text, - `pr_author` text, - `is_draft` integer DEFAULT 0 NOT NULL, - `ci_status` text, - `review_status` text, - `reviewer` text, - `fetched_at` integer, - `updated_at` integer NOT NULL -); ---> statement-breakpoint -CREATE TABLE `repo_meta` ( - `id` integer PRIMARY KEY NOT NULL, - `remote_url` text NOT NULL, - `updated_at` integer NOT NULL -); ---> statement-breakpoint -CREATE TABLE `task_index` ( - `task_id` text PRIMARY KEY NOT NULL, - `branch_name` text, - `created_at` integer NOT NULL, - `updated_at` integer NOT NULL -); diff --git a/foundry/packages/backend/src/actors/project/db/drizzle/meta/0000_snapshot.json b/foundry/packages/backend/src/actors/project/db/drizzle/meta/0000_snapshot.json deleted file mode 100644 index baf5913..0000000 --- a/foundry/packages/backend/src/actors/project/db/drizzle/meta/0000_snapshot.json +++ /dev/null @@ -1,265 +0,0 @@ -{ - "version": "6", - "dialect": "sqlite", - "id": "6ffd6acb-e737-46ee-a8fe-fcfddcdd6ea9", - "prevId": "00000000-0000-0000-0000-000000000000", - "tables": { - "branches": { - "name": "branches", - "columns": { - "branch_name": { - 
"name": "branch_name", - "type": "text", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "commit_sha": { - "name": "commit_sha", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "parent_branch": { - "name": "parent_branch", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "tracked_in_stack": { - "name": "tracked_in_stack", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": 0 - }, - "diff_stat": { - "name": "diff_stat", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "has_unpushed": { - "name": "has_unpushed", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": 0 - }, - "conflicts_with_main": { - "name": "conflicts_with_main", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": 0 - }, - "first_seen_at": { - "name": "first_seen_at", - "type": "integer", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "last_seen_at": { - "name": "last_seen_at", - "type": "integer", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "updated_at": { - "name": "updated_at", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - }, - "pr_cache": { - "name": "pr_cache", - "columns": { - "branch_name": { - "name": "branch_name", - "type": "text", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "pr_number": { - "name": "pr_number", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "state": { - "name": "state", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "title": 
{ - "name": "title", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "pr_url": { - "name": "pr_url", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "pr_author": { - "name": "pr_author", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "is_draft": { - "name": "is_draft", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": 0 - }, - "ci_status": { - "name": "ci_status", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "review_status": { - "name": "review_status", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "reviewer": { - "name": "reviewer", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "fetched_at": { - "name": "fetched_at", - "type": "integer", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "updated_at": { - "name": "updated_at", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - }, - "repo_meta": { - "name": "repo_meta", - "columns": { - "id": { - "name": "id", - "type": "integer", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "remote_url": { - "name": "remote_url", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "updated_at": { - "name": "updated_at", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - }, - "task_index": { - "name": "task_index", - "columns": { - "task_id": { - "name": "task_id", - "type": "text", - 
"primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "branch_name": { - "name": "branch_name", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "created_at": { - "name": "created_at", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - }, - "updated_at": { - "name": "updated_at", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - } - }, - "views": {}, - "enums": {}, - "_meta": { - "schemas": {}, - "tables": {}, - "columns": {} - }, - "internal": { - "indexes": {} - } -} diff --git a/foundry/packages/backend/src/actors/project/db/migrations.ts b/foundry/packages/backend/src/actors/project/db/migrations.ts deleted file mode 100644 index fd705b7..0000000 --- a/foundry/packages/backend/src/actors/project/db/migrations.ts +++ /dev/null @@ -1,46 +0,0 @@ -// This file is generated by src/actors/_scripts/generate-actor-migrations.ts. -// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql). -// Do not hand-edit this file. 
- -const journal = { - entries: [ - { - idx: 0, - when: 1773376221848, - tag: "0000_useful_la_nuit", - breakpoints: true, - }, - ], -} as const; - -export default { - journal, - migrations: { - m0000: `CREATE TABLE \`branches\` ( - \`branch_name\` text PRIMARY KEY NOT NULL, - \`commit_sha\` text NOT NULL, - \`parent_branch\` text, - \`tracked_in_stack\` integer DEFAULT 0 NOT NULL, - \`diff_stat\` text, - \`has_unpushed\` integer DEFAULT 0 NOT NULL, - \`conflicts_with_main\` integer DEFAULT 0 NOT NULL, - \`first_seen_at\` integer, - \`last_seen_at\` integer, - \`updated_at\` integer NOT NULL -); ---> statement-breakpoint -CREATE TABLE \`repo_meta\` ( - \`id\` integer PRIMARY KEY NOT NULL, - \`remote_url\` text NOT NULL, - \`updated_at\` integer NOT NULL -); ---> statement-breakpoint -CREATE TABLE \`task_index\` ( - \`task_id\` text PRIMARY KEY NOT NULL, - \`branch_name\` text, - \`created_at\` integer NOT NULL, - \`updated_at\` integer NOT NULL -); -`, - } as const, -}; diff --git a/foundry/packages/backend/src/actors/project/db/schema.ts b/foundry/packages/backend/src/actors/project/db/schema.ts deleted file mode 100644 index bb61d75..0000000 --- a/foundry/packages/backend/src/actors/project/db/schema.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { integer, sqliteTable, text } from "rivetkit/db/drizzle"; - -// SQLite is per project actor instance (workspaceId+repoId), so no workspaceId/repoId columns needed. 
- -export const branches = sqliteTable("branches", { - branchName: text("branch_name").notNull().primaryKey(), - commitSha: text("commit_sha").notNull(), - parentBranch: text("parent_branch"), - trackedInStack: integer("tracked_in_stack").notNull().default(0), - diffStat: text("diff_stat"), - hasUnpushed: integer("has_unpushed").notNull().default(0), - conflictsWithMain: integer("conflicts_with_main").notNull().default(0), - firstSeenAt: integer("first_seen_at"), - lastSeenAt: integer("last_seen_at"), - updatedAt: integer("updated_at").notNull(), -}); - -export const repoMeta = sqliteTable("repo_meta", { - id: integer("id").primaryKey(), - remoteUrl: text("remote_url").notNull(), - updatedAt: integer("updated_at").notNull(), -}); - -export const taskIndex = sqliteTable("task_index", { - taskId: text("task_id").notNull().primaryKey(), - branchName: text("branch_name"), - createdAt: integer("created_at").notNull(), - updatedAt: integer("updated_at").notNull(), -}); - -export const repoActionJobs = sqliteTable("repo_action_jobs", { - jobId: text("job_id").notNull().primaryKey(), - action: text("action").notNull(), - branchName: text("branch_name"), - parentBranch: text("parent_branch"), - status: text("status").notNull(), - message: text("message").notNull(), - createdAt: integer("created_at").notNull(), - updatedAt: integer("updated_at").notNull(), - completedAt: integer("completed_at"), -}); diff --git a/foundry/packages/backend/src/actors/project/index.ts b/foundry/packages/backend/src/actors/project/index.ts deleted file mode 100644 index c5ba8a7..0000000 --- a/foundry/packages/backend/src/actors/project/index.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { actor, queue } from "rivetkit"; -import { workflow } from "rivetkit/workflow"; -import { projectDb } from "./db/db.js"; -import { PROJECT_QUEUE_NAMES, projectActions, runProjectWorkflow } from "./actions.js"; - -export interface ProjectInput { - workspaceId: string; - repoId: string; - remoteUrl: string; -} - 
-export const project = actor({ - db: projectDb, - queues: Object.fromEntries(PROJECT_QUEUE_NAMES.map((name) => [name, queue()])), - options: { - name: "Project", - icon: "folder", - actionTimeout: 5 * 60_000, - }, - createState: (_c, input: ProjectInput) => ({ - workspaceId: input.workspaceId, - repoId: input.repoId, - remoteUrl: input.remoteUrl, - localPath: null as string | null, - syncActorsStarted: false, - taskIndexHydrated: false, - }), - actions: projectActions, - run: workflow(runProjectWorkflow), -}); diff --git a/foundry/packages/backend/src/actors/project/stack-model.ts b/foundry/packages/backend/src/actors/project/stack-model.ts deleted file mode 100644 index 78c9888..0000000 --- a/foundry/packages/backend/src/actors/project/stack-model.ts +++ /dev/null @@ -1,69 +0,0 @@ -export interface StackEntry { - branchName: string; - parentBranch: string | null; -} - -export interface OrderedBranchRow { - branchName: string; - parentBranch: string | null; - updatedAt: number; -} - -export function normalizeParentBranch(branchName: string, parentBranch: string | null | undefined): string | null { - const parent = parentBranch?.trim() || null; - if (!parent || parent === branchName) { - return null; - } - return parent; -} - -export function parentLookupFromStack(entries: StackEntry[]): Map { - const lookup = new Map(); - for (const entry of entries) { - const branchName = entry.branchName.trim(); - if (!branchName) { - continue; - } - lookup.set(branchName, normalizeParentBranch(branchName, entry.parentBranch)); - } - return lookup; -} - -export function sortBranchesForOverview(rows: OrderedBranchRow[]): OrderedBranchRow[] { - const byName = new Map(rows.map((row) => [row.branchName, row])); - const depthMemo = new Map(); - const computing = new Set(); - - const depthFor = (branchName: string): number => { - const cached = depthMemo.get(branchName); - if (cached != null) { - return cached; - } - if (computing.has(branchName)) { - return 999; - } - - 
computing.add(branchName); - const row = byName.get(branchName); - const parent = row?.parentBranch; - let depth = 0; - if (parent && parent !== branchName && byName.has(parent)) { - depth = Math.min(998, depthFor(parent) + 1); - } - computing.delete(branchName); - depthMemo.set(branchName, depth); - return depth; - }; - - return [...rows].sort((a, b) => { - const da = depthFor(a.branchName); - const db = depthFor(b.branchName); - if (da !== db) { - return da - db; - } - if (a.updatedAt !== b.updatedAt) { - return b.updatedAt - a.updatedAt; - } - return a.branchName.localeCompare(b.branchName); - }); -} diff --git a/foundry/packages/backend/src/actors/repository/actions.ts b/foundry/packages/backend/src/actors/repository/actions.ts new file mode 100644 index 0000000..9ef8e75 --- /dev/null +++ b/foundry/packages/backend/src/actors/repository/actions.ts @@ -0,0 +1,557 @@ +// @ts-nocheck +import { randomUUID } from "node:crypto"; +import { and, desc, eq, isNotNull, ne } from "drizzle-orm"; +import { Loop } from "rivetkit/workflow"; +import type { AgentType, RepoOverview, SandboxProviderId, TaskRecord, TaskSummary } from "@sandbox-agent/foundry-shared"; +import { getGithubData, getOrCreateHistory, getOrCreateTask, getTask, selfRepository } from "../handles.js"; +import { deriveFallbackTitle, resolveCreateFlowDecision } from "../../services/create-flow.js"; +import { expectQueueResponse } from "../../services/queue.js"; +import { isActorNotFoundError, logActorWarning, resolveErrorMessage } from "../logging.js"; +import { repoMeta, taskIndex } from "./db/schema.js"; + +interface CreateTaskCommand { + task: string; + sandboxProviderId: SandboxProviderId; + agentType: AgentType | null; + explicitTitle: string | null; + explicitBranchName: string | null; + initialPrompt: string | null; + onBranch: string | null; +} + +interface RegisterTaskBranchCommand { + taskId: string; + branchName: string; + requireExistingRemote?: boolean; +} + +interface ListTaskSummariesCommand { + 
includeArchived?: boolean; +} + +interface GetTaskEnrichedCommand { + taskId: string; +} + +interface GetPullRequestForBranchCommand { + branchName: string; +} + +const REPOSITORY_QUEUE_NAMES = ["repository.command.createTask", "repository.command.registerTaskBranch"] as const; + +type RepositoryQueueName = (typeof REPOSITORY_QUEUE_NAMES)[number]; + +export { REPOSITORY_QUEUE_NAMES }; + +export function repositoryWorkflowQueueName(name: RepositoryQueueName): RepositoryQueueName { + return name; +} + +function isStaleTaskReferenceError(error: unknown): boolean { + const message = resolveErrorMessage(error); + return isActorNotFoundError(error) || message.startsWith("Task not found:"); +} + +async function persistRemoteUrl(c: any, remoteUrl: string): Promise { + c.state.remoteUrl = remoteUrl; + await c.db + .insert(repoMeta) + .values({ + id: 1, + remoteUrl, + updatedAt: Date.now(), + }) + .onConflictDoUpdate({ + target: repoMeta.id, + set: { + remoteUrl, + updatedAt: Date.now(), + }, + }) + .run(); +} + +async function deleteStaleTaskIndexRow(c: any, taskId: string): Promise { + try { + await c.db.delete(taskIndex).where(eq(taskIndex.taskId, taskId)).run(); + } catch { + // Best effort cleanup only. 
+ } +} + +async function reinsertTaskIndexRow(c: any, taskId: string, branchName: string | null, updatedAt: number): Promise { + const now = Date.now(); + await c.db + .insert(taskIndex) + .values({ + taskId, + branchName, + createdAt: updatedAt || now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: taskIndex.taskId, + set: { + branchName, + updatedAt: now, + }, + }) + .run(); +} + +async function listKnownTaskBranches(c: any): Promise { + const rows = await c.db.select({ branchName: taskIndex.branchName }).from(taskIndex).where(isNotNull(taskIndex.branchName)).all(); + return rows.map((row) => row.branchName).filter((value): value is string => typeof value === "string" && value.trim().length > 0); +} + +async function resolveGitHubRepository(c: any) { + const githubData = getGithubData(c, c.state.organizationId); + return await githubData.getRepository({ repoId: c.state.repoId }).catch(() => null); +} + +async function listGitHubBranches(c: any): Promise> { + const githubData = getGithubData(c, c.state.organizationId); + return await githubData.listBranchesForRepository({ repoId: c.state.repoId }).catch(() => []); +} + +async function enrichTaskRecord(c: any, record: TaskRecord): Promise { + const branchName = record.branchName?.trim() || null; + if (!branchName) { + return record; + } + + const pr = + branchName != null + ? await getGithubData(c, c.state.organizationId) + .listPullRequestsForRepository({ repoId: c.state.repoId }) + .then((rows: any[]) => rows.find((row) => row.headRefName === branchName) ?? null) + .catch(() => null) + : null; + + return { + ...record, + prUrl: pr?.url ?? null, + prAuthor: pr?.authorLogin ?? null, + ciStatus: null, + reviewStatus: null, + reviewer: pr?.authorLogin ?? null, + diffStat: record.diffStat ?? null, + hasUnpushed: record.hasUnpushed ?? null, + conflictsWithMain: record.conflictsWithMain ?? null, + parentBranch: record.parentBranch ?? 
null, + }; +} + +async function createTaskMutation(c: any, cmd: CreateTaskCommand): Promise { + const organizationId = c.state.organizationId; + const repoId = c.state.repoId; + const repoRemote = c.state.remoteUrl; + const onBranch = cmd.onBranch?.trim() || null; + const taskId = randomUUID(); + let initialBranchName: string | null = null; + let initialTitle: string | null = null; + + await persistRemoteUrl(c, repoRemote); + + if (onBranch) { + initialBranchName = onBranch; + initialTitle = deriveFallbackTitle(cmd.task, cmd.explicitTitle ?? undefined); + + await registerTaskBranchMutation(c, { + taskId, + branchName: onBranch, + requireExistingRemote: true, + }); + } else { + const reservedBranches = await listKnownTaskBranches(c); + const resolved = resolveCreateFlowDecision({ + task: cmd.task, + explicitTitle: cmd.explicitTitle ?? undefined, + explicitBranchName: cmd.explicitBranchName ?? undefined, + localBranches: [], + taskBranches: reservedBranches, + }); + + initialBranchName = resolved.branchName; + initialTitle = resolved.title; + + const now = Date.now(); + await c.db + .insert(taskIndex) + .values({ + taskId, + branchName: resolved.branchName, + createdAt: now, + updatedAt: now, + }) + .onConflictDoNothing() + .run(); + } + + let taskHandle: Awaited>; + try { + taskHandle = await getOrCreateTask(c, organizationId, repoId, taskId, { + organizationId, + repoId, + taskId, + repoRemote, + branchName: initialBranchName, + title: initialTitle, + task: cmd.task, + sandboxProviderId: cmd.sandboxProviderId, + agentType: cmd.agentType, + explicitTitle: null, + explicitBranchName: null, + initialPrompt: cmd.initialPrompt, + }); + } catch (error) { + if (initialBranchName) { + await deleteStaleTaskIndexRow(c, taskId); + } + throw error; + } + + const created = await taskHandle.initialize({ sandboxProviderId: cmd.sandboxProviderId }); + + const history = await getOrCreateHistory(c, organizationId, repoId); + await history.append({ + kind: "task.created", + taskId, + 
payload: { + repoId, + sandboxProviderId: cmd.sandboxProviderId, + }, + }); + + return created; +} + +async function registerTaskBranchMutation(c: any, cmd: RegisterTaskBranchCommand): Promise<{ branchName: string; headSha: string }> { + const branchName = cmd.branchName.trim(); + if (!branchName) { + throw new Error("branchName is required"); + } + + await persistRemoteUrl(c, c.state.remoteUrl); + + const existingOwner = await c.db + .select({ taskId: taskIndex.taskId }) + .from(taskIndex) + .where(and(eq(taskIndex.branchName, branchName), ne(taskIndex.taskId, cmd.taskId))) + .get(); + + if (existingOwner) { + let ownerMissing = false; + try { + await getTask(c, c.state.organizationId, c.state.repoId, existingOwner.taskId).get(); + } catch (error) { + if (isStaleTaskReferenceError(error)) { + ownerMissing = true; + await deleteStaleTaskIndexRow(c, existingOwner.taskId); + } else { + throw error; + } + } + if (!ownerMissing) { + throw new Error(`branch is already assigned to a different task: ${branchName}`); + } + } + + const branches = await listGitHubBranches(c); + const branchMatch = branches.find((branch) => branch.branchName === branchName) ?? null; + if (cmd.requireExistingRemote && !branchMatch) { + throw new Error(`Remote branch not found: ${branchName}`); + } + + const repository = await resolveGitHubRepository(c); + const defaultBranch = repository?.defaultBranch ?? "main"; + const headSha = branchMatch?.commitSha ?? branches.find((branch) => branch.branchName === defaultBranch)?.commitSha ?? 
""; + + const now = Date.now(); + await c.db + .insert(taskIndex) + .values({ + taskId: cmd.taskId, + branchName, + createdAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: taskIndex.taskId, + set: { + branchName, + updatedAt: now, + }, + }) + .run(); + + return { branchName, headSha }; +} + +async function listTaskSummaries(c: any, includeArchived = false): Promise { + const taskRows = await c.db.select({ taskId: taskIndex.taskId }).from(taskIndex).orderBy(desc(taskIndex.updatedAt)).all(); + const records: TaskSummary[] = []; + + for (const row of taskRows) { + try { + const record = await getTask(c, c.state.organizationId, c.state.repoId, row.taskId).get(); + if (!includeArchived && record.status === "archived") { + continue; + } + records.push({ + organizationId: record.organizationId, + repoId: record.repoId, + taskId: record.taskId, + branchName: record.branchName, + title: record.title, + status: record.status, + updatedAt: record.updatedAt, + }); + } catch (error) { + if (isStaleTaskReferenceError(error)) { + await deleteStaleTaskIndexRow(c, row.taskId); + continue; + } + logActorWarning("repository", "failed loading task summary row", { + organizationId: c.state.organizationId, + repoId: c.state.repoId, + taskId: row.taskId, + error: resolveErrorMessage(error), + }); + } + } + + records.sort((a, b) => b.updatedAt - a.updatedAt); + return records; +} + +function sortOverviewBranches( + branches: Array<{ + branchName: string; + commitSha: string; + taskId: string | null; + taskTitle: string | null; + taskStatus: TaskRecord["status"] | null; + prNumber: number | null; + prState: string | null; + prUrl: string | null; + ciStatus: string | null; + reviewStatus: string | null; + reviewer: string | null; + updatedAt: number; + }>, + defaultBranch: string | null, +) { + return [...branches].sort((left, right) => { + if (defaultBranch) { + if (left.branchName === defaultBranch && right.branchName !== defaultBranch) return -1; + if (right.branchName 
=== defaultBranch && left.branchName !== defaultBranch) return 1; + } + if (Boolean(left.taskId) !== Boolean(right.taskId)) { + return left.taskId ? -1 : 1; + } + if (left.updatedAt !== right.updatedAt) { + return right.updatedAt - left.updatedAt; + } + return left.branchName.localeCompare(right.branchName); + }); +} + +export async function runRepositoryWorkflow(ctx: any): Promise { + await ctx.loop("repository-command-loop", async (loopCtx: any) => { + const msg = await loopCtx.queue.next("next-repository-command", { + names: [...REPOSITORY_QUEUE_NAMES], + completable: true, + }); + if (!msg) { + return Loop.continue(undefined); + } + + try { + if (msg.name === "repository.command.createTask") { + const result = await loopCtx.step({ + name: "repository-create-task", + timeout: 5 * 60_000, + run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskCommand), + }); + await msg.complete(result); + return Loop.continue(undefined); + } + + if (msg.name === "repository.command.registerTaskBranch") { + const result = await loopCtx.step({ + name: "repository-register-task-branch", + timeout: 60_000, + run: async () => registerTaskBranchMutation(loopCtx, msg.body as RegisterTaskBranchCommand), + }); + await msg.complete(result); + return Loop.continue(undefined); + } + } catch (error) { + const message = resolveErrorMessage(error); + logActorWarning("repository", "repository workflow command failed", { + queueName: msg.name, + error: message, + }); + await msg.complete({ error: message }).catch(() => {}); + } + + return Loop.continue(undefined); + }); +} + +export const repositoryActions = { + async createTask(c: any, cmd: CreateTaskCommand): Promise { + const self = selfRepository(c); + return expectQueueResponse( + await self.send(repositoryWorkflowQueueName("repository.command.createTask"), cmd, { + wait: true, + timeout: 10_000, + }), + ); + }, + + async listReservedBranches(c: any): Promise { + return await listKnownTaskBranches(c); + }, + + async 
registerTaskBranch(c: any, cmd: RegisterTaskBranchCommand): Promise<{ branchName: string; headSha: string }> { + const self = selfRepository(c); + return expectQueueResponse<{ branchName: string; headSha: string }>( + await self.send(repositoryWorkflowQueueName("repository.command.registerTaskBranch"), cmd, { + wait: true, + timeout: 10_000, + }), + ); + }, + + async listTaskSummaries(c: any, cmd?: ListTaskSummariesCommand): Promise { + return await listTaskSummaries(c, cmd?.includeArchived === true); + }, + + async getTaskEnriched(c: any, cmd: GetTaskEnrichedCommand): Promise { + const row = await c.db.select({ taskId: taskIndex.taskId }).from(taskIndex).where(eq(taskIndex.taskId, cmd.taskId)).get(); + if (!row) { + const record = await getTask(c, c.state.organizationId, c.state.repoId, cmd.taskId).get(); + await reinsertTaskIndexRow(c, cmd.taskId, record.branchName ?? null, record.updatedAt ?? Date.now()); + return await enrichTaskRecord(c, record); + } + + try { + const record = await getTask(c, c.state.organizationId, c.state.repoId, cmd.taskId).get(); + return await enrichTaskRecord(c, record); + } catch (error) { + if (isStaleTaskReferenceError(error)) { + await deleteStaleTaskIndexRow(c, cmd.taskId); + throw new Error(`Unknown task in repo ${c.state.repoId}: ${cmd.taskId}`); + } + throw error; + } + }, + + async getRepositoryMetadata(c: any): Promise<{ defaultBranch: string | null; fullName: string | null; remoteUrl: string }> { + const repository = await resolveGitHubRepository(c); + return { + defaultBranch: repository?.defaultBranch ?? null, + fullName: repository?.fullName ?? 
null, + remoteUrl: c.state.remoteUrl, + }; + }, + + async getRepoOverview(c: any): Promise { + await persistRemoteUrl(c, c.state.remoteUrl); + + const now = Date.now(); + const repository = await resolveGitHubRepository(c); + const githubBranches = await listGitHubBranches(c).catch(() => []); + const githubData = getGithubData(c, c.state.organizationId); + const prRows = await githubData.listPullRequestsForRepository({ repoId: c.state.repoId }).catch(() => []); + const prByBranch = new Map(prRows.map((row) => [row.headRefName, row])); + + const taskRows = await c.db + .select({ + taskId: taskIndex.taskId, + branchName: taskIndex.branchName, + updatedAt: taskIndex.updatedAt, + }) + .from(taskIndex) + .all(); + + const taskMetaByBranch = new Map(); + for (const row of taskRows) { + if (!row.branchName) { + continue; + } + try { + const record = await getTask(c, c.state.organizationId, c.state.repoId, row.taskId).get(); + taskMetaByBranch.set(row.branchName, { + taskId: row.taskId, + title: record.title ?? null, + status: record.status, + updatedAt: record.updatedAt, + }); + } catch (error) { + if (isStaleTaskReferenceError(error)) { + await deleteStaleTaskIndexRow(c, row.taskId); + continue; + } + } + } + + const branchMap = new Map(); + for (const branch of githubBranches) { + branchMap.set(branch.branchName, branch); + } + for (const branchName of taskMetaByBranch.keys()) { + if (!branchMap.has(branchName)) { + branchMap.set(branchName, { branchName, commitSha: "" }); + } + } + if (repository?.defaultBranch && !branchMap.has(repository.defaultBranch)) { + branchMap.set(repository.defaultBranch, { branchName: repository.defaultBranch, commitSha: "" }); + } + + const branches = sortOverviewBranches( + [...branchMap.values()].map((branch) => { + const taskMeta = taskMetaByBranch.get(branch.branchName); + const pr = prByBranch.get(branch.branchName); + return { + branchName: branch.branchName, + commitSha: branch.commitSha, + taskId: taskMeta?.taskId ?? 
null, + taskTitle: taskMeta?.title ?? null, + taskStatus: taskMeta?.status ?? null, + prNumber: pr?.number ?? null, + prState: pr?.state ?? null, + prUrl: pr?.url ?? null, + ciStatus: null, + reviewStatus: null, + reviewer: pr?.authorLogin ?? null, + updatedAt: Math.max(taskMeta?.updatedAt ?? 0, pr?.updatedAtMs ?? 0, now), + }; + }), + repository?.defaultBranch ?? null, + ); + + return { + organizationId: c.state.organizationId, + repoId: c.state.repoId, + remoteUrl: c.state.remoteUrl, + baseRef: repository?.defaultBranch ?? null, + fetchedAt: now, + branches, + }; + }, + + async getPullRequestForBranch(c: any, cmd: GetPullRequestForBranchCommand): Promise<{ number: number; status: "draft" | "ready" } | null> { + const branchName = cmd.branchName?.trim(); + if (!branchName) { + return null; + } + const githubData = getGithubData(c, c.state.organizationId); + return await githubData.getPullRequestForBranch({ + repoId: c.state.repoId, + branchName, + }); + }, +}; diff --git a/foundry/packages/backend/src/actors/workspace/db/db.ts b/foundry/packages/backend/src/actors/repository/db/db.ts similarity index 68% rename from foundry/packages/backend/src/actors/workspace/db/db.ts rename to foundry/packages/backend/src/actors/repository/db/db.ts index 1b7c080..79bed8e 100644 --- a/foundry/packages/backend/src/actors/workspace/db/db.ts +++ b/foundry/packages/backend/src/actors/repository/db/db.ts @@ -2,4 +2,4 @@ import { db } from "rivetkit/db/drizzle"; import * as schema from "./schema.js"; import migrations from "./migrations.js"; -export const workspaceDb = db({ schema, migrations }); +export const repositoryDb = db({ schema, migrations }); diff --git a/foundry/packages/backend/src/actors/repository/db/drizzle.config.ts b/foundry/packages/backend/src/actors/repository/db/drizzle.config.ts new file mode 100644 index 0000000..8b9a1b9 --- /dev/null +++ b/foundry/packages/backend/src/actors/repository/db/drizzle.config.ts @@ -0,0 +1,6 @@ +import { defineConfig } from 
"rivetkit/db/drizzle"; + +export default defineConfig({ + out: "./src/actors/repository/db/drizzle", + schema: "./src/actors/repository/db/schema.ts", +}); diff --git a/foundry/packages/backend/src/actors/repository/db/drizzle/0000_useful_la_nuit.sql b/foundry/packages/backend/src/actors/repository/db/drizzle/0000_useful_la_nuit.sql new file mode 100644 index 0000000..14bc071 --- /dev/null +++ b/foundry/packages/backend/src/actors/repository/db/drizzle/0000_useful_la_nuit.sql @@ -0,0 +1,12 @@ +CREATE TABLE `repo_meta` ( + `id` integer PRIMARY KEY NOT NULL, + `remote_url` text NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE `task_index` ( + `task_id` text PRIMARY KEY NOT NULL, + `branch_name` text, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL +); diff --git a/foundry/packages/backend/src/actors/repository/db/drizzle/meta/0000_snapshot.json b/foundry/packages/backend/src/actors/repository/db/drizzle/meta/0000_snapshot.json new file mode 100644 index 0000000..940b4e6 --- /dev/null +++ b/foundry/packages/backend/src/actors/repository/db/drizzle/meta/0000_snapshot.json @@ -0,0 +1,87 @@ +{ + "version": "6", + "dialect": "sqlite", + "id": "6ffd6acb-e737-46ee-a8fe-fcfddcdd6ea9", + "prevId": "00000000-0000-0000-0000-000000000000", + "tables": { + "repo_meta": { + "name": "repo_meta", + "columns": { + "id": { + "name": "id", + "type": "integer", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "remote_url": { + "name": "remote_url", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + }, + "task_index": { + "name": "task_index", + "columns": { + "task_id": { + "name": "task_id", + "type": "text", + 
"primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "branch_name": { + "name": "branch_name", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "created_at": { + "name": "created_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "updated_at": { + "name": "updated_at", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "checkConstraints": {} + } + }, + "views": {}, + "enums": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + }, + "internal": { + "indexes": {} + } +} diff --git a/foundry/packages/backend/src/actors/project/db/drizzle/meta/_journal.json b/foundry/packages/backend/src/actors/repository/db/drizzle/meta/_journal.json similarity index 100% rename from foundry/packages/backend/src/actors/project/db/drizzle/meta/_journal.json rename to foundry/packages/backend/src/actors/repository/db/drizzle/meta/_journal.json diff --git a/foundry/packages/backend/src/actors/repository/db/migrations.ts b/foundry/packages/backend/src/actors/repository/db/migrations.ts new file mode 100644 index 0000000..ebdb167 --- /dev/null +++ b/foundry/packages/backend/src/actors/repository/db/migrations.ts @@ -0,0 +1,43 @@ +// This file is generated by src/actors/_scripts/generate-actor-migrations.ts. +// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql). +// Do not hand-edit this file. 
+ +const journal = { + entries: [ + { + idx: 0, + when: 1773376221848, + tag: "0000_useful_la_nuit", + breakpoints: true, + }, + { + idx: 1, + when: 1778900000000, + tag: "0001_remove_local_git_state", + breakpoints: true, + }, + ], +} as const; + +export default { + journal, + migrations: { + m0000: `CREATE TABLE \`repo_meta\` ( +\t\`id\` integer PRIMARY KEY NOT NULL, +\t\`remote_url\` text NOT NULL, +\t\`updated_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE \`task_index\` ( +\t\`task_id\` text PRIMARY KEY NOT NULL, +\t\`branch_name\` text, +\t\`created_at\` integer NOT NULL, +\t\`updated_at\` integer NOT NULL +); +`, + m0001: `DROP TABLE IF EXISTS \`branches\`; +--> statement-breakpoint +DROP TABLE IF EXISTS \`repo_action_jobs\`; +`, + } as const, +}; diff --git a/foundry/packages/backend/src/actors/repository/db/schema.ts b/foundry/packages/backend/src/actors/repository/db/schema.ts new file mode 100644 index 0000000..ddb2f19 --- /dev/null +++ b/foundry/packages/backend/src/actors/repository/db/schema.ts @@ -0,0 +1,16 @@ +import { integer, sqliteTable, text } from "rivetkit/db/drizzle"; + +// SQLite is per repository actor instance (organizationId+repoId). 
+ +export const repoMeta = sqliteTable("repo_meta", { + id: integer("id").primaryKey(), + remoteUrl: text("remote_url").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +export const taskIndex = sqliteTable("task_index", { + taskId: text("task_id").notNull().primaryKey(), + branchName: text("branch_name"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), +}); diff --git a/foundry/packages/backend/src/actors/repository/index.ts b/foundry/packages/backend/src/actors/repository/index.ts new file mode 100644 index 0000000..4253a90 --- /dev/null +++ b/foundry/packages/backend/src/actors/repository/index.ts @@ -0,0 +1,27 @@ +import { actor, queue } from "rivetkit"; +import { workflow } from "rivetkit/workflow"; +import { repositoryDb } from "./db/db.js"; +import { REPOSITORY_QUEUE_NAMES, repositoryActions, runRepositoryWorkflow } from "./actions.js"; + +export interface RepositoryInput { + organizationId: string; + repoId: string; + remoteUrl: string; +} + +export const repository = actor({ + db: repositoryDb, + queues: Object.fromEntries(REPOSITORY_QUEUE_NAMES.map((name) => [name, queue()])), + options: { + name: "Repository", + icon: "folder", + actionTimeout: 5 * 60_000, + }, + createState: (_c, input: RepositoryInput) => ({ + organizationId: input.organizationId, + repoId: input.repoId, + remoteUrl: input.remoteUrl, + }), + actions: repositoryActions, + run: workflow(runRepositoryWorkflow), +}); diff --git a/foundry/packages/backend/src/actors/sandbox/index.ts b/foundry/packages/backend/src/actors/sandbox/index.ts index e65c151..2e2087b 100644 --- a/foundry/packages/backend/src/actors/sandbox/index.ts +++ b/foundry/packages/backend/src/actors/sandbox/index.ts @@ -4,21 +4,21 @@ import { existsSync } from "node:fs"; import Dockerode from "dockerode"; import { SandboxAgent } from "sandbox-agent"; import { getActorRuntimeContext } from "../context.js"; -import { workspaceKey } from "../keys.js"; +import { 
organizationKey } from "../keys.js"; import { resolveSandboxProviderId } from "../../sandbox-config.js"; -const SANDBOX_REPO_CWD = "/home/sandbox/workspace/repo"; +const SANDBOX_REPO_CWD = "/home/sandbox/organization/repo"; const DEFAULT_LOCAL_SANDBOX_IMAGE = "rivetdev/sandbox-agent:full"; const DEFAULT_LOCAL_SANDBOX_PORT = 2468; const dockerClient = new Dockerode({ socketPath: "/var/run/docker.sock" }); -function parseTaskSandboxKey(key: readonly string[]): { workspaceId: string; taskId: string } { - if (key.length !== 4 || key[0] !== "ws" || key[2] !== "sandbox") { +function parseTaskSandboxKey(key: readonly string[]): { organizationId: string; taskId: string } { + if (key.length !== 4 || key[0] !== "org" || key[2] !== "sandbox") { throw new Error(`Invalid task sandbox key: ${JSON.stringify(key)}`); } return { - workspaceId: key[1]!, + organizationId: key[1]!, taskId: key[3]!, }; } @@ -191,24 +191,24 @@ function sanitizeActorResult(value: unknown, seen = new WeakSet()): unkn const baseTaskSandbox = sandboxActor({ createProvider: async (c) => { const { config } = getActorRuntimeContext(); - const { workspaceId, taskId } = parseTaskSandboxKey(c.key); - const workspace = await c.client().workspace.getOrCreate(workspaceKey(workspaceId), { - createWithInput: workspaceId, + const { organizationId, taskId } = parseTaskSandboxKey(c.key); + const organization = await c.client().organization.getOrCreate(organizationKey(organizationId), { + createWithInput: organizationId, }); - const task = await workspace.getTask({ workspaceId, taskId }); - const providerId = resolveSandboxProviderId(config, task.providerId); + const task = await organization.getTask({ organizationId, taskId }); + const sandboxProviderId = resolveSandboxProviderId(config, task.sandboxProviderId); - if (providerId === "e2b") { + if (sandboxProviderId === "e2b") { return e2b({ create: () => ({ - template: config.providers.e2b.template ?? 
"sandbox-agent-full-0.3.x", + template: config.sandboxProviders.e2b.template ?? "sandbox-agent-full-0.3.x", envs: sandboxEnvObject(), }), installAgents: ["claude", "codex"], }); } - return createLocalSandboxProvider(config.providers.local.image ?? process.env.HF_LOCAL_SANDBOX_IMAGE ?? DEFAULT_LOCAL_SANDBOX_IMAGE); + return createLocalSandboxProvider(config.sandboxProviders.local.image ?? process.env.HF_LOCAL_SANDBOX_IMAGE ?? DEFAULT_LOCAL_SANDBOX_IMAGE); }, }); @@ -236,23 +236,23 @@ async function providerForConnection(c: any): Promise { const providerFactory = baseTaskSandbox.config.actions as Record; void providerFactory; const { config } = getActorRuntimeContext(); - const { workspaceId, taskId } = parseTaskSandboxKey(c.key); - const workspace = await c.client().workspace.getOrCreate(workspaceKey(workspaceId), { - createWithInput: workspaceId, + const { organizationId, taskId } = parseTaskSandboxKey(c.key); + const organization = await c.client().organization.getOrCreate(organizationKey(organizationId), { + createWithInput: organizationId, }); - const task = await workspace.getTask({ workspaceId, taskId }); - const providerId = resolveSandboxProviderId(config, task.providerId); + const task = await organization.getTask({ organizationId, taskId }); + const sandboxProviderId = resolveSandboxProviderId(config, task.sandboxProviderId); const provider = - providerId === "e2b" + sandboxProviderId === "e2b" ? e2b({ create: () => ({ - template: config.providers.e2b.template ?? "sandbox-agent-full-0.3.x", + template: config.sandboxProviders.e2b.template ?? "sandbox-agent-full-0.3.x", envs: sandboxEnvObject(), }), installAgents: ["claude", "codex"], }) - : createLocalSandboxProvider(config.providers.local.image ?? process.env.HF_LOCAL_SANDBOX_IMAGE ?? DEFAULT_LOCAL_SANDBOX_IMAGE); + : createLocalSandboxProvider(config.sandboxProviders.local.image ?? process.env.HF_LOCAL_SANDBOX_IMAGE ?? 
DEFAULT_LOCAL_SANDBOX_IMAGE); c.vars.provider = provider; return provider; @@ -360,31 +360,31 @@ export const taskSandbox = actor({ } }, - async providerState(c: any): Promise<{ providerId: "e2b" | "local"; sandboxId: string; state: string; at: number }> { + async providerState(c: any): Promise<{ sandboxProviderId: "e2b" | "local"; sandboxId: string; state: string; at: number }> { const { config } = getActorRuntimeContext(); const { taskId } = parseTaskSandboxKey(c.key); const at = Date.now(); - const providerId = resolveSandboxProviderId(config, c.state.providerName === "e2b" ? "e2b" : c.state.providerName === "docker" ? "local" : null); + const sandboxProviderId = resolveSandboxProviderId(config, c.state.providerName === "e2b" ? "e2b" : c.state.providerName === "docker" ? "local" : null); if (c.state.sandboxDestroyed) { - return { providerId, sandboxId: taskId, state: "destroyed", at }; + return { sandboxProviderId, sandboxId: taskId, state: "destroyed", at }; } if (!c.state.sandboxId) { - return { providerId, sandboxId: taskId, state: "pending", at }; + return { sandboxProviderId, sandboxId: taskId, state: "pending", at }; } try { const health = await baseActions.getHealth(c); return { - providerId, + sandboxProviderId, sandboxId: taskId, state: health.status === "ok" ? 
"running" : "degraded", at, }; } catch { return { - providerId, + sandboxProviderId, sandboxId: taskId, state: "error", at, diff --git a/foundry/packages/backend/src/actors/task/db/migrations.ts b/foundry/packages/backend/src/actors/task/db/migrations.ts index 4d4630b..dc3193e 100644 --- a/foundry/packages/backend/src/actors/task/db/migrations.ts +++ b/foundry/packages/backend/src/actors/task/db/migrations.ts @@ -10,6 +10,12 @@ const journal = { tag: "0000_charming_maestro", breakpoints: true, }, + { + idx: 1, + when: 1773810000000, + tag: "0001_sandbox_provider_columns", + breakpoints: true, + }, ], } as const; @@ -63,9 +69,13 @@ CREATE TABLE \`task_workbench_sessions\` ( \`created\` integer DEFAULT 1 NOT NULL, \`closed\` integer DEFAULT 0 NOT NULL, \`thinking_since_ms\` integer, - \`created_at\` integer NOT NULL, +\`created_at\` integer NOT NULL, \`updated_at\` integer NOT NULL ); +`, + m0001: `ALTER TABLE \`task\` RENAME COLUMN \`provider_id\` TO \`sandbox_provider_id\`; +--> statement-breakpoint +ALTER TABLE \`task_sandboxes\` RENAME COLUMN \`provider_id\` TO \`sandbox_provider_id\`; `, } as const, }; diff --git a/foundry/packages/backend/src/actors/task/db/schema.ts b/foundry/packages/backend/src/actors/task/db/schema.ts index 2b59f4b..0c1f6cd 100644 --- a/foundry/packages/backend/src/actors/task/db/schema.ts +++ b/foundry/packages/backend/src/actors/task/db/schema.ts @@ -9,7 +9,7 @@ export const task = sqliteTable( branchName: text("branch_name"), title: text("title"), task: text("task").notNull(), - providerId: text("provider_id").notNull(), + sandboxProviderId: text("sandbox_provider_id").notNull(), status: text("status").notNull(), agentType: text("agent_type").default("claude"), prSubmitted: integer("pr_submitted").default(0), @@ -39,7 +39,7 @@ export const taskRuntime = sqliteTable( export const taskSandboxes = sqliteTable("task_sandboxes", { sandboxId: text("sandbox_id").notNull().primaryKey(), - providerId: text("provider_id").notNull(), + 
sandboxProviderId: text("sandbox_provider_id").notNull(), sandboxActorId: text("sandbox_actor_id"), switchTarget: text("switch_target").notNull(), cwd: text("cwd"), diff --git a/foundry/packages/backend/src/actors/task/index.ts b/foundry/packages/backend/src/actors/task/index.ts index cac007a..f2b9e51 100644 --- a/foundry/packages/backend/src/actors/task/index.ts +++ b/foundry/packages/backend/src/actors/task/index.ts @@ -9,7 +9,7 @@ import type { TaskWorkbenchSetSessionUnreadInput, TaskWorkbenchSendMessageInput, TaskWorkbenchUpdateDraftInput, - ProviderId, + SandboxProviderId, } from "@sandbox-agent/foundry-shared"; import { expectQueueResponse } from "../../services/queue.js"; import { selfTask } from "../handles.js"; @@ -37,15 +37,14 @@ import { import { TASK_QUEUE_NAMES, taskWorkflowQueueName, runTaskWorkflow } from "./workflow/index.js"; export interface TaskInput { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; repoRemote: string; - repoLocalPath?: string; branchName: string | null; title: string | null; task: string; - providerId: ProviderId; + sandboxProviderId: SandboxProviderId; agentType: AgentType | null; explicitTitle: string | null; explicitBranchName: string | null; @@ -53,15 +52,15 @@ export interface TaskInput { } interface InitializeCommand { - providerId?: ProviderId; + sandboxProviderId?: SandboxProviderId; } interface TaskActionCommand { reason?: string; } -interface TaskTabCommand { - tabId: string; +interface TaskSessionCommand { + sessionId: string; } interface TaskStatusSyncCommand { @@ -101,14 +100,15 @@ interface TaskWorkbenchSendMessageCommand { attachments: Array; } -interface TaskWorkbenchSendMessageActionInput extends TaskWorkbenchSendMessageInput { - waitForCompletion?: boolean; -} - interface TaskWorkbenchCreateSessionCommand { model?: string; } +interface TaskWorkbenchCreateSessionAndSendCommand { + model?: string; + text: string; +} + interface TaskWorkbenchSessionCommand { sessionId: string; } @@ 
-122,15 +122,14 @@ export const task = actor({ actionTimeout: 5 * 60_000, }, createState: (_c, input: TaskInput) => ({ - workspaceId: input.workspaceId, + organizationId: input.organizationId, repoId: input.repoId, taskId: input.taskId, repoRemote: input.repoRemote, - repoLocalPath: input.repoLocalPath, branchName: input.branchName, title: input.title, task: input.task, - providerId: input.providerId, + sandboxProviderId: input.sandboxProviderId, agentType: input.agentType, explicitTitle: input.explicitTitle, explicitBranchName: input.explicitBranchName, @@ -143,7 +142,7 @@ export const task = actor({ const self = selfTask(c); const result = await self.send(taskWorkflowQueueName("task.command.initialize"), cmd ?? {}, { wait: true, - timeout: 5 * 60_000, + timeout: 10_000, }); return expectQueueResponse(result); }, @@ -160,7 +159,7 @@ export const task = actor({ const self = selfTask(c); const result = await self.send(taskWorkflowQueueName("task.command.attach"), cmd ?? {}, { wait: true, - timeout: 20_000, + timeout: 10_000, }); return expectQueueResponse<{ target: string; sessionId: string | null }>(result); }, @@ -172,7 +171,7 @@ export const task = actor({ {}, { wait: true, - timeout: 20_000, + timeout: 10_000, }, ); return expectQueueResponse<{ switchTarget: string }>(result); @@ -236,7 +235,7 @@ export const task = actor({ {}, { wait: true, - timeout: 20_000, + timeout: 10_000, }, ); }, @@ -256,27 +255,40 @@ export const task = actor({ }); }, - async createWorkbenchSession(c, input?: { model?: string }): Promise<{ tabId: string }> { + async createWorkbenchSession(c, input?: { model?: string }): Promise<{ sessionId: string }> { const self = selfTask(c); const result = await self.send( taskWorkflowQueueName("task.command.workbench.create_session"), { ...(input?.model ? 
{ model: input.model } : {}) } satisfies TaskWorkbenchCreateSessionCommand, { wait: true, - timeout: 5 * 60_000, + timeout: 10_000, }, ); - return expectQueueResponse<{ tabId: string }>(result); + return expectQueueResponse<{ sessionId: string }>(result); + }, + + /** + * Fire-and-forget: creates a workbench session and sends the initial message. + * Used by createWorkbenchTask so the caller doesn't block on session creation. + */ + async createWorkbenchSessionAndSend(c, input: { model?: string; text: string }): Promise { + const self = selfTask(c); + await self.send( + taskWorkflowQueueName("task.command.workbench.create_session_and_send"), + { model: input.model, text: input.text } satisfies TaskWorkbenchCreateSessionAndSendCommand, + { wait: false }, + ); }, async renameWorkbenchSession(c, input: TaskWorkbenchRenameSessionInput): Promise { const self = selfTask(c); await self.send( taskWorkflowQueueName("task.command.workbench.rename_session"), - { sessionId: input.tabId, title: input.title } satisfies TaskWorkbenchSessionTitleCommand, + { sessionId: input.sessionId, title: input.title } satisfies TaskWorkbenchSessionTitleCommand, { wait: true, - timeout: 20_000, + timeout: 10_000, }, ); }, @@ -285,10 +297,10 @@ export const task = actor({ const self = selfTask(c); await self.send( taskWorkflowQueueName("task.command.workbench.set_session_unread"), - { sessionId: input.tabId, unread: input.unread } satisfies TaskWorkbenchSessionUnreadCommand, + { sessionId: input.sessionId, unread: input.unread } satisfies TaskWorkbenchSessionUnreadCommand, { wait: true, - timeout: 20_000, + timeout: 10_000, }, ); }, @@ -298,13 +310,12 @@ export const task = actor({ await self.send( taskWorkflowQueueName("task.command.workbench.update_draft"), { - sessionId: input.tabId, + sessionId: input.sessionId, text: input.text, attachments: input.attachments, } satisfies TaskWorkbenchUpdateDraftCommand, { - wait: true, - timeout: 20_000, + wait: false, }, ); }, @@ -313,36 +324,32 @@ 
export const task = actor({ const self = selfTask(c); await self.send( taskWorkflowQueueName("task.command.workbench.change_model"), - { sessionId: input.tabId, model: input.model } satisfies TaskWorkbenchChangeModelCommand, + { sessionId: input.sessionId, model: input.model } satisfies TaskWorkbenchChangeModelCommand, { wait: true, - timeout: 20_000, + timeout: 10_000, }, ); }, - async sendWorkbenchMessage(c, input: TaskWorkbenchSendMessageActionInput): Promise { + async sendWorkbenchMessage(c, input: TaskWorkbenchSendMessageInput): Promise { const self = selfTask(c); - const result = await self.send( + await self.send( taskWorkflowQueueName("task.command.workbench.send_message"), { - sessionId: input.tabId, + sessionId: input.sessionId, text: input.text, attachments: input.attachments, } satisfies TaskWorkbenchSendMessageCommand, { - wait: input.waitForCompletion === true, - ...(input.waitForCompletion === true ? { timeout: 10 * 60_000 } : {}), + wait: false, }, ); - if (input.waitForCompletion === true) { - expectQueueResponse(result); - } }, - async stopWorkbenchSession(c, input: TaskTabCommand): Promise { + async stopWorkbenchSession(c, input: TaskSessionCommand): Promise { const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.workbench.stop_session"), { sessionId: input.tabId } satisfies TaskWorkbenchSessionCommand, { + await self.send(taskWorkflowQueueName("task.command.workbench.stop_session"), { sessionId: input.sessionId } satisfies TaskWorkbenchSessionCommand, { wait: false, }); }, @@ -355,9 +362,9 @@ export const task = actor({ }); }, - async closeWorkbenchSession(c, input: TaskTabCommand): Promise { + async closeWorkbenchSession(c, input: TaskSessionCommand): Promise { const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.workbench.close_session"), { sessionId: input.tabId } satisfies TaskWorkbenchSessionCommand, { + await self.send(taskWorkflowQueueName("task.command.workbench.close_session"), { 
sessionId: input.sessionId } satisfies TaskWorkbenchSessionCommand, { wait: false, }); }, diff --git a/foundry/packages/backend/src/actors/task/workbench.ts b/foundry/packages/backend/src/actors/task/workbench.ts index 1da7f2f..d689b3a 100644 --- a/foundry/packages/backend/src/actors/task/workbench.ts +++ b/foundry/packages/backend/src/actors/task/workbench.ts @@ -3,10 +3,11 @@ import { randomUUID } from "node:crypto"; import { basename, dirname } from "node:path"; import { asc, eq } from "drizzle-orm"; import { getActorRuntimeContext } from "../context.js"; -import { getOrCreateProject, getOrCreateTaskSandbox, getOrCreateWorkspace, getTaskSandbox, selfTask } from "../handles.js"; +import { getOrCreateRepository, getOrCreateTaskSandbox, getOrCreateOrganization, getTaskSandbox, selfTask } from "../handles.js"; import { SANDBOX_REPO_CWD } from "../sandbox/index.js"; import { resolveSandboxProviderId } from "../../sandbox-config.js"; -import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js"; +import { resolveOrganizationGithubAuth } from "../../services/github-auth.js"; +import { githubRepoFullNameFromRemote } from "../../services/repo.js"; import { task as taskTable, taskRuntime, taskSandboxes, taskWorkbenchSessions } from "./db/schema.js"; import { getCurrentRecord } from "./workflow/common.js"; @@ -172,8 +173,7 @@ async function listSessionMetaRows(c: any, options?: { includeClosed?: boolean } const mapped = rows.map((row: any) => ({ ...row, id: row.sessionId, - sessionId: row.sandboxSessionId ?? null, - tabId: row.sessionId, + sessionId: row.sessionId, sandboxSessionId: row.sandboxSessionId ?? null, status: row.status ?? "ready", errorMessage: row.errorMessage ?? null, @@ -209,8 +209,7 @@ async function readSessionMeta(c: any, sessionId: string): Promise { return { ...row, id: row.sessionId, - sessionId: row.sandboxSessionId ?? null, - tabId: row.sessionId, + sessionId: row.sessionId, sandboxSessionId: row.sandboxSessionId ?? 
null, status: row.status ?? "ready", errorMessage: row.errorMessage ?? null, @@ -227,7 +226,7 @@ async function readSessionMeta(c: any, sessionId: string): Promise { async function ensureSessionMeta( c: any, params: { - tabId: string; + sessionId: string; sandboxSessionId?: string | null; model?: string; sessionName?: string; @@ -238,7 +237,7 @@ async function ensureSessionMeta( }, ): Promise { await ensureWorkbenchSessionTable(c); - const existing = await readSessionMeta(c, params.tabId); + const existing = await readSessionMeta(c, params.sessionId); if (existing) { return existing; } @@ -251,7 +250,7 @@ async function ensureSessionMeta( await c.db .insert(taskWorkbenchSessions) .values({ - sessionId: params.tabId, + sessionId: params.sessionId, sandboxSessionId: params.sandboxSessionId ?? null, sessionName, model, @@ -271,20 +270,20 @@ async function ensureSessionMeta( }) .run(); - return await readSessionMeta(c, params.tabId); + return await readSessionMeta(c, params.sessionId); } -async function updateSessionMeta(c: any, tabId: string, values: Record): Promise { - await ensureSessionMeta(c, { tabId }); +async function updateSessionMeta(c: any, sessionId: string, values: Record): Promise { + await ensureSessionMeta(c, { sessionId }); await c.db .update(taskWorkbenchSessions) .set({ ...values, updatedAt: Date.now(), }) - .where(eq(taskWorkbenchSessions.sessionId, tabId)) + .where(eq(taskWorkbenchSessions.sessionId, sessionId)) .run(); - return await readSessionMeta(c, tabId); + return await readSessionMeta(c, sessionId); } async function readSessionMetaBySandboxSessionId(c: any, sandboxSessionId: string): Promise { @@ -296,33 +295,25 @@ async function readSessionMetaBySandboxSessionId(c: any, sandboxSessionId: strin return await readSessionMeta(c, row.sessionId); } -async function requireReadySessionMeta(c: any, tabId: string): Promise { - const meta = await readSessionMeta(c, tabId); +async function requireReadySessionMeta(c: any, sessionId: string): Promise { + 
const meta = await readSessionMeta(c, sessionId); if (!meta) { - throw new Error(`Unknown workbench tab: ${tabId}`); + throw new Error(`Unknown workbench session: ${sessionId}`); } if (meta.status !== "ready" || !meta.sandboxSessionId) { - throw new Error(meta.errorMessage ?? "This workbench tab is still preparing"); + throw new Error(meta.errorMessage ?? "This workbench session is still preparing"); } return meta; } -async function ensureReadySessionMeta(c: any, tabId: string): Promise { - const meta = await readSessionMeta(c, tabId); +export function requireSendableSessionMeta(meta: any, sessionId: string): any { if (!meta) { - throw new Error(`Unknown workbench tab: ${tabId}`); + throw new Error(`Unknown workbench session: ${sessionId}`); } - - if (meta.status === "ready" && meta.sandboxSessionId) { - return meta; + if (meta.status !== "ready" || !meta.sandboxSessionId) { + throw new Error(`Session is not ready (status: ${meta.status}). Wait for session provisioning to complete.`); } - - if (meta.status === "error") { - throw new Error(meta.errorMessage ?? "This workbench tab failed to prepare"); - } - - await ensureWorkbenchSession(c, tabId); - return await requireReadySessionMeta(c, tabId); + return meta; } function shellFragment(parts: string[]): string { @@ -339,23 +330,23 @@ async function getTaskSandboxRuntime( ): Promise<{ sandbox: any; sandboxId: string; - providerId: string; + sandboxProviderId: string; switchTarget: string; cwd: string; }> { const { config } = getActorRuntimeContext(); const sandboxId = stableSandboxId(c); - const providerId = resolveSandboxProviderId(config, record.providerId ?? c.state.providerId ?? null); - const sandbox = await getOrCreateTaskSandbox(c, c.state.workspaceId, sandboxId, {}); + const sandboxProviderId = resolveSandboxProviderId(config, record.sandboxProviderId ?? c.state.sandboxProviderId ?? 
null); + const sandbox = await getOrCreateTaskSandbox(c, c.state.organizationId, sandboxId, {}); const actorId = typeof sandbox.resolve === "function" ? await sandbox.resolve().catch(() => null) : null; - const switchTarget = providerId === "local" ? `sandbox://local/${sandboxId}` : `sandbox://e2b/${sandboxId}`; + const switchTarget = sandboxProviderId === "local" ? `sandbox://local/${sandboxId}` : `sandbox://e2b/${sandboxId}`; const now = Date.now(); await c.db .insert(taskSandboxes) .values({ sandboxId, - providerId, + sandboxProviderId, sandboxActorId: typeof actorId === "string" ? actorId : null, switchTarget, cwd: SANDBOX_REPO_CWD, @@ -366,7 +357,7 @@ async function getTaskSandboxRuntime( .onConflictDoUpdate({ target: taskSandboxes.sandboxId, set: { - providerId, + sandboxProviderId, sandboxActorId: typeof actorId === "string" ? actorId : null, switchTarget, cwd: SANDBOX_REPO_CWD, @@ -389,7 +380,7 @@ async function getTaskSandboxRuntime( return { sandbox, sandboxId, - providerId, + sandboxProviderId, switchTarget, cwd: SANDBOX_REPO_CWD, }; @@ -400,17 +391,10 @@ async function ensureSandboxRepo(c: any, sandbox: any, record: any): Promise): Promise { - await updateSessionMeta(c, tabId, { +async function writeSessionTranscript(c: any, sessionId: string, transcript: Array): Promise { + await updateSessionMeta(c, sessionId, { transcriptJson: JSON.stringify(transcript), transcriptUpdatedAt: Date.now(), }); @@ -697,12 +681,12 @@ async function enqueueWorkbenchRefresh( await self.send(command, body, { wait: false }); } -async function enqueueWorkbenchEnsureSession(c: any, tabId: string): Promise { +async function enqueueWorkbenchEnsureSession(c: any, sessionId: string): Promise { const self = selfTask(c); await self.send( "task.command.workbench.ensure_session", { - tabId, + sessionId, }, { wait: false, @@ -750,8 +734,8 @@ async function readPullRequestSummary(c: any, branchName: string | null) { } try { - const project = await getOrCreateProject(c, 
c.state.workspaceId, c.state.repoId, c.state.repoRemote); - return await project.getPullRequestForBranch({ branchName }); + const repository = await getOrCreateRepository(c, c.state.organizationId, c.state.repoId, c.state.repoRemote); + return await repository.getPullRequestForBranch({ branchName }); } catch { return null; } @@ -762,7 +746,7 @@ export async function ensureWorkbenchSeeded(c: any): Promise { const record = await getCurrentRecord({ db: c.db, state: c.state }); if (record.activeSessionId) { await ensureSessionMeta(c, { - tabId: record.activeSessionId, + sessionId: record.activeSessionId, sandboxSessionId: record.activeSessionId, model: defaultModelForAgent(record.agentType), sessionName: "Session 1", @@ -791,7 +775,8 @@ function buildSessionSummary(record: any, meta: any): any { return { id: meta.id, - sessionId: derivedSandboxSessionId, + sessionId: meta.sessionId, + sandboxSessionId: derivedSandboxSessionId, sessionName: meta.sessionName, agent: agentKindForModel(meta.model), model: meta.model, @@ -806,9 +791,8 @@ function buildSessionSummary(record: any, meta: any): any { function buildSessionDetailFromMeta(record: any, meta: any): any { const summary = buildSessionSummary(record, meta); return { - sessionId: meta.tabId, - tabId: meta.tabId, - sandboxSessionId: summary.sessionId, + sessionId: meta.sessionId, + sandboxSessionId: summary.sandboxSessionId ?? null, sessionName: summary.sessionName, agent: summary.agent, model: summary.model, @@ -828,7 +812,7 @@ function buildSessionDetailFromMeta(record: any, meta: any): any { /** * Builds a WorkbenchTaskSummary from local task actor state. Task actors push - * this to the parent workspace actor so workspace sidebar reads stay local. + * this to the parent organization actor so organization sidebar reads stay local. 
*/ export async function buildTaskSummary(c: any): Promise { const record = await ensureWorkbenchSeeded(c); @@ -874,7 +858,7 @@ export async function buildTaskDetail(c: any): Promise { fileTree: gitState.fileTree, minutesUsed: 0, sandboxes: (record.sandboxes ?? []).map((sandbox: any) => ({ - providerId: sandbox.providerId, + sandboxProviderId: sandbox.sandboxProviderId, sandboxId: sandbox.sandboxId, cwd: sandbox.cwd ?? null, })), @@ -883,13 +867,13 @@ export async function buildTaskDetail(c: any): Promise { } /** - * Builds a WorkbenchSessionDetail for a specific session tab. + * Builds a WorkbenchSessionDetail for a specific session. */ -export async function buildSessionDetail(c: any, tabId: string): Promise { +export async function buildSessionDetail(c: any, sessionId: string): Promise { const record = await ensureWorkbenchSeeded(c); - const meta = await readSessionMeta(c, tabId); + const meta = await readSessionMeta(c, sessionId); if (!meta || meta.closed) { - throw new Error(`Unknown workbench session tab: ${tabId}`); + throw new Error(`Unknown workbench session: ${sessionId}`); } if (!meta.sandboxSessionId) { @@ -899,7 +883,7 @@ export async function buildSessionDetail(c: any, tabId: string): Promise { try { const transcript = await readSessionTranscript(c, record, meta.sandboxSessionId); if (JSON.stringify(meta.transcript ?? []) !== JSON.stringify(transcript)) { - await writeSessionTranscript(c, meta.tabId, transcript); + await writeSessionTranscript(c, meta.sessionId, transcript); return buildSessionDetailFromMeta(record, { ...meta, transcript, @@ -921,21 +905,21 @@ export async function getTaskDetail(c: any): Promise { return await buildTaskDetail(c); } -export async function getSessionDetail(c: any, tabId: string): Promise { - return await buildSessionDetail(c, tabId); +export async function getSessionDetail(c: any, sessionId: string): Promise { + return await buildSessionDetail(c, sessionId); } /** * Replaces the old notifyWorkbenchUpdated pattern. 
* * The task actor emits two kinds of updates: - * - Push summary state up to the parent workspace actor so the sidebar + * - Push summary state up to the parent organization actor so the sidebar * materialized projection stays current. * - Broadcast full detail/session payloads down to direct task subscribers. */ export async function broadcastTaskUpdate(c: any, options?: { sessionId?: string }): Promise { - const workspace = await getOrCreateWorkspace(c, c.state.workspaceId); - await workspace.applyTaskSummaryUpdate({ taskSummary: await buildTaskSummary(c) }); + const organization = await getOrCreateOrganization(c, c.state.organizationId); + await organization.applyTaskSummaryUpdate({ taskSummary: await buildTaskSummary(c) }); c.broadcast("taskUpdated", { type: "taskDetailUpdated", detail: await buildTaskDetail(c), @@ -964,8 +948,8 @@ export async function refreshWorkbenchSessionTranscript(c: any, sessionId: strin } const transcript = await readSessionTranscript(c, record, meta.sandboxSessionId); - await writeSessionTranscript(c, meta.tabId, transcript); - await broadcastTaskUpdate(c, { sessionId: meta.tabId }); + await writeSessionTranscript(c, meta.sessionId, transcript); + await broadcastTaskUpdate(c, { sessionId: meta.sessionId }); } export async function renameWorkbenchTask(c: any, value: string): Promise { @@ -1029,31 +1013,31 @@ export async function renameWorkbenchBranch(c: any, value: string): Promise { - const tabId = `tab-${randomUUID()}`; +export async function createWorkbenchSession(c: any, model?: string): Promise<{ sessionId: string }> { + const sessionId = `session-${randomUUID()}`; const record = await ensureWorkbenchSeeded(c); await ensureSessionMeta(c, { - tabId, + sessionId, model: model ?? 
defaultModelForAgent(record.agentType), sandboxSessionId: null, status: pendingWorkbenchSessionStatus(record), created: false, }); - await broadcastTaskUpdate(c, { sessionId: tabId }); - await enqueueWorkbenchEnsureSession(c, tabId); - return { tabId }; + await broadcastTaskUpdate(c, { sessionId: sessionId }); + await enqueueWorkbenchEnsureSession(c, sessionId); + return { sessionId }; } -export async function ensureWorkbenchSession(c: any, tabId: string, model?: string): Promise { - const meta = await readSessionMeta(c, tabId); +export async function ensureWorkbenchSession(c: any, sessionId: string, model?: string): Promise { + const meta = await readSessionMeta(c, sessionId); if (!meta || meta.closed) { return; } @@ -1063,12 +1047,12 @@ export async function ensureWorkbenchSession(c: any, tabId: string, model?: stri await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", { sessionId: meta.sandboxSessionId, }); - await broadcastTaskUpdate(c, { sessionId: tabId }); + await broadcastTaskUpdate(c, { sessionId: sessionId }); return; } - await updateSessionMeta(c, tabId, { - sandboxSessionId: meta.sandboxSessionId ?? tabId, + await updateSessionMeta(c, sessionId, { + sandboxSessionId: meta.sandboxSessionId ?? sessionId, status: "pending_session_create", errorMessage: null, }); @@ -1077,7 +1061,7 @@ export async function ensureWorkbenchSession(c: any, tabId: string, model?: stri const runtime = await getTaskSandboxRuntime(c, record); await ensureSandboxRepo(c, runtime.sandbox, record); await runtime.sandbox.createSession({ - id: meta.sandboxSessionId ?? tabId, + id: meta.sandboxSessionId ?? sessionId, agent: agentTypeForModel(model ?? meta.model ?? defaultModelForAgent(record.agentType)), model: model ?? meta.model ?? 
defaultModelForAgent(record.agentType), sessionInit: { @@ -1085,22 +1069,22 @@ export async function ensureWorkbenchSession(c: any, tabId: string, model?: stri }, }); - await updateSessionMeta(c, tabId, { - sandboxSessionId: meta.sandboxSessionId ?? tabId, + await updateSessionMeta(c, sessionId, { + sandboxSessionId: meta.sandboxSessionId ?? sessionId, status: "ready", errorMessage: null, }); await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", { - sessionId: meta.sandboxSessionId ?? tabId, + sessionId: meta.sandboxSessionId ?? sessionId, }); } catch (error) { - await updateSessionMeta(c, tabId, { + await updateSessionMeta(c, sessionId, { status: "error", errorMessage: error instanceof Error ? error.message : String(error), }); } - await broadcastTaskUpdate(c, { sessionId: tabId }); + await broadcastTaskUpdate(c, { sessionId: sessionId }); } export async function enqueuePendingWorkbenchSessions(c: any): Promise { @@ -1113,7 +1097,7 @@ export async function enqueuePendingWorkbenchSessions(c: any): Promise { await self.send( "task.command.workbench.ensure_session", { - tabId: row.tabId, + sessionId: row.sessionId, model: row.model, }, { @@ -1167,7 +1151,7 @@ export async function changeWorkbenchModel(c: any, sessionId: string, model: str let shouldEnsure = nextMeta.status === "pending_provision" || nextMeta.status === "pending_session_create" || nextMeta.status === "error"; if (shouldRecreateSessionForModelChange(nextMeta)) { - const sandbox = getTaskSandbox(c, c.state.workspaceId, stableSandboxId(c)); + const sandbox = getTaskSandbox(c, c.state.organizationId, stableSandboxId(c)); await sandbox.destroySession(nextMeta.sandboxSessionId); nextMeta = await updateSessionMeta(c, sessionId, { sandboxSessionId: null, @@ -1179,7 +1163,7 @@ export async function changeWorkbenchModel(c: any, sessionId: string, model: str }); shouldEnsure = true; } else if (nextMeta.status === "ready" && nextMeta.sandboxSessionId) { - const sandbox = 
getTaskSandbox(c, c.state.workspaceId, stableSandboxId(c)); + const sandbox = getTaskSandbox(c, c.state.organizationId, stableSandboxId(c)); if (typeof sandbox.rawSendSessionMethod === "function") { try { await sandbox.rawSendSessionMethod(nextMeta.sandboxSessionId, "session/set_config_option", { @@ -1204,7 +1188,7 @@ export async function changeWorkbenchModel(c: any, sessionId: string, model: str } export async function sendWorkbenchMessage(c: any, sessionId: string, text: string, attachments: Array): Promise { - const meta = await ensureReadySessionMeta(c, sessionId); + const meta = requireSendableSessionMeta(await readSessionMeta(c, sessionId), sessionId); const record = await ensureWorkbenchSeeded(c); const runtime = await getTaskSandboxRuntime(c, record); await ensureSandboxRepo(c, runtime.sandbox, record); @@ -1253,7 +1237,7 @@ export async function sendWorkbenchMessage(c: any, sessionId: string, text: stri export async function stopWorkbenchSession(c: any, sessionId: string): Promise { const meta = await requireReadySessionMeta(c, sessionId); - const sandbox = getTaskSandbox(c, c.state.workspaceId, stableSandboxId(c)); + const sandbox = getTaskSandbox(c, c.state.organizationId, stableSandboxId(c)); await sandbox.destroySession(meta.sandboxSessionId); await updateSessionMeta(c, sessionId, { thinkingSinceMs: null, @@ -1263,7 +1247,7 @@ export async function stopWorkbenchSession(c: any, sessionId: string): Promise { const record = await ensureWorkbenchSeeded(c); - const meta = (await readSessionMetaBySandboxSessionId(c, sessionId)) ?? (await ensureSessionMeta(c, { tabId: sessionId, sandboxSessionId: sessionId })); + const meta = (await readSessionMetaBySandboxSessionId(c, sessionId)) ?? 
(await ensureSessionMeta(c, { sessionId: sessionId, sandboxSessionId: sessionId })); let changed = false; if (record.activeSessionId === sessionId || record.activeSessionId === meta.sandboxSessionId) { @@ -1317,13 +1301,13 @@ export async function syncWorkbenchSessionStatus(c: any, sessionId: string, stat } if (changed) { + await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", { + sessionId, + }); if (status !== "running") { - await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", { - sessionId, - }); await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_derived", {}); } - await broadcastTaskUpdate(c, { sessionId: meta.tabId }); + await broadcastTaskUpdate(c, { sessionId: meta.sessionId }); } } @@ -1339,7 +1323,7 @@ export async function closeWorkbenchSession(c: any, sessionId: string): Promise< return; } if (meta.sandboxSessionId) { - const sandbox = getTaskSandbox(c, c.state.workspaceId, stableSandboxId(c)); + const sandbox = getTaskSandbox(c, c.state.organizationId, stableSandboxId(c)); await sandbox.destroySession(meta.sandboxSessionId); } await updateSessionMeta(c, sessionId, { @@ -1365,10 +1349,10 @@ export async function markWorkbenchUnread(c: any): Promise { if (!latest) { return; } - await updateSessionMeta(c, latest.tabId, { + await updateSessionMeta(c, latest.sessionId, { unread: 1, }); - await broadcastTaskUpdate(c, { sessionId: latest.tabId }); + await broadcastTaskUpdate(c, { sessionId: latest.sessionId }); } export async function publishWorkbenchPr(c: any): Promise { @@ -1376,17 +1360,17 @@ export async function publishWorkbenchPr(c: any): Promise { if (!record.branchName) { throw new Error("cannot publish PR without a branch"); } - let repoLocalPath = c.state.repoLocalPath; - if (!repoLocalPath) { - const project = await getOrCreateProject(c, c.state.workspaceId, c.state.repoId, c.state.repoRemote); - const result = await project.ensure({ remoteUrl: c.state.repoRemote }); - 
repoLocalPath = result.localPath; - c.state.repoLocalPath = repoLocalPath; + const repository = await getOrCreateRepository(c, c.state.organizationId, c.state.repoId, c.state.repoRemote); + const metadata = await repository.getRepositoryMetadata({}); + const repoFullName = metadata.fullName ?? githubRepoFullNameFromRemote(c.state.repoRemote); + if (!repoFullName) { + throw new Error(`Unable to resolve GitHub repository for ${c.state.repoRemote}`); } const { driver } = getActorRuntimeContext(); - const auth = await resolveWorkspaceGithubAuth(c, c.state.workspaceId); - const created = await driver.github.createPr(repoLocalPath, record.branchName, record.title ?? c.state.task, undefined, { + const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId); + await driver.github.createPr(repoFullName, record.branchName, record.title ?? c.state.task, undefined, { githubToken: auth?.githubToken ?? null, + baseBranch: metadata.defaultBranch ?? undefined, }); await c.db .update(taskTable) diff --git a/foundry/packages/backend/src/actors/task/workflow/commands.ts b/foundry/packages/backend/src/actors/task/workflow/commands.ts index 5e55b6c..d03ade1 100644 --- a/foundry/packages/backend/src/actors/task/workflow/commands.ts +++ b/foundry/packages/backend/src/actors/task/workflow/commands.ts @@ -28,7 +28,7 @@ export async function handleAttachActivity(loopCtx: any, msg: any): Promise 0) { target = connection.endpoint; @@ -78,9 +78,9 @@ export async function handleArchiveActivity(loopCtx: any, msg: any): Promise { + void withTimeout(getTaskSandbox(loopCtx, loopCtx.state.organizationId, record.activeSandboxId).destroy(), 45_000, "sandbox destroy").catch((error) => { logActorWarning("task.commands", "failed to release sandbox during archive", { - workspaceId: loopCtx.state.workspaceId, + organizationId: loopCtx.state.organizationId, repoId: loopCtx.state.repoId, taskId: loopCtx.state.taskId, sandboxId: record.activeSandboxId, @@ -106,7 +106,7 @@ export async function 
killDestroySandboxActivity(loopCtx: any): Promise { return; } - await getTaskSandbox(loopCtx, loopCtx.state.workspaceId, record.activeSandboxId).destroy(); + await getTaskSandbox(loopCtx, loopCtx.state.organizationId, record.activeSandboxId).destroy(); } export async function killWriteDbActivity(loopCtx: any, msg: any): Promise { diff --git a/foundry/packages/backend/src/actors/task/workflow/common.ts b/foundry/packages/backend/src/actors/task/workflow/common.ts index 0dfc667..ae1e8dd 100644 --- a/foundry/packages/backend/src/actors/task/workflow/common.ts +++ b/foundry/packages/backend/src/actors/task/workflow/common.ts @@ -93,7 +93,7 @@ export async function getCurrentRecord(ctx: any): Promise { branchName: taskTable.branchName, title: taskTable.title, task: taskTable.task, - providerId: taskTable.providerId, + sandboxProviderId: taskTable.sandboxProviderId, status: taskTable.status, statusMessage: taskRuntime.statusMessage, activeSandboxId: taskRuntime.activeSandboxId, @@ -115,7 +115,7 @@ export async function getCurrentRecord(ctx: any): Promise { const sandboxes = await db .select({ sandboxId: taskSandboxes.sandboxId, - providerId: taskSandboxes.providerId, + sandboxProviderId: taskSandboxes.sandboxProviderId, sandboxActorId: taskSandboxes.sandboxActorId, switchTarget: taskSandboxes.switchTarget, cwd: taskSandboxes.cwd, @@ -126,21 +126,21 @@ export async function getCurrentRecord(ctx: any): Promise { .all(); return { - workspaceId: ctx.state.workspaceId, + organizationId: ctx.state.organizationId, repoId: ctx.state.repoId, repoRemote: ctx.state.repoRemote, taskId: ctx.state.taskId, branchName: row.branchName, title: row.title, task: row.task, - providerId: row.providerId, + sandboxProviderId: row.sandboxProviderId, status: row.status, statusMessage: row.statusMessage ?? null, activeSandboxId: row.activeSandboxId ?? null, activeSessionId: row.activeSessionId ?? 
null, sandboxes: sandboxes.map((sb) => ({ sandboxId: sb.sandboxId, - providerId: sb.providerId, + sandboxProviderId: sb.sandboxProviderId, sandboxActorId: sb.sandboxActorId ?? null, switchTarget: sb.switchTarget, cwd: sb.cwd ?? null, @@ -165,8 +165,8 @@ export async function getCurrentRecord(ctx: any): Promise { export async function appendHistory(ctx: any, kind: string, payload: Record): Promise { const client = ctx.client(); - const history = await client.history.getOrCreate(historyKey(ctx.state.workspaceId, ctx.state.repoId), { - createWithInput: { workspaceId: ctx.state.workspaceId, repoId: ctx.state.repoId }, + const history = await client.history.getOrCreate(historyKey(ctx.state.organizationId, ctx.state.repoId), { + createWithInput: { organizationId: ctx.state.organizationId, repoId: ctx.state.repoId }, }); await history.append({ kind, diff --git a/foundry/packages/backend/src/actors/task/workflow/index.ts b/foundry/packages/backend/src/actors/task/workflow/index.ts index f9049a7..f6ffd10 100644 --- a/foundry/packages/backend/src/actors/task/workflow/index.ts +++ b/foundry/packages/backend/src/actors/task/workflow/index.ts @@ -1,14 +1,7 @@ import { Loop } from "rivetkit/workflow"; import { logActorWarning, resolveErrorMessage } from "../../logging.js"; import { getCurrentRecord } from "./common.js"; -import { - initAssertNameActivity, - initBootstrapDbActivity, - initCompleteActivity, - initEnqueueProvisionActivity, - initEnsureNameActivity, - initFailedActivity, -} from "./init.js"; +import { initBootstrapDbActivity, initCompleteActivity, initEnqueueProvisionActivity, initFailedActivity } from "./init.js"; import { handleArchiveActivity, handleAttachActivity, @@ -67,12 +60,8 @@ const commandHandlers: Record = { await loopCtx.removed("init-failed", "step"); await loopCtx.removed("init-failed-v2", "step"); try { - await loopCtx.step({ - name: "init-ensure-name", - timeout: 5 * 60_000, - run: async () => initEnsureNameActivity(loopCtx), - }); - await 
loopCtx.step("init-assert-name", async () => initAssertNameActivity(loopCtx)); + await loopCtx.removed("init-ensure-name", "step"); + await loopCtx.removed("init-assert-name", "step"); await loopCtx.removed("init-create-sandbox", "step"); await loopCtx.removed("init-ensure-agent", "step"); await loopCtx.removed("init-start-sandbox-instance", "step"); @@ -156,11 +145,31 @@ const commandHandlers: Record = { } }, + "task.command.workbench.create_session_and_send": async (loopCtx, msg) => { + try { + const created = await loopCtx.step({ + name: "workbench-create-session-for-send", + timeout: 5 * 60_000, + run: async () => createWorkbenchSession(loopCtx, msg.body?.model), + }); + await loopCtx.step({ + name: "workbench-send-initial-message", + timeout: 5 * 60_000, + run: async () => sendWorkbenchMessage(loopCtx, created.sessionId, msg.body.text, []), + }); + } catch (error) { + logActorWarning("task.workflow", "create_session_and_send failed", { + error: resolveErrorMessage(error), + }); + } + await msg.complete({ ok: true }); + }, + "task.command.workbench.ensure_session": async (loopCtx, msg) => { await loopCtx.step({ name: "workbench-ensure-session", timeout: 5 * 60_000, - run: async () => ensureWorkbenchSession(loopCtx, msg.body.tabId, msg.body?.model), + run: async () => ensureWorkbenchSession(loopCtx, msg.body.sessionId, msg.body?.model), }); await msg.complete({ ok: true }); }, @@ -269,7 +278,16 @@ export async function runTaskWorkflow(ctx: any): Promise { } const handler = commandHandlers[msg.name as TaskQueueName]; if (handler) { - await handler(loopCtx, msg); + try { + await handler(loopCtx, msg); + } catch (error) { + const message = resolveErrorMessage(error); + logActorWarning("task.workflow", "task workflow command failed", { + queueName: msg.name, + error: message, + }); + await msg.complete({ error: message }).catch(() => {}); + } } return Loop.continue(undefined); }); diff --git a/foundry/packages/backend/src/actors/task/workflow/init.ts 
b/foundry/packages/backend/src/actors/task/workflow/init.ts index ec0b699..8a9962d 100644 --- a/foundry/packages/backend/src/actors/task/workflow/init.ts +++ b/foundry/packages/backend/src/actors/task/workflow/init.ts @@ -1,10 +1,8 @@ // @ts-nocheck import { eq } from "drizzle-orm"; -import { resolveCreateFlowDecision } from "../../../services/create-flow.js"; -import { resolveWorkspaceGithubAuth } from "../../../services/github-auth.js"; import { getActorRuntimeContext } from "../../context.js"; -import { getOrCreateHistory, getOrCreateProject, selfTask } from "../../handles.js"; -import { logActorWarning, resolveErrorMessage } from "../../logging.js"; +import { getOrCreateHistory, selfTask } from "../../handles.js"; +import { resolveErrorMessage } from "../../logging.js"; import { defaultSandboxProviderId } from "../../../sandbox-config.js"; import { task as taskTable, taskRuntime } from "../db/schema.js"; import { TASK_ROW_ID, appendHistory, collectErrorMessages, resolveErrorDetail, setTaskState } from "./common.js"; @@ -19,9 +17,8 @@ async function ensureTaskRuntimeCacheColumns(db: any): Promise { export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise { const { config } = getActorRuntimeContext(); - const providerId = body?.providerId ?? loopCtx.state.providerId ?? defaultSandboxProviderId(config); + const sandboxProviderId = body?.sandboxProviderId ?? loopCtx.state.sandboxProviderId ?? defaultSandboxProviderId(config); const now = Date.now(); - const initialStatusMessage = loopCtx.state.branchName && loopCtx.state.title ? "provisioning" : "naming"; await ensureTaskRuntimeCacheColumns(loopCtx.db); @@ -32,7 +29,7 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise< branchName: loopCtx.state.branchName, title: loopCtx.state.title, task: loopCtx.state.task, - providerId, + sandboxProviderId, status: "init_bootstrap_db", agentType: loopCtx.state.agentType ?? 
config.default_agent, createdAt: now, @@ -44,7 +41,7 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise< branchName: loopCtx.state.branchName, title: loopCtx.state.title, task: loopCtx.state.task, - providerId, + sandboxProviderId, status: "init_bootstrap_db", agentType: loopCtx.state.agentType ?? config.default_agent, updatedAt: now, @@ -60,7 +57,7 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise< activeSessionId: null, activeSwitchTarget: null, activeCwd: null, - statusMessage: initialStatusMessage, + statusMessage: "provisioning", gitStateJson: null, gitStateUpdatedAt: null, provisionStage: "queued", @@ -74,7 +71,7 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise< activeSessionId: null, activeSwitchTarget: null, activeCwd: null, - statusMessage: initialStatusMessage, + statusMessage: "provisioning", provisionStage: "queued", provisionStageUpdatedAt: now, updatedAt: now, @@ -102,7 +99,7 @@ export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Pro }); } catch (error) { logActorWarning("task.init", "background provision command failed", { - workspaceId: loopCtx.state.workspaceId, + organizationId: loopCtx.state.organizationId, repoId: loopCtx.state.repoId, taskId: loopCtx.state.taskId, error: resolveErrorMessage(error), @@ -111,106 +108,10 @@ export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Pro } } -export async function initEnsureNameActivity(loopCtx: any): Promise { - await setTaskState(loopCtx, "init_ensure_name", "determining title and branch"); - const existing = await loopCtx.db - .select({ - branchName: taskTable.branchName, - title: taskTable.title, - }) - .from(taskTable) - .where(eq(taskTable.id, TASK_ROW_ID)) - .get(); - - if (existing?.branchName && existing?.title) { - loopCtx.state.branchName = existing.branchName; - loopCtx.state.title = existing.title; - return; - } - - const { driver } = 
getActorRuntimeContext(); - const auth = await resolveWorkspaceGithubAuth(loopCtx, loopCtx.state.workspaceId); - let repoLocalPath = loopCtx.state.repoLocalPath; - if (!repoLocalPath) { - const project = await getOrCreateProject(loopCtx, loopCtx.state.workspaceId, loopCtx.state.repoId, loopCtx.state.repoRemote); - const result = await project.ensure({ remoteUrl: loopCtx.state.repoRemote }); - repoLocalPath = result.localPath; - loopCtx.state.repoLocalPath = repoLocalPath; - } - - try { - await driver.git.fetch(repoLocalPath, { githubToken: auth?.githubToken ?? null }); - } catch (error) { - logActorWarning("task.init", "fetch before naming failed", { - workspaceId: loopCtx.state.workspaceId, - repoId: loopCtx.state.repoId, - taskId: loopCtx.state.taskId, - error: resolveErrorMessage(error), - }); - } - - const remoteBranches = (await driver.git.listRemoteBranches(repoLocalPath, { githubToken: auth?.githubToken ?? null })).map( - (branch: any) => branch.branchName, - ); - const project = await getOrCreateProject(loopCtx, loopCtx.state.workspaceId, loopCtx.state.repoId, loopCtx.state.repoRemote); - const reservedBranches = await project.listReservedBranches({}); - const resolved = resolveCreateFlowDecision({ - task: loopCtx.state.task, - explicitTitle: loopCtx.state.explicitTitle ?? undefined, - explicitBranchName: loopCtx.state.explicitBranchName ?? 
undefined, - localBranches: remoteBranches, - taskBranches: reservedBranches, - }); - - const now = Date.now(); - await loopCtx.db - .update(taskTable) - .set({ - branchName: resolved.branchName, - title: resolved.title, - updatedAt: now, - }) - .where(eq(taskTable.id, TASK_ROW_ID)) - .run(); - - loopCtx.state.branchName = resolved.branchName; - loopCtx.state.title = resolved.title; - loopCtx.state.explicitTitle = null; - loopCtx.state.explicitBranchName = null; - - await loopCtx.db - .update(taskRuntime) - .set({ - statusMessage: "provisioning", - provisionStage: "repo_prepared", - provisionStageUpdatedAt: now, - updatedAt: now, - }) - .where(eq(taskRuntime.id, TASK_ROW_ID)) - .run(); - - await project.registerTaskBranch({ - taskId: loopCtx.state.taskId, - branchName: resolved.branchName, - }); - - await appendHistory(loopCtx, "task.named", { - title: resolved.title, - branchName: resolved.branchName, - }); -} - -export async function initAssertNameActivity(loopCtx: any): Promise { - await setTaskState(loopCtx, "init_assert_name", "validating naming"); - if (!loopCtx.state.branchName) { - throw new Error("task branchName is not initialized"); - } -} - export async function initCompleteActivity(loopCtx: any, body: any): Promise { const now = Date.now(); const { config } = getActorRuntimeContext(); - const providerId = body?.providerId ?? loopCtx.state.providerId ?? defaultSandboxProviderId(config); + const sandboxProviderId = body?.sandboxProviderId ?? loopCtx.state.sandboxProviderId ?? 
defaultSandboxProviderId(config); await setTaskState(loopCtx, "init_complete", "task initialized"); await loopCtx.db @@ -224,12 +125,12 @@ export async function initCompleteActivity(loopCtx: any, body: any): Promise [name, queue()])), - options: { - name: "Workspace", - icon: "compass", - actionTimeout: 5 * 60_000, - }, - createState: (_c, workspaceId: string) => ({ - workspaceId, - }), - actions: workspaceActions, - run: workflow(runWorkspaceWorkflow), -}); diff --git a/foundry/packages/backend/src/config/organization.ts b/foundry/packages/backend/src/config/organization.ts new file mode 100644 index 0000000..8b5c766 --- /dev/null +++ b/foundry/packages/backend/src/config/organization.ts @@ -0,0 +1,13 @@ +import type { AppConfig } from "@sandbox-agent/foundry-shared"; + +export function defaultOrganization(config: AppConfig): string { + const organizationId = config.organization.default.trim(); + return organizationId.length > 0 ? organizationId : "default"; +} + +export function resolveOrganization(flagOrganization: string | undefined, config: AppConfig): string { + if (flagOrganization && flagOrganization.trim().length > 0) { + return flagOrganization.trim(); + } + return defaultOrganization(config); +} diff --git a/foundry/packages/backend/src/config/workspace.ts b/foundry/packages/backend/src/config/workspace.ts deleted file mode 100644 index 2225200..0000000 --- a/foundry/packages/backend/src/config/workspace.ts +++ /dev/null @@ -1,13 +0,0 @@ -import type { AppConfig } from "@sandbox-agent/foundry-shared"; - -export function defaultWorkspace(config: AppConfig): string { - const ws = config.workspace.default.trim(); - return ws.length > 0 ? 
ws : "default"; -} - -export function resolveWorkspace(flagWorkspace: string | undefined, config: AppConfig): string { - if (flagWorkspace && flagWorkspace.trim().length > 0) { - return flagWorkspace.trim(); - } - return defaultWorkspace(config); -} diff --git a/foundry/packages/backend/src/driver.ts b/foundry/packages/backend/src/driver.ts index e96fea8..5c01035 100644 --- a/foundry/packages/backend/src/driver.ts +++ b/foundry/packages/backend/src/driver.ts @@ -1,64 +1,12 @@ -import type { BranchSnapshot } from "./integrations/git/index.js"; -import type { PullRequestSnapshot } from "./integrations/github/index.js"; -import { - validateRemote, - ensureCloned, - fetch, - listRemoteBranches, - remoteDefaultBaseRef, - revParse, - ensureRemoteBranch, - diffStatForBranch, - conflictsWithMain, -} from "./integrations/git/index.js"; -import { - gitSpiceAvailable, - gitSpiceListStack, - gitSpiceRebaseBranch, - gitSpiceReparentBranch, - gitSpiceRestackRepo, - gitSpiceRestackSubtree, - gitSpiceSyncRepo, - gitSpiceTrackBranch, -} from "./integrations/git-spice/index.js"; -import { listPullRequests, createPr, starRepository } from "./integrations/github/index.js"; - -export interface GitDriver { - validateRemote(remoteUrl: string, options?: { githubToken?: string | null }): Promise; - ensureCloned(remoteUrl: string, targetPath: string, options?: { githubToken?: string | null }): Promise; - fetch(repoPath: string, options?: { githubToken?: string | null }): Promise; - listRemoteBranches(repoPath: string, options?: { githubToken?: string | null }): Promise; - remoteDefaultBaseRef(repoPath: string): Promise; - revParse(repoPath: string, ref: string): Promise; - ensureRemoteBranch(repoPath: string, branchName: string, options?: { githubToken?: string | null }): Promise; - diffStatForBranch(repoPath: string, branchName: string): Promise; - conflictsWithMain(repoPath: string, branchName: string): Promise; -} - -export interface StackBranchSnapshot { - branchName: string; - 
parentBranch: string | null; -} - -export interface StackDriver { - available(repoPath: string): Promise; - listStack(repoPath: string): Promise; - syncRepo(repoPath: string): Promise; - restackRepo(repoPath: string): Promise; - restackSubtree(repoPath: string, branchName: string): Promise; - rebaseBranch(repoPath: string, branchName: string): Promise; - reparentBranch(repoPath: string, branchName: string, parentBranch: string): Promise; - trackBranch(repoPath: string, branchName: string, parentBranch: string): Promise; -} +import { createPr, starRepository } from "./integrations/github/index.js"; export interface GithubDriver { - listPullRequests(repoPath: string, options?: { githubToken?: string | null }): Promise; createPr( - repoPath: string, + repoFullName: string, headBranch: string, title: string, body?: string, - options?: { githubToken?: string | null }, + options?: { githubToken?: string | null; baseBranch?: string | null }, ): Promise<{ number: number; url: string }>; starRepository(repoFullName: string, options?: { githubToken?: string | null }): Promise; } @@ -68,37 +16,13 @@ export interface TmuxDriver { } export interface BackendDriver { - git: GitDriver; - stack: StackDriver; github: GithubDriver; tmux: TmuxDriver; } export function createDefaultDriver(): BackendDriver { return { - git: { - validateRemote, - ensureCloned, - fetch, - listRemoteBranches, - remoteDefaultBaseRef, - revParse, - ensureRemoteBranch, - diffStatForBranch, - conflictsWithMain, - }, - stack: { - available: gitSpiceAvailable, - listStack: gitSpiceListStack, - syncRepo: gitSpiceSyncRepo, - restackRepo: gitSpiceRestackRepo, - restackSubtree: gitSpiceRestackSubtree, - rebaseBranch: gitSpiceRebaseBranch, - reparentBranch: gitSpiceReparentBranch, - trackBranch: gitSpiceTrackBranch, - }, github: { - listPullRequests, createPr, starRepository, }, diff --git a/foundry/packages/backend/src/index.ts b/foundry/packages/backend/src/index.ts index fb75b94..3af36c3 100644 --- 
a/foundry/packages/backend/src/index.ts +++ b/foundry/packages/backend/src/index.ts @@ -3,14 +3,14 @@ import { cors } from "hono/cors"; import { randomUUID } from "node:crypto"; import { initActorRuntimeContext } from "./actors/context.js"; import { registry } from "./actors/index.js"; -import { workspaceKey } from "./actors/keys.js"; +import { organizationKey } from "./actors/keys.js"; import { loadConfig } from "./config/backend.js"; import { createBackends, createNotificationService } from "./notifications/index.js"; import { createDefaultDriver } from "./driver.js"; import { createClient } from "rivetkit/client"; import { initBetterAuthService } from "./services/better-auth.js"; import { createDefaultAppShellServices } from "./services/app-shell-runtime.js"; -import { APP_SHELL_WORKSPACE_ID } from "./actors/workspace/app-shell.js"; +import { APP_SHELL_ORGANIZATION_ID } from "./actors/organization/app-shell.js"; import { logger } from "./logging.js"; export interface BackendStartOptions { @@ -18,7 +18,7 @@ export interface BackendStartOptions { port?: number; } -interface AppWorkspaceLogContext { +interface AppOrganizationLogContext { action?: string; cfConnectingIp?: string; cfRay?: string; @@ -68,8 +68,8 @@ export async function startBackend(options: BackendStartOptions = {}): Promise ({ + const requestHeaderContext = (c: any): AppOrganizationLogContext => ({ cfConnectingIp: c.req.header("cf-connecting-ip") ?? undefined, cfRay: c.req.header("cf-ray") ?? undefined, forwardedFor: c.req.header("x-forwarded-for") ?? 
undefined, @@ -164,27 +164,27 @@ export async function startBackend(options: BackendStartOptions = {}): Promise { - if (cachedAppWorkspace) return cachedAppWorkspace; + const appOrganization = async (context: AppOrganizationLogContext = {}) => { + if (cachedAppOrganization) return cachedAppOrganization; const start = performance.now(); try { - const handle = await actorClient.workspace.getOrCreate(workspaceKey(APP_SHELL_WORKSPACE_ID), { - createWithInput: APP_SHELL_WORKSPACE_ID, + const handle = await actorClient.organization.getOrCreate(organizationKey(APP_SHELL_ORGANIZATION_ID), { + createWithInput: APP_SHELL_ORGANIZATION_ID, }); - cachedAppWorkspace = handle; + cachedAppOrganization = handle; logger.info( { ...context, cache: "miss", durationMs: Math.round((performance.now() - start) * 100) / 100, }, - "app_workspace_resolve", + "app_organization_resolve", ); return handle; } catch (error) { @@ -196,13 +196,13 @@ export async function startBackend(options: BackendStartOptions = {}): Promise ({ + const requestLogContext = (c: any, sessionId?: string): AppOrganizationLogContext => ({ ...requestHeaderContext(c), method: c.req.method, path: c.req.path, @@ -255,7 +255,7 @@ export async function startBackend(options: BackendStartOptions = {}): Promise { const payload = await c.req.text(); - await (await appWorkspace(requestLogContext(c))).handleAppStripeWebhook({ + await (await appOrganization(requestLogContext(c))).handleAppStripeWebhook({ payload, signatureHeader: c.req.header("stripe-signature") ?? null, }); @@ -276,7 +276,7 @@ export async function startBackend(options: BackendStartOptions = {}): Promise { const payload = await c.req.text(); - await (await appWorkspace(requestLogContext(c))).handleAppGithubWebhook({ + await (await appOrganization(requestLogContext(c))).handleAppGithubWebhook({ payload, signatureHeader: c.req.header("x-hub-signature-256") ?? null, eventHeader: c.req.header("x-github-event") ?? 
null, diff --git a/foundry/packages/backend/src/integrations/git-spice/index.ts b/foundry/packages/backend/src/integrations/git-spice/index.ts deleted file mode 100644 index 877c82a..0000000 --- a/foundry/packages/backend/src/integrations/git-spice/index.ts +++ /dev/null @@ -1,223 +0,0 @@ -import { execFile } from "node:child_process"; -import { promisify } from "node:util"; - -const execFileAsync = promisify(execFile); - -const DEFAULT_TIMEOUT_MS = 2 * 60_000; - -interface SpiceCommand { - command: string; - prefix: string[]; -} - -export interface SpiceStackEntry { - branchName: string; - parentBranch: string | null; -} - -function spiceCommands(): SpiceCommand[] { - const explicit = process.env.HF_GIT_SPICE_BIN?.trim(); - const list: SpiceCommand[] = []; - if (explicit) { - list.push({ command: explicit, prefix: [] }); - } - list.push({ command: "git-spice", prefix: [] }); - list.push({ command: "git", prefix: ["spice"] }); - return list; -} - -function commandLabel(cmd: SpiceCommand): string { - return [cmd.command, ...cmd.prefix].join(" "); -} - -function looksMissing(error: unknown): boolean { - const detail = error instanceof Error ? 
error.message : String(error); - return detail.includes("ENOENT") || detail.includes("not a git command") || detail.includes("command not found"); -} - -async function tryRun(repoPath: string, cmd: SpiceCommand, args: string[]): Promise<{ stdout: string; stderr: string }> { - return await execFileAsync(cmd.command, [...cmd.prefix, ...args], { - cwd: repoPath, - timeout: DEFAULT_TIMEOUT_MS, - maxBuffer: 1024 * 1024 * 8, - env: { - ...process.env, - NO_COLOR: "1", - FORCE_COLOR: "0", - }, - }); -} - -async function pickCommand(repoPath: string): Promise { - for (const candidate of spiceCommands()) { - try { - await tryRun(repoPath, candidate, ["--help"]); - return candidate; - } catch (error) { - if (looksMissing(error)) { - continue; - } - } - } - return null; -} - -async function runSpice(repoPath: string, args: string[]): Promise<{ stdout: string; stderr: string }> { - const cmd = await pickCommand(repoPath); - if (!cmd) { - throw new Error("git-spice is not available (set HF_GIT_SPICE_BIN or install git-spice)"); - } - return await tryRun(repoPath, cmd, args); -} - -function parseLogJson(stdout: string): SpiceStackEntry[] { - const trimmed = stdout.trim(); - if (!trimmed) { - return []; - } - - const entries: SpiceStackEntry[] = []; - - // `git-spice log ... --json` prints one JSON object per line. - for (const line of trimmed.split("\n")) { - const raw = line.trim(); - if (!raw.startsWith("{")) { - continue; - } - try { - const value = JSON.parse(raw) as { - name?: string; - branch?: string; - parent?: string | null; - parentBranch?: string | null; - }; - const branchName = (value.name ?? value.branch ?? "").trim(); - if (!branchName) { - continue; - } - const parentRaw = value.parent ?? value.parentBranch ?? null; - const parentBranch = parentRaw ? 
parentRaw.trim() || null : null; - entries.push({ branchName, parentBranch }); - } catch { - continue; - } - } - - const seen = new Set(); - return entries.filter((entry) => { - if (seen.has(entry.branchName)) { - return false; - } - seen.add(entry.branchName); - return true; - }); -} - -async function runFallbacks(repoPath: string, commands: string[][], errorContext: string): Promise { - const failures: string[] = []; - for (const args of commands) { - try { - await runSpice(repoPath, args); - return; - } catch (error) { - failures.push(`${args.join(" ")} :: ${error instanceof Error ? error.message : String(error)}`); - } - } - throw new Error(`${errorContext}. attempts=${failures.join(" | ")}`); -} - -export async function gitSpiceAvailable(repoPath: string): Promise { - return (await pickCommand(repoPath)) !== null; -} - -export async function gitSpiceListStack(repoPath: string): Promise { - try { - const { stdout } = await runSpice(repoPath, ["log", "short", "--all", "--json", "--no-cr-status", "--no-prompt"]); - return parseLogJson(stdout); - } catch { - return []; - } -} - -export async function gitSpiceSyncRepo(repoPath: string): Promise { - await runFallbacks( - repoPath, - [ - ["repo", "sync", "--restack", "--no-prompt"], - ["repo", "sync", "--restack"], - ["repo", "sync"], - ], - "git-spice repo sync failed", - ); -} - -export async function gitSpiceRestackRepo(repoPath: string): Promise { - await runFallbacks( - repoPath, - [ - ["repo", "restack", "--no-prompt"], - ["repo", "restack"], - ], - "git-spice repo restack failed", - ); -} - -export async function gitSpiceRestackSubtree(repoPath: string, branchName: string): Promise { - await runFallbacks( - repoPath, - [ - ["upstack", "restack", "--branch", branchName, "--no-prompt"], - ["upstack", "restack", "--branch", branchName], - ["branch", "restack", "--branch", branchName, "--no-prompt"], - ["branch", "restack", "--branch", branchName], - ], - `git-spice restack subtree failed for ${branchName}`, - ); 
-} - -export async function gitSpiceRebaseBranch(repoPath: string, branchName: string): Promise { - await runFallbacks( - repoPath, - [ - ["branch", "restack", "--branch", branchName, "--no-prompt"], - ["branch", "restack", "--branch", branchName], - ], - `git-spice branch restack failed for ${branchName}`, - ); -} - -export async function gitSpiceReparentBranch(repoPath: string, branchName: string, parentBranch: string): Promise { - await runFallbacks( - repoPath, - [ - ["upstack", "onto", "--branch", branchName, parentBranch, "--no-prompt"], - ["upstack", "onto", "--branch", branchName, parentBranch], - ["branch", "onto", "--branch", branchName, parentBranch, "--no-prompt"], - ["branch", "onto", "--branch", branchName, parentBranch], - ], - `git-spice reparent failed for ${branchName} -> ${parentBranch}`, - ); -} - -export async function gitSpiceTrackBranch(repoPath: string, branchName: string, parentBranch: string): Promise { - await runFallbacks( - repoPath, - [ - ["branch", "track", branchName, "--base", parentBranch, "--no-prompt"], - ["branch", "track", branchName, "--base", parentBranch], - ], - `git-spice track failed for ${branchName}`, - ); -} - -export function normalizeBaseBranchName(ref: string): string { - const trimmed = ref.trim(); - if (!trimmed) { - return "main"; - } - return trimmed.startsWith("origin/") ? trimmed.slice("origin/".length) : trimmed; -} - -export function describeSpiceCommandForLogs(repoPath: string): Promise { - return pickCommand(repoPath).then((cmd) => (cmd ? 
commandLabel(cmd) : null)); -} diff --git a/foundry/packages/backend/src/integrations/git/index.ts b/foundry/packages/backend/src/integrations/git/index.ts deleted file mode 100644 index 880e0f5..0000000 --- a/foundry/packages/backend/src/integrations/git/index.ts +++ /dev/null @@ -1,313 +0,0 @@ -import { execFile } from "node:child_process"; -import { chmodSync, existsSync, mkdirSync, mkdtempSync, writeFileSync } from "node:fs"; -import { tmpdir } from "node:os"; -import { dirname, resolve } from "node:path"; -import { promisify } from "node:util"; - -const execFileAsync = promisify(execFile); - -const DEFAULT_GIT_VALIDATE_REMOTE_TIMEOUT_MS = 15_000; -const DEFAULT_GIT_FETCH_TIMEOUT_MS = 2 * 60_000; -const DEFAULT_GIT_CLONE_TIMEOUT_MS = 5 * 60_000; - -interface GitAuthOptions { - githubToken?: string | null; -} - -function resolveGithubToken(options?: GitAuthOptions): string | null { - const token = options?.githubToken ?? process.env.GH_TOKEN ?? process.env.GITHUB_TOKEN ?? process.env.HF_GITHUB_TOKEN ?? process.env.HF_GH_TOKEN ?? null; - if (!token) return null; - const trimmed = token.trim(); - return trimmed.length > 0 ? trimmed : null; -} - -let cachedAskpassPath: string | null = null; -function ensureAskpassScript(): string { - if (cachedAskpassPath) { - return cachedAskpassPath; - } - - const dir = mkdtempSync(resolve(tmpdir(), "foundry-git-askpass-")); - const path = resolve(dir, "askpass.sh"); - - // Git invokes $GIT_ASKPASS with the prompt string as argv[1]. Provide both username and password. - // We avoid embedding the token in this file; it is read from env at runtime. - const content = [ - "#!/bin/sh", - 'prompt="$1"', - // Prefer GH_TOKEN/GITHUB_TOKEN but support HF_* aliases too. 
- 'token="${GH_TOKEN:-${GITHUB_TOKEN:-${HF_GITHUB_TOKEN:-${HF_GH_TOKEN:-}}}}"', - 'case "$prompt" in', - ' *Username*) echo "x-access-token" ;;', - ' *Password*) echo "$token" ;;', - ' *) echo "" ;;', - "esac", - "", - ].join("\n"); - - writeFileSync(path, content, "utf8"); - chmodSync(path, 0o700); - cachedAskpassPath = path; - return path; -} - -function gitEnv(options?: GitAuthOptions): Record { - const env: Record = { ...(process.env as Record) }; - env.GIT_TERMINAL_PROMPT = "0"; - - const token = resolveGithubToken(options); - if (token) { - env.GIT_ASKPASS = ensureAskpassScript(); - // Some tooling expects these vars; keep them aligned. - env.GITHUB_TOKEN = token; - env.GH_TOKEN = token; - } - - return env; -} - -async function configureGithubAuth(repoPath: string, options?: GitAuthOptions): Promise { - const token = resolveGithubToken(options); - if (!token) { - return; - } - - const authHeader = Buffer.from(`x-access-token:${token}`, "utf8").toString("base64"); - await execFileAsync("git", ["-C", repoPath, "config", "--local", "credential.helper", ""], { - env: gitEnv(options), - }); - await execFileAsync("git", ["-C", repoPath, "config", "--local", "http.https://github.com/.extraheader", `AUTHORIZATION: basic ${authHeader}`], { - env: gitEnv(options), - }); -} - -export interface BranchSnapshot { - branchName: string; - commitSha: string; -} - -export async function fetch(repoPath: string, options?: GitAuthOptions): Promise { - await execFileAsync("git", ["-C", repoPath, "fetch", "--prune", "--no-auto-gc"], { - timeout: DEFAULT_GIT_FETCH_TIMEOUT_MS, - env: gitEnv(options), - }); -} - -export async function revParse(repoPath: string, ref: string): Promise { - const { stdout } = await execFileAsync("git", ["-C", repoPath, "rev-parse", ref], { env: gitEnv() }); - return stdout.trim(); -} - -export async function validateRemote(remoteUrl: string, options?: GitAuthOptions): Promise { - const remote = remoteUrl.trim(); - if (!remote) { - throw new 
Error("remoteUrl is required"); - } - try { - await execFileAsync("git", ["ls-remote", "--exit-code", remote, "HEAD"], { - // This command does not need repo context. Running from a neutral directory - // avoids inheriting broken worktree .git indirection inside dev containers. - cwd: tmpdir(), - maxBuffer: 1024 * 1024, - timeout: DEFAULT_GIT_VALIDATE_REMOTE_TIMEOUT_MS, - env: gitEnv(options), - }); - } catch (error) { - const detail = error instanceof Error ? error.message : String(error); - throw new Error(`git remote validation failed: ${detail}`); - } -} - -function isGitRepo(path: string): boolean { - return existsSync(resolve(path, ".git")); -} - -export async function ensureCloned(remoteUrl: string, targetPath: string, options?: GitAuthOptions): Promise { - const remote = remoteUrl.trim(); - if (!remote) { - throw new Error("remoteUrl is required"); - } - - if (existsSync(targetPath)) { - if (!isGitRepo(targetPath)) { - throw new Error(`targetPath exists but is not a git repo: ${targetPath}`); - } - - // Keep origin aligned with the configured remote URL. 
- await execFileAsync("git", ["-C", targetPath, "remote", "set-url", "origin", remote], { - maxBuffer: 1024 * 1024, - timeout: DEFAULT_GIT_FETCH_TIMEOUT_MS, - env: gitEnv(options), - }); - await configureGithubAuth(targetPath, options); - await fetch(targetPath, options); - return; - } - - mkdirSync(dirname(targetPath), { recursive: true }); - await execFileAsync("git", ["clone", remote, targetPath], { - maxBuffer: 1024 * 1024 * 8, - timeout: DEFAULT_GIT_CLONE_TIMEOUT_MS, - env: gitEnv(options), - }); - await configureGithubAuth(targetPath, options); - await fetch(targetPath, options); - await ensureLocalBaseBranch(targetPath); -} - -async function hasLocalBranches(repoPath: string): Promise { - try { - const { stdout } = await execFileAsync("git", ["-C", repoPath, "for-each-ref", "--format=%(refname:short)", "refs/heads"], { - env: gitEnv(), - }); - return stdout - .split("\n") - .map((line) => line.trim()) - .some(Boolean); - } catch { - return false; - } -} - -async function ensureLocalBaseBranch(repoPath: string): Promise { - if (await hasLocalBranches(repoPath)) { - return; - } - - const baseRef = await remoteDefaultBaseRef(repoPath); - const localBranch = baseRef.replace(/^origin\//, ""); - - await execFileAsync("git", ["-C", repoPath, "checkout", "-B", localBranch, baseRef], { - maxBuffer: 1024 * 1024, - timeout: DEFAULT_GIT_FETCH_TIMEOUT_MS, - env: gitEnv(), - }); -} - -export async function remoteDefaultBaseRef(repoPath: string): Promise { - try { - const { stdout } = await execFileAsync("git", ["-C", repoPath, "symbolic-ref", "refs/remotes/origin/HEAD"], { env: gitEnv() }); - const ref = stdout.trim(); // refs/remotes/origin/main - const match = ref.match(/^refs\/remotes\/(.+)$/); - if (match?.[1]) { - return match[1]; - } - } catch { - // fall through - } - - const candidates = ["origin/main", "origin/master", "main", "master"]; - for (const ref of candidates) { - try { - await execFileAsync("git", ["-C", repoPath, "rev-parse", "--verify", ref], { env: 
gitEnv() }); - return ref; - } catch { - continue; - } - } - return "origin/main"; -} - -export async function listRemoteBranches(repoPath: string, options?: GitAuthOptions): Promise { - await fetch(repoPath, options); - const { stdout } = await execFileAsync("git", ["-C", repoPath, "for-each-ref", "--format=%(refname:short) %(objectname)", "refs/remotes/origin"], { - maxBuffer: 1024 * 1024, - env: gitEnv(options), - }); - - return stdout - .trim() - .split("\n") - .filter((line) => line.trim().length > 0) - .map((line) => { - const [refName, commitSha] = line.trim().split(/\s+/, 2); - const short = (refName ?? "").trim(); - const branchName = short.replace(/^origin\//, ""); - return { branchName, commitSha: commitSha ?? "" }; - }) - .filter((row) => row.branchName.length > 0 && row.branchName !== "HEAD" && row.branchName !== "origin" && row.commitSha.length > 0); -} - -async function remoteBranchExists(repoPath: string, branchName: string): Promise { - try { - await execFileAsync("git", ["-C", repoPath, "show-ref", "--verify", `refs/remotes/origin/${branchName}`], { env: gitEnv() }); - return true; - } catch { - return false; - } -} - -export async function ensureRemoteBranch(repoPath: string, branchName: string, options?: GitAuthOptions): Promise { - await fetch(repoPath, options); - await ensureLocalBaseBranch(repoPath); - if (await remoteBranchExists(repoPath, branchName)) { - return; - } - - const baseRef = await remoteDefaultBaseRef(repoPath); - await execFileAsync("git", ["-C", repoPath, "push", "origin", `${baseRef}:refs/heads/${branchName}`], { - maxBuffer: 1024 * 1024 * 2, - env: gitEnv(options), - }); - await fetch(repoPath, options); -} - -export async function diffStatForBranch(repoPath: string, branchName: string): Promise { - try { - const baseRef = await remoteDefaultBaseRef(repoPath); - const headRef = `origin/${branchName}`; - const { stdout } = await execFileAsync("git", ["-C", repoPath, "diff", "--shortstat", `${baseRef}...${headRef}`], { - 
maxBuffer: 1024 * 1024, - env: gitEnv(), - }); - const trimmed = stdout.trim(); - if (!trimmed) { - return "+0/-0"; - } - const insertMatch = trimmed.match(/(\d+)\s+insertion/); - const deleteMatch = trimmed.match(/(\d+)\s+deletion/); - const insertions = insertMatch ? insertMatch[1] : "0"; - const deletions = deleteMatch ? deleteMatch[1] : "0"; - return `+${insertions}/-${deletions}`; - } catch { - return "+0/-0"; - } -} - -export async function conflictsWithMain(repoPath: string, branchName: string): Promise { - try { - const baseRef = await remoteDefaultBaseRef(repoPath); - const headRef = `origin/${branchName}`; - // Use merge-tree (git 2.38+) for a clean conflict check. - try { - await execFileAsync("git", ["-C", repoPath, "merge-tree", "--write-tree", "--no-messages", baseRef, headRef], { env: gitEnv() }); - // If merge-tree exits 0, no conflicts. Non-zero exit means conflicts. - return false; - } catch { - // merge-tree exits non-zero when there are conflicts - return true; - } - } catch { - return false; - } -} - -export async function getOriginOwner(repoPath: string): Promise { - try { - const { stdout } = await execFileAsync("git", ["-C", repoPath, "remote", "get-url", "origin"], { env: gitEnv() }); - const url = stdout.trim(); - // Handle SSH: git@github.com:owner/repo.git - const sshMatch = url.match(/[:\/]([^\/]+)\/[^\/]+(?:\.git)?$/); - if (sshMatch) { - return sshMatch[1] ?? ""; - } - // Handle HTTPS: https://github.com/owner/repo.git - const httpsMatch = url.match(/\/\/[^\/]+\/([^\/]+)\//); - if (httpsMatch) { - return httpsMatch[1] ?? 
""; - } - return ""; - } catch { - return ""; - } -} diff --git a/foundry/packages/backend/src/integrations/github/index.ts b/foundry/packages/backend/src/integrations/github/index.ts index 536c9db..87fc996 100644 --- a/foundry/packages/backend/src/integrations/github/index.ts +++ b/foundry/packages/backend/src/integrations/github/index.ts @@ -1,262 +1,80 @@ -import { execFile } from "node:child_process"; -import { promisify } from "node:util"; - -const execFileAsync = promisify(execFile); - interface GithubAuthOptions { githubToken?: string | null; + baseBranch?: string | null; } -function ghEnv(options?: GithubAuthOptions): Record { - const env: Record = { ...(process.env as Record) }; +function authHeaders(options?: GithubAuthOptions): HeadersInit { const token = options?.githubToken?.trim(); - if (token) { - env.GH_TOKEN = token; - env.GITHUB_TOKEN = token; + if (!token) { + throw new Error("GitHub token is required for this operation"); } - return env; -} - -export interface PullRequestSnapshot { - number: number; - headRefName: string; - state: string; - title: string; - url: string; - author: string; - isDraft: boolean; - ciStatus: string | null; - reviewStatus: string | null; - reviewer: string | null; -} - -interface GhPrListItem { - number: number; - headRefName: string; - state: string; - title: string; - url?: string; - author?: { login?: string }; - isDraft?: boolean; - statusCheckRollup?: Array<{ - state?: string; - status?: string; - conclusion?: string; - __typename?: string; - }>; - reviews?: Array<{ - state?: string; - author?: { login?: string }; - }>; -} - -function parseCiStatus(checks: GhPrListItem["statusCheckRollup"]): string | null { - if (!checks || checks.length === 0) return null; - - let total = 0; - let successes = 0; - let hasRunning = false; - - for (const check of checks) { - total++; - const conclusion = check.conclusion?.toUpperCase(); - const state = check.state?.toUpperCase(); - const status = check.status?.toUpperCase(); - - if 
(conclusion === "SUCCESS" || state === "SUCCESS") { - successes++; - } else if (status === "IN_PROGRESS" || status === "QUEUED" || status === "PENDING" || state === "PENDING") { - hasRunning = true; - } - } - - if (hasRunning && successes < total) { - return "running"; - } - - return `${successes}/${total}`; -} - -function parseReviewStatus(reviews: GhPrListItem["reviews"]): { status: string | null; reviewer: string | null } { - if (!reviews || reviews.length === 0) { - return { status: null, reviewer: null }; - } - - // Build a map of latest review per author - const latestByAuthor = new Map(); - for (const review of reviews) { - const login = review.author?.login ?? "unknown"; - const state = review.state?.toUpperCase() ?? ""; - if (state === "COMMENTED") continue; // Skip comments, only track actionable reviews - latestByAuthor.set(login, { state, login }); - } - - // Check for CHANGES_REQUESTED first (takes priority), then APPROVED - for (const [, entry] of latestByAuthor) { - if (entry.state === "CHANGES_REQUESTED") { - return { status: "CHANGES_REQUESTED", reviewer: entry.login }; - } - } - - for (const [, entry] of latestByAuthor) { - if (entry.state === "APPROVED") { - return { status: "APPROVED", reviewer: entry.login }; - } - } - - // If there are reviews but none are APPROVED or CHANGES_REQUESTED - if (latestByAuthor.size > 0) { - const first = latestByAuthor.values().next().value; - return { status: "PENDING", reviewer: first?.login ?? null }; - } - - return { status: null, reviewer: null }; -} - -function snapshotFromGhItem(item: GhPrListItem): PullRequestSnapshot { - const { status: reviewStatus, reviewer } = parseReviewStatus(item.reviews); return { - number: item.number, - headRefName: item.headRefName, - state: item.state, - title: item.title, - url: item.url ?? "", - author: item.author?.login ?? "", - isDraft: item.isDraft ?? 
false, - ciStatus: parseCiStatus(item.statusCheckRollup), - reviewStatus, - reviewer, + Accept: "application/vnd.github+json", + Authorization: `Bearer ${token}`, + "X-GitHub-Api-Version": "2022-11-28", }; } -const PR_JSON_FIELDS = "number,headRefName,state,title,url,author,isDraft,statusCheckRollup,reviews"; - -export async function listPullRequests(repoPath: string, options?: GithubAuthOptions): Promise { - try { - const { stdout } = await execFileAsync("gh", ["pr", "list", "--json", PR_JSON_FIELDS, "--limit", "200"], { - maxBuffer: 1024 * 1024 * 4, - cwd: repoPath, - env: ghEnv(options), - }); - - const parsed = JSON.parse(stdout) as GhPrListItem[]; - - return parsed.map((item) => { - // Handle fork PRs where headRefName may contain "owner:branch" - const headRefName = item.headRefName.includes(":") ? (item.headRefName.split(":").pop() ?? item.headRefName) : item.headRefName; - - return snapshotFromGhItem({ ...item, headRefName }); - }); - } catch { - return []; - } -} - -export async function getPrInfo(repoPath: string, branchName: string, options?: GithubAuthOptions): Promise { - try { - const { stdout } = await execFileAsync("gh", ["pr", "view", branchName, "--json", PR_JSON_FIELDS], { - maxBuffer: 1024 * 1024 * 4, - cwd: repoPath, - env: ghEnv(options), - }); - - const item = JSON.parse(stdout) as GhPrListItem; - return snapshotFromGhItem(item); - } catch { - return null; - } +async function githubRequest(path: string, init: RequestInit, options?: GithubAuthOptions): Promise { + return await fetch(`https://api.github.com${path}`, { + ...init, + headers: { + ...authHeaders(options), + ...(init.headers ?? 
{}), + }, + }); } export async function createPr( - repoPath: string, + repoFullName: string, headBranch: string, title: string, body?: string, options?: GithubAuthOptions, ): Promise<{ number: number; url: string }> { - const args = ["pr", "create", "--title", title, "--head", headBranch]; - if (body) { - args.push("--body", body); - } else { - args.push("--body", ""); + const baseBranch = options?.baseBranch?.trim() || "main"; + const response = await githubRequest( + `/repos/${repoFullName}/pulls`, + { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ + title, + head: headBranch, + base: baseBranch, + body: body ?? "", + }), + }, + options, + ); + + const payload = (await response.json()) as { number?: number; html_url?: string; message?: string }; + if (!response.ok || !payload.number || !payload.html_url) { + throw new Error(payload.message ?? `Failed to create pull request for ${repoFullName}`); } - const { stdout } = await execFileAsync("gh", args, { - maxBuffer: 1024 * 1024, - cwd: repoPath, - env: ghEnv(options), - }); - - // gh pr create outputs the PR URL on success - const url = stdout.trim(); - // Extract PR number from URL: https://github.com/owner/repo/pull/123 - const numberMatch = url.match(/\/pull\/(\d+)/); - const number = numberMatch ? parseInt(numberMatch[1]!, 10) : 0; - - return { number, url }; + return { + number: payload.number, + url: payload.html_url, + }; } export async function starRepository(repoFullName: string, options?: GithubAuthOptions): Promise { - try { - await execFileAsync("gh", ["api", "--method", "PUT", `user/starred/${repoFullName}`], { - maxBuffer: 1024 * 1024, - env: ghEnv(options), - }); - } catch (error) { - const message = - error instanceof Error ? error.message : `Failed to star GitHub repository ${repoFullName}. 
Ensure GitHub auth is configured for the backend.`; - throw new Error(message); - } -} - -export async function getAllowedMergeMethod(repoPath: string, options?: GithubAuthOptions): Promise<"squash" | "rebase" | "merge"> { - try { - // Get the repo owner/name from gh - const { stdout: repoJson } = await execFileAsync("gh", ["repo", "view", "--json", "owner,name"], { cwd: repoPath, env: ghEnv(options) }); - const repo = JSON.parse(repoJson) as { owner: { login: string }; name: string }; - const repoFullName = `${repo.owner.login}/${repo.name}`; - - const { stdout } = await execFileAsync("gh", ["api", `repos/${repoFullName}`, "--jq", ".allow_squash_merge, .allow_rebase_merge, .allow_merge_commit"], { - maxBuffer: 1024 * 1024, - cwd: repoPath, - env: ghEnv(options), - }); - - const lines = stdout.trim().split("\n"); - const allowSquash = lines[0]?.trim() === "true"; - const allowRebase = lines[1]?.trim() === "true"; - const allowMerge = lines[2]?.trim() === "true"; - - if (allowSquash) return "squash"; - if (allowRebase) return "rebase"; - if (allowMerge) return "merge"; - return "squash"; - } catch { - return "squash"; - } -} - -export async function mergePr(repoPath: string, prNumber: number, options?: GithubAuthOptions): Promise { - const method = await getAllowedMergeMethod(repoPath, options); - await execFileAsync("gh", ["pr", "merge", String(prNumber), `--${method}`, "--delete-branch"], { cwd: repoPath, env: ghEnv(options) }); -} - -export async function isPrMerged(repoPath: string, branchName: string, options?: GithubAuthOptions): Promise { - try { - const { stdout } = await execFileAsync("gh", ["pr", "view", branchName, "--json", "state"], { cwd: repoPath, env: ghEnv(options) }); - const parsed = JSON.parse(stdout) as { state: string }; - return parsed.state.toUpperCase() === "MERGED"; - } catch { - return false; - } -} - -export async function getPrTitle(repoPath: string, branchName: string): Promise { - try { - const { stdout } = await execFileAsync("gh", 
["pr", "view", branchName, "--json", "title"], { cwd: repoPath }); - const parsed = JSON.parse(stdout) as { title: string }; - return parsed.title; - } catch { - return null; + const response = await githubRequest( + `/user/starred/${repoFullName}`, + { + method: "PUT", + headers: { + "Content-Length": "0", + }, + }, + options, + ); + + if (!response.ok) { + const payload = (await response.json().catch(() => null)) as { message?: string } | null; + throw new Error(payload?.message ?? `Failed to star GitHub repository ${repoFullName}`); } } diff --git a/foundry/packages/backend/src/integrations/graphite/index.ts b/foundry/packages/backend/src/integrations/graphite/index.ts deleted file mode 100644 index 4c708b0..0000000 --- a/foundry/packages/backend/src/integrations/graphite/index.ts +++ /dev/null @@ -1,140 +0,0 @@ -import { execFile } from "node:child_process"; -import { promisify } from "node:util"; - -const execFileAsync = promisify(execFile); - -export async function graphiteAvailable(repoPath: string): Promise { - try { - await execFileAsync("gt", ["trunk"], { cwd: repoPath }); - return true; - } catch { - return false; - } -} - -export async function graphiteGet(repoPath: string, branchName: string): Promise { - try { - await execFileAsync("gt", ["get", branchName], { cwd: repoPath }); - return true; - } catch { - return false; - } -} - -export async function graphiteCreateBranch(repoPath: string, branchName: string): Promise { - await execFileAsync("gt", ["create", branchName], { cwd: repoPath }); -} - -export async function graphiteCheckout(repoPath: string, branchName: string): Promise { - await execFileAsync("gt", ["checkout", branchName], { cwd: repoPath }); -} - -export async function graphiteSubmit(repoPath: string): Promise { - await execFileAsync("gt", ["submit", "--no-edit"], { cwd: repoPath }); -} - -export async function graphiteMergeBranch(repoPath: string, branchName: string): Promise { - await execFileAsync("gt", ["merge", branchName], { cwd: 
repoPath }); -} - -export async function graphiteAbandon(repoPath: string, branchName: string): Promise { - await execFileAsync("gt", ["abandon", branchName], { cwd: repoPath }); -} - -export interface GraphiteStackEntry { - branchName: string; - parentBranch: string | null; -} - -export async function graphiteGetStack(repoPath: string): Promise { - try { - // Try JSON output first - const { stdout } = await execFileAsync("gt", ["log", "--json"], { - cwd: repoPath, - maxBuffer: 1024 * 1024, - }); - - const parsed = JSON.parse(stdout) as Array<{ - branch?: string; - name?: string; - parent?: string; - parentBranch?: string; - }>; - - return parsed.map((entry) => ({ - branchName: entry.branch ?? entry.name ?? "", - parentBranch: entry.parent ?? entry.parentBranch ?? null, - })); - } catch { - // Fall back to text parsing of `gt log` - try { - const { stdout } = await execFileAsync("gt", ["log"], { - cwd: repoPath, - maxBuffer: 1024 * 1024, - }); - - const entries: GraphiteStackEntry[] = []; - const lines = stdout.split("\n").filter((l) => l.trim().length > 0); - - // Parse indented tree output: each line has tree chars (|, /, \, -, etc.) - // followed by branch names. Build parent-child from indentation level. - const branchStack: string[] = []; - - for (const line of lines) { - // Strip ANSI color codes - const clean = line.replace(/\x1b\[[0-9;]*m/g, ""); - // Extract branch name: skip tree characters and whitespace - const branchMatch = clean.match(/[│├└─|/\\*\s]*(?:◉|○|●)?\s*(.+)/); - if (!branchMatch) continue; - - const branchName = branchMatch[1]!.trim(); - if (!branchName || branchName.startsWith("(") || branchName === "") continue; - - // Determine indentation level by counting leading whitespace/tree chars - const indent = clean.search(/[a-zA-Z0-9]/); - const level = Math.max(0, Math.floor(indent / 2)); - - // Trim stack to current level - while (branchStack.length > level) { - branchStack.pop(); - } - - const parentBranch = branchStack.length > 0 ? 
(branchStack[branchStack.length - 1] ?? null) : null; - - entries.push({ branchName, parentBranch }); - branchStack.push(branchName); - } - - return entries; - } catch { - return []; - } - } -} - -export async function graphiteGetParent(repoPath: string, branchName: string): Promise { - try { - // Try `gt get ` to see parent info - const { stdout } = await execFileAsync("gt", ["get", branchName], { - cwd: repoPath, - maxBuffer: 1024 * 1024, - }); - - // Parse output for parent branch reference - const parentMatch = stdout.match(/parent:\s*(\S+)/i); - if (parentMatch) { - return parentMatch[1] ?? null; - } - } catch { - // Fall through to stack-based lookup - } - - // Fall back to stack info - try { - const stack = await graphiteGetStack(repoPath); - const entry = stack.find((e) => e.branchName === branchName); - return entry?.parentBranch ?? null; - } catch { - return null; - } -} diff --git a/foundry/packages/backend/src/sandbox-config.ts b/foundry/packages/backend/src/sandbox-config.ts index 4fa388f..9d85f51 100644 --- a/foundry/packages/backend/src/sandbox-config.ts +++ b/foundry/packages/backend/src/sandbox-config.ts @@ -1,10 +1,10 @@ -import type { AppConfig, ProviderId } from "@sandbox-agent/foundry-shared"; +import type { AppConfig, SandboxProviderId } from "@sandbox-agent/foundry-shared"; function hasE2BApiKey(config: AppConfig): boolean { - return Boolean(config.providers.e2b.apiKey?.trim()); + return Boolean(config.sandboxProviders.e2b.apiKey?.trim()); } -function forcedSandboxProviderId(): ProviderId | null { +function forcedSandboxProviderId(): SandboxProviderId | null { const raw = process.env.FOUNDRY_SANDBOX_PROVIDER?.trim() ?? process.env.HF_SANDBOX_PROVIDER?.trim() ?? 
null; if (raw === "local" || raw === "e2b") { return raw; @@ -12,7 +12,7 @@ function forcedSandboxProviderId(): ProviderId | null { return null; } -export function defaultSandboxProviderId(config: AppConfig): ProviderId { +export function defaultSandboxProviderId(config: AppConfig): SandboxProviderId { const forced = forcedSandboxProviderId(); if (forced === "local") { return "local"; @@ -26,11 +26,11 @@ export function defaultSandboxProviderId(config: AppConfig): ProviderId { return hasE2BApiKey(config) ? "e2b" : "local"; } -export function availableSandboxProviderIds(config: AppConfig): ProviderId[] { +export function availableSandboxProviderIds(config: AppConfig): SandboxProviderId[] { return hasE2BApiKey(config) ? ["e2b", "local"] : ["local"]; } -export function resolveSandboxProviderId(config: AppConfig, requested?: ProviderId | null): ProviderId { +export function resolveSandboxProviderId(config: AppConfig, requested?: SandboxProviderId | null): SandboxProviderId { if (requested === "e2b" && !hasE2BApiKey(config)) { throw new Error("E2B provider is not configured. 
Set E2B_API_KEY before selecting the e2b provider."); } diff --git a/foundry/packages/backend/src/services/app-github.ts b/foundry/packages/backend/src/services/app-github.ts index 065c382..6cb6db3 100644 --- a/foundry/packages/backend/src/services/app-github.ts +++ b/foundry/packages/backend/src/services/app-github.ts @@ -38,6 +38,12 @@ export interface GitHubRepositoryRecord { fullName: string; cloneUrl: string; private: boolean; + defaultBranch: string; +} + +export interface GitHubBranchRecord { + name: string; + commitSha: string; } export interface GitHubMemberRecord { @@ -341,12 +347,14 @@ export class GitHubAppClient { full_name: string; clone_url: string; private: boolean; + default_branch: string; }>("/user/repos?per_page=100&affiliation=owner,collaborator,organization_member&sort=updated", accessToken); return repositories.map((repository) => ({ fullName: repository.full_name, cloneUrl: repository.clone_url, private: repository.private, + defaultBranch: repository.default_branch, })); } @@ -356,12 +364,14 @@ export class GitHubAppClient { full_name: string; clone_url: string; private: boolean; + default_branch: string; }>("/installation/repositories?per_page=100", accessToken); return repositories.map((repository) => ({ fullName: repository.full_name, cloneUrl: repository.clone_url, private: repository.private, + defaultBranch: repository.default_branch, })); } @@ -371,11 +381,13 @@ export class GitHubAppClient { full_name: string; clone_url: string; private: boolean; + default_branch: string; }>(`/repos/${fullName}`, accessToken); return { fullName: repository.full_name, cloneUrl: repository.clone_url, private: repository.private, + defaultBranch: repository.default_branch, }; } catch (error) { if (error instanceof GitHubAppError && error.status === 404) { @@ -390,6 +402,15 @@ export class GitHubAppClient { return await this.getUserRepository(accessToken, fullName); } + async listUserRepositoryBranches(accessToken: string, fullName: string): Promise { + 
return await this.listRepositoryBranches(accessToken, fullName); + } + + async listInstallationRepositoryBranches(installationId: number, fullName: string): Promise { + const accessToken = await this.createInstallationAccessToken(installationId); + return await this.listRepositoryBranches(accessToken, fullName); + } + async listOrganizationMembers(accessToken: string, organizationLogin: string): Promise { const members = await this.paginate<{ id: number; @@ -687,6 +708,20 @@ export class GitHubAppClient { nextUrl: parseNextLink(response.headers.get("link")), }; } + + private async listRepositoryBranches(accessToken: string, fullName: string): Promise { + const branches = await this.paginate<{ + name: string; + commit?: { sha?: string | null } | null; + }>(`/repos/${fullName}/branches?per_page=100`, accessToken); + + return branches + .map((branch) => ({ + name: branch.name?.trim() ?? "", + commitSha: branch.commit?.sha?.trim() ?? "", + })) + .filter((branch) => branch.name.length > 0 && branch.commitSha.length > 0); + } } function parseNextLink(linkHeader: string | null): string | null { diff --git a/foundry/packages/backend/src/services/better-auth.ts b/foundry/packages/backend/src/services/better-auth.ts index 325ea59..4509402 100644 --- a/foundry/packages/backend/src/services/better-auth.ts +++ b/foundry/packages/backend/src/services/better-auth.ts @@ -1,7 +1,7 @@ import { betterAuth } from "better-auth"; import { createAdapterFactory } from "better-auth/adapters"; -import { APP_SHELL_WORKSPACE_ID } from "../actors/workspace/app-shell.js"; -import { authUserKey, workspaceKey } from "../actors/keys.js"; +import { APP_SHELL_ORGANIZATION_ID } from "../actors/organization/app-shell.js"; +import { authUserKey, organizationKey } from "../actors/keys.js"; import { logger } from "../logging.js"; const AUTH_BASE_PATH = "/v1/auth"; @@ -43,7 +43,7 @@ async function callAuthEndpoint(auth: any, url: string, init?: RequestInit): Pro return await auth.handler(new Request(url, 
init)); } -function resolveRouteUserId(workspace: any, resolved: any): string | null { +function resolveRouteUserId(organization: any, resolved: any): string | null { if (!resolved) { return null; } @@ -75,11 +75,11 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin } // getOrCreate is intentional here: the adapter runs during Better Auth callbacks - // which can fire before any explicit create path. The app workspace and auth user + // which can fire before any explicit create path. The app organization and auth user // actors must exist by the time the adapter needs them. - const appWorkspace = () => - actorClient.workspace.getOrCreate(workspaceKey(APP_SHELL_WORKSPACE_ID), { - createWithInput: APP_SHELL_WORKSPACE_ID, + const appOrganization = () => + actorClient.organization.getOrCreate(organizationKey(APP_SHELL_ORGANIZATION_ID), { + createWithInput: APP_SHELL_ORGANIZATION_ID, }); // getOrCreate is intentional: Better Auth creates user records during OAuth @@ -109,9 +109,9 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin } const email = direct("email"); if (typeof email === "string" && email.length > 0) { - const workspace = await appWorkspace(); - const resolved = await workspace.authFindEmailIndex({ email: email.toLowerCase() }); - return resolveRouteUserId(workspace, resolved); + const organization = await appOrganization(); + const resolved = await organization.authFindEmailIndex({ email: email.toLowerCase() }); + return resolveRouteUserId(organization, resolved); } return null; } @@ -124,12 +124,12 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const sessionId = direct("id") ?? data?.id; const sessionToken = direct("token") ?? 
data?.token; if (typeof sessionId === "string" || typeof sessionToken === "string") { - const workspace = await appWorkspace(); - const resolved = await workspace.authFindSessionIndex({ + const organization = await appOrganization(); + const resolved = await organization.authFindSessionIndex({ ...(typeof sessionId === "string" ? { sessionId } : {}), ...(typeof sessionToken === "string" ? { sessionToken } : {}), }); - return resolveRouteUserId(workspace, resolved); + return resolveRouteUserId(organization, resolved); } return null; } @@ -142,14 +142,14 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const accountRecordId = direct("id") ?? data?.id; const providerId = direct("providerId") ?? data?.providerId; const accountId = direct("accountId") ?? data?.accountId; - const workspace = await appWorkspace(); + const organization = await appOrganization(); if (typeof accountRecordId === "string" && accountRecordId.length > 0) { - const resolved = await workspace.authFindAccountIndex({ id: accountRecordId }); - return resolveRouteUserId(workspace, resolved); + const resolved = await organization.authFindAccountIndex({ id: accountRecordId }); + return resolveRouteUserId(organization, resolved); } if (typeof providerId === "string" && providerId.length > 0 && typeof accountId === "string" && accountId.length > 0) { - const resolved = await workspace.authFindAccountIndex({ providerId, accountId }); - return resolveRouteUserId(workspace, resolved); + const resolved = await organization.authFindAccountIndex({ providerId, accountId }); + return resolveRouteUserId(organization, resolved); } return null; } @@ -157,9 +157,9 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin return null; }; - const ensureWorkspaceVerification = async (method: string, payload: Record) => { - const workspace = await appWorkspace(); - return await workspace[method](payload); + const ensureOrganizationVerification = async (method: 
string, payload: Record) => { + const organization = await appOrganization(); + return await organization[method](payload); }; return { @@ -170,7 +170,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin create: async ({ model, data }) => { const transformed = await transformInput(data, model, "create", true); if (model === "verification") { - return await ensureWorkspaceVerification("authCreateVerification", { data: transformed }); + return await ensureOrganizationVerification("authCreateVerification", { data: transformed }); } const userId = await resolveUserIdForQuery(model, undefined, transformed); @@ -180,17 +180,17 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const userActor = await getAuthUser(userId); const created = await userActor.createAuthRecord({ model, data: transformed }); - const workspace = await appWorkspace(); + const organization = await appOrganization(); if (model === "user" && typeof transformed.email === "string" && transformed.email.length > 0) { - await workspace.authUpsertEmailIndex({ + await organization.authUpsertEmailIndex({ email: transformed.email.toLowerCase(), userId, }); } if (model === "session") { - await workspace.authUpsertSessionIndex({ + await organization.authUpsertSessionIndex({ sessionId: String(created.id), sessionToken: String(created.token), userId, @@ -198,7 +198,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin } if (model === "account") { - await workspace.authUpsertAccountIndex({ + await organization.authUpsertAccountIndex({ id: String(created.id), providerId: String(created.providerId), accountId: String(created.accountId), @@ -212,7 +212,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin findOne: async ({ model, where, join }) => { const transformedWhere = transformWhereClause({ model, where, action: "findOne" }); if (model === "verification") { - return await 
ensureWorkspaceVerification("authFindOneVerification", { where: transformedWhere, join }); + return await ensureOrganizationVerification("authFindOneVerification", { where: transformedWhere, join }); } const userId = await resolveUserIdForQuery(model, transformedWhere); @@ -228,7 +228,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin findMany: async ({ model, where, limit, sortBy, offset, join }) => { const transformedWhere = transformWhereClause({ model, where, action: "findMany" }); if (model === "verification") { - return await ensureWorkspaceVerification("authFindManyVerification", { + return await ensureOrganizationVerification("authFindManyVerification", { where: transformedWhere, limit, sortBy, @@ -240,11 +240,11 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin if (model === "session") { const tokenClause = transformedWhere?.find((entry: any) => entry.field === "token" && entry.operator === "in"); if (tokenClause && Array.isArray(tokenClause.value)) { - const workspace = await appWorkspace(); + const organization = await appOrganization(); const resolved = await Promise.all( (tokenClause.value as string[]).map(async (sessionToken: string) => ({ sessionToken, - route: await workspace.authFindSessionIndex({ sessionToken }), + route: await organization.authFindSessionIndex({ sessionToken }), })), ); const byUser = new Map(); @@ -284,7 +284,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const transformedWhere = transformWhereClause({ model, where, action: "update" }); const transformedUpdate = (await transformInput(update as Record, model, "update", true)) as Record; if (model === "verification") { - return await ensureWorkspaceVerification("authUpdateVerification", { where: transformedWhere, update: transformedUpdate }); + return await ensureOrganizationVerification("authUpdateVerification", { where: transformedWhere, update: transformedUpdate }); } 
const userId = await resolveUserIdForQuery(model, transformedWhere, transformedUpdate); @@ -302,19 +302,19 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin ? await userActor.findOneAuthRecord({ model, where: transformedWhere }) : null; const updated = await userActor.updateAuthRecord({ model, where: transformedWhere, update: transformedUpdate }); - const workspace = await appWorkspace(); + const organization = await appOrganization(); if (model === "user" && updated) { if (before?.email && before.email !== updated.email) { - await workspace.authDeleteEmailIndex({ email: before.email.toLowerCase() }); + await organization.authDeleteEmailIndex({ email: before.email.toLowerCase() }); } if (updated.email) { - await workspace.authUpsertEmailIndex({ email: updated.email.toLowerCase(), userId }); + await organization.authUpsertEmailIndex({ email: updated.email.toLowerCase(), userId }); } } if (model === "session" && updated) { - await workspace.authUpsertSessionIndex({ + await organization.authUpsertSessionIndex({ sessionId: String(updated.id), sessionToken: String(updated.token), userId, @@ -322,7 +322,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin } if (model === "account" && updated) { - await workspace.authUpsertAccountIndex({ + await organization.authUpsertAccountIndex({ id: String(updated.id), providerId: String(updated.providerId), accountId: String(updated.accountId), @@ -337,7 +337,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const transformedWhere = transformWhereClause({ model, where, action: "updateMany" }); const transformedUpdate = (await transformInput(update as Record, model, "update", true)) as Record; if (model === "verification") { - return await ensureWorkspaceVerification("authUpdateManyVerification", { where: transformedWhere, update: transformedUpdate }); + return await ensureOrganizationVerification("authUpdateManyVerification", { 
where: transformedWhere, update: transformedUpdate }); } const userId = await resolveUserIdForQuery(model, transformedWhere, transformedUpdate); @@ -352,7 +352,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin delete: async ({ model, where }) => { const transformedWhere = transformWhereClause({ model, where, action: "delete" }); if (model === "verification") { - await ensureWorkspaceVerification("authDeleteVerification", { where: transformedWhere }); + await ensureOrganizationVerification("authDeleteVerification", { where: transformedWhere }); return; } @@ -362,19 +362,19 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin } const userActor = await getAuthUser(userId); - const workspace = await appWorkspace(); + const organization = await appOrganization(); const before = await userActor.findOneAuthRecord({ model, where: transformedWhere }); await userActor.deleteAuthRecord({ model, where: transformedWhere }); if (model === "session" && before) { - await workspace.authDeleteSessionIndex({ + await organization.authDeleteSessionIndex({ sessionId: before.id, sessionToken: before.token, }); } if (model === "account" && before) { - await workspace.authDeleteAccountIndex({ + await organization.authDeleteAccountIndex({ id: before.id, providerId: before.providerId, accountId: before.accountId, @@ -382,14 +382,14 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin } if (model === "user" && before?.email) { - await workspace.authDeleteEmailIndex({ email: before.email.toLowerCase() }); + await organization.authDeleteEmailIndex({ email: before.email.toLowerCase() }); } }, deleteMany: async ({ model, where }) => { const transformedWhere = transformWhereClause({ model, where, action: "deleteMany" }); if (model === "verification") { - return await ensureWorkspaceVerification("authDeleteManyVerification", { where: transformedWhere }); + return await 
ensureOrganizationVerification("authDeleteManyVerification", { where: transformedWhere }); } if (model === "session") { @@ -398,11 +398,11 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin return 0; } const userActor = await getAuthUser(userId); - const workspace = await appWorkspace(); + const organization = await appOrganization(); const sessions = await userActor.findManyAuthRecords({ model, where: transformedWhere, limit: 5000 }); const deleted = await userActor.deleteManyAuthRecords({ model, where: transformedWhere }); for (const session of sessions) { - await workspace.authDeleteSessionIndex({ + await organization.authDeleteSessionIndex({ sessionId: session.id, sessionToken: session.token, }); @@ -423,7 +423,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin count: async ({ model, where }) => { const transformedWhere = transformWhereClause({ model, where, action: "count" }); if (model === "verification") { - return await ensureWorkspaceVerification("authCountVerification", { where: transformedWhere }); + return await ensureOrganizationVerification("authCountVerification", { where: transformedWhere }); } const userId = await resolveUserIdForQuery(model, transformedWhere); @@ -476,8 +476,8 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin }, async getAuthState(sessionId: string) { - const workspace = await appWorkspace(); - const route = await workspace.authFindSessionIndex({ sessionId }); + const organization = await appOrganization(); + const route = await organization.authFindSessionIndex({ sessionId }); if (!route?.userId) { return null; } diff --git a/foundry/packages/backend/src/services/foundry-paths.ts b/foundry/packages/backend/src/services/foundry-paths.ts deleted file mode 100644 index d56c38d..0000000 --- a/foundry/packages/backend/src/services/foundry-paths.ts +++ /dev/null @@ -1,20 +0,0 @@ -import type { AppConfig } from 
"@sandbox-agent/foundry-shared"; -import { homedir } from "node:os"; -import { dirname, join, resolve } from "node:path"; - -function expandPath(input: string): string { - if (input.startsWith("~/")) { - return `${homedir()}/${input.slice(2)}`; - } - return input; -} - -export function foundryDataDir(config: AppConfig): string { - // Keep data collocated with the backend DB by default. - const dbPath = expandPath(config.backend.dbPath); - return resolve(dirname(dbPath)); -} - -export function foundryRepoClonePath(config: AppConfig, workspaceId: string, repoId: string): string { - return resolve(join(foundryDataDir(config), "repos", workspaceId, repoId)); -} diff --git a/foundry/packages/backend/src/services/github-auth.ts b/foundry/packages/backend/src/services/github-auth.ts index 8249927..ebbbce9 100644 --- a/foundry/packages/backend/src/services/github-auth.ts +++ b/foundry/packages/backend/src/services/github-auth.ts @@ -1,20 +1,20 @@ -import { getOrCreateWorkspace } from "../actors/handles.js"; -import { APP_SHELL_WORKSPACE_ID } from "../actors/workspace/app-shell.js"; +import { getOrCreateOrganization } from "../actors/handles.js"; +import { APP_SHELL_ORGANIZATION_ID } from "../actors/organization/app-shell.js"; export interface ResolvedGithubAuth { githubToken: string; scopes: string[]; } -export async function resolveWorkspaceGithubAuth(c: any, workspaceId: string): Promise { - if (!workspaceId || workspaceId === APP_SHELL_WORKSPACE_ID) { +export async function resolveOrganizationGithubAuth(c: any, organizationId: string): Promise { + if (!organizationId || organizationId === APP_SHELL_ORGANIZATION_ID) { return null; } try { - const appWorkspace = await getOrCreateWorkspace(c, APP_SHELL_WORKSPACE_ID); - const resolved = await appWorkspace.resolveAppGithubToken({ - organizationId: workspaceId, + const appOrganization = await getOrCreateOrganization(c, APP_SHELL_ORGANIZATION_ID); + const resolved = await appOrganization.resolveAppGithubToken({ + 
organizationId: organizationId, requireRepoScope: true, }); if (!resolved?.accessToken) { diff --git a/foundry/packages/backend/src/services/repo-git-lock.ts b/foundry/packages/backend/src/services/repo-git-lock.ts deleted file mode 100644 index 971b95c..0000000 --- a/foundry/packages/backend/src/services/repo-git-lock.ts +++ /dev/null @@ -1,45 +0,0 @@ -interface RepoLockState { - locked: boolean; - waiters: Array<() => void>; -} - -const repoLocks = new Map(); - -async function acquireRepoLock(repoPath: string): Promise<() => void> { - let state = repoLocks.get(repoPath); - if (!state) { - state = { locked: false, waiters: [] }; - repoLocks.set(repoPath, state); - } - - if (!state.locked) { - state.locked = true; - return () => releaseRepoLock(repoPath, state); - } - - await new Promise((resolve) => { - state!.waiters.push(resolve); - }); - - return () => releaseRepoLock(repoPath, state!); -} - -function releaseRepoLock(repoPath: string, state: RepoLockState): void { - const next = state.waiters.shift(); - if (next) { - next(); - return; - } - - state.locked = false; - repoLocks.delete(repoPath); -} - -export async function withRepoGitLock(repoPath: string, fn: () => Promise): Promise { - const release = await acquireRepoLock(repoPath); - try { - return await fn(); - } finally { - release(); - } -} diff --git a/foundry/packages/backend/src/services/repo.ts b/foundry/packages/backend/src/services/repo.ts index 910f4e8..fb673cc 100644 --- a/foundry/packages/backend/src/services/repo.ts +++ b/foundry/packages/backend/src/services/repo.ts @@ -82,3 +82,30 @@ export function repoLabelFromRemote(remoteUrl: string): string { return basename(trimmed.replace(/\.git$/i, "")); } + +export function githubRepoFullNameFromRemote(remoteUrl: string): string | null { + const normalized = normalizeRemoteUrl(remoteUrl); + if (!normalized) { + return null; + } + + try { + const url = new URL(normalized); + const hostname = url.hostname.replace(/^www\./i, "").toLowerCase(); + if 
(hostname !== "github.com") { + return null; + } + const parts = url.pathname.replace(/\/+$/, "").split("/").filter(Boolean); + if (parts.length < 2) { + return null; + } + const owner = parts[0]?.trim(); + const repo = (parts[1] ?? "").replace(/\.git$/i, "").trim(); + if (!owner || !repo) { + return null; + } + return `${owner}/${repo}`; + } catch { + return null; + } +} diff --git a/foundry/packages/backend/test/git-spice.test.ts b/foundry/packages/backend/test/git-spice.test.ts deleted file mode 100644 index d0b0455..0000000 --- a/foundry/packages/backend/test/git-spice.test.ts +++ /dev/null @@ -1,129 +0,0 @@ -import { chmodSync, mkdtempSync, writeFileSync, readFileSync } from "node:fs"; -import { tmpdir } from "node:os"; -import { join } from "node:path"; -import { describe, expect, it } from "vitest"; -import { gitSpiceAvailable, gitSpiceListStack, gitSpiceRestackSubtree } from "../src/integrations/git-spice/index.js"; - -function makeTempDir(prefix: string): string { - return mkdtempSync(join(tmpdir(), prefix)); -} - -function writeScript(path: string, body: string): void { - writeFileSync(path, body, "utf8"); - chmodSync(path, 0o755); -} - -async function withEnv(updates: Record, fn: () => Promise): Promise { - const previous = new Map(); - for (const [key, value] of Object.entries(updates)) { - previous.set(key, process.env[key]); - if (value == null) { - delete process.env[key]; - } else { - process.env[key] = value; - } - } - - try { - return await fn(); - } finally { - for (const [key, value] of previous) { - if (value == null) { - delete process.env[key]; - } else { - process.env[key] = value; - } - } - } -} - -describe("git-spice integration", () => { - it("parses stack rows from mixed/malformed json output", async () => { - const repoPath = makeTempDir("hf-git-spice-parse-"); - const scriptPath = join(repoPath, "fake-git-spice.sh"); - writeScript( - scriptPath, - [ - "#!/bin/sh", - 'if [ \"$1\" = \"--help\" ]; then', - " exit 0", - "fi", - 'if [ 
\"$1\" = \"log\" ]; then', - " echo 'noise line'", - ' echo \'{"branch":"feature/a","parent":"main"}\'', - " echo '{bad json'", - ' echo \'{"name":"feature/b","parentBranch":"feature/a"}\'', - ' echo \'{"name":"feature/a","parent":"main"}\'', - " exit 0", - "fi", - "exit 1", - ].join("\n"), - ); - - await withEnv({ HF_GIT_SPICE_BIN: scriptPath }, async () => { - const rows = await gitSpiceListStack(repoPath); - expect(rows).toEqual([ - { branchName: "feature/a", parentBranch: "main" }, - { branchName: "feature/b", parentBranch: "feature/a" }, - ]); - }); - }); - - it("falls back across versioned subtree restack command variants", async () => { - const repoPath = makeTempDir("hf-git-spice-fallback-"); - const scriptPath = join(repoPath, "fake-git-spice.sh"); - const logPath = join(repoPath, "calls.log"); - writeScript( - scriptPath, - [ - "#!/bin/sh", - 'echo \"$*\" >> \"$SPICE_LOG_PATH\"', - 'if [ \"$1\" = \"--help\" ]; then', - " exit 0", - "fi", - 'if [ \"$1\" = \"upstack\" ] && [ \"$2\" = \"restack\" ]; then', - " exit 1", - "fi", - 'if [ \"$1\" = \"branch\" ] && [ \"$2\" = \"restack\" ] && [ \"$5\" = \"--no-prompt\" ]; then', - " exit 0", - "fi", - "exit 1", - ].join("\n"), - ); - - await withEnv( - { - HF_GIT_SPICE_BIN: scriptPath, - SPICE_LOG_PATH: logPath, - }, - async () => { - await gitSpiceRestackSubtree(repoPath, "feature/a"); - }, - ); - - const lines = readFileSync(logPath, "utf8") - .trim() - .split("\n") - .filter((line) => line.trim().length > 0); - - expect(lines).toContain("upstack restack --branch feature/a --no-prompt"); - expect(lines).toContain("upstack restack --branch feature/a"); - expect(lines).toContain("branch restack --branch feature/a --no-prompt"); - expect(lines).not.toContain("branch restack --branch feature/a"); - }); - - it("reports unavailable when explicit binary and PATH are missing", async () => { - const repoPath = makeTempDir("hf-git-spice-missing-"); - - await withEnv( - { - HF_GIT_SPICE_BIN: 
"/non-existent/hf-git-spice-binary", - PATH: "/non-existent/bin", - }, - async () => { - const available = await gitSpiceAvailable(repoPath); - expect(available).toBe(false); - }, - ); - }); -}); diff --git a/foundry/packages/backend/test/git-validate-remote.test.ts b/foundry/packages/backend/test/git-validate-remote.test.ts deleted file mode 100644 index 47849a2..0000000 --- a/foundry/packages/backend/test/git-validate-remote.test.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { afterEach, beforeEach, describe, expect, test } from "vitest"; -import { mkdtempSync, mkdirSync, writeFileSync } from "node:fs"; -import { tmpdir } from "node:os"; -import { join, resolve } from "node:path"; -import { promisify } from "node:util"; -import { execFile } from "node:child_process"; -import { validateRemote } from "../src/integrations/git/index.js"; - -const execFileAsync = promisify(execFile); - -describe("validateRemote", () => { - const originalCwd = process.cwd(); - - beforeEach(() => { - process.chdir(originalCwd); - }); - - afterEach(() => { - process.chdir(originalCwd); - }); - - test("ignores broken worktree gitdir in current directory", async () => { - const sandboxDir = mkdtempSync(join(tmpdir(), "validate-remote-cwd-")); - const brokenRepoDir = resolve(sandboxDir, "broken-worktree"); - const remoteRepoDir = resolve(sandboxDir, "remote"); - - mkdirSync(brokenRepoDir, { recursive: true }); - writeFileSync(resolve(brokenRepoDir, ".git"), "gitdir: /definitely/missing/worktree\n", "utf8"); - await execFileAsync("git", ["init", remoteRepoDir]); - await execFileAsync("git", ["-C", remoteRepoDir, "config", "user.name", "Foundry Test"]); - await execFileAsync("git", ["-C", remoteRepoDir, "config", "user.email", "test@example.com"]); - writeFileSync(resolve(remoteRepoDir, "README.md"), "# test\n", "utf8"); - await execFileAsync("git", ["-C", remoteRepoDir, "add", "README.md"]); - await execFileAsync("git", ["-C", remoteRepoDir, "commit", "-m", "init"]); - - 
process.chdir(brokenRepoDir); - - await expect(validateRemote(remoteRepoDir)).resolves.toBeUndefined(); - }); -}); diff --git a/foundry/packages/backend/test/helpers/test-context.ts b/foundry/packages/backend/test/helpers/test-context.ts index 31b1965..be169a8 100644 --- a/foundry/packages/backend/test/helpers/test-context.ts +++ b/foundry/packages/backend/test/helpers/test-context.ts @@ -9,7 +9,7 @@ export function createTestConfig(overrides?: Partial): AppConfig { return ConfigSchema.parse({ auto_submit: true, notify: ["terminal" as const], - workspace: { default: "default" }, + organization: { default: "default" }, backend: { host: "127.0.0.1", port: 7741, @@ -19,7 +19,7 @@ export function createTestConfig(overrides?: Partial): AppConfig { backup_interval_secs: 3600, backup_retention_days: 7, }, - providers: { + sandboxProviders: { local: {}, e2b: {}, }, diff --git a/foundry/packages/backend/test/helpers/test-driver.ts b/foundry/packages/backend/test/helpers/test-driver.ts index 505bcc4..39975e5 100644 --- a/foundry/packages/backend/test/helpers/test-driver.ts +++ b/foundry/packages/backend/test/helpers/test-driver.ts @@ -1,47 +1,15 @@ -import type { BackendDriver, GitDriver, GithubDriver, StackDriver, TmuxDriver } from "../../src/driver.js"; +import type { BackendDriver, GithubDriver, TmuxDriver } from "../../src/driver.js"; export function createTestDriver(overrides?: Partial): BackendDriver { return { - git: overrides?.git ?? createTestGitDriver(), - stack: overrides?.stack ?? createTestStackDriver(), github: overrides?.github ?? createTestGithubDriver(), tmux: overrides?.tmux ?? 
createTestTmuxDriver(), }; } -export function createTestGitDriver(overrides?: Partial): GitDriver { - return { - validateRemote: async () => {}, - ensureCloned: async () => {}, - fetch: async () => {}, - listRemoteBranches: async () => [], - remoteDefaultBaseRef: async () => "origin/main", - revParse: async () => "abc1234567890", - ensureRemoteBranch: async () => {}, - diffStatForBranch: async () => "+0/-0", - conflictsWithMain: async () => false, - ...overrides, - }; -} - -export function createTestStackDriver(overrides?: Partial): StackDriver { - return { - available: async () => false, - listStack: async () => [], - syncRepo: async () => {}, - restackRepo: async () => {}, - restackSubtree: async () => {}, - rebaseBranch: async () => {}, - reparentBranch: async () => {}, - trackBranch: async () => {}, - ...overrides, - }; -} - export function createTestGithubDriver(overrides?: Partial): GithubDriver { return { - listPullRequests: async () => [], - createPr: async (_repoPath, _headBranch, _title) => ({ + createPr: async (_repoFullName, _headBranch, _title) => ({ number: 1, url: `https://github.com/test/repo/pull/1`, }), diff --git a/foundry/packages/backend/test/keys.test.ts b/foundry/packages/backend/test/keys.test.ts index 28bf1dc..ac5f3c8 100644 --- a/foundry/packages/backend/test/keys.test.ts +++ b/foundry/packages/backend/test/keys.test.ts @@ -1,20 +1,19 @@ import { describe, expect, it } from "vitest"; -import { githubDataKey, historyKey, projectBranchSyncKey, projectKey, taskKey, taskSandboxKey, workspaceKey } from "../src/actors/keys.js"; +import { githubDataKey, historyKey, organizationKey, repositoryKey, taskKey, taskSandboxKey } from "../src/actors/keys.js"; describe("actor keys", () => { - it("prefixes every key with workspace namespace", () => { + it("prefixes every key with organization namespace", () => { const keys = [ - workspaceKey("default"), - projectKey("default", "repo"), + organizationKey("default"), + repositoryKey("default", "repo"), 
taskKey("default", "repo", "task"), taskSandboxKey("default", "sbx"), historyKey("default", "repo"), githubDataKey("default"), - projectBranchSyncKey("default", "repo"), ]; for (const key of keys) { - expect(key[0]).toBe("ws"); + expect(key[0]).toBe("org"); expect(key[1]).toBe("default"); } }); diff --git a/foundry/packages/backend/test/workspace-isolation.test.ts b/foundry/packages/backend/test/organization-isolation.test.ts similarity index 63% rename from foundry/packages/backend/test/workspace-isolation.test.ts rename to foundry/packages/backend/test/organization-isolation.test.ts index fa004c7..fcd1950 100644 --- a/foundry/packages/backend/test/workspace-isolation.test.ts +++ b/foundry/packages/backend/test/organization-isolation.test.ts @@ -6,8 +6,9 @@ import { execFileSync } from "node:child_process"; import { setTimeout as delay } from "node:timers/promises"; import { describe, expect, it } from "vitest"; import { setupTest } from "rivetkit/test"; -import { workspaceKey } from "../src/actors/keys.js"; +import { organizationKey } from "../src/actors/keys.js"; import { registry } from "../src/actors/index.js"; +import { repoIdFromRemote } from "../src/services/repo.js"; import { createTestDriver } from "./helpers/test-driver.js"; import { createTestRuntimeContext } from "./helpers/test-context.js"; @@ -24,59 +25,60 @@ function createRepo(): { repoPath: string } { return { repoPath }; } -async function waitForWorkspaceRows(ws: any, workspaceId: string, expectedCount: number) { +async function waitForOrganizationRows(ws: any, organizationId: string, expectedCount: number) { for (let attempt = 0; attempt < 40; attempt += 1) { - const rows = await ws.listTasks({ workspaceId }); + const rows = await ws.listTasks({ organizationId }); if (rows.length >= expectedCount) { return rows; } await delay(50); } - return ws.listTasks({ workspaceId }); + return ws.listTasks({ organizationId }); } -describe("workspace isolation", () => { - 
it.skipIf(!runActorIntegration)("keeps task lists isolated by workspace", async (t) => { +describe("organization isolation", () => { + it.skipIf(!runActorIntegration)("keeps task lists isolated by organization", async (t) => { const testDriver = createTestDriver(); createTestRuntimeContext(testDriver); const { client } = await setupTest(t, registry); - const wsA = await client.workspace.getOrCreate(workspaceKey("alpha"), { + const wsA = await client.organization.getOrCreate(organizationKey("alpha"), { createWithInput: "alpha", }); - const wsB = await client.workspace.getOrCreate(workspaceKey("beta"), { + const wsB = await client.organization.getOrCreate(organizationKey("beta"), { createWithInput: "beta", }); const { repoPath } = createRepo(); - const repoA = await wsA.addRepo({ workspaceId: "alpha", remoteUrl: repoPath }); - const repoB = await wsB.addRepo({ workspaceId: "beta", remoteUrl: repoPath }); + const repoId = repoIdFromRemote(repoPath); + await wsA.applyGithubRepositoryProjection({ repoId, remoteUrl: repoPath }); + await wsB.applyGithubRepositoryProjection({ repoId, remoteUrl: repoPath }); await wsA.createTask({ - workspaceId: "alpha", - repoId: repoA.repoId, + organizationId: "alpha", + repoId, task: "task A", - providerId: "local", + sandboxProviderId: "local", explicitBranchName: "feature/a", explicitTitle: "A", }); await wsB.createTask({ - workspaceId: "beta", - repoId: repoB.repoId, + organizationId: "beta", + repoId, task: "task B", - providerId: "local", + sandboxProviderId: "local", explicitBranchName: "feature/b", explicitTitle: "B", }); - const aRows = await waitForWorkspaceRows(wsA, "alpha", 1); - const bRows = await waitForWorkspaceRows(wsB, "beta", 1); + const aRows = await waitForOrganizationRows(wsA, "alpha", 1); + const bRows = await waitForOrganizationRows(wsB, "beta", 1); expect(aRows.length).toBe(1); expect(bRows.length).toBe(1); - expect(aRows[0]?.workspaceId).toBe("alpha"); - expect(bRows[0]?.workspaceId).toBe("beta"); + 
expect(aRows[0]?.organizationId).toBe("alpha"); + expect(bRows[0]?.organizationId).toBe("beta"); expect(aRows[0]?.taskId).not.toBe(bRows[0]?.taskId); }); }); diff --git a/foundry/packages/backend/test/workspace-star-sandbox-agent-repo.test.ts b/foundry/packages/backend/test/organization-star-sandbox-agent-repo.test.ts similarity index 80% rename from foundry/packages/backend/test/workspace-star-sandbox-agent-repo.test.ts rename to foundry/packages/backend/test/organization-star-sandbox-agent-repo.test.ts index 8eabb99..b3a2410 100644 --- a/foundry/packages/backend/test/workspace-star-sandbox-agent-repo.test.ts +++ b/foundry/packages/backend/test/organization-star-sandbox-agent-repo.test.ts @@ -1,14 +1,14 @@ // @ts-nocheck import { describe, expect, it } from "vitest"; import { setupTest } from "rivetkit/test"; -import { workspaceKey } from "../src/actors/keys.js"; +import { organizationKey } from "../src/actors/keys.js"; import { registry } from "../src/actors/index.js"; import { createTestDriver } from "./helpers/test-driver.js"; import { createTestRuntimeContext } from "./helpers/test-context.js"; const runActorIntegration = process.env.HF_ENABLE_ACTOR_INTEGRATION_TESTS === "1"; -describe("workspace star sandbox agent repo", () => { +describe("organization star sandbox agent repo", () => { it.skipIf(!runActorIntegration)("stars the sandbox agent repo through the github driver", async (t) => { const calls: string[] = []; const testDriver = createTestDriver({ @@ -26,11 +26,11 @@ describe("workspace star sandbox agent repo", () => { createTestRuntimeContext(testDriver); const { client } = await setupTest(t, registry); - const ws = await client.workspace.getOrCreate(workspaceKey("alpha"), { + const ws = await client.organization.getOrCreate(organizationKey("alpha"), { createWithInput: "alpha", }); - const result = await ws.starSandboxAgentRepo({ workspaceId: "alpha" }); + const result = await ws.starSandboxAgentRepo({ organizationId: "alpha" }); 
expect(calls).toEqual(["rivet-dev/sandbox-agent"]); expect(result.repo).toBe("rivet-dev/sandbox-agent"); diff --git a/foundry/packages/backend/test/sandbox-config.test.ts b/foundry/packages/backend/test/sandbox-config.test.ts index 0b53f03..354f794 100644 --- a/foundry/packages/backend/test/sandbox-config.test.ts +++ b/foundry/packages/backend/test/sandbox-config.test.ts @@ -6,7 +6,7 @@ function makeConfig(overrides?: Partial): AppConfig { return ConfigSchema.parse({ auto_submit: true, notify: ["terminal"], - workspace: { default: "default" }, + organization: { default: "default" }, backend: { host: "127.0.0.1", port: 7741, @@ -16,7 +16,7 @@ function makeConfig(overrides?: Partial): AppConfig { backup_interval_secs: 3600, backup_retention_days: 7, }, - providers: { + sandboxProviders: { local: {}, e2b: {}, }, @@ -33,7 +33,7 @@ describe("sandbox config", () => { it("prefers e2b when an api key is configured", () => { const config = makeConfig({ - providers: { + sandboxProviders: { local: {}, e2b: { apiKey: "test-token" }, }, diff --git a/foundry/packages/backend/test/stack-model.test.ts b/foundry/packages/backend/test/stack-model.test.ts deleted file mode 100644 index ca0a79f..0000000 --- a/foundry/packages/backend/test/stack-model.test.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { describe, expect, it } from "vitest"; -import { normalizeParentBranch, parentLookupFromStack, sortBranchesForOverview } from "../src/actors/project/stack-model.js"; - -describe("stack-model", () => { - it("normalizes self-parent references to null", () => { - expect(normalizeParentBranch("feature/a", "feature/a")).toBeNull(); - expect(normalizeParentBranch("feature/a", "main")).toBe("main"); - expect(normalizeParentBranch("feature/a", null)).toBeNull(); - }); - - it("builds parent lookup with sanitized entries", () => { - const lookup = parentLookupFromStack([ - { branchName: "feature/a", parentBranch: "main" }, - { branchName: "feature/b", parentBranch: "feature/b" }, - { branchName: " ", 
parentBranch: "main" }, - ]); - - expect(lookup.get("feature/a")).toBe("main"); - expect(lookup.get("feature/b")).toBeNull(); - expect(lookup.has(" ")).toBe(false); - }); - - it("orders branches by graph depth and handles cycles safely", () => { - const rows = sortBranchesForOverview([ - { branchName: "feature/b", parentBranch: "feature/a", updatedAt: 200 }, - { branchName: "feature/a", parentBranch: "main", updatedAt: 100 }, - { branchName: "main", parentBranch: null, updatedAt: 50 }, - { branchName: "cycle-a", parentBranch: "cycle-b", updatedAt: 300 }, - { branchName: "cycle-b", parentBranch: "cycle-a", updatedAt: 250 }, - ]); - - expect(rows.map((row) => row.branchName)).toEqual(["main", "feature/a", "feature/b", "cycle-a", "cycle-b"]); - }); -}); diff --git a/foundry/packages/backend/test/workbench-unread.test.ts b/foundry/packages/backend/test/workbench-unread.test.ts index aafc178..fc94e97 100644 --- a/foundry/packages/backend/test/workbench-unread.test.ts +++ b/foundry/packages/backend/test/workbench-unread.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from "vitest"; -import { shouldMarkSessionUnreadForStatus, shouldRecreateSessionForModelChange } from "../src/actors/task/workbench.js"; +import { requireSendableSessionMeta, shouldMarkSessionUnreadForStatus, shouldRecreateSessionForModelChange } from "../src/actors/task/workbench.js"; describe("workbench unread status transitions", () => { it("marks unread when a running session first becomes idle", () => { @@ -57,3 +57,30 @@ describe("workbench model changes", () => { ).toBe(false); }); }); + +describe("workbench send readiness", () => { + it("rejects unknown sessions", () => { + expect(() => requireSendableSessionMeta(null, "session-1")).toThrow("Unknown workbench session: session-1"); + }); + + it("rejects pending sessions", () => { + expect(() => + requireSendableSessionMeta( + { + status: "pending_session_create", + sandboxSessionId: null, + }, + "session-2", + ), + ).toThrow("Session is not 
ready (status: pending_session_create). Wait for session provisioning to complete."); + }); + + it("accepts ready sessions with a sandbox session id", () => { + const meta = { + status: "ready", + sandboxSessionId: "session-1", + }; + + expect(requireSendableSessionMeta(meta, "session-3")).toBe(meta); + }); +}); diff --git a/foundry/packages/cli/src/index.ts b/foundry/packages/cli/src/index.ts index 4043f32..fdf5a19 100644 --- a/foundry/packages/cli/src/index.ts +++ b/foundry/packages/cli/src/index.ts @@ -8,7 +8,7 @@ import { ensureBackendRunning, getBackendStatus, parseBackendPort, stopBackend } import { writeStderr, writeStdout } from "./io.js"; import { openEditorForTask } from "./task-editor.js"; import { spawnCreateTmuxWindow } from "./tmux.js"; -import { loadConfig, resolveWorkspace, saveConfig } from "./workspace/config.js"; +import { loadConfig, resolveOrganization, saveConfig } from "./organization/config.js"; async function ensureBunRuntime(): Promise { if (typeof (globalThis as { Bun?: unknown }).Bun !== "undefined") { @@ -41,9 +41,9 @@ async function ensureBunRuntime(): Promise { throw new Error("hf requires Bun runtime. 
Set HF_BUN or install Bun at ~/.bun/bin/bun."); } -async function runTuiCommand(config: ReturnType, workspaceId: string): Promise { +async function runTuiCommand(config: ReturnType, organizationId: string): Promise { const mod = await import("./tui.js"); - await mod.runTui(config, workspaceId); + await mod.runTui(config, organizationId); } function readOption(args: string[], flag: string): string | undefined { @@ -87,6 +87,92 @@ function positionals(args: string[]): string[] { return out; } +function normalizeRepoSelector(value: string): string { + let normalized = value.trim(); + if (!normalized) { + return ""; + } + + normalized = normalized.replace(/\/+$/, ""); + if (/^[A-Za-z0-9_.-]+\/[A-Za-z0-9_.-]+$/.test(normalized)) { + return `https://github.com/${normalized}.git`; + } + + if (/^(?:www\.)?github\.com\/.+/i.test(normalized)) { + normalized = `https://${normalized.replace(/^www\./i, "")}`; + } + + try { + if (/^https?:\/\//i.test(normalized)) { + const url = new URL(normalized); + const hostname = url.hostname.replace(/^www\./i, ""); + if (hostname.toLowerCase() === "github.com") { + const parts = url.pathname.split("/").filter(Boolean); + if (parts.length >= 2) { + return `${url.protocol}//${hostname}/${parts[0]}/${(parts[1] ?? "").replace(/\.git$/i, "")}.git`; + } + } + url.search = ""; + url.hash = ""; + return url.toString().replace(/\/+$/, ""); + } + } catch { + // Keep the selector as-is for matching below. + } + + return normalized; +} + +function githubRepoFullNameFromSelector(value: string): string | null { + const normalized = normalizeRepoSelector(value); + try { + const url = new URL(normalized); + if (url.hostname.replace(/^www\./i, "").toLowerCase() !== "github.com") { + return null; + } + const parts = url.pathname.replace(/\/+$/, "").split("/").filter(Boolean); + if (parts.length < 2) { + return null; + } + return `${parts[0]}/${(parts[1] ?? 
"").replace(/\.git$/i, "")}`; + } catch { + return null; + } +} + +async function resolveImportedRepo( + client: ReturnType, + organizationId: string, + repoSelector: string, +): Promise>[number]> { + const selector = repoSelector.trim(); + if (!selector) { + throw new Error("Missing required --repo "); + } + + const normalizedSelector = normalizeRepoSelector(selector); + const selectorFullName = githubRepoFullNameFromSelector(selector); + const repos = await client.listRepos(organizationId); + const match = repos.find((repo) => { + if (repo.repoId === selector) { + return true; + } + if (normalizeRepoSelector(repo.remoteUrl) === normalizedSelector) { + return true; + } + const repoFullName = githubRepoFullNameFromSelector(repo.remoteUrl); + return Boolean(selectorFullName && repoFullName && repoFullName === selectorFullName); + }); + + if (!match) { + throw new Error( + `Repo not available in organization ${organizationId}: ${repoSelector}. Create it in GitHub first, then sync repos in Foundry before running hf create.`, + ); + } + + return match; +} + function printUsage(): void { writeStdout(` Usage: @@ -94,22 +180,22 @@ Usage: hf backend stop [--host HOST] [--port PORT] hf backend status hf backend inspect - hf status [--workspace WS] [--json] - hf history [--workspace WS] [--limit N] [--branch NAME] [--task ID] [--json] - hf workspace use - hf tui [--workspace WS] + hf status [--organization ORG] [--json] + hf history [--organization ORG] [--limit N] [--branch NAME] [--task ID] [--json] + hf organization use + hf tui [--organization ORG] - hf create [task] [--workspace WS] --repo [--name NAME|--branch NAME] [--title TITLE] [--agent claude|codex] [--on BRANCH] - hf list [--workspace WS] [--format table|json] [--full] - hf switch [task-id | -] [--workspace WS] - hf attach [--workspace WS] - hf merge [--workspace WS] - hf archive [--workspace WS] - hf push [--workspace WS] - hf sync [--workspace WS] - hf kill [--workspace WS] [--delete-branch] [--abandon] - hf 
prune [--workspace WS] [--dry-run] [--yes] - hf statusline [--workspace WS] [--format table|claude-code] + hf create [task] [--organization ORG] --repo [--name NAME|--branch NAME] [--title TITLE] [--agent claude|codex] [--on BRANCH] + hf list [--organization ORG] [--format table|json] [--full] + hf switch [task-id | -] [--organization ORG] + hf attach [--organization ORG] + hf merge [--organization ORG] + hf archive [--organization ORG] + hf push [--organization ORG] + hf sync [--organization ORG] + hf kill [--organization ORG] [--delete-branch] [--abandon] + hf prune [--organization ORG] [--dry-run] [--yes] + hf statusline [--organization ORG] [--format table|claude-code] hf db path hf db nuke @@ -123,19 +209,19 @@ Tips: function printStatusUsage(): void { writeStdout(` Usage: - hf status [--workspace WS] [--json] + hf status [--organization ORG] [--json] Text Output: - workspace= + organization= backend running= pid= version= tasks total= status queued= running= idle= archived= killed= error= - providers = ... - providers - + sandboxProviders = ... + sandboxProviders - JSON Output: { - "workspaceId": "default", + "organizationId": "default", "backend": { ...backend status object... 
}, "tasks": { "total": 4, @@ -149,7 +235,7 @@ JSON Output: function printHistoryUsage(): void { writeStdout(` Usage: - hf history [--workspace WS] [--limit N] [--branch NAME] [--task ID] [--json] + hf history [--organization ORG] [--limit N] [--branch NAME] [--task ID] [--json] Text Output: \t\t\t @@ -164,18 +250,23 @@ JSON Output: [ { "id": "...", - "workspaceId": "default", + "organizationId": "default", "kind": "task.created", "taskId": "...", "repoId": "...", "branchName": "feature/foo", - "payloadJson": "{\\"providerId\\":\\"local\\"}", + "payloadJson": "{\\"sandboxProviderId\\":\\"local\\"}", "createdAt": 1770607522229 } ] `); } +async function listDetailedTasks(client: ReturnType, organizationId: string): Promise { + const rows = await client.listTasks(organizationId); + return await Promise.all(rows.map(async (row) => await client.getTask(organizationId, row.taskId))); +} + async function handleBackend(args: string[]): Promise { const sub = args[0] ?? "start"; const config = loadConfig(); @@ -232,38 +323,38 @@ async function handleBackend(args: string[]): Promise { throw new Error(`Unknown backend subcommand: ${sub}`); } -async function handleWorkspace(args: string[]): Promise { +async function handleOrganization(args: string[]): Promise { const sub = args[0]; if (sub !== "use") { - throw new Error("Usage: hf workspace use "); + throw new Error("Usage: hf organization use "); } const name = args[1]; if (!name) { - throw new Error("Missing workspace name"); + throw new Error("Missing organization name"); } const config = loadConfig(); - config.workspace.default = name; + config.organization.default = name; saveConfig(config); const client = createBackendClientFromConfig(config); try { - await client.useWorkspace(name); + await client.useOrganization(name); } catch { // Backend may not be running yet. Config is already updated. 
} - writeStdout(`workspace=${name}`); + writeStdout(`organization=${name}`); } async function handleList(args: string[]): Promise { const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); const format = readOption(args, "--format") ?? "table"; const full = hasFlag(args, "--full"); const client = createBackendClientFromConfig(config); - const rows = await client.listTasks(workspaceId); + const rows = await listDetailedTasks(client, organizationId); if (format === "json") { writeStdout(JSON.stringify(rows, null, 2)); @@ -277,10 +368,10 @@ async function handleList(args: string[]): Promise { for (const row of rows) { const age = formatRelativeAge(row.updatedAt); - let line = `${row.taskId}\t${row.branchName}\t${row.status}\t${row.providerId}\t${age}`; + let line = `${row.taskId}\t${row.branchName}\t${row.status}\t${row.sandboxProviderId}\t${age}`; if (full) { - const task = row.task.length > 60 ? `${row.task.slice(0, 57)}...` : row.task; - line += `\t${row.title}\t${task}\t${row.activeSessionId ?? "-"}\t${row.activeSandboxId ?? "-"}`; + const preview = row.task.length > 60 ? `${row.task.slice(0, 57)}...` : row.task; + line += `\t${row.title}\t${preview}\t${row.activeSessionId ?? "-"}\t${row.activeSandboxId ?? 
"-"}`; } writeStdout(line); } @@ -292,9 +383,9 @@ async function handlePush(args: string[]): Promise { throw new Error("Missing task id for push"); } const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); const client = createBackendClientFromConfig(config); - await client.runAction(workspaceId, taskId, "push"); + await client.runAction(organizationId, taskId, "push"); writeStdout("ok"); } @@ -304,9 +395,9 @@ async function handleSync(args: string[]): Promise { throw new Error("Missing task id for sync"); } const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); const client = createBackendClientFromConfig(config); - await client.runAction(workspaceId, taskId, "sync"); + await client.runAction(organizationId, taskId, "sync"); writeStdout("ok"); } @@ -316,7 +407,7 @@ async function handleKill(args: string[]): Promise { throw new Error("Missing task id for kill"); } const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); const deleteBranch = hasFlag(args, "--delete-branch"); const abandon = hasFlag(args, "--abandon"); @@ -328,17 +419,17 @@ async function handleKill(args: string[]): Promise { } const client = createBackendClientFromConfig(config); - await client.runAction(workspaceId, taskId, "kill"); + await client.runAction(organizationId, taskId, "kill"); writeStdout("ok"); } async function handlePrune(args: string[]): Promise { const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); const dryRun = 
hasFlag(args, "--dry-run"); const yes = hasFlag(args, "--yes"); const client = createBackendClientFromConfig(config); - const rows = await client.listTasks(workspaceId); + const rows = await listDetailedTasks(client, organizationId); const prunable = rows.filter((r) => r.status === "archived" || r.status === "killed"); if (prunable.length === 0) { @@ -366,10 +457,10 @@ async function handlePrune(args: string[]): Promise { async function handleStatusline(args: string[]): Promise { const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); const format = readOption(args, "--format") ?? "table"; const client = createBackendClientFromConfig(config); - const rows = await client.listTasks(workspaceId); + const rows = await listDetailedTasks(client, organizationId); const summary = summarizeTasks(rows); const running = summary.byStatus.running; const idle = summary.byStatus.idle; @@ -402,7 +493,7 @@ async function handleDb(args: string[]): Promise { async function waitForTaskReady( client: ReturnType, - workspaceId: string, + organizationId: string, taskId: string, timeoutMs: number, ): Promise { @@ -410,7 +501,7 @@ async function waitForTaskReady( let delayMs = 250; for (;;) { - const record = await client.getTask(workspaceId, taskId); + const record = await client.getTask(organizationId, taskId); const hasName = Boolean(record.branchName && record.title); const hasSandbox = Boolean(record.activeSandboxId); @@ -432,11 +523,11 @@ async function waitForTaskReady( async function handleCreate(args: string[]): Promise { const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); - const repoRemote = readOption(args, "--repo"); - if (!repoRemote) { - throw new Error("Missing required --repo "); + 
const repoSelector = readOption(args, "--repo"); + if (!repoSelector) { + throw new Error("Missing required --repo "); } const explicitBranchName = readOption(args, "--name") ?? readOption(args, "--branch"); const explicitTitle = readOption(args, "--title"); @@ -446,15 +537,15 @@ async function handleCreate(args: string[]): Promise { const onBranch = readOption(args, "--on"); const taskFromArgs = positionals(args).join(" ").trim(); - const task = taskFromArgs || openEditorForTask(); + const taskPrompt = taskFromArgs || openEditorForTask(); const client = createBackendClientFromConfig(config); - const repo = await client.addRepo(workspaceId, repoRemote); + const repo = await resolveImportedRepo(client, organizationId, repoSelector); const payload = CreateTaskInputSchema.parse({ - workspaceId, + organizationId, repoId: repo.repoId, - task, + task: taskPrompt, explicitTitle: explicitTitle || undefined, explicitBranchName: explicitBranchName || undefined, agentType, @@ -462,30 +553,30 @@ async function handleCreate(args: string[]): Promise { }); const created = await client.createTask(payload); - const task = await waitForTaskReady(client, workspaceId, created.taskId, 180_000); - const switched = await client.switchTask(workspaceId, task.taskId); - const attached = await client.attachTask(workspaceId, task.taskId); + const createdTask = await waitForTaskReady(client, organizationId, created.taskId, 180_000); + const switched = await client.switchTask(organizationId, createdTask.taskId); + const attached = await client.attachTask(organizationId, createdTask.taskId); - writeStdout(`Branch: ${task.branchName ?? "-"}`); - writeStdout(`Task: ${task.taskId}`); - writeStdout(`Provider: ${task.providerId}`); + writeStdout(`Branch: ${createdTask.branchName ?? "-"}`); + writeStdout(`Task: ${createdTask.taskId}`); + writeStdout(`Provider: ${createdTask.sandboxProviderId}`); writeStdout(`Session: ${attached.sessionId ?? 
"none"}`); writeStdout(`Target: ${switched.switchTarget || attached.target}`); - writeStdout(`Title: ${task.title ?? "-"}`); + writeStdout(`Title: ${createdTask.title ?? "-"}`); const tmuxResult = spawnCreateTmuxWindow({ - branchName: task.branchName ?? task.taskId, + branchName: createdTask.branchName ?? createdTask.taskId, targetPath: switched.switchTarget || attached.target, sessionId: attached.sessionId, }); if (tmuxResult.created) { - writeStdout(`Window: created (${task.branchName})`); + writeStdout(`Window: created (${createdTask.branchName})`); return; } writeStdout(""); - writeStdout(`Run: hf switch ${task.taskId}`); + writeStdout(`Run: hf switch ${createdTask.taskId}`); if ((switched.switchTarget || attached.target).startsWith("/")) { writeStdout(`cd ${switched.switchTarget || attached.target}`); } @@ -493,8 +584,8 @@ async function handleCreate(args: string[]): Promise { async function handleTui(args: string[]): Promise { const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); - await runTuiCommand(config, workspaceId); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); + await runTuiCommand(config, organizationId); } async function handleStatus(args: string[]): Promise { @@ -504,17 +595,17 @@ async function handleStatus(args: string[]): Promise { } const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); const client = createBackendClientFromConfig(config); const backendStatus = await getBackendStatus(config.backend.host, config.backend.port); - const rows = await client.listTasks(workspaceId); + const rows = await listDetailedTasks(client, organizationId); const summary = summarizeTasks(rows); if (hasFlag(args, "--json")) { writeStdout( JSON.stringify( { - workspaceId, + organizationId, backend: backendStatus, tasks: { 
total: summary.total, @@ -529,7 +620,7 @@ async function handleStatus(args: string[]): Promise { return; } - writeStdout(`workspace=${workspaceId}`); + writeStdout(`organization=${organizationId}`); writeStdout(`backend running=${backendStatus.running} pid=${backendStatus.pid ?? "unknown"} version=${backendStatus.version ?? "unknown"}`); writeStdout(`tasks total=${summary.total}`); writeStdout( @@ -538,7 +629,7 @@ async function handleStatus(args: string[]): Promise { const providerSummary = Object.entries(summary.byProvider) .map(([provider, count]) => `${provider}=${count}`) .join(" "); - writeStdout(`providers ${providerSummary || "-"}`); + writeStdout(`sandboxProviders ${providerSummary || "-"}`); } async function handleHistory(args: string[]): Promise { @@ -548,13 +639,13 @@ async function handleHistory(args: string[]): Promise { } const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); const limit = parseIntOption(readOption(args, "--limit"), 20, "limit"); const branch = readOption(args, "--branch"); const taskId = readOption(args, "--task"); const client = createBackendClientFromConfig(config); const rows = await client.listHistory({ - workspaceId, + organizationId, limit, branch: branch || undefined, taskId: taskId || undefined, @@ -593,11 +684,11 @@ async function handleSwitchLike(cmd: string, args: string[]): Promise { } const config = loadConfig(); - const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config); + const organizationId = resolveOrganization(readOption(args, "--organization"), config); const client = createBackendClientFromConfig(config); if (cmd === "switch" && taskId === "-") { - const rows = await client.listTasks(workspaceId); + const rows = await listDetailedTasks(client, organizationId); const active = rows.filter((r) => { const group = groupTaskStatus(r.status); return group 
=== "running" || group === "idle" || group === "queued"; @@ -611,19 +702,19 @@ async function handleSwitchLike(cmd: string, args: string[]): Promise { } if (cmd === "switch") { - const result = await client.switchTask(workspaceId, taskId); + const result = await client.switchTask(organizationId, taskId); writeStdout(`cd ${result.switchTarget}`); return; } if (cmd === "attach") { - const result = await client.attachTask(workspaceId, taskId); + const result = await client.attachTask(organizationId, taskId); writeStdout(`target=${result.target} session=${result.sessionId ?? "none"}`); return; } if (cmd === "merge" || cmd === "archive") { - await client.runAction(workspaceId, taskId, cmd); + await client.runAction(organizationId, taskId, cmd); writeStdout("ok"); return; } @@ -656,8 +747,8 @@ async function main(): Promise { return; } - if (cmd === "workspace") { - await handleWorkspace(rest); + if (cmd === "organization") { + await handleOrganization(rest); return; } diff --git a/foundry/packages/cli/src/workspace/config.ts b/foundry/packages/cli/src/organization/config.ts similarity index 71% rename from foundry/packages/cli/src/workspace/config.ts rename to foundry/packages/cli/src/organization/config.ts index 5b05dc4..cfaebfe 100644 --- a/foundry/packages/cli/src/workspace/config.ts +++ b/foundry/packages/cli/src/organization/config.ts @@ -2,7 +2,7 @@ import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs"; import { dirname } from "node:path"; import { homedir } from "node:os"; import * as toml from "@iarna/toml"; -import { ConfigSchema, resolveWorkspaceId, type AppConfig } from "@sandbox-agent/foundry-shared"; +import { ConfigSchema, resolveOrganizationId, type AppConfig } from "@sandbox-agent/foundry-shared"; export const CONFIG_PATH = `${homedir()}/.config/foundry/config.toml`; @@ -20,6 +20,6 @@ export function saveConfig(config: AppConfig, path = CONFIG_PATH): void { writeFileSync(path, toml.stringify(config), "utf8"); } -export function 
resolveWorkspace(flagWorkspace: string | undefined, config: AppConfig): string { - return resolveWorkspaceId(flagWorkspace, config); +export function resolveOrganization(flagOrganization: string | undefined, config: AppConfig): string { + return resolveOrganizationId(flagOrganization, config); } diff --git a/foundry/packages/cli/src/theme.ts b/foundry/packages/cli/src/theme.ts index 5315a44..633c079 100644 --- a/foundry/packages/cli/src/theme.ts +++ b/foundry/packages/cli/src/theme.ts @@ -588,7 +588,7 @@ function pointer(obj: JsonObject, parts: string[]): unknown { function opencodeConfigPaths(baseDir: string): string[] { const paths: string[] = []; - const rootish = opencodeProjectConfigPaths(baseDir); + const rootish = opencodeRepositoryConfigPaths(baseDir); paths.push(...rootish); const configDir = process.env.XDG_CONFIG_HOME || join(homedir(), ".config"); @@ -611,12 +611,12 @@ function opencodeThemeDirs(configDir: string | undefined, baseDir: string): stri dirs.push(join(xdgConfig, "opencode", "themes")); dirs.push(join(homedir(), ".opencode", "themes")); - dirs.push(...opencodeProjectThemeDirs(baseDir)); + dirs.push(...opencodeRepositoryThemeDirs(baseDir)); return dirs; } -function opencodeProjectConfigPaths(baseDir: string): string[] { +function opencodeRepositoryConfigPaths(baseDir: string): string[] { const dirs = ancestorDirs(baseDir); const out: string[] = []; for (const dir of dirs) { @@ -628,7 +628,7 @@ function opencodeProjectConfigPaths(baseDir: string): string[] { return out; } -function opencodeProjectThemeDirs(baseDir: string): string[] { +function opencodeRepositoryThemeDirs(baseDir: string): string[] { const dirs = ancestorDirs(baseDir); const out: string[] = []; for (const dir of dirs) { diff --git a/foundry/packages/cli/src/tui.ts b/foundry/packages/cli/src/tui.ts index d561565..c3aba9e 100644 --- a/foundry/packages/cli/src/tui.ts +++ b/foundry/packages/cli/src/tui.ts @@ -56,6 +56,11 @@ interface RenderOptions { height?: number; } +async 
function listDetailedTasks(client: ReturnType, organizationId: string): Promise { + const rows = await client.listTasks(organizationId); + return await Promise.all(rows.map(async (row) => await client.getTask(organizationId, row.taskId))); +} + function pad(input: string, width: number): string { if (width <= 0) { return ""; @@ -183,7 +188,7 @@ function helpLines(width: number): string[] { export function formatRows( rows: TaskRecord[], selected: number, - workspaceId: string, + organizationId: string, status: string, searchQuery = "", showHelp = false, @@ -212,7 +217,7 @@ export function formatRows( return `${marker}${pad(display.name, branchWidth)} ${pad(display.diff, COLUMN_WIDTHS.diff)} ${pad(display.agent, COLUMN_WIDTHS.agent)} ${pad(display.pr, COLUMN_WIDTHS.pr)} ${pad(display.author, COLUMN_WIDTHS.author)} ${pad(display.ci, COLUMN_WIDTHS.ci)} ${pad(display.review, COLUMN_WIDTHS.review)} ${pad(display.age, COLUMN_WIDTHS.age)}`; }); - const footer = fitLine(buildFooterLine(totalWidth, ["Ctrl-H:cheatsheet", `workspace:${workspaceId}`, status], `v${CLI_BUILD_ID}`), totalWidth); + const footer = fitLine(buildFooterLine(totalWidth, ["Ctrl-H:cheatsheet", `organization:${organizationId}`, status], `v${CLI_BUILD_ID}`), totalWidth); const contentHeight = totalHeight - 1; const lines = [...header, ...body].map((line) => fitLine(line, totalWidth)); @@ -309,7 +314,7 @@ function buildStyledContent(content: string, theme: TuiTheme, api: StyledTextApi return new api.StyledText(chunks); } -export async function runTui(config: AppConfig, workspaceId: string): Promise { +export async function runTui(config: AppConfig, organizationId: string): Promise { const core = (await import("@opentui/core")) as OpenTuiLike; const createCliRenderer = core.createCliRenderer; const TextRenderable = core.TextRenderable; @@ -359,7 +364,7 @@ export async function runTui(config: AppConfig, workspaceId: string): Promise { try { - const result = await client.switchTask(workspaceId, row.taskId); + 
const result = await client.switchTask(organizationId, row.taskId); close(`cd ${result.switchTarget}`); } catch (err) { busy = false; @@ -538,7 +543,7 @@ export async function runTui(config: AppConfig, workspaceId: string): Promise { try { - const result = await client.attachTask(workspaceId, row.taskId); + const result = await client.attachTask(organizationId, row.taskId); close(`target=${result.target} session=${result.sessionId ?? "none"}`); } catch (err) { busy = false; @@ -554,7 +559,7 @@ export async function runTui(config: AppConfig, workspaceId: string): Promise client.runAction(workspaceId, row.taskId, "archive"), `archived ${row.taskId}`); + void runActionWithRefresh(`archiving ${row.taskId}`, async () => client.runAction(organizationId, row.taskId, "archive"), `archived ${row.taskId}`); return; } @@ -563,7 +568,7 @@ export async function runTui(config: AppConfig, workspaceId: string): Promise client.runAction(workspaceId, row.taskId, "sync"), `synced ${row.taskId}`); + void runActionWithRefresh(`syncing ${row.taskId}`, async () => client.runAction(organizationId, row.taskId, "sync"), `synced ${row.taskId}`); return; } @@ -575,8 +580,8 @@ export async function runTui(config: AppConfig, workspaceId: string): Promise { - await client.runAction(workspaceId, row.taskId, "merge"); - await client.runAction(workspaceId, row.taskId, "archive"); + await client.runAction(organizationId, row.taskId, "merge"); + await client.runAction(organizationId, row.taskId, "archive"); }, `merged+archived ${row.taskId}`, ); diff --git a/foundry/packages/cli/test/backend-manager.test.ts b/foundry/packages/cli/test/backend-manager.test.ts index 53529ab..a6089c5 100644 --- a/foundry/packages/cli/test/backend-manager.test.ts +++ b/foundry/packages/cli/test/backend-manager.test.ts @@ -37,7 +37,7 @@ function healthyMetadataResponse(): { ok: boolean; json: () => Promise json: async () => ({ runtime: "rivetkit", actorNames: { - workspace: {}, + organization: {}, }, }), }; @@ -58,7 +58,7 
@@ describe("backend manager", () => { const config: AppConfig = ConfigSchema.parse({ auto_submit: true, notify: ["terminal"], - workspace: { default: "default" }, + organization: { default: "default" }, backend: { host: "127.0.0.1", port: 7741, @@ -68,7 +68,7 @@ describe("backend manager", () => { backup_interval_secs: 3600, backup_retention_days: 7, }, - providers: { + sandboxProviders: { local: {}, e2b: {}, }, diff --git a/foundry/packages/cli/test/workspace-config.test.ts b/foundry/packages/cli/test/organization-config.test.ts similarity index 59% rename from foundry/packages/cli/test/workspace-config.test.ts rename to foundry/packages/cli/test/organization-config.test.ts index 94145be..5053ec2 100644 --- a/foundry/packages/cli/test/workspace-config.test.ts +++ b/foundry/packages/cli/test/organization-config.test.ts @@ -1,13 +1,13 @@ import { describe, expect, it } from "vitest"; import { ConfigSchema } from "@sandbox-agent/foundry-shared"; -import { resolveWorkspace } from "../src/workspace/config.js"; +import { resolveOrganization } from "../src/organization/config.js"; -describe("cli workspace resolution", () => { - it("uses default workspace when no flag", () => { +describe("cli organization resolution", () => { + it("uses default organization when no flag", () => { const config = ConfigSchema.parse({ auto_submit: true as const, notify: ["terminal" as const], - workspace: { default: "team" }, + organization: { default: "team" }, backend: { host: "127.0.0.1", port: 7741, @@ -17,13 +17,13 @@ describe("cli workspace resolution", () => { backup_interval_secs: 3600, backup_retention_days: 7, }, - providers: { + sandboxProviders: { local: {}, e2b: {}, }, }); - expect(resolveWorkspace(undefined, config)).toBe("team"); - expect(resolveWorkspace("alpha", config)).toBe("alpha"); + expect(resolveOrganization(undefined, config)).toBe("team"); + expect(resolveOrganization("alpha", config)).toBe("alpha"); }); }); diff --git a/foundry/packages/cli/test/theme.test.ts 
b/foundry/packages/cli/test/theme.test.ts index a492a63..2a0d7e3 100644 --- a/foundry/packages/cli/test/theme.test.ts +++ b/foundry/packages/cli/test/theme.test.ts @@ -21,7 +21,7 @@ describe("resolveTuiTheme", () => { const baseConfig: AppConfig = ConfigSchema.parse({ auto_submit: true, notify: ["terminal"], - workspace: { default: "default" }, + organization: { default: "default" }, backend: { host: "127.0.0.1", port: 7741, @@ -31,7 +31,7 @@ describe("resolveTuiTheme", () => { backup_interval_secs: 3600, backup_retention_days: 7, }, - providers: { + sandboxProviders: { local: {}, e2b: {}, }, diff --git a/foundry/packages/cli/test/tui-format.test.ts b/foundry/packages/cli/test/tui-format.test.ts index fe80182..9ba0feb 100644 --- a/foundry/packages/cli/test/tui-format.test.ts +++ b/foundry/packages/cli/test/tui-format.test.ts @@ -4,14 +4,14 @@ import { filterTasks, fuzzyMatch } from "@sandbox-agent/foundry-client"; import { formatRows } from "../src/tui.js"; const sample: TaskRecord = { - workspaceId: "default", + organizationId: "default", repoId: "repo-a", repoRemote: "https://example.com/repo-a.git", taskId: "task-1", branchName: "feature/test", title: "Test Title", task: "Do test", - providerId: "local", + sandboxProviderId: "local", status: "running", statusMessage: null, activeSandboxId: "sandbox-1", @@ -19,7 +19,7 @@ const sample: TaskRecord = { sandboxes: [ { sandboxId: "sandbox-1", - providerId: "local", + sandboxProviderId: "local", switchTarget: "sandbox://local/sandbox-1", cwd: null, createdAt: 1, diff --git a/foundry/packages/client/src/app-client.ts b/foundry/packages/client/src/app-client.ts index 1fb95d2..16968cf 100644 --- a/foundry/packages/client/src/app-client.ts +++ b/foundry/packages/client/src/app-client.ts @@ -24,7 +24,7 @@ export interface FoundryAppClient { cancelScheduledRenewal(organizationId: string): Promise; resumeSubscription(organizationId: string): Promise; reconnectGithub(organizationId: string): Promise; - 
recordSeatUsage(workspaceId: string): Promise; + recordSeatUsage(organizationId: string): Promise; } export interface CreateFoundryAppClientOptions { diff --git a/foundry/packages/client/src/backend-client.ts b/foundry/packages/client/src/backend-client.ts index ccb0657..14e5661 100644 --- a/foundry/packages/client/src/backend-client.ts +++ b/foundry/packages/client/src/backend-client.ts @@ -1,7 +1,6 @@ import { createClient } from "rivetkit/client"; import type { AgentType, - AddRepoInput, AppConfig, FoundryAppSnapshot, FoundryBillingPlanId, @@ -21,20 +20,18 @@ import type { TaskWorkbenchSetSessionUnreadInput, TaskWorkbenchSendMessageInput, TaskWorkbenchSnapshot, - TaskWorkbenchTabInput, + TaskWorkbenchSessionInput, TaskWorkbenchUpdateDraftInput, TaskEvent, WorkbenchTaskDetail, WorkbenchTaskSummary, WorkbenchSessionDetail, - WorkspaceEvent, - WorkspaceSummarySnapshot, + OrganizationEvent, + OrganizationSummarySnapshot, HistoryEvent, HistoryQueryInput, - ProviderId, + SandboxProviderId, RepoOverview, - RepoStackActionInput, - RepoStackActionResult, RepoRecord, StarSandboxAgentRepoInput, StarSandboxAgentRepoResult, @@ -43,7 +40,7 @@ import type { } from "@sandbox-agent/foundry-shared"; import type { ProcessCreateRequest, ProcessInfo, ProcessLogFollowQuery, ProcessLogsResponse, ProcessSignalQuery } from "sandbox-agent"; import { createMockBackendClient } from "./mock/backend-client.js"; -import { taskKey, taskSandboxKey, workspaceKey } from "./keys.js"; +import { taskKey, taskSandboxKey, organizationKey } from "./keys.js"; export type TaskAction = "push" | "sync" | "merge" | "archive" | "kill"; @@ -75,41 +72,39 @@ export interface ActorConn { dispose(): Promise; } -interface WorkspaceHandle { +interface OrganizationHandle { connect(): ActorConn; - addRepo(input: AddRepoInput): Promise; - listRepos(input: { workspaceId: string }): Promise; + listRepos(input: { organizationId: string }): Promise; createTask(input: CreateTaskInput): Promise; - listTasks(input: { 
workspaceId: string; repoId?: string }): Promise; - getRepoOverview(input: { workspaceId: string; repoId: string }): Promise; - runRepoStackAction(input: RepoStackActionInput): Promise; + listTasks(input: { organizationId: string; repoId?: string }): Promise; + getRepoOverview(input: { organizationId: string; repoId: string }): Promise; history(input: HistoryQueryInput): Promise; switchTask(taskId: string): Promise; - getTask(input: { workspaceId: string; taskId: string }): Promise; - attachTask(input: { workspaceId: string; taskId: string; reason?: string }): Promise<{ target: string; sessionId: string | null }>; - pushTask(input: { workspaceId: string; taskId: string; reason?: string }): Promise; - syncTask(input: { workspaceId: string; taskId: string; reason?: string }): Promise; - mergeTask(input: { workspaceId: string; taskId: string; reason?: string }): Promise; - archiveTask(input: { workspaceId: string; taskId: string; reason?: string }): Promise; - killTask(input: { workspaceId: string; taskId: string; reason?: string }): Promise; - useWorkspace(input: { workspaceId: string }): Promise<{ workspaceId: string }>; + getTask(input: { organizationId: string; taskId: string }): Promise; + attachTask(input: { organizationId: string; taskId: string; reason?: string }): Promise<{ target: string; sessionId: string | null }>; + pushTask(input: { organizationId: string; taskId: string; reason?: string }): Promise; + syncTask(input: { organizationId: string; taskId: string; reason?: string }): Promise; + mergeTask(input: { organizationId: string; taskId: string; reason?: string }): Promise; + archiveTask(input: { organizationId: string; taskId: string; reason?: string }): Promise; + killTask(input: { organizationId: string; taskId: string; reason?: string }): Promise; + useOrganization(input: { organizationId: string }): Promise<{ organizationId: string }>; starSandboxAgentRepo(input: StarSandboxAgentRepoInput): Promise; - getWorkspaceSummary(input: { workspaceId: 
string }): Promise; + getOrganizationSummary(input: { organizationId: string }): Promise; applyTaskSummaryUpdate(input: { taskSummary: WorkbenchTaskSummary }): Promise; removeTaskSummary(input: { taskId: string }): Promise; - reconcileWorkbenchState(input: { workspaceId: string }): Promise; + reconcileWorkbenchState(input: { organizationId: string }): Promise; createWorkbenchTask(input: TaskWorkbenchCreateTaskInput): Promise; markWorkbenchUnread(input: TaskWorkbenchSelectInput): Promise; renameWorkbenchTask(input: TaskWorkbenchRenameInput): Promise; renameWorkbenchBranch(input: TaskWorkbenchRenameInput): Promise; - createWorkbenchSession(input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ tabId: string }>; + createWorkbenchSession(input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }>; renameWorkbenchSession(input: TaskWorkbenchRenameSessionInput): Promise; setWorkbenchSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise; updateWorkbenchDraft(input: TaskWorkbenchUpdateDraftInput): Promise; changeWorkbenchModel(input: TaskWorkbenchChangeModelInput): Promise; sendWorkbenchMessage(input: TaskWorkbenchSendMessageInput): Promise; - stopWorkbenchSession(input: TaskWorkbenchTabInput): Promise; - closeWorkbenchSession(input: TaskWorkbenchTabInput): Promise; + stopWorkbenchSession(input: TaskWorkbenchSessionInput): Promise; + closeWorkbenchSession(input: TaskWorkbenchSessionInput): Promise; publishWorkbenchPr(input: TaskWorkbenchSelectInput): Promise; revertWorkbenchFile(input: TaskWorkbenchDiffInput): Promise; reloadGithubOrganization(): Promise; @@ -118,7 +113,7 @@ interface WorkspaceHandle { reloadGithubPullRequest(input: { repoId: string; prNumber: number }): Promise; } -interface AppWorkspaceHandle { +interface AppOrganizationHandle { connect(): ActorConn; getAppSnapshot(input: { sessionId: string }): Promise; skipAppStarterRepo(input: { sessionId: string }): Promise; @@ -131,7 +126,7 @@ interface 
AppWorkspaceHandle { createAppBillingPortalSession(input: { sessionId: string; organizationId: string }): Promise<{ url: string }>; cancelAppScheduledRenewal(input: { sessionId: string; organizationId: string }): Promise; resumeAppSubscription(input: { sessionId: string; organizationId: string }): Promise; - recordAppSeatUsage(input: { sessionId: string; workspaceId: string }): Promise; + recordAppSeatUsage(input: { sessionId: string; organizationId: string }): Promise; } interface TaskHandle { @@ -162,12 +157,12 @@ interface TaskSandboxHandle { rawSendSessionMethod(sessionId: string, method: string, params: Record): Promise; destroySession(sessionId: string): Promise; sandboxAgentConnection(): Promise<{ endpoint: string; token?: string }>; - providerState(): Promise<{ providerId: ProviderId; sandboxId: string; state: string; at: number }>; + providerState(): Promise<{ sandboxProviderId: SandboxProviderId; sandboxId: string; state: string; at: number }>; } interface RivetClient { - workspace: { - getOrCreate(key?: string | string[], opts?: { createWithInput?: unknown }): WorkspaceHandle; + organization: { + getOrCreate(key?: string | string[], opts?: { createWithInput?: unknown }): OrganizationHandle; }; task: { get(key?: string | string[]): TaskHandle; @@ -182,15 +177,15 @@ interface RivetClient { export interface BackendClientOptions { endpoint: string; - defaultWorkspaceId?: string; + defaultOrganizationId?: string; mode?: "remote" | "mock"; } export interface BackendClient { getAppSnapshot(): Promise; - connectWorkspace(workspaceId: string): Promise; - connectTask(workspaceId: string, repoId: string, taskId: string): Promise; - connectSandbox(workspaceId: string, providerId: ProviderId, sandboxId: string): Promise; + connectOrganization(organizationId: string): Promise; + connectTask(organizationId: string, repoId: string, taskId: string): Promise; + connectSandbox(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise; 
subscribeApp(listener: () => void): () => void; signInWithGithub(): Promise; signOutApp(): Promise; @@ -204,109 +199,112 @@ export interface BackendClient { openAppBillingPortal(organizationId: string): Promise; cancelAppScheduledRenewal(organizationId: string): Promise; resumeAppSubscription(organizationId: string): Promise; - recordAppSeatUsage(workspaceId: string): Promise; - addRepo(workspaceId: string, remoteUrl: string): Promise; - listRepos(workspaceId: string): Promise; + recordAppSeatUsage(organizationId: string): Promise; + listRepos(organizationId: string): Promise; createTask(input: CreateTaskInput): Promise; - listTasks(workspaceId: string, repoId?: string): Promise; - getRepoOverview(workspaceId: string, repoId: string): Promise; - runRepoStackAction(input: RepoStackActionInput): Promise; - getTask(workspaceId: string, taskId: string): Promise; + listTasks(organizationId: string, repoId?: string): Promise; + getRepoOverview(organizationId: string, repoId: string): Promise; + getTask(organizationId: string, taskId: string): Promise; listHistory(input: HistoryQueryInput): Promise; - switchTask(workspaceId: string, taskId: string): Promise; - attachTask(workspaceId: string, taskId: string): Promise<{ target: string; sessionId: string | null }>; - runAction(workspaceId: string, taskId: string, action: TaskAction): Promise; + switchTask(organizationId: string, taskId: string): Promise; + attachTask(organizationId: string, taskId: string): Promise<{ target: string; sessionId: string | null }>; + runAction(organizationId: string, taskId: string, action: TaskAction): Promise; createSandboxSession(input: { - workspaceId: string; - providerId: ProviderId; + organizationId: string; + sandboxProviderId: SandboxProviderId; sandboxId: string; prompt: string; cwd?: string; agent?: AgentType | "opencode"; }): Promise<{ id: string; status: "running" | "idle" | "error" }>; listSandboxSessions( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + 
sandboxProviderId: SandboxProviderId, sandboxId: string, input?: { cursor?: string; limit?: number }, ): Promise<{ items: SandboxSessionRecord[]; nextCursor?: string }>; listSandboxSessionEvents( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, input: { sessionId: string; cursor?: string; limit?: number }, ): Promise<{ items: SandboxSessionEventRecord[]; nextCursor?: string }>; - createSandboxProcess(input: { workspaceId: string; providerId: ProviderId; sandboxId: string; request: ProcessCreateRequest }): Promise; - listSandboxProcesses(workspaceId: string, providerId: ProviderId, sandboxId: string): Promise<{ processes: SandboxProcessRecord[] }>; + createSandboxProcess(input: { + organizationId: string; + sandboxProviderId: SandboxProviderId; + sandboxId: string; + request: ProcessCreateRequest; + }): Promise; + listSandboxProcesses(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise<{ processes: SandboxProcessRecord[] }>; getSandboxProcessLogs( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, processId: string, query?: ProcessLogFollowQuery, ): Promise; stopSandboxProcess( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, processId: string, query?: ProcessSignalQuery, ): Promise; killSandboxProcess( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, processId: string, query?: ProcessSignalQuery, ): Promise; - deleteSandboxProcess(workspaceId: string, providerId: ProviderId, sandboxId: string, processId: string): Promise; - subscribeSandboxProcesses(workspaceId: string, providerId: ProviderId, sandboxId: string, listener: () => void): () => void; + deleteSandboxProcess(organizationId: string, 
sandboxProviderId: SandboxProviderId, sandboxId: string, processId: string): Promise; + subscribeSandboxProcesses(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string, listener: () => void): () => void; sendSandboxPrompt(input: { - workspaceId: string; - providerId: ProviderId; + organizationId: string; + sandboxProviderId: SandboxProviderId; sandboxId: string; sessionId: string; prompt: string; notification?: boolean; }): Promise; sandboxSessionStatus( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, sessionId: string, ): Promise<{ id: string; status: "running" | "idle" | "error" }>; sandboxProviderState( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, - ): Promise<{ providerId: ProviderId; sandboxId: string; state: string; at: number }>; - getSandboxAgentConnection(workspaceId: string, providerId: ProviderId, sandboxId: string): Promise<{ endpoint: string; token?: string }>; - getWorkspaceSummary(workspaceId: string): Promise; - getTaskDetail(workspaceId: string, repoId: string, taskId: string): Promise; - getSessionDetail(workspaceId: string, repoId: string, taskId: string, sessionId: string): Promise; - getWorkbench(workspaceId: string): Promise; - subscribeWorkbench(workspaceId: string, listener: () => void): () => void; - createWorkbenchTask(workspaceId: string, input: TaskWorkbenchCreateTaskInput): Promise; - markWorkbenchUnread(workspaceId: string, input: TaskWorkbenchSelectInput): Promise; - renameWorkbenchTask(workspaceId: string, input: TaskWorkbenchRenameInput): Promise; - renameWorkbenchBranch(workspaceId: string, input: TaskWorkbenchRenameInput): Promise; - createWorkbenchSession(workspaceId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ tabId: string }>; - renameWorkbenchSession(workspaceId: string, input: 
TaskWorkbenchRenameSessionInput): Promise; - setWorkbenchSessionUnread(workspaceId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise; - updateWorkbenchDraft(workspaceId: string, input: TaskWorkbenchUpdateDraftInput): Promise; - changeWorkbenchModel(workspaceId: string, input: TaskWorkbenchChangeModelInput): Promise; - sendWorkbenchMessage(workspaceId: string, input: TaskWorkbenchSendMessageInput): Promise; - stopWorkbenchSession(workspaceId: string, input: TaskWorkbenchTabInput): Promise; - closeWorkbenchSession(workspaceId: string, input: TaskWorkbenchTabInput): Promise; - publishWorkbenchPr(workspaceId: string, input: TaskWorkbenchSelectInput): Promise; - revertWorkbenchFile(workspaceId: string, input: TaskWorkbenchDiffInput): Promise; - reloadGithubOrganization(workspaceId: string): Promise; - reloadGithubPullRequests(workspaceId: string): Promise; - reloadGithubRepository(workspaceId: string, repoId: string): Promise; - reloadGithubPullRequest(workspaceId: string, repoId: string, prNumber: number): Promise; + ): Promise<{ sandboxProviderId: SandboxProviderId; sandboxId: string; state: string; at: number }>; + getSandboxAgentConnection(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise<{ endpoint: string; token?: string }>; + getOrganizationSummary(organizationId: string): Promise; + getTaskDetail(organizationId: string, repoId: string, taskId: string): Promise; + getSessionDetail(organizationId: string, repoId: string, taskId: string, sessionId: string): Promise; + getWorkbench(organizationId: string): Promise; + subscribeWorkbench(organizationId: string, listener: () => void): () => void; + createWorkbenchTask(organizationId: string, input: TaskWorkbenchCreateTaskInput): Promise; + markWorkbenchUnread(organizationId: string, input: TaskWorkbenchSelectInput): Promise; + renameWorkbenchTask(organizationId: string, input: TaskWorkbenchRenameInput): Promise; + renameWorkbenchBranch(organizationId: string, input: 
TaskWorkbenchRenameInput): Promise; + createWorkbenchSession(organizationId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }>; + renameWorkbenchSession(organizationId: string, input: TaskWorkbenchRenameSessionInput): Promise; + setWorkbenchSessionUnread(organizationId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise; + updateWorkbenchDraft(organizationId: string, input: TaskWorkbenchUpdateDraftInput): Promise; + changeWorkbenchModel(organizationId: string, input: TaskWorkbenchChangeModelInput): Promise; + sendWorkbenchMessage(organizationId: string, input: TaskWorkbenchSendMessageInput): Promise; + stopWorkbenchSession(organizationId: string, input: TaskWorkbenchSessionInput): Promise; + closeWorkbenchSession(organizationId: string, input: TaskWorkbenchSessionInput): Promise; + publishWorkbenchPr(organizationId: string, input: TaskWorkbenchSelectInput): Promise; + revertWorkbenchFile(organizationId: string, input: TaskWorkbenchDiffInput): Promise; + reloadGithubOrganization(organizationId: string): Promise; + reloadGithubPullRequests(organizationId: string): Promise; + reloadGithubRepository(organizationId: string, repoId: string): Promise; + reloadGithubPullRequest(organizationId: string, repoId: string, prNumber: number): Promise; health(): Promise<{ ok: true }>; - useWorkspace(workspaceId: string): Promise<{ workspaceId: string }>; - starSandboxAgentRepo(workspaceId: string): Promise; + useOrganization(organizationId: string): Promise<{ organizationId: string }>; + starSandboxAgentRepo(organizationId: string): Promise; } export function rivetEndpoint(config: AppConfig): string { @@ -316,10 +314,49 @@ export function rivetEndpoint(config: AppConfig): string { export function createBackendClientFromConfig(config: AppConfig): BackendClient { return createBackendClient({ endpoint: rivetEndpoint(config), - defaultWorkspaceId: config.workspace.default, + defaultOrganizationId: config.organization.default, }); } 
+export interface BackendHealthCheckOptions { + endpoint: string; + timeoutMs?: number; +} + +export interface BackendMetadata { + clientEndpoint: string; + appEndpoint: string; + rivetEndpoint: string; +} + +export async function checkBackendHealth(options: BackendHealthCheckOptions): Promise { + const controller = new AbortController(); + const timeout = setTimeout(() => controller.abort(), options.timeoutMs ?? 1_500); + + try { + const response = await fetch(normalizeLegacyBackendEndpoint(options.endpoint), { + method: "GET", + signal: controller.signal, + }); + return response.status < 500; + } catch { + return false; + } finally { + clearTimeout(timeout); + } +} + +export async function readBackendMetadata(options: BackendHealthCheckOptions): Promise { + const endpoints = deriveBackendEndpoints(options.endpoint); + const clientEndpoint = endpoints.rivetEndpoint.replace(/\/v1\/rivet\/?$/, ""); + + return { + clientEndpoint, + appEndpoint: endpoints.appEndpoint, + rivetEndpoint: endpoints.rivetEndpoint, + }; +} + function stripTrailingSlash(value: string): string { return value.replace(/\/$/, ""); } @@ -366,7 +403,7 @@ function signedOutAppSnapshot(): FoundryAppSnapshot { export function createBackendClient(options: BackendClientOptions): BackendClient { if (options.mode === "mock") { - return createMockBackendClient(options.defaultWorkspaceId); + return createMockBackendClient(options.defaultOrganizationId); } const endpoints = deriveBackendEndpoints(options.endpoint); @@ -424,20 +461,20 @@ export function createBackendClient(options: BackendClientOptions): BackendClien return typeof sessionId === "string" && sessionId.length > 0 ? 
sessionId : null; }; - const workspace = async (workspaceId: string): Promise => - client.workspace.getOrCreate(workspaceKey(workspaceId), { - createWithInput: workspaceId, + const organization = async (organizationId: string): Promise => + client.organization.getOrCreate(organizationKey(organizationId), { + createWithInput: organizationId, }); - const appWorkspace = async (): Promise => - client.workspace.getOrCreate(workspaceKey("app"), { + const appOrganization = async (): Promise => + client.organization.getOrCreate(organizationKey("app"), { createWithInput: "app", - }) as unknown as AppWorkspaceHandle; + }) as unknown as AppOrganizationHandle; - const task = async (workspaceId: string, repoId: string, taskId: string): Promise => client.task.get(taskKey(workspaceId, repoId, taskId)); + const task = async (organizationId: string, repoId: string, taskId: string): Promise => client.task.get(taskKey(organizationId, repoId, taskId)); - const sandboxByKey = async (workspaceId: string, _providerId: ProviderId, sandboxId: string): Promise => { - return (client as any).taskSandbox.get(taskSandboxKey(workspaceId, sandboxId)); + const sandboxByKey = async (organizationId: string, _providerId: SandboxProviderId, sandboxId: string): Promise => { + return (client as any).taskSandbox.get(taskSandboxKey(organizationId, sandboxId)); }; function isActorNotFoundError(error: unknown): boolean { @@ -445,21 +482,25 @@ export function createBackendClient(options: BackendClientOptions): BackendClien return message.includes("Actor not found"); } - const sandboxByActorIdFromTask = async (workspaceId: string, providerId: ProviderId, sandboxId: string): Promise => { - const ws = await workspace(workspaceId); - const rows = await ws.listTasks({ workspaceId }); + const sandboxByActorIdFromTask = async ( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + ): Promise => { + const ws = await organization(organizationId); + const rows = await ws.listTasks({ 
organizationId }); const candidates = [...rows].sort((a, b) => b.updatedAt - a.updatedAt); for (const row of candidates) { try { - const detail = await ws.getTask({ workspaceId, taskId: row.taskId }); - if (detail.providerId !== providerId) { + const detail = await ws.getTask({ organizationId, taskId: row.taskId }); + if (detail.sandboxProviderId !== sandboxProviderId) { continue; } const sandbox = detail.sandboxes.find( (sb) => sb.sandboxId === sandboxId && - sb.providerId === providerId && + sb.sandboxProviderId === sandboxProviderId && typeof (sb as any).sandboxActorId === "string" && (sb as any).sandboxActorId.length > 0, ) as { sandboxActorId?: string } | undefined; @@ -479,19 +520,19 @@ export function createBackendClient(options: BackendClientOptions): BackendClien }; const withSandboxHandle = async ( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, run: (handle: TaskSandboxHandle) => Promise, ): Promise => { - const handle = await sandboxByKey(workspaceId, providerId, sandboxId); + const handle = await sandboxByKey(organizationId, sandboxProviderId, sandboxId); try { return await run(handle); } catch (error) { if (!isActorNotFoundError(error)) { throw error; } - const fallback = await sandboxByActorIdFromTask(workspaceId, providerId, sandboxId); + const fallback = await sandboxByActorIdFromTask(organizationId, sandboxProviderId, sandboxId); if (!fallback) { throw error; } @@ -499,22 +540,22 @@ export function createBackendClient(options: BackendClientOptions): BackendClien } }; - const connectWorkspace = async (workspaceId: string): Promise => { - return (await workspace(workspaceId)).connect() as ActorConn; + const connectOrganization = async (organizationId: string): Promise => { + return (await organization(organizationId)).connect() as ActorConn; }; - const connectTask = async (workspaceId: string, repoId: string, taskIdValue: string): Promise => { - return (await 
task(workspaceId, repoId, taskIdValue)).connect() as ActorConn; + const connectTask = async (organizationId: string, repoId: string, taskIdValue: string): Promise => { + return (await task(organizationId, repoId, taskIdValue)).connect() as ActorConn; }; - const connectSandbox = async (workspaceId: string, providerId: ProviderId, sandboxId: string): Promise => { + const connectSandbox = async (organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise => { try { - return (await sandboxByKey(workspaceId, providerId, sandboxId)).connect() as ActorConn; + return (await sandboxByKey(organizationId, sandboxProviderId, sandboxId)).connect() as ActorConn; } catch (error) { if (!isActorNotFoundError(error)) { throw error; } - const fallback = await sandboxByActorIdFromTask(workspaceId, providerId, sandboxId); + const fallback = await sandboxByActorIdFromTask(organizationId, sandboxProviderId, sandboxId); if (!fallback) { throw error; } @@ -522,14 +563,14 @@ export function createBackendClient(options: BackendClientOptions): BackendClien } }; - const getWorkbenchCompat = async (workspaceId: string): Promise => { - const summary = await (await workspace(workspaceId)).getWorkspaceSummary({ workspaceId }); + const getWorkbenchCompat = async (organizationId: string): Promise => { + const summary = await (await organization(organizationId)).getOrganizationSummary({ organizationId }); const tasks = ( await Promise.all( summary.taskSummaries.map(async (taskSummary) => { let detail; try { - detail = await (await task(workspaceId, taskSummary.repoId, taskSummary.id)).getTaskDetail(); + detail = await (await task(organizationId, taskSummary.repoId, taskSummary.id)).getTaskDetail(); } catch (error) { if (isActorNotFoundError(error)) { return null; @@ -539,7 +580,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien const sessionDetails = await Promise.all( detail.sessionsSummary.map(async (session) => { try { - const full = 
await (await task(workspaceId, detail.repoId, detail.id)).getSessionDetail({ sessionId: session.id }); + const full = await (await task(organizationId, detail.repoId, detail.id)).getSessionDetail({ sessionId: session.id }); return [session.id, full] as const; } catch (error) { if (isActorNotFoundError(error)) { @@ -559,7 +600,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien updatedAtMs: detail.updatedAtMs, branch: detail.branch, pullRequest: detail.pullRequest, - tabs: detail.sessionsSummary.map((session) => { + sessions: detail.sessionsSummary.map((session) => { const full = sessionDetailsById.get(session.id); return { id: session.id, @@ -584,7 +625,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien ) ).filter((task): task is TaskWorkbenchSnapshot["tasks"][number] => task !== null); - const projects = summary.repos + const repositories = summary.repos .map((repo) => ({ id: repo.id, label: repo.label, @@ -594,31 +635,31 @@ export function createBackendClient(options: BackendClientOptions): BackendClien .filter((repo) => repo.tasks.length > 0); return { - workspaceId, + organizationId, repos: summary.repos.map((repo) => ({ id: repo.id, label: repo.label })), - projects, + repositories, tasks: tasks.sort((left, right) => right.updatedAtMs - left.updatedAtMs), }; }; - const subscribeWorkbench = (workspaceId: string, listener: () => void): (() => void) => { - let entry = workbenchSubscriptions.get(workspaceId); + const subscribeWorkbench = (organizationId: string, listener: () => void): (() => void) => { + let entry = workbenchSubscriptions.get(organizationId); if (!entry) { entry = { listeners: new Set(), disposeConnPromise: null, }; - workbenchSubscriptions.set(workspaceId, entry); + workbenchSubscriptions.set(organizationId, entry); } entry.listeners.add(listener); if (!entry.disposeConnPromise) { entry.disposeConnPromise = (async () => { - const handle = await workspace(workspaceId); + const 
handle = await organization(organizationId); const conn = (handle as any).connect(); const unsubscribeEvent = conn.on("workbenchUpdated", () => { - const current = workbenchSubscriptions.get(workspaceId); + const current = workbenchSubscriptions.get(organizationId); if (!current) { return; } @@ -636,7 +677,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien } return () => { - const current = workbenchSubscriptions.get(workspaceId); + const current = workbenchSubscriptions.get(organizationId); if (!current) { return; } @@ -645,17 +686,18 @@ export function createBackendClient(options: BackendClientOptions): BackendClien return; } - workbenchSubscriptions.delete(workspaceId); + workbenchSubscriptions.delete(organizationId); void current.disposeConnPromise?.then(async (disposeConn) => { await disposeConn?.(); }); }; }; - const sandboxProcessSubscriptionKey = (workspaceId: string, providerId: ProviderId, sandboxId: string): string => `${workspaceId}:${providerId}:${sandboxId}`; + const sandboxProcessSubscriptionKey = (organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): string => + `${organizationId}:${sandboxProviderId}:${sandboxId}`; - const subscribeSandboxProcesses = (workspaceId: string, providerId: ProviderId, sandboxId: string, listener: () => void): (() => void) => { - const key = sandboxProcessSubscriptionKey(workspaceId, providerId, sandboxId); + const subscribeSandboxProcesses = (organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string, listener: () => void): (() => void) => { + const key = sandboxProcessSubscriptionKey(organizationId, sandboxProviderId, sandboxId); let entry = sandboxProcessSubscriptions.get(key); if (!entry) { entry = { @@ -669,7 +711,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!entry.disposeConnPromise) { entry.disposeConnPromise = (async () => { - const conn = await connectSandbox(workspaceId, providerId, 
sandboxId); + const conn = await connectSandbox(organizationId, sandboxProviderId, sandboxId); const unsubscribeEvent = conn.on("processesUpdated", () => { const current = sandboxProcessSubscriptions.get(key); if (!current) { @@ -710,7 +752,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!appSubscriptions.disposeConnPromise) { appSubscriptions.disposeConnPromise = (async () => { - const handle = await appWorkspace(); + const handle = await appOrganization(); const conn = (handle as any).connect(); const unsubscribeEvent = conn.on("appUpdated", () => { for (const currentListener of [...appSubscriptions.listeners]) { @@ -745,19 +787,19 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!sessionId) { return signedOutAppSnapshot(); } - return await (await appWorkspace()).getAppSnapshot({ sessionId }); + return await (await appOrganization()).getAppSnapshot({ sessionId }); }, - async connectWorkspace(workspaceId: string): Promise { - return await connectWorkspace(workspaceId); + async connectOrganization(organizationId: string): Promise { + return await connectOrganization(organizationId); }, - async connectTask(workspaceId: string, repoId: string, taskIdValue: string): Promise { - return await connectTask(workspaceId, repoId, taskIdValue); + async connectTask(organizationId: string, repoId: string, taskIdValue: string): Promise { + return await connectTask(organizationId, repoId, taskIdValue); }, - async connectSandbox(workspaceId: string, providerId: ProviderId, sandboxId: string): Promise { - return await connectSandbox(workspaceId, providerId, sandboxId); + async connectSandbox(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise { + return await connectSandbox(organizationId, sandboxProviderId, sandboxId); }, subscribeApp(listener: () => void): () => void { @@ -788,7 +830,7 @@ export function createBackendClient(options: BackendClientOptions): 
BackendClien if (!sessionId) { throw new Error("No active auth session"); } - return await (await appWorkspace()).skipAppStarterRepo({ sessionId }); + return await (await appOrganization()).skipAppStarterRepo({ sessionId }); }, async starAppStarterRepo(organizationId: string): Promise { @@ -796,7 +838,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!sessionId) { throw new Error("No active auth session"); } - return await (await appWorkspace()).starAppStarterRepo({ sessionId, organizationId }); + return await (await appOrganization()).starAppStarterRepo({ sessionId, organizationId }); }, async selectAppOrganization(organizationId: string): Promise { @@ -804,7 +846,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!sessionId) { throw new Error("No active auth session"); } - return await (await appWorkspace()).selectAppOrganization({ sessionId, organizationId }); + return await (await appOrganization()).selectAppOrganization({ sessionId, organizationId }); }, async updateAppOrganizationProfile(input: UpdateFoundryOrganizationProfileInput): Promise { @@ -812,7 +854,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!sessionId) { throw new Error("No active auth session"); } - return await (await appWorkspace()).updateAppOrganizationProfile({ + return await (await appOrganization()).updateAppOrganizationProfile({ sessionId, organizationId: input.organizationId, displayName: input.displayName, @@ -826,7 +868,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!sessionId) { throw new Error("No active auth session"); } - return await (await appWorkspace()).triggerAppRepoImport({ sessionId, organizationId }); + return await (await appOrganization()).triggerAppRepoImport({ sessionId, organizationId }); }, async reconnectAppGithub(organizationId: string): Promise { @@ -834,7 +876,7 @@ export function 
createBackendClient(options: BackendClientOptions): BackendClien if (!sessionId) { throw new Error("No active auth session"); } - const response = await (await appWorkspace()).beginAppGithubInstall({ sessionId, organizationId }); + const response = await (await appOrganization()).beginAppGithubInstall({ sessionId, organizationId }); if (typeof window !== "undefined") { window.location.assign(response.url); } @@ -845,7 +887,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!sessionId) { throw new Error("No active auth session"); } - const response = await (await appWorkspace()).createAppCheckoutSession({ sessionId, organizationId, planId }); + const response = await (await appOrganization()).createAppCheckoutSession({ sessionId, organizationId, planId }); if (typeof window !== "undefined") { window.location.assign(response.url); } @@ -856,7 +898,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!sessionId) { throw new Error("No active auth session"); } - const response = await (await appWorkspace()).createAppBillingPortalSession({ sessionId, organizationId }); + const response = await (await appOrganization()).createAppBillingPortalSession({ sessionId, organizationId }); if (typeof window !== "undefined") { window.location.assign(response.url); } @@ -867,7 +909,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!sessionId) { throw new Error("No active auth session"); } - return await (await appWorkspace()).cancelAppScheduledRenewal({ sessionId, organizationId }); + return await (await appOrganization()).cancelAppScheduledRenewal({ sessionId, organizationId }); }, async resumeAppSubscription(organizationId: string): Promise { @@ -875,117 +917,109 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (!sessionId) { throw new Error("No active auth session"); } - return await (await 
appWorkspace()).resumeAppSubscription({ sessionId, organizationId }); + return await (await appOrganization()).resumeAppSubscription({ sessionId, organizationId }); }, - async recordAppSeatUsage(workspaceId: string): Promise { + async recordAppSeatUsage(organizationId: string): Promise { const sessionId = await getSessionId(); if (!sessionId) { throw new Error("No active auth session"); } - return await (await appWorkspace()).recordAppSeatUsage({ sessionId, workspaceId }); + return await (await appOrganization()).recordAppSeatUsage({ sessionId, organizationId }); }, - async addRepo(workspaceId: string, remoteUrl: string): Promise { - return (await workspace(workspaceId)).addRepo({ workspaceId, remoteUrl }); - }, - - async listRepos(workspaceId: string): Promise { - return (await workspace(workspaceId)).listRepos({ workspaceId }); + async listRepos(organizationId: string): Promise { + return (await organization(organizationId)).listRepos({ organizationId }); }, async createTask(input: CreateTaskInput): Promise { - return (await workspace(input.workspaceId)).createTask(input); + return (await organization(input.organizationId)).createTask(input); }, - async starSandboxAgentRepo(workspaceId: string): Promise { - return (await workspace(workspaceId)).starSandboxAgentRepo({ workspaceId }); + async starSandboxAgentRepo(organizationId: string): Promise { + return (await organization(organizationId)).starSandboxAgentRepo({ organizationId }); }, - async listTasks(workspaceId: string, repoId?: string): Promise { - return (await workspace(workspaceId)).listTasks({ workspaceId, repoId }); + async listTasks(organizationId: string, repoId?: string): Promise { + return (await organization(organizationId)).listTasks({ organizationId, repoId }); }, - async getRepoOverview(workspaceId: string, repoId: string): Promise { - return (await workspace(workspaceId)).getRepoOverview({ workspaceId, repoId }); + async getRepoOverview(organizationId: string, repoId: string): Promise { + return 
(await organization(organizationId)).getRepoOverview({ organizationId, repoId }); }, - async runRepoStackAction(input: RepoStackActionInput): Promise { - return (await workspace(input.workspaceId)).runRepoStackAction(input); - }, - - async getTask(workspaceId: string, taskId: string): Promise { - return (await workspace(workspaceId)).getTask({ - workspaceId, + async getTask(organizationId: string, taskId: string): Promise { + return (await organization(organizationId)).getTask({ + organizationId, taskId, }); }, async listHistory(input: HistoryQueryInput): Promise { - return (await workspace(input.workspaceId)).history(input); + return (await organization(input.organizationId)).history(input); }, - async switchTask(workspaceId: string, taskId: string): Promise { - return (await workspace(workspaceId)).switchTask(taskId); + async switchTask(organizationId: string, taskId: string): Promise { + return (await organization(organizationId)).switchTask(taskId); }, - async attachTask(workspaceId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> { - return (await workspace(workspaceId)).attachTask({ - workspaceId, + async attachTask(organizationId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> { + return (await organization(organizationId)).attachTask({ + organizationId, taskId, reason: "cli.attach", }); }, - async runAction(workspaceId: string, taskId: string, action: TaskAction): Promise { + async runAction(organizationId: string, taskId: string, action: TaskAction): Promise { if (action === "push") { - await (await workspace(workspaceId)).pushTask({ - workspaceId, + await (await organization(organizationId)).pushTask({ + organizationId, taskId, reason: "cli.push", }); return; } if (action === "sync") { - await (await workspace(workspaceId)).syncTask({ - workspaceId, + await (await organization(organizationId)).syncTask({ + organizationId, taskId, reason: "cli.sync", }); return; } if (action === "merge") { - 
await (await workspace(workspaceId)).mergeTask({ - workspaceId, + await (await organization(organizationId)).mergeTask({ + organizationId, taskId, reason: "cli.merge", }); return; } if (action === "archive") { - await (await workspace(workspaceId)).archiveTask({ - workspaceId, + await (await organization(organizationId)).archiveTask({ + organizationId, taskId, reason: "cli.archive", }); return; } - await (await workspace(workspaceId)).killTask({ - workspaceId, + await (await organization(organizationId)).killTask({ + organizationId, taskId, reason: "cli.kill", }); }, async createSandboxSession(input: { - workspaceId: string; - providerId: ProviderId; + organizationId: string; + sandboxProviderId: SandboxProviderId; sandboxId: string; prompt: string; cwd?: string; agent?: AgentType | "opencode"; }): Promise<{ id: string; status: "running" | "idle" | "error" }> { - const created = await withSandboxHandle(input.workspaceId, input.providerId, input.sandboxId, async (handle) => + const created = await withSandboxHandle(input.organizationId, input.sandboxProviderId, input.sandboxId, async (handle) => handle.createSession({ agent: input.agent ?? 
"claude", sessionInit: { @@ -994,7 +1028,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien }), ); if (input.prompt.trim().length > 0) { - await withSandboxHandle(input.workspaceId, input.providerId, input.sandboxId, async (handle) => + await withSandboxHandle(input.organizationId, input.sandboxProviderId, input.sandboxId, async (handle) => handle.rawSendSessionMethod(created.id, "session/prompt", { prompt: [{ type: "text", text: input.prompt }], }), @@ -1007,83 +1041,87 @@ export function createBackendClient(options: BackendClientOptions): BackendClien }, async listSandboxSessions( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, input?: { cursor?: string; limit?: number }, ): Promise<{ items: SandboxSessionRecord[]; nextCursor?: string }> { - return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.listSessions(input ?? {})); + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.listSessions(input ?? 
{})); }, async listSandboxSessionEvents( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, input: { sessionId: string; cursor?: string; limit?: number }, ): Promise<{ items: SandboxSessionEventRecord[]; nextCursor?: string }> { - return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.getEvents(input)); + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.getEvents(input)); }, async createSandboxProcess(input: { - workspaceId: string; - providerId: ProviderId; + organizationId: string; + sandboxProviderId: SandboxProviderId; sandboxId: string; request: ProcessCreateRequest; }): Promise { - return await withSandboxHandle(input.workspaceId, input.providerId, input.sandboxId, async (handle) => handle.createProcess(input.request)); + return await withSandboxHandle(input.organizationId, input.sandboxProviderId, input.sandboxId, async (handle) => handle.createProcess(input.request)); }, - async listSandboxProcesses(workspaceId: string, providerId: ProviderId, sandboxId: string): Promise<{ processes: SandboxProcessRecord[] }> { - return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.listProcesses()); + async listSandboxProcesses( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + ): Promise<{ processes: SandboxProcessRecord[] }> { + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.listProcesses()); }, async getSandboxProcessLogs( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, processId: string, query?: ProcessLogFollowQuery, ): Promise { - return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.getProcessLogs(processId, query)); + return await 
withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.getProcessLogs(processId, query)); }, async stopSandboxProcess( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, processId: string, query?: ProcessSignalQuery, ): Promise { - return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.stopProcess(processId, query)); + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.stopProcess(processId, query)); }, async killSandboxProcess( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, processId: string, query?: ProcessSignalQuery, ): Promise { - return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.killProcess(processId, query)); + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.killProcess(processId, query)); }, - async deleteSandboxProcess(workspaceId: string, providerId: ProviderId, sandboxId: string, processId: string): Promise { - await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.deleteProcess(processId)); + async deleteSandboxProcess(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string, processId: string): Promise { + await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.deleteProcess(processId)); }, - subscribeSandboxProcesses(workspaceId: string, providerId: ProviderId, sandboxId: string, listener: () => void): () => void { - return subscribeSandboxProcesses(workspaceId, providerId, sandboxId, listener); + subscribeSandboxProcesses(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string, listener: () => void): () => void { + return 
subscribeSandboxProcesses(organizationId, sandboxProviderId, sandboxId, listener); }, async sendSandboxPrompt(input: { - workspaceId: string; - providerId: ProviderId; + organizationId: string; + sandboxProviderId: SandboxProviderId; sandboxId: string; sessionId: string; prompt: string; notification?: boolean; }): Promise { - await withSandboxHandle(input.workspaceId, input.providerId, input.sandboxId, async (handle) => + await withSandboxHandle(input.organizationId, input.sandboxProviderId, input.sandboxId, async (handle) => handle.rawSendSessionMethod(input.sessionId, "session/prompt", { prompt: [{ type: "text", text: input.prompt }], }), @@ -1091,8 +1129,8 @@ export function createBackendClient(options: BackendClientOptions): BackendClien }, async sandboxSessionStatus( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, sessionId: string, ): Promise<{ id: string; status: "running" | "idle" | "error" }> { @@ -1103,123 +1141,127 @@ export function createBackendClient(options: BackendClientOptions): BackendClien }, async sandboxProviderState( - workspaceId: string, - providerId: ProviderId, + organizationId: string, + sandboxProviderId: SandboxProviderId, sandboxId: string, - ): Promise<{ providerId: ProviderId; sandboxId: string; state: string; at: number }> { - return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.providerState()); + ): Promise<{ sandboxProviderId: SandboxProviderId; sandboxId: string; state: string; at: number }> { + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.providerState()); }, - async getSandboxAgentConnection(workspaceId: string, providerId: ProviderId, sandboxId: string): Promise<{ endpoint: string; token?: string }> { - return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.sandboxAgentConnection()); + async 
getSandboxAgentConnection( + organizationId: string, + sandboxProviderId: SandboxProviderId, + sandboxId: string, + ): Promise<{ endpoint: string; token?: string }> { + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.sandboxAgentConnection()); }, - async getWorkspaceSummary(workspaceId: string): Promise { - return (await workspace(workspaceId)).getWorkspaceSummary({ workspaceId }); + async getOrganizationSummary(organizationId: string): Promise { + return (await organization(organizationId)).getOrganizationSummary({ organizationId }); }, - async getTaskDetail(workspaceId: string, repoId: string, taskIdValue: string): Promise { - return (await task(workspaceId, repoId, taskIdValue)).getTaskDetail(); + async getTaskDetail(organizationId: string, repoId: string, taskIdValue: string): Promise { + return (await task(organizationId, repoId, taskIdValue)).getTaskDetail(); }, - async getSessionDetail(workspaceId: string, repoId: string, taskIdValue: string, sessionId: string): Promise { - return (await task(workspaceId, repoId, taskIdValue)).getSessionDetail({ sessionId }); + async getSessionDetail(organizationId: string, repoId: string, taskIdValue: string, sessionId: string): Promise { + return (await task(organizationId, repoId, taskIdValue)).getSessionDetail({ sessionId }); }, - async getWorkbench(workspaceId: string): Promise { - return await getWorkbenchCompat(workspaceId); + async getWorkbench(organizationId: string): Promise { + return await getWorkbenchCompat(organizationId); }, - subscribeWorkbench(workspaceId: string, listener: () => void): () => void { - return subscribeWorkbench(workspaceId, listener); + subscribeWorkbench(organizationId: string, listener: () => void): () => void { + return subscribeWorkbench(organizationId, listener); }, - async createWorkbenchTask(workspaceId: string, input: TaskWorkbenchCreateTaskInput): Promise { - return (await workspace(workspaceId)).createWorkbenchTask(input); + 
async createWorkbenchTask(organizationId: string, input: TaskWorkbenchCreateTaskInput): Promise { + return (await organization(organizationId)).createWorkbenchTask(input); }, - async markWorkbenchUnread(workspaceId: string, input: TaskWorkbenchSelectInput): Promise { - await (await workspace(workspaceId)).markWorkbenchUnread(input); + async markWorkbenchUnread(organizationId: string, input: TaskWorkbenchSelectInput): Promise { + await (await organization(organizationId)).markWorkbenchUnread(input); }, - async renameWorkbenchTask(workspaceId: string, input: TaskWorkbenchRenameInput): Promise { - await (await workspace(workspaceId)).renameWorkbenchTask(input); + async renameWorkbenchTask(organizationId: string, input: TaskWorkbenchRenameInput): Promise { + await (await organization(organizationId)).renameWorkbenchTask(input); }, - async renameWorkbenchBranch(workspaceId: string, input: TaskWorkbenchRenameInput): Promise { - await (await workspace(workspaceId)).renameWorkbenchBranch(input); + async renameWorkbenchBranch(organizationId: string, input: TaskWorkbenchRenameInput): Promise { + await (await organization(organizationId)).renameWorkbenchBranch(input); }, - async createWorkbenchSession(workspaceId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ tabId: string }> { - return await (await workspace(workspaceId)).createWorkbenchSession(input); + async createWorkbenchSession(organizationId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }> { + return await (await organization(organizationId)).createWorkbenchSession(input); }, - async renameWorkbenchSession(workspaceId: string, input: TaskWorkbenchRenameSessionInput): Promise { - await (await workspace(workspaceId)).renameWorkbenchSession(input); + async renameWorkbenchSession(organizationId: string, input: TaskWorkbenchRenameSessionInput): Promise { + await (await organization(organizationId)).renameWorkbenchSession(input); }, - async 
setWorkbenchSessionUnread(workspaceId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise { - await (await workspace(workspaceId)).setWorkbenchSessionUnread(input); + async setWorkbenchSessionUnread(organizationId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise { + await (await organization(organizationId)).setWorkbenchSessionUnread(input); }, - async updateWorkbenchDraft(workspaceId: string, input: TaskWorkbenchUpdateDraftInput): Promise { - await (await workspace(workspaceId)).updateWorkbenchDraft(input); + async updateWorkbenchDraft(organizationId: string, input: TaskWorkbenchUpdateDraftInput): Promise { + await (await organization(organizationId)).updateWorkbenchDraft(input); }, - async changeWorkbenchModel(workspaceId: string, input: TaskWorkbenchChangeModelInput): Promise { - await (await workspace(workspaceId)).changeWorkbenchModel(input); + async changeWorkbenchModel(organizationId: string, input: TaskWorkbenchChangeModelInput): Promise { + await (await organization(organizationId)).changeWorkbenchModel(input); }, - async sendWorkbenchMessage(workspaceId: string, input: TaskWorkbenchSendMessageInput): Promise { - await (await workspace(workspaceId)).sendWorkbenchMessage(input); + async sendWorkbenchMessage(organizationId: string, input: TaskWorkbenchSendMessageInput): Promise { + await (await organization(organizationId)).sendWorkbenchMessage(input); }, - async stopWorkbenchSession(workspaceId: string, input: TaskWorkbenchTabInput): Promise { - await (await workspace(workspaceId)).stopWorkbenchSession(input); + async stopWorkbenchSession(organizationId: string, input: TaskWorkbenchSessionInput): Promise { + await (await organization(organizationId)).stopWorkbenchSession(input); }, - async closeWorkbenchSession(workspaceId: string, input: TaskWorkbenchTabInput): Promise { - await (await workspace(workspaceId)).closeWorkbenchSession(input); + async closeWorkbenchSession(organizationId: string, input: TaskWorkbenchSessionInput): Promise { 
+ await (await organization(organizationId)).closeWorkbenchSession(input); }, - async publishWorkbenchPr(workspaceId: string, input: TaskWorkbenchSelectInput): Promise { - await (await workspace(workspaceId)).publishWorkbenchPr(input); + async publishWorkbenchPr(organizationId: string, input: TaskWorkbenchSelectInput): Promise { + await (await organization(organizationId)).publishWorkbenchPr(input); }, - async revertWorkbenchFile(workspaceId: string, input: TaskWorkbenchDiffInput): Promise { - await (await workspace(workspaceId)).revertWorkbenchFile(input); + async revertWorkbenchFile(organizationId: string, input: TaskWorkbenchDiffInput): Promise { + await (await organization(organizationId)).revertWorkbenchFile(input); }, - async reloadGithubOrganization(workspaceId: string): Promise { - await (await workspace(workspaceId)).reloadGithubOrganization(); + async reloadGithubOrganization(organizationId: string): Promise { + await (await organization(organizationId)).reloadGithubOrganization(); }, - async reloadGithubPullRequests(workspaceId: string): Promise { - await (await workspace(workspaceId)).reloadGithubPullRequests(); + async reloadGithubPullRequests(organizationId: string): Promise { + await (await organization(organizationId)).reloadGithubPullRequests(); }, - async reloadGithubRepository(workspaceId: string, repoId: string): Promise { - await (await workspace(workspaceId)).reloadGithubRepository({ repoId }); + async reloadGithubRepository(organizationId: string, repoId: string): Promise { + await (await organization(organizationId)).reloadGithubRepository({ repoId }); }, - async reloadGithubPullRequest(workspaceId: string, repoId: string, prNumber: number): Promise { - await (await workspace(workspaceId)).reloadGithubPullRequest({ repoId, prNumber }); + async reloadGithubPullRequest(organizationId: string, repoId: string, prNumber: number): Promise { + await (await organization(organizationId)).reloadGithubPullRequest({ repoId, prNumber }); }, async 
health(): Promise<{ ok: true }> { - const workspaceId = options.defaultWorkspaceId; - if (!workspaceId) { - throw new Error("Backend client default workspace is required for health checks"); + const organizationId = options.defaultOrganizationId; + if (!organizationId) { + throw new Error("Backend client default organization is required for health checks"); } - await (await workspace(workspaceId)).useWorkspace({ - workspaceId, + await (await organization(organizationId)).useOrganization({ + organizationId, }); return { ok: true }; }, - async useWorkspace(workspaceId: string): Promise<{ workspaceId: string }> { - return (await workspace(workspaceId)).useWorkspace({ workspaceId }); + async useOrganization(organizationId: string): Promise<{ organizationId: string }> { + return (await organization(organizationId)).useOrganization({ organizationId }); }, }; } diff --git a/foundry/packages/client/src/index.ts b/foundry/packages/client/src/index.ts index 7605986..87909a9 100644 --- a/foundry/packages/client/src/index.ts +++ b/foundry/packages/client/src/index.ts @@ -1,10 +1,10 @@ export * from "./app-client.js"; export * from "./backend-client.js"; -export * from "./interest/manager.js"; -export * from "./interest/mock-manager.js"; -export * from "./interest/remote-manager.js"; -export * from "./interest/topics.js"; -export * from "./interest/use-interest.js"; +export * from "./subscription/manager.js"; +export * from "./subscription/mock-manager.js"; +export * from "./subscription/remote-manager.js"; +export * from "./subscription/topics.js"; +export * from "./subscription/use-subscription.js"; export * from "./keys.js"; export * from "./mock-app.js"; export * from "./view-model.js"; diff --git a/foundry/packages/client/src/interest/mock-manager.ts b/foundry/packages/client/src/interest/mock-manager.ts deleted file mode 100644 index f1c065e..0000000 --- a/foundry/packages/client/src/interest/mock-manager.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { createMockBackendClient 
} from "../mock/backend-client.js"; -import { RemoteInterestManager } from "./remote-manager.js"; - -/** - * Mock implementation shares the same interest-manager harness as the remote - * path, but uses the in-memory mock backend that synthesizes actor events. - */ -export class MockInterestManager extends RemoteInterestManager { - constructor() { - super(createMockBackendClient()); - } -} diff --git a/foundry/packages/client/src/keys.ts b/foundry/packages/client/src/keys.ts index 54fdcc1..314f16a 100644 --- a/foundry/packages/client/src/keys.ts +++ b/foundry/packages/client/src/keys.ts @@ -1,29 +1,21 @@ export type ActorKey = string[]; -export function workspaceKey(workspaceId: string): ActorKey { - return ["ws", workspaceId]; +export function organizationKey(organizationId: string): ActorKey { + return ["org", organizationId]; } -export function projectKey(workspaceId: string, repoId: string): ActorKey { - return ["ws", workspaceId, "project", repoId]; +export function repositoryKey(organizationId: string, repoId: string): ActorKey { + return ["org", organizationId, "repository", repoId]; } -export function taskKey(workspaceId: string, repoId: string, taskId: string): ActorKey { - return ["ws", workspaceId, "project", repoId, "task", taskId]; +export function taskKey(organizationId: string, repoId: string, taskId: string): ActorKey { + return ["org", organizationId, "repository", repoId, "task", taskId]; } -export function taskSandboxKey(workspaceId: string, sandboxId: string): ActorKey { - return ["ws", workspaceId, "sandbox", sandboxId]; +export function taskSandboxKey(organizationId: string, sandboxId: string): ActorKey { + return ["org", organizationId, "sandbox", sandboxId]; } -export function historyKey(workspaceId: string, repoId: string): ActorKey { - return ["ws", workspaceId, "project", repoId, "history"]; -} - -export function projectPrSyncKey(workspaceId: string, repoId: string): ActorKey { - return ["ws", workspaceId, "project", repoId, "pr-sync"]; 
-} - -export function projectBranchSyncKey(workspaceId: string, repoId: string): ActorKey { - return ["ws", workspaceId, "project", repoId, "branch-sync"]; +export function historyKey(organizationId: string, repoId: string): ActorKey { + return ["org", organizationId, "repository", repoId, "history"]; } diff --git a/foundry/packages/client/src/mock-app.ts b/foundry/packages/client/src/mock-app.ts index 1cec853..0fa6fc7 100644 --- a/foundry/packages/client/src/mock-app.ts +++ b/foundry/packages/client/src/mock-app.ts @@ -67,7 +67,7 @@ export interface MockFoundryOrganizationSettings { export interface MockFoundryOrganization { id: string; - workspaceId: string; + organizationId: string; kind: MockOrganizationKind; settings: MockFoundryOrganizationSettings; github: MockFoundryGithubState; @@ -118,7 +118,7 @@ export interface MockFoundryAppClient { cancelScheduledRenewal(organizationId: string): Promise; resumeSubscription(organizationId: string): Promise; reconnectGithub(organizationId: string): Promise; - recordSeatUsage(workspaceId: string): void; + recordSeatUsage(organizationId: string): void; } const STORAGE_KEY = "sandbox-agent-foundry:mock-app:v1"; @@ -173,7 +173,7 @@ function buildRivetOrganization(): MockFoundryOrganization { return { id: "rivet", - workspaceId: "rivet", + organizationId: "rivet", kind: "organization", settings: { displayName: rivetDevFixture.name ?? 
rivetDevFixture.login, @@ -254,7 +254,7 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { organizations: [ { id: "personal-nathan", - workspaceId: "personal-nathan", + organizationId: "personal-nathan", kind: "personal", settings: { displayName: "Nathan", @@ -290,7 +290,7 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { }, { id: "acme", - workspaceId: "acme", + organizationId: "acme", kind: "organization", settings: { displayName: "Acme", @@ -335,7 +335,7 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { buildRivetOrganization(), { id: "personal-jamie", - workspaceId: "personal-jamie", + organizationId: "personal-jamie", kind: "personal", settings: { displayName: "Jamie", @@ -659,8 +659,8 @@ class MockFoundryAppStore implements MockFoundryAppClient { })); } - recordSeatUsage(workspaceId: string): void { - const org = this.snapshot.organizations.find((candidate) => candidate.workspaceId === workspaceId); + recordSeatUsage(organizationId: string): void { + const org = this.snapshot.organizations.find((candidate) => candidate.organizationId === organizationId); const currentUser = currentMockUser(this.snapshot); if (!org || !currentUser) { return; diff --git a/foundry/packages/client/src/mock/backend-client.ts b/foundry/packages/client/src/mock/backend-client.ts index b87c8c4..011192d 100644 --- a/foundry/packages/client/src/mock/backend-client.ts +++ b/foundry/packages/client/src/mock/backend-client.ts @@ -1,5 +1,4 @@ import type { - AddRepoInput, AppEvent, CreateTaskInput, FoundryAppSnapshot, @@ -17,21 +16,19 @@ import type { TaskWorkbenchSetSessionUnreadInput, TaskWorkbenchSendMessageInput, TaskWorkbenchSnapshot, - TaskWorkbenchTabInput, + TaskWorkbenchSessionInput, TaskWorkbenchUpdateDraftInput, TaskEvent, WorkbenchSessionDetail, WorkbenchTaskDetail, WorkbenchTaskSummary, - WorkspaceEvent, - WorkspaceSummarySnapshot, + OrganizationEvent, + OrganizationSummarySnapshot, HistoryEvent, HistoryQueryInput, - ProviderId, + 
SandboxProviderId, RepoOverview, RepoRecord, - RepoStackActionInput, - RepoStackActionResult, StarSandboxAgentRepoResult, SwitchResult, } from "@sandbox-agent/foundry-shared"; @@ -91,7 +88,7 @@ function toTaskStatus(status: TaskRecord["status"], archived: boolean): TaskReco return status; } -export function createMockBackendClient(defaultWorkspaceId = "default"): BackendClient { +export function createMockBackendClient(defaultOrganizationId = "default"): BackendClient { const workbench = getSharedMockWorkbenchClient(); const listenersBySandboxId = new Map void>>(); const processesBySandboxId = new Map(); @@ -176,9 +173,10 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend updatedAtMs: task.updatedAtMs, branch: task.branch, pullRequest: task.pullRequest, - sessionsSummary: task.tabs.map((tab) => ({ + sessionsSummary: task.sessions.map((tab) => ({ id: tab.id, sessionId: tab.sessionId, + sandboxSessionId: tab.sandboxSessionId ?? tab.sessionId, sessionName: tab.sessionName, agent: tab.agent, model: tab.model, @@ -192,10 +190,10 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend const buildTaskDetail = (task: TaskWorkbenchSnapshot["tasks"][number]): WorkbenchTaskDetail => ({ ...buildTaskSummary(task), task: task.title, - agentType: task.tabs[0]?.agent === "Codex" ? "codex" : "claude", + agentType: task.sessions[0]?.agent === "Codex" ? "codex" : "claude", runtimeStatus: toTaskStatus(task.status === "archived" ? "archived" : "running", task.status === "archived"), statusMessage: task.status === "archived" ? "archived" : "mock sandbox ready", - activeSessionId: task.tabs[0]?.sessionId ?? null, + activeSessionId: task.sessions[0]?.sessionId ?? null, diffStat: task.fileChanges.length > 0 ? `+${task.fileChanges.length}/-${task.fileChanges.length}` : "+0/-0", prUrl: task.pullRequest ? 
`https://example.test/pr/${task.pullRequest.number}` : null, reviewStatus: null, @@ -205,7 +203,7 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend minutesUsed: task.minutesUsed, sandboxes: [ { - providerId: "local", + sandboxProviderId: "local", sandboxId: task.id, cwd: mockCwd(task.repoName, task.id), }, @@ -213,15 +211,14 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend activeSandboxId: task.id, }); - const buildSessionDetail = (task: TaskWorkbenchSnapshot["tasks"][number], tabId: string): WorkbenchSessionDetail => { - const tab = task.tabs.find((candidate) => candidate.id === tabId); + const buildSessionDetail = (task: TaskWorkbenchSnapshot["tasks"][number], sessionId: string): WorkbenchSessionDetail => { + const tab = task.sessions.find((candidate) => candidate.id === sessionId); if (!tab) { - throw new Error(`Unknown mock tab ${tabId} for task ${task.id}`); + throw new Error(`Unknown mock session ${sessionId} for task ${task.id}`); } return { sessionId: tab.id, - tabId: tab.id, - sandboxSessionId: tab.sessionId, + sandboxSessionId: tab.sandboxSessionId ?? 
tab.sessionId, sessionName: tab.sessionName, agent: tab.agent, model: tab.model, @@ -234,11 +231,11 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend }; }; - const buildWorkspaceSummary = (): WorkspaceSummarySnapshot => { + const buildOrganizationSummary = (): OrganizationSummarySnapshot => { const snapshot = workbench.getSnapshot(); const taskSummaries = snapshot.tasks.map(buildTaskSummary); return { - workspaceId: defaultWorkspaceId, + organizationId: defaultOrganizationId, repos: snapshot.repos.map((repo) => { const repoTasks = taskSummaries.filter((task) => task.repoId === repo.id); return { @@ -253,39 +250,40 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend }; }; - const workspaceScope = (workspaceId: string): string => `workspace:${workspaceId}`; - const taskScope = (workspaceId: string, repoId: string, taskId: string): string => `task:${workspaceId}:${repoId}:${taskId}`; - const sandboxScope = (workspaceId: string, providerId: string, sandboxId: string): string => `sandbox:${workspaceId}:${providerId}:${sandboxId}`; + const organizationScope = (organizationId: string): string => `organization:${organizationId}`; + const taskScope = (organizationId: string, repoId: string, taskId: string): string => `task:${organizationId}:${repoId}:${taskId}`; + const sandboxScope = (organizationId: string, sandboxProviderId: string, sandboxId: string): string => + `sandbox:${organizationId}:${sandboxProviderId}:${sandboxId}`; - const emitWorkspaceSnapshot = (): void => { - const summary = buildWorkspaceSummary(); + const emitOrganizationSnapshot = (): void => { + const summary = buildOrganizationSummary(); const latestTask = [...summary.taskSummaries].sort((left, right) => right.updatedAtMs - left.updatedAtMs)[0] ?? 
null; if (latestTask) { - emitConnectionEvent(workspaceScope(defaultWorkspaceId), "workspaceUpdated", { + emitConnectionEvent(organizationScope(defaultOrganizationId), "organizationUpdated", { type: "taskSummaryUpdated", taskSummary: latestTask, - } satisfies WorkspaceEvent); + } satisfies OrganizationEvent); } }; const emitTaskUpdate = (taskId: string): void => { const task = requireTask(taskId); - emitConnectionEvent(taskScope(defaultWorkspaceId, task.repoId, task.id), "taskUpdated", { + emitConnectionEvent(taskScope(defaultOrganizationId, task.repoId, task.id), "taskUpdated", { type: "taskDetailUpdated", detail: buildTaskDetail(task), } satisfies TaskEvent); }; - const emitSessionUpdate = (taskId: string, tabId: string): void => { + const emitSessionUpdate = (taskId: string, sessionId: string): void => { const task = requireTask(taskId); - emitConnectionEvent(taskScope(defaultWorkspaceId, task.repoId, task.id), "sessionUpdated", { + emitConnectionEvent(taskScope(defaultOrganizationId, task.repoId, task.id), "sessionUpdated", { type: "sessionUpdated", - session: buildSessionDetail(task, tabId), + session: buildSessionDetail(task, sessionId), } satisfies SessionEvent); }; const emitSandboxProcessesUpdate = (sandboxId: string): void => { - emitConnectionEvent(sandboxScope(defaultWorkspaceId, "local", sandboxId), "processesUpdated", { + emitConnectionEvent(sandboxScope(defaultOrganizationId, "local", sandboxId), "processesUpdated", { type: "processesUpdated", processes: ensureProcessList(sandboxId).map((process) => cloneProcess(process)), } satisfies SandboxProcessesEvent); @@ -296,22 +294,22 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend const cwd = mockCwd(task.repoName, task.id); const archived = task.status === "archived"; return { - workspaceId: defaultWorkspaceId, + organizationId: defaultOrganizationId, repoId: task.repoId, repoRemote: mockRepoRemote(task.repoName), taskId: task.id, branchName: task.branch, title: 
task.title, task: task.title, - providerId: "local", + sandboxProviderId: "local", status: toTaskStatus(archived ? "archived" : "running", archived), statusMessage: archived ? "archived" : "mock sandbox ready", activeSandboxId: task.id, - activeSessionId: task.tabs[0]?.sessionId ?? null, + activeSessionId: task.sessions[0]?.sessionId ?? null, sandboxes: [ { sandboxId: task.id, - providerId: "local", + sandboxProviderId: "local", sandboxActorId: "mock-sandbox", switchTarget: `mock://${task.id}`, cwd, @@ -319,7 +317,7 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend updatedAt: task.updatedAtMs, }, ], - agentType: task.tabs[0]?.agent === "Codex" ? "codex" : "claude", + agentType: task.sessions[0]?.agent === "Codex" ? "codex" : "claude", prSubmitted: Boolean(task.pullRequest), diffStat: task.fileChanges.length > 0 ? `+${task.fileChanges.length}/-${task.fileChanges.length}` : "+0/-0", prUrl: task.pullRequest ? `https://example.test/pr/${task.pullRequest.number}` : null, @@ -366,16 +364,16 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend return unsupportedAppSnapshot(); }, - async connectWorkspace(workspaceId: string): Promise { - return createConn(workspaceScope(workspaceId)); + async connectOrganization(organizationId: string): Promise { + return createConn(organizationScope(organizationId)); }, - async connectTask(workspaceId: string, repoId: string, taskId: string): Promise { - return createConn(taskScope(workspaceId, repoId, taskId)); + async connectTask(organizationId: string, repoId: string, taskId: string): Promise { + return createConn(taskScope(organizationId, repoId, taskId)); }, - async connectSandbox(workspaceId: string, providerId: ProviderId, sandboxId: string): Promise { - return createConn(sandboxScope(workspaceId, providerId, sandboxId)); + async connectSandbox(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise { + return 
createConn(sandboxScope(organizationId, sandboxProviderId, sandboxId)); }, subscribeApp(): () => void { @@ -434,13 +432,9 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend return unsupportedAppSnapshot(); }, - async addRepo(_workspaceId: string, _remoteUrl: string): Promise { - notSupported("addRepo"); - }, - - async listRepos(_workspaceId: string): Promise { + async listRepos(_organizationId: string): Promise { return workbench.getSnapshot().repos.map((repo) => ({ - workspaceId: defaultWorkspaceId, + organizationId: defaultOrganizationId, repoId: repo.id, remoteUrl: mockRepoRemote(repo.label), createdAt: nowMs(), @@ -452,12 +446,12 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend notSupported("createTask"); }, - async listTasks(_workspaceId: string, repoId?: string): Promise { + async listTasks(_organizationId: string, repoId?: string): Promise { return workbench .getSnapshot() .tasks.filter((task) => !repoId || task.repoId === repoId) .map((task) => ({ - workspaceId: defaultWorkspaceId, + organizationId: defaultOrganizationId, repoId: task.repoId, taskId: task.id, branchName: task.branch, @@ -467,15 +461,10 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend })); }, - async getRepoOverview(_workspaceId: string, _repoId: string): Promise { + async getRepoOverview(_organizationId: string, _repoId: string): Promise { notSupported("getRepoOverview"); }, - - async runRepoStackAction(_input: RepoStackActionInput): Promise { - notSupported("runRepoStackAction"); - }, - - async getTask(_workspaceId: string, taskId: string): Promise { + async getTask(_organizationId: string, taskId: string): Promise { return buildTaskRecord(taskId); }, @@ -483,23 +472,23 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend return []; }, - async switchTask(_workspaceId: string, taskId: string): Promise { + async switchTask(_organizationId: string, taskId: 
string): Promise { return { - workspaceId: defaultWorkspaceId, + organizationId: defaultOrganizationId, taskId, - providerId: "local", + sandboxProviderId: "local", switchTarget: `mock://${taskId}`, }; }, - async attachTask(_workspaceId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> { + async attachTask(_organizationId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> { return { target: `mock://${taskId}`, - sessionId: requireTask(taskId).tabs[0]?.sessionId ?? null, + sessionId: requireTask(taskId).sessions[0]?.sessionId ?? null, }; }, - async runAction(_workspaceId: string, _taskId: string): Promise { + async runAction(_organizationId: string, _taskId: string): Promise { notSupported("runAction"); }, @@ -516,8 +505,8 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend }, async createSandboxProcess(input: { - workspaceId: string; - providerId: ProviderId; + organizationId: string; + sandboxProviderId: SandboxProviderId; sandboxId: string; request: ProcessCreateRequest; }): Promise { @@ -529,15 +518,15 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend return cloneProcess(created); }, - async listSandboxProcesses(_workspaceId: string, _providerId: ProviderId, sandboxId: string): Promise<{ processes: SandboxProcessRecord[] }> { + async listSandboxProcesses(_organizationId: string, _providerId: SandboxProviderId, sandboxId: string): Promise<{ processes: SandboxProcessRecord[] }> { return { processes: ensureProcessList(sandboxId).map((process) => cloneProcess(process)), }; }, async getSandboxProcessLogs( - _workspaceId: string, - _providerId: ProviderId, + _organizationId: string, + _providerId: SandboxProviderId, sandboxId: string, processId: string, query?: ProcessLogFollowQuery, @@ -564,8 +553,8 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend }, async stopSandboxProcess( - _workspaceId: string, - 
_providerId: ProviderId, + _organizationId: string, + _providerId: SandboxProviderId, sandboxId: string, processId: string, _query?: ProcessSignalQuery, @@ -583,8 +572,8 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend }, async killSandboxProcess( - _workspaceId: string, - _providerId: ProviderId, + _organizationId: string, + _providerId: SandboxProviderId, sandboxId: string, processId: string, _query?: ProcessSignalQuery, @@ -601,7 +590,7 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend return cloneProcess(process); }, - async deleteSandboxProcess(_workspaceId: string, _providerId: ProviderId, sandboxId: string, processId: string): Promise { + async deleteSandboxProcess(_organizationId: string, _providerId: SandboxProviderId, sandboxId: string, processId: string): Promise { processesBySandboxId.set( sandboxId, ensureProcessList(sandboxId).filter((candidate) => candidate.id !== processId), @@ -609,7 +598,7 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend notifySandbox(sandboxId); }, - subscribeSandboxProcesses(_workspaceId: string, _providerId: ProviderId, sandboxId: string, listener: () => void): () => void { + subscribeSandboxProcesses(_organizationId: string, _providerId: SandboxProviderId, sandboxId: string, listener: () => void): () => void { let listeners = listenersBySandboxId.get(sandboxId); if (!listeners) { listeners = new Set(); @@ -637,26 +626,26 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend }, async sandboxProviderState( - _workspaceId: string, - _providerId: ProviderId, + _organizationId: string, + _providerId: SandboxProviderId, sandboxId: string, - ): Promise<{ providerId: ProviderId; sandboxId: string; state: string; at: number }> { - return { providerId: "local", sandboxId, state: "running", at: nowMs() }; + ): Promise<{ sandboxProviderId: SandboxProviderId; sandboxId: string; state: string; at: number }> { 
+ return { sandboxProviderId: "local", sandboxId, state: "running", at: nowMs() }; }, async getSandboxAgentConnection(): Promise<{ endpoint: string; token?: string }> { return { endpoint: "mock://terminal-unavailable" }; }, - async getWorkspaceSummary(): Promise { - return buildWorkspaceSummary(); + async getOrganizationSummary(): Promise { + return buildOrganizationSummary(); }, - async getTaskDetail(_workspaceId: string, _repoId: string, taskId: string): Promise { + async getTaskDetail(_organizationId: string, _repoId: string, taskId: string): Promise { return buildTaskDetail(requireTask(taskId)); }, - async getSessionDetail(_workspaceId: string, _repoId: string, taskId: string, sessionId: string): Promise { + async getSessionDetail(_organizationId: string, _repoId: string, taskId: string, sessionId: string): Promise { return buildSessionDetail(requireTask(taskId), sessionId); }, @@ -664,103 +653,103 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend return workbench.getSnapshot(); }, - subscribeWorkbench(_workspaceId: string, listener: () => void): () => void { + subscribeWorkbench(_organizationId: string, listener: () => void): () => void { return workbench.subscribe(listener); }, - async createWorkbenchTask(_workspaceId: string, input: TaskWorkbenchCreateTaskInput): Promise { + async createWorkbenchTask(_organizationId: string, input: TaskWorkbenchCreateTaskInput): Promise { const created = await workbench.createTask(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(created.taskId); - if (created.tabId) { - emitSessionUpdate(created.taskId, created.tabId); + if (created.sessionId) { + emitSessionUpdate(created.taskId, created.sessionId); } return created; }, - async markWorkbenchUnread(_workspaceId: string, input: TaskWorkbenchSelectInput): Promise { + async markWorkbenchUnread(_organizationId: string, input: TaskWorkbenchSelectInput): Promise { await workbench.markTaskUnread(input); - 
emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, - async renameWorkbenchTask(_workspaceId: string, input: TaskWorkbenchRenameInput): Promise { + async renameWorkbenchTask(_organizationId: string, input: TaskWorkbenchRenameInput): Promise { await workbench.renameTask(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, - async renameWorkbenchBranch(_workspaceId: string, input: TaskWorkbenchRenameInput): Promise { + async renameWorkbenchBranch(_organizationId: string, input: TaskWorkbenchRenameInput): Promise { await workbench.renameBranch(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, - async createWorkbenchSession(_workspaceId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ tabId: string }> { - const created = await workbench.addTab(input); - emitWorkspaceSnapshot(); + async createWorkbenchSession(_organizationId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }> { + const created = await workbench.addSession(input); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); - emitSessionUpdate(input.taskId, created.tabId); + emitSessionUpdate(input.taskId, created.sessionId); return created; }, - async renameWorkbenchSession(_workspaceId: string, input: TaskWorkbenchRenameSessionInput): Promise { + async renameWorkbenchSession(_organizationId: string, input: TaskWorkbenchRenameSessionInput): Promise { await workbench.renameSession(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); - emitSessionUpdate(input.taskId, input.tabId); + emitSessionUpdate(input.taskId, input.sessionId); }, - async setWorkbenchSessionUnread(_workspaceId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise { + async setWorkbenchSessionUnread(_organizationId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise { await 
workbench.setSessionUnread(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); - emitSessionUpdate(input.taskId, input.tabId); + emitSessionUpdate(input.taskId, input.sessionId); }, - async updateWorkbenchDraft(_workspaceId: string, input: TaskWorkbenchUpdateDraftInput): Promise { + async updateWorkbenchDraft(_organizationId: string, input: TaskWorkbenchUpdateDraftInput): Promise { await workbench.updateDraft(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); - emitSessionUpdate(input.taskId, input.tabId); + emitSessionUpdate(input.taskId, input.sessionId); }, - async changeWorkbenchModel(_workspaceId: string, input: TaskWorkbenchChangeModelInput): Promise { + async changeWorkbenchModel(_organizationId: string, input: TaskWorkbenchChangeModelInput): Promise { await workbench.changeModel(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); - emitSessionUpdate(input.taskId, input.tabId); + emitSessionUpdate(input.taskId, input.sessionId); }, - async sendWorkbenchMessage(_workspaceId: string, input: TaskWorkbenchSendMessageInput): Promise { + async sendWorkbenchMessage(_organizationId: string, input: TaskWorkbenchSendMessageInput): Promise { await workbench.sendMessage(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); - emitSessionUpdate(input.taskId, input.tabId); + emitSessionUpdate(input.taskId, input.sessionId); }, - async stopWorkbenchSession(_workspaceId: string, input: TaskWorkbenchTabInput): Promise { + async stopWorkbenchSession(_organizationId: string, input: TaskWorkbenchSessionInput): Promise { await workbench.stopAgent(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); - emitSessionUpdate(input.taskId, input.tabId); + emitSessionUpdate(input.taskId, input.sessionId); }, - async closeWorkbenchSession(_workspaceId: string, input: 
TaskWorkbenchTabInput): Promise { - await workbench.closeTab(input); - emitWorkspaceSnapshot(); + async closeWorkbenchSession(_organizationId: string, input: TaskWorkbenchSessionInput): Promise { + await workbench.closeSession(input); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, - async publishWorkbenchPr(_workspaceId: string, input: TaskWorkbenchSelectInput): Promise { + async publishWorkbenchPr(_organizationId: string, input: TaskWorkbenchSelectInput): Promise { await workbench.publishPr(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, - async revertWorkbenchFile(_workspaceId: string, input: TaskWorkbenchDiffInput): Promise { + async revertWorkbenchFile(_organizationId: string, input: TaskWorkbenchDiffInput): Promise { await workbench.revertFile(input); - emitWorkspaceSnapshot(); + emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, @@ -776,8 +765,8 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend return { ok: true }; }, - async useWorkspace(workspaceId: string): Promise<{ workspaceId: string }> { - return { workspaceId }; + async useOrganization(organizationId: string): Promise<{ organizationId: string }> { + return { organizationId }; }, async starSandboxAgentRepo(): Promise { diff --git a/foundry/packages/client/src/mock/workbench-client.ts b/foundry/packages/client/src/mock/workbench-client.ts index f27c436..fbed2d0 100644 --- a/foundry/packages/client/src/mock/workbench-client.ts +++ b/foundry/packages/client/src/mock/workbench-client.ts @@ -1,7 +1,7 @@ import { MODEL_GROUPS, buildInitialMockLayoutViewModel, - groupWorkbenchProjects, + groupWorkbenchRepositories, nowMs, providerAgent, randomReply, @@ -10,7 +10,7 @@ import { uid, } from "../workbench-model.js"; import type { - TaskWorkbenchAddTabResponse, + TaskWorkbenchAddSessionResponse, TaskWorkbenchChangeModelInput, TaskWorkbenchCreateTaskInput, TaskWorkbenchCreateTaskResponse, @@ -21,9 +21,9 @@ 
import type { TaskWorkbenchSetSessionUnreadInput, TaskWorkbenchSendMessageInput, TaskWorkbenchSnapshot, - TaskWorkbenchTabInput, + TaskWorkbenchSessionInput, TaskWorkbenchUpdateDraftInput, - WorkbenchAgentTab as AgentTab, + WorkbenchSession as AgentSession, WorkbenchTask as Task, WorkbenchTranscriptEvent as TranscriptEvent, } from "@sandbox-agent/foundry-shared"; @@ -65,7 +65,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { async createTask(input: TaskWorkbenchCreateTaskInput): Promise { const id = uid(); - const tabId = `session-${id}`; + const sessionId = `session-${id}`; const repo = this.snapshot.repos.find((candidate) => candidate.id === input.repoId); if (!repo) { throw new Error(`Cannot create mock task for unknown repo ${input.repoId}`); @@ -79,10 +79,10 @@ class MockWorkbenchStore implements TaskWorkbenchClient { updatedAtMs: nowMs(), branch: input.branch?.trim() || null, pullRequest: null, - tabs: [ + sessions: [ { - id: tabId, - sessionId: tabId, + id: sessionId, + sessionId: sessionId, sessionName: "Session 1", agent: providerAgent( MODEL_GROUPS.find((group) => group.models.some((model) => model.id === (input.model ?? "claude-sonnet-4")))?.provider ?? "Claude", @@ -106,19 +106,19 @@ class MockWorkbenchStore implements TaskWorkbenchClient { ...current, tasks: [nextTask, ...current.tasks], })); - return { taskId: id, tabId }; + return { taskId: id, sessionId }; } async markTaskUnread(input: TaskWorkbenchSelectInput): Promise { this.updateTask(input.taskId, (task) => { - const targetTab = task.tabs[task.tabs.length - 1] ?? null; - if (!targetTab) { + const targetSession = task.sessions[task.sessions.length - 1] ?? null; + if (!targetSession) { return task; } return { ...task, - tabs: task.tabs.map((tab) => (tab.id === targetTab.id ? { ...tab, unread: true } : tab)), + sessions: task.sessions.map((session) => (session.id === targetSession.id ? 
{ ...session, unread: true } : session)), }; }); } @@ -168,12 +168,12 @@ class MockWorkbenchStore implements TaskWorkbenchClient { } async updateDraft(input: TaskWorkbenchUpdateDraftInput): Promise { - this.assertTab(input.taskId, input.tabId); + this.assertSession(input.taskId, input.sessionId); this.updateTask(input.taskId, (task) => ({ ...task, updatedAtMs: nowMs(), - tabs: task.tabs.map((tab) => - tab.id === input.tabId + sessions: task.sessions.map((tab) => + tab.id === input.sessionId ? { ...tab, draft: { @@ -193,7 +193,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { throw new Error(`Cannot send an empty mock prompt for task ${input.taskId}`); } - this.assertTab(input.taskId, input.tabId); + this.assertSession(input.taskId, input.sessionId); const startedAtMs = nowMs(); this.updateTask(input.taskId, (currentTask) => { @@ -202,10 +202,10 @@ class MockWorkbenchStore implements TaskWorkbenchClient { const newBranch = isFirstOnTask ? `feat/${slugify(newTitle)}` : currentTask.branch; const userMessageLines = [text, ...input.attachments.map((attachment) => `@ ${attachment.filePath}:${attachment.lineNumber}`)]; const userEvent = buildTranscriptEvent({ - sessionId: input.tabId, + sessionId: input.sessionId, sender: "client", createdAt: startedAtMs, - eventIndex: candidateEventIndex(currentTask, input.tabId), + eventIndex: candidateEventIndex(currentTask, input.sessionId), payload: { method: "session/prompt", params: { @@ -220,8 +220,8 @@ class MockWorkbenchStore implements TaskWorkbenchClient { branch: newBranch, status: "running", updatedAtMs: startedAtMs, - tabs: currentTask.tabs.map((candidate) => - candidate.id === input.tabId + sessions: currentTask.sessions.map((candidate) => + candidate.id === input.sessionId ? 
{ ...candidate, created: true, @@ -236,20 +236,20 @@ class MockWorkbenchStore implements TaskWorkbenchClient { }; }); - const existingTimer = this.pendingTimers.get(input.tabId); + const existingTimer = this.pendingTimers.get(input.sessionId); if (existingTimer) { clearTimeout(existingTimer); } const timer = setTimeout(() => { const task = this.requireTask(input.taskId); - const replyTab = this.requireTab(task, input.tabId); + this.requireSession(task, input.sessionId); const completedAtMs = nowMs(); const replyEvent = buildTranscriptEvent({ - sessionId: input.tabId, + sessionId: input.sessionId, sender: "agent", createdAt: completedAtMs, - eventIndex: candidateEventIndex(task, input.tabId), + eventIndex: candidateEventIndex(task, input.sessionId), payload: { result: { text: randomReply(), @@ -259,8 +259,8 @@ class MockWorkbenchStore implements TaskWorkbenchClient { }); this.updateTask(input.taskId, (currentTask) => { - const updatedTabs = currentTask.tabs.map((candidate) => { - if (candidate.id !== input.tabId) { + const updatedTabs = currentTask.sessions.map((candidate) => { + if (candidate.id !== input.sessionId) { return candidate; } @@ -277,35 +277,35 @@ class MockWorkbenchStore implements TaskWorkbenchClient { return { ...currentTask, updatedAtMs: completedAtMs, - tabs: updatedTabs, + sessions: updatedTabs, status: currentTask.status === "archived" ? "archived" : anyRunning ? 
"running" : "idle", }; }); - this.pendingTimers.delete(input.tabId); + this.pendingTimers.delete(input.sessionId); }, 2_500); - this.pendingTimers.set(input.tabId, timer); + this.pendingTimers.set(input.sessionId, timer); } - async stopAgent(input: TaskWorkbenchTabInput): Promise { - this.assertTab(input.taskId, input.tabId); - const existing = this.pendingTimers.get(input.tabId); + async stopAgent(input: TaskWorkbenchSessionInput): Promise { + this.assertSession(input.taskId, input.sessionId); + const existing = this.pendingTimers.get(input.sessionId); if (existing) { clearTimeout(existing); - this.pendingTimers.delete(input.tabId); + this.pendingTimers.delete(input.sessionId); } this.updateTask(input.taskId, (currentTask) => { - const updatedTabs = currentTask.tabs.map((candidate) => - candidate.id === input.tabId ? { ...candidate, status: "idle" as const, thinkingSinceMs: null } : candidate, + const updatedTabs = currentTask.sessions.map((candidate) => + candidate.id === input.sessionId ? { ...candidate, status: "idle" as const, thinkingSinceMs: null } : candidate, ); const anyRunning = updatedTabs.some((candidate) => candidate.status === "running"); return { ...currentTask, updatedAtMs: nowMs(), - tabs: updatedTabs, + sessions: updatedTabs, status: currentTask.status === "archived" ? "archived" : anyRunning ? "running" : "idle", }; }); @@ -314,40 +314,42 @@ class MockWorkbenchStore implements TaskWorkbenchClient { async setSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise { this.updateTask(input.taskId, (currentTask) => ({ ...currentTask, - tabs: currentTask.tabs.map((candidate) => (candidate.id === input.tabId ? { ...candidate, unread: input.unread } : candidate)), + sessions: currentTask.sessions.map((candidate) => (candidate.id === input.sessionId ? 
{ ...candidate, unread: input.unread } : candidate)), })); } async renameSession(input: TaskWorkbenchRenameSessionInput): Promise { const title = input.title.trim(); if (!title) { - throw new Error(`Cannot rename session ${input.tabId} to an empty title`); + throw new Error(`Cannot rename session ${input.sessionId} to an empty title`); } this.updateTask(input.taskId, (currentTask) => ({ ...currentTask, - tabs: currentTask.tabs.map((candidate) => (candidate.id === input.tabId ? { ...candidate, sessionName: title } : candidate)), + sessions: currentTask.sessions.map((candidate) => (candidate.id === input.sessionId ? { ...candidate, sessionName: title } : candidate)), })); } - async closeTab(input: TaskWorkbenchTabInput): Promise { + async closeSession(input: TaskWorkbenchSessionInput): Promise { this.updateTask(input.taskId, (currentTask) => { - if (currentTask.tabs.length <= 1) { + if (currentTask.sessions.length <= 1) { return currentTask; } return { ...currentTask, - tabs: currentTask.tabs.filter((candidate) => candidate.id !== input.tabId), + sessions: currentTask.sessions.filter((candidate) => candidate.id !== input.sessionId), }; }); } - async addTab(input: TaskWorkbenchSelectInput): Promise { + async addSession(input: TaskWorkbenchSelectInput): Promise { this.assertTask(input.taskId); - const nextTab: AgentTab = { - id: uid(), - sessionId: null, - sessionName: `Session ${this.requireTask(input.taskId).tabs.length + 1}`, + const nextSessionId = uid(); + const nextSession: AgentSession = { + id: nextSessionId, + sessionId: nextSessionId, + sandboxSessionId: null, + sessionName: `Session ${this.requireTask(input.taskId).sessions.length + 1}`, agent: "Claude", model: "claude-sonnet-4", status: "idle", @@ -361,9 +363,9 @@ class MockWorkbenchStore implements TaskWorkbenchClient { this.updateTask(input.taskId, (currentTask) => ({ ...currentTask, updatedAtMs: nowMs(), - tabs: [...currentTask.tabs, nextTab], + sessions: [...currentTask.sessions, nextSession], })); - 
return { tabId: nextTab.id }; + return { sessionId: nextSession.id }; } async changeModel(input: TaskWorkbenchChangeModelInput): Promise { @@ -374,8 +376,8 @@ class MockWorkbenchStore implements TaskWorkbenchClient { this.updateTask(input.taskId, (currentTask) => ({ ...currentTask, - tabs: currentTask.tabs.map((candidate) => - candidate.id === input.tabId ? { ...candidate, model: input.model, agent: providerAgent(group.provider) } : candidate, + sessions: currentTask.sessions.map((candidate) => + candidate.id === input.sessionId ? { ...candidate, model: input.model, agent: providerAgent(group.provider) } : candidate, ), })); } @@ -384,7 +386,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { const nextSnapshot = updater(this.snapshot); this.snapshot = { ...nextSnapshot, - projects: groupWorkbenchProjects(nextSnapshot.repos, nextSnapshot.tasks), + repositories: groupWorkbenchRepositories(nextSnapshot.repos, nextSnapshot.tasks), }; this.notify(); } @@ -407,9 +409,9 @@ class MockWorkbenchStore implements TaskWorkbenchClient { this.requireTask(taskId); } - private assertTab(taskId: string, tabId: string): void { + private assertSession(taskId: string, sessionId: string): void { const task = this.requireTask(taskId); - this.requireTab(task, tabId); + this.requireSession(task, sessionId); } private requireTask(taskId: string): Task { @@ -420,18 +422,18 @@ class MockWorkbenchStore implements TaskWorkbenchClient { return task; } - private requireTab(task: Task, tabId: string): AgentTab { - const tab = task.tabs.find((candidate) => candidate.id === tabId); - if (!tab) { - throw new Error(`Unable to find mock tab ${tabId} in task ${task.id}`); + private requireSession(task: Task, sessionId: string): AgentSession { + const session = task.sessions.find((candidate) => candidate.id === sessionId); + if (!session) { + throw new Error(`Unable to find mock session ${sessionId} in task ${task.id}`); } - return tab; + return session; } } -function 
candidateEventIndex(task: Task, tabId: string): number { - const tab = task.tabs.find((candidate) => candidate.id === tabId); - return (tab?.transcript.length ?? 0) + 1; +function candidateEventIndex(task: Task, sessionId: string): number { + const session = task.sessions.find((candidate) => candidate.id === sessionId); + return (session?.transcript.length ?? 0) + 1; } let sharedMockWorkbenchClient: TaskWorkbenchClient | null = null; diff --git a/foundry/packages/client/src/remote/app-client.ts b/foundry/packages/client/src/remote/app-client.ts index 9b80f3c..6daa2c5 100644 --- a/foundry/packages/client/src/remote/app-client.ts +++ b/foundry/packages/client/src/remote/app-client.ts @@ -104,8 +104,8 @@ class RemoteFoundryAppStore implements FoundryAppClient { await this.backend.reconnectAppGithub(organizationId); } - async recordSeatUsage(workspaceId: string): Promise { - this.snapshot = await this.backend.recordAppSeatUsage(workspaceId); + async recordSeatUsage(organizationId: string): Promise { + this.snapshot = await this.backend.recordAppSeatUsage(organizationId); this.notify(); } diff --git a/foundry/packages/client/src/remote/workbench-client.ts b/foundry/packages/client/src/remote/workbench-client.ts index 480921c..0dcbecb 100644 --- a/foundry/packages/client/src/remote/workbench-client.ts +++ b/foundry/packages/client/src/remote/workbench-client.ts @@ -1,5 +1,5 @@ import type { - TaskWorkbenchAddTabResponse, + TaskWorkbenchAddSessionResponse, TaskWorkbenchChangeModelInput, TaskWorkbenchCreateTaskInput, TaskWorkbenchCreateTaskResponse, @@ -10,21 +10,21 @@ import type { TaskWorkbenchSetSessionUnreadInput, TaskWorkbenchSendMessageInput, TaskWorkbenchSnapshot, - TaskWorkbenchTabInput, + TaskWorkbenchSessionInput, TaskWorkbenchUpdateDraftInput, } from "@sandbox-agent/foundry-shared"; import type { BackendClient } from "../backend-client.js"; -import { groupWorkbenchProjects } from "../workbench-model.js"; +import { groupWorkbenchRepositories } from 
"../workbench-model.js"; import type { TaskWorkbenchClient } from "../workbench-client.js"; export interface RemoteWorkbenchClientOptions { backend: BackendClient; - workspaceId: string; + organizationId: string; } class RemoteWorkbenchStore implements TaskWorkbenchClient { private readonly backend: BackendClient; - private readonly workspaceId: string; + private readonly organizationId: string; private snapshot: TaskWorkbenchSnapshot; private readonly listeners = new Set<() => void>(); private unsubscribeWorkbench: (() => void) | null = null; @@ -33,11 +33,11 @@ class RemoteWorkbenchStore implements TaskWorkbenchClient { constructor(options: RemoteWorkbenchClientOptions) { this.backend = options.backend; - this.workspaceId = options.workspaceId; + this.organizationId = options.organizationId; this.snapshot = { - workspaceId: options.workspaceId, + organizationId: options.organizationId, repos: [], - projects: [], + repositories: [], tasks: [], }; } @@ -63,86 +63,86 @@ class RemoteWorkbenchStore implements TaskWorkbenchClient { } async createTask(input: TaskWorkbenchCreateTaskInput): Promise { - const created = await this.backend.createWorkbenchTask(this.workspaceId, input); + const created = await this.backend.createWorkbenchTask(this.organizationId, input); await this.refresh(); return created; } async markTaskUnread(input: TaskWorkbenchSelectInput): Promise { - await this.backend.markWorkbenchUnread(this.workspaceId, input); + await this.backend.markWorkbenchUnread(this.organizationId, input); await this.refresh(); } async renameTask(input: TaskWorkbenchRenameInput): Promise { - await this.backend.renameWorkbenchTask(this.workspaceId, input); + await this.backend.renameWorkbenchTask(this.organizationId, input); await this.refresh(); } async renameBranch(input: TaskWorkbenchRenameInput): Promise { - await this.backend.renameWorkbenchBranch(this.workspaceId, input); + await this.backend.renameWorkbenchBranch(this.organizationId, input); await this.refresh(); } 
async archiveTask(input: TaskWorkbenchSelectInput): Promise { - await this.backend.runAction(this.workspaceId, input.taskId, "archive"); + await this.backend.runAction(this.organizationId, input.taskId, "archive"); await this.refresh(); } async publishPr(input: TaskWorkbenchSelectInput): Promise { - await this.backend.publishWorkbenchPr(this.workspaceId, input); + await this.backend.publishWorkbenchPr(this.organizationId, input); await this.refresh(); } async revertFile(input: TaskWorkbenchDiffInput): Promise { - await this.backend.revertWorkbenchFile(this.workspaceId, input); + await this.backend.revertWorkbenchFile(this.organizationId, input); await this.refresh(); } async updateDraft(input: TaskWorkbenchUpdateDraftInput): Promise { - await this.backend.updateWorkbenchDraft(this.workspaceId, input); + await this.backend.updateWorkbenchDraft(this.organizationId, input); // Skip refresh — the server broadcast will trigger it, and the frontend // holds local draft state to avoid the round-trip overwriting user input. 
} async sendMessage(input: TaskWorkbenchSendMessageInput): Promise { - await this.backend.sendWorkbenchMessage(this.workspaceId, input); + await this.backend.sendWorkbenchMessage(this.organizationId, input); await this.refresh(); } - async stopAgent(input: TaskWorkbenchTabInput): Promise { - await this.backend.stopWorkbenchSession(this.workspaceId, input); + async stopAgent(input: TaskWorkbenchSessionInput): Promise { + await this.backend.stopWorkbenchSession(this.organizationId, input); await this.refresh(); } async setSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise { - await this.backend.setWorkbenchSessionUnread(this.workspaceId, input); + await this.backend.setWorkbenchSessionUnread(this.organizationId, input); await this.refresh(); } async renameSession(input: TaskWorkbenchRenameSessionInput): Promise { - await this.backend.renameWorkbenchSession(this.workspaceId, input); + await this.backend.renameWorkbenchSession(this.organizationId, input); await this.refresh(); } - async closeTab(input: TaskWorkbenchTabInput): Promise { - await this.backend.closeWorkbenchSession(this.workspaceId, input); + async closeSession(input: TaskWorkbenchSessionInput): Promise { + await this.backend.closeWorkbenchSession(this.organizationId, input); await this.refresh(); } - async addTab(input: TaskWorkbenchSelectInput): Promise { - const created = await this.backend.createWorkbenchSession(this.workspaceId, input); + async addSession(input: TaskWorkbenchSelectInput): Promise { + const created = await this.backend.createWorkbenchSession(this.organizationId, input); await this.refresh(); return created; } async changeModel(input: TaskWorkbenchChangeModelInput): Promise { - await this.backend.changeWorkbenchModel(this.workspaceId, input); + await this.backend.changeWorkbenchModel(this.organizationId, input); await this.refresh(); } private ensureStarted(): void { if (!this.unsubscribeWorkbench) { - this.unsubscribeWorkbench = 
this.backend.subscribeWorkbench(this.workspaceId, () => { + this.unsubscribeWorkbench = this.backend.subscribeWorkbench(this.organizationId, () => { void this.refresh().catch(() => { this.scheduleRefreshRetry(); }); @@ -173,14 +173,14 @@ class RemoteWorkbenchStore implements TaskWorkbenchClient { } this.refreshPromise = (async () => { - const nextSnapshot = await this.backend.getWorkbench(this.workspaceId); + const nextSnapshot = await this.backend.getWorkbench(this.organizationId); if (this.refreshRetryTimeout) { clearTimeout(this.refreshRetryTimeout); this.refreshRetryTimeout = null; } this.snapshot = { ...nextSnapshot, - projects: nextSnapshot.projects ?? groupWorkbenchProjects(nextSnapshot.repos, nextSnapshot.tasks), + repositories: nextSnapshot.repositories ?? groupWorkbenchRepositories(nextSnapshot.repos, nextSnapshot.tasks), }; for (const listener of [...this.listeners]) { listener(); diff --git a/foundry/packages/client/src/interest/manager.ts b/foundry/packages/client/src/subscription/manager.ts similarity index 82% rename from foundry/packages/client/src/interest/manager.ts rename to foundry/packages/client/src/subscription/manager.ts index 4b02230..b9bee0b 100644 --- a/foundry/packages/client/src/interest/manager.ts +++ b/foundry/packages/client/src/subscription/manager.ts @@ -2,7 +2,7 @@ import type { TopicData, TopicKey, TopicParams } from "./topics.js"; export type TopicStatus = "loading" | "connected" | "error"; -export interface DebugInterestTopic { +export interface DebugSubscriptionTopic { topicKey: TopicKey; cacheKey: string; listenerCount: number; @@ -17,17 +17,17 @@ export interface TopicState { } /** - * The InterestManager owns all realtime actor connections and cached state. + * The SubscriptionManager owns all realtime actor connections and cached state. * * Multiple subscribers to the same topic share one connection and one cache * entry. 
After the last subscriber leaves, a short grace period keeps the * connection warm so navigation does not thrash actor connections. */ -export interface InterestManager { +export interface SubscriptionManager { subscribe(topicKey: K, params: TopicParams, listener: () => void): () => void; getSnapshot(topicKey: K, params: TopicParams): TopicData | undefined; getStatus(topicKey: K, params: TopicParams): TopicStatus; getError(topicKey: K, params: TopicParams): Error | null; - listDebugTopics(): DebugInterestTopic[]; + listDebugTopics(): DebugSubscriptionTopic[]; dispose(): void; } diff --git a/foundry/packages/client/src/subscription/mock-manager.ts b/foundry/packages/client/src/subscription/mock-manager.ts new file mode 100644 index 0000000..bcdb389 --- /dev/null +++ b/foundry/packages/client/src/subscription/mock-manager.ts @@ -0,0 +1,12 @@ +import { createMockBackendClient } from "../mock/backend-client.js"; +import { RemoteSubscriptionManager } from "./remote-manager.js"; + +/** + * Mock implementation shares the same subscription-manager harness as the remote + * path, but uses the in-memory mock backend that synthesizes actor events. 
+ */ +export class MockSubscriptionManager extends RemoteSubscriptionManager { + constructor() { + super(createMockBackendClient()); + } +} diff --git a/foundry/packages/client/src/interest/remote-manager.ts b/foundry/packages/client/src/subscription/remote-manager.ts similarity index 94% rename from foundry/packages/client/src/interest/remote-manager.ts rename to foundry/packages/client/src/subscription/remote-manager.ts index f857975..8cb2864 100644 --- a/foundry/packages/client/src/interest/remote-manager.ts +++ b/foundry/packages/client/src/subscription/remote-manager.ts @@ -1,14 +1,14 @@ import type { BackendClient } from "../backend-client.js"; -import type { DebugInterestTopic, InterestManager, TopicStatus } from "./manager.js"; +import type { DebugSubscriptionTopic, SubscriptionManager, TopicStatus } from "./manager.js"; import { topicDefinitions, type TopicData, type TopicDefinition, type TopicKey, type TopicParams } from "./topics.js"; const GRACE_PERIOD_MS = 30_000; /** - * Remote implementation of InterestManager. + * Remote implementation of SubscriptionManager. * Each cache entry owns one actor connection plus one materialized snapshot. */ -export class RemoteInterestManager implements InterestManager { +export class RemoteSubscriptionManager implements SubscriptionManager { private entries = new Map>(); constructor(private readonly backend: BackendClient) {} @@ -53,7 +53,7 @@ export class RemoteInterestManager implements InterestManager { return this.entries.get((topicDefinitions[topicKey] as any).key(params))?.error ?? 
null; } - listDebugTopics(): DebugInterestTopic[] { + listDebugTopics(): DebugSubscriptionTopic[] { return [...this.entries.values()] .filter((entry) => entry.listenerCount > 0) .map((entry) => entry.getDebugTopic()) @@ -91,7 +91,7 @@ class TopicEntry { private readonly params: TParams, ) {} - getDebugTopic(): DebugInterestTopic { + getDebugTopic(): DebugSubscriptionTopic { return { topicKey: this.topicKey, cacheKey: this.cacheKey, diff --git a/foundry/packages/client/src/interest/topics.ts b/foundry/packages/client/src/subscription/topics.ts similarity index 73% rename from foundry/packages/client/src/interest/topics.ts rename to foundry/packages/client/src/subscription/topics.ts index 2e38bf0..f6a0acc 100644 --- a/foundry/packages/client/src/interest/topics.ts +++ b/foundry/packages/client/src/subscription/topics.ts @@ -1,19 +1,19 @@ import type { AppEvent, FoundryAppSnapshot, - ProviderId, + SandboxProviderId, SandboxProcessesEvent, SessionEvent, TaskEvent, WorkbenchSessionDetail, WorkbenchTaskDetail, - WorkspaceEvent, - WorkspaceSummarySnapshot, + OrganizationEvent, + OrganizationSummarySnapshot, } from "@sandbox-agent/foundry-shared"; import type { ActorConn, BackendClient, SandboxProcessRecord } from "../backend-client.js"; /** - * Topic definitions for the interest manager. + * Topic definitions for the subscription manager. * * Each topic describes one actor connection plus one materialized read model. 
* Events always carry full replacement payloads for the changed entity so the @@ -28,23 +28,23 @@ export interface TopicDefinition { } export interface AppTopicParams {} -export interface WorkspaceTopicParams { - workspaceId: string; +export interface OrganizationTopicParams { + organizationId: string; } export interface TaskTopicParams { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; } export interface SessionTopicParams { - workspaceId: string; + organizationId: string; repoId: string; taskId: string; sessionId: string; } export interface SandboxProcessesTopicParams { - workspaceId: string; - providerId: ProviderId; + organizationId: string; + sandboxProviderId: SandboxProviderId; sandboxId: string; } @@ -62,17 +62,17 @@ export const topicDefinitions = { app: { key: () => "app", event: "appUpdated", - connect: (backend: BackendClient, _params: AppTopicParams) => backend.connectWorkspace("app"), + connect: (backend: BackendClient, _params: AppTopicParams) => backend.connectOrganization("app"), fetchInitial: (backend: BackendClient, _params: AppTopicParams) => backend.getAppSnapshot(), applyEvent: (_current: FoundryAppSnapshot, event: AppEvent) => event.snapshot, } satisfies TopicDefinition, - workspace: { - key: (params: WorkspaceTopicParams) => `workspace:${params.workspaceId}`, - event: "workspaceUpdated", - connect: (backend: BackendClient, params: WorkspaceTopicParams) => backend.connectWorkspace(params.workspaceId), - fetchInitial: (backend: BackendClient, params: WorkspaceTopicParams) => backend.getWorkspaceSummary(params.workspaceId), - applyEvent: (current: WorkspaceSummarySnapshot, event: WorkspaceEvent) => { + organization: { + key: (params: OrganizationTopicParams) => `organization:${params.organizationId}`, + event: "organizationUpdated", + connect: (backend: BackendClient, params: OrganizationTopicParams) => backend.connectOrganization(params.organizationId), + fetchInitial: (backend: BackendClient, params: 
OrganizationTopicParams) => backend.getOrganizationSummary(params.organizationId), + applyEvent: (current: OrganizationSummarySnapshot, event: OrganizationEvent) => { switch (event.type) { case "taskSummaryUpdated": return { @@ -107,22 +107,22 @@ export const topicDefinitions = { }; } }, - } satisfies TopicDefinition, + } satisfies TopicDefinition, task: { - key: (params: TaskTopicParams) => `task:${params.workspaceId}:${params.taskId}`, + key: (params: TaskTopicParams) => `task:${params.organizationId}:${params.taskId}`, event: "taskUpdated", - connect: (backend: BackendClient, params: TaskTopicParams) => backend.connectTask(params.workspaceId, params.repoId, params.taskId), - fetchInitial: (backend: BackendClient, params: TaskTopicParams) => backend.getTaskDetail(params.workspaceId, params.repoId, params.taskId), + connect: (backend: BackendClient, params: TaskTopicParams) => backend.connectTask(params.organizationId, params.repoId, params.taskId), + fetchInitial: (backend: BackendClient, params: TaskTopicParams) => backend.getTaskDetail(params.organizationId, params.repoId, params.taskId), applyEvent: (_current: WorkbenchTaskDetail, event: TaskEvent) => event.detail, } satisfies TopicDefinition, session: { - key: (params: SessionTopicParams) => `session:${params.workspaceId}:${params.taskId}:${params.sessionId}`, + key: (params: SessionTopicParams) => `session:${params.organizationId}:${params.taskId}:${params.sessionId}`, event: "sessionUpdated", - connect: (backend: BackendClient, params: SessionTopicParams) => backend.connectTask(params.workspaceId, params.repoId, params.taskId), + connect: (backend: BackendClient, params: SessionTopicParams) => backend.connectTask(params.organizationId, params.repoId, params.taskId), fetchInitial: (backend: BackendClient, params: SessionTopicParams) => - backend.getSessionDetail(params.workspaceId, params.repoId, params.taskId, params.sessionId), + backend.getSessionDetail(params.organizationId, params.repoId, params.taskId, 
params.sessionId), applyEvent: (current: WorkbenchSessionDetail, event: SessionEvent) => { if (event.session.sessionId !== current.sessionId) { return current; @@ -132,11 +132,12 @@ export const topicDefinitions = { } satisfies TopicDefinition, sandboxProcesses: { - key: (params: SandboxProcessesTopicParams) => `sandbox:${params.workspaceId}:${params.providerId}:${params.sandboxId}`, + key: (params: SandboxProcessesTopicParams) => `sandbox:${params.organizationId}:${params.sandboxProviderId}:${params.sandboxId}`, event: "processesUpdated", - connect: (backend: BackendClient, params: SandboxProcessesTopicParams) => backend.connectSandbox(params.workspaceId, params.providerId, params.sandboxId), + connect: (backend: BackendClient, params: SandboxProcessesTopicParams) => + backend.connectSandbox(params.organizationId, params.sandboxProviderId, params.sandboxId), fetchInitial: async (backend: BackendClient, params: SandboxProcessesTopicParams) => - (await backend.listSandboxProcesses(params.workspaceId, params.providerId, params.sandboxId)).processes, + (await backend.listSandboxProcesses(params.organizationId, params.sandboxProviderId, params.sandboxId)).processes, applyEvent: (_current: SandboxProcessRecord[], event: SandboxProcessesEvent) => event.processes, } satisfies TopicDefinition, } as const; diff --git a/foundry/packages/client/src/interest/use-interest.ts b/foundry/packages/client/src/subscription/use-subscription.ts similarity index 85% rename from foundry/packages/client/src/interest/use-interest.ts rename to foundry/packages/client/src/subscription/use-subscription.ts index 4ffd733..c83148a 100644 --- a/foundry/packages/client/src/interest/use-interest.ts +++ b/foundry/packages/client/src/subscription/use-subscription.ts @@ -1,14 +1,14 @@ import { useMemo, useRef, useSyncExternalStore } from "react"; -import type { InterestManager, TopicState } from "./manager.js"; +import type { SubscriptionManager, TopicState } from "./manager.js"; import { 
topicDefinitions, type TopicKey, type TopicParams } from "./topics.js"; /** - * React bridge for the interest manager. + * React bridge for the subscription manager. * * `null` params disable the subscription entirely, which is how screens express - * conditional interest in task/session/sandbox topics. + * conditional subscription in task/session/sandbox topics. */ -export function useInterest(manager: InterestManager, topicKey: K, params: TopicParams | null): TopicState { +export function useSubscription(manager: SubscriptionManager, topicKey: K, params: TopicParams | null): TopicState { const paramsKey = params ? (topicDefinitions[topicKey] as any).key(params) : null; const paramsRef = useRef | null>(params); paramsRef.current = params; diff --git a/foundry/packages/client/src/view-model.ts b/foundry/packages/client/src/view-model.ts index 239b8a5..c30ff2a 100644 --- a/foundry/packages/client/src/view-model.ts +++ b/foundry/packages/client/src/view-model.ts @@ -87,7 +87,7 @@ export function summarizeTasks(rows: TaskRecord[]): TaskSummary { for (const row of rows) { byStatus[groupTaskStatus(row.status)] += 1; - byProvider[row.providerId] = (byProvider[row.providerId] ?? 0) + 1; + byProvider[row.sandboxProviderId] = (byProvider[row.sandboxProviderId] ?? 
0) + 1; } return { diff --git a/foundry/packages/client/src/workbench-client.ts b/foundry/packages/client/src/workbench-client.ts index b6990fc..c317649 100644 --- a/foundry/packages/client/src/workbench-client.ts +++ b/foundry/packages/client/src/workbench-client.ts @@ -1,5 +1,5 @@ import type { - TaskWorkbenchAddTabResponse, + TaskWorkbenchAddSessionResponse, TaskWorkbenchChangeModelInput, TaskWorkbenchCreateTaskInput, TaskWorkbenchCreateTaskResponse, @@ -10,7 +10,7 @@ import type { TaskWorkbenchSetSessionUnreadInput, TaskWorkbenchSendMessageInput, TaskWorkbenchSnapshot, - TaskWorkbenchTabInput, + TaskWorkbenchSessionInput, TaskWorkbenchUpdateDraftInput, } from "@sandbox-agent/foundry-shared"; import type { BackendClient } from "./backend-client.js"; @@ -22,7 +22,7 @@ export type TaskWorkbenchClientMode = "mock" | "remote"; export interface CreateTaskWorkbenchClientOptions { mode: TaskWorkbenchClientMode; backend?: BackendClient; - workspaceId?: string; + organizationId?: string; } export interface TaskWorkbenchClient { @@ -37,11 +37,11 @@ export interface TaskWorkbenchClient { revertFile(input: TaskWorkbenchDiffInput): Promise; updateDraft(input: TaskWorkbenchUpdateDraftInput): Promise; sendMessage(input: TaskWorkbenchSendMessageInput): Promise; - stopAgent(input: TaskWorkbenchTabInput): Promise; + stopAgent(input: TaskWorkbenchSessionInput): Promise; setSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise; renameSession(input: TaskWorkbenchRenameSessionInput): Promise; - closeTab(input: TaskWorkbenchTabInput): Promise; - addTab(input: TaskWorkbenchSelectInput): Promise; + closeSession(input: TaskWorkbenchSessionInput): Promise; + addSession(input: TaskWorkbenchSelectInput): Promise; changeModel(input: TaskWorkbenchChangeModelInput): Promise; } @@ -53,12 +53,12 @@ export function createTaskWorkbenchClient(options: CreateTaskWorkbenchClientOpti if (!options.backend) { throw new Error("Remote task workbench client requires a backend client"); } - if 
(!options.workspaceId) { - throw new Error("Remote task workbench client requires a workspace id"); + if (!options.organizationId) { + throw new Error("Remote task workbench client requires a organization id"); } return createRemoteWorkbenchClient({ backend: options.backend, - workspaceId: options.workspaceId, + organizationId: options.organizationId, }); } diff --git a/foundry/packages/client/src/workbench-model.ts b/foundry/packages/client/src/workbench-model.ts index 2affb4d..d30407f 100644 --- a/foundry/packages/client/src/workbench-model.ts +++ b/foundry/packages/client/src/workbench-model.ts @@ -1,6 +1,6 @@ import type { WorkbenchAgentKind as AgentKind, - WorkbenchAgentTab as AgentTab, + WorkbenchSession as AgentSession, WorkbenchDiffLineKind as DiffLineKind, WorkbenchFileTreeNode as FileTreeNode, WorkbenchTask as Task, @@ -9,7 +9,7 @@ import type { WorkbenchModelGroup as ModelGroup, WorkbenchModelId as ModelId, WorkbenchParsedDiffLine as ParsedDiffLine, - WorkbenchProjectSection, + WorkbenchRepositorySection, WorkbenchRepo, WorkbenchTranscriptEvent as TranscriptEvent, } from "@sandbox-agent/foundry-shared"; @@ -186,17 +186,17 @@ function historyDetail(event: TranscriptEvent): string { return content || "Untitled event"; } -export function buildHistoryEvents(tabs: AgentTab[]): HistoryEvent[] { - return tabs - .flatMap((tab) => - tab.transcript +export function buildHistoryEvents(sessions: AgentSession[]): HistoryEvent[] { + return sessions + .flatMap((session) => + session.transcript .filter((event) => event.sender === "client") .map((event) => ({ - id: `history-${tab.id}-${event.id}`, + id: `history-${session.id}-${event.id}`, messageId: event.id, preview: historyPreview(event), - sessionName: tab.sessionName, - tabId: tab.id, + sessionName: session.sessionName, + sessionId: session.id, createdAtMs: event.createdAt, detail: historyDetail(event), })), @@ -316,7 +316,7 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(8), branch: 
"NathanFlurry/pi-bootstrap-fix", pullRequest: { number: 227, status: "ready" }, - tabs: [ + sessions: [ { id: "t1", sessionId: "t1", @@ -485,7 +485,7 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(3), branch: "feat/builtin-agent-skills", pullRequest: { number: 223, status: "draft" }, - tabs: [ + sessions: [ { id: "t3", sessionId: "t3", @@ -585,7 +585,7 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(45), branch: "hooks-example", pullRequest: { number: 225, status: "ready" }, - tabs: [ + sessions: [ { id: "t4", sessionId: "t4", @@ -660,7 +660,7 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(15), branch: "actor-reschedule-endpoint", pullRequest: { number: 4400, status: "ready" }, - tabs: [ + sessions: [ { id: "t5", sessionId: "t5", @@ -794,7 +794,7 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(35), branch: "feat/dynamic-actors", pullRequest: { number: 4395, status: "draft" }, - tabs: [ + sessions: [ { id: "t6", sessionId: "t6", @@ -851,7 +851,7 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(25), branch: "fix-use-full-cloud-run-pool-name", pullRequest: { number: 235, status: "ready" }, - tabs: [ + sessions: [ { id: "t7", sessionId: "t7", @@ -960,7 +960,7 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(50), branch: "fix-guard-support-https-targets", pullRequest: { number: 125, status: "ready" }, - tabs: [ + sessions: [ { id: "t8", sessionId: "t8", @@ -1074,7 +1074,7 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(2 * 24 * 60), branch: "chore-move-compute-gateway-to", pullRequest: { number: 123, status: "ready" }, - tabs: [ + sessions: [ { id: "t9", sessionId: "t9", @@ -1116,7 +1116,7 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(90), branch: "fix/namespace-isolation", pullRequest: null, - tabs: [ + sessions: [ { id: "t10", sessionId: "t10", @@ -1172,9 +1172,9 @@ 
export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(2), branch: "fix/auth-middleware", pullRequest: null, - tabs: [ + sessions: [ { - id: "status-error-tab", + id: "status-error-session", sessionId: "status-error-session", sessionName: "Auth fix", agent: "Claude", @@ -1204,10 +1204,11 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(0), branch: null, pullRequest: null, - tabs: [ + sessions: [ { - id: "status-prov-tab", - sessionId: null, + id: "status-prov-session", + sessionId: "status-prov-session", + sandboxSessionId: null, sessionName: "Session 1", agent: "Claude", model: "claude-sonnet-4", @@ -1263,9 +1264,9 @@ export function buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(1), branch: "refactor/ws-handler", pullRequest: null, - tabs: [ + sessions: [ { - id: "status-run-tab", + id: "status-run-session", sessionId: "status-run-session", sessionName: "WS refactor", agent: "Codex", @@ -1275,7 +1276,7 @@ export function buildInitialTasks(): Task[] { unread: false, created: true, draft: { text: "", attachments: [], updatedAtMs: null }, - transcript: transcriptFromLegacyMessages("status-run-tab", [ + transcript: transcriptFromLegacyMessages("status-run-session", [ { id: "sr1", role: "user", @@ -1297,7 +1298,7 @@ export function buildInitialTasks(): Task[] { /** * Build repos list from the rivet-dev fixture data (scripts/data/rivet-dev.json). * Uses real public repos so the mock sidebar matches what an actual rivet-dev - * workspace would show after a GitHub sync. + * organization would show after a GitHub sync. */ function buildMockRepos(): WorkbenchRepo[] { return rivetDevFixture.repos.map((r) => ({ @@ -1314,7 +1315,7 @@ function repoIdFromFullName(fullName: string): string { /** * Build task entries from open PR fixture data. 
- * Maps to the backend's PR sync behavior (ProjectPrSyncActor) where PRs + * Maps to the backend's PR sync behavior (RepositoryPrSyncActor) where PRs * appear as first-class sidebar items even without an associated task. * Each open PR gets a lightweight task entry so it shows in the sidebar. */ @@ -1339,7 +1340,7 @@ function buildPrTasks(): Task[] { updatedAtMs: new Date(pr.updatedAt).getTime(), branch: pr.headRefName, pullRequest: { number: pr.number, status: pr.draft ? ("draft" as const) : ("ready" as const) }, - tabs: [], + sessions: [], fileChanges: [], diffs: {}, fileTree: [], @@ -1352,15 +1353,15 @@ export function buildInitialMockLayoutViewModel(): TaskWorkbenchSnapshot { const repos = buildMockRepos(); const tasks = [...buildInitialTasks(), ...buildPrTasks()]; return { - workspaceId: "default", + organizationId: "default", repos, - projects: groupWorkbenchProjects(repos, tasks), + repositories: groupWorkbenchRepositories(repos, tasks), tasks, }; } -export function groupWorkbenchProjects(repos: WorkbenchRepo[], tasks: Task[]): WorkbenchProjectSection[] { - const grouped = new Map(); +export function groupWorkbenchRepositories(repos: WorkbenchRepo[], tasks: Task[]): WorkbenchRepositorySection[] { + const grouped = new Map(); for (const repo of repos) { grouped.set(repo.id, { @@ -1385,11 +1386,11 @@ export function groupWorkbenchProjects(repos: WorkbenchRepo[], tasks: Task[]): W } return [...grouped.values()] - .map((project) => ({ - ...project, - tasks: [...project.tasks].sort((a, b) => b.updatedAtMs - a.updatedAtMs), - updatedAtMs: project.tasks.length > 0 ? Math.max(...project.tasks.map((task) => task.updatedAtMs)) : project.updatedAtMs, + .map((repository) => ({ + ...repository, + tasks: [...repository.tasks].sort((a, b) => b.updatedAtMs - a.updatedAtMs), + updatedAtMs: repository.tasks.length > 0 ? 
Math.max(...repository.tasks.map((task) => task.updatedAtMs)) : repository.updatedAtMs, })) - .filter((project) => project.tasks.length > 0) + .filter((repository) => repository.tasks.length > 0) .sort((a, b) => b.updatedAtMs - a.updatedAtMs); } diff --git a/foundry/packages/client/test/e2e/full-integration-e2e.test.ts b/foundry/packages/client/test/e2e/full-integration-e2e.test.ts index bdb7c1e..8446892 100644 --- a/foundry/packages/client/test/e2e/full-integration-e2e.test.ts +++ b/foundry/packages/client/test/e2e/full-integration-e2e.test.ts @@ -2,6 +2,7 @@ import { randomUUID } from "node:crypto"; import { describe, expect, it } from "vitest"; import type { HistoryEvent, RepoOverview } from "@sandbox-agent/foundry-shared"; import { createBackendClient } from "../../src/backend-client.js"; +import { requireImportedRepo } from "./helpers.js"; const RUN_FULL_E2E = process.env.HF_ENABLE_DAEMON_FULL_E2E === "1"; @@ -106,9 +107,9 @@ async function ensureRemoteBranchExists(token: string, fullName: string, branchN } describe("e2e(client): full integration stack workflow", () => { - it.skipIf(!RUN_FULL_E2E)("adds repo, loads branch graph, and executes a stack restack action", { timeout: 8 * 60_000 }, async () => { + it.skipIf(!RUN_FULL_E2E)("uses an imported repo, loads branch graph, and executes a stack restack action", { timeout: 8 * 60_000 }, async () => { const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet"; - const workspaceId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; + const organizationId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO"); const githubToken = requiredEnv("GITHUB_TOKEN"); const { fullName } = parseGithubRepo(repoRemote); @@ -117,56 +118,27 @@ describe("e2e(client): full integration stack workflow", () => { const client = createBackendClient({ endpoint, - defaultWorkspaceId: workspaceId, + defaultOrganizationId: organizationId, }); try { 
await ensureRemoteBranchExists(githubToken, fullName, seededBranch); - const repo = await client.addRepo(workspaceId, repoRemote); + const repo = await requireImportedRepo(client, organizationId, repoRemote); expect(repo.remoteUrl).toBe(normalizedRepoRemote); const overview = await poll( "repo overview includes seeded branch", 90_000, 1_000, - async () => client.getRepoOverview(workspaceId, repo.repoId), + async () => client.getRepoOverview(organizationId, repo.repoId), (value) => value.branches.some((row) => row.branchName === seededBranch), ); - if (!overview.stackAvailable) { - throw new Error( - "git-spice is unavailable for this repo during full integration e2e; set HF_GIT_SPICE_BIN or install git-spice in the backend container", - ); - } - - const stackResult = await client.runRepoStackAction({ - workspaceId, - repoId: repo.repoId, - action: "restack_repo", - }); - expect(stackResult.executed).toBe(true); - expect(stackResult.action).toBe("restack_repo"); - - await poll( - "repo stack action history event", - 60_000, - 1_000, - async () => client.listHistory({ workspaceId, limit: 200 }), - (events) => - events.some((event) => { - if (event.kind !== "repo.stack_action") { - return false; - } - const payload = parseHistoryPayload(event); - return payload.action === "restack_repo"; - }), - ); - - const postActionOverview = await client.getRepoOverview(workspaceId, repo.repoId); + const postActionOverview = await client.getRepoOverview(organizationId, repo.repoId); const seededRow = postActionOverview.branches.find((row) => row.branchName === seededBranch); expect(Boolean(seededRow)).toBe(true); - expect(postActionOverview.fetchedAt).toBeGreaterThan(overview.fetchedAt); + expect(postActionOverview.fetchedAt).toBeGreaterThanOrEqual(overview.fetchedAt); } finally { await githubApi(githubToken, `repos/${fullName}/git/refs/heads/${encodeURIComponent(seededBranch)}`, { method: "DELETE" }).catch(() => {}); } diff --git 
a/foundry/packages/client/test/e2e/github-pr-e2e.test.ts b/foundry/packages/client/test/e2e/github-pr-e2e.test.ts index 8a8b0d3..83101fb 100644 --- a/foundry/packages/client/test/e2e/github-pr-e2e.test.ts +++ b/foundry/packages/client/test/e2e/github-pr-e2e.test.ts @@ -1,6 +1,7 @@ import { describe, expect, it } from "vitest"; import type { TaskRecord, HistoryEvent } from "@sandbox-agent/foundry-shared"; import { createBackendClient } from "../../src/backend-client.js"; +import { requireImportedRepo } from "./helpers.js"; const RUN_E2E = process.env.HF_ENABLE_DAEMON_E2E === "1"; @@ -79,10 +80,10 @@ function parseHistoryPayload(event: HistoryEvent): Record { } } -async function debugDump(client: ReturnType, workspaceId: string, taskId: string): Promise { +async function debugDump(client: ReturnType, organizationId: string, taskId: string): Promise { try { - const task = await client.getTask(workspaceId, taskId); - const history = await client.listHistory({ workspaceId, taskId, limit: 80 }).catch(() => []); + const task = await client.getTask(organizationId, taskId); + const history = await client.listHistory({ organizationId, taskId, limit: 80 }).catch(() => []); const historySummary = history .slice(0, 20) .map((e) => `${new Date(e.createdAt).toISOString()} ${e.kind}`) @@ -91,7 +92,7 @@ async function debugDump(client: ReturnType, workspa let sessionEventsSummary = ""; if (task.activeSandboxId && task.activeSessionId) { const events = await client - .listSandboxSessionEvents(workspaceId, task.providerId, task.activeSandboxId, { + .listSandboxSessionEvents(organizationId, task.sandboxProviderId, task.activeSandboxId, { sessionId: task.activeSessionId, limit: 50, }) @@ -145,7 +146,7 @@ async function githubApi(token: string, path: string, init?: RequestInit): Promi describe("e2e: backend -> sandbox-agent -> git -> PR", () => { it.skipIf(!RUN_E2E)("creates a task, waits for agent to implement, and opens a PR", { timeout: 15 * 60_000 }, async () => { const endpoint = 
process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet"; - const workspaceId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; + const organizationId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO"); const githubToken = requiredEnv("GITHUB_TOKEN"); @@ -155,13 +156,13 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { const client = createBackendClient({ endpoint, - defaultWorkspaceId: workspaceId, + defaultOrganizationId: organizationId, }); - const repo = await client.addRepo(workspaceId, repoRemote); + const repo = await requireImportedRepo(client, organizationId, repoRemote); const created = await client.createTask({ - workspaceId, + organizationId, repoId: repo.repoId, task: [ "E2E test task:", @@ -171,7 +172,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { "4. git push the branch to origin", "5. Stop when done (agent should go idle).", ].join("\n"), - providerId: "local", + sandboxProviderId: "local", explicitTitle: `test(e2e): ${runId}`, explicitBranchName: `e2e/${runId}`, }); @@ -188,7 +189,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { // Cold local sandbox startup can exceed a few minutes on first run. 8 * 60_000, 1_000, - async () => client.getTask(workspaceId, created.taskId), + async () => client.getTask(organizationId, created.taskId), (h) => Boolean(h.title && h.branchName && h.activeSandboxId), (h) => { if (h.status !== lastStatus) { @@ -199,7 +200,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { } }, ).catch(async (err) => { - const dump = await debugDump(client, workspaceId, created.taskId); + const dump = await debugDump(client, organizationId, created.taskId); throw new Error(`${err instanceof Error ? 
err.message : String(err)}\n${dump}`); }); @@ -210,7 +211,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { "task to create active session", 3 * 60_000, 1_500, - async () => client.getTask(workspaceId, created.taskId), + async () => client.getTask(organizationId, created.taskId), (h) => Boolean(h.activeSessionId), (h) => { if (h.status === "error") { @@ -218,7 +219,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { } }, ).catch(async (err) => { - const dump = await debugDump(client, workspaceId, created.taskId); + const dump = await debugDump(client, organizationId, created.taskId); throw new Error(`${err instanceof Error ? err.message : String(err)}\n${dump}`); }); @@ -230,14 +231,14 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { 2_000, async () => ( - await client.listSandboxSessionEvents(workspaceId, withSession.providerId, sandboxId!, { + await client.listSandboxSessionEvents(organizationId, withSession.sandboxProviderId, sandboxId!, { sessionId: sessionId!, limit: 40, }) ).items, (events) => events.length > 0, ).catch(async (err) => { - const dump = await debugDump(client, workspaceId, created.taskId); + const dump = await debugDump(client, organizationId, created.taskId); throw new Error(`${err instanceof Error ? err.message : String(err)}\n${dump}`); }); @@ -245,7 +246,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { "task to reach idle state", 8 * 60_000, 2_000, - async () => client.getTask(workspaceId, created.taskId), + async () => client.getTask(organizationId, created.taskId), (h) => h.status === "idle", (h) => { if (h.status === "error") { @@ -253,7 +254,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { } }, ).catch(async (err) => { - const dump = await debugDump(client, workspaceId, created.taskId); + const dump = await debugDump(client, organizationId, created.taskId); throw new Error(`${err instanceof Error ? 
err.message : String(err)}\n${dump}`); }); @@ -261,11 +262,11 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { "PR creation history event", 3 * 60_000, 2_000, - async () => client.listHistory({ workspaceId, taskId: created.taskId, limit: 200 }), + async () => client.listHistory({ organizationId, taskId: created.taskId, limit: 200 }), (events) => events.some((e) => e.kind === "task.pr_created"), ) .catch(async (err) => { - const dump = await debugDump(client, workspaceId, created.taskId); + const dump = await debugDump(client, organizationId, created.taskId); throw new Error(`${err instanceof Error ? err.message : String(err)}\n${dump}`); }) .then((events) => events.find((e) => e.kind === "task.pr_created")!); @@ -286,32 +287,32 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { expect(prFiles.some((f) => f.filename === expectedFile)).toBe(true); // Close the task and assert the sandbox is released (stopped). - await client.runAction(workspaceId, created.taskId, "archive"); + await client.runAction(organizationId, created.taskId, "archive"); await poll( "task to become archived (session released)", 60_000, 1_000, - async () => client.getTask(workspaceId, created.taskId), + async () => client.getTask(organizationId, created.taskId), (h) => h.status === "archived" && h.activeSessionId === null, ).catch(async (err) => { - const dump = await debugDump(client, workspaceId, created.taskId); + const dump = await debugDump(client, organizationId, created.taskId); throw new Error(`${err instanceof Error ? 
err.message : String(err)}\n${dump}`); }); if (sandboxId) { - await poll<{ providerId: string; sandboxId: string; state: string; at: number }>( + await poll<{ sandboxProviderId: string; sandboxId: string; state: string; at: number }>( "sandbox to stop", 2 * 60_000, 2_000, - async () => client.sandboxProviderState(workspaceId, "local", sandboxId!), + async () => client.sandboxProviderState(organizationId, "local", sandboxId!), (s) => { const st = String(s.state).toLowerCase(); return st.includes("destroyed") || st.includes("stopped") || st.includes("suspended") || st.includes("paused"); }, ).catch(async (err) => { - const dump = await debugDump(client, workspaceId, created.taskId); - const state = await client.sandboxProviderState(workspaceId, "local", sandboxId!).catch(() => null); + const dump = await debugDump(client, organizationId, created.taskId); + const state = await client.sandboxProviderState(organizationId, "local", sandboxId!).catch(() => null); throw new Error(`${err instanceof Error ? err.message : String(err)}\n` + `sandbox state: ${state ? 
state.state : "unknown"}\n` + `${dump}`); }); } diff --git a/foundry/packages/client/test/e2e/helpers.ts b/foundry/packages/client/test/e2e/helpers.ts new file mode 100644 index 0000000..0e15c51 --- /dev/null +++ b/foundry/packages/client/test/e2e/helpers.ts @@ -0,0 +1,84 @@ +import type { RepoRecord } from "@sandbox-agent/foundry-shared"; +import type { BackendClient } from "../../src/backend-client.js"; + +function normalizeRepoSelector(value: string): string { + let normalized = value.trim(); + if (!normalized) { + return ""; + } + + normalized = normalized.replace(/\/+$/, ""); + if (/^[A-Za-z0-9_.-]+\/[A-Za-z0-9_.-]+$/.test(normalized)) { + return `https://github.com/${normalized}.git`; + } + + if (/^(?:www\.)?github\.com\/.+/i.test(normalized)) { + normalized = `https://${normalized.replace(/^www\./i, "")}`; + } + + try { + if (/^https?:\/\//i.test(normalized)) { + const url = new URL(normalized); + const hostname = url.hostname.replace(/^www\./i, ""); + if (hostname.toLowerCase() === "github.com") { + const parts = url.pathname.split("/").filter(Boolean); + if (parts.length >= 2) { + return `${url.protocol}//${hostname}/${parts[0]}/${(parts[1] ?? "").replace(/\.git$/i, "")}.git`; + } + } + url.search = ""; + url.hash = ""; + return url.toString().replace(/\/+$/, ""); + } + } catch { + // Keep the selector as-is for matching below. + } + + return normalized; +} + +function githubRepoFullNameFromSelector(value: string): string | null { + const normalized = normalizeRepoSelector(value); + try { + const url = new URL(normalized); + if (url.hostname.replace(/^www\./i, "").toLowerCase() !== "github.com") { + return null; + } + const parts = url.pathname.replace(/\/+$/, "").split("/").filter(Boolean); + if (parts.length < 2) { + return null; + } + return `${parts[0]}/${(parts[1] ?? 
"").replace(/\.git$/i, "")}`; + } catch { + return null; + } +} + +export async function requireImportedRepo(client: BackendClient, organizationId: string, repoSelector: string): Promise { + const selector = repoSelector.trim(); + if (!selector) { + throw new Error("Missing repo selector"); + } + + const normalizedSelector = normalizeRepoSelector(selector); + const selectorFullName = githubRepoFullNameFromSelector(selector); + const repos = await client.listRepos(organizationId); + const match = repos.find((repo) => { + if (repo.repoId === selector) { + return true; + } + if (normalizeRepoSelector(repo.remoteUrl) === normalizedSelector) { + return true; + } + const repoFullName = githubRepoFullNameFromSelector(repo.remoteUrl); + return Boolean(selectorFullName && repoFullName && repoFullName === selectorFullName); + }); + + if (!match) { + throw new Error( + `Repo not available in organization ${organizationId}: ${repoSelector}. Create it in GitHub first, then sync repos in Foundry before running this test.`, + ); + } + + return match; +} diff --git a/foundry/packages/client/test/e2e/workbench-e2e.test.ts b/foundry/packages/client/test/e2e/workbench-e2e.test.ts index 11f092b..5442795 100644 --- a/foundry/packages/client/test/e2e/workbench-e2e.test.ts +++ b/foundry/packages/client/test/e2e/workbench-e2e.test.ts @@ -1,6 +1,7 @@ import { describe, expect, it } from "vitest"; -import type { TaskWorkbenchSnapshot, WorkbenchAgentTab, WorkbenchTask, WorkbenchModelId, WorkbenchTranscriptEvent } from "@sandbox-agent/foundry-shared"; +import type { TaskWorkbenchSnapshot, WorkbenchSession, WorkbenchTask, WorkbenchModelId, WorkbenchTranscriptEvent } from "@sandbox-agent/foundry-shared"; import { createBackendClient } from "../../src/backend-client.js"; +import { requireImportedRepo } from "./helpers.js"; const RUN_WORKBENCH_E2E = process.env.HF_ENABLE_DAEMON_WORKBENCH_E2E === "1"; @@ -57,10 +58,10 @@ function findTask(snapshot: TaskWorkbenchSnapshot, taskId: string): 
WorkbenchTas return task; } -function findTab(task: WorkbenchTask, tabId: string): WorkbenchAgentTab { - const tab = task.tabs.find((candidate) => candidate.id === tabId); +function findTab(task: WorkbenchTask, sessionId: string): WorkbenchSession { + const tab = task.sessions.find((candidate) => candidate.id === sessionId); if (!tab) { - throw new Error(`tab ${tabId} missing from task ${task.id}`); + throw new Error(`tab ${sessionId} missing from task ${task.id}`); } return tab; } @@ -135,171 +136,175 @@ function transcriptIncludesAgentText(transcript: WorkbenchTranscriptEvent[], exp } describe("e2e(client): workbench flows", () => { - it.skipIf(!RUN_WORKBENCH_E2E)("creates a task, adds sessions, exchanges messages, and manages workbench state", { timeout: 20 * 60_000 }, async () => { - const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet"; - const workspaceId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; - const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO"); - const model = workbenchModelEnv("HF_E2E_MODEL", "gpt-5.3-codex"); - const runId = `wb-${Date.now().toString(36)}`; - const expectedFile = `${runId}.txt`; - const expectedInitialReply = `WORKBENCH_READY_${runId}`; - const expectedReply = `WORKBENCH_ACK_${runId}`; + it.skipIf(!RUN_WORKBENCH_E2E)( + "creates a task from an imported repo, adds sessions, exchanges messages, and manages workbench state", + { timeout: 20 * 60_000 }, + async () => { + const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet"; + const organizationId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; + const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO"); + const model = workbenchModelEnv("HF_E2E_MODEL", "gpt-5.3-codex"); + const runId = `wb-${Date.now().toString(36)}`; + const expectedFile = `${runId}.txt`; + const expectedInitialReply = `WORKBENCH_READY_${runId}`; + const expectedReply = `WORKBENCH_ACK_${runId}`; - const client = 
createBackendClient({ - endpoint, - defaultWorkspaceId: workspaceId, - }); + const client = createBackendClient({ + endpoint, + defaultOrganizationId: organizationId, + }); - const repo = await client.addRepo(workspaceId, repoRemote); - const created = await client.createWorkbenchTask(workspaceId, { - repoId: repo.repoId, - title: `Workbench E2E ${runId}`, - branch: `e2e/${runId}`, - model, - task: `Reply with exactly: ${expectedInitialReply}`, - }); + const repo = await requireImportedRepo(client, organizationId, repoRemote); + const created = await client.createWorkbenchTask(organizationId, { + repoId: repo.repoId, + title: `Workbench E2E ${runId}`, + branch: `e2e/${runId}`, + model, + task: `Reply with exactly: ${expectedInitialReply}`, + }); - const provisioned = await poll( - "task provisioning", - 12 * 60_000, - 2_000, - async () => findTask(await client.getWorkbench(workspaceId), created.taskId), - (task) => task.branch === `e2e/${runId}` && task.tabs.length > 0, - ); + const provisioned = await poll( + "task provisioning", + 12 * 60_000, + 2_000, + async () => findTask(await client.getWorkbench(organizationId), created.taskId), + (task) => task.branch === `e2e/${runId}` && task.sessions.length > 0, + ); - const primaryTab = provisioned.tabs[0]!; + const primaryTab = provisioned.sessions[0]!; - const initialCompleted = await poll( - "initial agent response", - 12 * 60_000, - 2_000, - async () => findTask(await client.getWorkbench(workspaceId), created.taskId), - (task) => { - const tab = findTab(task, primaryTab.id); - return task.status === "idle" && tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, expectedInitialReply); - }, - ); - - expect(findTab(initialCompleted, primaryTab.id).sessionId).toBeTruthy(); - expect(transcriptIncludesAgentText(findTab(initialCompleted, primaryTab.id).transcript, expectedInitialReply)).toBe(true); - - await client.renameWorkbenchTask(workspaceId, { - taskId: created.taskId, - value: `Workbench E2E ${runId} 
Renamed`, - }); - await client.renameWorkbenchSession(workspaceId, { - taskId: created.taskId, - tabId: primaryTab.id, - title: "Primary Session", - }); - - const secondTab = await client.createWorkbenchSession(workspaceId, { - taskId: created.taskId, - model, - }); - - await client.renameWorkbenchSession(workspaceId, { - taskId: created.taskId, - tabId: secondTab.tabId, - title: "Follow-up Session", - }); - - await client.updateWorkbenchDraft(workspaceId, { - taskId: created.taskId, - tabId: secondTab.tabId, - text: [ - `Create a file named ${expectedFile} in the repo root.`, - `Write exactly this single line into the file: ${runId}`, - `Then reply with exactly: ${expectedReply}`, - ].join("\n"), - attachments: [ - { - id: `${expectedFile}:1`, - filePath: expectedFile, - lineNumber: 1, - lineContent: runId, + const initialCompleted = await poll( + "initial agent response", + 12 * 60_000, + 2_000, + async () => findTask(await client.getWorkbench(organizationId), created.taskId), + (task) => { + const tab = findTab(task, primaryTab.id); + return task.status === "idle" && tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, expectedInitialReply); }, - ], - }); + ); - const drafted = findTask(await client.getWorkbench(workspaceId), created.taskId); - expect(findTab(drafted, secondTab.tabId).draft.text).toContain(expectedReply); - expect(findTab(drafted, secondTab.tabId).draft.attachments).toHaveLength(1); + expect(findTab(initialCompleted, primaryTab.id).sessionId).toBeTruthy(); + expect(transcriptIncludesAgentText(findTab(initialCompleted, primaryTab.id).transcript, expectedInitialReply)).toBe(true); - await client.sendWorkbenchMessage(workspaceId, { - taskId: created.taskId, - tabId: secondTab.tabId, - text: [ - `Create a file named ${expectedFile} in the repo root.`, - `Write exactly this single line into the file: ${runId}`, - `Then reply with exactly: ${expectedReply}`, - ].join("\n"), - attachments: [ - { - id: `${expectedFile}:1`, - filePath: 
expectedFile, - lineNumber: 1, - lineContent: runId, + await client.renameWorkbenchTask(organizationId, { + taskId: created.taskId, + value: `Workbench E2E ${runId} Renamed`, + }); + await client.renameWorkbenchSession(organizationId, { + taskId: created.taskId, + sessionId: primaryTab.id, + title: "Primary Session", + }); + + const secondTab = await client.createWorkbenchSession(organizationId, { + taskId: created.taskId, + model, + }); + + await client.renameWorkbenchSession(organizationId, { + taskId: created.taskId, + sessionId: secondTab.sessionId, + title: "Follow-up Session", + }); + + await client.updateWorkbenchDraft(organizationId, { + taskId: created.taskId, + sessionId: secondTab.sessionId, + text: [ + `Create a file named ${expectedFile} in the repo root.`, + `Write exactly this single line into the file: ${runId}`, + `Then reply with exactly: ${expectedReply}`, + ].join("\n"), + attachments: [ + { + id: `${expectedFile}:1`, + filePath: expectedFile, + lineNumber: 1, + lineContent: runId, + }, + ], + }); + + const drafted = findTask(await client.getWorkbench(organizationId), created.taskId); + expect(findTab(drafted, secondTab.sessionId).draft.text).toContain(expectedReply); + expect(findTab(drafted, secondTab.sessionId).draft.attachments).toHaveLength(1); + + await client.sendWorkbenchMessage(organizationId, { + taskId: created.taskId, + sessionId: secondTab.sessionId, + text: [ + `Create a file named ${expectedFile} in the repo root.`, + `Write exactly this single line into the file: ${runId}`, + `Then reply with exactly: ${expectedReply}`, + ].join("\n"), + attachments: [ + { + id: `${expectedFile}:1`, + filePath: expectedFile, + lineNumber: 1, + lineContent: runId, + }, + ], + }); + + const withSecondReply = await poll( + "follow-up session response", + 10 * 60_000, + 2_000, + async () => findTask(await client.getWorkbench(organizationId), created.taskId), + (task) => { + const tab = findTab(task, secondTab.sessionId); + return ( + tab.status === 
"idle" && transcriptIncludesAgentText(tab.transcript, expectedReply) && task.fileChanges.some((file) => file.path === expectedFile) + ); }, - ], - }); + ); - const withSecondReply = await poll( - "follow-up session response", - 10 * 60_000, - 2_000, - async () => findTask(await client.getWorkbench(workspaceId), created.taskId), - (task) => { - const tab = findTab(task, secondTab.tabId); - return ( - tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, expectedReply) && task.fileChanges.some((file) => file.path === expectedFile) - ); - }, - ); + const secondTranscript = findTab(withSecondReply, secondTab.sessionId).transcript; + expect(transcriptIncludesAgentText(secondTranscript, expectedReply)).toBe(true); + expect(withSecondReply.fileChanges.some((file) => file.path === expectedFile)).toBe(true); - const secondTranscript = findTab(withSecondReply, secondTab.tabId).transcript; - expect(transcriptIncludesAgentText(secondTranscript, expectedReply)).toBe(true); - expect(withSecondReply.fileChanges.some((file) => file.path === expectedFile)).toBe(true); + await client.setWorkbenchSessionUnread(organizationId, { + taskId: created.taskId, + sessionId: secondTab.sessionId, + unread: false, + }); + await client.markWorkbenchUnread(organizationId, { taskId: created.taskId }); - await client.setWorkbenchSessionUnread(workspaceId, { - taskId: created.taskId, - tabId: secondTab.tabId, - unread: false, - }); - await client.markWorkbenchUnread(workspaceId, { taskId: created.taskId }); + const unreadSnapshot = findTask(await client.getWorkbench(organizationId), created.taskId); + expect(unreadSnapshot.sessions.some((tab) => tab.unread)).toBe(true); - const unreadSnapshot = findTask(await client.getWorkbench(workspaceId), created.taskId); - expect(unreadSnapshot.tabs.some((tab) => tab.unread)).toBe(true); + await client.closeWorkbenchSession(organizationId, { + taskId: created.taskId, + sessionId: secondTab.sessionId, + }); - await 
client.closeWorkbenchSession(workspaceId, { - taskId: created.taskId, - tabId: secondTab.tabId, - }); + const closedSnapshot = await poll( + "secondary session closed", + 30_000, + 1_000, + async () => findTask(await client.getWorkbench(organizationId), created.taskId), + (task) => !task.sessions.some((tab) => tab.id === secondTab.sessionId), + ); + expect(closedSnapshot.sessions).toHaveLength(1); - const closedSnapshot = await poll( - "secondary session closed", - 30_000, - 1_000, - async () => findTask(await client.getWorkbench(workspaceId), created.taskId), - (task) => !task.tabs.some((tab) => tab.id === secondTab.tabId), - ); - expect(closedSnapshot.tabs).toHaveLength(1); + await client.revertWorkbenchFile(organizationId, { + taskId: created.taskId, + path: expectedFile, + }); - await client.revertWorkbenchFile(workspaceId, { - taskId: created.taskId, - path: expectedFile, - }); + const revertedSnapshot = await poll( + "file revert reflected in workbench", + 30_000, + 1_000, + async () => findTask(await client.getWorkbench(organizationId), created.taskId), + (task) => !task.fileChanges.some((file) => file.path === expectedFile), + ); - const revertedSnapshot = await poll( - "file revert reflected in workbench", - 30_000, - 1_000, - async () => findTask(await client.getWorkbench(workspaceId), created.taskId), - (task) => !task.fileChanges.some((file) => file.path === expectedFile), - ); - - expect(revertedSnapshot.fileChanges.some((file) => file.path === expectedFile)).toBe(false); - expect(revertedSnapshot.title).toBe(`Workbench E2E ${runId} Renamed`); - expect(findTab(revertedSnapshot, primaryTab.id).sessionName).toBe("Primary Session"); - }); + expect(revertedSnapshot.fileChanges.some((file) => file.path === expectedFile)).toBe(false); + expect(revertedSnapshot.title).toBe(`Workbench E2E ${runId} Renamed`); + expect(findTab(revertedSnapshot, primaryTab.id).sessionName).toBe("Primary Session"); + }, + ); }); diff --git 
a/foundry/packages/client/test/e2e/workbench-load-e2e.test.ts b/foundry/packages/client/test/e2e/workbench-load-e2e.test.ts index 363fa66..b358b80 100644 --- a/foundry/packages/client/test/e2e/workbench-load-e2e.test.ts +++ b/foundry/packages/client/test/e2e/workbench-load-e2e.test.ts @@ -2,12 +2,13 @@ import { describe, expect, it } from "vitest"; import { createFoundryLogger, type TaskWorkbenchSnapshot, - type WorkbenchAgentTab, + type WorkbenchSession, type WorkbenchTask, type WorkbenchModelId, type WorkbenchTranscriptEvent, } from "@sandbox-agent/foundry-shared"; import { createBackendClient } from "../../src/backend-client.js"; +import { requireImportedRepo } from "./helpers.js"; const RUN_WORKBENCH_LOAD_E2E = process.env.HF_ENABLE_DAEMON_WORKBENCH_LOAD_E2E === "1"; const logger = createFoundryLogger({ @@ -79,10 +80,10 @@ function findTask(snapshot: TaskWorkbenchSnapshot, taskId: string): WorkbenchTas return task; } -function findTab(task: WorkbenchTask, tabId: string): WorkbenchAgentTab { - const tab = task.tabs.find((candidate) => candidate.id === tabId); +function findTab(task: WorkbenchTask, sessionId: string): WorkbenchSession { + const tab = task.sessions.find((candidate) => candidate.id === sessionId); if (!tab) { - throw new Error(`tab ${tabId} missing from task ${task.id}`); + throw new Error(`tab ${sessionId} missing from task ${task.id}`); } return tab; } @@ -151,7 +152,7 @@ function average(values: number[]): number { async function measureWorkbenchSnapshot( client: ReturnType, - workspaceId: string, + organizationId: string, iterations: number, ): Promise<{ avgMs: number; @@ -166,19 +167,19 @@ async function measureWorkbenchSnapshot( for (let index = 0; index < iterations; index += 1) { const startedAt = performance.now(); - snapshot = await client.getWorkbench(workspaceId); + snapshot = await client.getWorkbench(organizationId); durations.push(performance.now() - startedAt); } const finalSnapshot = snapshot ?? 
{ - workspaceId, + organizationId, repos: [], - projects: [], + repositories: [], tasks: [], }; const payloadBytes = Buffer.byteLength(JSON.stringify(finalSnapshot), "utf8"); - const tabCount = finalSnapshot.tasks.reduce((sum, task) => sum + task.tabs.length, 0); - const transcriptEventCount = finalSnapshot.tasks.reduce((sum, task) => sum + task.tabs.reduce((tabSum, tab) => tabSum + tab.transcript.length, 0), 0); + const tabCount = finalSnapshot.tasks.reduce((sum, task) => sum + task.sessions.length, 0); + const transcriptEventCount = finalSnapshot.tasks.reduce((sum, task) => sum + task.sessions.reduce((tabSum, tab) => tabSum + tab.transcript.length, 0), 0); return { avgMs: Math.round(average(durations)), @@ -193,7 +194,7 @@ async function measureWorkbenchSnapshot( describe("e2e(client): workbench load", () => { it.skipIf(!RUN_WORKBENCH_LOAD_E2E)("runs a simple sequential load profile against the real backend", { timeout: 30 * 60_000 }, async () => { const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet"; - const workspaceId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; + const organizationId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO"); const model = workbenchModelEnv("HF_E2E_MODEL", "gpt-5.3-codex"); const taskCount = intEnv("HF_LOAD_TASK_COUNT", 3); @@ -202,10 +203,10 @@ describe("e2e(client): workbench load", () => { const client = createBackendClient({ endpoint, - defaultWorkspaceId: workspaceId, + defaultOrganizationId: organizationId, }); - const repo = await client.addRepo(workspaceId, repoRemote); + const repo = await requireImportedRepo(client, organizationId, repoRemote); const createTaskLatencies: number[] = []; const provisionLatencies: number[] = []; const createSessionLatencies: number[] = []; @@ -219,14 +220,14 @@ describe("e2e(client): workbench load", () => { transcriptEventCount: number; }> = []; - snapshotSeries.push(await 
measureWorkbenchSnapshot(client, workspaceId, 2)); + snapshotSeries.push(await measureWorkbenchSnapshot(client, organizationId, 2)); for (let taskIndex = 0; taskIndex < taskCount; taskIndex += 1) { const runId = `load-${taskIndex}-${Date.now().toString(36)}`; const initialReply = `LOAD_INIT_${runId}`; const createStartedAt = performance.now(); - const created = await client.createWorkbenchTask(workspaceId, { + const created = await client.createWorkbenchTask(organizationId, { repoId: repo.repoId, title: `Workbench Load ${runId}`, branch: `load/${runId}`, @@ -240,30 +241,30 @@ describe("e2e(client): workbench load", () => { `task ${runId} provisioning`, 12 * 60_000, pollIntervalMs, - async () => findTask(await client.getWorkbench(workspaceId), created.taskId), + async () => findTask(await client.getWorkbench(organizationId), created.taskId), (task) => { - const tab = task.tabs[0]; + const tab = task.sessions[0]; return Boolean(tab && task.status === "idle" && tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, initialReply)); }, ); provisionLatencies.push(performance.now() - provisionStartedAt); - expect(provisioned.tabs.length).toBeGreaterThan(0); - const primaryTab = provisioned.tabs[0]!; + expect(provisioned.sessions.length).toBeGreaterThan(0); + const primaryTab = provisioned.sessions[0]!; expect(transcriptIncludesAgentText(primaryTab.transcript, initialReply)).toBe(true); for (let sessionIndex = 0; sessionIndex < extraSessionCount; sessionIndex += 1) { const expectedReply = `LOAD_REPLY_${runId}_${sessionIndex}`; const createSessionStartedAt = performance.now(); - const createdSession = await client.createWorkbenchSession(workspaceId, { + const createdSession = await client.createWorkbenchSession(organizationId, { taskId: created.taskId, model, }); createSessionLatencies.push(performance.now() - createSessionStartedAt); - await client.sendWorkbenchMessage(workspaceId, { + await client.sendWorkbenchMessage(organizationId, { taskId: 
created.taskId, - tabId: createdSession.tabId, + sessionId: createdSession.sessionId, text: `Run pwd in the repo, then reply with exactly: ${expectedReply}`, attachments: [], }); @@ -273,18 +274,18 @@ describe("e2e(client): workbench load", () => { `task ${runId} session ${sessionIndex} reply`, 10 * 60_000, pollIntervalMs, - async () => findTask(await client.getWorkbench(workspaceId), created.taskId), + async () => findTask(await client.getWorkbench(organizationId), created.taskId), (task) => { - const tab = findTab(task, createdSession.tabId); + const tab = findTab(task, createdSession.sessionId); return tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, expectedReply); }, ); messageRoundTripLatencies.push(performance.now() - messageStartedAt); - expect(transcriptIncludesAgentText(findTab(withReply, createdSession.tabId).transcript, expectedReply)).toBe(true); + expect(transcriptIncludesAgentText(findTab(withReply, createdSession.sessionId).transcript, expectedReply)).toBe(true); } - const snapshotMetrics = await measureWorkbenchSnapshot(client, workspaceId, 3); + const snapshotMetrics = await measureWorkbenchSnapshot(client, organizationId, 3); snapshotSeries.push(snapshotMetrics); logger.info( { diff --git a/foundry/packages/client/test/keys.test.ts b/foundry/packages/client/test/keys.test.ts index 8f1f735..9bd6477 100644 --- a/foundry/packages/client/test/keys.test.ts +++ b/foundry/packages/client/test/keys.test.ts @@ -1,20 +1,18 @@ import { describe, expect, it } from "vitest"; -import { taskKey, historyKey, projectBranchSyncKey, projectKey, projectPrSyncKey, taskSandboxKey, workspaceKey } from "../src/keys.js"; +import { historyKey, organizationKey, repositoryKey, taskKey, taskSandboxKey } from "../src/keys.js"; describe("actor keys", () => { - it("prefixes every key with workspace namespace", () => { + it("prefixes every key with organization namespace", () => { const keys = [ - workspaceKey("default"), - projectKey("default", "repo"), + 
organizationKey("default"), + repositoryKey("default", "repo"), taskKey("default", "repo", "task"), taskSandboxKey("default", "sbx"), historyKey("default", "repo"), - projectPrSyncKey("default", "repo"), - projectBranchSyncKey("default", "repo"), ]; for (const key of keys) { - expect(key[0]).toBe("ws"); + expect(key[0]).toBe("org"); expect(key[1]).toBe("default"); } }); diff --git a/foundry/packages/client/test/interest-manager.test.ts b/foundry/packages/client/test/subscription-manager.test.ts similarity index 60% rename from foundry/packages/client/test/interest-manager.test.ts rename to foundry/packages/client/test/subscription-manager.test.ts index eb39f6c..9908113 100644 --- a/foundry/packages/client/test/interest-manager.test.ts +++ b/foundry/packages/client/test/subscription-manager.test.ts @@ -1,7 +1,7 @@ import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; -import type { WorkspaceEvent, WorkspaceSummarySnapshot } from "@sandbox-agent/foundry-shared"; +import type { OrganizationEvent, OrganizationSummarySnapshot } from "@sandbox-agent/foundry-shared"; import type { ActorConn, BackendClient } from "../src/backend-client.js"; -import { RemoteInterestManager } from "../src/interest/remote-manager.js"; +import { RemoteSubscriptionManager } from "../src/subscription/remote-manager.js"; class FakeActorConn implements ActorConn { private readonly listeners = new Map void>>(); @@ -47,9 +47,9 @@ class FakeActorConn implements ActorConn { } } -function workspaceSnapshot(): WorkspaceSummarySnapshot { +function organizationSnapshot(): OrganizationSummarySnapshot { return { - workspaceId: "ws-1", + organizationId: "org-1", repos: [{ id: "repo-1", label: "repo-1", taskCount: 1, latestActivityMs: 10 }], taskSummaries: [ { @@ -68,10 +68,10 @@ function workspaceSnapshot(): WorkspaceSummarySnapshot { }; } -function createBackend(conn: FakeActorConn, snapshot: WorkspaceSummarySnapshot): BackendClient { +function createBackend(conn: FakeActorConn, 
snapshot: OrganizationSummarySnapshot): BackendClient { return { - connectWorkspace: vi.fn(async () => conn), - getWorkspaceSummary: vi.fn(async () => snapshot), + connectOrganization: vi.fn(async () => conn), + getOrganizationSummary: vi.fn(async () => snapshot), } as unknown as BackendClient; } @@ -80,7 +80,7 @@ async function flushAsyncWork(): Promise { await Promise.resolve(); } -describe("RemoteInterestManager", () => { +describe("RemoteSubscriptionManager", () => { beforeEach(() => { vi.useFakeTimers(); }); @@ -91,30 +91,30 @@ describe("RemoteInterestManager", () => { it("shares one connection per topic key and applies incoming events", async () => { const conn = new FakeActorConn(); - const backend = createBackend(conn, workspaceSnapshot()); - const manager = new RemoteInterestManager(backend); - const params = { workspaceId: "ws-1" } as const; + const backend = createBackend(conn, organizationSnapshot()); + const manager = new RemoteSubscriptionManager(backend); + const params = { organizationId: "org-1" } as const; const listenerA = vi.fn(); const listenerB = vi.fn(); - const unsubscribeA = manager.subscribe("workspace", params, listenerA); - const unsubscribeB = manager.subscribe("workspace", params, listenerB); + const unsubscribeA = manager.subscribe("organization", params, listenerA); + const unsubscribeB = manager.subscribe("organization", params, listenerB); await flushAsyncWork(); - expect(backend.connectWorkspace).toHaveBeenCalledTimes(1); - expect(backend.getWorkspaceSummary).toHaveBeenCalledTimes(1); - expect(manager.getStatus("workspace", params)).toBe("connected"); - expect(manager.getSnapshot("workspace", params)?.taskSummaries[0]?.title).toBe("Initial task"); + expect(backend.connectOrganization).toHaveBeenCalledTimes(1); + expect(backend.getOrganizationSummary).toHaveBeenCalledTimes(1); + expect(manager.getStatus("organization", params)).toBe("connected"); + expect(manager.getSnapshot("organization", 
params)?.taskSummaries[0]?.title).toBe("Initial task"); expect(manager.listDebugTopics()).toEqual([ expect.objectContaining({ - topicKey: "workspace", - cacheKey: "workspace:ws-1", + topicKey: "organization", + cacheKey: "organization:org-1", listenerCount: 2, status: "connected", }), ]); - conn.emit("workspaceUpdated", { + conn.emit("organizationUpdated", { type: "taskSummaryUpdated", taskSummary: { id: "task-1", @@ -127,9 +127,9 @@ describe("RemoteInterestManager", () => { pullRequest: null, sessionsSummary: [], }, - } satisfies WorkspaceEvent); + } satisfies OrganizationEvent); - expect(manager.getSnapshot("workspace", params)?.taskSummaries[0]?.title).toBe("Updated task"); + expect(manager.getSnapshot("organization", params)?.taskSummaries[0]?.title).toBe("Updated task"); expect(listenerA).toHaveBeenCalled(); expect(listenerB).toHaveBeenCalled(); expect(manager.listDebugTopics()[0]?.lastRefreshAt).toEqual(expect.any(Number)); @@ -141,21 +141,21 @@ describe("RemoteInterestManager", () => { it("keeps a topic warm during the grace period and tears it down afterwards", async () => { const conn = new FakeActorConn(); - const backend = createBackend(conn, workspaceSnapshot()); - const manager = new RemoteInterestManager(backend); - const params = { workspaceId: "ws-1" } as const; + const backend = createBackend(conn, organizationSnapshot()); + const manager = new RemoteSubscriptionManager(backend); + const params = { organizationId: "org-1" } as const; - const unsubscribeA = manager.subscribe("workspace", params, () => {}); + const unsubscribeA = manager.subscribe("organization", params, () => {}); await flushAsyncWork(); unsubscribeA(); vi.advanceTimersByTime(29_000); expect(manager.listDebugTopics()).toEqual([]); - const unsubscribeB = manager.subscribe("workspace", params, () => {}); + const unsubscribeB = manager.subscribe("organization", params, () => {}); await flushAsyncWork(); - expect(backend.connectWorkspace).toHaveBeenCalledTimes(1); + 
expect(backend.connectOrganization).toHaveBeenCalledTimes(1); expect(conn.disposeCount).toBe(0); unsubscribeB(); @@ -163,21 +163,21 @@ describe("RemoteInterestManager", () => { vi.advanceTimersByTime(30_000); expect(conn.disposeCount).toBe(1); - expect(manager.getSnapshot("workspace", params)).toBeUndefined(); + expect(manager.getSnapshot("organization", params)).toBeUndefined(); }); it("surfaces connection errors to subscribers", async () => { const conn = new FakeActorConn(); - const backend = createBackend(conn, workspaceSnapshot()); - const manager = new RemoteInterestManager(backend); - const params = { workspaceId: "ws-1" } as const; + const backend = createBackend(conn, organizationSnapshot()); + const manager = new RemoteSubscriptionManager(backend); + const params = { organizationId: "org-1" } as const; - manager.subscribe("workspace", params, () => {}); + manager.subscribe("organization", params, () => {}); await flushAsyncWork(); conn.emitError(new Error("socket dropped")); - expect(manager.getStatus("workspace", params)).toBe("error"); - expect(manager.getError("workspace", params)?.message).toBe("socket dropped"); + expect(manager.getStatus("organization", params)).toBe("error"); + expect(manager.getError("organization", params)?.message).toBe("socket dropped"); }); }); diff --git a/foundry/packages/client/test/view-model.test.ts b/foundry/packages/client/test/view-model.test.ts index 4767b8f..b494135 100644 --- a/foundry/packages/client/test/view-model.test.ts +++ b/foundry/packages/client/test/view-model.test.ts @@ -3,14 +3,14 @@ import type { TaskRecord } from "@sandbox-agent/foundry-shared"; import { filterTasks, formatRelativeAge, fuzzyMatch, summarizeTasks } from "../src/view-model.js"; const sample: TaskRecord = { - workspaceId: "default", + organizationId: "default", repoId: "repo-a", repoRemote: "https://example.com/repo-a.git", taskId: "task-1", branchName: "feature/test", title: "Test Title", task: "Do test", - providerId: "local", + 
sandboxProviderId: "local", status: "running", statusMessage: null, activeSandboxId: "sandbox-1", @@ -18,7 +18,7 @@ const sample: TaskRecord = { sandboxes: [ { sandboxId: "sandbox-1", - providerId: "local", + sandboxProviderId: "local", sandboxActorId: null, switchTarget: "sandbox://local/sandbox-1", cwd: null, @@ -59,7 +59,7 @@ describe("search helpers", () => { }, ]; expect(filterTasks(rows, "doc")).toHaveLength(1); - expect(filterTasks(rows, "h2")).toHaveLength(1); + expect(filterTasks(rows, "intro")).toHaveLength(1); expect(filterTasks(rows, "test")).toHaveLength(2); }); }); @@ -73,8 +73,8 @@ describe("summary helpers", () => { it("summarizes by status and provider", () => { const rows: TaskRecord[] = [ sample, - { ...sample, taskId: "task-2", status: "idle", providerId: "local" }, - { ...sample, taskId: "task-3", status: "error", providerId: "local" }, + { ...sample, taskId: "task-2", status: "idle", sandboxProviderId: "local" }, + { ...sample, taskId: "task-3", status: "error", sandboxProviderId: "local" }, ]; const summary = summarizeTasks(rows); diff --git a/foundry/packages/desktop/src-tauri/gen/schemas/acl-manifests.json b/foundry/packages/desktop/src-tauri/gen/schemas/acl-manifests.json index 86cdb1f..6844932 100644 --- a/foundry/packages/desktop/src-tauri/gen/schemas/acl-manifests.json +++ b/foundry/packages/desktop/src-tauri/gen/schemas/acl-manifests.json @@ -1 +1,1922 @@ -{"core":{"default_permission":{"identifier":"default","description":"Default core plugins set.","permissions":["core:path:default","core:event:default","core:window:default","core:webview:default","core:app:default","core:image:default","core:resources:default","core:menu:default","core:tray:default"]},"permissions":{},"permission_sets":{},"global_scope_schema":null},"core:app":{"default_permission":{"identifier":"default","description":"Default permissions for the 
plugin.","permissions":["allow-version","allow-name","allow-tauri-version","allow-identifier","allow-bundle-type","allow-register-listener","allow-remove-listener"]},"permissions":{"allow-app-hide":{"identifier":"allow-app-hide","description":"Enables the app_hide command without any pre-configured scope.","commands":{"allow":["app_hide"],"deny":[]}},"allow-app-show":{"identifier":"allow-app-show","description":"Enables the app_show command without any pre-configured scope.","commands":{"allow":["app_show"],"deny":[]}},"allow-bundle-type":{"identifier":"allow-bundle-type","description":"Enables the bundle_type command without any pre-configured scope.","commands":{"allow":["bundle_type"],"deny":[]}},"allow-default-window-icon":{"identifier":"allow-default-window-icon","description":"Enables the default_window_icon command without any pre-configured scope.","commands":{"allow":["default_window_icon"],"deny":[]}},"allow-fetch-data-store-identifiers":{"identifier":"allow-fetch-data-store-identifiers","description":"Enables the fetch_data_store_identifiers command without any pre-configured scope.","commands":{"allow":["fetch_data_store_identifiers"],"deny":[]}},"allow-identifier":{"identifier":"allow-identifier","description":"Enables the identifier command without any pre-configured scope.","commands":{"allow":["identifier"],"deny":[]}},"allow-name":{"identifier":"allow-name","description":"Enables the name command without any pre-configured scope.","commands":{"allow":["name"],"deny":[]}},"allow-register-listener":{"identifier":"allow-register-listener","description":"Enables the register_listener command without any pre-configured scope.","commands":{"allow":["register_listener"],"deny":[]}},"allow-remove-data-store":{"identifier":"allow-remove-data-store","description":"Enables the remove_data_store command without any pre-configured 
scope.","commands":{"allow":["remove_data_store"],"deny":[]}},"allow-remove-listener":{"identifier":"allow-remove-listener","description":"Enables the remove_listener command without any pre-configured scope.","commands":{"allow":["remove_listener"],"deny":[]}},"allow-set-app-theme":{"identifier":"allow-set-app-theme","description":"Enables the set_app_theme command without any pre-configured scope.","commands":{"allow":["set_app_theme"],"deny":[]}},"allow-set-dock-visibility":{"identifier":"allow-set-dock-visibility","description":"Enables the set_dock_visibility command without any pre-configured scope.","commands":{"allow":["set_dock_visibility"],"deny":[]}},"allow-tauri-version":{"identifier":"allow-tauri-version","description":"Enables the tauri_version command without any pre-configured scope.","commands":{"allow":["tauri_version"],"deny":[]}},"allow-version":{"identifier":"allow-version","description":"Enables the version command without any pre-configured scope.","commands":{"allow":["version"],"deny":[]}},"deny-app-hide":{"identifier":"deny-app-hide","description":"Denies the app_hide command without any pre-configured scope.","commands":{"allow":[],"deny":["app_hide"]}},"deny-app-show":{"identifier":"deny-app-show","description":"Denies the app_show command without any pre-configured scope.","commands":{"allow":[],"deny":["app_show"]}},"deny-bundle-type":{"identifier":"deny-bundle-type","description":"Denies the bundle_type command without any pre-configured scope.","commands":{"allow":[],"deny":["bundle_type"]}},"deny-default-window-icon":{"identifier":"deny-default-window-icon","description":"Denies the default_window_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["default_window_icon"]}},"deny-fetch-data-store-identifiers":{"identifier":"deny-fetch-data-store-identifiers","description":"Denies the fetch_data_store_identifiers command without any pre-configured 
scope.","commands":{"allow":[],"deny":["fetch_data_store_identifiers"]}},"deny-identifier":{"identifier":"deny-identifier","description":"Denies the identifier command without any pre-configured scope.","commands":{"allow":[],"deny":["identifier"]}},"deny-name":{"identifier":"deny-name","description":"Denies the name command without any pre-configured scope.","commands":{"allow":[],"deny":["name"]}},"deny-register-listener":{"identifier":"deny-register-listener","description":"Denies the register_listener command without any pre-configured scope.","commands":{"allow":[],"deny":["register_listener"]}},"deny-remove-data-store":{"identifier":"deny-remove-data-store","description":"Denies the remove_data_store command without any pre-configured scope.","commands":{"allow":[],"deny":["remove_data_store"]}},"deny-remove-listener":{"identifier":"deny-remove-listener","description":"Denies the remove_listener command without any pre-configured scope.","commands":{"allow":[],"deny":["remove_listener"]}},"deny-set-app-theme":{"identifier":"deny-set-app-theme","description":"Denies the set_app_theme command without any pre-configured scope.","commands":{"allow":[],"deny":["set_app_theme"]}},"deny-set-dock-visibility":{"identifier":"deny-set-dock-visibility","description":"Denies the set_dock_visibility command without any pre-configured scope.","commands":{"allow":[],"deny":["set_dock_visibility"]}},"deny-tauri-version":{"identifier":"deny-tauri-version","description":"Denies the tauri_version command without any pre-configured scope.","commands":{"allow":[],"deny":["tauri_version"]}},"deny-version":{"identifier":"deny-version","description":"Denies the version command without any pre-configured scope.","commands":{"allow":[],"deny":["version"]}}},"permission_sets":{},"global_scope_schema":null},"core:event":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all 
commands.","permissions":["allow-listen","allow-unlisten","allow-emit","allow-emit-to"]},"permissions":{"allow-emit":{"identifier":"allow-emit","description":"Enables the emit command without any pre-configured scope.","commands":{"allow":["emit"],"deny":[]}},"allow-emit-to":{"identifier":"allow-emit-to","description":"Enables the emit_to command without any pre-configured scope.","commands":{"allow":["emit_to"],"deny":[]}},"allow-listen":{"identifier":"allow-listen","description":"Enables the listen command without any pre-configured scope.","commands":{"allow":["listen"],"deny":[]}},"allow-unlisten":{"identifier":"allow-unlisten","description":"Enables the unlisten command without any pre-configured scope.","commands":{"allow":["unlisten"],"deny":[]}},"deny-emit":{"identifier":"deny-emit","description":"Denies the emit command without any pre-configured scope.","commands":{"allow":[],"deny":["emit"]}},"deny-emit-to":{"identifier":"deny-emit-to","description":"Denies the emit_to command without any pre-configured scope.","commands":{"allow":[],"deny":["emit_to"]}},"deny-listen":{"identifier":"deny-listen","description":"Denies the listen command without any pre-configured scope.","commands":{"allow":[],"deny":["listen"]}},"deny-unlisten":{"identifier":"deny-unlisten","description":"Denies the unlisten command without any pre-configured scope.","commands":{"allow":[],"deny":["unlisten"]}}},"permission_sets":{},"global_scope_schema":null},"core:image":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all commands.","permissions":["allow-new","allow-from-bytes","allow-from-path","allow-rgba","allow-size"]},"permissions":{"allow-from-bytes":{"identifier":"allow-from-bytes","description":"Enables the from_bytes command without any pre-configured scope.","commands":{"allow":["from_bytes"],"deny":[]}},"allow-from-path":{"identifier":"allow-from-path","description":"Enables the from_path command without any 
pre-configured scope.","commands":{"allow":["from_path"],"deny":[]}},"allow-new":{"identifier":"allow-new","description":"Enables the new command without any pre-configured scope.","commands":{"allow":["new"],"deny":[]}},"allow-rgba":{"identifier":"allow-rgba","description":"Enables the rgba command without any pre-configured scope.","commands":{"allow":["rgba"],"deny":[]}},"allow-size":{"identifier":"allow-size","description":"Enables the size command without any pre-configured scope.","commands":{"allow":["size"],"deny":[]}},"deny-from-bytes":{"identifier":"deny-from-bytes","description":"Denies the from_bytes command without any pre-configured scope.","commands":{"allow":[],"deny":["from_bytes"]}},"deny-from-path":{"identifier":"deny-from-path","description":"Denies the from_path command without any pre-configured scope.","commands":{"allow":[],"deny":["from_path"]}},"deny-new":{"identifier":"deny-new","description":"Denies the new command without any pre-configured scope.","commands":{"allow":[],"deny":["new"]}},"deny-rgba":{"identifier":"deny-rgba","description":"Denies the rgba command without any pre-configured scope.","commands":{"allow":[],"deny":["rgba"]}},"deny-size":{"identifier":"deny-size","description":"Denies the size command without any pre-configured scope.","commands":{"allow":[],"deny":["size"]}}},"permission_sets":{},"global_scope_schema":null},"core:menu":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all 
commands.","permissions":["allow-new","allow-append","allow-prepend","allow-insert","allow-remove","allow-remove-at","allow-items","allow-get","allow-popup","allow-create-default","allow-set-as-app-menu","allow-set-as-window-menu","allow-text","allow-set-text","allow-is-enabled","allow-set-enabled","allow-set-accelerator","allow-set-as-windows-menu-for-nsapp","allow-set-as-help-menu-for-nsapp","allow-is-checked","allow-set-checked","allow-set-icon"]},"permissions":{"allow-append":{"identifier":"allow-append","description":"Enables the append command without any pre-configured scope.","commands":{"allow":["append"],"deny":[]}},"allow-create-default":{"identifier":"allow-create-default","description":"Enables the create_default command without any pre-configured scope.","commands":{"allow":["create_default"],"deny":[]}},"allow-get":{"identifier":"allow-get","description":"Enables the get command without any pre-configured scope.","commands":{"allow":["get"],"deny":[]}},"allow-insert":{"identifier":"allow-insert","description":"Enables the insert command without any pre-configured scope.","commands":{"allow":["insert"],"deny":[]}},"allow-is-checked":{"identifier":"allow-is-checked","description":"Enables the is_checked command without any pre-configured scope.","commands":{"allow":["is_checked"],"deny":[]}},"allow-is-enabled":{"identifier":"allow-is-enabled","description":"Enables the is_enabled command without any pre-configured scope.","commands":{"allow":["is_enabled"],"deny":[]}},"allow-items":{"identifier":"allow-items","description":"Enables the items command without any pre-configured scope.","commands":{"allow":["items"],"deny":[]}},"allow-new":{"identifier":"allow-new","description":"Enables the new command without any pre-configured scope.","commands":{"allow":["new"],"deny":[]}},"allow-popup":{"identifier":"allow-popup","description":"Enables the popup command without any pre-configured 
scope.","commands":{"allow":["popup"],"deny":[]}},"allow-prepend":{"identifier":"allow-prepend","description":"Enables the prepend command without any pre-configured scope.","commands":{"allow":["prepend"],"deny":[]}},"allow-remove":{"identifier":"allow-remove","description":"Enables the remove command without any pre-configured scope.","commands":{"allow":["remove"],"deny":[]}},"allow-remove-at":{"identifier":"allow-remove-at","description":"Enables the remove_at command without any pre-configured scope.","commands":{"allow":["remove_at"],"deny":[]}},"allow-set-accelerator":{"identifier":"allow-set-accelerator","description":"Enables the set_accelerator command without any pre-configured scope.","commands":{"allow":["set_accelerator"],"deny":[]}},"allow-set-as-app-menu":{"identifier":"allow-set-as-app-menu","description":"Enables the set_as_app_menu command without any pre-configured scope.","commands":{"allow":["set_as_app_menu"],"deny":[]}},"allow-set-as-help-menu-for-nsapp":{"identifier":"allow-set-as-help-menu-for-nsapp","description":"Enables the set_as_help_menu_for_nsapp command without any pre-configured scope.","commands":{"allow":["set_as_help_menu_for_nsapp"],"deny":[]}},"allow-set-as-window-menu":{"identifier":"allow-set-as-window-menu","description":"Enables the set_as_window_menu command without any pre-configured scope.","commands":{"allow":["set_as_window_menu"],"deny":[]}},"allow-set-as-windows-menu-for-nsapp":{"identifier":"allow-set-as-windows-menu-for-nsapp","description":"Enables the set_as_windows_menu_for_nsapp command without any pre-configured scope.","commands":{"allow":["set_as_windows_menu_for_nsapp"],"deny":[]}},"allow-set-checked":{"identifier":"allow-set-checked","description":"Enables the set_checked command without any pre-configured scope.","commands":{"allow":["set_checked"],"deny":[]}},"allow-set-enabled":{"identifier":"allow-set-enabled","description":"Enables the set_enabled command without any pre-configured 
scope.","commands":{"allow":["set_enabled"],"deny":[]}},"allow-set-icon":{"identifier":"allow-set-icon","description":"Enables the set_icon command without any pre-configured scope.","commands":{"allow":["set_icon"],"deny":[]}},"allow-set-text":{"identifier":"allow-set-text","description":"Enables the set_text command without any pre-configured scope.","commands":{"allow":["set_text"],"deny":[]}},"allow-text":{"identifier":"allow-text","description":"Enables the text command without any pre-configured scope.","commands":{"allow":["text"],"deny":[]}},"deny-append":{"identifier":"deny-append","description":"Denies the append command without any pre-configured scope.","commands":{"allow":[],"deny":["append"]}},"deny-create-default":{"identifier":"deny-create-default","description":"Denies the create_default command without any pre-configured scope.","commands":{"allow":[],"deny":["create_default"]}},"deny-get":{"identifier":"deny-get","description":"Denies the get command without any pre-configured scope.","commands":{"allow":[],"deny":["get"]}},"deny-insert":{"identifier":"deny-insert","description":"Denies the insert command without any pre-configured scope.","commands":{"allow":[],"deny":["insert"]}},"deny-is-checked":{"identifier":"deny-is-checked","description":"Denies the is_checked command without any pre-configured scope.","commands":{"allow":[],"deny":["is_checked"]}},"deny-is-enabled":{"identifier":"deny-is-enabled","description":"Denies the is_enabled command without any pre-configured scope.","commands":{"allow":[],"deny":["is_enabled"]}},"deny-items":{"identifier":"deny-items","description":"Denies the items command without any pre-configured scope.","commands":{"allow":[],"deny":["items"]}},"deny-new":{"identifier":"deny-new","description":"Denies the new command without any pre-configured scope.","commands":{"allow":[],"deny":["new"]}},"deny-popup":{"identifier":"deny-popup","description":"Denies the popup command without any pre-configured 
scope.","commands":{"allow":[],"deny":["popup"]}},"deny-prepend":{"identifier":"deny-prepend","description":"Denies the prepend command without any pre-configured scope.","commands":{"allow":[],"deny":["prepend"]}},"deny-remove":{"identifier":"deny-remove","description":"Denies the remove command without any pre-configured scope.","commands":{"allow":[],"deny":["remove"]}},"deny-remove-at":{"identifier":"deny-remove-at","description":"Denies the remove_at command without any pre-configured scope.","commands":{"allow":[],"deny":["remove_at"]}},"deny-set-accelerator":{"identifier":"deny-set-accelerator","description":"Denies the set_accelerator command without any pre-configured scope.","commands":{"allow":[],"deny":["set_accelerator"]}},"deny-set-as-app-menu":{"identifier":"deny-set-as-app-menu","description":"Denies the set_as_app_menu command without any pre-configured scope.","commands":{"allow":[],"deny":["set_as_app_menu"]}},"deny-set-as-help-menu-for-nsapp":{"identifier":"deny-set-as-help-menu-for-nsapp","description":"Denies the set_as_help_menu_for_nsapp command without any pre-configured scope.","commands":{"allow":[],"deny":["set_as_help_menu_for_nsapp"]}},"deny-set-as-window-menu":{"identifier":"deny-set-as-window-menu","description":"Denies the set_as_window_menu command without any pre-configured scope.","commands":{"allow":[],"deny":["set_as_window_menu"]}},"deny-set-as-windows-menu-for-nsapp":{"identifier":"deny-set-as-windows-menu-for-nsapp","description":"Denies the set_as_windows_menu_for_nsapp command without any pre-configured scope.","commands":{"allow":[],"deny":["set_as_windows_menu_for_nsapp"]}},"deny-set-checked":{"identifier":"deny-set-checked","description":"Denies the set_checked command without any pre-configured scope.","commands":{"allow":[],"deny":["set_checked"]}},"deny-set-enabled":{"identifier":"deny-set-enabled","description":"Denies the set_enabled command without any pre-configured 
scope.","commands":{"allow":[],"deny":["set_enabled"]}},"deny-set-icon":{"identifier":"deny-set-icon","description":"Denies the set_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["set_icon"]}},"deny-set-text":{"identifier":"deny-set-text","description":"Denies the set_text command without any pre-configured scope.","commands":{"allow":[],"deny":["set_text"]}},"deny-text":{"identifier":"deny-text","description":"Denies the text command without any pre-configured scope.","commands":{"allow":[],"deny":["text"]}}},"permission_sets":{},"global_scope_schema":null},"core:path":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all commands.","permissions":["allow-resolve-directory","allow-resolve","allow-normalize","allow-join","allow-dirname","allow-extname","allow-basename","allow-is-absolute"]},"permissions":{"allow-basename":{"identifier":"allow-basename","description":"Enables the basename command without any pre-configured scope.","commands":{"allow":["basename"],"deny":[]}},"allow-dirname":{"identifier":"allow-dirname","description":"Enables the dirname command without any pre-configured scope.","commands":{"allow":["dirname"],"deny":[]}},"allow-extname":{"identifier":"allow-extname","description":"Enables the extname command without any pre-configured scope.","commands":{"allow":["extname"],"deny":[]}},"allow-is-absolute":{"identifier":"allow-is-absolute","description":"Enables the is_absolute command without any pre-configured scope.","commands":{"allow":["is_absolute"],"deny":[]}},"allow-join":{"identifier":"allow-join","description":"Enables the join command without any pre-configured scope.","commands":{"allow":["join"],"deny":[]}},"allow-normalize":{"identifier":"allow-normalize","description":"Enables the normalize command without any pre-configured 
scope.","commands":{"allow":["normalize"],"deny":[]}},"allow-resolve":{"identifier":"allow-resolve","description":"Enables the resolve command without any pre-configured scope.","commands":{"allow":["resolve"],"deny":[]}},"allow-resolve-directory":{"identifier":"allow-resolve-directory","description":"Enables the resolve_directory command without any pre-configured scope.","commands":{"allow":["resolve_directory"],"deny":[]}},"deny-basename":{"identifier":"deny-basename","description":"Denies the basename command without any pre-configured scope.","commands":{"allow":[],"deny":["basename"]}},"deny-dirname":{"identifier":"deny-dirname","description":"Denies the dirname command without any pre-configured scope.","commands":{"allow":[],"deny":["dirname"]}},"deny-extname":{"identifier":"deny-extname","description":"Denies the extname command without any pre-configured scope.","commands":{"allow":[],"deny":["extname"]}},"deny-is-absolute":{"identifier":"deny-is-absolute","description":"Denies the is_absolute command without any pre-configured scope.","commands":{"allow":[],"deny":["is_absolute"]}},"deny-join":{"identifier":"deny-join","description":"Denies the join command without any pre-configured scope.","commands":{"allow":[],"deny":["join"]}},"deny-normalize":{"identifier":"deny-normalize","description":"Denies the normalize command without any pre-configured scope.","commands":{"allow":[],"deny":["normalize"]}},"deny-resolve":{"identifier":"deny-resolve","description":"Denies the resolve command without any pre-configured scope.","commands":{"allow":[],"deny":["resolve"]}},"deny-resolve-directory":{"identifier":"deny-resolve-directory","description":"Denies the resolve_directory command without any pre-configured scope.","commands":{"allow":[],"deny":["resolve_directory"]}}},"permission_sets":{},"global_scope_schema":null},"core:resources":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all 
commands.","permissions":["allow-close"]},"permissions":{"allow-close":{"identifier":"allow-close","description":"Enables the close command without any pre-configured scope.","commands":{"allow":["close"],"deny":[]}},"deny-close":{"identifier":"deny-close","description":"Denies the close command without any pre-configured scope.","commands":{"allow":[],"deny":["close"]}}},"permission_sets":{},"global_scope_schema":null},"core:tray":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all commands.","permissions":["allow-new","allow-get-by-id","allow-remove-by-id","allow-set-icon","allow-set-menu","allow-set-tooltip","allow-set-title","allow-set-visible","allow-set-temp-dir-path","allow-set-icon-as-template","allow-set-show-menu-on-left-click"]},"permissions":{"allow-get-by-id":{"identifier":"allow-get-by-id","description":"Enables the get_by_id command without any pre-configured scope.","commands":{"allow":["get_by_id"],"deny":[]}},"allow-new":{"identifier":"allow-new","description":"Enables the new command without any pre-configured scope.","commands":{"allow":["new"],"deny":[]}},"allow-remove-by-id":{"identifier":"allow-remove-by-id","description":"Enables the remove_by_id command without any pre-configured scope.","commands":{"allow":["remove_by_id"],"deny":[]}},"allow-set-icon":{"identifier":"allow-set-icon","description":"Enables the set_icon command without any pre-configured scope.","commands":{"allow":["set_icon"],"deny":[]}},"allow-set-icon-as-template":{"identifier":"allow-set-icon-as-template","description":"Enables the set_icon_as_template command without any pre-configured scope.","commands":{"allow":["set_icon_as_template"],"deny":[]}},"allow-set-menu":{"identifier":"allow-set-menu","description":"Enables the set_menu command without any pre-configured 
scope.","commands":{"allow":["set_menu"],"deny":[]}},"allow-set-show-menu-on-left-click":{"identifier":"allow-set-show-menu-on-left-click","description":"Enables the set_show_menu_on_left_click command without any pre-configured scope.","commands":{"allow":["set_show_menu_on_left_click"],"deny":[]}},"allow-set-temp-dir-path":{"identifier":"allow-set-temp-dir-path","description":"Enables the set_temp_dir_path command without any pre-configured scope.","commands":{"allow":["set_temp_dir_path"],"deny":[]}},"allow-set-title":{"identifier":"allow-set-title","description":"Enables the set_title command without any pre-configured scope.","commands":{"allow":["set_title"],"deny":[]}},"allow-set-tooltip":{"identifier":"allow-set-tooltip","description":"Enables the set_tooltip command without any pre-configured scope.","commands":{"allow":["set_tooltip"],"deny":[]}},"allow-set-visible":{"identifier":"allow-set-visible","description":"Enables the set_visible command without any pre-configured scope.","commands":{"allow":["set_visible"],"deny":[]}},"deny-get-by-id":{"identifier":"deny-get-by-id","description":"Denies the get_by_id command without any pre-configured scope.","commands":{"allow":[],"deny":["get_by_id"]}},"deny-new":{"identifier":"deny-new","description":"Denies the new command without any pre-configured scope.","commands":{"allow":[],"deny":["new"]}},"deny-remove-by-id":{"identifier":"deny-remove-by-id","description":"Denies the remove_by_id command without any pre-configured scope.","commands":{"allow":[],"deny":["remove_by_id"]}},"deny-set-icon":{"identifier":"deny-set-icon","description":"Denies the set_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["set_icon"]}},"deny-set-icon-as-template":{"identifier":"deny-set-icon-as-template","description":"Denies the set_icon_as_template command without any pre-configured 
scope.","commands":{"allow":[],"deny":["set_icon_as_template"]}},"deny-set-menu":{"identifier":"deny-set-menu","description":"Denies the set_menu command without any pre-configured scope.","commands":{"allow":[],"deny":["set_menu"]}},"deny-set-show-menu-on-left-click":{"identifier":"deny-set-show-menu-on-left-click","description":"Denies the set_show_menu_on_left_click command without any pre-configured scope.","commands":{"allow":[],"deny":["set_show_menu_on_left_click"]}},"deny-set-temp-dir-path":{"identifier":"deny-set-temp-dir-path","description":"Denies the set_temp_dir_path command without any pre-configured scope.","commands":{"allow":[],"deny":["set_temp_dir_path"]}},"deny-set-title":{"identifier":"deny-set-title","description":"Denies the set_title command without any pre-configured scope.","commands":{"allow":[],"deny":["set_title"]}},"deny-set-tooltip":{"identifier":"deny-set-tooltip","description":"Denies the set_tooltip command without any pre-configured scope.","commands":{"allow":[],"deny":["set_tooltip"]}},"deny-set-visible":{"identifier":"deny-set-visible","description":"Denies the set_visible command without any pre-configured scope.","commands":{"allow":[],"deny":["set_visible"]}}},"permission_sets":{},"global_scope_schema":null},"core:webview":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin.","permissions":["allow-get-all-webviews","allow-webview-position","allow-webview-size","allow-internal-toggle-devtools"]},"permissions":{"allow-clear-all-browsing-data":{"identifier":"allow-clear-all-browsing-data","description":"Enables the clear_all_browsing_data command without any pre-configured scope.","commands":{"allow":["clear_all_browsing_data"],"deny":[]}},"allow-create-webview":{"identifier":"allow-create-webview","description":"Enables the create_webview command without any pre-configured 
scope.","commands":{"allow":["create_webview"],"deny":[]}},"allow-create-webview-window":{"identifier":"allow-create-webview-window","description":"Enables the create_webview_window command without any pre-configured scope.","commands":{"allow":["create_webview_window"],"deny":[]}},"allow-get-all-webviews":{"identifier":"allow-get-all-webviews","description":"Enables the get_all_webviews command without any pre-configured scope.","commands":{"allow":["get_all_webviews"],"deny":[]}},"allow-internal-toggle-devtools":{"identifier":"allow-internal-toggle-devtools","description":"Enables the internal_toggle_devtools command without any pre-configured scope.","commands":{"allow":["internal_toggle_devtools"],"deny":[]}},"allow-print":{"identifier":"allow-print","description":"Enables the print command without any pre-configured scope.","commands":{"allow":["print"],"deny":[]}},"allow-reparent":{"identifier":"allow-reparent","description":"Enables the reparent command without any pre-configured scope.","commands":{"allow":["reparent"],"deny":[]}},"allow-set-webview-auto-resize":{"identifier":"allow-set-webview-auto-resize","description":"Enables the set_webview_auto_resize command without any pre-configured scope.","commands":{"allow":["set_webview_auto_resize"],"deny":[]}},"allow-set-webview-background-color":{"identifier":"allow-set-webview-background-color","description":"Enables the set_webview_background_color command without any pre-configured scope.","commands":{"allow":["set_webview_background_color"],"deny":[]}},"allow-set-webview-focus":{"identifier":"allow-set-webview-focus","description":"Enables the set_webview_focus command without any pre-configured scope.","commands":{"allow":["set_webview_focus"],"deny":[]}},"allow-set-webview-position":{"identifier":"allow-set-webview-position","description":"Enables the set_webview_position command without any pre-configured 
scope.","commands":{"allow":["set_webview_position"],"deny":[]}},"allow-set-webview-size":{"identifier":"allow-set-webview-size","description":"Enables the set_webview_size command without any pre-configured scope.","commands":{"allow":["set_webview_size"],"deny":[]}},"allow-set-webview-zoom":{"identifier":"allow-set-webview-zoom","description":"Enables the set_webview_zoom command without any pre-configured scope.","commands":{"allow":["set_webview_zoom"],"deny":[]}},"allow-webview-close":{"identifier":"allow-webview-close","description":"Enables the webview_close command without any pre-configured scope.","commands":{"allow":["webview_close"],"deny":[]}},"allow-webview-hide":{"identifier":"allow-webview-hide","description":"Enables the webview_hide command without any pre-configured scope.","commands":{"allow":["webview_hide"],"deny":[]}},"allow-webview-position":{"identifier":"allow-webview-position","description":"Enables the webview_position command without any pre-configured scope.","commands":{"allow":["webview_position"],"deny":[]}},"allow-webview-show":{"identifier":"allow-webview-show","description":"Enables the webview_show command without any pre-configured scope.","commands":{"allow":["webview_show"],"deny":[]}},"allow-webview-size":{"identifier":"allow-webview-size","description":"Enables the webview_size command without any pre-configured scope.","commands":{"allow":["webview_size"],"deny":[]}},"deny-clear-all-browsing-data":{"identifier":"deny-clear-all-browsing-data","description":"Denies the clear_all_browsing_data command without any pre-configured scope.","commands":{"allow":[],"deny":["clear_all_browsing_data"]}},"deny-create-webview":{"identifier":"deny-create-webview","description":"Denies the create_webview command without any pre-configured scope.","commands":{"allow":[],"deny":["create_webview"]}},"deny-create-webview-window":{"identifier":"deny-create-webview-window","description":"Denies the create_webview_window command without any 
pre-configured scope.","commands":{"allow":[],"deny":["create_webview_window"]}},"deny-get-all-webviews":{"identifier":"deny-get-all-webviews","description":"Denies the get_all_webviews command without any pre-configured scope.","commands":{"allow":[],"deny":["get_all_webviews"]}},"deny-internal-toggle-devtools":{"identifier":"deny-internal-toggle-devtools","description":"Denies the internal_toggle_devtools command without any pre-configured scope.","commands":{"allow":[],"deny":["internal_toggle_devtools"]}},"deny-print":{"identifier":"deny-print","description":"Denies the print command without any pre-configured scope.","commands":{"allow":[],"deny":["print"]}},"deny-reparent":{"identifier":"deny-reparent","description":"Denies the reparent command without any pre-configured scope.","commands":{"allow":[],"deny":["reparent"]}},"deny-set-webview-auto-resize":{"identifier":"deny-set-webview-auto-resize","description":"Denies the set_webview_auto_resize command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_auto_resize"]}},"deny-set-webview-background-color":{"identifier":"deny-set-webview-background-color","description":"Denies the set_webview_background_color command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_background_color"]}},"deny-set-webview-focus":{"identifier":"deny-set-webview-focus","description":"Denies the set_webview_focus command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_focus"]}},"deny-set-webview-position":{"identifier":"deny-set-webview-position","description":"Denies the set_webview_position command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_position"]}},"deny-set-webview-size":{"identifier":"deny-set-webview-size","description":"Denies the set_webview_size command without any pre-configured 
scope.","commands":{"allow":[],"deny":["set_webview_size"]}},"deny-set-webview-zoom":{"identifier":"deny-set-webview-zoom","description":"Denies the set_webview_zoom command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_zoom"]}},"deny-webview-close":{"identifier":"deny-webview-close","description":"Denies the webview_close command without any pre-configured scope.","commands":{"allow":[],"deny":["webview_close"]}},"deny-webview-hide":{"identifier":"deny-webview-hide","description":"Denies the webview_hide command without any pre-configured scope.","commands":{"allow":[],"deny":["webview_hide"]}},"deny-webview-position":{"identifier":"deny-webview-position","description":"Denies the webview_position command without any pre-configured scope.","commands":{"allow":[],"deny":["webview_position"]}},"deny-webview-show":{"identifier":"deny-webview-show","description":"Denies the webview_show command without any pre-configured scope.","commands":{"allow":[],"deny":["webview_show"]}},"deny-webview-size":{"identifier":"deny-webview-size","description":"Denies the webview_size command without any pre-configured scope.","commands":{"allow":[],"deny":["webview_size"]}}},"permission_sets":{},"global_scope_schema":null},"core:window":{"default_permission":{"identifier":"default","description":"Default permissions for the 
plugin.","permissions":["allow-get-all-windows","allow-scale-factor","allow-inner-position","allow-outer-position","allow-inner-size","allow-outer-size","allow-is-fullscreen","allow-is-minimized","allow-is-maximized","allow-is-focused","allow-is-decorated","allow-is-resizable","allow-is-maximizable","allow-is-minimizable","allow-is-closable","allow-is-visible","allow-is-enabled","allow-title","allow-current-monitor","allow-primary-monitor","allow-monitor-from-point","allow-available-monitors","allow-cursor-position","allow-theme","allow-is-always-on-top","allow-internal-toggle-maximize"]},"permissions":{"allow-available-monitors":{"identifier":"allow-available-monitors","description":"Enables the available_monitors command without any pre-configured scope.","commands":{"allow":["available_monitors"],"deny":[]}},"allow-center":{"identifier":"allow-center","description":"Enables the center command without any pre-configured scope.","commands":{"allow":["center"],"deny":[]}},"allow-close":{"identifier":"allow-close","description":"Enables the close command without any pre-configured scope.","commands":{"allow":["close"],"deny":[]}},"allow-create":{"identifier":"allow-create","description":"Enables the create command without any pre-configured scope.","commands":{"allow":["create"],"deny":[]}},"allow-current-monitor":{"identifier":"allow-current-monitor","description":"Enables the current_monitor command without any pre-configured scope.","commands":{"allow":["current_monitor"],"deny":[]}},"allow-cursor-position":{"identifier":"allow-cursor-position","description":"Enables the cursor_position command without any pre-configured scope.","commands":{"allow":["cursor_position"],"deny":[]}},"allow-destroy":{"identifier":"allow-destroy","description":"Enables the destroy command without any pre-configured scope.","commands":{"allow":["destroy"],"deny":[]}},"allow-get-all-windows":{"identifier":"allow-get-all-windows","description":"Enables the get_all_windows command without 
any pre-configured scope.","commands":{"allow":["get_all_windows"],"deny":[]}},"allow-hide":{"identifier":"allow-hide","description":"Enables the hide command without any pre-configured scope.","commands":{"allow":["hide"],"deny":[]}},"allow-inner-position":{"identifier":"allow-inner-position","description":"Enables the inner_position command without any pre-configured scope.","commands":{"allow":["inner_position"],"deny":[]}},"allow-inner-size":{"identifier":"allow-inner-size","description":"Enables the inner_size command without any pre-configured scope.","commands":{"allow":["inner_size"],"deny":[]}},"allow-internal-toggle-maximize":{"identifier":"allow-internal-toggle-maximize","description":"Enables the internal_toggle_maximize command without any pre-configured scope.","commands":{"allow":["internal_toggle_maximize"],"deny":[]}},"allow-is-always-on-top":{"identifier":"allow-is-always-on-top","description":"Enables the is_always_on_top command without any pre-configured scope.","commands":{"allow":["is_always_on_top"],"deny":[]}},"allow-is-closable":{"identifier":"allow-is-closable","description":"Enables the is_closable command without any pre-configured scope.","commands":{"allow":["is_closable"],"deny":[]}},"allow-is-decorated":{"identifier":"allow-is-decorated","description":"Enables the is_decorated command without any pre-configured scope.","commands":{"allow":["is_decorated"],"deny":[]}},"allow-is-enabled":{"identifier":"allow-is-enabled","description":"Enables the is_enabled command without any pre-configured scope.","commands":{"allow":["is_enabled"],"deny":[]}},"allow-is-focused":{"identifier":"allow-is-focused","description":"Enables the is_focused command without any pre-configured scope.","commands":{"allow":["is_focused"],"deny":[]}},"allow-is-fullscreen":{"identifier":"allow-is-fullscreen","description":"Enables the is_fullscreen command without any pre-configured 
scope.","commands":{"allow":["is_fullscreen"],"deny":[]}},"allow-is-maximizable":{"identifier":"allow-is-maximizable","description":"Enables the is_maximizable command without any pre-configured scope.","commands":{"allow":["is_maximizable"],"deny":[]}},"allow-is-maximized":{"identifier":"allow-is-maximized","description":"Enables the is_maximized command without any pre-configured scope.","commands":{"allow":["is_maximized"],"deny":[]}},"allow-is-minimizable":{"identifier":"allow-is-minimizable","description":"Enables the is_minimizable command without any pre-configured scope.","commands":{"allow":["is_minimizable"],"deny":[]}},"allow-is-minimized":{"identifier":"allow-is-minimized","description":"Enables the is_minimized command without any pre-configured scope.","commands":{"allow":["is_minimized"],"deny":[]}},"allow-is-resizable":{"identifier":"allow-is-resizable","description":"Enables the is_resizable command without any pre-configured scope.","commands":{"allow":["is_resizable"],"deny":[]}},"allow-is-visible":{"identifier":"allow-is-visible","description":"Enables the is_visible command without any pre-configured scope.","commands":{"allow":["is_visible"],"deny":[]}},"allow-maximize":{"identifier":"allow-maximize","description":"Enables the maximize command without any pre-configured scope.","commands":{"allow":["maximize"],"deny":[]}},"allow-minimize":{"identifier":"allow-minimize","description":"Enables the minimize command without any pre-configured scope.","commands":{"allow":["minimize"],"deny":[]}},"allow-monitor-from-point":{"identifier":"allow-monitor-from-point","description":"Enables the monitor_from_point command without any pre-configured scope.","commands":{"allow":["monitor_from_point"],"deny":[]}},"allow-outer-position":{"identifier":"allow-outer-position","description":"Enables the outer_position command without any pre-configured 
scope.","commands":{"allow":["outer_position"],"deny":[]}},"allow-outer-size":{"identifier":"allow-outer-size","description":"Enables the outer_size command without any pre-configured scope.","commands":{"allow":["outer_size"],"deny":[]}},"allow-primary-monitor":{"identifier":"allow-primary-monitor","description":"Enables the primary_monitor command without any pre-configured scope.","commands":{"allow":["primary_monitor"],"deny":[]}},"allow-request-user-attention":{"identifier":"allow-request-user-attention","description":"Enables the request_user_attention command without any pre-configured scope.","commands":{"allow":["request_user_attention"],"deny":[]}},"allow-scale-factor":{"identifier":"allow-scale-factor","description":"Enables the scale_factor command without any pre-configured scope.","commands":{"allow":["scale_factor"],"deny":[]}},"allow-set-always-on-bottom":{"identifier":"allow-set-always-on-bottom","description":"Enables the set_always_on_bottom command without any pre-configured scope.","commands":{"allow":["set_always_on_bottom"],"deny":[]}},"allow-set-always-on-top":{"identifier":"allow-set-always-on-top","description":"Enables the set_always_on_top command without any pre-configured scope.","commands":{"allow":["set_always_on_top"],"deny":[]}},"allow-set-background-color":{"identifier":"allow-set-background-color","description":"Enables the set_background_color command without any pre-configured scope.","commands":{"allow":["set_background_color"],"deny":[]}},"allow-set-badge-count":{"identifier":"allow-set-badge-count","description":"Enables the set_badge_count command without any pre-configured scope.","commands":{"allow":["set_badge_count"],"deny":[]}},"allow-set-badge-label":{"identifier":"allow-set-badge-label","description":"Enables the set_badge_label command without any pre-configured scope.","commands":{"allow":["set_badge_label"],"deny":[]}},"allow-set-closable":{"identifier":"allow-set-closable","description":"Enables the set_closable 
command without any pre-configured scope.","commands":{"allow":["set_closable"],"deny":[]}},"allow-set-content-protected":{"identifier":"allow-set-content-protected","description":"Enables the set_content_protected command without any pre-configured scope.","commands":{"allow":["set_content_protected"],"deny":[]}},"allow-set-cursor-grab":{"identifier":"allow-set-cursor-grab","description":"Enables the set_cursor_grab command without any pre-configured scope.","commands":{"allow":["set_cursor_grab"],"deny":[]}},"allow-set-cursor-icon":{"identifier":"allow-set-cursor-icon","description":"Enables the set_cursor_icon command without any pre-configured scope.","commands":{"allow":["set_cursor_icon"],"deny":[]}},"allow-set-cursor-position":{"identifier":"allow-set-cursor-position","description":"Enables the set_cursor_position command without any pre-configured scope.","commands":{"allow":["set_cursor_position"],"deny":[]}},"allow-set-cursor-visible":{"identifier":"allow-set-cursor-visible","description":"Enables the set_cursor_visible command without any pre-configured scope.","commands":{"allow":["set_cursor_visible"],"deny":[]}},"allow-set-decorations":{"identifier":"allow-set-decorations","description":"Enables the set_decorations command without any pre-configured scope.","commands":{"allow":["set_decorations"],"deny":[]}},"allow-set-effects":{"identifier":"allow-set-effects","description":"Enables the set_effects command without any pre-configured scope.","commands":{"allow":["set_effects"],"deny":[]}},"allow-set-enabled":{"identifier":"allow-set-enabled","description":"Enables the set_enabled command without any pre-configured scope.","commands":{"allow":["set_enabled"],"deny":[]}},"allow-set-focus":{"identifier":"allow-set-focus","description":"Enables the set_focus command without any pre-configured scope.","commands":{"allow":["set_focus"],"deny":[]}},"allow-set-focusable":{"identifier":"allow-set-focusable","description":"Enables the set_focusable command 
without any pre-configured scope.","commands":{"allow":["set_focusable"],"deny":[]}},"allow-set-fullscreen":{"identifier":"allow-set-fullscreen","description":"Enables the set_fullscreen command without any pre-configured scope.","commands":{"allow":["set_fullscreen"],"deny":[]}},"allow-set-icon":{"identifier":"allow-set-icon","description":"Enables the set_icon command without any pre-configured scope.","commands":{"allow":["set_icon"],"deny":[]}},"allow-set-ignore-cursor-events":{"identifier":"allow-set-ignore-cursor-events","description":"Enables the set_ignore_cursor_events command without any pre-configured scope.","commands":{"allow":["set_ignore_cursor_events"],"deny":[]}},"allow-set-max-size":{"identifier":"allow-set-max-size","description":"Enables the set_max_size command without any pre-configured scope.","commands":{"allow":["set_max_size"],"deny":[]}},"allow-set-maximizable":{"identifier":"allow-set-maximizable","description":"Enables the set_maximizable command without any pre-configured scope.","commands":{"allow":["set_maximizable"],"deny":[]}},"allow-set-min-size":{"identifier":"allow-set-min-size","description":"Enables the set_min_size command without any pre-configured scope.","commands":{"allow":["set_min_size"],"deny":[]}},"allow-set-minimizable":{"identifier":"allow-set-minimizable","description":"Enables the set_minimizable command without any pre-configured scope.","commands":{"allow":["set_minimizable"],"deny":[]}},"allow-set-overlay-icon":{"identifier":"allow-set-overlay-icon","description":"Enables the set_overlay_icon command without any pre-configured scope.","commands":{"allow":["set_overlay_icon"],"deny":[]}},"allow-set-position":{"identifier":"allow-set-position","description":"Enables the set_position command without any pre-configured scope.","commands":{"allow":["set_position"],"deny":[]}},"allow-set-progress-bar":{"identifier":"allow-set-progress-bar","description":"Enables the set_progress_bar command without any pre-configured 
scope.","commands":{"allow":["set_progress_bar"],"deny":[]}},"allow-set-resizable":{"identifier":"allow-set-resizable","description":"Enables the set_resizable command without any pre-configured scope.","commands":{"allow":["set_resizable"],"deny":[]}},"allow-set-shadow":{"identifier":"allow-set-shadow","description":"Enables the set_shadow command without any pre-configured scope.","commands":{"allow":["set_shadow"],"deny":[]}},"allow-set-simple-fullscreen":{"identifier":"allow-set-simple-fullscreen","description":"Enables the set_simple_fullscreen command without any pre-configured scope.","commands":{"allow":["set_simple_fullscreen"],"deny":[]}},"allow-set-size":{"identifier":"allow-set-size","description":"Enables the set_size command without any pre-configured scope.","commands":{"allow":["set_size"],"deny":[]}},"allow-set-size-constraints":{"identifier":"allow-set-size-constraints","description":"Enables the set_size_constraints command without any pre-configured scope.","commands":{"allow":["set_size_constraints"],"deny":[]}},"allow-set-skip-taskbar":{"identifier":"allow-set-skip-taskbar","description":"Enables the set_skip_taskbar command without any pre-configured scope.","commands":{"allow":["set_skip_taskbar"],"deny":[]}},"allow-set-theme":{"identifier":"allow-set-theme","description":"Enables the set_theme command without any pre-configured scope.","commands":{"allow":["set_theme"],"deny":[]}},"allow-set-title":{"identifier":"allow-set-title","description":"Enables the set_title command without any pre-configured scope.","commands":{"allow":["set_title"],"deny":[]}},"allow-set-title-bar-style":{"identifier":"allow-set-title-bar-style","description":"Enables the set_title_bar_style command without any pre-configured scope.","commands":{"allow":["set_title_bar_style"],"deny":[]}},"allow-set-visible-on-all-workspaces":{"identifier":"allow-set-visible-on-all-workspaces","description":"Enables the set_visible_on_all_workspaces command without any 
pre-configured scope.","commands":{"allow":["set_visible_on_all_workspaces"],"deny":[]}},"allow-show":{"identifier":"allow-show","description":"Enables the show command without any pre-configured scope.","commands":{"allow":["show"],"deny":[]}},"allow-start-dragging":{"identifier":"allow-start-dragging","description":"Enables the start_dragging command without any pre-configured scope.","commands":{"allow":["start_dragging"],"deny":[]}},"allow-start-resize-dragging":{"identifier":"allow-start-resize-dragging","description":"Enables the start_resize_dragging command without any pre-configured scope.","commands":{"allow":["start_resize_dragging"],"deny":[]}},"allow-theme":{"identifier":"allow-theme","description":"Enables the theme command without any pre-configured scope.","commands":{"allow":["theme"],"deny":[]}},"allow-title":{"identifier":"allow-title","description":"Enables the title command without any pre-configured scope.","commands":{"allow":["title"],"deny":[]}},"allow-toggle-maximize":{"identifier":"allow-toggle-maximize","description":"Enables the toggle_maximize command without any pre-configured scope.","commands":{"allow":["toggle_maximize"],"deny":[]}},"allow-unmaximize":{"identifier":"allow-unmaximize","description":"Enables the unmaximize command without any pre-configured scope.","commands":{"allow":["unmaximize"],"deny":[]}},"allow-unminimize":{"identifier":"allow-unminimize","description":"Enables the unminimize command without any pre-configured scope.","commands":{"allow":["unminimize"],"deny":[]}},"deny-available-monitors":{"identifier":"deny-available-monitors","description":"Denies the available_monitors command without any pre-configured scope.","commands":{"allow":[],"deny":["available_monitors"]}},"deny-center":{"identifier":"deny-center","description":"Denies the center command without any pre-configured scope.","commands":{"allow":[],"deny":["center"]}},"deny-close":{"identifier":"deny-close","description":"Denies the close command 
without any pre-configured scope.","commands":{"allow":[],"deny":["close"]}},"deny-create":{"identifier":"deny-create","description":"Denies the create command without any pre-configured scope.","commands":{"allow":[],"deny":["create"]}},"deny-current-monitor":{"identifier":"deny-current-monitor","description":"Denies the current_monitor command without any pre-configured scope.","commands":{"allow":[],"deny":["current_monitor"]}},"deny-cursor-position":{"identifier":"deny-cursor-position","description":"Denies the cursor_position command without any pre-configured scope.","commands":{"allow":[],"deny":["cursor_position"]}},"deny-destroy":{"identifier":"deny-destroy","description":"Denies the destroy command without any pre-configured scope.","commands":{"allow":[],"deny":["destroy"]}},"deny-get-all-windows":{"identifier":"deny-get-all-windows","description":"Denies the get_all_windows command without any pre-configured scope.","commands":{"allow":[],"deny":["get_all_windows"]}},"deny-hide":{"identifier":"deny-hide","description":"Denies the hide command without any pre-configured scope.","commands":{"allow":[],"deny":["hide"]}},"deny-inner-position":{"identifier":"deny-inner-position","description":"Denies the inner_position command without any pre-configured scope.","commands":{"allow":[],"deny":["inner_position"]}},"deny-inner-size":{"identifier":"deny-inner-size","description":"Denies the inner_size command without any pre-configured scope.","commands":{"allow":[],"deny":["inner_size"]}},"deny-internal-toggle-maximize":{"identifier":"deny-internal-toggle-maximize","description":"Denies the internal_toggle_maximize command without any pre-configured scope.","commands":{"allow":[],"deny":["internal_toggle_maximize"]}},"deny-is-always-on-top":{"identifier":"deny-is-always-on-top","description":"Denies the is_always_on_top command without any pre-configured 
scope.","commands":{"allow":[],"deny":["is_always_on_top"]}},"deny-is-closable":{"identifier":"deny-is-closable","description":"Denies the is_closable command without any pre-configured scope.","commands":{"allow":[],"deny":["is_closable"]}},"deny-is-decorated":{"identifier":"deny-is-decorated","description":"Denies the is_decorated command without any pre-configured scope.","commands":{"allow":[],"deny":["is_decorated"]}},"deny-is-enabled":{"identifier":"deny-is-enabled","description":"Denies the is_enabled command without any pre-configured scope.","commands":{"allow":[],"deny":["is_enabled"]}},"deny-is-focused":{"identifier":"deny-is-focused","description":"Denies the is_focused command without any pre-configured scope.","commands":{"allow":[],"deny":["is_focused"]}},"deny-is-fullscreen":{"identifier":"deny-is-fullscreen","description":"Denies the is_fullscreen command without any pre-configured scope.","commands":{"allow":[],"deny":["is_fullscreen"]}},"deny-is-maximizable":{"identifier":"deny-is-maximizable","description":"Denies the is_maximizable command without any pre-configured scope.","commands":{"allow":[],"deny":["is_maximizable"]}},"deny-is-maximized":{"identifier":"deny-is-maximized","description":"Denies the is_maximized command without any pre-configured scope.","commands":{"allow":[],"deny":["is_maximized"]}},"deny-is-minimizable":{"identifier":"deny-is-minimizable","description":"Denies the is_minimizable command without any pre-configured scope.","commands":{"allow":[],"deny":["is_minimizable"]}},"deny-is-minimized":{"identifier":"deny-is-minimized","description":"Denies the is_minimized command without any pre-configured scope.","commands":{"allow":[],"deny":["is_minimized"]}},"deny-is-resizable":{"identifier":"deny-is-resizable","description":"Denies the is_resizable command without any pre-configured scope.","commands":{"allow":[],"deny":["is_resizable"]}},"deny-is-visible":{"identifier":"deny-is-visible","description":"Denies the is_visible 
command without any pre-configured scope.","commands":{"allow":[],"deny":["is_visible"]}},"deny-maximize":{"identifier":"deny-maximize","description":"Denies the maximize command without any pre-configured scope.","commands":{"allow":[],"deny":["maximize"]}},"deny-minimize":{"identifier":"deny-minimize","description":"Denies the minimize command without any pre-configured scope.","commands":{"allow":[],"deny":["minimize"]}},"deny-monitor-from-point":{"identifier":"deny-monitor-from-point","description":"Denies the monitor_from_point command without any pre-configured scope.","commands":{"allow":[],"deny":["monitor_from_point"]}},"deny-outer-position":{"identifier":"deny-outer-position","description":"Denies the outer_position command without any pre-configured scope.","commands":{"allow":[],"deny":["outer_position"]}},"deny-outer-size":{"identifier":"deny-outer-size","description":"Denies the outer_size command without any pre-configured scope.","commands":{"allow":[],"deny":["outer_size"]}},"deny-primary-monitor":{"identifier":"deny-primary-monitor","description":"Denies the primary_monitor command without any pre-configured scope.","commands":{"allow":[],"deny":["primary_monitor"]}},"deny-request-user-attention":{"identifier":"deny-request-user-attention","description":"Denies the request_user_attention command without any pre-configured scope.","commands":{"allow":[],"deny":["request_user_attention"]}},"deny-scale-factor":{"identifier":"deny-scale-factor","description":"Denies the scale_factor command without any pre-configured scope.","commands":{"allow":[],"deny":["scale_factor"]}},"deny-set-always-on-bottom":{"identifier":"deny-set-always-on-bottom","description":"Denies the set_always_on_bottom command without any pre-configured scope.","commands":{"allow":[],"deny":["set_always_on_bottom"]}},"deny-set-always-on-top":{"identifier":"deny-set-always-on-top","description":"Denies the set_always_on_top command without any pre-configured 
scope.","commands":{"allow":[],"deny":["set_always_on_top"]}},"deny-set-background-color":{"identifier":"deny-set-background-color","description":"Denies the set_background_color command without any pre-configured scope.","commands":{"allow":[],"deny":["set_background_color"]}},"deny-set-badge-count":{"identifier":"deny-set-badge-count","description":"Denies the set_badge_count command without any pre-configured scope.","commands":{"allow":[],"deny":["set_badge_count"]}},"deny-set-badge-label":{"identifier":"deny-set-badge-label","description":"Denies the set_badge_label command without any pre-configured scope.","commands":{"allow":[],"deny":["set_badge_label"]}},"deny-set-closable":{"identifier":"deny-set-closable","description":"Denies the set_closable command without any pre-configured scope.","commands":{"allow":[],"deny":["set_closable"]}},"deny-set-content-protected":{"identifier":"deny-set-content-protected","description":"Denies the set_content_protected command without any pre-configured scope.","commands":{"allow":[],"deny":["set_content_protected"]}},"deny-set-cursor-grab":{"identifier":"deny-set-cursor-grab","description":"Denies the set_cursor_grab command without any pre-configured scope.","commands":{"allow":[],"deny":["set_cursor_grab"]}},"deny-set-cursor-icon":{"identifier":"deny-set-cursor-icon","description":"Denies the set_cursor_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["set_cursor_icon"]}},"deny-set-cursor-position":{"identifier":"deny-set-cursor-position","description":"Denies the set_cursor_position command without any pre-configured scope.","commands":{"allow":[],"deny":["set_cursor_position"]}},"deny-set-cursor-visible":{"identifier":"deny-set-cursor-visible","description":"Denies the set_cursor_visible command without any pre-configured scope.","commands":{"allow":[],"deny":["set_cursor_visible"]}},"deny-set-decorations":{"identifier":"deny-set-decorations","description":"Denies the set_decorations 
command without any pre-configured scope.","commands":{"allow":[],"deny":["set_decorations"]}},"deny-set-effects":{"identifier":"deny-set-effects","description":"Denies the set_effects command without any pre-configured scope.","commands":{"allow":[],"deny":["set_effects"]}},"deny-set-enabled":{"identifier":"deny-set-enabled","description":"Denies the set_enabled command without any pre-configured scope.","commands":{"allow":[],"deny":["set_enabled"]}},"deny-set-focus":{"identifier":"deny-set-focus","description":"Denies the set_focus command without any pre-configured scope.","commands":{"allow":[],"deny":["set_focus"]}},"deny-set-focusable":{"identifier":"deny-set-focusable","description":"Denies the set_focusable command without any pre-configured scope.","commands":{"allow":[],"deny":["set_focusable"]}},"deny-set-fullscreen":{"identifier":"deny-set-fullscreen","description":"Denies the set_fullscreen command without any pre-configured scope.","commands":{"allow":[],"deny":["set_fullscreen"]}},"deny-set-icon":{"identifier":"deny-set-icon","description":"Denies the set_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["set_icon"]}},"deny-set-ignore-cursor-events":{"identifier":"deny-set-ignore-cursor-events","description":"Denies the set_ignore_cursor_events command without any pre-configured scope.","commands":{"allow":[],"deny":["set_ignore_cursor_events"]}},"deny-set-max-size":{"identifier":"deny-set-max-size","description":"Denies the set_max_size command without any pre-configured scope.","commands":{"allow":[],"deny":["set_max_size"]}},"deny-set-maximizable":{"identifier":"deny-set-maximizable","description":"Denies the set_maximizable command without any pre-configured scope.","commands":{"allow":[],"deny":["set_maximizable"]}},"deny-set-min-size":{"identifier":"deny-set-min-size","description":"Denies the set_min_size command without any pre-configured 
scope.","commands":{"allow":[],"deny":["set_min_size"]}},"deny-set-minimizable":{"identifier":"deny-set-minimizable","description":"Denies the set_minimizable command without any pre-configured scope.","commands":{"allow":[],"deny":["set_minimizable"]}},"deny-set-overlay-icon":{"identifier":"deny-set-overlay-icon","description":"Denies the set_overlay_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["set_overlay_icon"]}},"deny-set-position":{"identifier":"deny-set-position","description":"Denies the set_position command without any pre-configured scope.","commands":{"allow":[],"deny":["set_position"]}},"deny-set-progress-bar":{"identifier":"deny-set-progress-bar","description":"Denies the set_progress_bar command without any pre-configured scope.","commands":{"allow":[],"deny":["set_progress_bar"]}},"deny-set-resizable":{"identifier":"deny-set-resizable","description":"Denies the set_resizable command without any pre-configured scope.","commands":{"allow":[],"deny":["set_resizable"]}},"deny-set-shadow":{"identifier":"deny-set-shadow","description":"Denies the set_shadow command without any pre-configured scope.","commands":{"allow":[],"deny":["set_shadow"]}},"deny-set-simple-fullscreen":{"identifier":"deny-set-simple-fullscreen","description":"Denies the set_simple_fullscreen command without any pre-configured scope.","commands":{"allow":[],"deny":["set_simple_fullscreen"]}},"deny-set-size":{"identifier":"deny-set-size","description":"Denies the set_size command without any pre-configured scope.","commands":{"allow":[],"deny":["set_size"]}},"deny-set-size-constraints":{"identifier":"deny-set-size-constraints","description":"Denies the set_size_constraints command without any pre-configured scope.","commands":{"allow":[],"deny":["set_size_constraints"]}},"deny-set-skip-taskbar":{"identifier":"deny-set-skip-taskbar","description":"Denies the set_skip_taskbar command without any pre-configured 
scope.","commands":{"allow":[],"deny":["set_skip_taskbar"]}},"deny-set-theme":{"identifier":"deny-set-theme","description":"Denies the set_theme command without any pre-configured scope.","commands":{"allow":[],"deny":["set_theme"]}},"deny-set-title":{"identifier":"deny-set-title","description":"Denies the set_title command without any pre-configured scope.","commands":{"allow":[],"deny":["set_title"]}},"deny-set-title-bar-style":{"identifier":"deny-set-title-bar-style","description":"Denies the set_title_bar_style command without any pre-configured scope.","commands":{"allow":[],"deny":["set_title_bar_style"]}},"deny-set-visible-on-all-workspaces":{"identifier":"deny-set-visible-on-all-workspaces","description":"Denies the set_visible_on_all_workspaces command without any pre-configured scope.","commands":{"allow":[],"deny":["set_visible_on_all_workspaces"]}},"deny-show":{"identifier":"deny-show","description":"Denies the show command without any pre-configured scope.","commands":{"allow":[],"deny":["show"]}},"deny-start-dragging":{"identifier":"deny-start-dragging","description":"Denies the start_dragging command without any pre-configured scope.","commands":{"allow":[],"deny":["start_dragging"]}},"deny-start-resize-dragging":{"identifier":"deny-start-resize-dragging","description":"Denies the start_resize_dragging command without any pre-configured scope.","commands":{"allow":[],"deny":["start_resize_dragging"]}},"deny-theme":{"identifier":"deny-theme","description":"Denies the theme command without any pre-configured scope.","commands":{"allow":[],"deny":["theme"]}},"deny-title":{"identifier":"deny-title","description":"Denies the title command without any pre-configured scope.","commands":{"allow":[],"deny":["title"]}},"deny-toggle-maximize":{"identifier":"deny-toggle-maximize","description":"Denies the toggle_maximize command without any pre-configured 
scope.","commands":{"allow":[],"deny":["toggle_maximize"]}},"deny-unmaximize":{"identifier":"deny-unmaximize","description":"Denies the unmaximize command without any pre-configured scope.","commands":{"allow":[],"deny":["unmaximize"]}},"deny-unminimize":{"identifier":"deny-unminimize","description":"Denies the unminimize command without any pre-configured scope.","commands":{"allow":[],"deny":["unminimize"]}}},"permission_sets":{},"global_scope_schema":null},"shell":{"default_permission":{"identifier":"default","description":"This permission set configures which\nshell functionality is exposed by default.\n\n#### Granted Permissions\n\nIt allows to use the `open` functionality with a reasonable\nscope pre-configured. It will allow opening `http(s)://`,\n`tel:` and `mailto:` links.\n","permissions":["allow-open"]},"permissions":{"allow-execute":{"identifier":"allow-execute","description":"Enables the execute command without any pre-configured scope.","commands":{"allow":["execute"],"deny":[]}},"allow-kill":{"identifier":"allow-kill","description":"Enables the kill command without any pre-configured scope.","commands":{"allow":["kill"],"deny":[]}},"allow-open":{"identifier":"allow-open","description":"Enables the open command without any pre-configured scope.","commands":{"allow":["open"],"deny":[]}},"allow-spawn":{"identifier":"allow-spawn","description":"Enables the spawn command without any pre-configured scope.","commands":{"allow":["spawn"],"deny":[]}},"allow-stdin-write":{"identifier":"allow-stdin-write","description":"Enables the stdin_write command without any pre-configured scope.","commands":{"allow":["stdin_write"],"deny":[]}},"deny-execute":{"identifier":"deny-execute","description":"Denies the execute command without any pre-configured scope.","commands":{"allow":[],"deny":["execute"]}},"deny-kill":{"identifier":"deny-kill","description":"Denies the kill command without any pre-configured 
scope.","commands":{"allow":[],"deny":["kill"]}},"deny-open":{"identifier":"deny-open","description":"Denies the open command without any pre-configured scope.","commands":{"allow":[],"deny":["open"]}},"deny-spawn":{"identifier":"deny-spawn","description":"Denies the spawn command without any pre-configured scope.","commands":{"allow":[],"deny":["spawn"]}},"deny-stdin-write":{"identifier":"deny-stdin-write","description":"Denies the stdin_write command without any pre-configured scope.","commands":{"allow":[],"deny":["stdin_write"]}}},"permission_sets":{},"global_scope_schema":{"$schema":"http://json-schema.org/draft-07/schema#","anyOf":[{"additionalProperties":false,"properties":{"args":{"allOf":[{"$ref":"#/definitions/ShellScopeEntryAllowedArgs"}],"description":"The allowed arguments for the command execution."},"cmd":{"description":"The command name. It can start with a variable that resolves to a system base directory. The variables are: `$AUDIO`, `$CACHE`, `$CONFIG`, `$DATA`, `$LOCALDATA`, `$DESKTOP`, `$DOCUMENT`, `$DOWNLOAD`, `$EXE`, `$FONT`, `$HOME`, `$PICTURE`, `$PUBLIC`, `$RUNTIME`, `$TEMPLATE`, `$VIDEO`, `$RESOURCE`, `$LOG`, `$TEMP`, `$APPCONFIG`, `$APPDATA`, `$APPLOCALDATA`, `$APPCACHE`, `$APPLOG`.","type":"string"},"name":{"description":"The name for this allowed shell command configuration.\n\nThis name will be used inside of the webview API to call this command along with any specified arguments.","type":"string"}},"required":["cmd","name"],"type":"object"},{"additionalProperties":false,"properties":{"args":{"allOf":[{"$ref":"#/definitions/ShellScopeEntryAllowedArgs"}],"description":"The allowed arguments for the command execution."},"name":{"description":"The name for this allowed shell command configuration.\n\nThis name will be used inside of the webview API to call this command along with any specified arguments.","type":"string"},"sidecar":{"description":"If this command is a sidecar 
command.","type":"boolean"}},"required":["name","sidecar"],"type":"object"}],"definitions":{"ShellScopeEntryAllowedArg":{"anyOf":[{"description":"A non-configurable argument that is passed to the command in the order it was specified.","type":"string"},{"additionalProperties":false,"description":"A variable that is set while calling the command from the webview API.","properties":{"raw":{"default":false,"description":"Marks the validator as a raw regex, meaning the plugin should not make any modification at runtime.\n\nThis means the regex will not match on the entire string by default, which might be exploited if your regex allow unexpected input to be considered valid. When using this option, make sure your regex is correct.","type":"boolean"},"validator":{"description":"[regex] validator to require passed values to conform to an expected input.\n\nThis will require the argument value passed to this variable to match the `validator` regex before it will be executed.\n\nThe regex string is by default surrounded by `^...$` to match the full string. For example the `https?://\\w+` regex would be registered as `^https?://\\w+$`.\n\n[regex]: ","type":"string"}},"required":["validator"],"type":"object"}],"description":"A command argument allowed to be executed by the webview API."},"ShellScopeEntryAllowedArgs":{"anyOf":[{"description":"Use a simple boolean to allow all or disable all arguments to this command configuration.","type":"boolean"},{"description":"A specific set of [`ShellScopeEntryAllowedArg`] that are valid to call for the command configuration.","items":{"$ref":"#/definitions/ShellScopeEntryAllowedArg"},"type":"array"}],"description":"A set of command arguments allowed to be executed by the webview API.\n\nA value of `true` will allow any arguments to be passed to the command. `false` will disable all arguments. 
A list of [`ShellScopeEntryAllowedArg`] will set those arguments as the only valid arguments to be passed to the attached command configuration."}},"description":"Shell scope entry.","title":"ShellScopeEntry"}}} \ No newline at end of file +{ + "core": { + "default_permission": { + "identifier": "default", + "description": "Default core plugins set.", + "permissions": [ + "core:path:default", + "core:event:default", + "core:window:default", + "core:webview:default", + "core:app:default", + "core:image:default", + "core:resources:default", + "core:menu:default", + "core:tray:default" + ] + }, + "permissions": {}, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:app": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin.", + "permissions": [ + "allow-version", + "allow-name", + "allow-tauri-version", + "allow-identifier", + "allow-bundle-type", + "allow-register-listener", + "allow-remove-listener" + ] + }, + "permissions": { + "allow-app-hide": { + "identifier": "allow-app-hide", + "description": "Enables the app_hide command without any pre-configured scope.", + "commands": { "allow": ["app_hide"], "deny": [] } + }, + "allow-app-show": { + "identifier": "allow-app-show", + "description": "Enables the app_show command without any pre-configured scope.", + "commands": { "allow": ["app_show"], "deny": [] } + }, + "allow-bundle-type": { + "identifier": "allow-bundle-type", + "description": "Enables the bundle_type command without any pre-configured scope.", + "commands": { "allow": ["bundle_type"], "deny": [] } + }, + "allow-default-window-icon": { + "identifier": "allow-default-window-icon", + "description": "Enables the default_window_icon command without any pre-configured scope.", + "commands": { "allow": ["default_window_icon"], "deny": [] } + }, + "allow-fetch-data-store-identifiers": { + "identifier": "allow-fetch-data-store-identifiers", + "description": "Enables the 
fetch_data_store_identifiers command without any pre-configured scope.", + "commands": { "allow": ["fetch_data_store_identifiers"], "deny": [] } + }, + "allow-identifier": { + "identifier": "allow-identifier", + "description": "Enables the identifier command without any pre-configured scope.", + "commands": { "allow": ["identifier"], "deny": [] } + }, + "allow-name": { + "identifier": "allow-name", + "description": "Enables the name command without any pre-configured scope.", + "commands": { "allow": ["name"], "deny": [] } + }, + "allow-register-listener": { + "identifier": "allow-register-listener", + "description": "Enables the register_listener command without any pre-configured scope.", + "commands": { "allow": ["register_listener"], "deny": [] } + }, + "allow-remove-data-store": { + "identifier": "allow-remove-data-store", + "description": "Enables the remove_data_store command without any pre-configured scope.", + "commands": { "allow": ["remove_data_store"], "deny": [] } + }, + "allow-remove-listener": { + "identifier": "allow-remove-listener", + "description": "Enables the remove_listener command without any pre-configured scope.", + "commands": { "allow": ["remove_listener"], "deny": [] } + }, + "allow-set-app-theme": { + "identifier": "allow-set-app-theme", + "description": "Enables the set_app_theme command without any pre-configured scope.", + "commands": { "allow": ["set_app_theme"], "deny": [] } + }, + "allow-set-dock-visibility": { + "identifier": "allow-set-dock-visibility", + "description": "Enables the set_dock_visibility command without any pre-configured scope.", + "commands": { "allow": ["set_dock_visibility"], "deny": [] } + }, + "allow-tauri-version": { + "identifier": "allow-tauri-version", + "description": "Enables the tauri_version command without any pre-configured scope.", + "commands": { "allow": ["tauri_version"], "deny": [] } + }, + "allow-version": { + "identifier": "allow-version", + "description": "Enables the version command 
without any pre-configured scope.", + "commands": { "allow": ["version"], "deny": [] } + }, + "deny-app-hide": { + "identifier": "deny-app-hide", + "description": "Denies the app_hide command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["app_hide"] } + }, + "deny-app-show": { + "identifier": "deny-app-show", + "description": "Denies the app_show command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["app_show"] } + }, + "deny-bundle-type": { + "identifier": "deny-bundle-type", + "description": "Denies the bundle_type command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["bundle_type"] } + }, + "deny-default-window-icon": { + "identifier": "deny-default-window-icon", + "description": "Denies the default_window_icon command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["default_window_icon"] } + }, + "deny-fetch-data-store-identifiers": { + "identifier": "deny-fetch-data-store-identifiers", + "description": "Denies the fetch_data_store_identifiers command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["fetch_data_store_identifiers"] } + }, + "deny-identifier": { + "identifier": "deny-identifier", + "description": "Denies the identifier command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["identifier"] } + }, + "deny-name": { + "identifier": "deny-name", + "description": "Denies the name command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["name"] } + }, + "deny-register-listener": { + "identifier": "deny-register-listener", + "description": "Denies the register_listener command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["register_listener"] } + }, + "deny-remove-data-store": { + "identifier": "deny-remove-data-store", + "description": "Denies the remove_data_store command without any pre-configured scope.", + "commands": { "allow": [], "deny": 
["remove_data_store"] } + }, + "deny-remove-listener": { + "identifier": "deny-remove-listener", + "description": "Denies the remove_listener command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["remove_listener"] } + }, + "deny-set-app-theme": { + "identifier": "deny-set-app-theme", + "description": "Denies the set_app_theme command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_app_theme"] } + }, + "deny-set-dock-visibility": { + "identifier": "deny-set-dock-visibility", + "description": "Denies the set_dock_visibility command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_dock_visibility"] } + }, + "deny-tauri-version": { + "identifier": "deny-tauri-version", + "description": "Denies the tauri_version command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["tauri_version"] } + }, + "deny-version": { + "identifier": "deny-version", + "description": "Denies the version command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["version"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:event": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin, which enables all commands.", + "permissions": ["allow-listen", "allow-unlisten", "allow-emit", "allow-emit-to"] + }, + "permissions": { + "allow-emit": { + "identifier": "allow-emit", + "description": "Enables the emit command without any pre-configured scope.", + "commands": { "allow": ["emit"], "deny": [] } + }, + "allow-emit-to": { + "identifier": "allow-emit-to", + "description": "Enables the emit_to command without any pre-configured scope.", + "commands": { "allow": ["emit_to"], "deny": [] } + }, + "allow-listen": { + "identifier": "allow-listen", + "description": "Enables the listen command without any pre-configured scope.", + "commands": { "allow": ["listen"], "deny": [] } + }, + 
"allow-unlisten": { + "identifier": "allow-unlisten", + "description": "Enables the unlisten command without any pre-configured scope.", + "commands": { "allow": ["unlisten"], "deny": [] } + }, + "deny-emit": { + "identifier": "deny-emit", + "description": "Denies the emit command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["emit"] } + }, + "deny-emit-to": { + "identifier": "deny-emit-to", + "description": "Denies the emit_to command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["emit_to"] } + }, + "deny-listen": { + "identifier": "deny-listen", + "description": "Denies the listen command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["listen"] } + }, + "deny-unlisten": { + "identifier": "deny-unlisten", + "description": "Denies the unlisten command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["unlisten"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:image": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin, which enables all commands.", + "permissions": ["allow-new", "allow-from-bytes", "allow-from-path", "allow-rgba", "allow-size"] + }, + "permissions": { + "allow-from-bytes": { + "identifier": "allow-from-bytes", + "description": "Enables the from_bytes command without any pre-configured scope.", + "commands": { "allow": ["from_bytes"], "deny": [] } + }, + "allow-from-path": { + "identifier": "allow-from-path", + "description": "Enables the from_path command without any pre-configured scope.", + "commands": { "allow": ["from_path"], "deny": [] } + }, + "allow-new": { + "identifier": "allow-new", + "description": "Enables the new command without any pre-configured scope.", + "commands": { "allow": ["new"], "deny": [] } + }, + "allow-rgba": { + "identifier": "allow-rgba", + "description": "Enables the rgba command without any pre-configured scope.", + "commands": 
{ "allow": ["rgba"], "deny": [] } + }, + "allow-size": { + "identifier": "allow-size", + "description": "Enables the size command without any pre-configured scope.", + "commands": { "allow": ["size"], "deny": [] } + }, + "deny-from-bytes": { + "identifier": "deny-from-bytes", + "description": "Denies the from_bytes command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["from_bytes"] } + }, + "deny-from-path": { + "identifier": "deny-from-path", + "description": "Denies the from_path command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["from_path"] } + }, + "deny-new": { + "identifier": "deny-new", + "description": "Denies the new command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["new"] } + }, + "deny-rgba": { + "identifier": "deny-rgba", + "description": "Denies the rgba command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["rgba"] } + }, + "deny-size": { + "identifier": "deny-size", + "description": "Denies the size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["size"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:menu": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin, which enables all commands.", + "permissions": [ + "allow-new", + "allow-append", + "allow-prepend", + "allow-insert", + "allow-remove", + "allow-remove-at", + "allow-items", + "allow-get", + "allow-popup", + "allow-create-default", + "allow-set-as-app-menu", + "allow-set-as-window-menu", + "allow-text", + "allow-set-text", + "allow-is-enabled", + "allow-set-enabled", + "allow-set-accelerator", + "allow-set-as-windows-menu-for-nsapp", + "allow-set-as-help-menu-for-nsapp", + "allow-is-checked", + "allow-set-checked", + "allow-set-icon" + ] + }, + "permissions": { + "allow-append": { + "identifier": "allow-append", + "description": "Enables the append command 
without any pre-configured scope.", + "commands": { "allow": ["append"], "deny": [] } + }, + "allow-create-default": { + "identifier": "allow-create-default", + "description": "Enables the create_default command without any pre-configured scope.", + "commands": { "allow": ["create_default"], "deny": [] } + }, + "allow-get": { + "identifier": "allow-get", + "description": "Enables the get command without any pre-configured scope.", + "commands": { "allow": ["get"], "deny": [] } + }, + "allow-insert": { + "identifier": "allow-insert", + "description": "Enables the insert command without any pre-configured scope.", + "commands": { "allow": ["insert"], "deny": [] } + }, + "allow-is-checked": { + "identifier": "allow-is-checked", + "description": "Enables the is_checked command without any pre-configured scope.", + "commands": { "allow": ["is_checked"], "deny": [] } + }, + "allow-is-enabled": { + "identifier": "allow-is-enabled", + "description": "Enables the is_enabled command without any pre-configured scope.", + "commands": { "allow": ["is_enabled"], "deny": [] } + }, + "allow-items": { + "identifier": "allow-items", + "description": "Enables the items command without any pre-configured scope.", + "commands": { "allow": ["items"], "deny": [] } + }, + "allow-new": { + "identifier": "allow-new", + "description": "Enables the new command without any pre-configured scope.", + "commands": { "allow": ["new"], "deny": [] } + }, + "allow-popup": { + "identifier": "allow-popup", + "description": "Enables the popup command without any pre-configured scope.", + "commands": { "allow": ["popup"], "deny": [] } + }, + "allow-prepend": { + "identifier": "allow-prepend", + "description": "Enables the prepend command without any pre-configured scope.", + "commands": { "allow": ["prepend"], "deny": [] } + }, + "allow-remove": { + "identifier": "allow-remove", + "description": "Enables the remove command without any pre-configured scope.", + "commands": { "allow": ["remove"], "deny": [] 
} + }, + "allow-remove-at": { + "identifier": "allow-remove-at", + "description": "Enables the remove_at command without any pre-configured scope.", + "commands": { "allow": ["remove_at"], "deny": [] } + }, + "allow-set-accelerator": { + "identifier": "allow-set-accelerator", + "description": "Enables the set_accelerator command without any pre-configured scope.", + "commands": { "allow": ["set_accelerator"], "deny": [] } + }, + "allow-set-as-app-menu": { + "identifier": "allow-set-as-app-menu", + "description": "Enables the set_as_app_menu command without any pre-configured scope.", + "commands": { "allow": ["set_as_app_menu"], "deny": [] } + }, + "allow-set-as-help-menu-for-nsapp": { + "identifier": "allow-set-as-help-menu-for-nsapp", + "description": "Enables the set_as_help_menu_for_nsapp command without any pre-configured scope.", + "commands": { "allow": ["set_as_help_menu_for_nsapp"], "deny": [] } + }, + "allow-set-as-window-menu": { + "identifier": "allow-set-as-window-menu", + "description": "Enables the set_as_window_menu command without any pre-configured scope.", + "commands": { "allow": ["set_as_window_menu"], "deny": [] } + }, + "allow-set-as-windows-menu-for-nsapp": { + "identifier": "allow-set-as-windows-menu-for-nsapp", + "description": "Enables the set_as_windows_menu_for_nsapp command without any pre-configured scope.", + "commands": { "allow": ["set_as_windows_menu_for_nsapp"], "deny": [] } + }, + "allow-set-checked": { + "identifier": "allow-set-checked", + "description": "Enables the set_checked command without any pre-configured scope.", + "commands": { "allow": ["set_checked"], "deny": [] } + }, + "allow-set-enabled": { + "identifier": "allow-set-enabled", + "description": "Enables the set_enabled command without any pre-configured scope.", + "commands": { "allow": ["set_enabled"], "deny": [] } + }, + "allow-set-icon": { + "identifier": "allow-set-icon", + "description": "Enables the set_icon command without any pre-configured scope.", + 
"commands": { "allow": ["set_icon"], "deny": [] } + }, + "allow-set-text": { + "identifier": "allow-set-text", + "description": "Enables the set_text command without any pre-configured scope.", + "commands": { "allow": ["set_text"], "deny": [] } + }, + "allow-text": { + "identifier": "allow-text", + "description": "Enables the text command without any pre-configured scope.", + "commands": { "allow": ["text"], "deny": [] } + }, + "deny-append": { + "identifier": "deny-append", + "description": "Denies the append command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["append"] } + }, + "deny-create-default": { + "identifier": "deny-create-default", + "description": "Denies the create_default command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["create_default"] } + }, + "deny-get": { + "identifier": "deny-get", + "description": "Denies the get command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["get"] } + }, + "deny-insert": { + "identifier": "deny-insert", + "description": "Denies the insert command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["insert"] } + }, + "deny-is-checked": { + "identifier": "deny-is-checked", + "description": "Denies the is_checked command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_checked"] } + }, + "deny-is-enabled": { + "identifier": "deny-is-enabled", + "description": "Denies the is_enabled command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_enabled"] } + }, + "deny-items": { + "identifier": "deny-items", + "description": "Denies the items command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["items"] } + }, + "deny-new": { + "identifier": "deny-new", + "description": "Denies the new command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["new"] } + }, + "deny-popup": { + "identifier": "deny-popup", + 
"description": "Denies the popup command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["popup"] } + }, + "deny-prepend": { + "identifier": "deny-prepend", + "description": "Denies the prepend command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["prepend"] } + }, + "deny-remove": { + "identifier": "deny-remove", + "description": "Denies the remove command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["remove"] } + }, + "deny-remove-at": { + "identifier": "deny-remove-at", + "description": "Denies the remove_at command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["remove_at"] } + }, + "deny-set-accelerator": { + "identifier": "deny-set-accelerator", + "description": "Denies the set_accelerator command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_accelerator"] } + }, + "deny-set-as-app-menu": { + "identifier": "deny-set-as-app-menu", + "description": "Denies the set_as_app_menu command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_as_app_menu"] } + }, + "deny-set-as-help-menu-for-nsapp": { + "identifier": "deny-set-as-help-menu-for-nsapp", + "description": "Denies the set_as_help_menu_for_nsapp command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_as_help_menu_for_nsapp"] } + }, + "deny-set-as-window-menu": { + "identifier": "deny-set-as-window-menu", + "description": "Denies the set_as_window_menu command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_as_window_menu"] } + }, + "deny-set-as-windows-menu-for-nsapp": { + "identifier": "deny-set-as-windows-menu-for-nsapp", + "description": "Denies the set_as_windows_menu_for_nsapp command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_as_windows_menu_for_nsapp"] } + }, + "deny-set-checked": { + "identifier": "deny-set-checked", + "description": 
"Denies the set_checked command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_checked"] } + }, + "deny-set-enabled": { + "identifier": "deny-set-enabled", + "description": "Denies the set_enabled command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_enabled"] } + }, + "deny-set-icon": { + "identifier": "deny-set-icon", + "description": "Denies the set_icon command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_icon"] } + }, + "deny-set-text": { + "identifier": "deny-set-text", + "description": "Denies the set_text command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_text"] } + }, + "deny-text": { + "identifier": "deny-text", + "description": "Denies the text command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["text"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:path": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin, which enables all commands.", + "permissions": [ + "allow-resolve-directory", + "allow-resolve", + "allow-normalize", + "allow-join", + "allow-dirname", + "allow-extname", + "allow-basename", + "allow-is-absolute" + ] + }, + "permissions": { + "allow-basename": { + "identifier": "allow-basename", + "description": "Enables the basename command without any pre-configured scope.", + "commands": { "allow": ["basename"], "deny": [] } + }, + "allow-dirname": { + "identifier": "allow-dirname", + "description": "Enables the dirname command without any pre-configured scope.", + "commands": { "allow": ["dirname"], "deny": [] } + }, + "allow-extname": { + "identifier": "allow-extname", + "description": "Enables the extname command without any pre-configured scope.", + "commands": { "allow": ["extname"], "deny": [] } + }, + "allow-is-absolute": { + "identifier": "allow-is-absolute", + "description": "Enables the 
is_absolute command without any pre-configured scope.", + "commands": { "allow": ["is_absolute"], "deny": [] } + }, + "allow-join": { + "identifier": "allow-join", + "description": "Enables the join command without any pre-configured scope.", + "commands": { "allow": ["join"], "deny": [] } + }, + "allow-normalize": { + "identifier": "allow-normalize", + "description": "Enables the normalize command without any pre-configured scope.", + "commands": { "allow": ["normalize"], "deny": [] } + }, + "allow-resolve": { + "identifier": "allow-resolve", + "description": "Enables the resolve command without any pre-configured scope.", + "commands": { "allow": ["resolve"], "deny": [] } + }, + "allow-resolve-directory": { + "identifier": "allow-resolve-directory", + "description": "Enables the resolve_directory command without any pre-configured scope.", + "commands": { "allow": ["resolve_directory"], "deny": [] } + }, + "deny-basename": { + "identifier": "deny-basename", + "description": "Denies the basename command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["basename"] } + }, + "deny-dirname": { + "identifier": "deny-dirname", + "description": "Denies the dirname command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["dirname"] } + }, + "deny-extname": { + "identifier": "deny-extname", + "description": "Denies the extname command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["extname"] } + }, + "deny-is-absolute": { + "identifier": "deny-is-absolute", + "description": "Denies the is_absolute command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_absolute"] } + }, + "deny-join": { + "identifier": "deny-join", + "description": "Denies the join command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["join"] } + }, + "deny-normalize": { + "identifier": "deny-normalize", + "description": "Denies the normalize command without any 
pre-configured scope.", + "commands": { "allow": [], "deny": ["normalize"] } + }, + "deny-resolve": { + "identifier": "deny-resolve", + "description": "Denies the resolve command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["resolve"] } + }, + "deny-resolve-directory": { + "identifier": "deny-resolve-directory", + "description": "Denies the resolve_directory command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["resolve_directory"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:resources": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin, which enables all commands.", + "permissions": ["allow-close"] + }, + "permissions": { + "allow-close": { + "identifier": "allow-close", + "description": "Enables the close command without any pre-configured scope.", + "commands": { "allow": ["close"], "deny": [] } + }, + "deny-close": { + "identifier": "deny-close", + "description": "Denies the close command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["close"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:tray": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin, which enables all commands.", + "permissions": [ + "allow-new", + "allow-get-by-id", + "allow-remove-by-id", + "allow-set-icon", + "allow-set-menu", + "allow-set-tooltip", + "allow-set-title", + "allow-set-visible", + "allow-set-temp-dir-path", + "allow-set-icon-as-template", + "allow-set-show-menu-on-left-click" + ] + }, + "permissions": { + "allow-get-by-id": { + "identifier": "allow-get-by-id", + "description": "Enables the get_by_id command without any pre-configured scope.", + "commands": { "allow": ["get_by_id"], "deny": [] } + }, + "allow-new": { + "identifier": "allow-new", + "description": "Enables the new command without any pre-configured scope.", 
+ "commands": { "allow": ["new"], "deny": [] } + }, + "allow-remove-by-id": { + "identifier": "allow-remove-by-id", + "description": "Enables the remove_by_id command without any pre-configured scope.", + "commands": { "allow": ["remove_by_id"], "deny": [] } + }, + "allow-set-icon": { + "identifier": "allow-set-icon", + "description": "Enables the set_icon command without any pre-configured scope.", + "commands": { "allow": ["set_icon"], "deny": [] } + }, + "allow-set-icon-as-template": { + "identifier": "allow-set-icon-as-template", + "description": "Enables the set_icon_as_template command without any pre-configured scope.", + "commands": { "allow": ["set_icon_as_template"], "deny": [] } + }, + "allow-set-menu": { + "identifier": "allow-set-menu", + "description": "Enables the set_menu command without any pre-configured scope.", + "commands": { "allow": ["set_menu"], "deny": [] } + }, + "allow-set-show-menu-on-left-click": { + "identifier": "allow-set-show-menu-on-left-click", + "description": "Enables the set_show_menu_on_left_click command without any pre-configured scope.", + "commands": { "allow": ["set_show_menu_on_left_click"], "deny": [] } + }, + "allow-set-temp-dir-path": { + "identifier": "allow-set-temp-dir-path", + "description": "Enables the set_temp_dir_path command without any pre-configured scope.", + "commands": { "allow": ["set_temp_dir_path"], "deny": [] } + }, + "allow-set-title": { + "identifier": "allow-set-title", + "description": "Enables the set_title command without any pre-configured scope.", + "commands": { "allow": ["set_title"], "deny": [] } + }, + "allow-set-tooltip": { + "identifier": "allow-set-tooltip", + "description": "Enables the set_tooltip command without any pre-configured scope.", + "commands": { "allow": ["set_tooltip"], "deny": [] } + }, + "allow-set-visible": { + "identifier": "allow-set-visible", + "description": "Enables the set_visible command without any pre-configured scope.", + "commands": { "allow": 
["set_visible"], "deny": [] } + }, + "deny-get-by-id": { + "identifier": "deny-get-by-id", + "description": "Denies the get_by_id command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["get_by_id"] } + }, + "deny-new": { + "identifier": "deny-new", + "description": "Denies the new command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["new"] } + }, + "deny-remove-by-id": { + "identifier": "deny-remove-by-id", + "description": "Denies the remove_by_id command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["remove_by_id"] } + }, + "deny-set-icon": { + "identifier": "deny-set-icon", + "description": "Denies the set_icon command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_icon"] } + }, + "deny-set-icon-as-template": { + "identifier": "deny-set-icon-as-template", + "description": "Denies the set_icon_as_template command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_icon_as_template"] } + }, + "deny-set-menu": { + "identifier": "deny-set-menu", + "description": "Denies the set_menu command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_menu"] } + }, + "deny-set-show-menu-on-left-click": { + "identifier": "deny-set-show-menu-on-left-click", + "description": "Denies the set_show_menu_on_left_click command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_show_menu_on_left_click"] } + }, + "deny-set-temp-dir-path": { + "identifier": "deny-set-temp-dir-path", + "description": "Denies the set_temp_dir_path command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_temp_dir_path"] } + }, + "deny-set-title": { + "identifier": "deny-set-title", + "description": "Denies the set_title command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_title"] } + }, + "deny-set-tooltip": { + "identifier": "deny-set-tooltip", + 
"description": "Denies the set_tooltip command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_tooltip"] } + }, + "deny-set-visible": { + "identifier": "deny-set-visible", + "description": "Denies the set_visible command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_visible"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:webview": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin.", + "permissions": ["allow-get-all-webviews", "allow-webview-position", "allow-webview-size", "allow-internal-toggle-devtools"] + }, + "permissions": { + "allow-clear-all-browsing-data": { + "identifier": "allow-clear-all-browsing-data", + "description": "Enables the clear_all_browsing_data command without any pre-configured scope.", + "commands": { "allow": ["clear_all_browsing_data"], "deny": [] } + }, + "allow-create-webview": { + "identifier": "allow-create-webview", + "description": "Enables the create_webview command without any pre-configured scope.", + "commands": { "allow": ["create_webview"], "deny": [] } + }, + "allow-create-webview-window": { + "identifier": "allow-create-webview-window", + "description": "Enables the create_webview_window command without any pre-configured scope.", + "commands": { "allow": ["create_webview_window"], "deny": [] } + }, + "allow-get-all-webviews": { + "identifier": "allow-get-all-webviews", + "description": "Enables the get_all_webviews command without any pre-configured scope.", + "commands": { "allow": ["get_all_webviews"], "deny": [] } + }, + "allow-internal-toggle-devtools": { + "identifier": "allow-internal-toggle-devtools", + "description": "Enables the internal_toggle_devtools command without any pre-configured scope.", + "commands": { "allow": ["internal_toggle_devtools"], "deny": [] } + }, + "allow-print": { + "identifier": "allow-print", + "description": "Enables the print command 
without any pre-configured scope.", + "commands": { "allow": ["print"], "deny": [] } + }, + "allow-reparent": { + "identifier": "allow-reparent", + "description": "Enables the reparent command without any pre-configured scope.", + "commands": { "allow": ["reparent"], "deny": [] } + }, + "allow-set-webview-auto-resize": { + "identifier": "allow-set-webview-auto-resize", + "description": "Enables the set_webview_auto_resize command without any pre-configured scope.", + "commands": { "allow": ["set_webview_auto_resize"], "deny": [] } + }, + "allow-set-webview-background-color": { + "identifier": "allow-set-webview-background-color", + "description": "Enables the set_webview_background_color command without any pre-configured scope.", + "commands": { "allow": ["set_webview_background_color"], "deny": [] } + }, + "allow-set-webview-focus": { + "identifier": "allow-set-webview-focus", + "description": "Enables the set_webview_focus command without any pre-configured scope.", + "commands": { "allow": ["set_webview_focus"], "deny": [] } + }, + "allow-set-webview-position": { + "identifier": "allow-set-webview-position", + "description": "Enables the set_webview_position command without any pre-configured scope.", + "commands": { "allow": ["set_webview_position"], "deny": [] } + }, + "allow-set-webview-size": { + "identifier": "allow-set-webview-size", + "description": "Enables the set_webview_size command without any pre-configured scope.", + "commands": { "allow": ["set_webview_size"], "deny": [] } + }, + "allow-set-webview-zoom": { + "identifier": "allow-set-webview-zoom", + "description": "Enables the set_webview_zoom command without any pre-configured scope.", + "commands": { "allow": ["set_webview_zoom"], "deny": [] } + }, + "allow-webview-close": { + "identifier": "allow-webview-close", + "description": "Enables the webview_close command without any pre-configured scope.", + "commands": { "allow": ["webview_close"], "deny": [] } + }, + "allow-webview-hide": { + 
"identifier": "allow-webview-hide", + "description": "Enables the webview_hide command without any pre-configured scope.", + "commands": { "allow": ["webview_hide"], "deny": [] } + }, + "allow-webview-position": { + "identifier": "allow-webview-position", + "description": "Enables the webview_position command without any pre-configured scope.", + "commands": { "allow": ["webview_position"], "deny": [] } + }, + "allow-webview-show": { + "identifier": "allow-webview-show", + "description": "Enables the webview_show command without any pre-configured scope.", + "commands": { "allow": ["webview_show"], "deny": [] } + }, + "allow-webview-size": { + "identifier": "allow-webview-size", + "description": "Enables the webview_size command without any pre-configured scope.", + "commands": { "allow": ["webview_size"], "deny": [] } + }, + "deny-clear-all-browsing-data": { + "identifier": "deny-clear-all-browsing-data", + "description": "Denies the clear_all_browsing_data command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["clear_all_browsing_data"] } + }, + "deny-create-webview": { + "identifier": "deny-create-webview", + "description": "Denies the create_webview command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["create_webview"] } + }, + "deny-create-webview-window": { + "identifier": "deny-create-webview-window", + "description": "Denies the create_webview_window command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["create_webview_window"] } + }, + "deny-get-all-webviews": { + "identifier": "deny-get-all-webviews", + "description": "Denies the get_all_webviews command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["get_all_webviews"] } + }, + "deny-internal-toggle-devtools": { + "identifier": "deny-internal-toggle-devtools", + "description": "Denies the internal_toggle_devtools command without any pre-configured scope.", + "commands": { "allow": [], "deny": 
["internal_toggle_devtools"] } + }, + "deny-print": { + "identifier": "deny-print", + "description": "Denies the print command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["print"] } + }, + "deny-reparent": { + "identifier": "deny-reparent", + "description": "Denies the reparent command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["reparent"] } + }, + "deny-set-webview-auto-resize": { + "identifier": "deny-set-webview-auto-resize", + "description": "Denies the set_webview_auto_resize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_webview_auto_resize"] } + }, + "deny-set-webview-background-color": { + "identifier": "deny-set-webview-background-color", + "description": "Denies the set_webview_background_color command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_webview_background_color"] } + }, + "deny-set-webview-focus": { + "identifier": "deny-set-webview-focus", + "description": "Denies the set_webview_focus command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_webview_focus"] } + }, + "deny-set-webview-position": { + "identifier": "deny-set-webview-position", + "description": "Denies the set_webview_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_webview_position"] } + }, + "deny-set-webview-size": { + "identifier": "deny-set-webview-size", + "description": "Denies the set_webview_size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_webview_size"] } + }, + "deny-set-webview-zoom": { + "identifier": "deny-set-webview-zoom", + "description": "Denies the set_webview_zoom command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_webview_zoom"] } + }, + "deny-webview-close": { + "identifier": "deny-webview-close", + "description": "Denies the webview_close command without any pre-configured 
scope.", + "commands": { "allow": [], "deny": ["webview_close"] } + }, + "deny-webview-hide": { + "identifier": "deny-webview-hide", + "description": "Denies the webview_hide command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["webview_hide"] } + }, + "deny-webview-position": { + "identifier": "deny-webview-position", + "description": "Denies the webview_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["webview_position"] } + }, + "deny-webview-show": { + "identifier": "deny-webview-show", + "description": "Denies the webview_show command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["webview_show"] } + }, + "deny-webview-size": { + "identifier": "deny-webview-size", + "description": "Denies the webview_size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["webview_size"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "core:window": { + "default_permission": { + "identifier": "default", + "description": "Default permissions for the plugin.", + "permissions": [ + "allow-get-all-windows", + "allow-scale-factor", + "allow-inner-position", + "allow-outer-position", + "allow-inner-size", + "allow-outer-size", + "allow-is-fullscreen", + "allow-is-minimized", + "allow-is-maximized", + "allow-is-focused", + "allow-is-decorated", + "allow-is-resizable", + "allow-is-maximizable", + "allow-is-minimizable", + "allow-is-closable", + "allow-is-visible", + "allow-is-enabled", + "allow-title", + "allow-current-monitor", + "allow-primary-monitor", + "allow-monitor-from-point", + "allow-available-monitors", + "allow-cursor-position", + "allow-theme", + "allow-is-always-on-top", + "allow-internal-toggle-maximize" + ] + }, + "permissions": { + "allow-available-monitors": { + "identifier": "allow-available-monitors", + "description": "Enables the available_monitors command without any pre-configured scope.", + "commands": { 
"allow": ["available_monitors"], "deny": [] } + }, + "allow-center": { + "identifier": "allow-center", + "description": "Enables the center command without any pre-configured scope.", + "commands": { "allow": ["center"], "deny": [] } + }, + "allow-close": { + "identifier": "allow-close", + "description": "Enables the close command without any pre-configured scope.", + "commands": { "allow": ["close"], "deny": [] } + }, + "allow-create": { + "identifier": "allow-create", + "description": "Enables the create command without any pre-configured scope.", + "commands": { "allow": ["create"], "deny": [] } + }, + "allow-current-monitor": { + "identifier": "allow-current-monitor", + "description": "Enables the current_monitor command without any pre-configured scope.", + "commands": { "allow": ["current_monitor"], "deny": [] } + }, + "allow-cursor-position": { + "identifier": "allow-cursor-position", + "description": "Enables the cursor_position command without any pre-configured scope.", + "commands": { "allow": ["cursor_position"], "deny": [] } + }, + "allow-destroy": { + "identifier": "allow-destroy", + "description": "Enables the destroy command without any pre-configured scope.", + "commands": { "allow": ["destroy"], "deny": [] } + }, + "allow-get-all-windows": { + "identifier": "allow-get-all-windows", + "description": "Enables the get_all_windows command without any pre-configured scope.", + "commands": { "allow": ["get_all_windows"], "deny": [] } + }, + "allow-hide": { + "identifier": "allow-hide", + "description": "Enables the hide command without any pre-configured scope.", + "commands": { "allow": ["hide"], "deny": [] } + }, + "allow-inner-position": { + "identifier": "allow-inner-position", + "description": "Enables the inner_position command without any pre-configured scope.", + "commands": { "allow": ["inner_position"], "deny": [] } + }, + "allow-inner-size": { + "identifier": "allow-inner-size", + "description": "Enables the inner_size command without any 
pre-configured scope.", + "commands": { "allow": ["inner_size"], "deny": [] } + }, + "allow-internal-toggle-maximize": { + "identifier": "allow-internal-toggle-maximize", + "description": "Enables the internal_toggle_maximize command without any pre-configured scope.", + "commands": { "allow": ["internal_toggle_maximize"], "deny": [] } + }, + "allow-is-always-on-top": { + "identifier": "allow-is-always-on-top", + "description": "Enables the is_always_on_top command without any pre-configured scope.", + "commands": { "allow": ["is_always_on_top"], "deny": [] } + }, + "allow-is-closable": { + "identifier": "allow-is-closable", + "description": "Enables the is_closable command without any pre-configured scope.", + "commands": { "allow": ["is_closable"], "deny": [] } + }, + "allow-is-decorated": { + "identifier": "allow-is-decorated", + "description": "Enables the is_decorated command without any pre-configured scope.", + "commands": { "allow": ["is_decorated"], "deny": [] } + }, + "allow-is-enabled": { + "identifier": "allow-is-enabled", + "description": "Enables the is_enabled command without any pre-configured scope.", + "commands": { "allow": ["is_enabled"], "deny": [] } + }, + "allow-is-focused": { + "identifier": "allow-is-focused", + "description": "Enables the is_focused command without any pre-configured scope.", + "commands": { "allow": ["is_focused"], "deny": [] } + }, + "allow-is-fullscreen": { + "identifier": "allow-is-fullscreen", + "description": "Enables the is_fullscreen command without any pre-configured scope.", + "commands": { "allow": ["is_fullscreen"], "deny": [] } + }, + "allow-is-maximizable": { + "identifier": "allow-is-maximizable", + "description": "Enables the is_maximizable command without any pre-configured scope.", + "commands": { "allow": ["is_maximizable"], "deny": [] } + }, + "allow-is-maximized": { + "identifier": "allow-is-maximized", + "description": "Enables the is_maximized command without any pre-configured scope.", + "commands": 
{ "allow": ["is_maximized"], "deny": [] } + }, + "allow-is-minimizable": { + "identifier": "allow-is-minimizable", + "description": "Enables the is_minimizable command without any pre-configured scope.", + "commands": { "allow": ["is_minimizable"], "deny": [] } + }, + "allow-is-minimized": { + "identifier": "allow-is-minimized", + "description": "Enables the is_minimized command without any pre-configured scope.", + "commands": { "allow": ["is_minimized"], "deny": [] } + }, + "allow-is-resizable": { + "identifier": "allow-is-resizable", + "description": "Enables the is_resizable command without any pre-configured scope.", + "commands": { "allow": ["is_resizable"], "deny": [] } + }, + "allow-is-visible": { + "identifier": "allow-is-visible", + "description": "Enables the is_visible command without any pre-configured scope.", + "commands": { "allow": ["is_visible"], "deny": [] } + }, + "allow-maximize": { + "identifier": "allow-maximize", + "description": "Enables the maximize command without any pre-configured scope.", + "commands": { "allow": ["maximize"], "deny": [] } + }, + "allow-minimize": { + "identifier": "allow-minimize", + "description": "Enables the minimize command without any pre-configured scope.", + "commands": { "allow": ["minimize"], "deny": [] } + }, + "allow-monitor-from-point": { + "identifier": "allow-monitor-from-point", + "description": "Enables the monitor_from_point command without any pre-configured scope.", + "commands": { "allow": ["monitor_from_point"], "deny": [] } + }, + "allow-outer-position": { + "identifier": "allow-outer-position", + "description": "Enables the outer_position command without any pre-configured scope.", + "commands": { "allow": ["outer_position"], "deny": [] } + }, + "allow-outer-size": { + "identifier": "allow-outer-size", + "description": "Enables the outer_size command without any pre-configured scope.", + "commands": { "allow": ["outer_size"], "deny": [] } + }, + "allow-primary-monitor": { + "identifier": 
"allow-primary-monitor", + "description": "Enables the primary_monitor command without any pre-configured scope.", + "commands": { "allow": ["primary_monitor"], "deny": [] } + }, + "allow-request-user-attention": { + "identifier": "allow-request-user-attention", + "description": "Enables the request_user_attention command without any pre-configured scope.", + "commands": { "allow": ["request_user_attention"], "deny": [] } + }, + "allow-scale-factor": { + "identifier": "allow-scale-factor", + "description": "Enables the scale_factor command without any pre-configured scope.", + "commands": { "allow": ["scale_factor"], "deny": [] } + }, + "allow-set-always-on-bottom": { + "identifier": "allow-set-always-on-bottom", + "description": "Enables the set_always_on_bottom command without any pre-configured scope.", + "commands": { "allow": ["set_always_on_bottom"], "deny": [] } + }, + "allow-set-always-on-top": { + "identifier": "allow-set-always-on-top", + "description": "Enables the set_always_on_top command without any pre-configured scope.", + "commands": { "allow": ["set_always_on_top"], "deny": [] } + }, + "allow-set-background-color": { + "identifier": "allow-set-background-color", + "description": "Enables the set_background_color command without any pre-configured scope.", + "commands": { "allow": ["set_background_color"], "deny": [] } + }, + "allow-set-badge-count": { + "identifier": "allow-set-badge-count", + "description": "Enables the set_badge_count command without any pre-configured scope.", + "commands": { "allow": ["set_badge_count"], "deny": [] } + }, + "allow-set-badge-label": { + "identifier": "allow-set-badge-label", + "description": "Enables the set_badge_label command without any pre-configured scope.", + "commands": { "allow": ["set_badge_label"], "deny": [] } + }, + "allow-set-closable": { + "identifier": "allow-set-closable", + "description": "Enables the set_closable command without any pre-configured scope.", + "commands": { "allow": 
["set_closable"], "deny": [] } + }, + "allow-set-content-protected": { + "identifier": "allow-set-content-protected", + "description": "Enables the set_content_protected command without any pre-configured scope.", + "commands": { "allow": ["set_content_protected"], "deny": [] } + }, + "allow-set-cursor-grab": { + "identifier": "allow-set-cursor-grab", + "description": "Enables the set_cursor_grab command without any pre-configured scope.", + "commands": { "allow": ["set_cursor_grab"], "deny": [] } + }, + "allow-set-cursor-icon": { + "identifier": "allow-set-cursor-icon", + "description": "Enables the set_cursor_icon command without any pre-configured scope.", + "commands": { "allow": ["set_cursor_icon"], "deny": [] } + }, + "allow-set-cursor-position": { + "identifier": "allow-set-cursor-position", + "description": "Enables the set_cursor_position command without any pre-configured scope.", + "commands": { "allow": ["set_cursor_position"], "deny": [] } + }, + "allow-set-cursor-visible": { + "identifier": "allow-set-cursor-visible", + "description": "Enables the set_cursor_visible command without any pre-configured scope.", + "commands": { "allow": ["set_cursor_visible"], "deny": [] } + }, + "allow-set-decorations": { + "identifier": "allow-set-decorations", + "description": "Enables the set_decorations command without any pre-configured scope.", + "commands": { "allow": ["set_decorations"], "deny": [] } + }, + "allow-set-effects": { + "identifier": "allow-set-effects", + "description": "Enables the set_effects command without any pre-configured scope.", + "commands": { "allow": ["set_effects"], "deny": [] } + }, + "allow-set-enabled": { + "identifier": "allow-set-enabled", + "description": "Enables the set_enabled command without any pre-configured scope.", + "commands": { "allow": ["set_enabled"], "deny": [] } + }, + "allow-set-focus": { + "identifier": "allow-set-focus", + "description": "Enables the set_focus command without any pre-configured scope.", + 
"commands": { "allow": ["set_focus"], "deny": [] } + }, + "allow-set-focusable": { + "identifier": "allow-set-focusable", + "description": "Enables the set_focusable command without any pre-configured scope.", + "commands": { "allow": ["set_focusable"], "deny": [] } + }, + "allow-set-fullscreen": { + "identifier": "allow-set-fullscreen", + "description": "Enables the set_fullscreen command without any pre-configured scope.", + "commands": { "allow": ["set_fullscreen"], "deny": [] } + }, + "allow-set-icon": { + "identifier": "allow-set-icon", + "description": "Enables the set_icon command without any pre-configured scope.", + "commands": { "allow": ["set_icon"], "deny": [] } + }, + "allow-set-ignore-cursor-events": { + "identifier": "allow-set-ignore-cursor-events", + "description": "Enables the set_ignore_cursor_events command without any pre-configured scope.", + "commands": { "allow": ["set_ignore_cursor_events"], "deny": [] } + }, + "allow-set-max-size": { + "identifier": "allow-set-max-size", + "description": "Enables the set_max_size command without any pre-configured scope.", + "commands": { "allow": ["set_max_size"], "deny": [] } + }, + "allow-set-maximizable": { + "identifier": "allow-set-maximizable", + "description": "Enables the set_maximizable command without any pre-configured scope.", + "commands": { "allow": ["set_maximizable"], "deny": [] } + }, + "allow-set-min-size": { + "identifier": "allow-set-min-size", + "description": "Enables the set_min_size command without any pre-configured scope.", + "commands": { "allow": ["set_min_size"], "deny": [] } + }, + "allow-set-minimizable": { + "identifier": "allow-set-minimizable", + "description": "Enables the set_minimizable command without any pre-configured scope.", + "commands": { "allow": ["set_minimizable"], "deny": [] } + }, + "allow-set-overlay-icon": { + "identifier": "allow-set-overlay-icon", + "description": "Enables the set_overlay_icon command without any pre-configured scope.", + "commands": { 
"allow": ["set_overlay_icon"], "deny": [] } + }, + "allow-set-position": { + "identifier": "allow-set-position", + "description": "Enables the set_position command without any pre-configured scope.", + "commands": { "allow": ["set_position"], "deny": [] } + }, + "allow-set-progress-bar": { + "identifier": "allow-set-progress-bar", + "description": "Enables the set_progress_bar command without any pre-configured scope.", + "commands": { "allow": ["set_progress_bar"], "deny": [] } + }, + "allow-set-resizable": { + "identifier": "allow-set-resizable", + "description": "Enables the set_resizable command without any pre-configured scope.", + "commands": { "allow": ["set_resizable"], "deny": [] } + }, + "allow-set-shadow": { + "identifier": "allow-set-shadow", + "description": "Enables the set_shadow command without any pre-configured scope.", + "commands": { "allow": ["set_shadow"], "deny": [] } + }, + "allow-set-simple-fullscreen": { + "identifier": "allow-set-simple-fullscreen", + "description": "Enables the set_simple_fullscreen command without any pre-configured scope.", + "commands": { "allow": ["set_simple_fullscreen"], "deny": [] } + }, + "allow-set-size": { + "identifier": "allow-set-size", + "description": "Enables the set_size command without any pre-configured scope.", + "commands": { "allow": ["set_size"], "deny": [] } + }, + "allow-set-size-constraints": { + "identifier": "allow-set-size-constraints", + "description": "Enables the set_size_constraints command without any pre-configured scope.", + "commands": { "allow": ["set_size_constraints"], "deny": [] } + }, + "allow-set-skip-taskbar": { + "identifier": "allow-set-skip-taskbar", + "description": "Enables the set_skip_taskbar command without any pre-configured scope.", + "commands": { "allow": ["set_skip_taskbar"], "deny": [] } + }, + "allow-set-theme": { + "identifier": "allow-set-theme", + "description": "Enables the set_theme command without any pre-configured scope.", + "commands": { "allow": 
["set_theme"], "deny": [] } + }, + "allow-set-title": { + "identifier": "allow-set-title", + "description": "Enables the set_title command without any pre-configured scope.", + "commands": { "allow": ["set_title"], "deny": [] } + }, + "allow-set-title-bar-style": { + "identifier": "allow-set-title-bar-style", + "description": "Enables the set_title_bar_style command without any pre-configured scope.", + "commands": { "allow": ["set_title_bar_style"], "deny": [] } + }, + "allow-set-visible-on-all-organizations": { + "identifier": "allow-set-visible-on-all-organizations", + "description": "Enables the set_visible_on_all_organizations command without any pre-configured scope.", + "commands": { "allow": ["set_visible_on_all_organizations"], "deny": [] } + }, + "allow-show": { + "identifier": "allow-show", + "description": "Enables the show command without any pre-configured scope.", + "commands": { "allow": ["show"], "deny": [] } + }, + "allow-start-dragging": { + "identifier": "allow-start-dragging", + "description": "Enables the start_dragging command without any pre-configured scope.", + "commands": { "allow": ["start_dragging"], "deny": [] } + }, + "allow-start-resize-dragging": { + "identifier": "allow-start-resize-dragging", + "description": "Enables the start_resize_dragging command without any pre-configured scope.", + "commands": { "allow": ["start_resize_dragging"], "deny": [] } + }, + "allow-theme": { + "identifier": "allow-theme", + "description": "Enables the theme command without any pre-configured scope.", + "commands": { "allow": ["theme"], "deny": [] } + }, + "allow-title": { + "identifier": "allow-title", + "description": "Enables the title command without any pre-configured scope.", + "commands": { "allow": ["title"], "deny": [] } + }, + "allow-toggle-maximize": { + "identifier": "allow-toggle-maximize", + "description": "Enables the toggle_maximize command without any pre-configured scope.", + "commands": { "allow": ["toggle_maximize"], "deny": [] } 
+ }, + "allow-unmaximize": { + "identifier": "allow-unmaximize", + "description": "Enables the unmaximize command without any pre-configured scope.", + "commands": { "allow": ["unmaximize"], "deny": [] } + }, + "allow-unminimize": { + "identifier": "allow-unminimize", + "description": "Enables the unminimize command without any pre-configured scope.", + "commands": { "allow": ["unminimize"], "deny": [] } + }, + "deny-available-monitors": { + "identifier": "deny-available-monitors", + "description": "Denies the available_monitors command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["available_monitors"] } + }, + "deny-center": { + "identifier": "deny-center", + "description": "Denies the center command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["center"] } + }, + "deny-close": { + "identifier": "deny-close", + "description": "Denies the close command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["close"] } + }, + "deny-create": { + "identifier": "deny-create", + "description": "Denies the create command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["create"] } + }, + "deny-current-monitor": { + "identifier": "deny-current-monitor", + "description": "Denies the current_monitor command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["current_monitor"] } + }, + "deny-cursor-position": { + "identifier": "deny-cursor-position", + "description": "Denies the cursor_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["cursor_position"] } + }, + "deny-destroy": { + "identifier": "deny-destroy", + "description": "Denies the destroy command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["destroy"] } + }, + "deny-get-all-windows": { + "identifier": "deny-get-all-windows", + "description": "Denies the get_all_windows command without any pre-configured scope.", + "commands": 
{ "allow": [], "deny": ["get_all_windows"] } + }, + "deny-hide": { + "identifier": "deny-hide", + "description": "Denies the hide command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["hide"] } + }, + "deny-inner-position": { + "identifier": "deny-inner-position", + "description": "Denies the inner_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["inner_position"] } + }, + "deny-inner-size": { + "identifier": "deny-inner-size", + "description": "Denies the inner_size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["inner_size"] } + }, + "deny-internal-toggle-maximize": { + "identifier": "deny-internal-toggle-maximize", + "description": "Denies the internal_toggle_maximize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["internal_toggle_maximize"] } + }, + "deny-is-always-on-top": { + "identifier": "deny-is-always-on-top", + "description": "Denies the is_always_on_top command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_always_on_top"] } + }, + "deny-is-closable": { + "identifier": "deny-is-closable", + "description": "Denies the is_closable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_closable"] } + }, + "deny-is-decorated": { + "identifier": "deny-is-decorated", + "description": "Denies the is_decorated command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_decorated"] } + }, + "deny-is-enabled": { + "identifier": "deny-is-enabled", + "description": "Denies the is_enabled command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_enabled"] } + }, + "deny-is-focused": { + "identifier": "deny-is-focused", + "description": "Denies the is_focused command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_focused"] } + }, + "deny-is-fullscreen": { + "identifier": 
"deny-is-fullscreen", + "description": "Denies the is_fullscreen command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_fullscreen"] } + }, + "deny-is-maximizable": { + "identifier": "deny-is-maximizable", + "description": "Denies the is_maximizable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_maximizable"] } + }, + "deny-is-maximized": { + "identifier": "deny-is-maximized", + "description": "Denies the is_maximized command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_maximized"] } + }, + "deny-is-minimizable": { + "identifier": "deny-is-minimizable", + "description": "Denies the is_minimizable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_minimizable"] } + }, + "deny-is-minimized": { + "identifier": "deny-is-minimized", + "description": "Denies the is_minimized command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_minimized"] } + }, + "deny-is-resizable": { + "identifier": "deny-is-resizable", + "description": "Denies the is_resizable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_resizable"] } + }, + "deny-is-visible": { + "identifier": "deny-is-visible", + "description": "Denies the is_visible command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["is_visible"] } + }, + "deny-maximize": { + "identifier": "deny-maximize", + "description": "Denies the maximize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["maximize"] } + }, + "deny-minimize": { + "identifier": "deny-minimize", + "description": "Denies the minimize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["minimize"] } + }, + "deny-monitor-from-point": { + "identifier": "deny-monitor-from-point", + "description": "Denies the monitor_from_point command without any pre-configured scope.", + "commands": 
{ "allow": [], "deny": ["monitor_from_point"] } + }, + "deny-outer-position": { + "identifier": "deny-outer-position", + "description": "Denies the outer_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["outer_position"] } + }, + "deny-outer-size": { + "identifier": "deny-outer-size", + "description": "Denies the outer_size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["outer_size"] } + }, + "deny-primary-monitor": { + "identifier": "deny-primary-monitor", + "description": "Denies the primary_monitor command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["primary_monitor"] } + }, + "deny-request-user-attention": { + "identifier": "deny-request-user-attention", + "description": "Denies the request_user_attention command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["request_user_attention"] } + }, + "deny-scale-factor": { + "identifier": "deny-scale-factor", + "description": "Denies the scale_factor command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["scale_factor"] } + }, + "deny-set-always-on-bottom": { + "identifier": "deny-set-always-on-bottom", + "description": "Denies the set_always_on_bottom command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_always_on_bottom"] } + }, + "deny-set-always-on-top": { + "identifier": "deny-set-always-on-top", + "description": "Denies the set_always_on_top command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_always_on_top"] } + }, + "deny-set-background-color": { + "identifier": "deny-set-background-color", + "description": "Denies the set_background_color command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_background_color"] } + }, + "deny-set-badge-count": { + "identifier": "deny-set-badge-count", + "description": "Denies the set_badge_count command without any 
pre-configured scope.", + "commands": { "allow": [], "deny": ["set_badge_count"] } + }, + "deny-set-badge-label": { + "identifier": "deny-set-badge-label", + "description": "Denies the set_badge_label command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_badge_label"] } + }, + "deny-set-closable": { + "identifier": "deny-set-closable", + "description": "Denies the set_closable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_closable"] } + }, + "deny-set-content-protected": { + "identifier": "deny-set-content-protected", + "description": "Denies the set_content_protected command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_content_protected"] } + }, + "deny-set-cursor-grab": { + "identifier": "deny-set-cursor-grab", + "description": "Denies the set_cursor_grab command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_cursor_grab"] } + }, + "deny-set-cursor-icon": { + "identifier": "deny-set-cursor-icon", + "description": "Denies the set_cursor_icon command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_cursor_icon"] } + }, + "deny-set-cursor-position": { + "identifier": "deny-set-cursor-position", + "description": "Denies the set_cursor_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_cursor_position"] } + }, + "deny-set-cursor-visible": { + "identifier": "deny-set-cursor-visible", + "description": "Denies the set_cursor_visible command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_cursor_visible"] } + }, + "deny-set-decorations": { + "identifier": "deny-set-decorations", + "description": "Denies the set_decorations command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_decorations"] } + }, + "deny-set-effects": { + "identifier": "deny-set-effects", + "description": "Denies the set_effects 
command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_effects"] } + }, + "deny-set-enabled": { + "identifier": "deny-set-enabled", + "description": "Denies the set_enabled command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_enabled"] } + }, + "deny-set-focus": { + "identifier": "deny-set-focus", + "description": "Denies the set_focus command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_focus"] } + }, + "deny-set-focusable": { + "identifier": "deny-set-focusable", + "description": "Denies the set_focusable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_focusable"] } + }, + "deny-set-fullscreen": { + "identifier": "deny-set-fullscreen", + "description": "Denies the set_fullscreen command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_fullscreen"] } + }, + "deny-set-icon": { + "identifier": "deny-set-icon", + "description": "Denies the set_icon command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_icon"] } + }, + "deny-set-ignore-cursor-events": { + "identifier": "deny-set-ignore-cursor-events", + "description": "Denies the set_ignore_cursor_events command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_ignore_cursor_events"] } + }, + "deny-set-max-size": { + "identifier": "deny-set-max-size", + "description": "Denies the set_max_size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_max_size"] } + }, + "deny-set-maximizable": { + "identifier": "deny-set-maximizable", + "description": "Denies the set_maximizable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_maximizable"] } + }, + "deny-set-min-size": { + "identifier": "deny-set-min-size", + "description": "Denies the set_min_size command without any pre-configured scope.", + "commands": { "allow": [], 
"deny": ["set_min_size"] } + }, + "deny-set-minimizable": { + "identifier": "deny-set-minimizable", + "description": "Denies the set_minimizable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_minimizable"] } + }, + "deny-set-overlay-icon": { + "identifier": "deny-set-overlay-icon", + "description": "Denies the set_overlay_icon command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_overlay_icon"] } + }, + "deny-set-position": { + "identifier": "deny-set-position", + "description": "Denies the set_position command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_position"] } + }, + "deny-set-progress-bar": { + "identifier": "deny-set-progress-bar", + "description": "Denies the set_progress_bar command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_progress_bar"] } + }, + "deny-set-resizable": { + "identifier": "deny-set-resizable", + "description": "Denies the set_resizable command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_resizable"] } + }, + "deny-set-shadow": { + "identifier": "deny-set-shadow", + "description": "Denies the set_shadow command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_shadow"] } + }, + "deny-set-simple-fullscreen": { + "identifier": "deny-set-simple-fullscreen", + "description": "Denies the set_simple_fullscreen command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_simple_fullscreen"] } + }, + "deny-set-size": { + "identifier": "deny-set-size", + "description": "Denies the set_size command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_size"] } + }, + "deny-set-size-constraints": { + "identifier": "deny-set-size-constraints", + "description": "Denies the set_size_constraints command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_size_constraints"] } 
+ }, + "deny-set-skip-taskbar": { + "identifier": "deny-set-skip-taskbar", + "description": "Denies the set_skip_taskbar command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_skip_taskbar"] } + }, + "deny-set-theme": { + "identifier": "deny-set-theme", + "description": "Denies the set_theme command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_theme"] } + }, + "deny-set-title": { + "identifier": "deny-set-title", + "description": "Denies the set_title command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_title"] } + }, + "deny-set-title-bar-style": { + "identifier": "deny-set-title-bar-style", + "description": "Denies the set_title_bar_style command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_title_bar_style"] } + }, + "deny-set-visible-on-all-organizations": { + "identifier": "deny-set-visible-on-all-organizations", + "description": "Denies the set_visible_on_all_organizations command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["set_visible_on_all_organizations"] } + }, + "deny-show": { + "identifier": "deny-show", + "description": "Denies the show command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["show"] } + }, + "deny-start-dragging": { + "identifier": "deny-start-dragging", + "description": "Denies the start_dragging command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["start_dragging"] } + }, + "deny-start-resize-dragging": { + "identifier": "deny-start-resize-dragging", + "description": "Denies the start_resize_dragging command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["start_resize_dragging"] } + }, + "deny-theme": { + "identifier": "deny-theme", + "description": "Denies the theme command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["theme"] } + }, + "deny-title": { + 
"identifier": "deny-title", + "description": "Denies the title command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["title"] } + }, + "deny-toggle-maximize": { + "identifier": "deny-toggle-maximize", + "description": "Denies the toggle_maximize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["toggle_maximize"] } + }, + "deny-unmaximize": { + "identifier": "deny-unmaximize", + "description": "Denies the unmaximize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["unmaximize"] } + }, + "deny-unminimize": { + "identifier": "deny-unminimize", + "description": "Denies the unminimize command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["unminimize"] } + } + }, + "permission_sets": {}, + "global_scope_schema": null + }, + "shell": { + "default_permission": { + "identifier": "default", + "description": "This permission set configures which\nshell functionality is exposed by default.\n\n#### Granted Permissions\n\nIt allows to use the `open` functionality with a reasonable\nscope pre-configured. 
It will allow opening `http(s)://`,\n`tel:` and `mailto:` links.\n", + "permissions": ["allow-open"] + }, + "permissions": { + "allow-execute": { + "identifier": "allow-execute", + "description": "Enables the execute command without any pre-configured scope.", + "commands": { "allow": ["execute"], "deny": [] } + }, + "allow-kill": { + "identifier": "allow-kill", + "description": "Enables the kill command without any pre-configured scope.", + "commands": { "allow": ["kill"], "deny": [] } + }, + "allow-open": { + "identifier": "allow-open", + "description": "Enables the open command without any pre-configured scope.", + "commands": { "allow": ["open"], "deny": [] } + }, + "allow-spawn": { + "identifier": "allow-spawn", + "description": "Enables the spawn command without any pre-configured scope.", + "commands": { "allow": ["spawn"], "deny": [] } + }, + "allow-stdin-write": { + "identifier": "allow-stdin-write", + "description": "Enables the stdin_write command without any pre-configured scope.", + "commands": { "allow": ["stdin_write"], "deny": [] } + }, + "deny-execute": { + "identifier": "deny-execute", + "description": "Denies the execute command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["execute"] } + }, + "deny-kill": { + "identifier": "deny-kill", + "description": "Denies the kill command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["kill"] } + }, + "deny-open": { + "identifier": "deny-open", + "description": "Denies the open command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["open"] } + }, + "deny-spawn": { + "identifier": "deny-spawn", + "description": "Denies the spawn command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["spawn"] } + }, + "deny-stdin-write": { + "identifier": "deny-stdin-write", + "description": "Denies the stdin_write command without any pre-configured scope.", + "commands": { "allow": [], "deny": ["stdin_write"] } + } + 
}, + "permission_sets": {}, + "global_scope_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "anyOf": [ + { + "additionalProperties": false, + "properties": { + "args": { "allOf": [{ "$ref": "#/definitions/ShellScopeEntryAllowedArgs" }], "description": "The allowed arguments for the command execution." }, + "cmd": { + "description": "The command name. It can start with a variable that resolves to a system base directory. The variables are: `$AUDIO`, `$CACHE`, `$CONFIG`, `$DATA`, `$LOCALDATA`, `$DESKTOP`, `$DOCUMENT`, `$DOWNLOAD`, `$EXE`, `$FONT`, `$HOME`, `$PICTURE`, `$PUBLIC`, `$RUNTIME`, `$TEMPLATE`, `$VIDEO`, `$RESOURCE`, `$LOG`, `$TEMP`, `$APPCONFIG`, `$APPDATA`, `$APPLOCALDATA`, `$APPCACHE`, `$APPLOG`.", + "type": "string" + }, + "name": { + "description": "The name for this allowed shell command configuration.\n\nThis name will be used inside of the webview API to call this command along with any specified arguments.", + "type": "string" + } + }, + "required": ["cmd", "name"], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "args": { "allOf": [{ "$ref": "#/definitions/ShellScopeEntryAllowedArgs" }], "description": "The allowed arguments for the command execution." 
}, + "name": { + "description": "The name for this allowed shell command configuration.\n\nThis name will be used inside of the webview API to call this command along with any specified arguments.", + "type": "string" + }, + "sidecar": { "description": "If this command is a sidecar command.", "type": "boolean" } + }, + "required": ["name", "sidecar"], + "type": "object" + } + ], + "definitions": { + "ShellScopeEntryAllowedArg": { + "anyOf": [ + { "description": "A non-configurable argument that is passed to the command in the order it was specified.", "type": "string" }, + { + "additionalProperties": false, + "description": "A variable that is set while calling the command from the webview API.", + "properties": { + "raw": { + "default": false, + "description": "Marks the validator as a raw regex, meaning the plugin should not make any modification at runtime.\n\nThis means the regex will not match on the entire string by default, which might be exploited if your regex allow unexpected input to be considered valid. When using this option, make sure your regex is correct.", + "type": "boolean" + }, + "validator": { + "description": "[regex] validator to require passed values to conform to an expected input.\n\nThis will require the argument value passed to this variable to match the `validator` regex before it will be executed.\n\nThe regex string is by default surrounded by `^...$` to match the full string. For example the `https?://\\w+` regex would be registered as `^https?://\\w+$`.\n\n[regex]: ", + "type": "string" + } + }, + "required": ["validator"], + "type": "object" + } + ], + "description": "A command argument allowed to be executed by the webview API." 
+ }, + "ShellScopeEntryAllowedArgs": { + "anyOf": [ + { "description": "Use a simple boolean to allow all or disable all arguments to this command configuration.", "type": "boolean" }, + { + "description": "A specific set of [`ShellScopeEntryAllowedArg`] that are valid to call for the command configuration.", + "items": { "$ref": "#/definitions/ShellScopeEntryAllowedArg" }, + "type": "array" + } + ], + "description": "A set of command arguments allowed to be executed by the webview API.\n\nA value of `true` will allow any arguments to be passed to the command. `false` will disable all arguments. A list of [`ShellScopeEntryAllowedArg`] will set those arguments as the only valid arguments to be passed to the attached command configuration." + } + }, + "description": "Shell scope entry.", + "title": "ShellScopeEntry" + } + } +} diff --git a/foundry/packages/desktop/src-tauri/gen/schemas/desktop-schema.json b/foundry/packages/desktop/src-tauri/gen/schemas/desktop-schema.json index f827fe1..34f0a61 100644 --- a/foundry/packages/desktop/src-tauri/gen/schemas/desktop-schema.json +++ b/foundry/packages/desktop/src-tauri/gen/schemas/desktop-schema.json @@ -21,9 +21,7 @@ { "description": "A list of capabilities.", "type": "object", - "required": [ - "capabilities" - ], + "required": ["capabilities"], "properties": { "capabilities": { "description": "The list of capabilities.", @@ -39,10 +37,7 @@ "Capability": { "description": "A grouping and boundary mechanism developers can use to isolate access to the IPC layer.\n\nIt controls application windows' and webviews' fine grained access to the Tauri core, application, or plugin commands. If a webview or its window is not matching any capability then it has no access to the IPC layer at all.\n\nThis can be done to create groups of windows, based on their required system access, which can reduce impact of frontend vulnerabilities in less privileged windows. Windows can be added to a capability by exact name (e.g. 
`main-window`) or glob patterns like `*` or `admin-*`. A Window can have none, one, or multiple associated capabilities.\n\n## Example\n\n```json { \"identifier\": \"main-user-files-write\", \"description\": \"This capability allows the `main` window on macOS and Windows access to `filesystem` write related commands and `dialog` commands to enable programmatic access to files selected by the user.\", \"windows\": [ \"main\" ], \"permissions\": [ \"core:default\", \"dialog:open\", { \"identifier\": \"fs:allow-write-text-file\", \"allow\": [{ \"path\": \"$HOME/test.txt\" }] }, ], \"platforms\": [\"macOS\",\"windows\"] } ```", "type": "object", - "required": [ - "identifier", - "permissions" - ], + "required": ["identifier", "permissions"], "properties": { "identifier": { "description": "Identifier of the capability.\n\n## Example\n\n`main-user-files-write`", @@ -93,10 +88,7 @@ }, "platforms": { "description": "Limit which target platforms this capability applies to.\n\nBy default all platforms are targeted.\n\n## Example\n\n`[\"macOS\",\"windows\"]`", - "type": [ - "array", - "null" - ], + "type": ["array", "null"], "items": { "$ref": "#/definitions/Target" } @@ -106,9 +98,7 @@ "CapabilityRemote": { "description": "Configuration for remote URLs that are associated with the capability.", "type": "object", - "required": [ - "urls" - ], + "required": ["urls"], "properties": { "urls": { "description": "Remote domains this capability refers to using the [URLPattern standard](https://urlpattern.spec.whatwg.org/).\n\n## Examples\n\n- \"https://*.mydomain.dev\": allows subdomains of mydomain.dev - \"https://mydomain.dev/api/*\": allows any subpath of mydomain.dev/api", @@ -218,10 +208,7 @@ "anyOf": [ { "type": "object", - "required": [ - "cmd", - "name" - ], + "required": ["cmd", "name"], "properties": { "args": { "description": "The allowed arguments for the command execution.", @@ -244,10 +231,7 @@ }, { "type": "object", - "required": [ - "name", - "sidecar" - ], + 
"required": ["name", "sidecar"], "properties": { "args": { "description": "The allowed arguments for the command execution.", @@ -278,10 +262,7 @@ "anyOf": [ { "type": "object", - "required": [ - "cmd", - "name" - ], + "required": ["cmd", "name"], "properties": { "args": { "description": "The allowed arguments for the command execution.", @@ -304,10 +285,7 @@ }, { "type": "object", - "required": [ - "name", - "sidecar" - ], + "required": ["name", "sidecar"], "properties": { "args": { "description": "The allowed arguments for the command execution.", @@ -356,20 +334,14 @@ }, "allow": { "description": "Data that defines what is allowed by the scope.", - "type": [ - "array", - "null" - ], + "type": ["array", "null"], "items": { "$ref": "#/definitions/Value" } }, "deny": { "description": "Data that defines what is denied by the scope. This should be prioritized by validation logic.", - "type": [ - "array", - "null" - ], + "type": ["array", "null"], "items": { "$ref": "#/definitions/Value" } @@ -377,9 +349,7 @@ } } ], - "required": [ - "identifier" - ] + "required": ["identifier"] } ] }, @@ -1845,10 +1815,10 @@ "markdownDescription": "Enables the set_title_bar_style command without any pre-configured scope." }, { - "description": "Enables the set_visible_on_all_workspaces command without any pre-configured scope.", + "description": "Enables the set_visible_on_all_organizations command without any pre-configured scope.", "type": "string", - "const": "core:window:allow-set-visible-on-all-workspaces", - "markdownDescription": "Enables the set_visible_on_all_workspaces command without any pre-configured scope." + "const": "core:window:allow-set-visible-on-all-organizations", + "markdownDescription": "Enables the set_visible_on_all_organizations command without any pre-configured scope." 
}, { "description": "Enables the show command without any pre-configured scope.", @@ -2301,10 +2271,10 @@ "markdownDescription": "Denies the set_title_bar_style command without any pre-configured scope." }, { - "description": "Denies the set_visible_on_all_workspaces command without any pre-configured scope.", + "description": "Denies the set_visible_on_all_organizations command without any pre-configured scope.", "type": "string", - "const": "core:window:deny-set-visible-on-all-workspaces", - "markdownDescription": "Denies the set_visible_on_all_workspaces command without any pre-configured scope." + "const": "core:window:deny-set-visible-on-all-organizations", + "markdownDescription": "Denies the set_visible_on_all_organizations command without any pre-configured scope." }, { "description": "Denies the show command without any pre-configured scope.", @@ -2482,37 +2452,27 @@ { "description": "MacOS.", "type": "string", - "enum": [ - "macOS" - ] + "enum": ["macOS"] }, { "description": "Windows.", "type": "string", - "enum": [ - "windows" - ] + "enum": ["windows"] }, { "description": "Linux.", "type": "string", - "enum": [ - "linux" - ] + "enum": ["linux"] }, { "description": "Android.", "type": "string", - "enum": [ - "android" - ] + "enum": ["android"] }, { "description": "iOS.", "type": "string", - "enum": [ - "iOS" - ] + "enum": ["iOS"] } ] }, @@ -2526,9 +2486,7 @@ { "description": "A variable that is set while calling the command from the webview API.", "type": "object", - "required": [ - "validator" - ], + "required": ["validator"], "properties": { "raw": { "description": "Marks the validator as a raw regex, meaning the plugin should not make any modification at runtime.\n\nThis means the regex will not match on the entire string by default, which might be exploited if your regex allow unexpected input to be considered valid. 
When using this option, make sure your regex is correct.", @@ -2561,4 +2519,4 @@ ] } } -} \ No newline at end of file +} diff --git a/foundry/packages/desktop/src-tauri/gen/schemas/macOS-schema.json b/foundry/packages/desktop/src-tauri/gen/schemas/macOS-schema.json index f827fe1..34f0a61 100644 --- a/foundry/packages/desktop/src-tauri/gen/schemas/macOS-schema.json +++ b/foundry/packages/desktop/src-tauri/gen/schemas/macOS-schema.json @@ -21,9 +21,7 @@ { "description": "A list of capabilities.", "type": "object", - "required": [ - "capabilities" - ], + "required": ["capabilities"], "properties": { "capabilities": { "description": "The list of capabilities.", @@ -39,10 +37,7 @@ "Capability": { "description": "A grouping and boundary mechanism developers can use to isolate access to the IPC layer.\n\nIt controls application windows' and webviews' fine grained access to the Tauri core, application, or plugin commands. If a webview or its window is not matching any capability then it has no access to the IPC layer at all.\n\nThis can be done to create groups of windows, based on their required system access, which can reduce impact of frontend vulnerabilities in less privileged windows. Windows can be added to a capability by exact name (e.g. `main-window`) or glob patterns like `*` or `admin-*`. 
A Window can have none, one, or multiple associated capabilities.\n\n## Example\n\n```json { \"identifier\": \"main-user-files-write\", \"description\": \"This capability allows the `main` window on macOS and Windows access to `filesystem` write related commands and `dialog` commands to enable programmatic access to files selected by the user.\", \"windows\": [ \"main\" ], \"permissions\": [ \"core:default\", \"dialog:open\", { \"identifier\": \"fs:allow-write-text-file\", \"allow\": [{ \"path\": \"$HOME/test.txt\" }] }, ], \"platforms\": [\"macOS\",\"windows\"] } ```", "type": "object", - "required": [ - "identifier", - "permissions" - ], + "required": ["identifier", "permissions"], "properties": { "identifier": { "description": "Identifier of the capability.\n\n## Example\n\n`main-user-files-write`", @@ -93,10 +88,7 @@ }, "platforms": { "description": "Limit which target platforms this capability applies to.\n\nBy default all platforms are targeted.\n\n## Example\n\n`[\"macOS\",\"windows\"]`", - "type": [ - "array", - "null" - ], + "type": ["array", "null"], "items": { "$ref": "#/definitions/Target" } @@ -106,9 +98,7 @@ "CapabilityRemote": { "description": "Configuration for remote URLs that are associated with the capability.", "type": "object", - "required": [ - "urls" - ], + "required": ["urls"], "properties": { "urls": { "description": "Remote domains this capability refers to using the [URLPattern standard](https://urlpattern.spec.whatwg.org/).\n\n## Examples\n\n- \"https://*.mydomain.dev\": allows subdomains of mydomain.dev - \"https://mydomain.dev/api/*\": allows any subpath of mydomain.dev/api", @@ -218,10 +208,7 @@ "anyOf": [ { "type": "object", - "required": [ - "cmd", - "name" - ], + "required": ["cmd", "name"], "properties": { "args": { "description": "The allowed arguments for the command execution.", @@ -244,10 +231,7 @@ }, { "type": "object", - "required": [ - "name", - "sidecar" - ], + "required": ["name", "sidecar"], "properties": { "args": { 
"description": "The allowed arguments for the command execution.", @@ -278,10 +262,7 @@ "anyOf": [ { "type": "object", - "required": [ - "cmd", - "name" - ], + "required": ["cmd", "name"], "properties": { "args": { "description": "The allowed arguments for the command execution.", @@ -304,10 +285,7 @@ }, { "type": "object", - "required": [ - "name", - "sidecar" - ], + "required": ["name", "sidecar"], "properties": { "args": { "description": "The allowed arguments for the command execution.", @@ -356,20 +334,14 @@ }, "allow": { "description": "Data that defines what is allowed by the scope.", - "type": [ - "array", - "null" - ], + "type": ["array", "null"], "items": { "$ref": "#/definitions/Value" } }, "deny": { "description": "Data that defines what is denied by the scope. This should be prioritized by validation logic.", - "type": [ - "array", - "null" - ], + "type": ["array", "null"], "items": { "$ref": "#/definitions/Value" } @@ -377,9 +349,7 @@ } } ], - "required": [ - "identifier" - ] + "required": ["identifier"] } ] }, @@ -1845,10 +1815,10 @@ "markdownDescription": "Enables the set_title_bar_style command without any pre-configured scope." }, { - "description": "Enables the set_visible_on_all_workspaces command without any pre-configured scope.", + "description": "Enables the set_visible_on_all_organizations command without any pre-configured scope.", "type": "string", - "const": "core:window:allow-set-visible-on-all-workspaces", - "markdownDescription": "Enables the set_visible_on_all_workspaces command without any pre-configured scope." + "const": "core:window:allow-set-visible-on-all-organizations", + "markdownDescription": "Enables the set_visible_on_all_organizations command without any pre-configured scope." }, { "description": "Enables the show command without any pre-configured scope.", @@ -2301,10 +2271,10 @@ "markdownDescription": "Denies the set_title_bar_style command without any pre-configured scope." 
}, { - "description": "Denies the set_visible_on_all_workspaces command without any pre-configured scope.", + "description": "Denies the set_visible_on_all_organizations command without any pre-configured scope.", "type": "string", - "const": "core:window:deny-set-visible-on-all-workspaces", - "markdownDescription": "Denies the set_visible_on_all_workspaces command without any pre-configured scope." + "const": "core:window:deny-set-visible-on-all-organizations", + "markdownDescription": "Denies the set_visible_on_all_organizations command without any pre-configured scope." }, { "description": "Denies the show command without any pre-configured scope.", @@ -2482,37 +2452,27 @@ { "description": "MacOS.", "type": "string", - "enum": [ - "macOS" - ] + "enum": ["macOS"] }, { "description": "Windows.", "type": "string", - "enum": [ - "windows" - ] + "enum": ["windows"] }, { "description": "Linux.", "type": "string", - "enum": [ - "linux" - ] + "enum": ["linux"] }, { "description": "Android.", "type": "string", - "enum": [ - "android" - ] + "enum": ["android"] }, { "description": "iOS.", "type": "string", - "enum": [ - "iOS" - ] + "enum": ["iOS"] } ] }, @@ -2526,9 +2486,7 @@ { "description": "A variable that is set while calling the command from the webview API.", "type": "object", - "required": [ - "validator" - ], + "required": ["validator"], "properties": { "raw": { "description": "Marks the validator as a raw regex, meaning the plugin should not make any modification at runtime.\n\nThis means the regex will not match on the entire string by default, which might be exploited if your regex allow unexpected input to be considered valid. 
When using this option, make sure your regex is correct.", @@ -2561,4 +2519,4 @@ ] } } -} \ No newline at end of file +} diff --git a/foundry/packages/frontend/src/app/router.tsx b/foundry/packages/frontend/src/app/router.tsx index 8ee0855..dd22724 100644 --- a/foundry/packages/frontend/src/app/router.tsx +++ b/foundry/packages/frontend/src/app/router.tsx @@ -1,6 +1,6 @@ import { type ReactNode, useEffect } from "react"; import type { FoundryBillingPlanId } from "@sandbox-agent/foundry-shared"; -import { useInterest } from "@sandbox-agent/foundry-client"; +import { useSubscription } from "@sandbox-agent/foundry-client"; import { Navigate, Outlet, createRootRoute, createRoute, createRouter } from "@tanstack/react-router"; import { MockLayout } from "../components/mock-layout"; import { @@ -11,8 +11,8 @@ import { MockOrganizationSettingsPage, MockSignInPage, } from "../components/mock-onboarding"; -import { defaultWorkspaceId, isMockFrontendClient } from "../lib/env"; -import { interestManager } from "../lib/interest"; +import { defaultOrganizationId, isMockFrontendClient } from "../lib/env"; +import { subscriptionManager } from "../lib/subscription"; import { activeMockOrganization, getMockOrganizationById, isAppSnapshotBootstrapping, useMockAppClient, useMockAppSnapshot } from "../lib/mock-app"; const rootRoute = createRootRoute({ @@ -61,20 +61,20 @@ const organizationCheckoutRoute = createRoute({ component: OrganizationCheckoutRoute, }); -const workspaceRoute = createRoute({ +const organizationRoute = createRoute({ getParentRoute: () => rootRoute, - path: "/workspaces/$workspaceId", - component: WorkspaceLayoutRoute, + path: "/organizations/$organizationId", + component: OrganizationLayoutRoute, }); -const workspaceIndexRoute = createRoute({ - getParentRoute: () => workspaceRoute, +const organizationIndexRoute = createRoute({ + getParentRoute: () => organizationRoute, path: "/", - component: WorkspaceRoute, + component: OrganizationRoute, }); const taskRoute = 
createRoute({ - getParentRoute: () => workspaceRoute, + getParentRoute: () => organizationRoute, path: "tasks/$taskId", validateSearch: (search: Record) => ({ sessionId: typeof search.sessionId === "string" && search.sessionId.trim().length > 0 ? search.sessionId : undefined, @@ -83,7 +83,7 @@ const taskRoute = createRoute({ }); const repoRoute = createRoute({ - getParentRoute: () => workspaceRoute, + getParentRoute: () => organizationRoute, path: "repos/$repoId", component: RepoRoute, }); @@ -96,7 +96,7 @@ const routeTree = rootRoute.addChildren([ organizationSettingsRoute, organizationBillingRoute, organizationCheckoutRoute, - workspaceRoute.addChildren([workspaceIndexRoute, taskRoute, repoRoute]), + organizationRoute.addChildren([organizationIndexRoute, taskRoute, repoRoute]), ]); export const router = createRouter({ routeTree }); @@ -107,7 +107,7 @@ declare module "@tanstack/react-router" { } } -function WorkspaceLayoutRoute() { +function OrganizationLayoutRoute() { return ; } @@ -142,7 +142,7 @@ function IndexRoute() { const activeOrganization = activeMockOrganization(snapshot); if (activeOrganization) { - return ; + return ; } return ; @@ -238,54 +238,54 @@ function OrganizationCheckoutRoute() { return ; } -function WorkspaceRoute() { - const { workspaceId } = workspaceRoute.useParams(); +function OrganizationRoute() { + const { organizationId } = organizationRoute.useParams(); return ( - - - + + + ); } -function WorkspaceView({ - workspaceId, +function OrganizationView({ + organizationId, selectedTaskId, selectedSessionId, }: { - workspaceId: string; + organizationId: string; selectedTaskId: string | null; selectedSessionId: string | null; }) { - return ; + return ; } function TaskRoute() { - const { workspaceId, taskId } = taskRoute.useParams(); + const { organizationId, taskId } = taskRoute.useParams(); const { sessionId } = taskRoute.useSearch(); return ( - - - + + + ); } -function TaskView({ workspaceId, taskId, sessionId }: { workspaceId: string; 
taskId: string; sessionId: string | null }) { - return ; +function TaskView({ organizationId, taskId, sessionId }: { organizationId: string; taskId: string; sessionId: string | null }) { + return ; } function RepoRoute() { - const { workspaceId, repoId } = repoRoute.useParams(); + const { organizationId, repoId } = repoRoute.useParams(); return ( - - - + + + ); } -function AppWorkspaceGate({ workspaceId, children }: { workspaceId: string; children: ReactNode }) { +function AppOrganizationGate({ organizationId, children }: { organizationId: string; children: ReactNode }) { const client = useMockAppClient(); const snapshot = useMockAppSnapshot(); - const organization = snapshot.organizations.find((candidate) => candidate.workspaceId === workspaceId) ?? null; + const organization = snapshot.organizations.find((candidate) => candidate.organizationId === organizationId) ?? null; useEffect(() => { if (organization && snapshot.activeOrganizationId !== organization.id) { @@ -294,7 +294,7 @@ function AppWorkspaceGate({ workspaceId, children }: { workspaceId: string; chil }, [client, organization, snapshot.activeOrganizationId]); if (!isMockFrontendClient && isAppSnapshotBootstrapping(snapshot)) { - return ; + return ; } if (snapshot.auth.status === "signed_out") { @@ -308,13 +308,15 @@ function AppWorkspaceGate({ workspaceId, children }: { workspaceId: string; chil return <>{children}; } -function RepoRouteInner({ workspaceId, repoId }: { workspaceId: string; repoId: string }) { - const workspaceState = useInterest(interestManager, "workspace", { workspaceId }); - const activeTaskId = workspaceState.data?.taskSummaries.find((task) => task.repoId === repoId)?.id; +function RepoRouteInner({ organizationId, repoId }: { organizationId: string; repoId: string }) { + const organizationState = useSubscription(subscriptionManager, "organization", { organizationId }); + const activeTaskId = organizationState.data?.taskSummaries.find((task) => task.repoId === repoId)?.id; if 
(!activeTaskId) { - return ; + return ; } - return ; + return ( + + ); } function RootLayout() { diff --git a/foundry/packages/frontend/src/components/dev-panel.tsx b/foundry/packages/frontend/src/components/dev-panel.tsx index 061eff1..56907ff 100644 --- a/foundry/packages/frontend/src/components/dev-panel.tsx +++ b/foundry/packages/frontend/src/components/dev-panel.tsx @@ -2,8 +2,9 @@ import { memo, useEffect, useMemo, useState } from "react"; import { useStyletron } from "baseui"; import { useFoundryTokens } from "../app/theme"; import { isMockFrontendClient } from "../lib/env"; -import { interestManager } from "../lib/interest"; +import { subscriptionManager } from "../lib/subscription"; import type { + FoundryAppSnapshot, FoundryOrganization, TaskStatus, TaskWorkbenchSnapshot, @@ -11,11 +12,12 @@ import type { WorkbenchSessionSummary, WorkbenchTaskStatus, } from "@sandbox-agent/foundry-shared"; -import type { DebugInterestTopic } from "@sandbox-agent/foundry-client"; +import { useSubscription } from "@sandbox-agent/foundry-client"; +import type { DebugSubscriptionTopic } from "@sandbox-agent/foundry-client"; import { describeTaskState } from "../features/tasks/status"; interface DevPanelProps { - workspaceId: string; + organizationId: string; snapshot: TaskWorkbenchSnapshot; organization?: FoundryOrganization | null; focusedTask?: DevPanelFocusedTask | null; @@ -46,12 +48,12 @@ interface TopicInfo { lastRefresh: number | null; } -function topicLabel(topic: DebugInterestTopic): string { +function topicLabel(topic: DebugSubscriptionTopic): string { switch (topic.topicKey) { case "app": return "App"; - case "workspace": - return "Workspace"; + case "organization": + return "Organization"; case "task": return "Task"; case "session": @@ -62,7 +64,7 @@ function topicLabel(topic: DebugInterestTopic): string { } /** Extract the params portion of a cache key (everything after the first `:`) */ -function topicParams(topic: DebugInterestTopic): string { +function 
topicParams(topic: DebugSubscriptionTopic): string { const idx = topic.cacheKey.indexOf(":"); return idx >= 0 ? topic.cacheKey.slice(idx + 1) : ""; } @@ -133,7 +135,7 @@ function thinkingLabel(sinceMs: number | null, now: number): string | null { return `thinking ${elapsed}s`; } -export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organization, focusedTask }: DevPanelProps) { +export const DevPanel = memo(function DevPanel({ organizationId, snapshot, organization, focusedTask }: DevPanelProps) { const [css] = useStyletron(); const t = useFoundryTokens(); const [now, setNow] = useState(Date.now()); @@ -145,7 +147,7 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza }, []); const topics = useMemo((): TopicInfo[] => { - return interestManager.listDebugTopics().map((topic) => ({ + return subscriptionManager.listDebugTopics().map((topic) => ({ label: topicLabel(topic), key: topic.cacheKey, params: topicParams(topic), @@ -156,12 +158,18 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza })); }, [now]); + const appState = useSubscription(subscriptionManager, "app", {}); + const appSnapshot: FoundryAppSnapshot | null = appState.data ?? null; + const repos = snapshot.repos ?? []; - const prCount = (snapshot.tasks ?? []).filter((task) => task.pullRequest != null).length; + const tasks = snapshot.tasks ?? []; + const prCount = tasks.filter((task) => task.pullRequest != null).length; const focusedTaskStatus = focusedTask?.runtimeStatus ?? focusedTask?.status ?? null; const focusedTaskState = describeTaskState(focusedTaskStatus, focusedTask?.statusMessage ?? null); const lastWebhookAt = organization?.github.lastWebhookAt ?? null; const hasRecentWebhook = lastWebhookAt != null && now - lastWebhookAt < 5 * 60_000; + const totalOrgs = appSnapshot?.organizations.length ?? 0; + const authStatus = appSnapshot?.auth.status ?? 
"unknown"; const mono = css({ fontFamily: "ui-monospace, SFMono-Regular, 'SF Mono', Consolas, monospace", @@ -218,8 +226,8 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza {/* Body */}
- {/* Interest Topics */} -
+ {/* Subscription Topics */} +
{topics.map((topic) => (
No active subscriptions}
+ {/* App State */} +
+
+
+ + Auth + {authStatus.replace(/_/g, " ")} +
+
+ + +
+
app topic: {appState.status}
+
+
+ {/* Snapshot Summary */} -
+
- + +
@@ -395,7 +428,7 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza {sandbox.sandboxId.slice(0, 16)} {isActive ? " *" : ""} - {sandbox.providerId} + {sandbox.sandboxProviderId}
{sandbox.cwd &&
cwd: {sandbox.cwd}
} @@ -408,8 +441,8 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza )} {/* GitHub */} - {organization && ( -
+
+ {organization ? (
- App + App Install {organization.github.installationStatus.replace(/_/g, " ")} @@ -438,6 +471,9 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza /> Sync {organization.github.syncStatus} + {organization.github.lastSyncAt != null && ( + {timeAgo(organization.github.lastSyncAt)} + )}
) : ( - never received + never received )}
- - + +
{organization.github.connectedAccount && (
@{organization.github.connectedAccount}
@@ -469,12 +505,14 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza
last sync: {organization.github.lastSyncLabel}
)}
-
- )} + ) : ( + No organization data loaded + )} +
- {/* Workspace */} -
-
{workspaceId}
+ {/* Organization */} +
+
{organizationId}
{organization && (
org: {organization.settings.displayName} ({organization.kind}) diff --git a/foundry/packages/frontend/src/components/mock-layout.tsx b/foundry/packages/frontend/src/components/mock-layout.tsx index e0f6803..d922ce2 100644 --- a/foundry/packages/frontend/src/components/mock-layout.tsx +++ b/foundry/packages/frontend/src/components/mock-layout.tsx @@ -10,7 +10,7 @@ import { type WorkbenchTaskDetail, type WorkbenchTaskSummary, } from "@sandbox-agent/foundry-shared"; -import { useInterest } from "@sandbox-agent/foundry-client"; +import { useSubscription } from "@sandbox-agent/foundry-client"; import { CircleAlert, PanelLeft, PanelRight } from "lucide-react"; import { useFoundryTokens } from "../app/theme"; @@ -21,7 +21,7 @@ import { MessageList } from "./mock-layout/message-list"; import { PromptComposer } from "./mock-layout/prompt-composer"; import { RightSidebar } from "./mock-layout/right-sidebar"; import { Sidebar } from "./mock-layout/sidebar"; -import { TabStrip } from "./mock-layout/tab-strip"; +import { SessionStrip } from "./mock-layout/session-strip"; import { TerminalPane } from "./mock-layout/terminal-pane"; import { TranscriptHeader } from "./mock-layout/transcript-header"; import { PROMPT_TEXTAREA_MAX_HEIGHT, PROMPT_TEXTAREA_MIN_HEIGHT, SPanel, ScrollBody, Shell, SpinnerDot } from "./mock-layout/ui"; @@ -41,11 +41,11 @@ import { } from "./mock-layout/view-model"; import { activeMockOrganization, useMockAppSnapshot } from "../lib/mock-app"; import { backendClient } from "../lib/backend"; -import { interestManager } from "../lib/interest"; +import { subscriptionManager } from "../lib/subscription"; import { describeTaskState, isProvisioningTaskStatus } from "../features/tasks/status"; -function firstAgentTabId(task: Task): string | null { - return task.tabs[0]?.id ?? null; +function firstAgentSessionId(task: Task): string | null { + return task.sessions[0]?.id ?? 
null; } function sanitizeOpenDiffs(task: Task, paths: string[] | undefined): string[] { @@ -56,25 +56,25 @@ function sanitizeOpenDiffs(task: Task, paths: string[] | undefined): string[] { return paths.filter((path) => task.diffs[path] != null); } -function sanitizeLastAgentTabId(task: Task, tabId: string | null | undefined): string | null { - if (tabId && task.tabs.some((tab) => tab.id === tabId)) { - return tabId; +function sanitizeLastAgentSessionId(task: Task, sessionId: string | null | undefined): string | null { + if (sessionId && task.sessions.some((tab) => tab.id === sessionId)) { + return sessionId; } - return firstAgentTabId(task); + return firstAgentSessionId(task); } -function sanitizeActiveTabId(task: Task, tabId: string | null | undefined, openDiffs: string[], lastAgentTabId: string | null): string | null { - if (tabId) { - if (task.tabs.some((tab) => tab.id === tabId)) { - return tabId; +function sanitizeActiveSessionId(task: Task, sessionId: string | null | undefined, openDiffs: string[], lastAgentSessionId: string | null): string | null { + if (sessionId) { + if (task.sessions.some((tab) => tab.id === sessionId)) { + return sessionId; } - if (isDiffTab(tabId) && openDiffs.includes(diffPath(tabId))) { - return tabId; + if (isDiffTab(sessionId) && openDiffs.includes(diffPath(sessionId))) { + return sessionId; } } - return openDiffs.length > 0 ? diffTabId(openDiffs[openDiffs.length - 1]!) : lastAgentTabId; + return openDiffs.length > 0 ? diffTabId(openDiffs[openDiffs.length - 1]!) : lastAgentSessionId; } function githubInstallationWarningTitle(organization: FoundryOrganization): string { @@ -85,7 +85,7 @@ function githubInstallationWarningDetail(organization: FoundryOrganization): str const statusDetail = organization.github.lastSyncLabel.trim(); const requirementDetail = organization.github.installationStatus === "install_required" - ? "Webhooks are required for Foundry to function. 
Repo sync and PR updates will not work until the GitHub App is installed for this workspace." + ? "Webhooks are required for Foundry to function. Repo sync and PR updates will not work until the GitHub App is installed for this organization." : "Webhook delivery is unavailable. Repo sync and PR updates will not work until the GitHub App is reconnected."; return statusDetail ? `${requirementDetail} ${statusDetail}.` : requirementDetail; } @@ -130,10 +130,10 @@ function GithubInstallationWarning({ ); } -function toLegacyTab( +function toSessionModel( summary: WorkbenchSessionSummary, - sessionDetail?: { draft: Task["tabs"][number]["draft"]; transcript: Task["tabs"][number]["transcript"] }, -): Task["tabs"][number] { + sessionDetail?: { draft: Task["sessions"][number]["draft"]; transcript: Task["sessions"][number]["transcript"] }, +): Task["sessions"][number] { return { id: summary.id, sessionId: summary.sessionId, @@ -154,10 +154,10 @@ function toLegacyTab( }; } -function toLegacyTask( +function toTaskModel( summary: WorkbenchTaskSummary, detail?: WorkbenchTaskDetail, - sessionCache?: Map, + sessionCache?: Map, ): Task { const sessions = detail?.sessionsSummary ?? summary.sessionsSummary; return { @@ -171,7 +171,7 @@ function toLegacyTask( updatedAtMs: detail?.updatedAtMs ?? summary.updatedAtMs, branch: detail?.branch ?? summary.branch, pullRequest: detail?.pullRequest ?? summary.pullRequest, - tabs: sessions.map((session) => toLegacyTab(session, sessionCache?.get(session.id))), + sessions: sessions.map((session) => toSessionModel(session, sessionCache?.get(session.id))), fileChanges: detail?.fileChanges ?? [], diffs: detail?.diffs ?? {}, fileTree: detail?.fileTree ?? 
[], @@ -190,7 +190,7 @@ function isOpenPrTaskId(taskId: string): boolean { return taskId.startsWith(OPEN_PR_TASK_PREFIX); } -function toLegacyOpenPrTask(pullRequest: WorkbenchOpenPrSummary): Task { +function toOpenPrTaskModel(pullRequest: WorkbenchOpenPrSummary): Task { return { id: openPrTaskId(pullRequest.prId), repoId: pullRequest.repoId, @@ -205,7 +205,7 @@ function toLegacyOpenPrTask(pullRequest: WorkbenchOpenPrSummary): Task { number: pullRequest.number, status: pullRequest.isDraft ? "draft" : "ready", }, - tabs: [], + sessions: [], fileChanges: [], diffs: {}, fileTree: [], @@ -214,7 +214,7 @@ function toLegacyOpenPrTask(pullRequest: WorkbenchOpenPrSummary): Task { }; } -function sessionStateMessage(tab: Task["tabs"][number] | null | undefined): string | null { +function sessionStateMessage(tab: Task["sessions"][number] | null | undefined): string | null { if (!tab) { return null; } @@ -230,7 +230,7 @@ function sessionStateMessage(tab: Task["tabs"][number] | null | undefined): stri return null; } -function groupProjects(repos: Array<{ id: string; label: string }>, tasks: Task[]) { +function groupRepositories(repos: Array<{ id: string; label: string }>, tasks: Task[]) { return repos .map((repo) => ({ id: repo.id, @@ -249,21 +249,21 @@ interface WorkbenchActions { branch?: string; onBranch?: string; model?: ModelId; - }): Promise<{ taskId: string; tabId?: string }>; + }): Promise<{ taskId: string; sessionId?: string }>; markTaskUnread(input: { taskId: string }): Promise; renameTask(input: { taskId: string; value: string }): Promise; renameBranch(input: { taskId: string; value: string }): Promise; archiveTask(input: { taskId: string }): Promise; publishPr(input: { taskId: string }): Promise; revertFile(input: { taskId: string; path: string }): Promise; - updateDraft(input: { taskId: string; tabId: string; text: string; attachments: LineAttachment[] }): Promise; - sendMessage(input: { taskId: string; tabId: string; text: string; attachments: LineAttachment[] }): 
Promise; - stopAgent(input: { taskId: string; tabId: string }): Promise; - setSessionUnread(input: { taskId: string; tabId: string; unread: boolean }): Promise; - renameSession(input: { taskId: string; tabId: string; title: string }): Promise; - closeTab(input: { taskId: string; tabId: string }): Promise; - addTab(input: { taskId: string; model?: string }): Promise<{ tabId: string }>; - changeModel(input: { taskId: string; tabId: string; model: ModelId }): Promise; + updateDraft(input: { taskId: string; sessionId: string; text: string; attachments: LineAttachment[] }): Promise; + sendMessage(input: { taskId: string; sessionId: string; text: string; attachments: LineAttachment[] }): Promise; + stopAgent(input: { taskId: string; sessionId: string }): Promise; + setSessionUnread(input: { taskId: string; sessionId: string; unread: boolean }): Promise; + renameSession(input: { taskId: string; sessionId: string; title: string }): Promise; + closeSession(input: { taskId: string; sessionId: string }): Promise; + addSession(input: { taskId: string; model?: string }): Promise<{ sessionId: string }>; + changeModel(input: { taskId: string; sessionId: string; model: ModelId }): Promise; reloadGithubOrganization(): Promise; reloadGithubPullRequests(): Promise; reloadGithubRepository(repoId: string): Promise; @@ -274,12 +274,12 @@ const TranscriptPanel = memo(function TranscriptPanel({ taskWorkbenchClient, task, hasSandbox, - activeTabId, - lastAgentTabId, + activeSessionId, + lastAgentSessionId, openDiffs, onSyncRouteSession, - onSetActiveTabId, - onSetLastAgentTabId, + onSetActiveSessionId, + onSetLastAgentSessionId, onSetOpenDiffs, sidebarCollapsed, onToggleSidebar, @@ -293,12 +293,12 @@ const TranscriptPanel = memo(function TranscriptPanel({ taskWorkbenchClient: WorkbenchActions; task: Task; hasSandbox: boolean; - activeTabId: string | null; - lastAgentTabId: string | null; + activeSessionId: string | null; + lastAgentSessionId: string | null; openDiffs: string[]; 
onSyncRouteSession: (taskId: string, sessionId: string | null, replace?: boolean) => void; - onSetActiveTabId: (tabId: string | null) => void; - onSetLastAgentTabId: (tabId: string | null) => void; + onSetActiveSessionId: (sessionId: string | null) => void; + onSetLastAgentSessionId: (sessionId: string | null) => void; onSetOpenDiffs: (paths: string[]) => void; sidebarCollapsed?: boolean; onToggleSidebar?: () => void; @@ -313,37 +313,38 @@ const TranscriptPanel = memo(function TranscriptPanel({ const [defaultModel, setDefaultModel] = useState("claude-sonnet-4"); const [editingField, setEditingField] = useState<"title" | "branch" | null>(null); const [editValue, setEditValue] = useState(""); - const [editingSessionTabId, setEditingSessionTabId] = useState(null); + const [editingSessionId, setEditingSessionId] = useState(null); const [editingSessionName, setEditingSessionName] = useState(""); - const [pendingHistoryTarget, setPendingHistoryTarget] = useState<{ messageId: string; tabId: string } | null>(null); + const [pendingHistoryTarget, setPendingHistoryTarget] = useState<{ messageId: string; sessionId: string } | null>(null); const [copiedMessageId, setCopiedMessageId] = useState(null); const [timerNowMs, setTimerNowMs] = useState(() => Date.now()); const [localDraft, setLocalDraft] = useState(""); const [localAttachments, setLocalAttachments] = useState([]); + const [pendingMessage, setPendingMessage] = useState<{ text: string; sessionId: string; sentAt: number } | null>(null); const lastEditTimeRef = useRef(0); const throttleTimerRef = useRef | null>(null); const pendingDraftRef = useRef<{ text: string; attachments: LineAttachment[] } | null>(null); const scrollRef = useRef(null); const textareaRef = useRef(null); const messageRefs = useRef(new Map()); - const activeDiff = activeTabId && isDiffTab(activeTabId) ? diffPath(activeTabId) : null; - const activeAgentTab = activeDiff ? null : (task.tabs.find((candidate) => candidate.id === activeTabId) ?? 
task.tabs[0] ?? null); - const promptTab = task.tabs.find((candidate) => candidate.id === lastAgentTabId) ?? task.tabs[0] ?? null; + const activeDiff = activeSessionId && isDiffTab(activeSessionId) ? diffPath(activeSessionId) : null; + const activeAgentSession = activeDiff ? null : (task.sessions.find((candidate) => candidate.id === activeSessionId) ?? task.sessions[0] ?? null); + const promptSession = task.sessions.find((candidate) => candidate.id === lastAgentSessionId) ?? task.sessions[0] ?? null; const isTerminal = task.status === "archived"; - const historyEvents = useMemo(() => buildHistoryEvents(task.tabs), [task.tabs]); - const activeMessages = useMemo(() => buildDisplayMessages(activeAgentTab), [activeAgentTab]); + const historyEvents = useMemo(() => buildHistoryEvents(task.sessions), [task.sessions]); + const activeMessages = useMemo(() => buildDisplayMessages(activeAgentSession), [activeAgentSession]); const taskRuntimeStatus = task.runtimeStatus ?? task.status; const taskState = describeTaskState(taskRuntimeStatus, task.statusMessage ?? null); const taskProvisioning = isProvisioningTaskStatus(taskRuntimeStatus); const taskProvisioningMessage = taskState.detail; - const activeSessionMessage = sessionStateMessage(activeAgentTab); + const activeSessionMessage = sessionStateMessage(activeAgentSession); const showPendingSessionState = !activeDiff && - !!activeAgentTab && - (activeAgentTab.status === "pending_provision" || activeAgentTab.status === "pending_session_create" || activeAgentTab.status === "error") && + !!activeAgentSession && + (activeAgentSession.status === "pending_provision" || activeAgentSession.status === "pending_session_create" || activeAgentSession.status === "error") && activeMessages.length === 0; - const serverDraft = promptTab?.draft.text ?? ""; - const serverAttachments = promptTab?.draft.attachments ?? []; + const serverDraft = promptSession?.draft.text ?? ""; + const serverAttachments = promptSession?.draft.attachments ?? 
[]; // Sync server → local only when user hasn't typed recently (3s cooldown) const DRAFT_SYNC_COOLDOWN_MS = 3_000; @@ -354,12 +355,26 @@ const TranscriptPanel = memo(function TranscriptPanel({ } }, [serverDraft, serverAttachments]); - // Reset local draft immediately on tab/task switch + // Reset local draft immediately on session/task switch useEffect(() => { lastEditTimeRef.current = 0; - setLocalDraft(promptTab?.draft.text ?? ""); - setLocalAttachments(promptTab?.draft.attachments ?? []); - }, [promptTab?.id, task.id]); + setLocalDraft(promptSession?.draft.text ?? ""); + setLocalAttachments(promptSession?.draft.attachments ?? []); + }, [promptSession?.id, task.id]); + + // Clear pending message once the real transcript contains a client message newer than when we sent + const pendingMessageClientCount = useRef(0); + useEffect(() => { + if (!pendingMessage) return; + + const targetSession = task.sessions.find((s) => s.id === pendingMessage.sessionId); + if (!targetSession) return; + + const clientEventCount = targetSession.transcript.filter((event) => event.sender === "client").length; + if (clientEventCount > pendingMessageClientCount.current) { + setPendingMessage(null); + } + }, [task.sessions, pendingMessage]); const draft = localDraft; const attachments = localAttachments; @@ -372,10 +387,10 @@ const TranscriptPanel = memo(function TranscriptPanel({ useEffect(() => { textareaRef.current?.focus(); - }, [activeTabId, task.id]); + }, [activeSessionId, task.id]); useEffect(() => { - setEditingSessionTabId(null); + setEditingSessionId(null); setEditingSessionName(""); }, [task.id]); @@ -389,7 +404,7 @@ const TranscriptPanel = memo(function TranscriptPanel({ const nextHeight = Math.min(textarea.scrollHeight, PROMPT_TEXTAREA_MAX_HEIGHT); textarea.style.height = `${Math.max(PROMPT_TEXTAREA_MIN_HEIGHT, nextHeight)}px`; textarea.style.overflowY = textarea.scrollHeight > PROMPT_TEXTAREA_MAX_HEIGHT ? 
"auto" : "hidden"; - }, [draft, activeTabId, task.id]); + }, [draft, activeSessionId, task.id]); useEffect(() => { if (!copiedMessageId) { @@ -404,7 +419,7 @@ const TranscriptPanel = memo(function TranscriptPanel({ }, [copiedMessageId]); useEffect(() => { - if (!activeAgentTab || activeAgentTab.status !== "running" || activeAgentTab.thinkingSinceMs === null) { + if (!activeAgentSession || activeAgentSession.status !== "running" || activeAgentSession.thinkingSinceMs === null) { return; } @@ -414,19 +429,19 @@ const TranscriptPanel = memo(function TranscriptPanel({ }, 1_000); return () => window.clearInterval(timer); - }, [activeAgentTab?.id, activeAgentTab?.status, activeAgentTab?.thinkingSinceMs]); + }, [activeAgentSession?.id, activeAgentSession?.status, activeAgentSession?.thinkingSinceMs]); useEffect(() => { - if (!activeAgentTab?.unread) { + if (!activeAgentSession?.unread) { return; } void taskWorkbenchClient.setSessionUnread({ taskId: task.id, - tabId: activeAgentTab.id, + sessionId: activeAgentSession.id, unread: false, }); - }, [activeAgentTab?.id, activeAgentTab?.unread, task.id]); + }, [activeAgentSession?.id, activeAgentSession?.unread, task.id]); const startEditingField = useCallback((field: "title" | "branch", value: string) => { setEditingField(field); @@ -458,10 +473,10 @@ const TranscriptPanel = memo(function TranscriptPanel({ const DRAFT_THROTTLE_MS = 500; const flushDraft = useCallback( - (text: string, nextAttachments: LineAttachment[], tabId: string) => { + (text: string, nextAttachments: LineAttachment[], sessionId: string) => { void taskWorkbenchClient.updateDraft({ taskId: task.id, - tabId, + sessionId, text, attachments: nextAttachments, }); @@ -480,7 +495,7 @@ const TranscriptPanel = memo(function TranscriptPanel({ const updateDraft = useCallback( (nextText: string, nextAttachments: LineAttachment[]) => { - if (!promptTab) { + if (!promptSession) { return; } @@ -495,162 +510,172 @@ const TranscriptPanel = memo(function TranscriptPanel({ 
throttleTimerRef.current = setTimeout(() => { throttleTimerRef.current = null; if (pendingDraftRef.current) { - flushDraft(pendingDraftRef.current.text, pendingDraftRef.current.attachments, promptTab.id); + flushDraft(pendingDraftRef.current.text, pendingDraftRef.current.attachments, promptSession.id); pendingDraftRef.current = null; } }, DRAFT_THROTTLE_MS); } }, - [promptTab, flushDraft], + [promptSession, flushDraft], ); const sendMessage = useCallback(() => { const text = draft.trim(); - if (!text || !promptTab) { + if (!text || !promptSession) { return; } - onSetActiveTabId(promptTab.id); - onSetLastAgentTabId(promptTab.id); + // Clear draft and show optimistic message immediately (don't wait for server round-trip) + setLocalDraft(""); + setLocalAttachments([]); + lastEditTimeRef.current = Date.now(); + // Snapshot current client message count so we can detect when the server adds ours + pendingMessageClientCount.current = promptSession.transcript.filter((event) => event.sender === "client").length; + setPendingMessage({ text, sessionId: promptSession.id, sentAt: Date.now() }); + + onSetActiveSessionId(promptSession.id); + onSetLastAgentSessionId(promptSession.id); void taskWorkbenchClient.sendMessage({ taskId: task.id, - tabId: promptTab.id, + sessionId: promptSession.id, text, attachments, }); - }, [attachments, draft, task.id, onSetActiveTabId, onSetLastAgentTabId, promptTab]); + }, [attachments, draft, task.id, onSetActiveSessionId, onSetLastAgentSessionId, promptSession]); const stopAgent = useCallback(() => { - if (!promptTab) { + if (!promptSession) { return; } void taskWorkbenchClient.stopAgent({ taskId: task.id, - tabId: promptTab.id, + sessionId: promptSession.id, }); - }, [task.id, promptTab]); + }, [task.id, promptSession]); - const switchTab = useCallback( - (tabId: string) => { - onSetActiveTabId(tabId); + const switchSession = useCallback( + (sessionId: string) => { + onSetActiveSessionId(sessionId); - if (!isDiffTab(tabId)) { - 
onSetLastAgentTabId(tabId); - const tab = task.tabs.find((candidate) => candidate.id === tabId); - if (tab?.unread) { + if (!isDiffTab(sessionId)) { + onSetLastAgentSessionId(sessionId); + const session = task.sessions.find((candidate) => candidate.id === sessionId); + if (session?.unread) { void taskWorkbenchClient.setSessionUnread({ taskId: task.id, - tabId, + sessionId, unread: false, }); } - onSyncRouteSession(task.id, tabId); + onSyncRouteSession(task.id, sessionId); } }, - [task.id, task.tabs, onSetActiveTabId, onSetLastAgentTabId, onSyncRouteSession], + [task.id, task.sessions, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession], ); - const setTabUnread = useCallback( - (tabId: string, unread: boolean) => { - void taskWorkbenchClient.setSessionUnread({ taskId: task.id, tabId, unread }); + const setSessionUnread = useCallback( + (sessionId: string, unread: boolean) => { + void taskWorkbenchClient.setSessionUnread({ taskId: task.id, sessionId, unread }); }, [task.id], ); - const startRenamingTab = useCallback( - (tabId: string) => { - const targetTab = task.tabs.find((candidate) => candidate.id === tabId); - if (!targetTab) { - throw new Error(`Unable to rename missing session tab ${tabId}`); + const startRenamingSession = useCallback( + (sessionId: string) => { + const targetSession = task.sessions.find((candidate) => candidate.id === sessionId); + if (!targetSession) { + throw new Error(`Unable to rename missing session ${sessionId}`); } - setEditingSessionTabId(tabId); - setEditingSessionName(targetTab.sessionName); + setEditingSessionId(sessionId); + setEditingSessionName(targetSession.sessionName); }, - [task.tabs], + [task.sessions], ); - const cancelTabRename = useCallback(() => { - setEditingSessionTabId(null); + const cancelSessionRename = useCallback(() => { + setEditingSessionId(null); setEditingSessionName(""); }, []); - const commitTabRename = useCallback(() => { - if (!editingSessionTabId) { + const commitSessionRename = 
useCallback(() => { + if (!editingSessionId) { return; } const trimmedName = editingSessionName.trim(); if (!trimmedName) { - cancelTabRename(); + cancelSessionRename(); return; } void taskWorkbenchClient.renameSession({ taskId: task.id, - tabId: editingSessionTabId, + sessionId: editingSessionId, title: trimmedName, }); - cancelTabRename(); - }, [cancelTabRename, editingSessionName, editingSessionTabId, task.id]); + cancelSessionRename(); + }, [cancelSessionRename, editingSessionName, editingSessionId, task.id]); - const closeTab = useCallback( - (tabId: string) => { - const remainingTabs = task.tabs.filter((candidate) => candidate.id !== tabId); - const nextTabId = remainingTabs[0]?.id ?? null; + const closeSession = useCallback( + (sessionId: string) => { + const remainingSessions = task.sessions.filter((candidate) => candidate.id !== sessionId); + const nextSessionId = remainingSessions[0]?.id ?? null; - if (activeTabId === tabId) { - onSetActiveTabId(nextTabId); + if (activeSessionId === sessionId) { + onSetActiveSessionId(nextSessionId); } - if (lastAgentTabId === tabId) { - onSetLastAgentTabId(nextTabId); + if (lastAgentSessionId === sessionId) { + onSetLastAgentSessionId(nextSessionId); } - onSyncRouteSession(task.id, nextTabId); - void taskWorkbenchClient.closeTab({ taskId: task.id, tabId }); + onSyncRouteSession(task.id, nextSessionId); + void taskWorkbenchClient.closeSession({ taskId: task.id, sessionId }); }, - [activeTabId, task.id, task.tabs, lastAgentTabId, onSetActiveTabId, onSetLastAgentTabId, onSyncRouteSession], + [activeSessionId, task.id, task.sessions, lastAgentSessionId, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession], ); const closeDiffTab = useCallback( (path: string) => { const nextOpenDiffs = openDiffs.filter((candidate) => candidate !== path); onSetOpenDiffs(nextOpenDiffs); - if (activeTabId === diffTabId(path)) { - onSetActiveTabId(nextOpenDiffs.length > 0 ? diffTabId(nextOpenDiffs[nextOpenDiffs.length - 1]!) 
: (lastAgentTabId ?? firstAgentTabId(task))); + if (activeSessionId === diffTabId(path)) { + onSetActiveSessionId( + nextOpenDiffs.length > 0 ? diffTabId(nextOpenDiffs[nextOpenDiffs.length - 1]!) : (lastAgentSessionId ?? firstAgentSessionId(task)), + ); } }, - [activeTabId, task, lastAgentTabId, onSetActiveTabId, onSetOpenDiffs, openDiffs], + [activeSessionId, task, lastAgentSessionId, onSetActiveSessionId, onSetOpenDiffs, openDiffs], ); - const addTab = useCallback(() => { + const addSession = useCallback(() => { void (async () => { - const { tabId } = await taskWorkbenchClient.addTab({ taskId: task.id }); - onSetLastAgentTabId(tabId); - onSetActiveTabId(tabId); - onSyncRouteSession(task.id, tabId); + const { sessionId } = await taskWorkbenchClient.addSession({ taskId: task.id }); + onSetLastAgentSessionId(sessionId); + onSetActiveSessionId(sessionId); + onSyncRouteSession(task.id, sessionId); })(); - }, [task.id, onSetActiveTabId, onSetLastAgentTabId, onSyncRouteSession]); + }, [task.id, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession]); const changeModel = useCallback( (model: ModelId) => { - if (!promptTab) { - throw new Error(`Unable to change model for task ${task.id} without an active prompt tab`); + if (!promptSession) { + throw new Error(`Unable to change model for task ${task.id} without an active prompt session`); } void taskWorkbenchClient.changeModel({ taskId: task.id, - tabId: promptTab.id, + sessionId: promptSession.id, model, }); }, - [task.id, promptTab], + [task.id, promptSession], ); const addAttachment = useCallback( (filePath: string, lineNumber: number, lineContent: string) => { - if (!promptTab) { + if (!promptSession) { return; } @@ -661,7 +686,7 @@ const TranscriptPanel = memo(function TranscriptPanel({ updateDraft(draft, [...attachments, nextAttachment]); }, - [attachments, draft, promptTab, updateDraft], + [attachments, draft, promptSession, updateDraft], ); const removeAttachment = useCallback( @@ -676,13 +701,13 @@ 
const TranscriptPanel = memo(function TranscriptPanel({ const jumpToHistoryEvent = useCallback( (event: HistoryEvent) => { - setPendingHistoryTarget({ messageId: event.messageId, tabId: event.tabId }); + setPendingHistoryTarget({ messageId: event.messageId, sessionId: event.sessionId }); - if (activeTabId !== event.tabId) { - switchTab(event.tabId); + if (activeSessionId !== event.sessionId) { + switchSession(event.sessionId); } }, - [activeTabId, switchTab], + [activeSessionId, switchSession], ); const copyMessage = useCallback(async (message: Message) => { @@ -704,26 +729,29 @@ const TranscriptPanel = memo(function TranscriptPanel({ } }, []); + const isOptimisticThinking = pendingMessage !== null && activeAgentSession?.id === pendingMessage.sessionId; const thinkingTimerLabel = - activeAgentTab?.status === "running" && activeAgentTab.thinkingSinceMs !== null - ? formatThinkingDuration(timerNowMs - activeAgentTab.thinkingSinceMs) - : null; + activeAgentSession?.status === "running" && activeAgentSession.thinkingSinceMs !== null + ? formatThinkingDuration(timerNowMs - activeAgentSession.thinkingSinceMs) + : isOptimisticThinking + ? formatThinkingDuration(timerNowMs - pendingMessage.sentAt) + : null; return ( { - if (activeAgentTab) { - setTabUnread(activeAgentTab.id, unread); + onSetActiveSessionUnread={(unread) => { + if (activeAgentSession) { + setSessionUnread(activeAgentSession.id, unread); } }} sidebarCollapsed={sidebarCollapsed} @@ -749,21 +777,21 @@ const TranscriptPanel = memo(function TranscriptPanel({ border: `1px solid ${t.borderDefault}`, }} > - {activeDiff ? ( @@ -773,7 +801,7 @@ const TranscriptPanel = memo(function TranscriptPanel({ diff={task.diffs[activeDiff]} onAddAttachment={addAttachment} /> - ) : task.tabs.length === 0 ? ( + ) : task.sessions.length === 0 ? (
Sessions are where you chat with the agent. Start one now to send the first prompt on this task.

@@ -560,127 +664,230 @@ export const Sidebar = memo(function Sidebar({ ); } - const { project, task } = item; - const isActive = task.id === activeId; - const isPullRequestItem = isPullRequestSidebarItem(task); - const isRunning = task.tabs.some((tab) => tab.status === "running"); - const isProvisioning = - !isPullRequestItem && - (String(task.status).startsWith("init_") || - task.status === "new" || - task.tabs.some((tab) => tab.status === "pending_provision" || tab.status === "pending_session_create")); - const hasUnread = task.tabs.some((tab) => tab.unread); - const isDraft = task.pullRequest == null || task.pullRequest.status === "draft"; - const totalAdded = task.fileChanges.reduce((sum, file) => sum + file.added, 0); - const totalRemoved = task.fileChanges.reduce((sum, file) => sum + file.removed, 0); - const hasDiffs = totalAdded > 0 || totalRemoved > 0; + if (item.type === "task") { + const { repository, task, taskIndex } = item; + const isActive = task.id === activeId; + const isPullRequestItem = isPullRequestSidebarItem(task); + const isRunning = task.sessions.some((s) => s.status === "running"); + const isProvisioning = + !isPullRequestItem && + (String(task.status).startsWith("init_") || + task.status === "new" || + task.sessions.some((s) => s.status === "pending_provision" || s.status === "pending_session_create")); + const hasUnread = task.sessions.some((s) => s.unread); + const isDraft = task.pullRequest == null || task.pullRequest.status === "draft"; + const totalAdded = task.fileChanges.reduce((sum, file) => sum + file.added, 0); + const totalRemoved = task.fileChanges.reduce((sum, file) => sum + file.removed, 0); + const hasDiffs = totalAdded > 0 || totalRemoved > 0; + const isTaskDropTarget = + drag?.type === "task" && drag.repositoryId === repository.id && drag.overIdx === taskIndex && drag.fromIdx !== taskIndex; + const isTaskBeingDragged = drag?.type === "task" && drag.repositoryId === repository.id && drag.fromIdx === taskIndex && 
didDragRef.current; - return ( -
{ - if (node) { - virtualizer.measureElement(node); - } - }} - style={{ - left: 0, - position: "absolute", - top: 0, - transform: `translateY(${virtualItem.start}px)`, - width: "100%", - }} - > -
-
onSelect(task.id)} - onContextMenu={(event) => { - if (isPullRequestItem && task.pullRequest) { + return ( +
{ + if (node) { + virtualizer.measureElement(node); + } + }} + style={{ + left: 0, + position: "absolute", + top: 0, + transform: `translateY(${virtualItem.start}px)`, + width: "100%", + opacity: isTaskBeingDragged ? 0.4 : 1, + transition: "opacity 150ms ease", + }} + onMouseDown={(event) => { + if (event.button !== 0) return; + if (dragRef.current) return; + event.stopPropagation(); + startYRef.current = event.clientY; + didDragRef.current = false; + const state: DragState = { type: "task", repositoryId: repository.id, fromIdx: taskIndex, overIdx: null }; + dragRef.current = state; + setDrag(state); + }} + > + {isTaskDropTarget ? ( +
+ ) : null} +
+
onSelect(task.id)} + onContextMenu={(event) => { + if (isPullRequestItem && task.pullRequest) { + contextMenu.open(event, [ + { label: "Reload pull request", onClick: () => onReloadPullRequest(task.repoId, task.pullRequest!.number) }, + { label: "Create task", onClick: () => onSelect(task.id) }, + ]); + return; + } contextMenu.open(event, [ - { label: "Reload pull request", onClick: () => onReloadPullRequest(task.repoId, task.pullRequest!.number) }, - { label: "Create task", onClick: () => onSelect(task.id) }, + { label: "Rename task", onClick: () => onRenameTask(task.id) }, + { label: "Rename branch", onClick: () => onRenameBranch(task.id) }, + { label: "Mark as unread", onClick: () => onMarkUnread(task.id) }, ]); - return; - } - contextMenu.open(event, [ - { label: "Rename task", onClick: () => onRenameTask(task.id) }, - { label: "Rename branch", onClick: () => onRenameBranch(task.id) }, - { label: "Mark as unread", onClick: () => onMarkUnread(task.id) }, - ]); - }} - className={css({ - padding: "8px 12px", - borderRadius: "8px", - backgroundColor: isActive ? t.interactiveHover : "transparent", - cursor: "pointer", - transition: "all 150ms ease", - ":hover": { - backgroundColor: t.interactiveHover, - }, - })} - > -
-
- {isPullRequestItem ? ( - - ) : ( - - )} -
-
- +
+
- {task.title} - - {isPullRequestItem && task.statusMessage ? ( - - {task.statusMessage} - - ) : null} -
- {task.pullRequest != null ? ( - - - #{task.pullRequest.number} - - {task.pullRequest.status === "draft" ? : null} - - ) : ( - - )} - {hasDiffs ? ( -
- +{totalAdded} - -{totalRemoved} + {isPullRequestItem ? ( + + ) : ( + + )}
- ) : null} - - {formatRelativeAge(task.updatedAtMs)} - +
+ + {task.title} + + {isPullRequestItem && task.statusMessage ? ( + + {task.statusMessage} + + ) : null} +
+ {task.pullRequest != null ? ( + + + #{task.pullRequest.number} + + {task.pullRequest.status === "draft" ? : null} + + ) : ( + + )} + {hasDiffs ? ( +
+ +{totalAdded} + -{totalRemoved} +
+ ) : null} + + {formatRelativeAge(task.updatedAtMs)} + +
-
- ); + ); + } + + if (item.type === "task-drop-zone") { + const { repository, taskCount } = item; + const isDropTarget = + drag?.type === "task" && + drag.repositoryId === repository.id && + drag.overIdx === taskCount && + drag.fromIdx !== taskCount; + return ( +
{ + if (node) { + virtualizer.measureElement(node); + } + }} + style={{ + left: 0, + position: "absolute", + top: 0, + transform: `translateY(${virtualItem.start}px)`, + width: "100%", + }} + className={css({ + minHeight: "4px", + position: "relative", + "::before": { + content: '""', + position: "absolute", + top: 0, + left: 0, + right: 0, + height: "2px", + backgroundColor: isDropTarget ? t.textPrimary : "transparent", + transition: "background-color 100ms ease", + }, + })} + /> + ); + } + + if (item.type === "repository-drop-zone") { + const isDropTarget = + drag?.type === "repository" && drag.overIdx === item.repositoryCount && drag.fromIdx !== item.repositoryCount; + return ( +
{ + if (node) { + virtualizer.measureElement(node); + } + }} + style={{ + left: 0, + position: "absolute", + top: 0, + transform: `translateY(${virtualItem.start}px)`, + width: "100%", + }} + className={css({ + minHeight: "4px", + position: "relative", + "::before": { + content: '""', + position: "absolute", + top: 0, + left: 0, + right: 0, + height: "2px", + backgroundColor: isDropTarget ? t.textPrimary : "transparent", + transition: "background-color 100ms ease", + }, + })} + /> + ); + } + + return null; })}
@@ -717,19 +924,19 @@ function SidebarFooter() { const snapshot = useMockAppSnapshot(); const organization = activeMockOrganization(snapshot); const [open, setOpen] = useState(false); - const [workspaceFlyoutOpen, setWorkspaceFlyoutOpen] = useState(false); + const [organizationFlyoutOpen, setOrganizationFlyoutOpen] = useState(false); const containerRef = useRef(null); const flyoutTimerRef = useRef | null>(null); - const workspaceTriggerRef = useRef(null); + const organizationTriggerRef = useRef(null); const flyoutRef = useRef(null); const [flyoutPos, setFlyoutPos] = useState<{ top: number; left: number } | null>(null); useLayoutEffect(() => { - if (workspaceFlyoutOpen && workspaceTriggerRef.current) { - const rect = workspaceTriggerRef.current.getBoundingClientRect(); + if (organizationFlyoutOpen && organizationTriggerRef.current) { + const rect = organizationTriggerRef.current.getBoundingClientRect(); setFlyoutPos({ top: rect.top, left: rect.right + 4 }); } - }, [workspaceFlyoutOpen]); + }, [organizationFlyoutOpen]); useEffect(() => { if (!open) return; @@ -739,7 +946,7 @@ function SidebarFooter() { const inFlyout = flyoutRef.current?.contains(target); if (!inContainer && !inFlyout) { setOpen(false); - setWorkspaceFlyoutOpen(false); + setOrganizationFlyoutOpen(false); } } document.addEventListener("mousedown", handleClick); @@ -749,10 +956,10 @@ function SidebarFooter() { const switchToOrg = useCallback( (org: (typeof snapshot.organizations)[number]) => { setOpen(false); - setWorkspaceFlyoutOpen(false); + setOrganizationFlyoutOpen(false); void (async () => { await client.selectOrganization(org.id); - await navigate({ to: `/workspaces/${org.workspaceId}` as never }); + await navigate({ to: `/organizations/${org.organizationId}` as never }); })(); }, [client, navigate], @@ -760,11 +967,11 @@ function SidebarFooter() { const openFlyout = useCallback(() => { if (flyoutTimerRef.current) clearTimeout(flyoutTimerRef.current); - setWorkspaceFlyoutOpen(true); + 
setOrganizationFlyoutOpen(true); }, []); const closeFlyout = useCallback(() => { - flyoutTimerRef.current = setTimeout(() => setWorkspaceFlyoutOpen(false), 150); + flyoutTimerRef.current = setTimeout(() => setOrganizationFlyoutOpen(false), 150); }, []); const menuItems: Array<{ icon: React.ReactNode; label: string; danger?: boolean; onClick: () => void }> = []; @@ -838,14 +1045,14 @@ function SidebarFooter() { })} >
- {/* Workspace flyout trigger */} + {/* Organization flyout trigger */} {organization ? ( -
+
) : null} - {/* Workspace flyout portal */} - {workspaceFlyoutOpen && organization && flyoutPos + {/* Organization flyout portal */} + {organizationFlyoutOpen && organization && flyoutPos ? createPortal(
{ setOpen((prev) => { - if (prev) setWorkspaceFlyoutOpen(false); + if (prev) setOrganizationFlyoutOpen(false); return !prev; }); }} diff --git a/foundry/packages/frontend/src/components/mock-layout/terminal-pane.tsx b/foundry/packages/frontend/src/components/mock-layout/terminal-pane.tsx index ca9326a..95e6876 100644 --- a/foundry/packages/frontend/src/components/mock-layout/terminal-pane.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/terminal-pane.tsx @@ -1,4 +1,4 @@ -import { type SandboxProcessRecord, useInterest } from "@sandbox-agent/foundry-client"; +import { type SandboxProcessRecord, useSubscription } from "@sandbox-agent/foundry-client"; import { ProcessTerminal } from "@sandbox-agent/react"; import { useQuery } from "@tanstack/react-query"; import { useStyletron } from "baseui"; @@ -7,10 +7,10 @@ import { ChevronDown, ChevronUp, Plus, SquareTerminal, Trash2 } from "lucide-rea import { useCallback, useEffect, useMemo, useRef, useState } from "react"; import { SandboxAgent } from "sandbox-agent"; import { backendClient } from "../../lib/backend"; -import { interestManager } from "../../lib/interest"; +import { subscriptionManager } from "../../lib/subscription"; interface TerminalPaneProps { - workspaceId: string; + organizationId: string; taskId: string | null; isExpanded?: boolean; onExpand?: () => void; @@ -95,10 +95,10 @@ function HeaderIconButton({ ); } -export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onCollapse, onStartResize }: TerminalPaneProps) { +export function TerminalPane({ organizationId, taskId, isExpanded, onExpand, onCollapse, onStartResize }: TerminalPaneProps) { const [css] = useStyletron(); const t = useFoundryTokens(); - const [activeTabId, setActiveTabId] = useState(null); + const [activeSessionId, setActiveTabId] = useState(null); const [processTabs, setProcessTabs] = useState([]); const [creatingProcess, setCreatingProcess] = useState(false); const [hoveredTabId, setHoveredTabId] = 
useState(null); @@ -184,17 +184,17 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl [listWidth], ); - const workspaceState = useInterest(interestManager, "workspace", { workspaceId }); + const organizationState = useSubscription(subscriptionManager, "organization", { organizationId }); const taskSummary = useMemo( - () => (taskId ? (workspaceState.data?.taskSummaries.find((task) => task.id === taskId) ?? null) : null), - [taskId, workspaceState.data?.taskSummaries], + () => (taskId ? (organizationState.data?.taskSummaries.find((task) => task.id === taskId) ?? null) : null), + [taskId, organizationState.data?.taskSummaries], ); - const taskState = useInterest( - interestManager, + const taskState = useSubscription( + subscriptionManager, "task", taskSummary ? { - workspaceId, + organizationId, repoId: taskSummary.repoId, taskId: taskSummary.id, } @@ -211,7 +211,7 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl }, [taskState.data]); const connectionQuery = useQuery({ - queryKey: ["mock-layout", "sandbox-agent-connection", workspaceId, activeSandbox?.providerId ?? "", activeSandbox?.sandboxId ?? ""], + queryKey: ["mock-layout", "sandbox-agent-connection", organizationId, activeSandbox?.sandboxProviderId ?? "", activeSandbox?.sandboxId ?? 
""], enabled: Boolean(activeSandbox?.sandboxId), staleTime: 30_000, refetchOnWindowFocus: false, @@ -220,17 +220,17 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl throw new Error("Cannot load a sandbox connection without an active sandbox."); } - return await backendClient.getSandboxAgentConnection(workspaceId, activeSandbox.providerId, activeSandbox.sandboxId); + return await backendClient.getSandboxAgentConnection(organizationId, activeSandbox.sandboxProviderId, activeSandbox.sandboxId); }, }); - const processesState = useInterest( - interestManager, + const processesState = useSubscription( + subscriptionManager, "sandboxProcesses", activeSandbox ? { - workspaceId, - providerId: activeSandbox.providerId, + organizationId, + sandboxProviderId: activeSandbox.sandboxProviderId, sandboxId: activeSandbox.sandboxId, } : null, @@ -325,11 +325,11 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl }); }, []); - const closeTerminalTab = useCallback((tabId: string) => { + const closeTerminalTab = useCallback((sessionId: string) => { setProcessTabs((current) => { - const next = current.filter((tab) => tab.id !== tabId); + const next = current.filter((tab) => tab.id !== sessionId); setActiveTabId((currentActive) => { - if (currentActive === tabId) { + if (currentActive === sessionId) { return next.length > 0 ? 
next[next.length - 1]!.id : null; } return currentActive; @@ -346,8 +346,8 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl setCreatingProcess(true); try { const created = await backendClient.createSandboxProcess({ - workspaceId, - providerId: activeSandbox.providerId, + organizationId, + sandboxProviderId: activeSandbox.sandboxProviderId, sandboxId: activeSandbox.sandboxId, request: defaultShellRequest(activeSandbox.cwd), }); @@ -355,10 +355,10 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl } finally { setCreatingProcess(false); } - }, [activeSandbox, openTerminalTab, workspaceId]); + }, [activeSandbox, openTerminalTab, organizationId]); const processTabsById = useMemo(() => new Map(processTabs.map((tab) => [tab.id, tab])), [processTabs]); - const activeProcessTab = activeTabId ? (processTabsById.get(activeTabId) ?? null) : null; + const activeProcessTab = activeSessionId ? (processTabsById.get(activeSessionId) ?? null) : null; const activeTerminalProcess = useMemo( () => (activeProcessTab ? (processes.find((process) => process.id === activeProcessTab.processId) ?? 
null) : null), [activeProcessTab, processes], @@ -571,9 +571,9 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl css={css} t={t} label="Kill terminal" - disabled={!activeTabId} + disabled={!activeSessionId} onClick={() => { - if (activeTabId) closeTerminalTab(activeTabId); + if (activeSessionId) closeTerminalTab(activeSessionId); }} > @@ -622,7 +622,7 @@ export function TerminalPane({ workspaceId, taskId, isExpanded, onExpand, onColl })} > {processTabs.map((tab, tabIndex) => { - const isActive = activeTabId === tab.id; + const isActive = activeSessionId === tab.id; const isHovered = hoveredTabId === tab.id; const isDropTarget = tabDrag !== null && tabDrag.overIdx === tabIndex && tabDrag.fromIdx !== tabIndex; const isBeingDragged = tabDrag !== null && tabDrag.fromIdx === tabIndex && didTabDrag.current; diff --git a/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx b/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx index 808c4a6..a024871 100644 --- a/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx @@ -6,19 +6,19 @@ import { Clock, PanelLeft, PanelRight } from "lucide-react"; import { useFoundryTokens } from "../../app/theme"; import { deriveHeaderStatus } from "../../features/tasks/status"; import { HeaderStatusPill, PanelHeaderBar } from "./ui"; -import { type AgentTab, type Task } from "./view-model"; +import { type AgentSession, type Task } from "./view-model"; export const TranscriptHeader = memo(function TranscriptHeader({ task, hasSandbox, - activeTab, + activeSession, editingField, editValue, onEditValueChange, onStartEditingField, onCommitEditingField, onCancelEditingField, - onSetActiveTabUnread, + onSetActiveSessionUnread, sidebarCollapsed, onToggleSidebar, onSidebarPeekStart, @@ -29,14 +29,14 @@ export const TranscriptHeader = memo(function TranscriptHeader({ }: { 
task: Task; hasSandbox: boolean; - activeTab: AgentTab | null | undefined; + activeSession: AgentSession | null | undefined; editingField: "title" | "branch" | null; editValue: string; onEditValueChange: (value: string) => void; onStartEditingField: (field: "title" | "branch", value: string) => void; onCommitEditingField: (field: "title" | "branch") => void; onCancelEditingField: () => void; - onSetActiveTabUnread: (unread: boolean) => void; + onSetActiveSessionUnread: (unread: boolean) => void; sidebarCollapsed?: boolean; onToggleSidebar?: () => void; onSidebarPeekStart?: () => void; @@ -51,8 +51,8 @@ export const TranscriptHeader = memo(function TranscriptHeader({ const needsTrafficLightInset = isDesktop && sidebarCollapsed; const taskStatus = task.runtimeStatus ?? task.status; const headerStatus = useMemo( - () => deriveHeaderStatus(taskStatus, task.statusMessage ?? null, activeTab?.status ?? null, activeTab?.errorMessage ?? null, hasSandbox), - [taskStatus, task.statusMessage, activeTab?.status, activeTab?.errorMessage, hasSandbox], + () => deriveHeaderStatus(taskStatus, task.statusMessage ?? null, activeSession?.status ?? null, activeSession?.errorMessage ?? 
null, hasSandbox), + [taskStatus, task.statusMessage, activeSession?.status, activeSession?.errorMessage, hasSandbox], ); return ( diff --git a/foundry/packages/frontend/src/components/mock-layout/ui.tsx b/foundry/packages/frontend/src/components/mock-layout/ui.tsx index a036030..d39a408 100644 --- a/foundry/packages/frontend/src/components/mock-layout/ui.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/ui.tsx @@ -4,7 +4,7 @@ import { GitPullRequest, GitPullRequestDraft } from "lucide-react"; import { useFoundryTokens } from "../../app/theme"; import { getFoundryTokens } from "../../styles/tokens"; -import type { AgentKind, AgentTab } from "./view-model"; +import type { AgentKind, AgentSession } from "./view-model"; export interface ContextMenuItem { label: string; @@ -251,10 +251,10 @@ export const HeaderStatusPill = memo(function HeaderStatusPill({ status }: { sta ); }); -export const TabAvatar = memo(function TabAvatar({ tab }: { tab: AgentTab }) { - if (tab.status === "running" || tab.status === "pending_provision" || tab.status === "pending_session_create") return ; - if (tab.unread) return ; - return ; +export const SessionAvatar = memo(function SessionAvatar({ session }: { session: AgentSession }) { + if (session.status === "running" || session.status === "pending_provision" || session.status === "pending_session_create") return ; + if (session.unread) return ; + return ; }); export const Shell = styled("div", ({ $theme }) => { diff --git a/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts b/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts index 810b065..21228fc 100644 --- a/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts +++ b/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts @@ -1,10 +1,10 @@ import { describe, expect, it } from "vitest"; -import type { WorkbenchAgentTab } from "@sandbox-agent/foundry-shared"; +import type { WorkbenchSession } from 
"@sandbox-agent/foundry-shared"; import { buildDisplayMessages } from "./view-model"; -function makeTab(transcript: WorkbenchAgentTab["transcript"]): WorkbenchAgentTab { +function makeSession(transcript: WorkbenchSession["transcript"]): WorkbenchSession { return { - id: "tab-1", + id: "session-1", sessionId: "session-1", sessionName: "Session 1", agent: "Codex", @@ -25,7 +25,7 @@ function makeTab(transcript: WorkbenchAgentTab["transcript"]): WorkbenchAgentTab describe("buildDisplayMessages", () => { it("collapses chunked agent output into a single display message", () => { const messages = buildDisplayMessages( - makeTab([ + makeSession([ { id: "evt-setup", eventIndex: 0, @@ -139,7 +139,7 @@ describe("buildDisplayMessages", () => { it("hides non-message session update envelopes", () => { const messages = buildDisplayMessages( - makeTab([ + makeSession([ { id: "evt-client", eventIndex: 1, diff --git a/foundry/packages/frontend/src/components/mock-layout/view-model.ts b/foundry/packages/frontend/src/components/mock-layout/view-model.ts index bb5e72b..83f5c7a 100644 --- a/foundry/packages/frontend/src/components/mock-layout/view-model.ts +++ b/foundry/packages/frontend/src/components/mock-layout/view-model.ts @@ -1,6 +1,6 @@ import type { WorkbenchAgentKind as AgentKind, - WorkbenchAgentTab as AgentTab, + WorkbenchSession as AgentSession, WorkbenchDiffLineKind as DiffLineKind, WorkbenchFileChange as FileChange, WorkbenchFileTreeNode as FileTreeNode, @@ -10,12 +10,12 @@ import type { WorkbenchModelGroup as ModelGroup, WorkbenchModelId as ModelId, WorkbenchParsedDiffLine as ParsedDiffLine, - WorkbenchProjectSection as ProjectSection, + WorkbenchRepositorySection as RepositorySection, WorkbenchTranscriptEvent as TranscriptEvent, } from "@sandbox-agent/foundry-shared"; import { extractEventText } from "../../features/sessions/model"; -export type { ProjectSection }; +export type { RepositorySection }; export const MODEL_GROUPS: ModelGroup[] = [ { @@ -138,17 +138,17 @@ 
function historyDetail(event: TranscriptEvent): string { return content || "Untitled event"; } -export function buildHistoryEvents(tabs: AgentTab[]): HistoryEvent[] { - return tabs - .flatMap((tab) => - tab.transcript +export function buildHistoryEvents(sessions: AgentSession[]): HistoryEvent[] { + return sessions + .flatMap((session) => + session.transcript .filter((event) => event.sender === "client") .map((event) => ({ - id: `history-${tab.id}-${event.id}`, + id: `history-${session.id}-${event.id}`, messageId: event.id, preview: historyPreview(event), - sessionName: tab.sessionName, - tabId: tab.id, + sessionName: session.sessionName, + sessionId: session.id, createdAtMs: event.createdAt, detail: historyDetail(event), })), @@ -255,8 +255,8 @@ function shouldDisplayEvent(event: TranscriptEvent): boolean { return Boolean(extractEventText(payload).trim()); } -export function buildDisplayMessages(tab: AgentTab | null | undefined): Message[] { - if (!tab) { +export function buildDisplayMessages(session: AgentSession | null | undefined): Message[] { + if (!session) { return []; } @@ -270,7 +270,7 @@ export function buildDisplayMessages(tab: AgentTab | null | undefined): Message[ pendingAgentMessage = null; }; - for (const event of tab.transcript) { + for (const event of session.transcript) { const chunkText = isAgentChunkEvent(event); if (chunkText !== null) { if (!pendingAgentMessage) { @@ -329,7 +329,7 @@ export function parseDiffLines(diff: string): ParsedDiffLine[] { export type { AgentKind, - AgentTab, + AgentSession, DiffLineKind, FileChange, FileTreeNode, diff --git a/foundry/packages/frontend/src/components/mock-onboarding.tsx b/foundry/packages/frontend/src/components/mock-onboarding.tsx index 66bcfcc..4528695 100644 --- a/foundry/packages/frontend/src/components/mock-onboarding.tsx +++ b/foundry/packages/frontend/src/components/mock-onboarding.tsx @@ -103,8 +103,8 @@ function formatDate(value: string | null): string { return dateFormatter.format(new 
Date(value)); } -function workspacePath(organization: FoundryOrganization): string { - return `/workspaces/${organization.workspaceId}`; +function organizationPath(organization: FoundryOrganization): string { + return `/organizations/${organization.organizationId}`; } function settingsPath(organization: FoundryOrganization): string { @@ -121,7 +121,7 @@ function checkoutPath(organization: FoundryOrganization, planId: FoundryBillingP function statusBadge(t: FoundryTokens, organization: FoundryOrganization) { if (organization.kind === "personal") { - return Personal workspace; + return Personal organization; } return GitHub organization; } @@ -347,11 +347,11 @@ export function MockOrganizationSelectorPage() { /> -

Select a workspace

+

Select a organization

Choose where you want to work.

- {/* Workspace list */} + {/* Organization list */}
{ void (async () => { await client.selectOrganization(organization.id); - await navigate({ to: workspacePath(organization) }); + await navigate({ to: organizationPath(organization) }); })(); }} style={{ @@ -580,13 +580,13 @@ function SettingsLayout({ overflowY: "auto", }} > - {/* Back to workspace */} + {/* Back to organization */} {/* User header */} @@ -775,7 +775,7 @@ export function MockOrganizationSettingsPage({ organization }: { organization: F
{[ "Hand off tasks to teammates for review or continuation", - "Shared workspace with unified billing across your org", + "Shared organization with unified billing across your org", "200 task hours per seat, with bulk hour purchases available", "Collaborative task history and audit trail", ].map((feature) => ( @@ -1132,7 +1132,7 @@ export function MockAccountSettingsPage() { }} > - Back to workspace + Back to organization
diff --git a/foundry/packages/frontend/src/components/workspace-dashboard.tsx b/foundry/packages/frontend/src/components/organization-dashboard.tsx similarity index 79% rename from foundry/packages/frontend/src/components/workspace-dashboard.tsx rename to foundry/packages/frontend/src/components/organization-dashboard.tsx index 569c4bf..461ee90 100644 --- a/foundry/packages/frontend/src/components/workspace-dashboard.tsx +++ b/foundry/packages/frontend/src/components/organization-dashboard.tsx @@ -1,6 +1,6 @@ import { useEffect, useMemo, useState, type ReactNode } from "react"; -import type { AgentType, RepoBranchRecord, RepoOverview, RepoStackAction, TaskWorkbenchSnapshot, WorkbenchTaskStatus } from "@sandbox-agent/foundry-shared"; -import { useInterest } from "@sandbox-agent/foundry-client"; +import type { AgentType, RepoBranchRecord, RepoOverview, TaskWorkbenchSnapshot, WorkbenchTaskStatus } from "@sandbox-agent/foundry-shared"; +import { currentFoundryOrganization, useSubscription } from "@sandbox-agent/foundry-client"; import { useMutation, useQuery } from "@tanstack/react-query"; import { Link, useNavigate } from "@tanstack/react-router"; import { Button } from "baseui/button"; @@ -13,17 +13,17 @@ import { Textarea } from "baseui/textarea"; import { StyledDivider } from "baseui/divider"; import { styled, useStyletron } from "baseui"; import { HeadingSmall, HeadingXSmall, LabelSmall, LabelXSmall, MonoLabelSmall, ParagraphSmall } from "baseui/typography"; -import { Bot, CircleAlert, FolderGit2, GitBranch, MessageSquareText, SendHorizontal, Shuffle } from "lucide-react"; +import { Bot, CircleAlert, FolderGit2, GitBranch, MessageSquareText, SendHorizontal } from "lucide-react"; import { formatDiffStat } from "../features/tasks/model"; import { deriveHeaderStatus, describeTaskState } from "../features/tasks/status"; import { HeaderStatusPill } from "./mock-layout/ui"; import { buildTranscript, resolveSessionSelection } from "../features/sessions/model"; import { 
backendClient } from "../lib/backend"; -import { interestManager } from "../lib/interest"; +import { subscriptionManager } from "../lib/subscription"; import { DevPanel, useDevPanel } from "./dev-panel"; -interface WorkspaceDashboardProps { - workspaceId: string; +interface OrganizationDashboardProps { + organizationId: string; selectedTaskId?: string; selectedRepoId?: string; } @@ -142,8 +142,6 @@ function repoSummary(overview: RepoOverview | undefined): { total: number; mapped: number; unmapped: number; - conflicts: number; - needsRestack: number; openPrs: number; } { if (!overview) { @@ -151,27 +149,17 @@ function repoSummary(overview: RepoOverview | undefined): { total: 0, mapped: 0, unmapped: 0, - conflicts: 0, - needsRestack: 0, openPrs: 0, }; } let mapped = 0; - let conflicts = 0; - let needsRestack = 0; let openPrs = 0; for (const row of overview.branches) { if (row.taskId) { mapped += 1; } - if (row.conflictsWithMain) { - conflicts += 1; - } - if (row.trackedInStack && row.parentBranch && row.hasUnpushed) { - needsRestack += 1; - } if (row.prNumber && row.prState !== "MERGED" && row.prState !== "CLOSED") { openPrs += 1; } @@ -181,16 +169,11 @@ function repoSummary(overview: RepoOverview | undefined): { total: overview.branches.length, mapped, unmapped: Math.max(0, overview.branches.length - mapped), - conflicts, - needsRestack, openPrs, }; } function branchKind(row: RepoBranchRecord): StatusTagKind { - if (row.conflictsWithMain) { - return "negative"; - } if (row.prState === "OPEN" || row.prState === "DRAFT") { return "warning"; } @@ -333,7 +316,7 @@ function MetaRow({ label, value, mono = false }: { label: string; value: string; ); } -export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId }: WorkspaceDashboardProps) { +export function OrganizationDashboard({ organizationId, selectedTaskId, selectedRepoId }: OrganizationDashboardProps) { const [css, theme] = useStyletron(); const navigate = useNavigate(); const showDevPanel = 
useDevPanel(); @@ -346,16 +329,9 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId const [newTitle, setNewTitle] = useState(""); const [newBranchName, setNewBranchName] = useState(""); const [createOnBranch, setCreateOnBranch] = useState(null); - const [addRepoOpen, setAddRepoOpen] = useState(false); const [createTaskOpen, setCreateTaskOpen] = useState(false); - const [addRepoRemote, setAddRepoRemote] = useState(""); - const [addRepoError, setAddRepoError] = useState(null); - const [stackActionError, setStackActionError] = useState(null); - const [stackActionMessage, setStackActionMessage] = useState(null); const [selectedOverviewBranch, setSelectedOverviewBranch] = useState(null); const [overviewFilter, setOverviewFilter] = useState("active"); - const [reparentBranchName, setReparentBranchName] = useState(null); - const [reparentParentBranch, setReparentParentBranch] = useState(""); const [newAgentType, setNewAgentType] = useState(() => { try { const raw = globalThis.localStorage?.getItem("hf.settings.agentType"); @@ -366,16 +342,19 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId }); const [createError, setCreateError] = useState(null); - const workspaceState = useInterest(interestManager, "workspace", { workspaceId }); - const repos = workspaceState.data?.repos ?? []; - const rows = workspaceState.data?.taskSummaries ?? []; + const appState = useSubscription(subscriptionManager, "app", {}); + const activeOrg = appState.data ? currentFoundryOrganization(appState.data) : null; + + const organizationState = useSubscription(subscriptionManager, "organization", { organizationId }); + const repos = organizationState.data?.repos ?? []; + const rows = organizationState.data?.taskSummaries ?? []; const selectedSummary = useMemo(() => rows.find((row) => row.id === selectedTaskId) ?? rows[0] ?? 
null, [rows, selectedTaskId]); - const taskState = useInterest( - interestManager, + const taskState = useSubscription( + subscriptionManager, "task", !repoOverviewMode && selectedSummary ? { - workspaceId, + organizationId, repoId: selectedSummary.repoId, taskId: selectedSummary.id, } @@ -384,13 +363,13 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId const activeRepoId = selectedRepoId ?? createRepoId; const repoOverviewQuery = useQuery({ - queryKey: ["workspace", workspaceId, "repo-overview", activeRepoId], + queryKey: ["organization", organizationId, "repo-overview", activeRepoId], enabled: Boolean(repoOverviewMode && activeRepoId), queryFn: async () => { if (!activeRepoId) { throw new Error("No repo selected"); } - return backendClient.getRepoOverview(workspaceId, activeRepoId); + return backendClient.getRepoOverview(organizationId, activeRepoId); }, }); @@ -455,16 +434,16 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId useEffect(() => { if (!repoOverviewMode && !selectedTaskId && rows.length > 0) { void navigate({ - to: "/workspaces/$workspaceId/tasks/$taskId", + to: "/organizations/$organizationId/tasks/$taskId", params: { - workspaceId, + organizationId, taskId: rows[0]!.id, }, search: { sessionId: undefined }, replace: true, }); } - }, [navigate, repoOverviewMode, rows, selectedTaskId, workspaceId]); + }, [navigate, repoOverviewMode, rows, selectedTaskId, organizationId]); useEffect(() => { setActiveSessionId(null); @@ -494,12 +473,12 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId ); const resolvedSessionId = sessionSelection.sessionId; const staleSessionId = sessionSelection.staleSessionId; - const sessionState = useInterest( - interestManager, + const sessionState = useSubscription( + subscriptionManager, "session", selectedForSession && resolvedSessionId ? 
{ - workspaceId, + organizationId, repoId: selectedForSession.repoId, taskId: selectedForSession.id, sessionId: resolvedSessionId, @@ -537,9 +516,9 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId }, [repoOverviewMode, selectedForSession, selectedSummary]); const devPanelSnapshot = useMemo( (): TaskWorkbenchSnapshot => ({ - workspaceId, + organizationId, repos: repos.map((repo) => ({ id: repo.id, label: repo.label })), - projects: [], + repositories: [], tasks: rows.map((task) => ({ id: task.id, repoId: task.repoId, @@ -551,7 +530,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId updatedAtMs: task.updatedAtMs, branch: task.branch ?? null, pullRequest: task.pullRequest, - tabs: task.sessionsSummary.map((session) => ({ + sessions: task.sessionsSummary.map((session) => ({ ...session, draft: { text: "", @@ -567,7 +546,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId activeSandboxId: selectedForSession?.id === task.id ? selectedForSession.activeSandboxId : null, })), }), - [repos, rows, selectedForSession, workspaceId], + [repos, rows, selectedForSession, organizationId], ); const startSessionFromTask = async (): Promise<{ id: string; status: "running" | "idle" | "error" }> => { @@ -575,8 +554,8 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId throw new Error("No sandbox is available for this task"); } return backendClient.createSandboxSession({ - workspaceId, - providerId: activeSandbox.providerId, + organizationId, + sandboxProviderId: activeSandbox.sandboxProviderId, sandboxId: activeSandbox.sandboxId, prompt: selectedForSession.task, cwd: activeSandbox.cwd ?? 
undefined, @@ -607,8 +586,8 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId } const sessionId = await ensureSessionForPrompt(); await backendClient.sendSandboxPrompt({ - workspaceId, - providerId: activeSandbox.providerId, + organizationId, + sandboxProviderId: activeSandbox.sandboxProviderId, sandboxId: activeSandbox.sandboxId, sessionId, prompt, @@ -634,7 +613,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId const draftBranchName = newBranchName.trim(); return backendClient.createTask({ - workspaceId, + organizationId, repoId, task, agentType: newAgentType, @@ -651,9 +630,9 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId setCreateOnBranch(null); setCreateTaskOpen(false); await navigate({ - to: "/workspaces/$workspaceId/tasks/$taskId", + to: "/organizations/$organizationId/tasks/$taskId", params: { - workspaceId, + organizationId, taskId: task.taskId, }, search: { sessionId: undefined }, @@ -664,63 +643,6 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId }, }); - const addRepo = useMutation({ - mutationFn: async (remoteUrl: string) => { - const trimmed = remoteUrl.trim(); - if (!trimmed) { - throw new Error("Remote URL is required"); - } - return backendClient.addRepo(workspaceId, trimmed); - }, - onSuccess: async (created) => { - setAddRepoError(null); - setAddRepoRemote(""); - setAddRepoOpen(false); - setCreateRepoId(created.repoId); - if (repoOverviewMode) { - await navigate({ - to: "/workspaces/$workspaceId/repos/$repoId", - params: { - workspaceId, - repoId: created.repoId, - }, - }); - } - }, - onError: (error) => { - setAddRepoError(error instanceof Error ? 
error.message : String(error)); - }, - }); - - const runStackAction = useMutation({ - mutationFn: async (input: { action: RepoStackAction; branchName?: string; parentBranch?: string }) => { - if (!activeRepoId) { - throw new Error("No repository selected"); - } - return backendClient.runRepoStackAction({ - workspaceId, - repoId: activeRepoId, - action: input.action, - branchName: input.branchName, - parentBranch: input.parentBranch, - }); - }, - onSuccess: async (result) => { - if (result.executed) { - setStackActionError(null); - setStackActionMessage(result.message); - } else { - setStackActionMessage(null); - setStackActionError(result.message); - } - await repoOverviewQuery.refetch(); - }, - onError: (error) => { - setStackActionMessage(null); - setStackActionError(error instanceof Error ? error.message : String(error)); - }, - }); - const openCreateFromBranch = (repoId: string, branchName: string): void => { setCreateRepoId(repoId); setCreateOnBranch(branchName); @@ -747,7 +669,6 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId const overview = repoOverviewQuery.data; const overviewStats = repoSummary(overview); - const stackActionsEnabled = Boolean(overview?.stackAvailable) && !runStackAction.isPending; const filteredOverviewBranches = useMemo(() => { if (!overview?.branches?.length) { return []; @@ -774,26 +695,6 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId } }, [filteredOverviewBranches, selectedOverviewBranch]); - const handleReparentSubmit = (): void => { - if (!reparentBranchName || !reparentParentBranch.trim()) { - return; - } - setStackActionError(null); - void runStackAction - .mutateAsync({ - action: "reparent_branch", - branchName: reparentBranchName, - parentBranch: reparentParentBranch.trim(), - }) - .then(() => { - setReparentBranchName(null); - setReparentParentBranch(""); - }) - .catch(() => { - // mutation state is surfaced above - }); - }; - const modalOverrides = 
useMemo( () => ({ Dialog: { @@ -834,7 +735,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId gap: "2px", })} > - Workspace + Organization
- {workspaceId} + {organizationId}
@@ -853,12 +754,14 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId size="compact" kind="secondary" onClick={() => { - setAddRepoError(null); - setAddRepoOpen(true); + void navigate({ + to: "/organizations/$organizationId/settings", + params: { organizationId }, + }); }} - data-testid="repo-add-open" + data-testid="organization-settings-open" > - Add Repo + GitHub Settings
@@ -873,14 +776,14 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId - {workspaceState.status === "loading" ? ( + {organizationState.status === "loading" ? ( <> ) : null} - {workspaceState.status !== "loading" && repoGroups.length === 0 ? ( - No repos or tasks yet. Add a repo to start a workspace. + {organizationState.status !== "loading" && repoGroups.length === 0 ? ( + No repos or tasks yet. Create the repository in GitHub, then sync repos from organization settings. ) : null} {repoGroups.map((group) => ( @@ -894,8 +797,8 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId })} >
- - - -
@@ -1099,28 +967,8 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId Branches {overviewStats.total} Mapped {overviewStats.mapped} Unmapped {overviewStats.unmapped} - Conflicts {overviewStats.conflicts} Open PRs {overviewStats.openPrs} - Needs restack {overviewStats.needsRestack}
- - {overview && !overview.stackAvailable ? ( - - git-spice is unavailable for this repo. Stack actions are disabled. - - ) : null} - - {stackActionError ? ( - - {stackActionError} - - ) : null} - - {stackActionMessage ? ( - - {stackActionMessage} - - ) : null} @@ -1139,10 +987,10 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId className={css({ minWidth: "980px", display: "grid", - gridTemplateColumns: "2fr 1.3fr 0.8fr 1fr 1fr 1.4fr", + gridTemplateColumns: "2fr 1.3fr 1fr 1fr 0.9fr 1.2fr", })} > - {["Branch", "Parent", "Ahead", "PR", "CI/Review", "Actions"].map((label) => ( + {["Branch", "Task", "PR", "CI / Review", "Updated", "Actions"].map((label) => (
- - {formatRelativeAge(branch.updatedAt)} - {branch.taskId ? "task" : "unmapped"} - {branch.trackedInStack ? stack : null} + + {branch.commitSha.slice(0, 10) || "-"} +
-
{branch.parentBranch ?? "-"}
-
{branch.hasUnpushed ? "yes" : "-"}
+
{branch.taskTitle ?? branch.taskId ?? "-"}
+
{formatRelativeAge(branch.updatedAt)}
- - - - - - {!branch.taskId ? (
@@ -1636,7 +1434,6 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId > -
@@ -1659,10 +1456,10 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId })} > - - + + )}
@@ -1764,49 +1561,6 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId - setAddRepoOpen(false)} overrides={modalOverrides}> - Add Repo - -
- - Add a git remote URL to this workspace. - - setAddRepoRemote(event.target.value)} - overrides={inputTestIdOverrides("repo-add-remote")} - /> - {addRepoError ? ( - - {addRepoError} - - ) : null} -
-
- - - - -
- { @@ -1847,34 +1601,9 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId overrides={selectTestIdOverrides("task-create-repo")} /> {repos.length === 0 ? ( -
- - No repos yet. - - -
+ + No imported repos yet. Create the repository in GitHub first, then sync repos from organization settings. + ) : null} @@ -1967,52 +1696,10 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
- - { - setReparentBranchName(null); - setReparentParentBranch(""); - }} - overrides={modalOverrides} - > - Reparent Branch - -
- - {reparentBranchName ? `Move ${reparentBranchName} onto a different parent branch.` : ""} - - setReparentParentBranch(event.target.value)} - placeholder="Parent branch" - overrides={inputTestIdOverrides("repo-overview-reparent-input")} - /> -
-
- - - - -
- {showDevPanel ? : null} + {showDevPanel ? ( + + ) : null} ); } diff --git a/foundry/packages/frontend/src/features/tasks/model.test.ts b/foundry/packages/frontend/src/features/tasks/model.test.ts index dc62f30..6db9bb5 100644 --- a/foundry/packages/frontend/src/features/tasks/model.test.ts +++ b/foundry/packages/frontend/src/features/tasks/model.test.ts @@ -3,14 +3,14 @@ import type { TaskRecord } from "@sandbox-agent/foundry-shared"; import { formatDiffStat, groupTasksByRepo } from "./model"; const base: TaskRecord = { - workspaceId: "default", + organizationId: "default", repoId: "repo-a", repoRemote: "https://example.com/repo-a.git", taskId: "task-1", branchName: "feature/one", title: "Feature one", task: "Ship one", - providerId: "local", + sandboxProviderId: "local", status: "running", statusMessage: null, activeSandboxId: "sandbox-1", @@ -18,7 +18,7 @@ const base: TaskRecord = { sandboxes: [ { sandboxId: "sandbox-1", - providerId: "local", + sandboxProviderId: "local", sandboxActorId: null, switchTarget: "sandbox://local/sandbox-1", cwd: null, diff --git a/foundry/packages/frontend/src/lib/backend.ts b/foundry/packages/frontend/src/lib/backend.ts index 158e701..b57cc51 100644 --- a/foundry/packages/frontend/src/lib/backend.ts +++ b/foundry/packages/frontend/src/lib/backend.ts @@ -1,8 +1,8 @@ import { createBackendClient } from "@sandbox-agent/foundry-client"; -import { backendEndpoint, defaultWorkspaceId, frontendClientMode } from "./env"; +import { backendEndpoint, defaultOrganizationId, frontendClientMode } from "./env"; export const backendClient = createBackendClient({ endpoint: backendEndpoint, - defaultWorkspaceId, + defaultOrganizationId, mode: frontendClientMode, }); diff --git a/foundry/packages/frontend/src/lib/env.ts b/foundry/packages/frontend/src/lib/env.ts index ea53e85..5476f83 100644 --- a/foundry/packages/frontend/src/lib/env.ts +++ b/foundry/packages/frontend/src/lib/env.ts @@ -1,6 +1,6 @@ type FoundryRuntimeConfig = { backendEndpoint?: 
string; - defaultWorkspaceId?: string; + defaultOrganizationId?: string; frontendClientMode?: string; }; @@ -26,7 +26,7 @@ const runtimeConfig = typeof window !== "undefined" ? window.__FOUNDRY_RUNTIME_C export const backendEndpoint = runtimeConfig?.backendEndpoint?.trim() || import.meta.env.VITE_HF_BACKEND_ENDPOINT?.trim() || resolveDefaultBackendEndpoint(); -export const defaultWorkspaceId = runtimeConfig?.defaultWorkspaceId?.trim() || import.meta.env.VITE_HF_WORKSPACE?.trim() || "default"; +export const defaultOrganizationId = runtimeConfig?.defaultOrganizationId?.trim() || import.meta.env.VITE_HF_WORKSPACE?.trim() || "default"; function resolveFrontendClientMode(): "mock" | "remote" { const raw = runtimeConfig?.frontendClientMode?.trim().toLowerCase() || frontendEnv.FOUNDRY_FRONTEND_CLIENT_MODE?.trim().toLowerCase(); diff --git a/foundry/packages/frontend/src/lib/interest.ts b/foundry/packages/frontend/src/lib/interest.ts deleted file mode 100644 index a736e71..0000000 --- a/foundry/packages/frontend/src/lib/interest.ts +++ /dev/null @@ -1,5 +0,0 @@ -import { MockInterestManager, RemoteInterestManager } from "@sandbox-agent/foundry-client"; -import { backendClient } from "./backend"; -import { frontendClientMode } from "./env"; - -export const interestManager = frontendClientMode === "mock" ? 
new MockInterestManager() : new RemoteInterestManager(backendClient); diff --git a/foundry/packages/frontend/src/lib/mock-app.ts b/foundry/packages/frontend/src/lib/mock-app.ts index c72a708..acf3009 100644 --- a/foundry/packages/frontend/src/lib/mock-app.ts +++ b/foundry/packages/frontend/src/lib/mock-app.ts @@ -1,7 +1,7 @@ import { useSyncExternalStore } from "react"; import { createFoundryAppClient, - useInterest, + useSubscription, currentFoundryOrganization, currentFoundryUser, eligibleFoundryOrganizations, @@ -9,7 +9,7 @@ import { } from "@sandbox-agent/foundry-client"; import type { FoundryAppSnapshot, FoundryBillingPlanId, FoundryOrganization, UpdateFoundryOrganizationProfileInput } from "@sandbox-agent/foundry-shared"; import { backendClient } from "./backend"; -import { interestManager } from "./interest"; +import { subscriptionManager } from "./subscription"; import { frontendClientMode } from "./env"; const REMOTE_APP_SESSION_STORAGE_KEY = "sandbox-agent-foundry:remote-app-session"; @@ -37,10 +37,10 @@ const legacyAppClient: FoundryAppClient = createFoundryAppClient({ const remoteAppClient: FoundryAppClient = { getSnapshot(): FoundryAppSnapshot { - return interestManager.getSnapshot("app", {}) ?? EMPTY_APP_SNAPSHOT; + return subscriptionManager.getSnapshot("app", {}) ?? 
EMPTY_APP_SNAPSHOT; }, subscribe(listener: () => void): () => void { - return interestManager.subscribe("app", {}, listener); + return subscriptionManager.subscribe("app", {}, listener); }, async signInWithGithub(userId?: string): Promise { void userId; @@ -79,8 +79,8 @@ const remoteAppClient: FoundryAppClient = { async reconnectGithub(organizationId: string): Promise { await backendClient.reconnectAppGithub(organizationId); }, - async recordSeatUsage(workspaceId: string): Promise { - await backendClient.recordAppSeatUsage(workspaceId); + async recordSeatUsage(organizationId: string): Promise { + await backendClient.recordAppSeatUsage(organizationId); }, }; @@ -88,7 +88,7 @@ const appClient: FoundryAppClient = frontendClientMode === "remote" ? remoteAppC export function useMockAppSnapshot(): FoundryAppSnapshot { if (frontendClientMode === "remote") { - const app = useInterest(interestManager, "app", {}); + const app = useSubscription(subscriptionManager, "app", {}); if (app.status !== "loading") { firstSnapshotDelivered = true; } diff --git a/foundry/packages/frontend/src/lib/subscription.ts b/foundry/packages/frontend/src/lib/subscription.ts new file mode 100644 index 0000000..c1618fb --- /dev/null +++ b/foundry/packages/frontend/src/lib/subscription.ts @@ -0,0 +1,5 @@ +import { MockSubscriptionManager, RemoteSubscriptionManager } from "@sandbox-agent/foundry-client"; +import { backendClient } from "./backend"; +import { frontendClientMode } from "./env"; + +export const subscriptionManager = frontendClientMode === "mock" ? 
new MockSubscriptionManager() : new RemoteSubscriptionManager(backendClient); diff --git a/foundry/packages/shared/src/app-shell.ts b/foundry/packages/shared/src/app-shell.ts index 31ee235..93d3b02 100644 --- a/foundry/packages/shared/src/app-shell.ts +++ b/foundry/packages/shared/src/app-shell.ts @@ -65,7 +65,7 @@ export interface FoundryOrganizationSettings { export interface FoundryOrganization { id: string; - workspaceId: string; + organizationId: string; kind: FoundryOrganizationKind; settings: FoundryOrganizationSettings; github: FoundryGithubState; diff --git a/foundry/packages/shared/src/config.ts b/foundry/packages/shared/src/config.ts index 3f7e0b0..44ea722 100644 --- a/foundry/packages/shared/src/config.ts +++ b/foundry/packages/shared/src/config.ts @@ -15,7 +15,7 @@ export const ConfigSchema = z.object({ }) .optional(), notify: z.array(NotifyBackendSchema).default(["terminal"]), - workspace: z + organization: z .object({ default: z.string().min(1).default("default"), }) @@ -39,7 +39,7 @@ export const ConfigSchema = z.object({ backup_interval_secs: 3600, backup_retention_days: 7, }), - providers: z + sandboxProviders: z .object({ local: z .object({ diff --git a/foundry/packages/shared/src/contracts.ts b/foundry/packages/shared/src/contracts.ts index 40c4f53..d6725f7 100644 --- a/foundry/packages/shared/src/contracts.ts +++ b/foundry/packages/shared/src/contracts.ts @@ -1,14 +1,14 @@ import { z } from "zod"; -export const WorkspaceIdSchema = z +export const OrganizationIdSchema = z .string() .min(1) .max(64) .regex(/^[a-zA-Z0-9._-]+$/); -export type WorkspaceId = z.infer; +export type OrganizationId = z.infer; -export const ProviderIdSchema = z.enum(["e2b", "local"]); -export type ProviderId = z.infer; +export const SandboxProviderIdSchema = z.enum(["e2b", "local"]); +export type SandboxProviderId = z.infer; export const AgentTypeSchema = z.enum(["claude", "codex"]); export type AgentType = z.infer; @@ -39,7 +39,7 @@ export const TaskStatusSchema = 
z.enum([ export type TaskStatus = z.infer; export const RepoRecordSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, repoId: RepoIdSchema, remoteUrl: RepoRemoteSchema, createdAt: z.number().int(), @@ -47,33 +47,27 @@ export const RepoRecordSchema = z.object({ }); export type RepoRecord = z.infer; -export const AddRepoInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, - remoteUrl: RepoRemoteSchema, -}); -export type AddRepoInput = z.infer; - export const CreateTaskInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, repoId: RepoIdSchema, task: z.string().min(1), explicitTitle: z.string().trim().min(1).optional(), explicitBranchName: z.string().trim().min(1).optional(), - providerId: ProviderIdSchema.optional(), + sandboxProviderId: SandboxProviderIdSchema.optional(), agentType: AgentTypeSchema.optional(), onBranch: z.string().trim().min(1).optional(), }); export type CreateTaskInput = z.infer; export const TaskRecordSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, repoId: z.string().min(1), repoRemote: RepoRemoteSchema, taskId: z.string().min(1), branchName: z.string().min(1).nullable(), title: z.string().min(1).nullable(), task: z.string().min(1), - providerId: ProviderIdSchema, + sandboxProviderId: SandboxProviderIdSchema, status: TaskStatusSchema, statusMessage: z.string().nullable(), activeSandboxId: z.string().nullable(), @@ -81,7 +75,7 @@ export const TaskRecordSchema = z.object({ sandboxes: z.array( z.object({ sandboxId: z.string().min(1), - providerId: ProviderIdSchema, + sandboxProviderId: SandboxProviderIdSchema, sandboxActorId: z.string().nullable(), switchTarget: z.string().min(1), cwd: z.string().nullable(), @@ -106,7 +100,7 @@ export const TaskRecordSchema = z.object({ export type TaskRecord = z.infer; export const TaskSummarySchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: 
OrganizationIdSchema, repoId: z.string().min(1), taskId: z.string().min(1), branchName: z.string().min(1).nullable(), @@ -117,21 +111,21 @@ export const TaskSummarySchema = z.object({ export type TaskSummary = z.infer; export const TaskActionInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, taskId: z.string().min(1), }); export type TaskActionInput = z.infer; export const SwitchResultSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, taskId: z.string().min(1), - providerId: ProviderIdSchema, + sandboxProviderId: SandboxProviderIdSchema, switchTarget: z.string().min(1), }); export type SwitchResult = z.infer; export const ListTasksInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, repoId: RepoIdSchema.optional(), }); export type ListTasksInput = z.infer; @@ -139,11 +133,6 @@ export type ListTasksInput = z.infer; export const RepoBranchRecordSchema = z.object({ branchName: z.string().min(1), commitSha: z.string().min(1), - parentBranch: z.string().nullable(), - trackedInStack: z.boolean(), - diffStat: z.string().nullable(), - hasUnpushed: z.boolean(), - conflictsWithMain: z.boolean(), taskId: z.string().nullable(), taskTitle: z.string().nullable(), taskStatus: TaskStatusSchema.nullable(), @@ -153,69 +142,27 @@ export const RepoBranchRecordSchema = z.object({ ciStatus: z.string().nullable(), reviewStatus: z.string().nullable(), reviewer: z.string().nullable(), - firstSeenAt: z.number().int().nullable(), - lastSeenAt: z.number().int().nullable(), updatedAt: z.number().int(), }); export type RepoBranchRecord = z.infer; export const RepoOverviewSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, repoId: RepoIdSchema, remoteUrl: RepoRemoteSchema, baseRef: z.string().nullable(), - stackAvailable: z.boolean(), fetchedAt: z.number().int(), - branchSyncAt: z.number().int().nullable(), - prSyncAt: 
z.number().int().nullable(), - branchSyncStatus: z.enum(["pending", "syncing", "synced", "error"]), - prSyncStatus: z.enum(["pending", "syncing", "synced", "error"]), - repoActionJobs: z.array( - z.object({ - jobId: z.string().min(1), - action: z.enum(["sync_repo", "restack_repo", "restack_subtree", "rebase_branch", "reparent_branch"]), - branchName: z.string().nullable(), - parentBranch: z.string().nullable(), - status: z.enum(["queued", "running", "completed", "error"]), - message: z.string().min(1), - createdAt: z.number().int(), - updatedAt: z.number().int(), - completedAt: z.number().int().nullable(), - }), - ), branches: z.array(RepoBranchRecordSchema), }); export type RepoOverview = z.infer; -export const RepoStackActionSchema = z.enum(["sync_repo", "restack_repo", "restack_subtree", "rebase_branch", "reparent_branch"]); -export type RepoStackAction = z.infer; - -export const RepoStackActionInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, - repoId: RepoIdSchema, - action: RepoStackActionSchema, - branchName: z.string().trim().min(1).optional(), - parentBranch: z.string().trim().min(1).optional(), +export const OrganizationUseInputSchema = z.object({ + organizationId: OrganizationIdSchema, }); -export type RepoStackActionInput = z.infer; - -export const RepoStackActionResultSchema = z.object({ - jobId: z.string().min(1).nullable().optional(), - action: RepoStackActionSchema, - executed: z.boolean(), - status: z.enum(["queued", "running", "completed", "error"]).optional(), - message: z.string().min(1), - at: z.number().int(), -}); -export type RepoStackActionResult = z.infer; - -export const WorkspaceUseInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, -}); -export type WorkspaceUseInput = z.infer; +export type OrganizationUseInput = z.infer; export const StarSandboxAgentRepoInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, }); export type StarSandboxAgentRepoInput = z.infer; @@ -226,7 +173,7 
@@ export const StarSandboxAgentRepoResultSchema = z.object({ export type StarSandboxAgentRepoResult = z.infer; export const HistoryQueryInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, limit: z.number().int().positive().max(500).optional(), branch: z.string().min(1).optional(), taskId: z.string().min(1).optional(), @@ -235,7 +182,7 @@ export type HistoryQueryInput = z.infer; export const HistoryEventSchema = z.object({ id: z.number().int(), - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, repoId: z.string().nullable(), taskId: z.string().nullable(), branchName: z.string().nullable(), @@ -246,14 +193,14 @@ export const HistoryEventSchema = z.object({ export type HistoryEvent = z.infer; export const PruneInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, dryRun: z.boolean(), yes: z.boolean(), }); export type PruneInput = z.infer; export const KillInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, taskId: z.string().min(1), deleteBranch: z.boolean(), abandon: z.boolean(), @@ -261,13 +208,13 @@ export const KillInputSchema = z.object({ export type KillInput = z.infer; export const StatuslineInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, format: z.enum(["table", "claude-code"]), }); export type StatuslineInput = z.infer; export const ListInputSchema = z.object({ - workspaceId: WorkspaceIdSchema, + organizationId: OrganizationIdSchema, format: z.enum(["table", "json"]), full: z.boolean(), }); diff --git a/foundry/packages/shared/src/index.ts b/foundry/packages/shared/src/index.ts index be629a6..754bf21 100644 --- a/foundry/packages/shared/src/index.ts +++ b/foundry/packages/shared/src/index.ts @@ -4,4 +4,4 @@ export * from "./config.js"; export * from "./logging.js"; export * from "./realtime-events.js"; export * from "./workbench.js"; -export * from 
"./workspace.js"; +export * from "./organization.js"; diff --git a/foundry/packages/shared/src/organization.ts b/foundry/packages/shared/src/organization.ts new file mode 100644 index 0000000..73e1867 --- /dev/null +++ b/foundry/packages/shared/src/organization.ts @@ -0,0 +1,13 @@ +import type { AppConfig } from "./config.js"; + +export function resolveOrganizationId(flagOrganization: string | undefined, config: AppConfig): string { + if (flagOrganization && flagOrganization.trim().length > 0) { + return flagOrganization.trim(); + } + + if (config.organization.default.trim().length > 0) { + return config.organization.default.trim(); + } + + return "default"; +} diff --git a/foundry/packages/shared/src/realtime-events.ts b/foundry/packages/shared/src/realtime-events.ts index 739dd51..ddb5c2b 100644 --- a/foundry/packages/shared/src/realtime-events.ts +++ b/foundry/packages/shared/src/realtime-events.ts @@ -1,5 +1,5 @@ import type { FoundryAppSnapshot } from "./app-shell.js"; -import type { WorkbenchOpenPrSummary, WorkbenchRepoSummary, WorkbenchSessionDetail, WorkbenchTaskDetail, WorkbenchTaskSummary } from "./workbench.js"; +import type { WorkbenchOpenPrSummary, WorkbenchRepositorySummary, WorkbenchSessionDetail, WorkbenchTaskDetail, WorkbenchTaskSummary } from "./workbench.js"; export interface SandboxProcessSnapshot { id: string; @@ -15,12 +15,12 @@ export interface SandboxProcessSnapshot { tty: boolean; } -/** Workspace-level events broadcast by the workspace actor. */ -export type WorkspaceEvent = +/** Organization-level events broadcast by the organization actor. 
*/ +export type OrganizationEvent = | { type: "taskSummaryUpdated"; taskSummary: WorkbenchTaskSummary } | { type: "taskRemoved"; taskId: string } - | { type: "repoAdded"; repo: WorkbenchRepoSummary } - | { type: "repoUpdated"; repo: WorkbenchRepoSummary } + | { type: "repoAdded"; repo: WorkbenchRepositorySummary } + | { type: "repoUpdated"; repo: WorkbenchRepositorySummary } | { type: "repoRemoved"; repoId: string } | { type: "pullRequestUpdated"; pullRequest: WorkbenchOpenPrSummary } | { type: "pullRequestRemoved"; prId: string }; @@ -31,7 +31,7 @@ export type TaskEvent = { type: "taskDetailUpdated"; detail: WorkbenchTaskDetail /** Session-level events broadcast by the task actor and filtered by sessionId on the client. */ export type SessionEvent = { type: "sessionUpdated"; session: WorkbenchSessionDetail }; -/** App-level events broadcast by the app workspace actor. */ +/** App-level events broadcast by the app organization actor. */ export type AppEvent = { type: "appUpdated"; snapshot: FoundryAppSnapshot }; /** Sandbox process events broadcast by the sandbox instance actor. */ diff --git a/foundry/packages/shared/src/workbench.ts b/foundry/packages/shared/src/workbench.ts index 078ed31..6a0df2e 100644 --- a/foundry/packages/shared/src/workbench.ts +++ b/foundry/packages/shared/src/workbench.ts @@ -1,4 +1,4 @@ -import type { AgentType, ProviderId, TaskStatus } from "./contracts.js"; +import type { AgentType, SandboxProviderId, TaskStatus } from "./contracts.js"; export type WorkbenchTaskStatus = TaskStatus | "new"; export type WorkbenchAgentKind = "Claude" | "Codex" | "Cursor"; @@ -32,7 +32,10 @@ export interface WorkbenchComposerDraft { /** Session metadata without transcript content. */ export interface WorkbenchSessionSummary { id: string; - sessionId: string | null; + /** Stable UI session id used for routing and task-local identity. */ + sessionId: string; + /** Underlying sandbox session id when provisioning has completed. 
*/ + sandboxSessionId?: string | null; sessionName: string; agent: WorkbenchAgentKind; model: WorkbenchModelId; @@ -43,11 +46,10 @@ export interface WorkbenchSessionSummary { errorMessage?: string | null; } -/** Full session content — only fetched when viewing a specific session tab. */ +/** Full session content — only fetched when viewing a specific session. */ export interface WorkbenchSessionDetail { - /** Stable UI tab id used for the session topic key and routing. */ + /** Stable UI session id used for the session topic key and routing. */ sessionId: string; - tabId: string; sandboxSessionId: string | null; sessionName: string; agent: WorkbenchAgentKind; @@ -87,7 +89,7 @@ export interface WorkbenchHistoryEvent { messageId: string; preview: string; sessionName: string; - tabId: string; + sessionId: string; createdAtMs: number; detail: string; } @@ -121,12 +123,12 @@ export interface WorkbenchOpenPrSummary { } export interface WorkbenchSandboxSummary { - providerId: ProviderId; + sandboxProviderId: SandboxProviderId; sandboxId: string; cwd: string | null; } -/** Sidebar-level task data. Materialized in the workspace actor's SQLite. */ +/** Sidebar-level task data. Materialized in the organization actor's SQLite. */ export interface WorkbenchTaskSummary { id: string; repoId: string; @@ -162,8 +164,8 @@ export interface WorkbenchTaskDetail extends WorkbenchTaskSummary { activeSandboxId: string | null; } -/** Repo-level summary for workspace sidebar. */ -export interface WorkbenchRepoSummary { +/** Repo-level summary for organization sidebar. */ +export interface WorkbenchRepositorySummary { id: string; label: string; /** Aggregated branch/task overview state (replaces getRepoOverview polling). */ @@ -171,19 +173,15 @@ export interface WorkbenchRepoSummary { latestActivityMs: number; } -/** Workspace-level snapshot — initial fetch for the workspace topic. 
*/ -export interface WorkspaceSummarySnapshot { - workspaceId: string; - repos: WorkbenchRepoSummary[]; +/** Organization-level snapshot — initial fetch for the organization topic. */ +export interface OrganizationSummarySnapshot { + organizationId: string; + repos: WorkbenchRepositorySummary[]; taskSummaries: WorkbenchTaskSummary[]; openPullRequests: WorkbenchOpenPrSummary[]; } -/** - * Deprecated compatibility aliases for older mock/view-model code. - * New code should use the summary/detail/topic-specific types above. - */ -export interface WorkbenchAgentTab extends WorkbenchSessionSummary { +export interface WorkbenchSession extends WorkbenchSessionSummary { draft: WorkbenchComposerDraft; transcript: WorkbenchTranscriptEvent[]; } @@ -199,7 +197,7 @@ export interface WorkbenchTask { updatedAtMs: number; branch: string | null; pullRequest: WorkbenchPullRequestSummary | null; - tabs: WorkbenchAgentTab[]; + sessions: WorkbenchSession[]; fileChanges: WorkbenchFileChange[]; diffs: Record; fileTree: WorkbenchFileTreeNode[]; @@ -212,7 +210,7 @@ export interface WorkbenchRepo { label: string; } -export interface WorkbenchProjectSection { +export interface WorkbenchRepositorySection { id: string; label: string; updatedAtMs: number; @@ -220,9 +218,9 @@ export interface WorkbenchProjectSection { } export interface TaskWorkbenchSnapshot { - workspaceId: string; + organizationId: string; repos: WorkbenchRepo[]; - projects: WorkbenchProjectSection[]; + repositories: WorkbenchRepositorySection[]; tasks: WorkbenchTask[]; } @@ -256,30 +254,30 @@ export interface TaskWorkbenchRenameInput { export interface TaskWorkbenchSendMessageInput { taskId: string; - tabId: string; + sessionId: string; text: string; attachments: WorkbenchLineAttachment[]; } -export interface TaskWorkbenchTabInput { +export interface TaskWorkbenchSessionInput { taskId: string; - tabId: string; + sessionId: string; } -export interface TaskWorkbenchRenameSessionInput extends TaskWorkbenchTabInput { +export 
interface TaskWorkbenchRenameSessionInput extends TaskWorkbenchSessionInput { title: string; } -export interface TaskWorkbenchChangeModelInput extends TaskWorkbenchTabInput { +export interface TaskWorkbenchChangeModelInput extends TaskWorkbenchSessionInput { model: WorkbenchModelId; } -export interface TaskWorkbenchUpdateDraftInput extends TaskWorkbenchTabInput { +export interface TaskWorkbenchUpdateDraftInput extends TaskWorkbenchSessionInput { text: string; attachments: WorkbenchLineAttachment[]; } -export interface TaskWorkbenchSetSessionUnreadInput extends TaskWorkbenchTabInput { +export interface TaskWorkbenchSetSessionUnreadInput extends TaskWorkbenchSessionInput { unread: boolean; } @@ -290,9 +288,9 @@ export interface TaskWorkbenchDiffInput { export interface TaskWorkbenchCreateTaskResponse { taskId: string; - tabId?: string; + sessionId?: string; } -export interface TaskWorkbenchAddTabResponse { - tabId: string; +export interface TaskWorkbenchAddSessionResponse { + sessionId: string; } diff --git a/foundry/packages/shared/src/workspace.ts b/foundry/packages/shared/src/workspace.ts deleted file mode 100644 index fb8e1b7..0000000 --- a/foundry/packages/shared/src/workspace.ts +++ /dev/null @@ -1,13 +0,0 @@ -import type { AppConfig } from "./config.js"; - -export function resolveWorkspaceId(flagWorkspace: string | undefined, config: AppConfig): string { - if (flagWorkspace && flagWorkspace.trim().length > 0) { - return flagWorkspace.trim(); - } - - if (config.workspace.default.trim().length > 0) { - return config.workspace.default.trim(); - } - - return "default"; -} diff --git a/foundry/packages/shared/test/workspace.test.ts b/foundry/packages/shared/test/organization.test.ts similarity index 59% rename from foundry/packages/shared/test/workspace.test.ts rename to foundry/packages/shared/test/organization.test.ts index c57173a..f1cd3f6 100644 --- a/foundry/packages/shared/test/workspace.test.ts +++ b/foundry/packages/shared/test/organization.test.ts @@ -1,10 
+1,10 @@ import { describe, expect, it } from "vitest"; -import { ConfigSchema, resolveWorkspaceId, type AppConfig } from "../src/index.js"; +import { ConfigSchema, resolveOrganizationId, type AppConfig } from "../src/index.js"; const cfg: AppConfig = ConfigSchema.parse({ auto_submit: true, notify: ["terminal"], - workspace: { default: "team-a" }, + organization: { default: "team-a" }, backend: { host: "127.0.0.1", port: 7741, @@ -14,27 +14,27 @@ const cfg: AppConfig = ConfigSchema.parse({ backup_interval_secs: 3600, backup_retention_days: 7, }, - providers: { + sandboxProviders: { local: {}, e2b: {}, }, }); -describe("resolveWorkspaceId", () => { +describe("resolveOrganizationId", () => { it("prefers explicit flag", () => { - expect(resolveWorkspaceId("feature", cfg)).toBe("feature"); + expect(resolveOrganizationId("feature", cfg)).toBe("feature"); }); it("falls back to config default", () => { - expect(resolveWorkspaceId(undefined, cfg)).toBe("team-a"); + expect(resolveOrganizationId(undefined, cfg)).toBe("team-a"); }); it("falls back to literal default when config value is empty", () => { const empty = { ...cfg, - workspace: { default: "" }, + organization: { default: "" }, } as AppConfig; - expect(resolveWorkspaceId(undefined, empty)).toBe("default"); + expect(resolveOrganizationId(undefined, empty)).toBe("default"); }); }); diff --git a/foundry/research/friction/general.mdx b/foundry/research/friction/general.mdx index b152287..fce920b 100644 --- a/foundry/research/friction/general.mdx +++ b/foundry/research/friction/general.mdx @@ -15,8 +15,8 @@ The root cause of the duplicate HTTP request is unknown. It is not `appWorkspace ### Attempted Fix / Workaround 1. Made `completeAppGithubAuth` clear `oauthState`/`oauthStateExpiresAt` immediately after validation and before `exchangeCode`, so any duplicate request fails the state check instead of hitting GitHub with a consumed code. -2. 
Split `syncGithubSessionFromToken` into a fast path (`initGithubSession` — exchange code, get viewer, store token+identity) and a slow path (`syncGithubOrganizations` — list orgs, list installations, sync each workspace). -3. `completeAppGithubAuth` now uses the fast path and enqueues the slow org sync to the workspace workflow queue (`workspace.command.syncGithubSession`, fire-and-forget). The HTTP callback returns a 302 redirect in ~2s instead of ~18s, eliminating the proxy timeout window. +2. Split `syncGithubSessionFromToken` into a fast path (`initGithubSession` — exchange code, get viewer, store token+identity) and a slow path (`syncGithubOrganizations` — list orgs, list installations, sync each organization). +3. `completeAppGithubAuth` now uses the fast path and enqueues the slow org sync to the organization workflow queue (`organization.command.syncGithubSession`, fire-and-forget). The HTTP callback returns a 302 redirect in ~2s instead of ~18s, eliminating the proxy timeout window. 4. The frontend already polls `getAppSnapshot` every 500ms when any org has `syncStatus === "syncing"`, so the deferred sync is transparent to the user. 5. `bootstrapAppGithubSession` (dev-only) still calls the full synchronous `syncGithubSessionFromToken` since proxy timeouts are not a concern in dev and it needs the session fully populated before returning. @@ -38,14 +38,14 @@ Verifying the BaseUI frontend against the real `rivet-dev/sandbox-agent-testing` Three separate issues stacked together during live verification: -1. A half-created task actor remained in project indexes after earlier runtime failures. The actor state existed, but its durable task row did not, so repo overview polling spammed `Task not found` and kept trying to load an orphaned task. +1. A half-created task actor remained in repository indexes after earlier runtime failures. 
The actor state existed, but its durable task row did not, so repo overview polling spammed `Task not found` and kept trying to load an orphaned task. 2. Rebuilding the backend container outside `just dev` dropped injected GitHub auth, which made repo overview fall back to `Open PRs 0` until `GITHUB_TOKEN`/`GH_TOKEN` were passed back into `docker compose`. 3. In the create-task modal, the BaseUI-controlled form looked populated in the browser, but submit gating/click behavior was unreliable under browser automation, making it hard to distinguish frontend state bugs from backend failures. ### Attempted Fix / Workaround -1. Updated project-actor stale task pruning to treat `Task not found:` the same as actor-not-found and rebuilt the backend image. -2. Recovered the orphaned task by forcing an initialize attempt, which surfaced a missing `body?.providerId` guard in the task init workflow and led to pruning the stale project index row. +1. Updated repository-actor stale task pruning to treat `Task not found:` the same as actor-not-found and rebuilt the backend image. +2. Recovered the orphaned task by forcing an initialize attempt, which surfaced a missing `body?.providerId` guard in the task init workflow and led to pruning the stale repository index row. 3. Recreated the backend with `GITHUB_TOKEN="$(gh auth token)" GH_TOKEN="$(gh auth token)" docker compose ... up -d --build backend` so PR sync could see live GitHub data again. 4. Used `agent-browser` plus screenshots to separate working paths (repo overview + PR visibility) from the remaining broken path (modal submit / task creation UI). @@ -80,22 +80,22 @@ The Docker dev backend container was starting on Bun `1.2.23` and accepting TCP ### What I Was Working On -Implementing Daytona snapshot-based sandbox creation and running required workspace validation. +Implementing Daytona snapshot-based sandbox creation and running required organization validation. 
### Friction / Issue -The workspace `node_modules` tree is partially root-owned in this environment. `pnpm install`/cleanup failed with `EACCES` and left missing local tool entrypoints (for example `turbo`/`typescript`), which blocked `pnpm -w typecheck/build/test` from running end-to-end. +The organization `node_modules` tree is partially root-owned in this environment. `pnpm install`/cleanup failed with `EACCES` and left missing local tool entrypoints (for example `turbo`/`typescript`), which blocked `pnpm -w typecheck/build/test` from running end-to-end. ### Attempted Fix / Workaround -1. Attempted workspace reinstall (`pnpm install`, `CI=true pnpm install`) and package-level reinstall. +1. Attempted organization reinstall (`pnpm install`, `CI=true pnpm install`) and package-level reinstall. 2. Attempted cleanup/recreate of `node_modules`, but root-owned files could not be removed. 3. Added temporary local shims for missing tool entrypoints to continue targeted validation. ### Outcome - Daytona-specific changes and backend tests were validated. -- Full workspace validation remains blocked until `node_modules` ownership is repaired (or container is recreated). +- Full organization validation remains blocked until `node_modules` ownership is repaired (or container is recreated). ## 2026-02-16 - uncommitted @@ -187,7 +187,7 @@ Vitest ESM module namespace exports are non-configurable, so `vi.spyOn(childProc ### Outcome - Backend manager tests are stable under ESM. -- Full workspace tests pass with lifecycle coverage for outdated-backend restart behavior. +- Full organization tests pass with lifecycle coverage for outdated-backend restart behavior. ## 2026-02-08 - uncommitted @@ -202,8 +202,8 @@ The environment did not provide `rg`, and docs/policy files still described Rust ### Attempted Fix / Workaround 1. Switched repository discovery to `find`/`grep`. -2. 
Rewrote project guidance files (`CLAUDE.md`, `skills/SKILL.md`, docs, `SPEC.md`) to match the TypeScript architecture. -3. Added missing TUI test coverage so workspace-wide test runs no longer fail on packages without tests. +2. Rewrote repository guidance files (`CLAUDE.md`, `skills/SKILL.md`, docs, `SPEC.md`) to match the TypeScript architecture. +3. Added missing TUI test coverage so monorepo-wide test runs no longer fail on packages without tests. ### Outcome @@ -214,7 +214,7 @@ The environment did not provide `rg`, and docs/policy files still described Rust ### What I Was Working On -Running full workspace test validation (`pnpm -w test`) for the migrated monorepo. +Running full organization test validation (`pnpm -w test`) for the migrated monorepo. ### Friction / Issue @@ -228,7 +228,7 @@ Backend integration tests depend on native `better-sqlite3` bindings, which were ### Outcome -- Full workspace test suite passes consistently. +- Full organization test suite passes consistently. - Backend unit coverage always runs; DB integration tests run automatically on environments with native bindings. ## 2026-02-09 - aab1012 (working tree) @@ -309,13 +309,13 @@ Running backend tests with the integration flag enabled triggered unrelated acto ### Attempted Fix / Workaround 1. Switched to package-targeted test runs for deterministic coverage (`@sandbox-agent/foundry-backend` + `@sandbox-agent/foundry-frontend`). -2. Relied on required workspace validation (`pnpm -w typecheck`, `pnpm -w build`, `pnpm -w test`) plus targeted stack test files. +2. Relied on required organization validation (`pnpm -w typecheck`, `pnpm -w build`, `pnpm -w test`) plus targeted stack test files. 3. Stopped the runaway integration run and recorded this friction for follow-up. ### Outcome - New stack-focused tests pass in deterministic targeted runs. -- Full required workspace checks pass. +- Full required organization checks pass. 
- Integration-gated suite remains noisy and needs separate stabilization. ## 2026-03-05 - uncommitted @@ -326,7 +326,7 @@ Reviewing architecture for simplification opportunities. ### Friction / Issue -Considered merging `projectPrSync` (30s) and `projectBranchSync` (5s) into a single `projectSync` actor that polls at the faster cadence and does PR fetches every Nth tick. This would reduce actor count by one per repo but violates the single-responsibility-per-actor pattern established in the codebase. Mixed cadences within one actor add conditional tick logic, make the polling intervals harder to reason about independently, and couple two unrelated data sources (git branches vs GitHub API) into one failure domain. +Considered merging `repositoryPrSync` (30s) and `repositoryBranchSync` (5s) into a single `repositorySync` actor that polls at the faster cadence and does PR fetches every Nth tick. This would reduce actor count by one per repo but violates the single-responsibility-per-actor pattern established in the codebase. Mixed cadences within one actor add conditional tick logic, make the polling intervals harder to reason about independently, and couple two unrelated data sources (git branches vs GitHub API) into one failure domain. ### Attempted Fix / Workaround @@ -334,7 +334,7 @@ None — rejected the idea during review. ### Outcome -- Keep `projectPrSync` and `projectBranchSync` as separate actors. +- Keep `repositoryPrSync` and `repositoryBranchSync` as separate actors. - Single-responsibility-per-sync-actor is the right pattern for this codebase. ## 2026-03-06 - 77341ff @@ -345,13 +345,13 @@ Bringing up the Docker-based local dev stack with `just dev` after the BaseUI fr ### Friction / Issue -Docker Desktop recovered, but the frontend container failed immediately with `Cannot find module @rollup/rollup-linux-arm64-gnu`. 
The dev compose setup bind-mounted the host workspace into `/app`, so the Linux container picked up macOS `node_modules` and missed Rollup's Linux optional package. +Docker Desktop recovered, but the frontend container failed immediately with `Cannot find module @rollup/rollup-linux-arm64-gnu`. The dev compose setup bind-mounted the host organization into `/app`, so the Linux container picked up macOS `node_modules` and missed Rollup's Linux optional package. ### Attempted Fix / Workaround 1. Confirmed Docker itself was healthy again by checking the Unix socket, `docker version`, and the backend health endpoint. 2. Reproduced the frontend crash inside `docker compose`. -3. Changed the frontend dev service to use named volumes for workspace `node_modules` and the pnpm store, and to run `pnpm install --frozen-lockfile` inside the container before starting Vite. +3. Changed the frontend dev service to use named volumes for organization `node_modules` and the pnpm store, and to run `pnpm install --frozen-lockfile` inside the container before starting Vite. ### Outcome diff --git a/foundry/research/friction/rivet.mdx b/foundry/research/friction/rivet.mdx index c9cb8eb..a2e4649 100644 --- a/foundry/research/friction/rivet.mdx +++ b/foundry/research/friction/rivet.mdx @@ -12,7 +12,7 @@ Resolving GitHub OAuth callback failures caused by stale actor state after squas 2. **No programmatic way to list or destroy actors on Rivet Cloud without the service key.** The public runner token (`pk_*`) lacks permissions for actor management (list/destroy). The Cloud API token (`cloud_api_*`) in our `.env` was returning "token not found". The actual working token format is the service key (`sk_*`) from the namespace connection URL. This was not documented — the destroy docs reference "admin tokens" which are described as "currently not supported on Rivet Cloud" ([#3530](https://github.com/rivet-dev/rivet/issues/3530)), but the `sk_*` token works. 
The disconnect between the docs and reality cost significant debugging time.
-3. **Actor errors during `getOrCreate` are opaque.** When the `workspace.completeAppGithubAuth` action triggered `getOrCreate` for org workspace actors, the migration failure inside the newly-woken actor was surfaced as `"Internal error"` with no indication that it was a migration/schema issue. The actual error (`table already exists`) was only visible in actor-level logs, not in the action response or the calling backend's logs.
+3. **Actor errors during `getOrCreate` are opaque.** When the `organization.completeAppGithubAuth` action triggered `getOrCreate` for per-org organization actors, the migration failure inside the newly-woken actor was surfaced as `"Internal error"` with no indication that it was a migration/schema issue. The actual error (`table already exists`) was only visible in actor-level logs, not in the action response or the calling backend's logs.

### Attempted Fix / Workaround

@@ -22,7 +22,7 @@ Resolving GitHub OAuth callback failures caused by stale actor state after squas

### Outcome

-- All 4 stale workspace actors destroyed (3 org workspaces + 1 old v2-prefixed app workspace).
+- All 4 stale organization actors destroyed (3 per-org organizations + 1 old v2-prefixed app organization).
- Reverted `IF NOT EXISTS` migration changes so Drizzle migrations remain standard.
- After redeploy, new actors will be created fresh with the correct squashed migration journal.
- **RivetKit improvement opportunities:**

@@ -112,17 +112,17 @@ Diagnosing stuck tasks (`init_create_sandbox`) after switching to a linked Rivet

### Friction / Issue

1. File-system driver actor-state writes still attempted to serialize legacy `kvStorage`, which can exceed Bare's buffer limit and trigger `Failed to save actor state: BareError: (byte:0) too large buffer`.
-2. 
Project snapshots swallowed missing task actors and only logged warnings, so stale `task_index` rows persisted and appeared as stuck/ghost tasks in the UI. +2. Repository snapshots swallowed missing task actors and only logged warnings, so stale `task_index` rows persisted and appeared as stuck/ghost tasks in the UI. ### Attempted Fix / Workaround 1. In RivetKit file-system driver writes, force persisted `kvStorage` to `[]` (runtime KV is SQLite-only) so oversized legacy payloads are never re-serialized. -2. In backend project actor flows (`hydrate`, `snapshot`, `repo overview`, branch registration, PR-close archive), detect `Actor not found` and prune stale `task_index` rows immediately. +2. In backend repository actor flows (`hydrate`, `snapshot`, `repo overview`, branch registration, PR-close archive), detect `Actor not found` and prune stale `task_index` rows immediately. ### Outcome - Prevents repeated serialization crashes caused by legacy oversized state blobs. -- Missing task actors are now self-healed from project indexes instead of repeatedly surfacing as silent warnings. +- Missing task actors are now self-healed from repository indexes instead of repeatedly surfacing as silent warnings. ## 2026-02-12 - uncommitted @@ -193,7 +193,7 @@ Adopt these concrete repo conventions: - Schema rule (critical): - SQLite is **per actor instance**, not a shared DB across all instances. -- Do not “namespace” rows with `workspaceId`/`repoId`/`taskId` columns when those identifiers already live in the actor key/state. +- Do not “namespace” rows with `organizationId`/`repoId`/`taskId` columns when those identifiers already live in the actor key/state. - Prefer single-row tables for single-instance storage (e.g. `id=1`) when appropriate. 
- Migration generation flow (Bun + DrizzleKit): @@ -247,7 +247,7 @@ Verifying Daytona-backed task/session flows for the new frontend and sandbox-ins ### Friction / Issue -Task workflow steps intermittently entered failed state with `StepExhaustedError` and `unknown error` during initialization replay (`init-start-sandbox-instance`, then `init-write-db`), which caused `task.get` to time out and cascaded into `project snapshot timed out` / `workspace list_tasks timed out`. +Task workflow steps intermittently entered failed state with `StepExhaustedError` and `unknown error` during initialization replay (`init-start-sandbox-instance`, then `init-write-db`), which caused `task.get` to time out and cascaded into `repository snapshot timed out` / `organization list_tasks timed out`. ### Attempted Fix / Workaround @@ -305,7 +305,7 @@ if (msg.type === "TickProjectRefresh") { // Coalesce duplicate ticks for a short window. while (Date.now() < deadline) { - const next = await c.queue.next("project", { timeout: deadline - Date.now() }); + const next = await c.queue.next("repository", { timeout: deadline - Date.now() }); if (!next) break; // timeout if (next.type === "TickProjectRefresh") { @@ -348,7 +348,7 @@ Two mistakes in the prior proposal: 2. **Coalesce by message names, not `msg.type`.** - Keep one message name per command/tick channel. -- When a tick window opens, drain and coalesce multiple tick names (e.g. `tick.project.refresh`, `tick.pr.refresh`, `tick.sandbox.health`) into one execution per name. +- When a tick window opens, drain and coalesce multiple tick names (e.g. `tick.repository.refresh`, `tick.pr.refresh`, `tick.sandbox.health`) into one execution per name. 3. **Tick coalesce pattern with timeout (single loop):** @@ -375,7 +375,7 @@ while (true) { // Timeout reached => one or more ticks are due. 
const due = new Set(); const at = Date.now(); - if (at >= nextProjectRefreshAt) due.add("tick.project.refresh"); + if (at >= nextProjectRefreshAt) due.add("tick.repository.refresh"); if (at >= nextPrRefreshAt) due.add("tick.pr.refresh"); if (at >= nextSandboxHealthAt) due.add("tick.sandbox.health"); @@ -388,7 +388,7 @@ while (true) { } // Execute each due tick once, in deterministic order. - if (due.has("tick.project.refresh")) { + if (due.has("tick.repository.refresh")) { await refreshProjectSnapshot(); nextProjectRefreshAt = Date.now() + 5_000; } @@ -424,7 +424,7 @@ Even with queue-timeout ticks, packing multiple independent timer cadences into ### Final Pattern 1. **Parent actors are command-only loops with no timeout.** -- `WorkspaceActor`, `ProjectActor`, `TaskActor`, and `HistoryActor` wait on queue messages only. +- `OrganizationActor`, `RepositoryActor`, `TaskActor`, and `HistoryActor` wait on queue messages only. 2. **Periodic work moves to dedicated child sync actors.** - Each child actor has exactly one timeout cadence (e.g. PR sync, branch sync, task status sync). @@ -439,7 +439,7 @@ Even with queue-timeout ticks, packing multiple independent timer cadences into ### Example Structure -- `ProjectActor` (no timeout): handles commands + applies `project.pr_sync.result` / `project.branch_sync.result` writes. +- `RepositoryActor` (no timeout): handles commands + applies `repository.pr_sync.result` / `repository.branch_sync.result` writes. - `ProjectPrSyncActor` (timeout 30s): polls PR data, sends result message. - `ProjectBranchSyncActor` (timeout 5s): polls branch data, sends result message. - `TaskActor` (no timeout): handles lifecycle + applies `task.status_sync.result` writes. @@ -502,7 +502,7 @@ Removing custom backend REST endpoints and migrating CLI/TUI calls to direct `ri ### Friction / Issue -We had implemented a `/v1/*` HTTP shim (`/v1/tasks`, `/v1/workspaces/use`, etc.) 
between clients and actors, which duplicated actor APIs and introduced an unnecessary transport layer.
+We had implemented a `/v1/*` HTTP shim (`/v1/tasks`, `/v1/organizations/use`, etc.) between clients and actors, which duplicated actor APIs and introduced an unnecessary transport layer.

### Attempted Fix / Workaround

@@ -575,21 +575,21 @@ Removing `*Actor` suffix from all actor export names and registry keys.

### Friction / Issue

-RivetKit's `setup({ use: { ... } })` uses property names as actor identifiers in `client.` calls. All 8 actors were exported as `workspaceActor`, `projectActor`, `taskActor`, etc., which meant client code used verbose `client.workspaceActor.getOrCreate(...)` instead of `client.workspace.getOrCreate(...)`.
+RivetKit's `setup({ use: { ... } })` uses property names as actor identifiers in `client.` calls. All 8 actors were exported as `workspaceActor`, `projectActor`, `taskActor`, etc., which meant client code used verbose `client.workspaceActor.getOrCreate(...)` instead of `client.organization.getOrCreate(...)`.

The `Actor` suffix is redundant — everything in the registry is an actor by definition. It also leaked into type names (`WorkspaceActorHandle`, `ProjectActorInput`, `HistoryActorInput`) and local function names (`workspaceActorKey`, `taskActorKey`).

### Attempted Fix / Workaround

-1. Renamed all 8 actor exports: `workspaceActor` → `workspace`, `projectActor` → `project`, `taskActor` → `task`, `sandboxInstanceActor` → `sandboxInstance`, `historyActor` → `history`, `projectPrSyncActor` → `projectPrSync`, `projectBranchSyncActor` → `projectBranchSync`, `taskStatusSyncActor` → `taskStatusSync`.
+1. Renamed all 8 actor exports: `workspaceActor` → `organization`, `projectActor` → `repository`, `taskActor` → `task`, `sandboxInstanceActor` → `sandboxInstance`, `historyActor` → `history`, `projectPrSyncActor` → `repositoryPrSync`, `projectBranchSyncActor` → `repositoryBranchSync`, `taskStatusSyncActor` → `taskStatusSync`.
2. 
Updated registry keys in `actors/index.ts`. 3. Renamed all `client.Actor` references across 14 files (actor definitions, backend entry, CLI client, tests). -4. Renamed associated types (`ProjectActorInput` → `ProjectInput`, `HistoryActorInput` → `HistoryInput`, `WorkspaceActorHandle` → `WorkspaceHandle`, `TaskActorHandle` → `TaskHandle`). +4. Renamed associated types (`ProjectActorInput` → `RepositoryInput`, `HistoryActorInput` → `HistoryInput`, `WorkspaceActorHandle` → `OrganizationHandle`, `TaskActorHandle` → `TaskHandle`). ### Outcome - Actor names are now concise and match their semantic role. -- Client code reads naturally: `client.workspace.getOrCreate(...)`, `client.task.get(...)`. +- Client code reads naturally: `client.organization.getOrCreate(...)`, `client.task.get(...)`. - No runtime behavior change — registry property names drive actor routing. ## 2026-02-09 - uncommitted @@ -609,8 +609,8 @@ Concrete examples from our codebase: | Actor | Pattern | Why | |-------|---------|-----| -| `workspace` | Plain run | Every handler is a DB query or single actor delegation | -| `project` | Plain run | Handlers are DB upserts or delegate to task actor | +| `organization` | Plain run | Every handler is a DB query or single actor delegation | +| `repository` | Plain run | Handlers are DB upserts or delegate to task actor | | `task` | **Needs workflow** | `initialize` is a 7-step pipeline (createSandbox → ensureAgent → createSession → DB writes → start child actors); post-idle is a 5-step pipeline (commit → push → PR → cache → notify) | | `history` | Plain run | Single DB insert per message | | `sandboxInstance` | Plain run | Single-table CRUD per message | @@ -647,7 +647,7 @@ This matters when reasoning about workflow `listen()` behavior: you might assume RivetKit docs should clarify: 1. Queue names are **per-actor-instance** — two different actor instances can use the same queue name without collision. -2. The dotted naming convention (e.g. 
`project.command.ensure`) is a user convention for readability, not a routing hierarchy.
+2. The dotted naming convention (e.g. `repository.command.ensure`) is a user convention for readability, not a routing hierarchy.
3. `c.queue.next(["a", "b"])` listens on queues named `"a"` and `"b"` *within this actor*, not across actors.

### Outcome

@@ -662,7 +662,7 @@ Migrating task actor to durable workflows. AI-generated queue names used dotted

### Friction / Issue

-When generating actor queue names, the AI (and our own codebase) defaulted to dotted names like `task.command.initialize`, `project.pr_sync.result`, `task.status_sync.control.start`. These work fine in plain `run` loops, but create friction when interacting with the workflow system because `workflowQueueName()` prefixes them with `__workflow:`, producing names like `__workflow:task.command.initialize`.
+When generating actor queue names, the AI (and our own codebase) defaulted to dotted names like `task.command.initialize`, `repository.pr_sync.result`, `task.status_sync.control.start`. These work fine in plain `run` loops, but create friction when interacting with the workflow system because `workflowQueueName()` prefixes them with `__workflow:`, producing names like `__workflow:task.command.initialize`.

Queue names should always be **camelCase** (e.g. `initializeTask`, `statusSyncResult`, `attachTask`). Dotted names are misleading — they imply hierarchy or routing semantics that don't exist (queues are flat, per-actor-instance strings). They also look like object property paths, which causes confusion when used as dynamic property keys on queue handles (`actor.queue["task.command.initialize"]`).

@@ -754,4 +754,4 @@ Using `better-sqlite3` and `node:sqlite` in backend DB bootstrap caused Bun runt

- Backend starts successfully under Bun.
- Shared Drizzle/SQLite actor DB path still works.
-- Workspace build + tests pass.
+- Monorepo build + tests pass.
diff --git a/foundry/research/friction/sandboxes.mdx b/foundry/research/friction/sandboxes.mdx
index e30e85b..38d4b3f 100644
--- a/foundry/research/friction/sandboxes.mdx
+++ b/foundry/research/friction/sandboxes.mdx
@@ -8,7 +8,7 @@ Implementing provider adapters (`worktree`, `daytona`) under the backend package

### Friction / Issue

-Provider interface intentionally keeps `DestroySandboxRequest` minimal (`workspaceId`, `sandboxId`), but local git worktree cleanup may need repo context.
+Provider interface intentionally keeps `DestroySandboxRequest` minimal (`organizationId`, `sandboxId`), but local git worktree cleanup may need repo context.

### Attempted Fix / Workaround

@@ -54,8 +54,8 @@ The previous end-to-end flow implicitly depended on local filesystem paths (`rep

### Attempted Fix / Workaround

-1. Introduced explicit repo remote records (`WorkspaceActor.addRepo`) and validated remotes with `git ls-remote`.
-2. Made `ProjectActor` assert a backend-owned local clone exists on wake and fetch remote branch state from that clone.
+1. Introduced explicit imported repository records sourced from GitHub sync instead of local filesystem paths.
+2. Made `RepositoryActor` assert a backend-owned local clone exists on wake and fetch remote branch state from that clone.
3. Updated PR creation to avoid requiring a checked-out branch by using `gh pr create --head `.
4. Updated `DaytonaProvider.createSandbox` to clone the repo and checkout the branch into a deterministic workdir and return it as `cwd` for sandbox-agent sessions.

diff --git a/foundry/research/realtime-interest-manager-spec.md b/foundry/research/realtime-interest-manager-spec.md
index 9c0fc93..dff2aea 100644
--- a/foundry/research/realtime-interest-manager-spec.md
+++ b/foundry/research/realtime-interest-manager-spec.md
@@ -4,7 +4,7 @@

Replace the current polling + empty-notification + full-refetch architecture with a push-based realtime system.
The client subscribes to topics, receives the initial state, and then receives full replacement payloads for changed entities over WebSocket. No polling. No re-fetching. -This spec covers three layers: backend (materialized state + broadcast), client library (interest manager), and frontend (hook consumption). Comment architecture-related code throughout so new contributors can understand the data flow from comments alone. +This spec covers three layers: backend (materialized state + broadcast), client library (subscription manager), and frontend (hook consumption). Comment architecture-related code throughout so new contributors can understand the data flow from comments alone. --- @@ -17,7 +17,7 @@ This spec covers three layers: backend (materialized state + broadcast), client Currently `WorkbenchTask` is a single flat type carrying everything (sidebar fields + transcripts + diffs + file tree). Split it: ```typescript -/** Sidebar-level task data. Materialized in the workspace actor's SQLite. */ +/** Sidebar-level task data. Materialized in the organization actor's SQLite. */ export interface WorkbenchTaskSummary { id: string; repoId: string; @@ -44,7 +44,7 @@ export interface WorkbenchSessionSummary { created: boolean; } -/** Repo-level summary for workspace sidebar. */ +/** Repo-level summary for organization sidebar. */ export interface WorkbenchRepoSummary { id: string; label: string; @@ -93,9 +93,9 @@ export interface WorkbenchSessionDetail { transcript: WorkbenchTranscriptEvent[]; } -/** Workspace-level snapshot — initial fetch for the workspace topic. */ -export interface WorkspaceSummarySnapshot { - workspaceId: string; +/** Organization-level snapshot — initial fetch for the organization topic. 
*/ +export interface OrganizationSummarySnapshot { + organizationId: string; repos: WorkbenchRepoSummary[]; taskSummaries: WorkbenchTaskSummary[]; } @@ -110,8 +110,8 @@ Remove the old `TaskWorkbenchSnapshot` type and `WorkbenchTask` type once migrat Each event carries the full new state of the changed entity — not a patch, not an empty notification. ```typescript -/** Workspace-level events broadcast by the workspace actor. */ -export type WorkspaceEvent = +/** Organization-level events broadcast by the organization actor. */ +export type OrganizationEvent = | { type: "taskSummaryUpdated"; taskSummary: WorkbenchTaskSummary } | { type: "taskRemoved"; taskId: string } | { type: "repoAdded"; repo: WorkbenchRepoSummary } @@ -126,7 +126,7 @@ export type TaskEvent = export type SessionEvent = | { type: "sessionUpdated"; session: WorkbenchSessionDetail }; -/** App-level events broadcast by the app workspace actor. */ +/** App-level events broadcast by the app organization actor. */ export type AppEvent = | { type: "appUpdated"; snapshot: FoundryAppSnapshot }; @@ -139,13 +139,13 @@ export type SandboxProcessesEvent = ## 2. 
Backend: Materialized State + Broadcasts -### 2.1 Workspace actor — materialized sidebar state +### 2.1 Organization actor — materialized sidebar state **Files:** -- `packages/backend/src/actors/workspace/db/schema.ts` — add tables -- `packages/backend/src/actors/workspace/actions.ts` — replace `buildWorkbenchSnapshot`, add delta handlers +- `packages/backend/src/actors/organization/db/schema.ts` — add tables +- `packages/backend/src/actors/organization/actions.ts` — replace `buildWorkbenchSnapshot`, add delta handlers -Add to workspace actor SQLite schema: +Add to organization actor SQLite schema: ```typescript export const taskSummaries = sqliteTable("task_summaries", { @@ -161,7 +161,7 @@ export const taskSummaries = sqliteTable("task_summaries", { }); ``` -New workspace actions: +New organization actions: ```typescript /** @@ -176,23 +176,23 @@ async applyTaskSummaryUpdate(c, input: { taskSummary: WorkbenchTaskSummary }) { await c.db.insert(taskSummaries).values(toRow(input.taskSummary)) .onConflictDoUpdate({ target: taskSummaries.taskId, set: toRow(input.taskSummary) }).run(); // Broadcast to connected clients - c.broadcast("workspaceUpdated", { type: "taskSummaryUpdated", taskSummary: input.taskSummary }); + c.broadcast("organizationUpdated", { type: "taskSummaryUpdated", taskSummary: input.taskSummary }); } async removeTaskSummary(c, input: { taskId: string }) { await c.db.delete(taskSummaries).where(eq(taskSummaries.taskId, input.taskId)).run(); - c.broadcast("workspaceUpdated", { type: "taskRemoved", taskId: input.taskId }); + c.broadcast("organizationUpdated", { type: "taskRemoved", taskId: input.taskId }); } /** - * Initial fetch for the workspace topic. + * Initial fetch for the organization topic. * Reads entirely from local SQLite — no fan-out to child actors. 
*/ -async getWorkspaceSummary(c, input: { workspaceId: string }): Promise { +async getWorkspaceSummary(c, input: { organizationId: string }): Promise { const repoRows = await c.db.select().from(repos).orderBy(desc(repos.updatedAt)).all(); const taskRows = await c.db.select().from(taskSummaries).orderBy(desc(taskSummaries.updatedAtMs)).all(); return { - workspaceId: c.state.workspaceId, + organizationId: c.state.organizationId, repos: repoRows.map(toRepoSummary), taskSummaries: taskRows.map(toTaskSummary), }; @@ -201,7 +201,7 @@ async getWorkspaceSummary(c, input: { workspaceId: string }): Promise { ... } async getSessionDetail(c, input: { sessionId: string }): Promise { ... } ``` -### 2.4 App workspace actor +### 2.4 App organization actor -**File:** `packages/backend/src/actors/workspace/app-shell.ts` +**File:** `packages/backend/src/actors/organization/app-shell.ts` Change `c.broadcast("appUpdated", { at: Date.now(), sessionId })` to: ```typescript @@ -304,12 +304,12 @@ function broadcastProcessesUpdated(c: any): void { ```typescript /** - * Topic definitions for the interest manager. + * Topic definitions for the subscription manager. * * Each topic defines how to connect to an actor, fetch initial state, * which event to listen for, and how to apply incoming events to cached state. * - * The interest manager uses these definitions to manage WebSocket connections, + * The subscription manager uses these definitions to manage WebSocket connections, * cached state, and subscriptions for all realtime data flows. 
*/ @@ -331,10 +331,10 @@ export interface TopicDefinition { } export interface AppTopicParams {} -export interface WorkspaceTopicParams { workspaceId: string } -export interface TaskTopicParams { workspaceId: string; repoId: string; taskId: string } -export interface SessionTopicParams { workspaceId: string; repoId: string; taskId: string; sessionId: string } -export interface SandboxProcessesTopicParams { workspaceId: string; providerId: string; sandboxId: string } +export interface OrganizationTopicParams { organizationId: string } +export interface TaskTopicParams { organizationId: string; repoId: string; taskId: string } +export interface SessionTopicParams { organizationId: string; repoId: string; taskId: string; sessionId: string } +export interface SandboxProcessesTopicParams { organizationId: string; providerId: string; sandboxId: string } export const topicDefinitions = { app: { @@ -345,12 +345,12 @@ export const topicDefinitions = { applyEvent: (_current, event: AppEvent) => event.snapshot, } satisfies TopicDefinition, - workspace: { - key: (p) => `workspace:${p.workspaceId}`, - event: "workspaceUpdated", - connect: (b, p) => b.connectWorkspace(p.workspaceId), - fetchInitial: (b, p) => b.getWorkspaceSummary(p.workspaceId), - applyEvent: (current, event: WorkspaceEvent) => { + organization: { + key: (p) => `organization:${p.organizationId}`, + event: "organizationUpdated", + connect: (b, p) => b.connectWorkspace(p.organizationId), + fetchInitial: (b, p) => b.getWorkspaceSummary(p.organizationId), + applyEvent: (current, event: OrganizationEvent) => { switch (event.type) { case "taskSummaryUpdated": return { @@ -375,22 +375,22 @@ export const topicDefinitions = { }; } }, - } satisfies TopicDefinition, + } satisfies TopicDefinition, task: { - key: (p) => `task:${p.workspaceId}:${p.taskId}`, + key: (p) => `task:${p.organizationId}:${p.taskId}`, event: "taskUpdated", - connect: (b, p) => b.connectTask(p.workspaceId, p.repoId, p.taskId), - fetchInitial: (b, p) 
=> b.getTaskDetail(p.workspaceId, p.repoId, p.taskId), + connect: (b, p) => b.connectTask(p.organizationId, p.repoId, p.taskId), + fetchInitial: (b, p) => b.getTaskDetail(p.organizationId, p.repoId, p.taskId), applyEvent: (_current, event: TaskEvent) => event.detail, } satisfies TopicDefinition, session: { - key: (p) => `session:${p.workspaceId}:${p.taskId}:${p.sessionId}`, + key: (p) => `session:${p.organizationId}:${p.taskId}:${p.sessionId}`, event: "sessionUpdated", // Reuses the task actor connection — same actor, different event. - connect: (b, p) => b.connectTask(p.workspaceId, p.repoId, p.taskId), - fetchInitial: (b, p) => b.getSessionDetail(p.workspaceId, p.repoId, p.taskId, p.sessionId), + connect: (b, p) => b.connectTask(p.organizationId, p.repoId, p.taskId), + fetchInitial: (b, p) => b.getSessionDetail(p.organizationId, p.repoId, p.taskId, p.sessionId), applyEvent: (current, event: SessionEvent) => { // Filter: only apply if this event is for our session if (event.session.sessionId !== current.sessionId) return current; @@ -399,10 +399,10 @@ export const topicDefinitions = { } satisfies TopicDefinition, sandboxProcesses: { - key: (p) => `sandbox:${p.workspaceId}:${p.sandboxId}`, + key: (p) => `sandbox:${p.organizationId}:${p.sandboxId}`, event: "processesUpdated", - connect: (b, p) => b.connectSandbox(p.workspaceId, p.providerId, p.sandboxId), - fetchInitial: (b, p) => b.listSandboxProcesses(p.workspaceId, p.providerId, p.sandboxId), + connect: (b, p) => b.connectSandbox(p.organizationId, p.providerId, p.sandboxId), + fetchInitial: (b, p) => b.listSandboxProcesses(p.organizationId, p.providerId, p.sandboxId), applyEvent: (_current, event: SandboxProcessesEvent) => event.processes, } satisfies TopicDefinition, } as const; @@ -413,16 +413,16 @@ export type TopicParams = Parameters<(typeof topicDefinition export type TopicData = Awaited>; ``` -### 3.2 Interest manager interface +### 3.2 Subscription manager interface **File:** 
`packages/client/src/interest/manager.ts` (new) ```typescript /** - * The InterestManager owns all realtime actor connections and cached state. + * The SubscriptionManager owns all realtime actor connections and cached state. * * Architecture: - * - Each topic (app, workspace, task, session, sandboxProcesses) maps to an actor + event. + * - Each topic (app, organization, task, session, sandboxProcesses) maps to an actor + event. * - On first subscription, the manager opens a WebSocket connection, fetches initial state, * and listens for events. Events carry full replacement payloads for the changed entity. * - Multiple subscribers to the same topic share one connection and one cached state. @@ -430,7 +430,7 @@ export type TopicData = Awaited { const GRACE_PERIOD_MS = 30_000; /** - * Remote implementation of InterestManager. + * Remote implementation of SubscriptionManager. * Manages WebSocket connections to RivetKit actors via BackendClient. */ -export class RemoteInterestManager implements InterestManager { +export class RemoteSubscriptionManager implements SubscriptionManager { private entries = new Map>(); constructor(private backend: BackendClient) {} @@ -634,7 +634,7 @@ class TopicEntry { **File:** `packages/client/src/interest/mock-manager.ts` (new) -Same `InterestManager` interface. Uses in-memory state. Topic definitions provide mock data. Mutations call `applyEvent` directly on the entry to simulate broadcasts. No WebSocket connections. +Same `SubscriptionManager` interface. Uses in-memory state. Topic definitions provide mock data. Mutations call `applyEvent` directly on the entry to simulate broadcasts. No WebSocket connections. ### 3.5 React hook @@ -651,17 +651,17 @@ import { useSyncExternalStore, useMemo } from "react"; * - Multiple components subscribing to the same topic share one connection. 
* * @example - * // Subscribe to workspace sidebar data - * const workspace = useInterest("workspace", { workspaceId }); + * // Subscribe to organization sidebar data + * const organization = useSubscription("organization", { organizationId }); * * // Subscribe to task detail (only when viewing a task) - * const task = useInterest("task", selectedTaskId ? { workspaceId, repoId, taskId } : null); + * const task = useSubscription("task", selectedTaskId ? { organizationId, repoId, taskId } : null); * * // Subscribe to active session content - * const session = useInterest("session", activeSessionId ? { workspaceId, repoId, taskId, sessionId } : null); + * const session = useSubscription("session", activeSessionId ? { organizationId, repoId, taskId, sessionId } : null); */ -export function useInterest( - manager: InterestManager, +export function useSubscription( + manager: SubscriptionManager, topicKey: K, params: TopicParams | null, ): TopicState { @@ -698,18 +698,18 @@ Add to the `BackendClient` interface: ```typescript // New connection methods (return WebSocket-based ActorConn) -connectWorkspace(workspaceId: string): Promise; -connectTask(workspaceId: string, repoId: string, taskId: string): Promise; -connectSandbox(workspaceId: string, providerId: string, sandboxId: string): Promise; +connectWorkspace(organizationId: string): Promise; +connectTask(organizationId: string, repoId: string, taskId: string): Promise; +connectSandbox(organizationId: string, providerId: string, sandboxId: string): Promise; // New fetch methods (read from materialized state) -getWorkspaceSummary(workspaceId: string): Promise; -getTaskDetail(workspaceId: string, repoId: string, taskId: string): Promise; -getSessionDetail(workspaceId: string, repoId: string, taskId: string, sessionId: string): Promise; +getWorkspaceSummary(organizationId: string): Promise; +getTaskDetail(organizationId: string, repoId: string, taskId: string): Promise; +getSessionDetail(organizationId: string, repoId: 
string, taskId: string, sessionId: string): Promise;
```

Remove:
-- `subscribeWorkbench`, `subscribeApp`, `subscribeSandboxProcesses` (replaced by interest manager)
+- `subscribeWorkbench`, `subscribeApp`, `subscribeSandboxProcesses` (replaced by subscription manager)
- `getWorkbench` (replaced by `getWorkspaceSummary` + `getTaskDetail`)

---

@@ -721,16 +721,16 @@

**File:** `packages/frontend/src/lib/interest.ts` (new)

```typescript
-import { RemoteInterestManager } from "@sandbox-agent/foundry-client";
+import { RemoteSubscriptionManager } from "@sandbox-agent/foundry-client";
import { backendClient } from "./backend";

-export const interestManager = new RemoteInterestManager(backendClient);
+export const subscriptionManager = new RemoteSubscriptionManager(backendClient);
```

Or for mock mode:

```typescript
-import { MockInterestManager } from "@sandbox-agent/foundry-client";
-export const interestManager = new MockInterestManager();
+import { MockSubscriptionManager } from "@sandbox-agent/foundry-client";
+export const subscriptionManager = new MockSubscriptionManager();
```

### 4.2 Replace MockLayout workbench subscription

Before:

```typescript
-const taskWorkbenchClient = useMemo(() => getTaskWorkbenchClient(workspaceId), [workspaceId]);
+const taskWorkbenchClient = useMemo(() => getTaskWorkbenchClient(organizationId), [organizationId]);
const viewModel = useSyncExternalStore(
taskWorkbenchClient.subscribe.bind(taskWorkbenchClient),
taskWorkbenchClient.getSnapshot.bind(taskWorkbenchClient),
@@ -749,9 +749,9 @@
const tasks = viewModel.tasks ?? [];

After:

```typescript
-const workspace = useInterest(interestManager, "workspace", { workspaceId });
-const taskSummaries = workspace.data?.taskSummaries ?? [];
-const repos = workspace.data?.repos ??
[]; +const organization = useSubscription(subscriptionManager, "organization", { organizationId }); +const taskSummaries = organization.data?.taskSummaries ?? []; +const repos = organization.data?.repos ?? []; ``` ### 4.3 Replace MockLayout task detail @@ -759,8 +759,8 @@ const repos = workspace.data?.repos ?? []; When a task is selected, subscribe to its detail: ```typescript -const taskDetail = useInterest(interestManager, "task", - selectedTaskId ? { workspaceId, repoId: activeRepoId, taskId: selectedTaskId } : null +const taskDetail = useSubscription(subscriptionManager, "task", + selectedTaskId ? { organizationId, repoId: activeRepoId, taskId: selectedTaskId } : null ); ``` @@ -769,25 +769,25 @@ const taskDetail = useInterest(interestManager, "task", When a session tab is active: ```typescript -const sessionDetail = useInterest(interestManager, "session", - activeSessionId ? { workspaceId, repoId, taskId, sessionId: activeSessionId } : null +const sessionDetail = useSubscription(subscriptionManager, "session", + activeSessionId ? 
{ organizationId, repoId, taskId, sessionId: activeSessionId } : null ); ``` -### 4.5 Replace workspace-dashboard.tsx polling +### 4.5 Replace organization-dashboard.tsx polling Remove ALL `useQuery` with `refetchInterval` in this file: -- `tasksQuery` (2.5s polling) → `useInterest("workspace", ...)` -- `taskDetailQuery` (2.5s polling) → `useInterest("task", ...)` -- `reposQuery` (10s polling) → `useInterest("workspace", ...)` -- `repoOverviewQuery` (5s polling) → `useInterest("workspace", ...)` -- `sessionsQuery` (3s polling) → `useInterest("task", ...)` (sessionsSummary field) -- `eventsQuery` (2.5s polling) → `useInterest("session", ...)` +- `tasksQuery` (2.5s polling) → `useSubscription("organization", ...)` +- `taskDetailQuery` (2.5s polling) → `useSubscription("task", ...)` +- `reposQuery` (10s polling) → `useSubscription("organization", ...)` +- `repoOverviewQuery` (5s polling) → `useSubscription("organization", ...)` +- `sessionsQuery` (3s polling) → `useSubscription("task", ...)` (sessionsSummary field) +- `eventsQuery` (2.5s polling) → `useSubscription("session", ...)` ### 4.6 Replace terminal-pane.tsx polling -- `taskQuery` (2s polling) → `useInterest("task", ...)` -- `processesQuery` (3s polling) → `useInterest("sandboxProcesses", ...)` +- `taskQuery` (2s polling) → `useSubscription("task", ...)` +- `processesQuery` (3s polling) → `useSubscription("sandboxProcesses", ...)` - Remove `subscribeSandboxProcesses` useEffect ### 4.7 Replace app client subscription @@ -804,14 +804,14 @@ export function useMockAppSnapshot(): FoundryAppSnapshot { After: ```typescript export function useAppSnapshot(): FoundryAppSnapshot { - const app = useInterest(interestManager, "app", {}); + const app = useSubscription(subscriptionManager, "app", {}); return app.data ?? DEFAULT_APP_SNAPSHOT; } ``` ### 4.8 Mutations -Mutations (`createTask`, `renameTask`, `sendMessage`, etc.) no longer need manual `refetch()` or `refresh()` calls after completion. 
The backend mutation triggers a broadcast, which the interest manager receives and applies automatically. +Mutations (`createTask`, `renameTask`, `sendMessage`, etc.) no longer need manual `refetch()` or `refresh()` calls after completion. The backend mutation triggers a broadcast, which the subscription manager receives and applies automatically. Before: ```typescript @@ -841,24 +841,24 @@ const createSession = useMutation({ | File/Code | Reason | |---|---| -| `packages/client/src/remote/workbench-client.ts` | Replaced by interest manager `workspace` + `task` topics | -| `packages/client/src/remote/app-client.ts` | Replaced by interest manager `app` topic | +| `packages/client/src/remote/workbench-client.ts` | Replaced by subscription manager `organization` + `task` topics | +| `packages/client/src/remote/app-client.ts` | Replaced by subscription manager `app` topic | | `packages/client/src/workbench-client.ts` | Factory for above — no longer needed | | `packages/client/src/app-client.ts` | Factory for above — no longer needed | -| `packages/frontend/src/lib/workbench.ts` | Workbench client singleton — replaced by interest manager | -| `subscribeWorkbench` in `backend-client.ts` | Replaced by `connectWorkspace` + interest manager | -| `subscribeSandboxProcesses` in `backend-client.ts` | Replaced by `connectSandbox` + interest manager | -| `subscribeApp` in `backend-client.ts` | Replaced by `connectWorkspace("app")` + interest manager | -| `buildWorkbenchSnapshot` in `workspace/actions.ts` | Replaced by `getWorkspaceSummary` (local reads). Keep as `reconcileWorkbenchState` for recovery only. 
| -| `notifyWorkbenchUpdated` in `workspace/actions.ts` | Replaced by `applyTaskSummaryUpdate` + `c.broadcast` with payload | +| `packages/frontend/src/lib/workbench.ts` | Workbench client singleton — replaced by subscription manager | +| `subscribeWorkbench` in `backend-client.ts` | Replaced by `connectWorkspace` + subscription manager | +| `subscribeSandboxProcesses` in `backend-client.ts` | Replaced by `connectSandbox` + subscription manager | +| `subscribeApp` in `backend-client.ts` | Replaced by `connectWorkspace("app")` + subscription manager | +| `buildWorkbenchSnapshot` in `organization/actions.ts` | Replaced by `getWorkspaceSummary` (local reads). Keep as `reconcileWorkbenchState` for recovery only. | +| `notifyWorkbenchUpdated` in `organization/actions.ts` | Replaced by `applyTaskSummaryUpdate` + `c.broadcast` with payload | | `notifyWorkbenchUpdated` in `task/workbench.ts` | Replaced by `broadcastTaskUpdate` helper | -| `TaskWorkbenchSnapshot` in `shared/workbench.ts` | Replaced by `WorkspaceSummarySnapshot` + `WorkbenchTaskDetail` | +| `TaskWorkbenchSnapshot` in `shared/workbench.ts` | Replaced by `OrganizationSummarySnapshot` + `WorkbenchTaskDetail` | | `WorkbenchTask` in `shared/workbench.ts` | Split into `WorkbenchTaskSummary` + `WorkbenchTaskDetail` | -| `getWorkbench` action on workspace actor | Replaced by `getWorkspaceSummary` | -| `TaskWorkbenchClient` interface | Replaced by `InterestManager` + `useInterest` hook | -| All `useQuery` with `refetchInterval` in `workspace-dashboard.tsx` | Replaced by `useInterest` | -| All `useQuery` with `refetchInterval` in `terminal-pane.tsx` | Replaced by `useInterest` | -| Mock workbench client (`packages/client/src/mock/workbench-client.ts`) | Replaced by `MockInterestManager` | +| `getWorkbench` action on organization actor | Replaced by `getWorkspaceSummary` | +| `TaskWorkbenchClient` interface | Replaced by `SubscriptionManager` + `useSubscription` hook | +| All `useQuery` with `refetchInterval` in 
`organization-dashboard.tsx` | Replaced by `useSubscription` | +| All `useQuery` with `refetchInterval` in `terminal-pane.tsx` | Replaced by `useSubscription` | +| Mock workbench client (`packages/client/src/mock/workbench-client.ts`) | Replaced by `MockSubscriptionManager` | --- @@ -867,27 +867,27 @@ const createSession = useMutation({ Implement in this order to keep the system working at each step: ### Phase 1: Types and backend materialization -1. Add new types to `packages/shared` (`WorkbenchTaskSummary`, `WorkbenchTaskDetail`, `WorkbenchSessionSummary`, `WorkbenchSessionDetail`, `WorkspaceSummarySnapshot`, event types). -2. Add `taskSummaries` table to workspace actor schema. -3. Add `applyTaskSummaryUpdate`, `removeTaskSummary`, `getWorkspaceSummary` actions to workspace actor. +1. Add new types to `packages/shared` (`WorkbenchTaskSummary`, `WorkbenchTaskDetail`, `WorkbenchSessionSummary`, `WorkbenchSessionDetail`, `OrganizationSummarySnapshot`, event types). +2. Add `taskSummaries` table to organization actor schema. +3. Add `applyTaskSummaryUpdate`, `removeTaskSummary`, `getWorkspaceSummary` actions to organization actor. 4. Add `getTaskDetail`, `getSessionDetail` actions to task actor. 5. Replace all `notifyWorkbenchUpdated` call sites with `broadcastTaskUpdate` that pushes summary + broadcasts detail with payload. 6. Change app actor broadcast to include snapshot payload. 7. Change sandbox actor broadcast to include process list payload. 8. Add one-time reconciliation action to populate `taskSummaries` table from existing task actors (run on startup or on-demand). -### Phase 2: Client interest manager -9. Add `InterestManager` interface, `RemoteInterestManager`, `MockInterestManager` to `packages/client`. +### Phase 2: Client subscription manager +9. Add `SubscriptionManager` interface, `RemoteSubscriptionManager`, `MockSubscriptionManager` to `packages/client`. 10. Add topic definitions registry. -11. Add `useInterest` hook. +11. 
Add `useSubscription` hook. 12. Add `connectWorkspace`, `connectTask`, `connectSandbox`, `getWorkspaceSummary`, `getTaskDetail`, `getSessionDetail` to `BackendClient`. ### Phase 3: Frontend migration -13. Replace `useMockAppSnapshot` with `useInterest("app", ...)`. -14. Replace `MockLayout` workbench subscription with `useInterest("workspace", ...)`. -15. Replace task detail view with `useInterest("task", ...)` + `useInterest("session", ...)`. -16. Replace `workspace-dashboard.tsx` polling queries with `useInterest`. -17. Replace `terminal-pane.tsx` polling queries with `useInterest`. +13. Replace `useMockAppSnapshot` with `useSubscription("app", ...)`. +14. Replace `MockLayout` workbench subscription with `useSubscription("organization", ...)`. +15. Replace task detail view with `useSubscription("task", ...)` + `useSubscription("session", ...)`. +16. Replace `organization-dashboard.tsx` polling queries with `useSubscription`. +17. Replace `terminal-pane.tsx` polling queries with `useSubscription`. 18. Remove manual `refetch()` calls from mutations. ### Phase 4: Cleanup @@ -902,10 +902,10 @@ Implement in this order to keep the system working at each step: Add doc comments at these locations: - **Topic definitions** — explain the materialized state pattern, why events carry full entity state instead of patches, and the relationship between topics. -- **`broadcastTaskUpdate` helper** — explain the dual-broadcast pattern (push summary to workspace + broadcast detail to direct subscribers). -- **`InterestManager` interface** — explain the grace period, deduplication, and why mock/remote share the same interface. -- **`useInterest` hook** — explain `useSyncExternalStore` integration, null params for conditional interest, and how params key stabilization works. -- **Workspace actor `taskSummaries` table** — explain this is a materialized read projection maintained by task actor pushes, not a source of truth. 
+- **`broadcastTaskUpdate` helper** — explain the dual-broadcast pattern (push summary to organization + broadcast detail to direct subscribers). +- **`SubscriptionManager` interface** — explain the grace period, deduplication, and why mock/remote share the same interface. +- **`useSubscription` hook** — explain `useSyncExternalStore` integration, null params for conditional interest, and how params key stabilization works. +- **Organization actor `taskSummaries` table** — explain this is a materialized read projection maintained by task actor pushes, not a source of truth. - **`applyTaskSummaryUpdate` action** — explain this is the write path for the materialized projection, called by task actors, not by clients. - **`getWorkspaceSummary` action** — explain this reads from local SQLite only, no fan-out, and why that's the correct pattern. @@ -913,7 +913,7 @@ Add doc comments at these locations: ## 8. Testing -- Interest manager unit tests: subscribe/unsubscribe lifecycle, grace period, deduplication, event application. -- Mock implementation tests: verify same behavior as remote through shared test suite against the `InterestManager` interface. +- Subscription manager unit tests: subscribe/unsubscribe lifecycle, grace period, deduplication, event application. +- Mock implementation tests: verify same behavior as remote through shared test suite against the `SubscriptionManager` interface. - Backend integration: verify `applyTaskSummaryUpdate` correctly materializes and broadcasts. - E2E: verify that a task mutation (e.g. rename) updates the sidebar in realtime without polling. 
diff --git a/foundry/research/specs/async-action-fixes/00-end-to-end-async-realtime-plan.md b/foundry/research/specs/async-action-fixes/00-end-to-end-async-realtime-plan.md index cd9dcbf..1cb4d37 100644 --- a/foundry/research/specs/async-action-fixes/00-end-to-end-async-realtime-plan.md +++ b/foundry/research/specs/async-action-fixes/00-end-to-end-async-realtime-plan.md @@ -28,7 +28,7 @@ The goal is not just to make individual endpoints faster. The goal is to move Fo ### Workbench -- `getWorkbench` still represents a monolithic workspace read that aggregates repo, project, and task state. +- `getWorkbench` still represents a monolithic organization read that aggregates repo, repository, and task state. - The remote workbench store still responds to every event by pulling a full fresh snapshot. - Some task/workbench detail is still too expensive to compute inline and too broad to refresh after every mutation. @@ -57,7 +57,7 @@ Requests should not block on provider calls, repo sync, sandbox provisioning, tr ### View-model rule - App shell view connects to app/session state and only the org actors visible on screen. -- Workspace/task-list view connects to a workspace-owned summary projection. +- Organization/task-list view connects to a organization-owned summary projection. - Task detail view connects directly to the selected task actor. - Sandbox/session detail connects only when the user opens that detail. @@ -99,7 +99,7 @@ The app shell should stop using `/app/snapshot` as the steady-state read model. #### Changes -1. Introduce a small app-shell projection owned by the app workspace actor: +1. Introduce a small app-shell projection owned by the app organization actor: - auth status - current user summary - active org id @@ -121,7 +121,7 @@ The app shell should stop using `/app/snapshot` as the steady-state read model. 
#### Likely files -- `foundry/packages/backend/src/actors/workspace/app-shell.ts` +- `foundry/packages/backend/src/actors/organization/app-shell.ts` - `foundry/packages/client/src/backend-client.ts` - `foundry/packages/client/src/remote/app-client.ts` - `foundry/packages/shared/src/app-shell.ts` @@ -133,42 +133,42 @@ The app shell should stop using `/app/snapshot` as the steady-state read model. - Selecting an org returns quickly and the UI updates from actor events. - App shell refresh cost is bounded by visible state, not every eligible organization on every poll. -### 3. Workspace summary becomes a projection, not a full snapshot +### 3. Organization summary becomes a projection, not a full snapshot -The task list should read a workspace-owned summary projection instead of calling into every task actor on each refresh. +The task list should read a organization-owned summary projection instead of calling into every task actor on each refresh. #### Changes -1. Define a durable workspace summary model with only list-screen fields: +1. Define a durable organization summary model with only list-screen fields: - repo summary - - project summary + - repository summary - task summary - selected/open task ids - unread/session status summary - coarse git/PR state summary -2. Update workspace actor workflows so task/project changes incrementally update this projection. +2. Update organization actor workflows so task/repository changes incrementally update this projection. 3. Change `getWorkbench` to return the projection only. 4. Change `workbenchUpdated` from "invalidate and refetch everything" to "here is the updated projection version or changed entity ids". 5. Remove task-actor fan-out from the default list read path. 
#### Likely files -- `foundry/packages/backend/src/actors/workspace/actions.ts` -- `foundry/packages/backend/src/actors/project/actions.ts` +- `foundry/packages/backend/src/actors/organization/actions.ts` +- `foundry/packages/backend/src/actors/repository/actions.ts` - `foundry/packages/backend/src/actors/task/index.ts` - `foundry/packages/backend/src/actors/task/workbench.ts` -- task/workspace DB schema and migrations +- task/organization DB schema and migrations - `foundry/packages/client/src/remote/workbench-client.ts` #### Acceptance criteria - Workbench list refresh does not call every task actor. - A websocket event does not force a full cross-actor rebuild. -- Initial task-list load time scales roughly with workspace summary size, not repo count times task count times detail reads. +- Initial task-list load time scales roughly with organization summary size, not repo count times task count times detail reads. ### 4. Task detail moves to direct actor reads and events -Heavy task detail should move out of the workspace summary and into the selected task actor. +Heavy task detail should move out of the organization summary and into the selected task actor. #### Changes @@ -258,7 +258,7 @@ Do not delete bootstrap endpoints first. Shrink them after the subscription mode 4. `06-daytona-provisioning-staged-background-flow.md` 5. App shell realtime subscription model 6. `02-repo-overview-from-cached-projection.md` -7. Workspace summary projection +7. Organization summary projection 8. `04-workbench-session-creation-without-inline-provisioning.md` 9. `05-workbench-snapshot-from-derived-state.md` 10. Task-detail direct actor reads/subscriptions @@ -270,7 +270,7 @@ Do not delete bootstrap endpoints first. Shrink them after the subscription mode - Runtime hardening removes the most dangerous correctness bug before more UI load shifts onto actor connections. - The first async workflow items reduce the biggest user-visible stalls quickly. 
- App shell realtime is smaller and lower-risk than the workbench migration, and it removes the current polling loop. -- Workspace summary and task-detail split should happen after the async workflow moves so the projection model does not encode old synchronous assumptions. +- Organization summary and task-detail split should happen after the async workflow moves so the projection model does not encode old synchronous assumptions. - Auth simplification is valuable but not required to remove the current refresh/polling/runtime problems. ## Observability Requirements @@ -291,7 +291,7 @@ Each log line should include a request id or actor/event correlation id where po 1. Ship runtime hardening and observability first. 2. Ship app-shell realtime behind a client flag while keeping snapshot bootstrap. -3. Ship workspace summary projection behind a separate flag. +3. Ship organization summary projection behind a separate flag. 4. Migrate one heavy detail pane at a time off the monolithic workbench payload. 5. Remove polling once the matching event path is proven stable. 6. Only then remove or demote the old snapshot-heavy steady-state flows. 
diff --git a/foundry/research/specs/async-action-fixes/01-task-creation-bootstrap-only.md b/foundry/research/specs/async-action-fixes/01-task-creation-bootstrap-only.md index 2aa9f50..1eb1594 100644 --- a/foundry/research/specs/async-action-fixes/01-task-creation-bootstrap-only.md +++ b/foundry/research/specs/async-action-fixes/01-task-creation-bootstrap-only.md @@ -10,8 +10,8 @@ That makes a user-facing action depend on queue-backed and provider-backed work ## Current Code Context -- Workspace entry point: `foundry/packages/backend/src/actors/workspace/actions.ts` -- Project task creation path: `foundry/packages/backend/src/actors/project/actions.ts` +- Organization entry point: `foundry/packages/backend/src/actors/organization/actions.ts` +- Repository task creation path: `foundry/packages/backend/src/actors/repository/actions.ts` - Task action surface: `foundry/packages/backend/src/actors/task/index.ts` - Task workflow: `foundry/packages/backend/src/actors/task/workflow/index.ts` - Task init/provision steps: `foundry/packages/backend/src/actors/task/workflow/init.ts` @@ -33,8 +33,8 @@ That makes a user-facing action depend on queue-backed and provider-backed work - persisting any immediately-known metadata - returning the current task record 3. After initialize completes, enqueue `task.command.provision` with `wait: false`. -4. Change `workspace.createTask` to: - - create or resolve the project +4. 
Change `organization.createTask` to: + - create or resolve the repository - create the task actor - call `task.initialize(...)` - stop awaiting `task.provision(...)` @@ -51,12 +51,12 @@ That makes a user-facing action depend on queue-backed and provider-backed work ## Files Likely To Change -- `foundry/packages/backend/src/actors/workspace/actions.ts` -- `foundry/packages/backend/src/actors/project/actions.ts` +- `foundry/packages/backend/src/actors/organization/actions.ts` +- `foundry/packages/backend/src/actors/repository/actions.ts` - `foundry/packages/backend/src/actors/task/index.ts` - `foundry/packages/backend/src/actors/task/workflow/index.ts` - `foundry/packages/backend/src/actors/task/workflow/init.ts` -- `foundry/packages/frontend/src/components/workspace-dashboard.tsx` +- `foundry/packages/frontend/src/components/organization-dashboard.tsx` - `foundry/packages/client/src/remote/workbench-client.ts` ## Client Impact diff --git a/foundry/research/specs/async-action-fixes/02-repo-overview-from-cached-projection.md b/foundry/research/specs/async-action-fixes/02-repo-overview-from-cached-projection.md index 27afad5..1d31216 100644 --- a/foundry/research/specs/async-action-fixes/02-repo-overview-from-cached-projection.md +++ b/foundry/research/specs/async-action-fixes/02-repo-overview-from-cached-projection.md @@ -15,11 +15,11 @@ The frontend polls repo overview repeatedly, so this design multiplies slow work ## Current Code Context -- Workspace overview entry point: `foundry/packages/backend/src/actors/workspace/actions.ts` -- Project overview implementation: `foundry/packages/backend/src/actors/project/actions.ts` -- Branch sync poller: `foundry/packages/backend/src/actors/project-branch-sync/index.ts` -- PR sync poller: `foundry/packages/backend/src/actors/project-pr-sync/index.ts` -- Repo overview client polling: `foundry/packages/frontend/src/components/workspace-dashboard.tsx` +- Organization overview entry point: 
`foundry/packages/backend/src/actors/organization/actions.ts` +- Repository overview implementation: `foundry/packages/backend/src/actors/repository/actions.ts` +- Branch sync poller: `foundry/packages/backend/src/actors/repository-branch-sync/index.ts` +- PR sync poller: `foundry/packages/backend/src/actors/repository-pr-sync/index.ts` +- Repo overview client polling: `foundry/packages/frontend/src/components/organization-dashboard.tsx` ## Target Contract @@ -30,27 +30,27 @@ The frontend polls repo overview repeatedly, so this design multiplies slow work ## Proposed Fix 1. Remove inline `forceProjectSync()` from `getRepoOverview`. -2. Add freshness fields to the project projection, for example: +2. Add freshness fields to the repository projection, for example: - `branchSyncAt` - `prSyncAt` - `branchSyncStatus` - `prSyncStatus` 3. Let the existing polling actors own cache refresh. -4. If the client needs a manual refresh, add a non-blocking command such as `project.requestOverviewRefresh` that: +4. If the client needs a manual refresh, add a non-blocking command such as `repository.requestOverviewRefresh` that: - enqueues refresh work - updates sync status to `queued` or `running` - returns immediately -5. Keep `getRepoOverview` as a pure read over project SQLite state. +5. Keep `getRepoOverview` as a pure read over repository SQLite state. 
## Files Likely To Change -- `foundry/packages/backend/src/actors/workspace/actions.ts` -- `foundry/packages/backend/src/actors/project/actions.ts` -- `foundry/packages/backend/src/actors/project/db/schema.ts` -- `foundry/packages/backend/src/actors/project/db/migrations.ts` -- `foundry/packages/backend/src/actors/project-branch-sync/index.ts` -- `foundry/packages/backend/src/actors/project-pr-sync/index.ts` -- `foundry/packages/frontend/src/components/workspace-dashboard.tsx` +- `foundry/packages/backend/src/actors/organization/actions.ts` +- `foundry/packages/backend/src/actors/repository/actions.ts` +- `foundry/packages/backend/src/actors/repository/db/schema.ts` +- `foundry/packages/backend/src/actors/repository/db/migrations.ts` +- `foundry/packages/backend/src/actors/repository-branch-sync/index.ts` +- `foundry/packages/backend/src/actors/repository-pr-sync/index.ts` +- `foundry/packages/frontend/src/components/organization-dashboard.tsx` ## Client Impact diff --git a/foundry/research/specs/async-action-fixes/03-repo-actions-via-background-workflow.md b/foundry/research/specs/async-action-fixes/03-repo-actions-via-background-workflow.md index 2c1738c..9fdd46a 100644 --- a/foundry/research/specs/async-action-fixes/03-repo-actions-via-background-workflow.md +++ b/foundry/research/specs/async-action-fixes/03-repo-actions-via-background-workflow.md @@ -10,20 +10,20 @@ These flows depend on repo/network state and can take minutes. They should not h ## Current Code Context -- Workspace repo action entry point: `foundry/packages/backend/src/actors/workspace/actions.ts` -- Project repo action implementation: `foundry/packages/backend/src/actors/project/actions.ts` -- Branch/task index state lives in the project actor SQLite DB. 
+- Organization repo action entry point: `foundry/packages/backend/src/actors/organization/actions.ts` +- Repository repo action implementation: `foundry/packages/backend/src/actors/repository/actions.ts` +- Branch/task index state lives in the repository actor SQLite DB. - Current forced sync uses the PR and branch polling actors before and after the action. ## Target Contract - Repo-affecting actions are accepted quickly and run in the background. -- The project actor owns a durable action record with progress and final result. -- Clients observe status via project/task state instead of waiting for a single response. +- The repository actor owns a durable action record with progress and final result. +- Clients observe status via repository/task state instead of waiting for a single response. ## Proposed Fix -1. Introduce a project-level workflow/job model for repo actions, for example: +1. Introduce a repository-level workflow/job model for repo actions, for example: - `sync_repo` - `restack_repo` - `restack_subtree` @@ -49,11 +49,11 @@ These flows depend on repo/network state and can take minutes. They should not h ## Files Likely To Change -- `foundry/packages/backend/src/actors/workspace/actions.ts` -- `foundry/packages/backend/src/actors/project/actions.ts` -- `foundry/packages/backend/src/actors/project/db/schema.ts` -- `foundry/packages/backend/src/actors/project/db/migrations.ts` -- `foundry/packages/frontend/src/components/workspace-dashboard.tsx` +- `foundry/packages/backend/src/actors/organization/actions.ts` +- `foundry/packages/backend/src/actors/repository/actions.ts` +- `foundry/packages/backend/src/actors/repository/db/schema.ts` +- `foundry/packages/backend/src/actors/repository/db/migrations.ts` +- `foundry/packages/frontend/src/components/organization-dashboard.tsx` - Any shared types in `foundry/packages/shared/src` ## Client Impact @@ -70,5 +70,5 @@ These flows depend on repo/network state and can take minutes. 
They should not h ## Implementation Notes - Keep validation cheap in the request path; expensive repo inspection belongs in the workflow. -- If job rows are added, decide whether they are project-owned only or also mirrored into history events for UI consumption. +- If job rows are added, decide whether they are repository-owned only or also mirrored into history events for UI consumption. - Fresh-agent check: branch-backed task creation and explicit repo stack actions should use the same background job/status vocabulary where possible. diff --git a/foundry/research/specs/async-action-fixes/04-workbench-session-creation-without-inline-provisioning.md b/foundry/research/specs/async-action-fixes/04-workbench-session-creation-without-inline-provisioning.md index 9221780..d48e4f0 100644 --- a/foundry/research/specs/async-action-fixes/04-workbench-session-creation-without-inline-provisioning.md +++ b/foundry/research/specs/async-action-fixes/04-workbench-session-creation-without-inline-provisioning.md @@ -8,7 +8,7 @@ Creating a workbench tab currently provisions the whole task if no active sandbo ## Current Code Context -- Workspace workbench action entry point: `foundry/packages/backend/src/actors/workspace/actions.ts` +- Organization workbench action entry point: `foundry/packages/backend/src/actors/organization/actions.ts` - Task workbench behavior: `foundry/packages/backend/src/actors/task/workbench.ts` - Task provision action: `foundry/packages/backend/src/actors/task/index.ts` - Sandbox session creation path: `foundry/packages/backend/src/actors/sandbox-instance/index.ts` @@ -36,7 +36,7 @@ Creating a workbench tab currently provisions the whole task if no active sandbo ## Files Likely To Change -- `foundry/packages/backend/src/actors/workspace/actions.ts` +- `foundry/packages/backend/src/actors/organization/actions.ts` - `foundry/packages/backend/src/actors/task/workbench.ts` - `foundry/packages/backend/src/actors/task/index.ts` - 
`foundry/packages/backend/src/actors/task/db/schema.ts` diff --git a/foundry/research/specs/async-action-fixes/05-workbench-snapshot-from-derived-state.md b/foundry/research/specs/async-action-fixes/05-workbench-snapshot-from-derived-state.md index 55401a7..07cc0a5 100644 --- a/foundry/research/specs/async-action-fixes/05-workbench-snapshot-from-derived-state.md +++ b/foundry/research/specs/async-action-fixes/05-workbench-snapshot-from-derived-state.md @@ -17,7 +17,7 @@ The remote workbench client refreshes after each action and on update events, so ## Current Code Context -- Workspace workbench snapshot builder: `foundry/packages/backend/src/actors/workspace/actions.ts` +- Organization workbench snapshot builder: `foundry/packages/backend/src/actors/organization/actions.ts` - Task workbench snapshot builder: `foundry/packages/backend/src/actors/task/workbench.ts` - Sandbox session event persistence: `foundry/packages/backend/src/actors/sandbox-instance/persist.ts` - Remote workbench client refresh loop: `foundry/packages/client/src/remote/workbench-client.ts` @@ -43,7 +43,7 @@ The remote workbench client refreshes after each action and on update events, so ## Files Likely To Change -- `foundry/packages/backend/src/actors/workspace/actions.ts` +- `foundry/packages/backend/src/actors/organization/actions.ts` - `foundry/packages/backend/src/actors/task/workbench.ts` - `foundry/packages/backend/src/actors/task/db/schema.ts` - `foundry/packages/backend/src/actors/task/db/migrations.ts` diff --git a/foundry/research/specs/async-action-fixes/07-auth-identity-simplification.md b/foundry/research/specs/async-action-fixes/07-auth-identity-simplification.md index 50f3b56..dbaf976 100644 --- a/foundry/research/specs/async-action-fixes/07-auth-identity-simplification.md +++ b/foundry/research/specs/async-action-fixes/07-auth-identity-simplification.md @@ -17,8 +17,8 @@ Authentication and user identity are conflated into a single `appSessions` table ## Current Code Context - 
Custom OAuth flow: `foundry/packages/backend/src/services/app-github.ts` (`buildAuthorizeUrl`, `exchangeCode`, `getViewer`) -- Session + identity management: `foundry/packages/backend/src/actors/workspace/app-shell.ts` (`ensureAppSession`, `updateAppSession`, `initGithubSession`, `syncGithubOrganizations`) -- Session schema: `foundry/packages/backend/src/actors/workspace/db/schema.ts` (`appSessions` table) +- Session + identity management: `foundry/packages/backend/src/actors/organization/app-shell.ts` (`ensureAppSession`, `updateAppSession`, `initGithubSession`, `syncGithubOrganizations`) +- Session schema: `foundry/packages/backend/src/actors/organization/db/schema.ts` (`appSessions` table) - Shared types: `foundry/packages/shared/src/app-shell.ts` (`FoundryUser`, `FoundryAppSnapshot`) - HTTP routes: `foundry/packages/backend/src/index.ts` (`resolveSessionId`, `/v1/auth/github/*`, all `/v1/app/*` routes) - Frontend session persistence: `foundry/packages/client/src/backend-client.ts` (`persistAppSessionId`, `x-foundry-session` header, `foundrySession` URL param extraction) @@ -41,7 +41,7 @@ Authentication and user identity are conflated into a single `appSessions` table - BetterAuth uses a custom adapter that routes all DB operations through RivetKit actors. - Each user has their own actor. BetterAuth's `user`, `session`, and `account` tables live in the per-user actor's SQLite via `c.db`. - The adapter resolves which actor to target based on the primary key BetterAuth passes for each operation (user ID, session ID, account ID). -- A lightweight **session index** on the app-shell workspace actor maps session tokens → user actor identity, so inbound requests can be routed to the correct user actor without knowing the user ID upfront. +- A lightweight **session index** on the app-shell organization actor maps session tokens → user actor identity, so inbound requests can be routed to the correct user actor without knowing the user ID upfront. 
### Canonical user record @@ -70,9 +70,9 @@ BetterAuth expects a single database. Foundry uses per-actor SQLite — each act When an HTTP request arrives, the backend has a session token but doesn't know the user ID yet. BetterAuth calls adapter methods like `findSession(sessionId)` to resolve this. But which actor holds that session row? -**Solution: session index on the app-shell workspace actor.** +**Solution: session index on the app-shell organization actor.** -The app-shell workspace actor (which already handles auth routing) maintains a lightweight index table: +The app-shell organization actor (which already handles auth routing) maintains a lightweight index table: ``` sessionIndex @@ -83,7 +83,7 @@ sessionIndex The adapter flow for session lookup: 1. BetterAuth calls `findSession(sessionId)`. -2. Adapter queries `sessionIndex` on the workspace actor to resolve `userActorKey`. +2. Adapter queries `sessionIndex` on the organization actor to resolve `userActorKey`. 3. Adapter gets the user actor handle and queries BetterAuth's `session` table in that actor's `c.db`. The adapter flow for user creation (OAuth callback): @@ -91,12 +91,12 @@ The adapter flow for user creation (OAuth callback): 2. Adapter resolves the GitHub numeric ID from the user data. 3. Adapter creates/gets the user actor keyed by GitHub ID. 4. Adapter inserts into BetterAuth's `user` table in that actor's `c.db`. -5. When `createSession` follows, adapter writes to the user actor's `session` table AND inserts into the workspace actor's `sessionIndex`. +5. When `createSession` follows, adapter writes to the user actor's `session` table AND inserts into the organization actor's `sessionIndex`. 
### User actor shape ```text -UserActor (key: ["ws", workspaceId, "user", githubNumericId]) +UserActor (key: ["ws", organizationId, "user", githubNumericId]) ├── BetterAuth tables: user, session, account (managed by BetterAuth schema) ├── userProfiles (app-specific: eligibleOrganizationIds, starterRepoStatus, roleLabel) └── sessionState (app-specific: activeOrganizationId per session) @@ -127,15 +127,15 @@ The adapter must inspect `model` and `where` to determine the target actor: | Model | Routing strategy | |-------|-----------------| | `user` (by id) | User actor key derived directly from user ID | -| `user` (by email) | `emailIndex` on workspace actor → user actor key | -| `session` (by token) | `sessionIndex` on workspace actor → user actor key | -| `session` (by id) | `sessionIndex` on workspace actor → user actor key | +| `user` (by email) | `emailIndex` on organization actor → user actor key | +| `session` (by token) | `sessionIndex` on organization actor → user actor key | +| `session` (by id) | `sessionIndex` on organization actor → user actor key | | `session` (by userId) | User actor key derived directly from userId | | `account` | Always has `userId` in where or data → user actor key | -| `verification` | Workspace actor (not user-scoped — used for email verification, password reset) | +| `verification` | Organization actor (not user-scoped — used for email verification, password reset) | -On `create` for `session` model: write to user actor's `session` table AND insert into workspace actor's `sessionIndex`. -On `delete` for `session` model: delete from user actor's `session` table AND remove from workspace actor's `sessionIndex`. +On `create` for `session` model: write to user actor's `session` table AND insert into organization actor's `sessionIndex`. +On `delete` for `session` model: delete from user actor's `session` table AND remove from organization actor's `sessionIndex`. 
#### Adapter construction @@ -188,14 +188,14 @@ session: { #### BetterAuth core tables -Four tables, all in the per-user actor's SQLite (except `verification` which goes on workspace actor): +Four tables, all in the per-user actor's SQLite (except `verification` which goes on organization actor): **`user`**: `id`, `name`, `email`, `emailVerified`, `image`, `createdAt`, `updatedAt` **`session`**: `id`, `token`, `userId`, `expiresAt`, `ipAddress?`, `userAgent?`, `createdAt`, `updatedAt` **`account`**: `id`, `userId`, `accountId` (GitHub numeric ID), `providerId` ("github"), `accessToken?`, `refreshToken?`, `scope?`, `createdAt`, `updatedAt` **`verification`**: `id`, `identifier`, `value`, `expiresAt`, `createdAt`, `updatedAt` -For `findUserByEmail`, a secondary index (email → user actor key) is needed on the workspace actor alongside `sessionIndex`. +For `findUserByEmail`, a secondary index (email → user actor key) is needed on the organization actor alongside `sessionIndex`. ## Implementation Plan @@ -210,12 +210,12 @@ Research confirms: 1. **Prototype the adapter + user actor end-to-end** — wire up `createAdapterFactory` with a minimal actor-routed implementation. Confirm that BetterAuth's GitHub OAuth flow completes successfully with user/session/account records landing in the correct per-user actor's SQLite. 2. **Verify `findOne` for session model** — confirm the `where` clause BetterAuth passes for session lookup includes the `token` field (not just `id`), so the adapter can route via `sessionIndex` keyed by token. -3. **Measure cookie-cached vs uncached request latency** — confirm that with cookie caching enabled, the adapter is not called on every request, and that the uncached fallback (workspace actor index → user actor → session table) is acceptable. +3. 
**Measure cookie-cached vs uncached request latency** — confirm that with cookie caching enabled, the adapter is not called on every request, and that the uncached fallback (organization actor index → user actor → session table) is acceptable. ### Phase 1: User actor + adapter infrastructure (no behavior change) 1. **Install `better-auth` package** in `packages/backend`. -2. **Define `UserActor`** with actor key `["ws", workspaceId, "user", githubNumericId]`. Include BetterAuth's required tables (`user`, `session`, `account`) plus app-specific tables in its schema. +2. **Define `UserActor`** with actor key `["ws", organizationId, "user", githubNumericId]`. Include BetterAuth's required tables (`user`, `session`, `account`) plus app-specific tables in its schema. 3. **Create `userProfiles` table** in user actor schema: ``` userProfiles @@ -237,7 +237,7 @@ Research confirms: ├── createdAt (integer) ├── updatedAt (integer) ``` -5. **Create `sessionIndex` and `emailIndex` tables** on the app-shell workspace actor: +5. **Create `sessionIndex` and `emailIndex` tables** on the app-shell organization actor: ``` sessionIndex ├── sessionId (text, PK) @@ -256,7 +256,7 @@ Research confirms: ### Phase 2: Migrate OAuth flow to BetterAuth 1. **Replace `startAppGithubAuth`** — delegate to BetterAuth's GitHub OAuth initiation instead of hand-rolling `buildAuthorizeUrl` + `oauthState` + `oauthStateExpiresAt`. -2. **Replace `completeAppGithubAuth`** — delegate to BetterAuth's callback handler. BetterAuth creates/updates the user record in the user actor and creates a signed session. The adapter writes to `sessionIndex` on the workspace actor. +2. **Replace `completeAppGithubAuth`** — delegate to BetterAuth's callback handler. BetterAuth creates/updates the user record in the user actor and creates a signed session. The adapter writes to `sessionIndex` on the organization actor. 3. 
**After BetterAuth callback completes**, populate `userProfiles` in the user actor with app-specific fields and enqueue the slow org sync (same background workflow pattern as today). 4. **Replace `signOutApp`** — delegate to BetterAuth session invalidation. Adapter removes entry from `sessionIndex`. 5. **Update `resolveSessionId`** in `index.ts` — validate the session via BetterAuth (which routes through the adapter → `sessionIndex` → user actor). BetterAuth verifies the signature and checks expiration. @@ -288,18 +288,18 @@ Research confirms: ## Constraints - **Actor-routed adapter.** BetterAuth does not natively support per-user actor databases. The custom adapter must route every DB operation to the correct actor. This adds a layer of indirection and latency (actor handle resolution + message) on adapter calls. -- **Session index cost is mitigated by cookie caching.** With `cookieCache` enabled, BetterAuth validates sessions from a signed cookie on most requests — the adapter (and thus the `sessionIndex` lookup + user actor round-trip) is only called when the cache expires or on writes. Without caching, every authenticated request would hit the workspace actor's `sessionIndex` table then the user actor. -- **Two-actor write on session create/destroy.** Creating or destroying a session requires writing to both the user actor (BetterAuth's `session` table) and the workspace actor (`sessionIndex`). These must be consistent — if the user actor write succeeds but the index write fails, the session exists but is unreachable. +- **Session index cost is mitigated by cookie caching.** With `cookieCache` enabled, BetterAuth validates sessions from a signed cookie on most requests — the adapter (and thus the `sessionIndex` lookup + user actor round-trip) is only called when the cache expires or on writes. Without caching, every authenticated request would hit the organization actor's `sessionIndex` table then the user actor. 
+- **Two-actor write on session create/destroy.** Creating or destroying a session requires writing to both the user actor (BetterAuth's `session` table) and the organization actor (`sessionIndex`). These must be consistent — if the user actor write succeeds but the index write fails, the session exists but is unreachable. - **Background org sync pattern must be preserved.** The fast-path/slow-path split (`initGithubSession` returns immediately, `syncGithubOrganizations` runs in workflow queue) is critical for avoiding proxy timeout retries. BetterAuth handles the OAuth exchange, but the org sync stays as a background workflow. - **`GitHubAppClient` is still needed.** BetterAuth replaces the OAuth user-auth flow, but installation tokens, webhook verification, repo listing, and org listing are GitHub App operations that BetterAuth does not cover. - **User ID migration.** Changing user IDs from `user-${slugify(login)}` to GitHub numeric IDs affects `organizationMembers`, `seatAssignments`, and any cross-actor references to user IDs. Existing data needs a migration path. -- **`findUserByEmail` requires a secondary index.** BetterAuth sometimes looks up users by email (e.g., account linking). An `emailIndex` table on the workspace actor is needed. This must be kept in sync with the user actor's email field. +- **`findUserByEmail` requires a secondary index.** BetterAuth sometimes looks up users by email (e.g., account linking). An `emailIndex` table on the organization actor is needed. This must be kept in sync with the user actor's email field. ## Risk Assessment - **Adapter call context — RESOLVED.** Research confirms BetterAuth adapter methods are plain async functions with no request context dependency. The adapter closes over the RivetKit registry at init time and resolves actor handles on demand. No ambient `c` context needed. 
- **Hot-path latency — MITIGATED.** Cookie caching (`cookieCache` with `strategy: "compact"`) means most authenticated requests validate the session from a signed cookie without calling the adapter at all. The adapter (and thus the actor round-trip) is only hit when the cache expires (configurable, e.g., every 5 minutes) or on writes. This makes the session index + user actor lookup acceptable. -- **Two-actor consistency.** Session create/destroy touches two actors (user actor + workspace index). If either write fails, the system is in an inconsistent state. Recommended: write index first, then user actor. A dangling index entry pointing to a nonexistent session is benign — BetterAuth treats it as "session not found" and the user just re-authenticates. +- **Two-actor consistency.** Session create/destroy touches two actors (user actor + organization index). If either write fails, the system is in an inconsistent state. Recommended: write index first, then user actor. A dangling index entry pointing to a nonexistent session is benign — BetterAuth treats it as "session not found" and the user just re-authenticates. - **Cookie vs header auth.** BetterAuth defaults to HTTP-only cookies (`better-auth.session_token`). The current system uses a custom `x-foundry-session` header with `localStorage`. BetterAuth supports `bearer` token mode for programmatic clients via its `bearer` plugin. Enable both for browser + API access. - **Dev bootstrap flow.** `bootstrapAppGithubSession` bypasses the normal OAuth flow for local development. BetterAuth supports programmatic session creation via its internal adapter — the dev path can call the adapter's `create` method directly for the `session` and `account` models. - **Actor lifecycle for users.** User actors are long-lived but low-traffic. RivetKit will idle/unload them. With cookie caching, cold-start only happens when the cache expires — not on every request. Acceptable. 
diff --git a/foundry/research/specs/async-action-fixes/README.md b/foundry/research/specs/async-action-fixes/README.md index 1dae650..a26fd0e 100644 --- a/foundry/research/specs/async-action-fixes/README.md +++ b/foundry/research/specs/async-action-fixes/README.md @@ -19,7 +19,7 @@ The governing policy now lives in `foundry/CLAUDE.md`: - Backend actor entry points live under `foundry/packages/backend/src/actors`. - Provider-backed long-running work lives under `foundry/packages/backend/src/providers`. - The main UI consumers are: - - `foundry/packages/frontend/src/components/workspace-dashboard.tsx` + - `foundry/packages/frontend/src/components/organization-dashboard.tsx` - `foundry/packages/frontend/src/components/mock-layout.tsx` - `foundry/packages/client/src/remote/workbench-client.ts` - Existing non-blocking examples already exist in app-shell GitHub auth/import flows. Use those as the reference pattern for request returns plus background completion. @@ -32,7 +32,7 @@ The governing policy now lives in `foundry/CLAUDE.md`: 4. `06-daytona-provisioning-staged-background-flow.md` 5. App shell realtime subscription work from `00-end-to-end-async-realtime-plan.md` 6. `02-repo-overview-from-cached-projection.md` -7. Workspace summary projection work from `00-end-to-end-async-realtime-plan.md` +7. Organization summary projection work from `00-end-to-end-async-realtime-plan.md` 8. `04-workbench-session-creation-without-inline-provisioning.md` 9. `05-workbench-snapshot-from-derived-state.md` 10. Task-detail direct subscription work from `00-end-to-end-async-realtime-plan.md` @@ -42,7 +42,7 @@ The governing policy now lives in `foundry/CLAUDE.md`: - Runtime hardening and the first async workflow items remove the highest-risk correctness and timeout issues first. - App shell realtime is a smaller migration than the workbench and removes the current polling loop early. 
-- Workspace summary and task-detail subscription work are easier once long-running mutations already report durable background state. +- Organization summary and task-detail subscription work are easier once long-running mutations already report durable background state. - Auth simplification is important, but it should not block the snapshot/polling/runtime fixes. ## Fresh Agent Checklist diff --git a/foundry/research/specs/frontend.md b/foundry/research/specs/frontend.md index 2eb4ce5..6c384ae 100644 --- a/foundry/research/specs/frontend.md +++ b/foundry/research/specs/frontend.md @@ -24,8 +24,8 @@ be thorough and careful with your implementation. this is going to be the ground - left sidebar is similar to the hf switch ui: - list each repo - under each repo, show all of the tasks - - you should see all tasks for the entire workspace here grouped by repo -- the main content area shows the current workspace + - you should see all tasks for the entire organization here grouped by repo +- the main content area shows the current organization - there is a main agent session for the main agent that's making the change, so show this by default - build a ui for interacting with sessions - see ~/sandbox-agent/frontend/packages/inspector/ for reference ui diff --git a/foundry/research/specs/github-data-actor.md b/foundry/research/specs/github-data-actor.md index d3af6ab..75a71a1 100644 --- a/foundry/research/specs/github-data-actor.md +++ b/foundry/research/specs/github-data-actor.md @@ -4,7 +4,7 @@ Replace the per-repo polling PR sync actor (`ProjectPrSyncActor`) and per-repo PR cache (`prCache` table) with a single organization-scoped `github-state` actor that owns all GitHub data (repos, PRs, members). All GitHub state updates flow exclusively through webhooks, with a one-shot full sync on initial connection. Manual reload actions are exposed per-entity (org, repo, PR) for recovery from missed webhooks.
-Open PRs are surfaced in the left sidebar alongside tasks via a unified workspace interest topic, with lazy task/sandbox creation when a user clicks on a PR. +Open PRs are surfaced in the left sidebar alongside tasks via a unified organization subscription topic, with lazy task/sandbox creation when a user clicks on a PR. ## Reference Implementation @@ -18,7 +18,7 @@ Use `git show 0aca2c7:<path>` to read the reference files. Adapt (don't copy blindly) ## Constraints -1. **No polling.** Delete `ProjectPrSyncActor` (`actors/project-pr-sync/`), all references to it in handles/keys/index, and the `prCache` table in `ProjectActor`'s DB schema. Remove `prSyncStatus`/`prSyncAt` from `getRepoOverview`. +1. **No polling.** Delete `ProjectPrSyncActor` (`actors/repository-pr-sync/`), all references to it in handles/keys/index, and the `prCache` table in `RepositoryActor`'s DB schema. Remove `prSyncStatus`/`prSyncAt` from `getRepoOverview`. 2. **Keep `ProjectBranchSyncActor`.** This polls the local git clone (not GitHub API) and is the sandbox git status mechanism. It stays. 3. **Webhooks are the sole live update path.** The only GitHub API calls happen during: - Initial full sync on org connection/installation @@ -72,16 +72,16 @@ Replace the current TODO at `app-shell.ts:1521` with dispatch logic adapted from When `github-state` receives a PR update (webhook or manual reload), it should: 1. Update its own `github_pull_requests` table -2. Call `notifyOrganizationUpdated()` → which broadcasts `workspaceUpdated` to connected clients -3. If the PR branch matches an existing task's branch, update that task's `pullRequest` summary in the workspace actor +2. Call `notifyOrganizationUpdated()` → which broadcasts `organizationUpdated` to connected clients +3.
If the PR branch matches an existing task's branch, update that task's `pullRequest` summary in the organization actor -### Workspace Summary Changes +### Organization Summary Changes -Extend `WorkspaceSummarySnapshot` to include open PRs: +Extend `OrganizationSummarySnapshot` to include open PRs: ```typescript -export interface WorkspaceSummarySnapshot { - workspaceId: string; +export interface OrganizationSummarySnapshot { + organizationId: string; repos: WorkbenchRepoSummary[]; taskSummaries: WorkbenchTaskSummary[]; openPullRequests: WorkbenchOpenPrSummary[]; // NEW @@ -103,13 +103,13 @@ export interface WorkbenchOpenPrSummary { } ``` -The workspace actor fetches open PRs from the `github-state` actor when building the summary snapshot. PRs that already have an associated task (matched by branch name) should be excluded from `openPullRequests` (they already appear in `taskSummaries` with their `pullRequest` field populated). +The organization actor fetches open PRs from the `github-state` actor when building the summary snapshot. PRs that already have an associated task (matched by branch name) should be excluded from `openPullRequests` (they already appear in `taskSummaries` with their `pullRequest` field populated). ### Interest Manager -The `workspace` interest topic already returns `WorkspaceSummarySnapshot`. Adding `openPullRequests` to that type means the sidebar automatically gets PR data without a new topic. +The `organization` subscription topic already returns `OrganizationSummarySnapshot`. Adding `openPullRequests` to that type means the sidebar automatically gets PR data without a new topic. 
-`workspaceUpdated` events should include a new variant for PR changes: +`organizationUpdated` events should include a new variant for PR changes: ```typescript { type: "pullRequestUpdated", pullRequest: WorkbenchOpenPrSummary } { type: "pullRequestRemoved", prId: string } @@ -117,7 +117,7 @@ The `workspace` interest topic already returns `WorkspaceSummarySnapshot`. Addin ### Sidebar Changes -The left sidebar currently renders `projects: ProjectSection[]` where each project has `tasks: Task[]`. Extend this to include open PRs as lightweight entries within each project section: +The left sidebar currently renders `repositories: RepositorySection[]` where each repository has `tasks: Task[]`. Extend this to include open PRs as lightweight entries within each repository section: - Open PRs appear in the same list as tasks, sorted by `updatedAtMs` - PRs should be visually distinct: show PR icon instead of task indicator, display `#number` and author @@ -134,7 +134,7 @@ Add a "three dots" menu button in the top-right of the sidebar header. Dropdown - **Reload all PRs** — calls `githubState.fullSync({ force: true })` (convenience shortcut) For per-repo and per-PR reload, add context menu options: -- Right-click a project header → "Reload repository" +- Right-click a repository header → "Reload repository" - Right-click a PR entry → "Reload pull request" These call the corresponding `reloadRepository`/`reloadPullRequest` actions on the `github-state` actor. @@ -143,27 +143,27 @@ These call the corresponding `reloadRepository`/`reloadPullRequest` actions on t Files/code to remove: -1. `foundry/packages/backend/src/actors/project-pr-sync/` — entire directory -2. `foundry/packages/backend/src/actors/project/db/schema.ts` — `prCache` table -3. `foundry/packages/backend/src/actors/project/actions.ts` — `applyPrSyncResultMutation`, `getPullRequestForBranch` (moves to github-state), `prSyncStatus`/`prSyncAt` from `getRepoOverview` +1. 
`foundry/packages/backend/src/actors/repository-pr-sync/` — entire directory +2. `foundry/packages/backend/src/actors/repository/db/schema.ts` — `prCache` table +3. `foundry/packages/backend/src/actors/repository/actions.ts` — `applyPrSyncResultMutation`, `getPullRequestForBranch` (moves to github-state), `prSyncStatus`/`prSyncAt` from `getRepoOverview` 4. `foundry/packages/backend/src/actors/handles.ts` — `getOrCreateProjectPrSync`, `selfProjectPrSync` 5. `foundry/packages/backend/src/actors/keys.ts` — any PR sync key helper -6. `foundry/packages/backend/src/actors/index.ts` — `projectPrSync` import and registration -7. All call sites in `ProjectActor` that spawn or call the PR sync actor (`initProject`, `refreshProject`) +6. `foundry/packages/backend/src/actors/index.ts` — `repositoryPrSync` import and registration +7. All call sites in `RepositoryActor` that spawn or call the PR sync actor (`initProject`, `refreshProject`) ## Migration Path -The `prCache` table in `ProjectActor`'s DB can simply be dropped — no data migration needed since the `github-state` actor will re-fetch everything on its first `fullSync`. Existing task `pullRequest` fields are populated from the github-state actor going forward. +The `prCache` table in `RepositoryActor`'s DB can simply be dropped — no data migration needed since the `github-state` actor will re-fetch everything on its first `fullSync`. Existing task `pullRequest` fields are populated from the github-state actor going forward. ## Implementation Order 1. Create `github-state` actor (adapt from checkpoint `0aca2c7`) 2. Wire up actor in registry, handles, keys 3. Implement webhook dispatch in app-shell (replace TODO) -4. Delete `ProjectPrSyncActor` and `prCache` from project actor +4. Delete `ProjectPrSyncActor` and `prCache` from repository actor 5. Add manual reload actions to github-state -6. Extend `WorkspaceSummarySnapshot` with `openPullRequests` -7. Wire through interest manager + workspace events +6. 
Extend `OrganizationSummarySnapshot` with `openPullRequests` +7. Wire through subscription manager + organization events 8. Update sidebar to render open PRs 9. Add three-dots menu with reload options 10. Update task creation flow for lazy PR→task conversion diff --git a/foundry/research/specs/remove-local-git-clone.md b/foundry/research/specs/remove-local-git-clone.md new file mode 100644 index 0000000..261ffc2 --- /dev/null +++ b/foundry/research/specs/remove-local-git-clone.md @@ -0,0 +1,381 @@ +# Remove Local Git Clone from Backend + +## Goal + +The Foundry backend stores zero git state. No clones, no refs, no working trees, no git-spice. All git operations execute inside sandboxes. Repo metadata (branches, default branch, PRs) comes from GitHub API/webhooks which we already have. + +## Terminology renames + +Rename Foundry domain terms across the entire `foundry/` directory. All changes are breaking — no backwards compatibility needed. Execute as separate atomic commits in this order. `pnpm -w typecheck && pnpm -w build && pnpm -w test` must pass between each. + +| New name | Old name (current code) | +|---|---| +| **Organization** | Workspace | +| **Repository** | Project | +| **Session** (not "tab") | Tab / Session (mixed) | +| **Subscription** | Interest | +| **SandboxProviderId** | ProviderId | + +### Rename 1: `interest` → `subscription` + +The realtime pub/sub system in `client/src/interest/`. Rename the directory, all types (`InterestManager` → `SubscriptionManager`, `MockInterestManager` → `MockSubscriptionManager`, `RemoteInterestManager` → `RemoteSubscriptionManager`, `DebugInterestTopic` → `DebugSubscriptionTopic`), the `useInterest` hook → `useSubscription`, and all imports in client + frontend. Rename `frontend/src/lib/interest.ts` → `subscription.ts`. Rename test file `client/test/interest-manager.test.ts` → `subscription-manager.test.ts`. + +### Rename 2: `tab` → `session` + +The UI "tab" concept is really a session. 
Rename `TabStrip` → `SessionStrip`, `tabId` → `sessionId`, `closeTab` → `closeSession`, `addTab` → `addSession`, `WorkbenchAgentTab` → `WorkbenchAgentSession`, `TaskWorkbenchTabInput` → `TaskWorkbenchSessionInput`, `TaskWorkbenchAddTabResponse` → `TaskWorkbenchAddSessionResponse`, and all related props/DOM attrs (`activeTabId` → `activeSessionId`, `onSwitchTab` → `onSwitchSession`, `onCloseTab` → `onCloseSession`, `data-tab` → `data-session`, `editingSessionTabId` → `editingSessionId`). Rename file `tab-strip.tsx` → `session-strip.tsx`. **Leave "diff tabs" alone** (`isDiffTab`, `diffTabId`) — those are file viewer panes, a different concept. + +### Rename 3: `ProviderId` → `SandboxProviderId` + +The `ProviderId` type (`"e2b" | "local"`) is specifically a sandbox provider. Rename the type (`ProviderId` → `SandboxProviderId`), schema (`ProviderIdSchema` → `SandboxProviderIdSchema`), and all `providerId` fields that refer to sandbox hosting (`CreateTaskInput`, `TaskRecord`, `SwitchResult`, `WorkbenchSandboxSummary`, task DB schema `task.provider_id` → `sandbox_provider_id`, `task_sandboxes.provider_id` → `sandbox_provider_id`, topic params). Rename config key `providers` → `sandboxProviders`. DB column renames need Drizzle migrations. + +**Do NOT rename**: `model.provider` (AI model provider), `auth_account_index.provider_id` (auth provider), `providerAgent()` (model→agent mapping), `WorkbenchModelGroup.provider`. + +Also **delete the `providerProfiles` table entirely** — it's written but never read (dead code). Remove the table definition from the organization actor DB schema, all writes in organization actions, and the `refreshProviderProfiles` queue command/handler/interface. + +### Rename 4: `project` → `repository` + +The "project" actor/entity is a git repository. 
Rename: +- Actor directory `actors/project/` → `actors/repository/` +- Actor directory `actors/project-branch-sync/` → `actors/repository-branch-sync/` +- Actor registry keys `project` → `repository`, `projectBranchSync` → `repositoryBranchSync` +- Actor name string `"Project"` → `"Repository"` +- All functions: `projectKey` → `repositoryKey`, `getOrCreateProject` → `getOrCreateRepository`, `getProject` → `getRepository`, `selfProject` → `selfRepository`, `projectBranchSyncKey` → `repositoryBranchSyncKey`, `projectPrSyncKey` → `repositoryPrSyncKey`, `projectWorkflowQueueName` → `repositoryWorkflowQueueName` +- Types: `ProjectInput` → `RepositoryInput`, `WorkbenchProjectSection` → `WorkbenchRepositorySection`, `PROJECT_QUEUE_NAMES` → `REPOSITORY_QUEUE_NAMES` +- Queue names: `"project.command.*"` → `"repository.command.*"` +- Actor key strings: change `"project"` to `"repository"` in key arrays (e.g. `["ws", id, "project", repoId]` → `["org", id, "repository", repoId]`) +- Frontend: `projects` → `repositories`, `collapsedProjects` → `collapsedRepositories`, `hoveredProjectId` → `hoveredRepositoryId`, `PROJECT_COLORS` → `REPOSITORY_COLORS`, `data-project-*` → `data-repository-*`, `groupWorkbenchProjects` → `groupWorkbenchRepositories` +- Client keys: `projectKey()` → `repositoryKey()`, `projectBranchSyncKey()` → `repositoryBranchSyncKey()`, `projectPrSyncKey()` → `repositoryPrSyncKey()` + +### Rename 5: `workspace` → `organization` + +The "workspace" is really an organization. 
Rename: +- Actor directory `actors/workspace/` → `actors/organization/` +- Actor registry key `workspace` → `organization` +- Actor name string `"Workspace"` → `"Organization"` +- All types: `WorkspaceIdSchema` → `OrganizationIdSchema`, `WorkspaceId` → `OrganizationId`, `WorkspaceEvent` → `OrganizationEvent`, `WorkspaceSummarySnapshot` → `OrganizationSummarySnapshot`, `WorkspaceUseInputSchema` → `OrganizationUseInputSchema`, `WorkspaceHandle` → `OrganizationHandle`, `WorkspaceTopicParams` → `OrganizationTopicParams` +- All `workspaceId` fields/params → `organizationId` (~20+ schemas in contracts.ts, plus topic params, task snapshot, etc.) +- `FoundryOrganization.workspaceId` → `FoundryOrganization.organizationId` (or just `id`) +- All functions: `workspaceKey` → `organizationKey`, `getOrCreateWorkspace` → `getOrCreateOrganization`, `selfWorkspace` → `selfOrganization`, `resolveWorkspaceId` → `resolveOrganizationId`, `defaultWorkspace` → `defaultOrganization`, `workspaceWorkflowQueueName` → `organizationWorkflowQueueName`, `WORKSPACE_QUEUE_NAMES` → `ORGANIZATION_QUEUE_NAMES` +- Actor key strings: change `"ws"` to `"org"` in key arrays (e.g. 
`["ws", id]` → `["org", id]`) +- Queue names: `"workspace.command.*"` → `"organization.command.*"` +- Topic keys: `"workspace:${id}"` → `"organization:${id}"`, event `"workspaceUpdated"` → `"organizationUpdated"` +- Methods: `connectWorkspace` → `connectOrganization`, `getWorkspaceSummary` → `getOrganizationSummary`, `useWorkspace` → `useOrganization` +- Files: `shared/src/workspace.ts` → `organization.ts`, `backend/src/config/workspace.ts` → `organization.ts` +- Config keys: `config.workspace.default` → `config.organization.default` +- URL paths: `/workspaces/$workspaceId` → `/organizations/$organizationId` +- UI strings: `"Loading workspace..."` → `"Loading organization..."` +- Tests: rename `workspace-*.test.ts` files, update `workspaceSnapshot()` → `organizationSnapshot()`, `workspaceId: "ws-1"` → `organizationId: "org-1"` + +### After all renames: update CLAUDE.md files + +Update `foundry/CLAUDE.md` and `foundry/packages/backend/CLAUDE.md` to use new terminology throughout (organization instead of workspace, repository instead of project, etc.). The rest of this spec already uses the new names. 
+ +## What gets deleted + +### Entire directories/files + +| Path (relative to `packages/backend/src/`) | Reason | +|---|---| +| `integrations/git/index.ts` | All local git operations | +| `integrations/git-spice/index.ts` | Stack management via git-spice | +| `actors/repository-branch-sync/` (currently `project-branch-sync/`) | Polling actor that fetches + reads local clone every 5s | +| `actors/project-pr-sync/` | Empty directory, already dead | +| `actors/repository/stack-model.ts` (currently `project/stack-model.ts`) | Stack parent/sort model (git-spice dependent) | +| `test/git-spice.test.ts` | Tests for deleted git-spice integration | +| `test/git-validate-remote.test.ts` | Tests for deleted git validation | +| `test/stack-model.test.ts` | Tests for deleted stack model | + +### Driver interfaces removed from `driver.ts` + +- `GitDriver` — entire interface deleted +- `StackDriver` — entire interface deleted +- `BackendDriver.git` — removed +- `BackendDriver.stack` — removed +- All imports from `integrations/git/` and `integrations/git-spice/` + +`BackendDriver` keeps only `github` and `tmux`. 
+ +### Test driver cleanup (`test/helpers/test-driver.ts`) + +- Delete `createTestGitDriver()` +- Delete `createTestStackDriver()` +- Remove `git` and `stack` from `createTestDriver()` + +### Docker volume removed (`compose.dev.yaml`, `compose.preview.yaml`) + +- Remove `foundry_git_repos` volume and its mount at `/root/.local/share/foundry/repos` +- Remove the CLAUDE.md note about the repos volume + +### Actor registry cleanup (`actors/index.ts`, `actors/keys.ts`, `actors/handles.ts`) + +- Remove `RepositoryBranchSyncActor` (currently `ProjectBranchSyncActor`) registration +- Remove `repositoryBranchSyncKey` (currently `projectBranchSyncKey`) +- Remove branch sync handle helpers + +### Client key cleanup (`packages/client/src/keys.ts`, `packages/client/test/keys.test.ts`) + +- Remove `repositoryBranchSyncKey` (currently `projectBranchSyncKey`) if exported + +### Dead code removal: `providerProfiles` table + +The `providerProfiles` table in the organization actor (currently workspace actor) DB is written but never read. Delete: + +- Table definition in `actors/organization/db/schema.ts` (currently `workspace/db/schema.ts`) +- All writes in `actors/organization/actions.ts` (currently `workspace/actions.ts`) +- The `refreshProviderProfiles` queue command and handler +- The `RefreshProviderProfilesCommand` interface +- Add a DB migration to drop the `provider_profiles` table + +### Ensure pattern cleanup (`actors/repository/actions.ts`, currently `project/actions.ts`) + +Delete all `ensure*` functions that block action handlers on external I/O or cross-actor fan-out: + +- **`ensureLocalClone()`** — Delete (git clone removal). +- **`ensureProjectReady()`** / **`ensureRepositoryReady()`** — Delete (wrapper around `ensureLocalClone` + sync actors). +- **`ensureProjectReadyForRead()`** / **`ensureRepositoryReadyForRead()`** — Delete (dispatches ensure with 10s wait on read path). 
+- **`ensureProjectSyncActors()`** / **`ensureRepositorySyncActors()`** — Delete (spawns branch sync actor which is being removed). +- **`forceProjectSync()`** / **`forceRepositorySync()`** — Delete (triggers branch sync actor). +- **`ensureTaskIndexHydrated()`** — Delete. This is the migration path from `HistoryActor` → `task_index` table. Since we assume fresh repositories, no migration needed. The task index is populated on write (`createTask` inserts the row). +- **`ensureTaskIndexHydratedForRead()`** — Delete (wrapper that dispatches `hydrateTaskIndex`). +- **`taskIndexHydrated` state flag** — Delete from repository actor state. + +The `ensureAskpassScript()` is fine — it's a fast local operation. + +### Dead schema tables and helpers (`actors/repository/db/schema.ts`, `actors/repository/actions.ts`) + +With the branch sync actor and git-spice stack operations deleted, these tables have no writer and should be removed: + +- **`branches` table** — populated by `RepositoryBranchSyncActor` from the local clone. Delete the table, its schema definition, and all reads from it (including `enrichTaskRecord` which reads `diffStat`, `hasUnpushed`, `conflictsWithMain`, `parentBranch` from this table). +- **`repoActionJobs` table** — populated by `runRepoStackAction()` for git-spice stack operations. Delete the table, its schema definition, and all helpers: `ensureRepoActionJobsTable()`, `writeRepoActionJob()`, `listRepoActionJobRows()`. + +## What gets modified + +### `actors/repository/actions.ts` (currently `project/actions.ts`) + +This is the biggest change. Current git operations in this file: + +1. **`createTaskMutation()`** — Currently calls `listLocalRemoteRefs` to check branch name conflicts against remote branches. Replace: branch conflict checking uses only the repository actor's `task_index` table (which branches are already taken by tasks). 
We don't need to check against remote branches — if the branch already exists on the remote, `git push` in the sandbox will handle it. +2. **`registerTaskBranch()`** — Currently does `fetch` + `remoteDefaultBaseRef` + `revParse` + git-spice stack tracking. Replace: default base branch comes from GitHub repo metadata (already stored from webhook/API at repo add time). SHA resolution is not needed at task creation — the sandbox handles it. Delete all git-spice stack tracking. +3. **`getRepoOverview()`** — Currently calls `listLocalRemoteRefs` + `remoteDefaultBaseRef` + `stack.available` + `stack.listStack`. Replace: branch data comes from GitHub API data we already store from webhooks (push/create/delete events feed branch state). Stack data is deleted. The overview returns branches from stored GitHub webhook data. +4. **`runRepoStackAction()`** — Delete entirely (all git-spice stack operations). +5. **All `normalizeBaseBranchName` imports from git-spice** — Inline or move to a simple utility if still needed. +6. **All `ensureTaskIndexHydrated*` / `ensureRepositoryReady*` call sites** — Remove. Read actions query the `task_index` table directly; if it's empty, it's empty. Write actions populate it on create. + +### `actors/repository/index.ts` (currently `project/index.ts`) + +- Remove local clone path from state/initialization +- Remove branch sync actor spawning +- Remove any `ensureLocalClone` calls in lifecycle + +### `actors/task/workbench.ts` + +- **`ensureSandboxRepo()` line 405**: Currently calls `driver.git.remoteDefaultBaseRef()` on the local clone. Replace: read default branch from repository actor state (which gets it from GitHub API/webhook data at repo add time). + +### `actors/organization/actions.ts` (currently `workspace/actions.ts`) + +- **`addRemote()` line 320**: Currently calls `driver.git.validateRemote()` which runs `git ls-remote`. Replace: validate via GitHub API — `GET /repos/{owner}/{repo}` returns 404 for invalid repos. 
We already parse the remote URL into owner/repo for GitHub operations. + +### `actors/keys.ts` / `actors/handles.ts` + +- Remove `repositoryBranchSyncKey` (currently `projectBranchSyncKey`) export +- Remove branch sync handle creation + +## What stays the same + +- `driver.github.*` — already uses GitHub API, no changes +- `driver.tmux.*` — unrelated, no changes +- `integrations/github/index.ts` — already GitHub API based, keeps working +- All sandbox execution (`executeInSandbox()`) — already correct pattern +- Webhook handlers for push/create/delete events — already feed GitHub data into backend + +## CLAUDE.md updates + +### `foundry/packages/backend/CLAUDE.md` + +Remove `RepositoryBranchSyncActor` (currently `ProjectBranchSyncActor`) from the actor hierarchy tree: + +```text +OrganizationActor +├─ HistoryActor(organization-scoped global feed) +├─ GithubDataActor +├─ RepositoryActor(repo) +│ └─ TaskActor(task) +│ ├─ TaskSessionActor(session) x N +│ │ └─ SessionStatusSyncActor(session) x 0..1 +│ └─ Task-local workbench state +└─ SandboxInstanceActor(sandboxProviderId, sandboxId) x N +``` + +Add to Ownership Rules: + +> - The backend stores no local git state. No clones, no refs, no working trees, no git-spice. Repo metadata (branches, default branch) comes from GitHub API and webhook events. All git operations that require a working tree execute inside sandboxes via `executeInSandbox()`. + +### `foundry/CLAUDE.md` + +Add a new section: + +```markdown +## Git State Policy + +- The backend stores **zero git state**. No local clones, no refs, no working trees, no git-spice. +- Repo metadata (branches, default branch, PRs) comes from GitHub API and webhook events already flowing into the system. +- All git operations that require a working tree (diff, push, conflict check, rev-parse) execute inside the task's sandbox via `executeInSandbox()`. +- Do not add local git clone paths, `git fetch`, `git for-each-ref`, or any direct git CLI calls to the backend. 
If you need git data, either read it from stored GitHub webhook/API data or run it in a sandbox. +- The `BackendDriver` has no `GitDriver` or `StackDriver`. Only `GithubDriver` and `TmuxDriver` remain. +- git-spice is not used anywhere in the system. +``` + +Remove from CLAUDE.md: + +> - Docker dev: `compose.dev.yaml` mounts a named volume at `/root/.local/share/foundry/repos` to persist backend-managed git clones across restarts. Code must still work if this volume is not present (create directories as needed). + +## Concerns + +1. **Concurrent agent work**: Another agent is currently modifying `workspace/actions.ts`, `project/actions.ts`, `task/workbench.ts`, `task/workflow/init.ts`, `task/workflow/queue.ts`, `driver.ts`, and `project-branch-sync/index.ts`. Those changes are adding `listLocalRemoteRefs` to the driver and removing polling loops/timeouts. The git clone removal work will **delete** the code the other agent is modifying. Coordinate: let the other agent's changes land first, then this spec deletes the git integration entirely. + +2. **Rename ordering**: The rename spec (workspace→organization, project→repository, etc.) should ideally land **before** this spec is executed, so the file paths and identifiers match. If not, the implementing agent should map old names → new names using the table above. + +3. **`project-pr-sync/` directory**: This is already an empty directory. Delete it as part of cleanup. + +4. **`ensureRepoActionJobsTable()`**: The current spec mentions this should stay but the `repoActionJobs` table is being deleted. Updating: both the table and the ensure function should be deleted. + +## Validation + +After implementation, run: + +```bash +pnpm -w typecheck +pnpm -w build +pnpm -w test +``` + +Then restart the dev stack and run the main user flow end-to-end: + +```bash +just foundry-dev-down && just foundry-dev +``` + +Verify: +1. Add a repo to an organization +2. Create a task (should return immediately with taskId) +3. 
Task appears in sidebar with pending status +4. Task provisions and transitions to ready +5. Session is created and initial message is sent +6. Agent responds in the session transcript + +This must work against a real GitHub repo (`rivet-dev/sandbox-agent-testing`) with the dev environment credentials. + +### Codebase grep validation + +After implementation, verify no local git operations or git-spice references remain in the backend: + +```bash +# No local git CLI calls (excludes integrations/github which is GitHub API, not local git) +rg -l 'execFileAsync\("git"' foundry/packages/backend/src/ && echo "FAIL: local git CLI calls found" || echo "PASS" + +# No git-spice references +rg -l 'git.spice|gitSpice|git_spice' foundry/packages/backend/src/ && echo "FAIL: git-spice references found" || echo "PASS" + +# No GitDriver or StackDriver references +rg -l 'GitDriver|StackDriver' foundry/packages/backend/src/ && echo "FAIL: deleted driver interfaces still referenced" || echo "PASS" + +# No local clone path references +rg -l 'localPath|ensureCloned|ensureLocalClone|foundryRepoClonePath' foundry/packages/backend/src/ && echo "FAIL: local clone references found" || echo "PASS" + +# No branch sync actor references +rg -l 'BranchSync|branchSync|branch.sync' foundry/packages/backend/src/ && echo "FAIL: branch sync references found" || echo "PASS" + +# No deleted ensure patterns +rg -l 'ensureProjectReady|ensureTaskIndexHydrated|taskIndexHydrated' foundry/packages/backend/src/ && echo "FAIL: deleted ensure patterns found" || echo "PASS" + +# integrations/git/ and integrations/git-spice/ directories should not exist +ls foundry/packages/backend/src/integrations/git/index.ts 2>/dev/null && echo "FAIL: git integration not deleted" || echo "PASS" +ls foundry/packages/backend/src/integrations/git-spice/index.ts 2>/dev/null && echo "FAIL: git-spice integration not deleted" || echo "PASS" +``` + +All checks must pass before the change is considered complete. 
+ +### Rename verification + +After the rename spec has landed, verify no old names remain anywhere in `foundry/`: + +```bash +# --- workspace → organization --- +# No "WorkspaceActor", "WorkspaceEvent", "WorkspaceId", "WorkspaceSummary", etc. (exclude pnpm-workspace.yaml, node_modules, .turbo) +rg -l 'WorkspaceActor|WorkspaceEvent|WorkspaceId|WorkspaceSummary|WorkspaceHandle|WorkspaceUseInput|WorkspaceTopicParams' foundry/packages/ && echo "FAIL: workspace type references remain" || echo "PASS" + +# No workspaceId in domain code (exclude pnpm-workspace, node_modules, .turbo, this spec file) +rg -l 'workspaceId' foundry/packages/ --glob '!node_modules' --glob '!*.md' && echo "FAIL: workspaceId references remain" || echo "PASS" + +# No workspace actor directory +ls foundry/packages/backend/src/actors/workspace/ 2>/dev/null && echo "FAIL: workspace actor directory not renamed" || echo "PASS" + +# No workspaceKey function +rg 'workspaceKey|selfWorkspace|getOrCreateWorkspace|resolveWorkspaceId|defaultWorkspace' foundry/packages/ --glob '!node_modules' && echo "FAIL: workspace function references remain" || echo "PASS" + +# No "ws" actor key string (the old key prefix) +rg '"\\"ws\\""|\["ws"' foundry/packages/ --glob '!node_modules' && echo "FAIL: old 'ws' actor key strings remain" || echo "PASS" + +# No workspace queue names +rg 'workspace\.command\.' foundry/packages/ --glob '!node_modules' --glob '!*.md' && echo "FAIL: workspace queue names remain" || echo "PASS" + +# No /workspaces/ URL paths +rg '/workspaces/' foundry/packages/ --glob '!node_modules' --glob '!*.md' && echo "FAIL: /workspaces/ URL paths remain" || echo "PASS" + +# No config.workspace +rg 'config\.workspace' foundry/packages/ --glob '!node_modules' --glob '!*.md' && echo "FAIL: config.workspace references remain" || echo "PASS" + +# --- project → repository --- +# No ProjectActor, ProjectInput, ProjectSection, etc. 
+rg -l 'ProjectActor|ProjectInput|ProjectSection|PROJECT_QUEUE|PROJECT_COLORS' foundry/packages/ --glob '!node_modules' && echo "FAIL: project type references remain" || echo "PASS" + +# No project actor directory +ls foundry/packages/backend/src/actors/project/ 2>/dev/null && echo "FAIL: project actor directory not renamed" || echo "PASS" + +# No projectKey, selfProject, getOrCreateProject, etc. +rg 'projectKey|selfProject|getOrCreateProject|getProject\b|projectBranchSync|projectPrSync|projectWorkflow' foundry/packages/ --glob '!node_modules' && echo "FAIL: project function references remain" || echo "PASS" + +# No "project" actor key string +rg '"\\"project\\""|\[".*"project"' foundry/packages/ --glob '!node_modules' --glob '!*.md' && echo "FAIL: old project actor key strings remain" || echo "PASS" + +# No project.command.* queue names +rg 'project\.command\.' foundry/packages/ --glob '!node_modules' --glob '!*.md' && echo "FAIL: project queue names remain" || echo "PASS" + +# --- tab → session --- +# No WorkbenchAgentTab, TaskWorkbenchTabInput, TabStrip, tabId (in workbench context) +rg -l 'WorkbenchAgentTab|TaskWorkbenchTabInput|TaskWorkbenchAddTabResponse|TabStrip' foundry/packages/ --glob '!node_modules' && echo "FAIL: tab type references remain" || echo "PASS" + +# No tabId (should be sessionId now) +rg '\btabId\b' foundry/packages/ --glob '!node_modules' && echo "FAIL: tabId references remain" || echo "PASS" + +# No tab-strip.tsx file +ls foundry/packages/frontend/src/components/mock-layout/tab-strip.tsx 2>/dev/null && echo "FAIL: tab-strip.tsx not renamed" || echo "PASS" + +# No closeTab/addTab (should be closeSession/addSession) +rg '\bcloseTab\b|\baddTab\b' foundry/packages/ --glob '!node_modules' && echo "FAIL: closeTab/addTab references remain" || echo "PASS" + +# --- interest → subscription --- +# No InterestManager, useInterest, etc. 
+rg -l 'InterestManager|useInterest|DebugInterestTopic' foundry/packages/ --glob '!node_modules' && echo "FAIL: interest type references remain" || echo "PASS" + +# No interest/ directory +ls foundry/packages/client/src/interest/ 2>/dev/null && echo "FAIL: interest directory not renamed" || echo "PASS" + +# --- ProviderId → SandboxProviderId --- +# No bare ProviderId/ProviderIdSchema (but allow sandboxProviderId, model.provider, auth provider_id) +rg '\bProviderIdSchema\b|\bProviderId\b' foundry/packages/shared/src/contracts.ts && echo "FAIL: bare ProviderId in contracts.ts" || echo "PASS" + +# No bare providerId for sandbox context (check task schema) +rg '\bproviderId\b' foundry/packages/backend/src/actors/task/db/schema.ts && echo "FAIL: bare providerId in task schema" || echo "PASS" + +# No providerProfiles table (dead code, should be deleted) +rg 'providerProfiles|provider_profiles|refreshProviderProfiles' foundry/packages/ --glob '!node_modules' --glob '!*.md' && echo "FAIL: providerProfiles references remain" || echo "PASS" + +# --- Verify new names exist --- +# Note: `grep .` propagates "no matches" through the pipeline — `head` alone always +# exits 0, which would make the `|| echo WARN` branch unreachable. +rg -l 'OrganizationActor|OrganizationEvent|OrganizationId' foundry/packages/ --glob '!node_modules' | head -3 | grep . || echo "WARN: new organization names not found" +rg -l 'RepositoryActor|RepositoryInput|RepositorySection' foundry/packages/ --glob '!node_modules' | head -3 | grep . || echo "WARN: new repository names not found" +rg -l 'SubscriptionManager|useSubscription' foundry/packages/ --glob '!node_modules' | head -3 | grep . || echo "WARN: new subscription names not found" +rg -l 'SandboxProviderIdSchema|SandboxProviderId' foundry/packages/ --glob '!node_modules' | head -3 | grep . || echo "WARN: new sandbox provider names not found" +``` + +All checks must pass. False positives from markdown files, comments referencing old names in migration context, or `node_modules` should be excluded via the globs above. 
diff --git a/foundry/research/specs/rivetkit-opentui-migration-plan.md b/foundry/research/specs/rivetkit-opentui-migration-plan.md index d078c9a..78acccc 100644 --- a/foundry/research/specs/rivetkit-opentui-migration-plan.md +++ b/foundry/research/specs/rivetkit-opentui-migration-plan.md @@ -6,19 +6,19 @@ Date: 2026-02-08 ## Locked Decisions 1. Entire rewrite is TypeScript. All Rust code will be deleted at cutover. -2. Repo stays a single monorepo, managed with `pnpm` workspaces + Turborepo. +2. Repo stays a single monorepo, managed with `pnpm` workspaces + Turborepo. 3. `core` package is renamed to `shared`. 4. `integrations` and `providers` live inside the backend package (not top-level packages). 5. Rivet-backed state uses SQLite + Drizzle only. 6. RivetKit dependencies come from local `../rivet` builds only; no published npm packages. -7. Everything is workspace-scoped. Workspace is configurable from CLI. -8. `ControlPlaneActor` is renamed to `WorkspaceActor` (workspace coordinator). -9. Every actor key is prefixed by workspace. -10. `--workspace` is optional; commands resolve workspace via flag -> config default -> `default`. +7. Everything is organization-scoped. Organization is configurable from CLI. +8. `ControlPlaneActor` is renamed to `OrganizationActor` (organization coordinator). +9. Every actor key is prefixed by organization. +10. `--organization` is optional; commands resolve organization via flag -> config default -> `default`. 11. RivetKit local dependency wiring is `link:`-based. 12. Keep the existing config file path (`~/.config/foundry/config.toml`) and evolve keys in place. 13. `.agents` and skill files are in scope for migration updates. -14. Parent orchestration actors (`workspace`, `project`, `task`) use command-only loops with no timeout. +14. Parent orchestration actors (`organization`, `repository`, `task`) use command-only loops with no timeout. 15. 
Periodic syncing/polling runs in dedicated child actors, each with a single timeout cadence. 16. For each actor, define the main loop and exactly what data it mutates; keep single-writer ownership strict. @@ -38,10 +38,10 @@ The core architecture changes from "worktree-per-task" to "provider-selected san 1. Rust binaries/backend removed. 2. Existing IPC replaced by new TypeScript transport. -3. Configuration schema changes for workspace selection and sandbox provider defaults. -4. Runtime model changes from global control plane to workspace coordinator actor. -5. Database schema migrates to workspace + provider + sandbox identity model. -6. Command options evolve to include workspace and provider selection. +3. Configuration schema changes for organization selection and sandbox provider defaults. +4. Runtime model changes from global control plane to organization coordinator actor. +5. Database schema migrates to organization + provider + sandbox identity model. +6. Command options evolve to include organization and provider selection. 
## Monorepo and Build Tooling @@ -49,7 +49,7 @@ Root tooling is standardized: - `pnpm-workspace.yaml` - `turbo.json` -- workspace scripts through `pnpm` + `turbo run ...` +- monorepo scripts through `pnpm` + `turbo run ...` Target package layout: @@ -59,13 +59,13 @@ packages/ backend/ src/ actors/ - workspace.ts - project.ts + organization.ts + repository.ts task.ts sandbox-instance.ts history.ts - project-pr-sync.ts - project-branch-sync.ts + repository-pr-sync.ts + repository-branch-sync.ts task-status-sync.ts keys.ts events.ts @@ -88,13 +88,13 @@ packages/ server.ts types.ts config/ - workspace.ts + organization.ts backend.ts cli/ # hf command surface src/ commands/ client/ # backend transport client - workspace/ # workspace selection resolver + organization/ # organization selection resolver tui/ # OpenTUI app src/ app/ @@ -111,13 +111,13 @@ CLI and TUI are separate packages in the same monorepo, not separate repositorie Backend actor files and responsibilities: -1. `packages/backend/src/actors/workspace.ts` -- `WorkspaceActor` implementation. -- Provider profile resolution and workspace-level coordination. -- Spawns/routes to `ProjectActor` handles. +1. `packages/backend/src/actors/organization.ts` +- `OrganizationActor` implementation. +- Provider profile resolution and organization-level coordination. +- Spawns/routes to `RepositoryActor` handles. -2. `packages/backend/src/actors/project.ts` -- `ProjectActor` implementation. +2. `packages/backend/src/actors/repository.ts` +- `RepositoryActor` implementation. - Branch snapshot refresh, PR cache orchestration, stream publication. - Routes task actions to `TaskActor`. @@ -134,7 +134,7 @@ Backend actor files and responsibilities: - Writes workflow events to SQLite via Drizzle. 6. `packages/backend/src/actors/keys.ts` -- Workspace-prefixed actor key builders/parsers. +- Organization-prefixed actor key builders/parsers. 7. 
`packages/backend/src/actors/events.ts` - Internal actor event envelopes and stream payload types. @@ -145,13 +145,13 @@ Backend actor files and responsibilities: 9. `packages/backend/src/actors/index.ts` - Actor exports and composition wiring. -10. `packages/backend/src/actors/project-pr-sync.ts` +10. `packages/backend/src/actors/repository-pr-sync.ts` - Read-only PR polling loop (single timeout cadence). -- Sends sync results back to `ProjectActor`. +- Sends sync results back to `RepositoryActor`. -11. `packages/backend/src/actors/project-branch-sync.ts` +11. `packages/backend/src/actors/repository-branch-sync.ts` - Read-only branch snapshot polling loop (single timeout cadence). -- Sends sync results back to `ProjectActor`. +- Sends sync results back to `RepositoryActor`. 12. `packages/backend/src/actors/task-status-sync.ts` - Read-only session/sandbox status polling loop (single timeout cadence). @@ -169,17 +169,17 @@ pnpm build -F rivetkit 2. Consume via local `link:` dependencies to built artifacts. 3. Keep dependency wiring deterministic and documented in repo scripts. -## Workspace Model +## Organization Model -Every command executes against a resolved workspace context. +Every command executes against a resolved organization context. -Workspace selection: +Organization selection: -1. CLI flag: `--workspace ` -2. Config default workspace +1. CLI flag: `--organization ` +2. Config default organization 3. Fallback to `default` -Workspace controls: +Organization controls: 1. provider profile defaults 2. sandbox policy @@ -188,45 +188,45 @@ Workspace controls: ## New Actor Implementation Overview -RivetKit registry actor keys are workspace-prefixed: +RivetKit registry actor keys are organization-prefixed: -1. `WorkspaceActor` (workspace coordinator) -- Key: `["ws", workspaceId]` -- Owns workspace config/runtime coordination, provider registry, workspace health. -- Resolves provider defaults and workspace-level policies. +1. 
`OrganizationActor` (organization coordinator) +- Key: `["ws", organizationId]` +- Owns organization config/runtime coordination, provider registry, organization health. +- Resolves provider defaults and organization-level policies. -2. `ProjectActor` -- Key: `["ws", workspaceId, "project", repoId]` +2. `RepositoryActor` +- Key: `["ws", organizationId, "repository", repoId]` - Owns repo snapshot cache and PR cache refresh orchestration. - Routes branch/task commands to task actors. -- Streams project updates to CLI/TUI subscribers. +- Streams repository updates to CLI/TUI subscribers. 3. `TaskActor` -- Key: `["ws", workspaceId, "project", repoId, "task", taskId]` +- Key: `["ws", organizationId, "repository", repoId, "task", taskId]` - Owns task metadata/runtime state. - Creates/resumes sandbox + session through provider adapter. - Handles attach/push/sync/merge/archive/kill and post-idle automation. 4. `SandboxInstanceActor` (optional but recommended) -- Key: `["ws", workspaceId, "provider", providerId, "sandbox", sandboxId]` +- Key: `["ws", organizationId, "provider", providerId, "sandbox", sandboxId]` - Owns sandbox lifecycle, heartbeat, endpoint readiness, recovery. 5. `HistoryActor` -- Key: `["ws", workspaceId, "project", repoId, "history"]` +- Key: `["ws", organizationId, "repository", repoId, "history"]` - Owns `events` writes and workflow timeline completeness. 6. `ProjectPrSyncActor` (child poller) -- Key: `["ws", workspaceId, "project", repoId, "pr-sync"]` -- Polls PR state on interval and emits results to `ProjectActor`. +- Key: `["ws", organizationId, "repository", repoId, "pr-sync"]` +- Polls PR state on interval and emits results to `RepositoryActor`. - Does not write DB directly. 7. `ProjectBranchSyncActor` (child poller) -- Key: `["ws", workspaceId, "project", repoId, "branch-sync"]` -- Polls branch/worktree state on interval and emits results to `ProjectActor`. 
+- Key: `["ws", organizationId, "repository", repoId, "branch-sync"]` +- Polls branch/worktree state on interval and emits results to `RepositoryActor`. - Does not write DB directly. 8. `TaskStatusSyncActor` (child poller) -- Key: `["ws", workspaceId, "project", repoId, "task", taskId, "status-sync"]` +- Key: `["ws", organizationId, "repository", repoId, "task", taskId, "status-sync"]` - Polls agent/session/sandbox health on interval and emits results to `TaskActor`. - Does not write DB directly. @@ -236,10 +236,10 @@ Ownership rule: each table/row has one actor writer. Always define actor run-loop + mutated state together: -1. `WorkspaceActor` -- Mutates: `workspaces`, `workspace_provider_profiles`. +1. `OrganizationActor` +- Mutates: `organizations`, `workspace_provider_profiles`. -2. `ProjectActor` +2. `RepositoryActor` - Mutates: `repos`, `branches`, `pr_cache` (applies child poller results). 3. `TaskActor` @@ -251,30 +251,30 @@ Always define actor run-loop + mutated state together: 5. `HistoryActor` - Mutates: `events`. -6. Child sync actors (`project-pr-sync`, `project-branch-sync`, `task-status-sync`) +6. Child sync actors (`repository-pr-sync`, `repository-branch-sync`, `task-status-sync`) - Mutates: none (read-only pollers; publish result messages only). ## Run Loop Patterns (Required) Parent orchestration actors: no timeout, command-only queue loops. 
-### `WorkspaceActor` (no timeout) +### `OrganizationActor` (no timeout) ```ts run: async (c) => { while (true) { - const msg = await c.queue.next("workspace.command"); - await handleWorkspaceCommand(c, msg); // writes workspace-owned tables only + const msg = await c.queue.next("organization.command"); + await handleOrganizationCommand(c, msg); // writes organization-owned tables only } }; ``` -### `ProjectActor` (no timeout) +### `RepositoryActor` (no timeout) ```ts run: async (c) => { while (true) { - const msg = await c.queue.next("project.command"); + const msg = await c.queue.next("repository.command"); await handleProjectCommand(c, msg); // includes applying sync results to branches/pr_cache } }; @@ -321,10 +321,10 @@ Child sync actors: one timeout each, one cadence each. run: async (c) => { const intervalMs = 30_000; while (true) { - const msg = await c.queue.next("project.pr_sync.command", { timeout: intervalMs }); + const msg = await c.queue.next("repository.pr_sync.command", { timeout: intervalMs }); if (!msg) { const result = await pollPrState(); - await sendToProject({ name: "project.pr_sync.result", result }); + await sendToProject({ name: "repository.pr_sync.result", result }); continue; } await handlePrSyncControl(c, msg); // force/stop/update-interval @@ -338,10 +338,10 @@ run: async (c) => { run: async (c) => { const intervalMs = 5_000; while (true) { - const msg = await c.queue.next("project.branch_sync.command", { timeout: intervalMs }); + const msg = await c.queue.next("repository.branch_sync.command", { timeout: intervalMs }); if (!msg) { const result = await pollBranchState(); - await sendToProject({ name: "project.branch_sync.result", result }); + await sendToProject({ name: "repository.branch_sync.result", result }); continue; } await handleBranchSyncControl(c, msg); @@ -368,7 +368,7 @@ run: async (c) => { ## Sandbox Provider Interface -Provider contract lives under `packages/backend/src/providers/provider-api` and is consumed by 
workspace/project/task actors. +Provider contract lives under `packages/backend/src/providers/provider-api` and is consumed by organization/repository/task actors. ```ts interface SandboxProvider { @@ -398,26 +398,26 @@ Initial providers: - Boots/ensures Sandbox Agent inside sandbox. - Returns endpoint/token for session operations. -## Command Surface (Workspace + Provider Aware) +## Command Surface (Organization + Provider Aware) -1. `hf create ... --workspace --provider ` -2. `hf switch --workspace [target]` -3. `hf attach --workspace [task]` -4. `hf list --workspace ` -5. `hf kill|archive|merge|push|sync --workspace ...` -6. `hf workspace use ` to set default workspace +1. `hf create ... --organization --provider ` +2. `hf switch --organization [target]` +3. `hf attach --organization [task]` +4. `hf list --organization ` +5. `hf kill|archive|merge|push|sync --organization ...` +6. `hf organization use ` to set default organization List/TUI include provider and sandbox health metadata. -`--workspace` remains optional; omitted values use the standard resolution order. +`--organization` remains optional; omitted values use the standard resolution order. ## Data Model v2 (SQLite + Drizzle) All persistent state is SQLite via Drizzle schema + migrations. -Tables (workspace-scoped): +Tables (organization-scoped): -1. `workspaces` +1. `organizations` 2. `workspace_provider_profiles` 3. `repos` (`workspace_id`, `repo_id`, ...) 4. `branches` (`workspace_id`, `repo_id`, ...) @@ -433,10 +433,10 @@ Migration approach: one-way migration from existing schema during TS backend boo 1. TypeScript backend exposes local control API (socket or localhost HTTP). 2. CLI/TUI are thin clients; all mutations go through backend actors. -3. OpenTUI subscribes to project streams from workspace-scoped project actors. -4. Workspace is required context on all backend mutation requests. +3. OpenTUI subscribes to repository streams from organization-scoped repository actors. +4. 
Organization is required context on all backend mutation requests. -CLI/TUI are responsible for resolving workspace context before calling backend mutations. +CLI/TUI are responsible for resolving organization context before calling backend mutations. ## CLI + TUI Packaging @@ -451,10 +451,10 @@ The package still calls the same backend API and shares contracts from `packages ## Implementation Phases -## Phase 0: Contracts and Workspace Spec +## Phase 0: Contracts and Organization Spec -1. Freeze workspace model, provider contract, and actor ownership map. -2. Freeze command flags for workspace + provider selection. +1. Freeze organization model, provider contract, and actor ownership map. +2. Freeze command flags for organization + provider selection. 3. Define Drizzle schema draft and migration plan. Exit criteria: @@ -462,7 +462,7 @@ Exit criteria: ## Phase 1: TypeScript Monorepo Bootstrap -1. Add `pnpm` workspace + Turborepo pipeline. +1. Add `pnpm` workspace + Turborepo pipeline. 2. Create `shared`, `backend`, and `cli` packages (with TUI integrated into CLI). 3. Add strict TypeScript config and CI checks. @@ -473,10 +473,10 @@ 1. Wire local RivetKit dependency from `../rivet`. 2. Add SQLite + Drizzle migrations and query layer. -3. Implement actor registry with workspace-prefixed keys. +3. Implement actor registry with organization-prefixed keys. Exit criteria: -- Backend boot + workspace actor health checks pass. +- Backend boot + organization actor health checks pass. ## Phase 3: Provider Layer in Backend @@ -487,9 +487,9 @@ Exit criteria: - `create/list/switch/attach/push/sync/kill` pass on worktree provider. -## Phase 4: Workspace/Task Lifecycle +## Phase 4: Organization/Task Lifecycle -1. Implement workspace coordinator flows. +1. Implement organization coordinator flows. 2. Implement TaskActor full lifecycle + post-idle automation. 3. Implement history events and PR/CI/review change tracking. 
@@ -509,7 +509,7 @@ Exit criteria: 1. Build interactive list/switch UI in OpenTUI. 2. Implement key actions (attach/open PR/archive/merge/sync). -3. Add workspace switcher UX and provider/sandbox indicators. +3. Add organization switcher UX and provider/sandbox indicators. Exit criteria: - TUI parity and responsive streaming updates. @@ -534,7 +534,7 @@ Exit criteria: 2. Integration tests - backend + sqlite + provider fakes -- workspace isolation boundaries +- organization isolation boundaries - session recovery and restart handling 3. E2E tests diff --git a/foundry/scripts/build-test-image.sh b/foundry/scripts/build-test-image.sh index 284c8bc..a8cae9b 100755 --- a/foundry/scripts/build-test-image.sh +++ b/foundry/scripts/build-test-image.sh @@ -2,7 +2,7 @@ set -euo pipefail echo "Docker integration test image is not part of the TypeScript migration baseline." -echo "Use workspace tests instead:" +echo "Use monorepo tests instead:" echo " pnpm -w typecheck" echo " pnpm -w build" echo " pnpm -w test" diff --git a/foundry/scripts/data/rivet-dev.json b/foundry/scripts/data/rivet-dev.json index 2b1b6f0..3534cac 100644 --- a/foundry/scripts/data/rivet-dev.json +++ b/foundry/scripts/data/rivet-dev.json @@ -1060,7 +1060,7 @@ }, { "number": 222, - "title": "Recover wellington workspace state", + "title": "Recover wellington organization state", "state": "open", "draft": false, "headRefName": "recovery/wellington-20260309", @@ -1070,7 +1070,7 @@ }, { "number": 220, - "title": "Recover lisbon workspace state", + "title": "Recover lisbon organization state", "state": "open", "draft": false, "headRefName": "recovery/lisbon-20260309", @@ -1080,7 +1080,7 @@ }, { "number": 219, - "title": "Recover karachi-v2 workspace state", + "title": "Recover karachi-v2 organization state", "state": "open", "draft": false, "headRefName": "recovery/karachi-v2-20260309", @@ -1090,7 +1090,7 @@ }, { "number": 218, - "title": "Recover hamburg workspace state", + "title": "Recover hamburg 
organization state", "state": "open", "draft": false, "headRefName": "recovery/hamburg-20260309", @@ -1100,7 +1100,7 @@ }, { "number": 217, - "title": "Recover geneva workspace state", + "title": "Recover geneva organization state", "state": "open", "draft": false, "headRefName": "recovery/geneva-20260309", @@ -1110,7 +1110,7 @@ }, { "number": 216, - "title": "Recover edinburgh workspace state", + "title": "Recover edinburgh organization state", "state": "open", "draft": false, "headRefName": "recovery/edinburgh-20260309", diff --git a/foundry/scripts/pull-org-data.ts b/foundry/scripts/pull-org-data.ts index 1759cad..3580baa 100644 --- a/foundry/scripts/pull-org-data.ts +++ b/foundry/scripts/pull-org-data.ts @@ -2,8 +2,8 @@ /** * Pull public GitHub organization data into a JSON fixture file. * - * This script mirrors the sync logic in the backend workspace actor - * (see: packages/backend/src/actors/workspace/app-shell.ts — syncGithubOrganizations + * This script mirrors the sync logic in the backend organization actor + * (see: packages/backend/src/actors/organization/app-shell.ts — syncGithubOrganizations * and syncGithubOrganizationRepos). Keep the two in sync: when the backend * sync workflow changes what data it fetches or how it structures organizations, * update this script to match. @@ -205,8 +205,8 @@ async function pullOrgData(orgLogin: string): Promise { console.log(` ${members.length} public members`); // 4. Fetch open PRs across all public repos - // Backend equivalent: ProjectPrSyncActor polls GitHub for open PRs per repo - // and stores them in the pr_cache table on the project actor + // Backend equivalent: open PR metadata is pulled from GitHub and merged into + // the organization/repository projections used by the UI. 
const openPullRequests: OrgFixturePullRequest[] = []; for (const repo of repos) { const rawPrs = await githubPaginate<{ diff --git a/sdks/CLAUDE.md b/sdks/CLAUDE.md new file mode 100644 index 0000000..a71eac4 --- /dev/null +++ b/sdks/CLAUDE.md @@ -0,0 +1,37 @@ +# SDK Instructions + +## TypeScript SDK Architecture + +- TypeScript clients are split into: + - `acp-http-client`: protocol-pure ACP-over-HTTP (`/v1/acp`) with no Sandbox-specific HTTP helpers. + - `sandbox-agent`: `SandboxAgent` SDK wrapper that combines ACP session operations with Sandbox control-plane and filesystem helpers. +- `SandboxAgent` entry points are `SandboxAgent.connect(...)` and `SandboxAgent.start(...)`. +- Stable Sandbox session methods are `createSession`, `resumeSession`, `resumeOrCreateSession`, `destroySession`, `rawSendSessionMethod`, `onSessionEvent`, `setSessionMode`, `setSessionModel`, `setSessionThoughtLevel`, `setSessionConfigOption`, `getSessionConfigOptions`, `getSessionModes`, `respondPermission`, `rawRespondPermission`, and `onPermissionRequest`. +- `Session` helpers are `prompt(...)`, `rawSend(...)`, `onEvent(...)`, `setMode(...)`, `setModel(...)`, `setThoughtLevel(...)`, `setConfigOption(...)`, `getConfigOptions()`, `getModes()`, `respondPermission(...)`, `rawRespondPermission(...)`, and `onPermissionRequest(...)`. +- Cleanup is `sdk.dispose()`. + +### React Component Methodology + +- Shared React UI belongs in `sdks/react` only when it is reusable outside the Inspector. +- If the same UI pattern is shared between the Sandbox Agent Inspector and Foundry, prefer extracting it into `sdks/react` instead of maintaining parallel implementations. +- Keep shared components unstyled by default: behavior in the package, styling in the consumer via `className`, slot-level `classNames`, render overrides, and `data-*` hooks. +- Prefer extracting reusable pieces such as transcript, composer, and conversation surfaces. 
Keep Inspector-specific shells such as session selection, session headers, and control-plane actions in `frontend/packages/inspector/`. +- Document all shared React components in `docs/react-components.mdx`, and keep that page aligned with the exported surface in `sdks/react/src/index.ts`. + +### TypeScript SDK Naming Conventions + +- Use `respond(id, reply)` for SDK methods that reply to an agent-initiated request (e.g. `respondPermission`). This is the standard pattern for answering any inbound JSON-RPC request from the agent. +- Prefix raw/low-level escape hatches with `raw` (e.g. `rawRespondPermission`, `rawSend`). These accept protocol-level types directly and bypass SDK abstractions. + +### Docs Source Of Truth + +- For TypeScript docs/examples, source of truth is implementation in: + - `sdks/typescript/src/client.ts` + - `sdks/typescript/src/index.ts` + - `sdks/acp-http-client/src/index.ts` +- Do not document TypeScript APIs unless they are exported and implemented in those files. + +## Tests + +- TypeScript SDK tests should run against a real running server/runtime over real `/v1` HTTP APIs, typically using the real `mock` agent for deterministic behavior. +- Do not use Vitest fetch/transport mocks to simulate server functionality in TypeScript SDK tests. diff --git a/server/CLAUDE.md b/server/CLAUDE.md index b56223c..88f4f0a 100644 --- a/server/CLAUDE.md +++ b/server/CLAUDE.md @@ -1,18 +1,47 @@ # Server Instructions -## Architecture +## ACP v1 Baseline -- Public API routes are defined in `server/packages/sandbox-agent/src/router.rs`. -- ACP proxy runtime is in `server/packages/sandbox-agent/src/acp_proxy_runtime.rs`. -- All API endpoints are under `/v1`. -- Keep binary filesystem transfer endpoints as dedicated HTTP APIs: +- v1 is ACP-native. +- `/v1/*` is removed and returns `410 Gone` (`application/problem+json`). +- `/opencode/*` is disabled during ACP core phases and returns `503`. 
+- Prompt/session traffic is ACP JSON-RPC over streamable HTTP on `/v1/rpc`: + - `POST /v1/rpc` + - `GET /v1/rpc` (SSE) + - `DELETE /v1/rpc` +- Control-plane endpoints: + - `GET /v1/health` + - `GET /v1/agents` + - `POST /v1/agents/{agent}/install` +- Binary filesystem transfer endpoints (intentionally HTTP, not ACP extension methods): - `GET /v1/fs/file` - `PUT /v1/fs/file` - `POST /v1/fs/upload-batch` - - Rationale: host-owned cross-agent-consistent behavior and large binary transfer needs that ACP JSON-RPC is not suited to stream efficiently. - - Maintain ACP variants in parallel only when they share the same underlying filesystem implementation; SDK defaults should still prefer HTTP for large/binary transfers. -- `/opencode/*` stays disabled (`503`) until Phase 7. -- Agent install logic (native + ACP agent process + lazy install) is handled by `server/packages/agent-management/`. +- Sandbox Agent ACP extension method naming: + - Custom ACP methods use `_sandboxagent/...` (not `_sandboxagent/v1/...`). + - Session detach method is `_sandboxagent/session/detach`. + +## API Scope + +- ACP is the primary protocol for agent/session behavior and all functionality that talks directly to the agent. +- ACP extensions may be used for gaps (for example `skills`, `models`, and related metadata), but the default is that agent-facing behavior is implemented by the agent through ACP. +- Custom HTTP APIs are for non-agent/session platform services (for example filesystem, terminals, and other host/runtime capabilities). +- Filesystem and terminal APIs remain Sandbox Agent-specific HTTP contracts and are not ACP. + - Do not make Sandbox Agent core flows depend on ACP client implementations of `fs/*` or `terminal/*`; in practice those client-side capabilities are often incomplete or inconsistent. + - ACP-native filesystem and terminal methods are also too limited for Sandbox Agent host/runtime needs, so prefer the native HTTP APIs for richer behavior. 
+- Keep `GET /v1/fs/file`, `PUT /v1/fs/file`, and `POST /v1/fs/upload-batch` on HTTP: + - These are Sandbox Agent host/runtime operations with cross-agent-consistent behavior. + - They may involve very large binary transfers that ACP JSON-RPC envelopes are not suited to stream. + - This is intentionally separate from ACP native `fs/read_text_file` and `fs/write_text_file`. + - ACP extension variants may exist in parallel, but SDK defaults should prefer HTTP for these binary transfer operations. + +## Architecture + +- HTTP contract and problem/error mapping: `server/packages/sandbox-agent/src/router.rs` +- ACP proxy runtime: `server/packages/sandbox-agent/src/acp_proxy_runtime.rs` +- ACP client runtime and agent process bridge: `server/packages/sandbox-agent/src/acp_runtime/mod.rs` +- Agent install logic (native + ACP agent process + lazy install): `server/packages/agent-management/` +- Inspector UI served at `/ui/` and bound to ACP over HTTP from `frontend/packages/inspector/` ## API Contract Rules @@ -21,6 +50,24 @@ - Regenerate `docs/openapi.json` after endpoint contract changes. - Keep CLI and HTTP endpoint behavior aligned (`docs/cli.mdx`). +## ACP Protocol Compliance + +- Before adding any new ACP method, property, or config option category to the SDK, verify it exists in the ACP spec at `https://agentclientprotocol.com/llms-full.txt`. +- Valid `SessionConfigOptionCategory` values are: `mode`, `model`, `thought_level`, `other`, or custom categories prefixed with `_` (e.g. `_permission_mode`). +- Do not invent ACP properties or categories (e.g. `permission_mode` is not a valid ACP category — use `_permission_mode` if it's a custom extension, or use existing ACP mechanisms like `session/set_mode`). +- `NewSessionRequest` only has `_meta`, `cwd`, and `mcpServers`. Do not add non-ACP fields to it. 
+- Sandbox Agent SDK abstractions (like `SessionCreateRequest`) may add convenience properties, but must clearly map to real ACP methods internally and not send fabricated fields over the wire. + +## Source Documents + +- ACP protocol specification (full LLM-readable reference): `https://agentclientprotocol.com/llms-full.txt` +- `~/misc/acp-docs/schema/schema.json` +- `~/misc/acp-docs/schema/meta.json` +- `research/acp/spec.md` +- `research/acp/v1-schema-to-acp-mapping.md` +- `research/acp/friction.md` +- `research/acp/todo.md` + ## Tests Primary v1 integration coverage: @@ -38,3 +85,9 @@ cargo test -p sandbox-agent --test v1_agent_process_matrix - Keep `research/acp/spec.md` as the source spec. - Update `research/acp/todo.md` when scope/status changes. - Log blockers/decisions in `research/acp/friction.md`. + +## Docker Examples (Dev Testing) + +- When manually testing bleeding-edge (unreleased) versions of sandbox-agent in `examples/`, use `SANDBOX_AGENT_DEV=1` with the Docker-based examples. +- This triggers a local build of `docker/runtime/Dockerfile.full` which builds the server binary from local source and packages it into the Docker image. 
+- Example: `SANDBOX_AGENT_DEV=1 pnpm --filter @sandbox-agent/example-mcp start` From 57a07f6a0a7da4f7edde26c193bcb67195d4e79c Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Sat, 14 Mar 2026 23:47:43 -0700 Subject: [PATCH 05/48] wip (#256) --- foundry/packages/backend/CLAUDE.md | 57 +++++++++++++ .../backend/src/actors/github-data/index.ts | 65 +++++++++++++- .../src/actors/organization/actions.ts | 19 +---- .../src/actors/organization/app-shell.ts | 84 +++---------------- .../src/actors/organization/db/schema.ts | 30 ++++++- .../src/actors/repository/db/schema.ts | 7 ++ .../backend/src/actors/task/db/schema.ts | 11 +++ .../backend/src/actors/task/workbench.ts | 21 ++++- .../src/components/mock-layout/sidebar.tsx | 11 +-- .../src/features/tasks/status.test.ts | 7 +- .../frontend/src/features/tasks/status.ts | 7 +- 11 files changed, 206 insertions(+), 113 deletions(-) diff --git a/foundry/packages/backend/CLAUDE.md b/foundry/packages/backend/CLAUDE.md index aef6cfd..432bc85 100644 --- a/foundry/packages/backend/CLAUDE.md +++ b/foundry/packages/backend/CLAUDE.md @@ -16,6 +16,47 @@ OrganizationActor └─ SandboxInstanceActor(sandboxProviderId, sandboxId) × N ``` +## Coordinator Pattern + +Actors follow a coordinator pattern where each coordinator is responsible for: +1. **Index tables** — keeping a local SQLite index/summary of its child actors' data +2. **Create/destroy** — handling lifecycle of child actors +3. **Routing** — resolving lookups to the correct child actor + +Children push updates **up** to their direct coordinator only. Coordinators broadcast changes to connected clients. This keeps the read path local (no fan-out to children). 
+ +### Coordinator hierarchy and index tables + +```text +OrganizationActor (coordinator for repos + auth users) +│ +│ Index tables: +│ ├─ repos → RepositoryActor index (repo catalog) +│ ├─ taskLookup → TaskActor index (taskId → repoId routing) +│ ├─ taskSummaries → TaskActor index (materialized sidebar projection) +│ ├─ authSessionIndex → AuthUserActor index (session token → userId) +│ ├─ authEmailIndex → AuthUserActor index (email → userId) +│ └─ authAccountIndex → AuthUserActor index (OAuth account → userId) +│ +├─ RepositoryActor (coordinator for tasks) +│ │ +│ │ Index tables: +│ │ └─ taskIndex → TaskActor index (taskId → branchName) +│ │ +│ └─ TaskActor (coordinator for sessions + sandboxes) +│ │ +│ │ Index tables: +│ │ ├─ taskWorkbenchSessions → Session index (session metadata, transcript, draft) +│ │ └─ taskSandboxes → SandboxInstanceActor index (sandbox history) +│ │ +│ └─ SandboxInstanceActor (leaf) +│ +├─ HistoryActor (organization-scoped audit log, not a coordinator) +└─ GithubDataActor (GitHub API cache, not a coordinator) +``` + +When adding a new index table, annotate it in the schema file with a doc comment identifying it as a coordinator index and which child actor it indexes (see existing examples). + ## Ownership Rules - `OrganizationActor` is the organization coordinator and lookup/index owner. @@ -29,8 +70,24 @@ OrganizationActor - `SandboxInstanceActor` stays separate from `TaskActor`; tasks/sessions reference it by identity. - The backend stores no local git state. No clones, no refs, no working trees, and no git-spice. Repository metadata comes from GitHub API data and webhook events. Any working-tree git operation runs inside a sandbox via `executeInSandbox()`. - When a backend request path must aggregate multiple independent actor calls or reads, prefer bounded parallelism over sequential fan-out when correctness permits. Do not serialize independent work by default. +- Only a coordinator creates/destroys its children. 
Do not create child actors from outside the coordinator. +- Children push state changes up to their direct coordinator only — never skip levels (e.g., task pushes to repo, not directly to org, unless org is the direct coordinator for that index). +- Read paths must use the coordinator's local index tables. Do not fan out to child actors on the hot read path. +- Never build "enriched" read actions that chain through multiple actors (e.g., coordinator → child actor → sibling actor). If data from multiple actors is needed for a read, it should already be materialized in the coordinator's index tables via push updates. If it's not there, fix the write path to push it — do not add a fan-out read path. + +## Multiplayer Correctness + +Per-user UI state must live on the user actor, not on shared task/session actors. This is critical for multiplayer — multiple users may view the same task simultaneously with different active sessions, unread states, and in-progress drafts. + +**Per-user state (user actor):** active session tab, unread counts, draft text, draft attachments. Keyed by `(userId, taskId, sessionId)`. + +**Task-global state (task actor):** session transcript, session model, session runtime status, sandbox identity, task status, branch name, PR state. These are shared across all users viewing the task — that is correct behavior. + +Do not store per-user preferences, selections, or ephemeral UI state on shared actors. If a field's value should differ between two users looking at the same task, it belongs on the user actor. ## Maintenance - Keep this file up to date whenever actor ownership, hierarchy, or lifecycle responsibilities change. - If the real actor tree diverges from this document, update this document in the same change. +- When adding, removing, or renaming coordinator index tables, update the hierarchy diagram above in the same change. 
+- When adding a new coordinator index table in a schema file, add a doc comment identifying which child actor it indexes (pattern: `/** Coordinator index of {ChildActor} instances. ... */`). diff --git a/foundry/packages/backend/src/actors/github-data/index.ts b/foundry/packages/backend/src/actors/github-data/index.ts index accfb5d..08c815d 100644 --- a/foundry/packages/backend/src/actors/github-data/index.ts +++ b/foundry/packages/backend/src/actors/github-data/index.ts @@ -1,6 +1,7 @@ // @ts-nocheck import { eq } from "drizzle-orm"; -import { actor } from "rivetkit"; +import { actor, queue } from "rivetkit"; +import { workflow, Loop } from "rivetkit/workflow"; import type { FoundryOrganization } from "@sandbox-agent/foundry-shared"; import { getActorRuntimeContext } from "../context.js"; import { getOrCreateOrganization, getTask } from "../handles.js"; @@ -536,8 +537,69 @@ async function runFullSync(c: any, input: FullSyncInput = {}) { }; } +const GITHUB_DATA_QUEUE_NAMES = ["githubData.command.syncRepos"] as const; + +async function runGithubDataWorkflow(ctx: any): Promise { + // Initial sync: if this actor was just created and has never synced, + // kick off the first full sync automatically. + await ctx.step({ + name: "github-data-initial-sync", + timeout: 5 * 60_000, + run: async () => { + const meta = await readMeta(ctx); + if (meta.syncStatus !== "pending") { + return; // Already synced or syncing — skip initial sync + } + try { + await runFullSync(ctx, { label: "Importing repository catalog..." }); + } catch (error) { + // Best-effort initial sync. Write the error to meta so the client + // sees the failure and can trigger a manual retry. + const currentMeta = await readMeta(ctx); + const organization = await getOrCreateOrganization(ctx, ctx.state.organizationId); + await organization.markOrganizationSyncFailed({ + message: error instanceof Error ? 
error.message : "GitHub import failed", + installationStatus: currentMeta.installationStatus, + }); + } + }, + }); + + // Command loop for explicit sync requests (reload, re-import, etc.) + await ctx.loop("github-data-command-loop", async (loopCtx: any) => { + const msg = await loopCtx.queue.next("next-github-data-command", { + names: [...GITHUB_DATA_QUEUE_NAMES], + completable: true, + }); + if (!msg) { + return Loop.continue(undefined); + } + + try { + if (msg.name === "githubData.command.syncRepos") { + await loopCtx.step({ + name: "github-data-sync-repos", + timeout: 5 * 60_000, + run: async () => { + const body = msg.body as FullSyncInput; + await runFullSync(loopCtx, body); + }, + }); + await msg.complete({ ok: true }); + return Loop.continue(undefined); + } + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + await msg.complete({ error: message }).catch(() => {}); + } + + return Loop.continue(undefined); + }); +} + export const githubData = actor({ db: githubDataDb, + queues: Object.fromEntries(GITHUB_DATA_QUEUE_NAMES.map((name) => [name, queue()])), options: { name: "GitHub Data", icon: "github", @@ -546,6 +608,7 @@ export const githubData = actor({ createState: (_c, input: GithubDataInput) => ({ organizationId: input.organizationId, }), + run: workflow(runGithubDataWorkflow), actions: { async getSummary(c) { const repositories = await c.db.select().from(githubRepositories).all(); diff --git a/foundry/packages/backend/src/actors/organization/actions.ts b/foundry/packages/backend/src/actors/organization/actions.ts index d83e776..70da62b 100644 --- a/foundry/packages/backend/src/actors/organization/actions.ts +++ b/foundry/packages/backend/src/actors/organization/actions.ts @@ -61,11 +61,7 @@ interface RepoOverviewInput { repoId: string; } -const ORGANIZATION_QUEUE_NAMES = [ - "organization.command.createTask", - "organization.command.syncGithubOrganizationRepos", - "organization.command.syncGithubSession", -] as 
const; +const ORGANIZATION_QUEUE_NAMES = ["organization.command.createTask", "organization.command.syncGithubSession"] as const; const SANDBOX_AGENT_REPO = "rivet-dev/sandbox-agent"; type OrganizationQueueName = (typeof ORGANIZATION_QUEUE_NAMES)[number]; @@ -384,19 +380,6 @@ export async function runOrganizationWorkflow(ctx: any): Promise { await msg.complete({ ok: true }); return Loop.continue(undefined); } - - if (msg.name === "organization.command.syncGithubOrganizationRepos") { - await loopCtx.step({ - name: "organization-sync-github-organization-repos", - timeout: 60_000, - run: async () => { - const { syncGithubOrganizationRepos } = await import("./app-shell.js"); - await syncGithubOrganizationRepos(loopCtx, msg.body as { sessionId: string; organizationId: string }); - }, - }); - await msg.complete({ ok: true }); - return Loop.continue(undefined); - } } catch (error) { const message = resolveErrorMessage(error); logActorWarning("organization", "organization workflow command failed", { diff --git a/foundry/packages/backend/src/actors/organization/app-shell.ts b/foundry/packages/backend/src/actors/organization/app-shell.ts index 20febfd..3339590 100644 --- a/foundry/packages/backend/src/actors/organization/app-shell.ts +++ b/foundry/packages/backend/src/actors/organization/app-shell.ts @@ -596,49 +596,6 @@ async function syncGithubOrganizationsInternal(c: any, input: { sessionId: strin }); } -export async function syncGithubOrganizationRepos(c: any, input: { sessionId: string; organizationId: string }): Promise { - assertAppOrganization(c); - const session = await requireSignedInSession(c, input.sessionId); - requireEligibleOrganization(session, input.organizationId); - - const organizationHandle = await getOrCreateOrganization(c, input.organizationId); - const organizationState = await getOrganizationState(organizationHandle); - const githubData = await getOrCreateGithubData(c, input.organizationId); - - try { - await githubData.fullSync({ - accessToken: 
session.githubAccessToken, - connectedAccount: organizationState.snapshot.github.connectedAccount, - installationId: organizationState.githubInstallationId, - installationStatus: organizationState.snapshot.github.installationStatus, - githubLogin: organizationState.githubLogin, - kind: organizationState.snapshot.kind, - label: "Importing repository catalog...", - }); - - // Broadcast updated app snapshot so connected clients see the new repos - c.broadcast("appUpdated", { - type: "appUpdated", - snapshot: await buildAppSnapshot(c, input.sessionId), - }); - } catch (error) { - const installationStatus = - error instanceof GitHubAppError && (error.status === 403 || error.status === 404) - ? "reconnect_required" - : organizationState.snapshot.github.installationStatus; - await organizationHandle.markOrganizationSyncFailed({ - message: error instanceof Error ? error.message : "GitHub import failed", - installationStatus, - }); - - // Broadcast sync failure so the client updates status - c.broadcast("appUpdated", { - type: "appUpdated", - snapshot: await buildAppSnapshot(c, input.sessionId), - }); - } -} - async function readOrganizationProfileRow(c: any) { assertOrganizationShell(c); return await c.db.select().from(organizationProfile).where(eq(organizationProfile.id, PROFILE_ROW_ID)).get(); @@ -1113,26 +1070,11 @@ export const organizationAppActions = { requireEligibleOrganization(session, input.organizationId); await getBetterAuthService().setActiveOrganization(input.sessionId, input.organizationId); - const organizationHandle = await getOrCreateOrganization(c, input.organizationId); - const organizationState = await getOrganizationState(organizationHandle); - if (organizationState.snapshot.github.syncStatus !== "synced") { - if (organizationState.snapshot.github.syncStatus !== "syncing") { - await organizationHandle.markOrganizationSyncStarted({ - label: "Importing repository catalog...", - }); + // Ensure the GitHub data actor exists. 
If it's newly created, its own + // workflow will detect the pending sync status and run the initial + // full sync automatically — no orchestration needed here. + await getOrCreateGithubData(c, input.organizationId); - const self = selfOrganization(c); - await self.send( - "organization.command.syncGithubOrganizationRepos", - { sessionId: input.sessionId, organizationId: input.organizationId }, - { - wait: false, - }, - ); - } - - return await buildAppSnapshot(c, input.sessionId); - } return await buildAppSnapshot(c, input.sessionId); }, @@ -1157,24 +1099,20 @@ export const organizationAppActions = { const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); - const organizationHandle = await getOrCreateOrganization(c, input.organizationId); - const organizationState = await getOrganizationState(organizationHandle); - if (organizationState.snapshot.github.syncStatus === "syncing") { + const githubData = await getOrCreateGithubData(c, input.organizationId); + const summary = await githubData.getSummary({}); + if (summary.syncStatus === "syncing") { return await buildAppSnapshot(c, input.sessionId); } + // Mark sync started on the organization, then send directly to the + // GitHub data actor's own workflow queue. + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); await organizationHandle.markOrganizationSyncStarted({ label: "Importing repository catalog...", }); - const self = selfOrganization(c); - await self.send( - "organization.command.syncGithubOrganizationRepos", - { sessionId: input.sessionId, organizationId: input.organizationId }, - { - wait: false, - }, - ); + await githubData.send("githubData.command.syncRepos", { label: "Importing repository catalog..." 
}, { wait: false }); return await buildAppSnapshot(c, input.sessionId); }, diff --git a/foundry/packages/backend/src/actors/organization/db/schema.ts b/foundry/packages/backend/src/actors/organization/db/schema.ts index f1e069e..dd4fa40 100644 --- a/foundry/packages/backend/src/actors/organization/db/schema.ts +++ b/foundry/packages/backend/src/actors/organization/db/schema.ts @@ -2,6 +2,11 @@ import { integer, sqliteTable, text } from "rivetkit/db/drizzle"; // SQLite is per organization actor instance, so no organizationId column needed. +/** + * Coordinator index of RepositoryActor instances. + * The organization actor is the coordinator for repositories. + * Rows are created/removed when repos are added/removed from the organization. + */ export const repos = sqliteTable("repos", { repoId: text("repo_id").notNull().primaryKey(), remoteUrl: text("remote_url").notNull(), @@ -9,15 +14,21 @@ export const repos = sqliteTable("repos", { updatedAt: integer("updated_at").notNull(), }); +/** + * Coordinator index of TaskActor instances. + * Fast taskId → repoId lookup so the organization can route requests + * to the correct RepositoryActor without scanning all repos. + */ export const taskLookup = sqliteTable("task_lookup", { taskId: text("task_id").notNull().primaryKey(), repoId: text("repo_id").notNull(), }); /** - * Materialized sidebar projection maintained by task actors. - * The source of truth still lives on each task actor; this table exists so - * organization reads can stay local and avoid fan-out across child actors. + * Coordinator index of TaskActor instances — materialized sidebar projection. + * Task actors push summary updates to the organization actor via + * applyTaskSummaryUpdate(). Source of truth lives on each TaskActor; + * this table exists so organization reads stay local without fan-out. 
*/ export const taskSummaries = sqliteTable("task_summaries", { taskId: text("task_id").notNull().primaryKey(), @@ -87,6 +98,11 @@ export const invoices = sqliteTable("invoices", { createdAt: integer("created_at").notNull(), }); +/** + * Coordinator index of AuthUserActor instances — routes session token → userId. + * Better Auth adapter uses this to resolve which user actor to query + * before the user identity is known. + */ export const authSessionIndex = sqliteTable("auth_session_index", { sessionId: text("session_id").notNull().primaryKey(), sessionToken: text("session_token").notNull(), @@ -95,12 +111,20 @@ export const authSessionIndex = sqliteTable("auth_session_index", { updatedAt: integer("updated_at").notNull(), }); +/** + * Coordinator index of AuthUserActor instances — routes email → userId. + * Better Auth adapter uses this to resolve which user actor to query. + */ export const authEmailIndex = sqliteTable("auth_email_index", { email: text("email").notNull().primaryKey(), userId: text("user_id").notNull(), updatedAt: integer("updated_at").notNull(), }); +/** + * Coordinator index of AuthUserActor instances — routes OAuth account → userId. + * Better Auth adapter uses this to resolve which user actor to query. + */ export const authAccountIndex = sqliteTable("auth_account_index", { id: text("id").notNull().primaryKey(), providerId: text("provider_id").notNull(), diff --git a/foundry/packages/backend/src/actors/repository/db/schema.ts b/foundry/packages/backend/src/actors/repository/db/schema.ts index ddb2f19..2f597e8 100644 --- a/foundry/packages/backend/src/actors/repository/db/schema.ts +++ b/foundry/packages/backend/src/actors/repository/db/schema.ts @@ -8,6 +8,13 @@ export const repoMeta = sqliteTable("repo_meta", { updatedAt: integer("updated_at").notNull(), }); +/** + * Coordinator index of TaskActor instances. + * The repository actor is the coordinator for tasks. Each row maps a + * taskId to its branch name. 
Used for branch conflict checking and + * task-by-branch lookups. Rows are inserted at task creation and + * updated on branch rename. + */ export const taskIndex = sqliteTable("task_index", { taskId: text("task_id").notNull().primaryKey(), branchName: text("branch_name"), diff --git a/foundry/packages/backend/src/actors/task/db/schema.ts b/foundry/packages/backend/src/actors/task/db/schema.ts index 0c1f6cd..889aa31 100644 --- a/foundry/packages/backend/src/actors/task/db/schema.ts +++ b/foundry/packages/backend/src/actors/task/db/schema.ts @@ -37,6 +37,11 @@ export const taskRuntime = sqliteTable( (table) => [check("task_runtime_singleton_id_check", sql`${table.id} = 1`)], ); +/** + * Coordinator index of SandboxInstanceActor instances. + * Tracks all sandbox instances provisioned for this task. Only one + * is active at a time (referenced by taskRuntime.activeSandboxId). + */ export const taskSandboxes = sqliteTable("task_sandboxes", { sandboxId: text("sandbox_id").notNull().primaryKey(), sandboxProviderId: text("sandbox_provider_id").notNull(), @@ -48,6 +53,12 @@ export const taskSandboxes = sqliteTable("task_sandboxes", { updatedAt: integer("updated_at").notNull(), }); +/** + * Coordinator index of workbench sessions within this task. + * The task actor is the coordinator for sessions. Each row holds session + * metadata, model, status, transcript, and draft state. Sessions are + * sub-entities of the task — no separate session actor in the DB. 
+ */ export const taskWorkbenchSessions = sqliteTable("task_workbench_sessions", { sessionId: text("session_id").notNull().primaryKey(), sandboxSessionId: text("sandbox_session_id"), diff --git a/foundry/packages/backend/src/actors/task/workbench.ts b/foundry/packages/backend/src/actors/task/workbench.ts index d689b3a..d6698ca 100644 --- a/foundry/packages/backend/src/actors/task/workbench.ts +++ b/foundry/packages/backend/src/actors/task/workbench.ts @@ -386,11 +386,24 @@ async function getTaskSandboxRuntime( }; } -async function ensureSandboxRepo(c: any, sandbox: any, record: any): Promise { +/** + * Track whether the sandbox repo has been fully prepared (cloned + fetched + checked out) + * for the current actor lifecycle. Subsequent calls can skip the expensive `git fetch` + * when `skipFetch` is true (used by sendWorkbenchMessage to avoid blocking on every prompt). + */ +let sandboxRepoPrepared = false; + +async function ensureSandboxRepo(c: any, sandbox: any, record: any, opts?: { skipFetchIfPrepared?: boolean }): Promise { if (!record.branchName) { throw new Error("cannot prepare a sandbox repo before the task branch exists"); } + // If the repo was already prepared and the caller allows skipping fetch, just return. + // The clone, fetch, and checkout already happened on a prior call. 
+ if (opts?.skipFetchIfPrepared && sandboxRepoPrepared) { + return; + } + const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId); const repository = await getOrCreateRepository(c, c.state.organizationId, c.state.repoId, c.state.repoRemote); const metadata = await repository.getRepositoryMetadata({}); @@ -426,6 +439,8 @@ async function ensureSandboxRepo(c: any, sandbox: any, record: any): Promise `@ ${attachment.filePath}:${attachment.lineNumber}\n${attachment.lineContent}`)].filter( Boolean, ); diff --git a/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx b/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx index f639716..7ccb18c 100644 --- a/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx @@ -671,7 +671,7 @@ export const Sidebar = memo(function Sidebar({ const isRunning = task.sessions.some((s) => s.status === "running"); const isProvisioning = !isPullRequestItem && - (String(task.status).startsWith("init_") || + ((String(task.status).startsWith("init_") && task.status !== "init_complete") || task.status === "new" || task.sessions.some((s) => s.status === "pending_provision" || s.status === "pending_session_create")); const hasUnread = task.sessions.some((s) => s.unread); @@ -810,11 +810,7 @@ export const Sidebar = memo(function Sidebar({ if (item.type === "task-drop-zone") { const { repository, taskCount } = item; - const isDropTarget = - drag?.type === "task" && - drag.repositoryId === repository.id && - drag.overIdx === taskCount && - drag.fromIdx !== taskCount; + const isDropTarget = drag?.type === "task" && drag.repositoryId === repository.id && drag.overIdx === taskCount && drag.fromIdx !== taskCount; return (
{ }); describe("isProvisioningTaskStatus", () => { - it("treats all init states as provisioning", () => { + it("treats in-progress init states as provisioning", () => { expect(isProvisioningTaskStatus("init_bootstrap_db")).toBe(true); expect(isProvisioningTaskStatus("init_ensure_name")).toBe(true); - expect(isProvisioningTaskStatus("init_complete")).toBe(true); + }); + + it("does not treat init_complete as provisioning (task is ready)", () => { + expect(isProvisioningTaskStatus("init_complete")).toBe(false); }); it("does not treat steady-state or terminal states as provisioning", () => { diff --git a/foundry/packages/frontend/src/features/tasks/status.ts b/foundry/packages/frontend/src/features/tasks/status.ts index 3e8ec82..90a6673 100644 --- a/foundry/packages/frontend/src/features/tasks/status.ts +++ b/foundry/packages/frontend/src/features/tasks/status.ts @@ -10,12 +10,7 @@ export interface TaskStateDescriptor { export function isProvisioningTaskStatus(status: TaskDisplayStatus | null | undefined): boolean { return ( - status === "new" || - status === "init_bootstrap_db" || - status === "init_enqueue_provision" || - status === "init_ensure_name" || - status === "init_assert_name" || - status === "init_complete" + status === "new" || status === "init_bootstrap_db" || status === "init_enqueue_provision" || status === "init_ensure_name" || status === "init_assert_name" ); } From 284fe66be48a60acbb792839b521991874666a33 Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Sun, 15 Mar 2026 12:37:42 -0700 Subject: [PATCH 06/48] wip (#258) --- CLAUDE.md | 1 + docs/credentials.mdx | 115 ------------------ docs/docs.json | 2 +- docs/llm-credentials.mdx | 250 +++++++++++++++++++++++++++++++++++++++ docs/quickstart.mdx | 3 + docs/sdk-overview.mdx | 4 + 6 files changed, 259 insertions(+), 116 deletions(-) delete mode 100644 docs/credentials.mdx create mode 100644 docs/llm-credentials.mdx diff --git a/CLAUDE.md b/CLAUDE.md index f8771fb..624602a 100644 --- a/CLAUDE.md +++ 
b/CLAUDE.md @@ -13,6 +13,7 @@ - Never mention "ACP" in user-facing docs (`docs/**/*.mdx`) except in docs that are specifically about ACP itself (e.g. `docs/acp-http-client.mdx`). - Never expose underlying protocol method names (e.g. `session/request_permission`, `session/create`, `_sandboxagent/session/detach`) in non-ACP docs. Describe the behavior in user-facing terms instead. - Do not describe the underlying protocol implementation in docs. Only document the SDK surface (methods, types, options). ACP protocol details belong exclusively in ACP-specific pages. +- Do not use em dashes (`—`) in docs. Use commas, periods, or parentheses instead. ### Docs Source Of Truth (HTTP/CLI) diff --git a/docs/credentials.mdx b/docs/credentials.mdx deleted file mode 100644 index 38bc7c4..0000000 --- a/docs/credentials.mdx +++ /dev/null @@ -1,115 +0,0 @@ ---- -title: "Credentials" -description: "How Sandbox Agent discovers and uses provider credentials." ---- - -Sandbox Agent discovers API credentials from environment variables and local agent config files. -These credentials are passed through to underlying agent runtimes. - -## Credential sources - -Credentials are discovered in priority order. 
- -### Environment variables (highest priority) - -API keys first: - -| Variable | Provider | -|----------|----------| -| `ANTHROPIC_API_KEY` | Anthropic | -| `CLAUDE_API_KEY` | Anthropic fallback | -| `OPENAI_API_KEY` | OpenAI | -| `CODEX_API_KEY` | OpenAI fallback | - -OAuth tokens (used when OAuth extraction is enabled): - -| Variable | Provider | -|----------|----------| -| `CLAUDE_CODE_OAUTH_TOKEN` | Anthropic | -| `ANTHROPIC_AUTH_TOKEN` | Anthropic fallback | - -### Agent config files - -| Agent | Config path | Provider | -|-------|-------------|----------| -| Amp | `~/.amp/config.json` | Anthropic | -| Claude Code | `~/.claude.json`, `~/.claude/.credentials.json` | Anthropic | -| Codex | `~/.codex/auth.json` | OpenAI | -| OpenCode | `~/.local/share/opencode/auth.json` | Anthropic/OpenAI | - -## Provider requirements by agent - -| Agent | Required provider | -|-------|-------------------| -| Claude Code | Anthropic | -| Amp | Anthropic | -| Codex | OpenAI | -| OpenCode | Anthropic or OpenAI | -| Mock | None | - -## Error handling behavior - -Credential extraction is best-effort: - -- Missing or malformed files are skipped. -- Discovery continues to later sources. -- Missing credentials mark providers unavailable instead of failing server startup. - -When prompting, Sandbox Agent does not pre-validate provider credentials. Agent-native authentication errors surface through session events/output. - -## Checking credential status - -### API - -`sdk.listAgents()` includes `credentialsAvailable` per agent. - -```json -{ - "agents": [ - { - "id": "claude", - "installed": true, - "credentialsAvailable": true - }, - { - "id": "codex", - "installed": true, - "credentialsAvailable": false - } - ] -} -``` - -### TypeScript SDK - -```typescript -const result = await sdk.listAgents(); - -for (const agent of result.agents) { - console.log(`${agent.id}: ${agent.credentialsAvailable ? 
"authenticated" : "no credentials"}`); -} -``` - -## Passing credentials explicitly - -Set environment variables before starting Sandbox Agent: - -```bash -export ANTHROPIC_API_KEY=sk-ant-... -export OPENAI_API_KEY=sk-... -sandbox-agent daemon start -``` - -Or with SDK-managed local spawn: - -```typescript -import { SandboxAgent } from "sandbox-agent"; - -const sdk = await SandboxAgent.start({ - spawn: { - env: { - ANTHROPIC_API_KEY: process.env.MY_ANTHROPIC_KEY, - }, - }, -}); -``` diff --git a/docs/docs.json b/docs/docs.json index a6c2087..9ba082c 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -51,6 +51,7 @@ "pages": [ "quickstart", "sdk-overview", + "llm-credentials", "react-components", { "group": "Deploy", @@ -90,7 +91,6 @@ { "group": "More", "pages": [ - "credentials", "daemon", "cors", "session-restoration", diff --git a/docs/llm-credentials.mdx b/docs/llm-credentials.mdx new file mode 100644 index 0000000..e771740 --- /dev/null +++ b/docs/llm-credentials.mdx @@ -0,0 +1,250 @@ +--- +title: "LLM Credentials" +description: "Strategies for providing LLM provider credentials to agents." +icon: "key" +--- + +Sandbox Agent needs LLM provider credentials (Anthropic, OpenAI, etc.) to run agent sessions. + +## Configuration + +Pass credentials via `spawn.env` when starting a sandbox. Each call to `SandboxAgent.start()` can use different credentials: + +```typescript +import { SandboxAgent } from "sandbox-agent"; + +const sdk = await SandboxAgent.start({ + spawn: { + env: { + ANTHROPIC_API_KEY: "sk-ant-...", + OPENAI_API_KEY: "sk-...", + }, + }, +}); +``` + +Each agent requires credentials from a specific provider. 
Sandbox Agent checks environment variables (including those passed via `spawn.env`) and host config files: + +| Agent | Provider | Environment variables | Config files | +|-------|----------|----------------------|--------------| +| Claude Code | Anthropic | `ANTHROPIC_API_KEY`, `CLAUDE_API_KEY` | `~/.claude.json`, `~/.claude/.credentials.json` | +| Amp | Anthropic | `ANTHROPIC_API_KEY`, `CLAUDE_API_KEY` | `~/.amp/config.json` | +| Codex | OpenAI | `OPENAI_API_KEY`, `CODEX_API_KEY` | `~/.codex/auth.json` | +| OpenCode | Anthropic or OpenAI | `ANTHROPIC_API_KEY`, `OPENAI_API_KEY` | `~/.local/share/opencode/auth.json` | +| Mock | None | - | - | + +## Credential strategies + +LLM credentials are passed into the sandbox as environment variables. The agent and everything inside the sandbox has access to the token, so it's important to choose the right strategy for how you provision and scope these credentials. + +| Strategy | Who pays | Cost attribution | Best for | +|----------|----------|-----------------|----------| +| **Per-tenant gateway** (recommended) | Your organization, billed back per tenant | Per-tenant keys with budgets | Multi-tenant SaaS, usage-based billing | +| **Bring your own key** | Each user (usage-based) | Per-user by default | Dev environments, internal tools | +| **Shared API key** | Your organization | None (single bill) | Single-tenant apps, internal platforms | +| **Personal subscription** | Each user (existing subscription) | Per-user by default | Local dev, internal tools where users have Claude or Codex subscriptions | + +### Per-tenant gateway (recommended) + +Route LLM traffic through a gateway that mints per-tenant API keys, each with its own spend tracking and budget limits. + +```mermaid +graph LR + B[Your Backend] -->|tenant key| S[Sandbox] + S -->|LLM requests| G[Gateway] + G -->|scoped key| P[LLM Provider] +``` + +Your backend issues a scoped key per tenant, then passes it to the sandbox. 
This is the typical pattern when using sandbox providers (E2B, Daytona, Docker). + +```typescript expandable +import { SandboxAgent } from "sandbox-agent"; + +async function createTenantSandbox(tenantId: string) { + // Issue a scoped key for this tenant via OpenRouter + const res = await fetch("https://openrouter.ai/api/v1/keys", { + method: "POST", + headers: { + Authorization: `Bearer ${process.env.OPENROUTER_PROVISIONING_KEY}`, + "Content-Type": "application/json", + }, + body: JSON.stringify({ + name: `tenant-${tenantId}`, + limit: 50, + limitResetType: "monthly", + }), + }); + const { key } = await res.json(); + + // Start a sandbox with the tenant's scoped key + const sdk = await SandboxAgent.start({ + spawn: { + env: { + OPENAI_API_KEY: key, // OpenRouter uses OpenAI-compatible endpoints + }, + }, + }); + + const session = await sdk.createSession({ + agent: "claude", + sessionInit: { cwd: "/workspace" }, + }); + + return { sdk, session }; +} +``` + +#### Security + +Recommended for multi-tenant applications. Each tenant gets a scoped key with its own budget, so exfiltration only exposes that tenant's allowance. + +#### Use cases + +- **Multi-tenant SaaS**: per-tenant spend tracking and budget limits +- **Production apps**: exposed to end users who need isolated credentials +- **Usage-based billing**: each tenant pays for their own consumption + +#### Choosing a gateway + + + + + +Managed service, zero infrastructure. [OpenRouter](https://openrouter.ai/docs/features/provisioning-api-keys) provides per-tenant API keys with spend tracking and budget limits via their Provisioning API. Pass the tenant key to Sandbox Agent as `OPENAI_API_KEY` (OpenRouter uses OpenAI-compatible endpoints). 
+ +```bash +# Create a key for a tenant with a $50/month budget +curl https://openrouter.ai/api/v1/keys \ + -H "Authorization: Bearer $PROVISIONING_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "name": "tenant-acme", + "limit": 50, + "limitResetType": "monthly" + }' +``` + +Easiest to set up but not open-source. See [OpenRouter pricing](https://openrouter.ai/docs/framework/pricing) for details. + + + + + +Self-hosted, open-source (MIT). [LiteLLM](https://github.com/BerriAI/litellm) is an OpenAI-compatible proxy with hierarchical budgets (org, team, user, key), virtual keys, and spend tracking. Requires Python + PostgreSQL. + +```bash +# Create a team (tenant) with a $500 budget +curl http://litellm:4000/team/new \ + -H "Authorization: Bearer $LITELLM_MASTER_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "team_alias": "tenant-acme", + "max_budget": 500 + }' + +# Generate a key for that team +curl http://litellm:4000/key/generate \ + -H "Authorization: Bearer $LITELLM_MASTER_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "team_id": "team-abc123", + "max_budget": 100 + }' +``` + +Full control with no vendor lock-in. Organization-level features require an enterprise license. + + + + + +Self-hosted, open-source (Apache 2.0). [Portkey](https://github.com/Portkey-AI/gateway) is a lightweight OpenAI-compatible gateway supporting 200+ providers. Single binary, no database required. Create virtual keys with per-tenant budget limits and pass them to Sandbox Agent. + +Lightest operational footprint of the self-hosted options. Observability and analytics require the managed platform or your own tooling. + + + + + +To bill tenants for LLM usage, use [Stripe token billing](https://docs.stripe.com/billing/token-billing) (integrates natively with OpenRouter) or query your gateway's spend API and feed usage into your billing system. + +### Bring your own key + +Each user provides their own API key. 
Users are billed directly by the LLM provider with no additional infrastructure needed. + +Pass the user's key via `spawn.env`: + +```typescript +const sdk = await SandboxAgent.start({ + spawn: { + env: { + ANTHROPIC_API_KEY: userProvidedKey, + }, + }, +}); +``` + +#### Security + +API keys are typically long-lived. The key is visible to the agent and anything running inside the sandbox, so exfiltration is possible. This is usually acceptable for developer-facing tools where the user owns the key. + +#### Use cases + +- **Developer tools**: each user manages their own API key +- **Internal platforms**: users already have LLM provider accounts +- **Per-user billing**: no extra infrastructure needed + +### Shared credentials + +A single organization-wide API key is used for all sessions. All token usage appears on one bill with no per-user or per-tenant cost attribution. + +```typescript +const sdk = await SandboxAgent.start({ + spawn: { + env: { + ANTHROPIC_API_KEY: process.env.ORG_ANTHROPIC_KEY!, + OPENAI_API_KEY: process.env.ORG_OPENAI_KEY!, + }, + }, +}); +``` + +If you need to track or limit spend per tenant, use a per-tenant gateway instead. + +#### Security + +Not recommended for anything other than internal tooling. A single exfiltrated key exposes your organization's entire LLM budget. If you need org-paid credentials for external users, use a per-tenant gateway with scoped keys instead. + +#### Use cases + +- **Single-tenant apps**: small number of users, one bill +- **Prototyping**: cost attribution not needed yet +- **Simplicity over security**: acceptable when exfiltration risk is low + +### Personal subscription + +If the user is signed into Claude Code or Codex on the host machine, Sandbox Agent automatically picks up their OAuth tokens. No configuration is needed. + +#### Remote sandboxes + +Extract credentials locally and pass them to a remote sandbox via `spawn.env`: + +```bash +$ sandbox-agent credentials extract-env +ANTHROPIC_API_KEY=sk-ant-... 
+CLAUDE_API_KEY=sk-ant-... +OPENAI_API_KEY=sk-... +CODEX_API_KEY=sk-... +``` + +Use `-e` to prefix with `export` for shell sourcing. + +#### Security + +Personal subscriptions use OAuth tokens with a limited lifespan. These are the same credentials used when running an agent normally on the host. If a token is exfiltrated from the sandbox, the exposure window is short. + +#### Use cases + +- **Local development**: users are already signed into Claude Code or Codex +- **Internal tools**: every user has their own subscription +- **Prototyping**: no key management needed \ No newline at end of file diff --git a/docs/quickstart.mdx b/docs/quickstart.mdx index a6293fe..7b5beed 100644 --- a/docs/quickstart.mdx +++ b/docs/quickstart.mdx @@ -77,6 +77,9 @@ icon: "rocket" Use the `mock` agent for SDK and integration testing without provider credentials. + + For per-tenant token tracking, budget enforcement, or usage-based billing, see [LLM Credentials](/llm-credentials) for gateway options like OpenRouter, LiteLLM, and Portkey. + diff --git a/docs/sdk-overview.mdx b/docs/sdk-overview.mdx index 228060b..fc4aee1 100644 --- a/docs/sdk-overview.mdx +++ b/docs/sdk-overview.mdx @@ -225,3 +225,7 @@ Parameters: - `fetch` (optional): Custom fetch implementation used by SDK HTTP and session calls - `waitForHealth` (optional, defaults to enabled): waits for `/v1/health` before HTTP helpers and session setup proceed; pass `false` to disable or `{ timeoutMs }` to bound the wait - `signal` (optional): aborts the startup `/v1/health` wait used by `connect()` + +## LLM credentials + +Sandbox Agent supports personal API keys, shared organization keys, and per-tenant gateway keys with budget enforcement. See [LLM Credentials](/llm-credentials) for setup details. 
From e740d28e0af3cbf33ca536ed2329d1d6e0916118 Mon Sep 17 00:00:00 2001 From: waltertang27 Date: Sun, 15 Mar 2026 16:14:59 -0400 Subject: [PATCH 07/48] Add modal sandbox support (#192) * add modal sandbox example * add test instructions --------- Co-authored-by: Nathan Flurry --- docs/deploy/modal.mdx | 97 ++++++++ examples/modal/package.json | 20 ++ examples/modal/src/modal.ts | 123 +++++++++ examples/modal/tests/modal.test.ts | 28 +++ examples/modal/tsconfig.json | 16 ++ pnpm-lock.yaml | 383 ++++++----------------------- 6 files changed, 365 insertions(+), 302 deletions(-) create mode 100644 docs/deploy/modal.mdx create mode 100644 examples/modal/package.json create mode 100644 examples/modal/src/modal.ts create mode 100644 examples/modal/tests/modal.test.ts create mode 100644 examples/modal/tsconfig.json diff --git a/docs/deploy/modal.mdx b/docs/deploy/modal.mdx new file mode 100644 index 0000000..cb081b0 --- /dev/null +++ b/docs/deploy/modal.mdx @@ -0,0 +1,97 @@ +--- +title: "Modal" +description: "Deploy Sandbox Agent inside a Modal sandbox." +--- + +## Prerequisites + +- `MODAL_TOKEN_ID` and `MODAL_TOKEN_SECRET` from [modal.com/settings](https://modal.com/settings) +- `ANTHROPIC_API_KEY` or `OPENAI_API_KEY` + +## TypeScript example + +```typescript +import { ModalClient } from "modal"; +import { SandboxAgent } from "sandbox-agent"; + +const modal = new ModalClient(); +const app = await modal.apps.fromName("sandbox-agent", { createIfMissing: true }); + +const image = modal.images + .fromRegistry("ubuntu:22.04") + .dockerfileCommands([ + "RUN apt-get update && apt-get install -y curl ca-certificates", + "RUN curl -fsSL https://releases.rivet.dev/sandbox-agent/0.2.x/install.sh | sh", + ]); + +const envs: Record = {}; +if (process.env.ANTHROPIC_API_KEY) envs.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; +if (process.env.OPENAI_API_KEY) envs.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + +const secrets = Object.keys(envs).length > 0 + ? 
[await modal.secrets.fromObject(envs)] + : []; + +const sb = await modal.sandboxes.create(app, image, { + encryptedPorts: [3000], + secrets, +}); + +const exec = async (cmd: string) => { + const p = await sb.exec(["bash", "-c", cmd], { stdout: "pipe", stderr: "pipe" }); + const exitCode = await p.wait(); + if (exitCode !== 0) { + const stderr = await p.stderr.readText(); + throw new Error(`Command failed (exit ${exitCode}): ${cmd}\n${stderr}`); + } +}; + +await exec("sandbox-agent install-agent claude"); +await exec("sandbox-agent install-agent codex"); + +await sb.exec( + ["bash", "-c", "sandbox-agent server --no-token --host 0.0.0.0 --port 3000 &"], +); + +const tunnels = await sb.tunnels(); +const baseUrl = tunnels[3000].url; + +const sdk = await SandboxAgent.connect({ baseUrl }); + +const session = await sdk.createSession({ agent: "claude" }); +const off = session.onEvent((event) => { + console.log(event.sender, event.payload); +}); + +await session.prompt([{ type: "text", text: "Summarize this repository" }]); +off(); + +await sb.terminate(); +``` + +## Faster cold starts + +Modal caches image layers, so the `dockerfileCommands` that install `curl` and `sandbox-agent` only run on the first build. Subsequent sandbox creates reuse the cached image. + +## Running the test + +The example includes a health-check test. First, build the SDK: + +```bash +pnpm --filter sandbox-agent build +``` + +Then run the test with your Modal credentials: + +```bash +MODAL_TOKEN_ID= MODAL_TOKEN_SECRET= npx vitest run +``` + +Run from `examples/modal/`. The test will skip if credentials are not set. + +## Notes + +- Modal sandboxes use [gVisor](https://gvisor.dev/) for strong isolation. +- Ports are exposed via encrypted tunnels (`encryptedPorts`). Use `sb.tunnels()` to get the public HTTPS URL. +- Environment variables (API keys) are passed as Modal [Secrets](https://modal.com/docs/guide/secrets) rather than plain env vars for security. 
+- Always call `sb.terminate()` when done to avoid leaking sandbox resources. diff --git a/examples/modal/package.json b/examples/modal/package.json new file mode 100644 index 0000000..61debbd --- /dev/null +++ b/examples/modal/package.json @@ -0,0 +1,20 @@ +{ + "name": "@sandbox-agent/example-modal", + "private": true, + "type": "module", + "scripts": { + "start": "tsx src/modal.ts", + "typecheck": "tsc --noEmit" + }, + "dependencies": { + "modal": "latest", + "@sandbox-agent/example-shared": "workspace:*", + "sandbox-agent": "workspace:*" + }, + "devDependencies": { + "@types/node": "latest", + "tsx": "latest", + "typescript": "latest", + "vitest": "^3.0.0" + } +} diff --git a/examples/modal/src/modal.ts b/examples/modal/src/modal.ts new file mode 100644 index 0000000..d525ad3 --- /dev/null +++ b/examples/modal/src/modal.ts @@ -0,0 +1,123 @@ +import { ModalClient } from "modal"; +import { SandboxAgent } from "sandbox-agent"; +import { detectAgent, buildInspectorUrl, waitForHealth } from "@sandbox-agent/example-shared"; +import { fileURLToPath } from "node:url"; +import { resolve } from "node:path"; +import { run } from "node:test"; + +const PORT = 3000; +const APP_NAME = "sandbox-agent"; + +async function buildSecrets(modal: ModalClient) { + const envVars: Record = {}; + if (process.env.ANTHROPIC_API_KEY) + envVars.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; + if (process.env.OPENAI_API_KEY) + envVars.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + + if (Object.keys(envVars).length === 0) return []; + return [await modal.secrets.fromObject(envVars)]; +} + +export async function setupModalSandboxAgent(): Promise<{ + baseUrl: string; + cleanup: () => Promise; +}> { + const modal = new ModalClient(); + const app = await modal.apps.fromName(APP_NAME, { createIfMissing: true }); + + const image = modal.images + .fromRegistry("ubuntu:22.04") + .dockerfileCommands([ + "RUN apt-get update && apt-get install -y curl ca-certificates", + "RUN curl -fsSL 
https://releases.rivet.dev/sandbox-agent/0.2.x/install.sh | sh", + ]); + + const secrets = await buildSecrets(modal); + + console.log("Creating Modal sandbox!"); + const sb = await modal.sandboxes.create(app, image, { + secrets: secrets, + encryptedPorts: [PORT], + }); + console.log(`Sandbox created: ${sb.sandboxId}`); + + const exec = async (cmd: string) => { + const p = await sb.exec(["bash", "-c", cmd], { + stdout: "pipe", + stderr: "pipe", + }); + const exitCode = await p.wait(); + if (exitCode !== 0) { + const stderr = await p.stderr.readText(); + throw new Error(`Command failed (exit ${exitCode}): ${cmd}\n${stderr}`); + } + }; + + if (process.env.ANTHROPIC_API_KEY) { + console.log("Installing Claude agent..."); + await exec("sandbox-agent install-agent claude"); + } + if (process.env.OPENAI_API_KEY) { + console.log("Installing Codex agent..."); + await exec("sandbox-agent install-agent codex"); + } + + console.log("Starting server..."); + + await sb.exec( + ["bash", "-c", `sandbox-agent server --no-token --host 0.0.0.0 --port ${PORT} &`], + ); + + const tunnels = await sb.tunnels(); + const tunnel = tunnels[PORT]; + if (!tunnel) { + throw new Error(`No tunnel found for port ${PORT}`); + } + const baseUrl = tunnel.url; + + console.log("Waiting for server..."); + await waitForHealth({ baseUrl }); + + const cleanup = async () => { + try { + await sb.terminate(); + } catch (error) { + console.warn("Cleanup failed:", error instanceof Error ? 
error.message : error); + } + }; + + return { baseUrl, cleanup }; +} + +export async function runModalExample(): Promise { + const { baseUrl, cleanup } = await setupModalSandboxAgent(); + + const handleExit = async () => { + await cleanup(); + process.exit(0); + }; + + process.once("SIGINT", handleExit); + process.once("SIGTERM", handleExit); + + const client = await SandboxAgent.connect({ baseUrl }); + const session = await client.createSession({ agent: detectAgent(), sessionInit: { cwd: "/root", mcpServers: [] } }); + const sessionId = session.id; + + console.log(` UI: ${buildInspectorUrl({ baseUrl, sessionId })}`); + console.log(" Press Ctrl+C to stop."); + + await new Promise(() => {}); +} + +const isDirectRun = Boolean( + process.argv[1] && resolve(process.argv[1]) === fileURLToPath(import.meta.url), +); + +if (isDirectRun) { + runModalExample().catch((error) => { + console.error(error instanceof Error ? error.message : error); + process.exit(1); + }); +} diff --git a/examples/modal/tests/modal.test.ts b/examples/modal/tests/modal.test.ts new file mode 100644 index 0000000..9c27a21 --- /dev/null +++ b/examples/modal/tests/modal.test.ts @@ -0,0 +1,28 @@ +import { describe, it, expect } from "vitest"; +import { buildHeaders } from "@sandbox-agent/example-shared"; +import { setupModalSandboxAgent } from "../src/modal.ts"; + +const shouldRun = Boolean(process.env.MODAL_TOKEN_ID && process.env.MODAL_TOKEN_SECRET); +const timeoutMs = Number.parseInt(process.env.SANDBOX_TEST_TIMEOUT_MS || "", 10) || 300_000; + +const testFn = shouldRun ? 
it : it.skip; + +describe("modal example", () => { + testFn( + "starts sandbox-agent and responds to /v1/health", + async () => { + const { baseUrl, cleanup } = await setupModalSandboxAgent(); + try { + const response = await fetch(`${baseUrl}/v1/health`, { + headers: buildHeaders({}), + }); + expect(response.ok).toBe(true); + const data = await response.json(); + expect(data.status).toBe("ok"); + } finally { + await cleanup(); + } + }, + timeoutMs, + ); +}); diff --git a/examples/modal/tsconfig.json b/examples/modal/tsconfig.json new file mode 100644 index 0000000..96ba2fd --- /dev/null +++ b/examples/modal/tsconfig.json @@ -0,0 +1,16 @@ +{ + "compilerOptions": { + "target": "ES2022", + "lib": ["ES2022", "DOM"], + "module": "ESNext", + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "noEmit": true, + "esModuleInterop": true, + "strict": true, + "skipLibCheck": true, + "resolveJsonModule": true + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "**/*.test.ts"] +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index ad12a3e..14f8572 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -296,6 +296,31 @@ importers: specifier: latest version: 5.9.3 + examples/modal: + dependencies: + '@sandbox-agent/example-shared': + specifier: workspace:* + version: link:../shared + modal: + specifier: latest + version: 0.7.1 + sandbox-agent: + specifier: workspace:* + version: link:../../sdks/typescript + devDependencies: + '@types/node': + specifier: latest + version: 25.3.0 + tsx: + specifier: latest + version: 4.21.0 + typescript: + specifier: latest + version: 5.9.3 + vitest: + specifier: ^3.0.0 + version: 3.2.4(@types/debug@4.1.12)(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + examples/persist-memory: dependencies: '@sandbox-agent/example-shared': @@ -1715,12 +1740,6 @@ packages: cpu: [ppc64] os: [aix] - '@esbuild/aix-ppc64@0.27.2': - resolution: {integrity: 
sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==} - engines: {node: '>=18'} - cpu: [ppc64] - os: [aix] - '@esbuild/aix-ppc64@0.27.3': resolution: {integrity: sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==} engines: {node: '>=18'} @@ -1751,12 +1770,6 @@ packages: cpu: [arm64] os: [android] - '@esbuild/android-arm64@0.27.2': - resolution: {integrity: sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA==} - engines: {node: '>=18'} - cpu: [arm64] - os: [android] - '@esbuild/android-arm64@0.27.3': resolution: {integrity: sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==} engines: {node: '>=18'} @@ -1787,12 +1800,6 @@ packages: cpu: [arm] os: [android] - '@esbuild/android-arm@0.27.2': - resolution: {integrity: sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA==} - engines: {node: '>=18'} - cpu: [arm] - os: [android] - '@esbuild/android-arm@0.27.3': resolution: {integrity: sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==} engines: {node: '>=18'} @@ -1823,12 +1830,6 @@ packages: cpu: [x64] os: [android] - '@esbuild/android-x64@0.27.2': - resolution: {integrity: sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A==} - engines: {node: '>=18'} - cpu: [x64] - os: [android] - '@esbuild/android-x64@0.27.3': resolution: {integrity: sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==} engines: {node: '>=18'} @@ -1859,12 +1860,6 @@ packages: cpu: [arm64] os: [darwin] - '@esbuild/darwin-arm64@0.27.2': - resolution: {integrity: sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==} - engines: {node: '>=18'} - cpu: [arm64] - os: [darwin] - '@esbuild/darwin-arm64@0.27.3': resolution: 
{integrity: sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==} engines: {node: '>=18'} @@ -1895,12 +1890,6 @@ packages: cpu: [x64] os: [darwin] - '@esbuild/darwin-x64@0.27.2': - resolution: {integrity: sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==} - engines: {node: '>=18'} - cpu: [x64] - os: [darwin] - '@esbuild/darwin-x64@0.27.3': resolution: {integrity: sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==} engines: {node: '>=18'} @@ -1931,12 +1920,6 @@ packages: cpu: [arm64] os: [freebsd] - '@esbuild/freebsd-arm64@0.27.2': - resolution: {integrity: sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g==} - engines: {node: '>=18'} - cpu: [arm64] - os: [freebsd] - '@esbuild/freebsd-arm64@0.27.3': resolution: {integrity: sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==} engines: {node: '>=18'} @@ -1967,12 +1950,6 @@ packages: cpu: [x64] os: [freebsd] - '@esbuild/freebsd-x64@0.27.2': - resolution: {integrity: sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA==} - engines: {node: '>=18'} - cpu: [x64] - os: [freebsd] - '@esbuild/freebsd-x64@0.27.3': resolution: {integrity: sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==} engines: {node: '>=18'} @@ -2003,12 +1980,6 @@ packages: cpu: [arm64] os: [linux] - '@esbuild/linux-arm64@0.27.2': - resolution: {integrity: sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw==} - engines: {node: '>=18'} - cpu: [arm64] - os: [linux] - '@esbuild/linux-arm64@0.27.3': resolution: {integrity: sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==} engines: {node: '>=18'} @@ -2039,12 +2010,6 @@ packages: cpu: [arm] os: [linux] - 
'@esbuild/linux-arm@0.27.2': - resolution: {integrity: sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==} - engines: {node: '>=18'} - cpu: [arm] - os: [linux] - '@esbuild/linux-arm@0.27.3': resolution: {integrity: sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==} engines: {node: '>=18'} @@ -2075,12 +2040,6 @@ packages: cpu: [ia32] os: [linux] - '@esbuild/linux-ia32@0.27.2': - resolution: {integrity: sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w==} - engines: {node: '>=18'} - cpu: [ia32] - os: [linux] - '@esbuild/linux-ia32@0.27.3': resolution: {integrity: sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==} engines: {node: '>=18'} @@ -2111,12 +2070,6 @@ packages: cpu: [loong64] os: [linux] - '@esbuild/linux-loong64@0.27.2': - resolution: {integrity: sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg==} - engines: {node: '>=18'} - cpu: [loong64] - os: [linux] - '@esbuild/linux-loong64@0.27.3': resolution: {integrity: sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==} engines: {node: '>=18'} @@ -2147,12 +2100,6 @@ packages: cpu: [mips64el] os: [linux] - '@esbuild/linux-mips64el@0.27.2': - resolution: {integrity: sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw==} - engines: {node: '>=18'} - cpu: [mips64el] - os: [linux] - '@esbuild/linux-mips64el@0.27.3': resolution: {integrity: sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==} engines: {node: '>=18'} @@ -2183,12 +2130,6 @@ packages: cpu: [ppc64] os: [linux] - '@esbuild/linux-ppc64@0.27.2': - resolution: {integrity: sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ==} - engines: {node: '>=18'} - cpu: [ppc64] - os: 
[linux] - '@esbuild/linux-ppc64@0.27.3': resolution: {integrity: sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==} engines: {node: '>=18'} @@ -2219,12 +2160,6 @@ packages: cpu: [riscv64] os: [linux] - '@esbuild/linux-riscv64@0.27.2': - resolution: {integrity: sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA==} - engines: {node: '>=18'} - cpu: [riscv64] - os: [linux] - '@esbuild/linux-riscv64@0.27.3': resolution: {integrity: sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==} engines: {node: '>=18'} @@ -2255,12 +2190,6 @@ packages: cpu: [s390x] os: [linux] - '@esbuild/linux-s390x@0.27.2': - resolution: {integrity: sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w==} - engines: {node: '>=18'} - cpu: [s390x] - os: [linux] - '@esbuild/linux-s390x@0.27.3': resolution: {integrity: sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==} engines: {node: '>=18'} @@ -2291,12 +2220,6 @@ packages: cpu: [x64] os: [linux] - '@esbuild/linux-x64@0.27.2': - resolution: {integrity: sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==} - engines: {node: '>=18'} - cpu: [x64] - os: [linux] - '@esbuild/linux-x64@0.27.3': resolution: {integrity: sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==} engines: {node: '>=18'} @@ -2315,12 +2238,6 @@ packages: cpu: [arm64] os: [netbsd] - '@esbuild/netbsd-arm64@0.27.2': - resolution: {integrity: sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw==} - engines: {node: '>=18'} - cpu: [arm64] - os: [netbsd] - '@esbuild/netbsd-arm64@0.27.3': resolution: {integrity: sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==} engines: {node: '>=18'} @@ -2351,12 +2268,6 
@@ packages: cpu: [x64] os: [netbsd] - '@esbuild/netbsd-x64@0.27.2': - resolution: {integrity: sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA==} - engines: {node: '>=18'} - cpu: [x64] - os: [netbsd] - '@esbuild/netbsd-x64@0.27.3': resolution: {integrity: sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==} engines: {node: '>=18'} @@ -2375,12 +2286,6 @@ packages: cpu: [arm64] os: [openbsd] - '@esbuild/openbsd-arm64@0.27.2': - resolution: {integrity: sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA==} - engines: {node: '>=18'} - cpu: [arm64] - os: [openbsd] - '@esbuild/openbsd-arm64@0.27.3': resolution: {integrity: sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==} engines: {node: '>=18'} @@ -2411,12 +2316,6 @@ packages: cpu: [x64] os: [openbsd] - '@esbuild/openbsd-x64@0.27.2': - resolution: {integrity: sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg==} - engines: {node: '>=18'} - cpu: [x64] - os: [openbsd] - '@esbuild/openbsd-x64@0.27.3': resolution: {integrity: sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==} engines: {node: '>=18'} @@ -2435,12 +2334,6 @@ packages: cpu: [arm64] os: [openharmony] - '@esbuild/openharmony-arm64@0.27.2': - resolution: {integrity: sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag==} - engines: {node: '>=18'} - cpu: [arm64] - os: [openharmony] - '@esbuild/openharmony-arm64@0.27.3': resolution: {integrity: sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==} engines: {node: '>=18'} @@ -2471,12 +2364,6 @@ packages: cpu: [x64] os: [sunos] - '@esbuild/sunos-x64@0.27.2': - resolution: {integrity: 
sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg==} - engines: {node: '>=18'} - cpu: [x64] - os: [sunos] - '@esbuild/sunos-x64@0.27.3': resolution: {integrity: sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==} engines: {node: '>=18'} @@ -2507,12 +2394,6 @@ packages: cpu: [arm64] os: [win32] - '@esbuild/win32-arm64@0.27.2': - resolution: {integrity: sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg==} - engines: {node: '>=18'} - cpu: [arm64] - os: [win32] - '@esbuild/win32-arm64@0.27.3': resolution: {integrity: sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==} engines: {node: '>=18'} @@ -2543,12 +2424,6 @@ packages: cpu: [ia32] os: [win32] - '@esbuild/win32-ia32@0.27.2': - resolution: {integrity: sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ==} - engines: {node: '>=18'} - cpu: [ia32] - os: [win32] - '@esbuild/win32-ia32@0.27.3': resolution: {integrity: sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==} engines: {node: '>=18'} @@ -2579,12 +2454,6 @@ packages: cpu: [x64] os: [win32] - '@esbuild/win32-x64@0.27.2': - resolution: {integrity: sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ==} - engines: {node: '>=18'} - cpu: [x64] - os: [win32] - '@esbuild/win32-x64@0.27.3': resolution: {integrity: sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==} engines: {node: '>=18'} @@ -3824,6 +3693,9 @@ packages: '@vitest/utils@3.2.4': resolution: {integrity: sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==} + abort-controller-x@0.4.3: + resolution: {integrity: sha512-VtUwTNU8fpMwvWGn4xE93ywbogTYsuT+AUxAXOeelbXuQVIwNmC5YLeho9sH4vZ4ITW8414TTAOG1nW6uIVHCA==} + accepts@2.0.0: 
resolution: {integrity: sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==} engines: {node: '>= 0.6'} @@ -4804,11 +4676,6 @@ packages: engines: {node: '>=18'} hasBin: true - esbuild@0.27.2: - resolution: {integrity: sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw==} - engines: {node: '>=18'} - hasBin: true - esbuild@0.27.3: resolution: {integrity: sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==} engines: {node: '>=18'} @@ -5717,8 +5584,8 @@ packages: mlly@1.8.0: resolution: {integrity: sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g==} - mockdate@2.0.5: - resolution: {integrity: sha512-ST0PnThzWKcgSLyc+ugLVql45PvESt3Ul/wrdV/OPc/6Pr8dbLAIJsN1cIp41FLzbN+srVTNIRn+5Cju0nyV6A==} + modal@0.7.1: + resolution: {integrity: sha512-WFn5mfVD7BbdNytqDODjKXG+RkF4bubTKiu7gZvq/JITcLIU1JWYnZQSJ41cE1TlrBlxFADSx8d7Q2AXF1GT+A==} module-details-from-path@1.0.4: resolution: {integrity: sha512-EGWKgxALGMgzvxYF1UyGTy0HXX/2vHLkw6+NvDKW2jypWbHpjQuj4UMcqQWXHERJhVGKikolT06G3bcKe4fi7w==} @@ -5772,6 +5639,12 @@ packages: resolution: {integrity: sha512-Z4SmBUweYa09+o6pG+eASabEpP6QkQ70yHj351pQoEXIs8uHbaU2DWVmzBANKgflPa47A50PtB2+NgRpQvr7vA==} engines: {node: '>= 10'} + nice-grpc-common@2.0.2: + resolution: {integrity: sha512-7RNWbls5kAL1QVUOXvBsv1uO0wPQK3lHv+cY1gwkTzirnG1Nop4cBJZubpgziNbaVc/bl9QJcyvsf/NQxa3rjQ==} + + nice-grpc@2.1.14: + resolution: {integrity: sha512-GK9pKNxlvnU5FAdaw7i2FFuR9CqBspcE+if2tqnKXBcE0R8525wj4BZvfcwj7FjvqbssqKxRHt2nwedalbJlww==} + nlcst-to-string@4.0.0: resolution: {integrity: sha512-YKLBCcUYKAg0FNlOBT6aI91qFmSiFKiluk655WzPF+DDMA02qIyy8uiRqI8QXtcFpEvll12LpL5MXqEmAZ+dcA==} @@ -6892,6 +6765,9 @@ packages: trough@2.2.0: resolution: {integrity: sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==} + ts-error@1.0.6: + resolution: {integrity: 
sha512-tLJxacIQUM82IR7JO1UUkKlYuUTmoY9HBJAmNWFzheSlDS5SPMcNIepejHJa4BpPQLAcbRhRf3GDJzyj6rbKvA==} + ts-interface-checker@0.1.13: resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} @@ -7166,6 +7042,10 @@ packages: resolution: {integrity: sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==} hasBin: true + uuid@11.1.0: + resolution: {integrity: sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==} + hasBin: true + uuid@12.0.0: resolution: {integrity: sha512-USe1zesMYh4fjCA8ZH5+X5WIVD0J4V1Jksm1bFTVBX2F/cwSXt0RO5w/3UXbdLKmZX65MiWV+hwhSS8p6oBTGA==} hasBin: true @@ -8602,9 +8482,6 @@ snapshots: '@esbuild/aix-ppc64@0.25.12': optional: true - '@esbuild/aix-ppc64@0.27.2': - optional: true - '@esbuild/aix-ppc64@0.27.3': optional: true @@ -8620,9 +8497,6 @@ snapshots: '@esbuild/android-arm64@0.25.12': optional: true - '@esbuild/android-arm64@0.27.2': - optional: true - '@esbuild/android-arm64@0.27.3': optional: true @@ -8638,9 +8512,6 @@ snapshots: '@esbuild/android-arm@0.25.12': optional: true - '@esbuild/android-arm@0.27.2': - optional: true - '@esbuild/android-arm@0.27.3': optional: true @@ -8656,9 +8527,6 @@ snapshots: '@esbuild/android-x64@0.25.12': optional: true - '@esbuild/android-x64@0.27.2': - optional: true - '@esbuild/android-x64@0.27.3': optional: true @@ -8674,9 +8542,6 @@ snapshots: '@esbuild/darwin-arm64@0.25.12': optional: true - '@esbuild/darwin-arm64@0.27.2': - optional: true - '@esbuild/darwin-arm64@0.27.3': optional: true @@ -8692,9 +8557,6 @@ snapshots: '@esbuild/darwin-x64@0.25.12': optional: true - '@esbuild/darwin-x64@0.27.2': - optional: true - '@esbuild/darwin-x64@0.27.3': optional: true @@ -8710,9 +8572,6 @@ snapshots: '@esbuild/freebsd-arm64@0.25.12': optional: true - '@esbuild/freebsd-arm64@0.27.2': - optional: true - '@esbuild/freebsd-arm64@0.27.3': optional: true @@ -8728,9 +8587,6 @@ 
snapshots: '@esbuild/freebsd-x64@0.25.12': optional: true - '@esbuild/freebsd-x64@0.27.2': - optional: true - '@esbuild/freebsd-x64@0.27.3': optional: true @@ -8746,9 +8602,6 @@ snapshots: '@esbuild/linux-arm64@0.25.12': optional: true - '@esbuild/linux-arm64@0.27.2': - optional: true - '@esbuild/linux-arm64@0.27.3': optional: true @@ -8764,9 +8617,6 @@ snapshots: '@esbuild/linux-arm@0.25.12': optional: true - '@esbuild/linux-arm@0.27.2': - optional: true - '@esbuild/linux-arm@0.27.3': optional: true @@ -8782,9 +8632,6 @@ snapshots: '@esbuild/linux-ia32@0.25.12': optional: true - '@esbuild/linux-ia32@0.27.2': - optional: true - '@esbuild/linux-ia32@0.27.3': optional: true @@ -8800,9 +8647,6 @@ snapshots: '@esbuild/linux-loong64@0.25.12': optional: true - '@esbuild/linux-loong64@0.27.2': - optional: true - '@esbuild/linux-loong64@0.27.3': optional: true @@ -8818,9 +8662,6 @@ snapshots: '@esbuild/linux-mips64el@0.25.12': optional: true - '@esbuild/linux-mips64el@0.27.2': - optional: true - '@esbuild/linux-mips64el@0.27.3': optional: true @@ -8836,9 +8677,6 @@ snapshots: '@esbuild/linux-ppc64@0.25.12': optional: true - '@esbuild/linux-ppc64@0.27.2': - optional: true - '@esbuild/linux-ppc64@0.27.3': optional: true @@ -8854,9 +8692,6 @@ snapshots: '@esbuild/linux-riscv64@0.25.12': optional: true - '@esbuild/linux-riscv64@0.27.2': - optional: true - '@esbuild/linux-riscv64@0.27.3': optional: true @@ -8872,9 +8707,6 @@ snapshots: '@esbuild/linux-s390x@0.25.12': optional: true - '@esbuild/linux-s390x@0.27.2': - optional: true - '@esbuild/linux-s390x@0.27.3': optional: true @@ -8890,9 +8722,6 @@ snapshots: '@esbuild/linux-x64@0.25.12': optional: true - '@esbuild/linux-x64@0.27.2': - optional: true - '@esbuild/linux-x64@0.27.3': optional: true @@ -8902,9 +8731,6 @@ snapshots: '@esbuild/netbsd-arm64@0.25.12': optional: true - '@esbuild/netbsd-arm64@0.27.2': - optional: true - '@esbuild/netbsd-arm64@0.27.3': optional: true @@ -8920,9 +8746,6 @@ snapshots: 
'@esbuild/netbsd-x64@0.25.12': optional: true - '@esbuild/netbsd-x64@0.27.2': - optional: true - '@esbuild/netbsd-x64@0.27.3': optional: true @@ -8932,9 +8755,6 @@ snapshots: '@esbuild/openbsd-arm64@0.25.12': optional: true - '@esbuild/openbsd-arm64@0.27.2': - optional: true - '@esbuild/openbsd-arm64@0.27.3': optional: true @@ -8950,9 +8770,6 @@ snapshots: '@esbuild/openbsd-x64@0.25.12': optional: true - '@esbuild/openbsd-x64@0.27.2': - optional: true - '@esbuild/openbsd-x64@0.27.3': optional: true @@ -8962,9 +8779,6 @@ snapshots: '@esbuild/openharmony-arm64@0.25.12': optional: true - '@esbuild/openharmony-arm64@0.27.2': - optional: true - '@esbuild/openharmony-arm64@0.27.3': optional: true @@ -8980,9 +8794,6 @@ snapshots: '@esbuild/sunos-x64@0.25.12': optional: true - '@esbuild/sunos-x64@0.27.2': - optional: true - '@esbuild/sunos-x64@0.27.3': optional: true @@ -8998,9 +8809,6 @@ snapshots: '@esbuild/win32-arm64@0.25.12': optional: true - '@esbuild/win32-arm64@0.27.2': - optional: true - '@esbuild/win32-arm64@0.27.3': optional: true @@ -9016,9 +8824,6 @@ snapshots: '@esbuild/win32-ia32@0.25.12': optional: true - '@esbuild/win32-ia32@0.27.2': - optional: true - '@esbuild/win32-ia32@0.27.3': optional: true @@ -9034,9 +8839,6 @@ snapshots: '@esbuild/win32-x64@0.25.12': optional: true - '@esbuild/win32-x64@0.27.2': - optional: true - '@esbuild/win32-x64@0.27.3': optional: true @@ -10310,7 +10112,7 @@ snapshots: '@types/better-sqlite3@7.6.13': dependencies: - '@types/node': 24.10.9 + '@types/node': 25.3.0 '@types/bun@1.3.10': dependencies: @@ -10378,7 +10180,7 @@ snapshots: '@types/pg@8.16.0': dependencies: - '@types/node': 24.10.9 + '@types/node': 25.3.0 pg-protocol: 1.11.0 pg-types: 2.2.0 @@ -10407,7 +10209,7 @@ snapshots: '@types/sax@1.2.7': dependencies: - '@types/node': 24.10.9 + '@types/node': 25.3.0 '@types/semver@7.7.1': {} @@ -10484,29 +10286,21 @@ snapshots: chai: 5.3.3 tinyrainbow: 2.0.0 - '@vitest/mocker@3.2.4(vite@5.4.21(@types/node@22.19.7))': + 
'@vitest/mocker@3.2.4(vite@6.4.1(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2))': dependencies: '@vitest/spy': 3.2.4 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: - vite: 5.4.21(@types/node@22.19.7) + vite: 6.4.1(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) - '@vitest/mocker@3.2.4(vite@5.4.21(@types/node@24.10.9))': + '@vitest/mocker@3.2.4(vite@6.4.1(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2))': dependencies: '@vitest/spy': 3.2.4 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: - vite: 5.4.21(@types/node@24.10.9) - - '@vitest/mocker@3.2.4(vite@5.4.21(@types/node@25.5.0))': - dependencies: - '@vitest/spy': 3.2.4 - estree-walker: 3.0.3 - magic-string: 0.30.21 - optionalDependencies: - vite: 5.4.21(@types/node@25.5.0) + vite: 6.4.1(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) '@vitest/pretty-format@3.2.4': dependencies: @@ -10534,6 +10328,8 @@ snapshots: loupe: 3.2.1 tinyrainbow: 2.0.0 + abort-controller-x@0.4.3: {} + accepts@2.0.0: dependencies: mime-types: 3.0.2 @@ -11589,35 +11385,6 @@ snapshots: '@esbuild/win32-ia32': 0.25.12 '@esbuild/win32-x64': 0.25.12 - esbuild@0.27.2: - optionalDependencies: - '@esbuild/aix-ppc64': 0.27.2 - '@esbuild/android-arm': 0.27.2 - '@esbuild/android-arm64': 0.27.2 - '@esbuild/android-x64': 0.27.2 - '@esbuild/darwin-arm64': 0.27.2 - '@esbuild/darwin-x64': 0.27.2 - '@esbuild/freebsd-arm64': 0.27.2 - '@esbuild/freebsd-x64': 0.27.2 - '@esbuild/linux-arm': 0.27.2 - '@esbuild/linux-arm64': 0.27.2 - '@esbuild/linux-ia32': 0.27.2 - '@esbuild/linux-loong64': 0.27.2 - '@esbuild/linux-mips64el': 0.27.2 - '@esbuild/linux-ppc64': 0.27.2 - '@esbuild/linux-riscv64': 0.27.2 - '@esbuild/linux-s390x': 0.27.2 - '@esbuild/linux-x64': 0.27.2 - '@esbuild/netbsd-arm64': 0.27.2 - '@esbuild/netbsd-x64': 0.27.2 - '@esbuild/openbsd-arm64': 0.27.2 - '@esbuild/openbsd-x64': 0.27.2 - '@esbuild/openharmony-arm64': 0.27.2 - '@esbuild/sunos-x64': 0.27.2 - '@esbuild/win32-arm64': 
0.27.2 - '@esbuild/win32-ia32': 0.27.2 - '@esbuild/win32-x64': 0.27.2 - esbuild@0.27.3: optionalDependencies: '@esbuild/aix-ppc64': 0.27.3 @@ -12766,7 +12533,14 @@ snapshots: pkg-types: 1.3.1 ufo: 1.6.3 - mockdate@2.0.5: {} + modal@0.7.1: + dependencies: + cbor-x: 1.6.0 + long: 5.3.2 + nice-grpc: 2.1.14 + protobufjs: 7.5.4 + smol-toml: 1.6.0 + uuid: 11.1.0 module-details-from-path@1.0.4: {} @@ -12805,6 +12579,16 @@ snapshots: neotraverse@0.6.18: {} + nice-grpc-common@2.0.2: + dependencies: + ts-error: 1.0.6 + + nice-grpc@2.1.14: + dependencies: + '@grpc/grpc-js': 1.14.3 + abort-controller-x: 0.4.3 + nice-grpc-common: 2.0.2 + nlcst-to-string@4.0.0: dependencies: '@types/nlcst': 2.0.3 @@ -14121,6 +13905,8 @@ snapshots: trough@2.2.0: {} + ts-error@1.0.6: {} + ts-interface-checker@0.1.13: {} ts-json-schema-generator@2.5.0: @@ -14171,7 +13957,7 @@ snapshots: tsx@4.21.0: dependencies: - esbuild: 0.27.2 + esbuild: 0.27.3 get-tsconfig: 4.13.0 optionalDependencies: fsevents: 2.3.3 @@ -14347,6 +14133,8 @@ snapshots: uuid@10.0.0: {} + uuid@11.1.0: {} + uuid@12.0.0: {} uuid@13.0.0: {} @@ -14437,16 +14225,7 @@ snapshots: - tsx - yaml - vite@5.4.21(@types/node@22.19.7): - dependencies: - esbuild: 0.21.5 - postcss: 8.5.6 - rollup: 4.56.0 - optionalDependencies: - '@types/node': 22.19.7 - fsevents: 2.3.3 - - vite@5.4.21(@types/node@24.10.9): + vite@5.4.21(@types/node@25.3.0): dependencies: esbuild: 0.21.5 postcss: 8.5.6 @@ -14532,7 +14311,7 @@ snapshots: dependencies: '@types/chai': 5.2.3 '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(vite@5.4.21(@types/node@22.19.7)) + '@vitest/mocker': 3.2.4(vite@6.4.1(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) '@vitest/pretty-format': 3.2.4 '@vitest/runner': 3.2.4 '@vitest/snapshot': 3.2.4 @@ -14550,7 +14329,7 @@ snapshots: tinyglobby: 0.2.15 tinypool: 1.1.1 tinyrainbow: 2.0.0 - vite: 5.4.21(@types/node@22.19.7) + vite: 6.4.1(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) vite-node: 
3.2.4(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) why-is-node-running: 2.3.0 optionalDependencies: @@ -14616,7 +14395,7 @@ snapshots: dependencies: '@types/chai': 5.2.3 '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(vite@5.4.21(@types/node@25.5.0)) + '@vitest/mocker': 3.2.4(vite@6.4.1(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) '@vitest/pretty-format': 3.2.4 '@vitest/runner': 3.2.4 '@vitest/snapshot': 3.2.4 @@ -14634,8 +14413,8 @@ snapshots: tinyglobby: 0.2.15 tinypool: 1.1.1 tinyrainbow: 2.0.0 - vite: 5.4.21(@types/node@25.5.0) - vite-node: 3.2.4(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + vite: 6.4.1(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + vite-node: 3.2.4(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) why-is-node-running: 2.3.0 optionalDependencies: '@types/debug': 4.1.12 From d850a3b77a5097431b560dc22336871589a6b461 Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Sun, 15 Mar 2026 18:52:59 -0700 Subject: [PATCH 08/48] fix: normalize Pi ACP bootstrap payloads (#227) * fix: normalize pi ACP bootstrap payloads * docs(cli): document custom pi binary override * docs(quickstart): list all supported agent IDs * docs(code): clarify Pi payload normalization rationale --- docs/cli.mdx | 33 +++++++ docs/quickstart.mdx | 2 + .../sandbox-agent/src/acp_proxy_runtime.rs | 62 +++++++++++- .../tests/v1_api/acp_transport.rs | 94 +++++++++++++++++++ 4 files changed, 190 insertions(+), 1 deletion(-) diff --git a/docs/cli.mdx b/docs/cli.mdx index a3cd839..6177fb3 100644 --- a/docs/cli.mdx +++ b/docs/cli.mdx @@ -59,6 +59,39 @@ sandbox-agent install-agent claude --reinstall sandbox-agent install-agent --all ``` +### Custom Pi implementation path + +If you use a forked/custom `pi` binary with `pi-acp`, you can override what executable gets launched. 
+ +#### Option 1: explicit command override (recommended) + +Set `PI_ACP_PI_COMMAND` in the environment where `sandbox-agent` runs: + +```bash +PI_ACP_PI_COMMAND=/absolute/path/to/your/pi-fork sandbox-agent server +``` + +This is forwarded to `pi-acp`, which uses it instead of looking up `pi` on `PATH`. + +#### Option 2: PATH override + +Put your custom `pi` first on `PATH` before starting `sandbox-agent`: + +```bash +export PATH="/path/to/custom-pi-dir:$PATH" +sandbox-agent server +``` + +#### Option 3: symlink override + +Point `pi` to your custom binary via symlink in a directory that is early on `PATH`: + +```bash +ln -sf /absolute/path/to/your/pi-fork /usr/local/bin/pi +``` + +Then start `sandbox-agent` normally. + ## opencode (experimental) Start/reuse daemon and run `opencode attach` against `/opencode`. diff --git a/docs/quickstart.mdx b/docs/quickstart.mdx index 7b5beed..caf2c21 100644 --- a/docs/quickstart.mdx +++ b/docs/quickstart.mdx @@ -217,6 +217,8 @@ icon: "rocket" + Supported agent IDs: `claude`, `codex`, `opencode`, `amp`, `pi`, `cursor`, `mock`. 
+ To preinstall agents: ```bash diff --git a/server/packages/sandbox-agent/src/acp_proxy_runtime.rs b/server/packages/sandbox-agent/src/acp_proxy_runtime.rs index 47bc2b0..3710e2f 100644 --- a/server/packages/sandbox-agent/src/acp_proxy_runtime.rs +++ b/server/packages/sandbox-agent/src/acp_proxy_runtime.rs @@ -11,7 +11,7 @@ use futures::Stream; use sandbox_agent_agent_management::agents::{AgentId, AgentManager, InstallOptions}; use sandbox_agent_error::SandboxError; use sandbox_agent_opencode_adapter::{AcpDispatch, AcpDispatchResult, AcpPayloadStream}; -use serde_json::Value; +use serde_json::{Number, Value}; use tokio::sync::{Mutex, RwLock}; const DEFAULT_REQUEST_TIMEOUT_MS: u64 = 120_000; @@ -134,6 +134,8 @@ impl AcpProxyRuntime { "acp_proxy: instance resolved" ); + let payload = normalize_payload_for_agent(instance.agent, payload); + match instance.runtime.post(payload).await { Ok(PostOutcome::Response(value)) => { let total_ms = start.elapsed().as_millis() as u64; @@ -510,6 +512,64 @@ fn map_adapter_error(err: AdapterError, agent: Option) -> SandboxError } } +fn normalize_payload_for_agent(agent: AgentId, payload: Value) -> Value { + if agent != AgentId::Pi { + return payload; + } + + // Pi's ACP adapter is stricter than other adapters for a couple of bootstrap + // fields. Normalize here so older/raw ACP clients still work against Pi. + normalize_pi_payload(payload) +} + +fn normalize_pi_payload(mut payload: Value) -> Value { + let method = payload + .get("method") + .and_then(Value::as_str) + .unwrap_or_default(); + + match method { + "initialize" => { + // Some clients send ACP protocolVersion as a string ("1.0"), but + // pi-acp expects a numeric JSON value and rejects strings. 
+ if let Some(protocol) = payload.pointer_mut("/params/protocolVersion") { + if let Some(raw) = protocol.as_str() { + if let Some(number) = parse_json_number(raw) { + *protocol = Value::Number(number); + } + } + } + } + "session/new" => { + // The TypeScript SDK and opencode adapter already send mcpServers: [], + // but raw /v1/acp callers may omit it. pi-acp currently validates + // mcpServers as required, so default it here for compatibility. + if let Some(params) = payload.get_mut("params").and_then(Value::as_object_mut) { + params + .entry("mcpServers".to_string()) + .or_insert_with(|| Value::Array(Vec::new())); + } + } + _ => {} + } + + payload +} + +fn parse_json_number(raw: &str) -> Option { + let trimmed = raw.trim(); + + if let Ok(unsigned) = trimmed.parse::() { + return Some(Number::from(unsigned)); + } + + if let Ok(signed) = trimmed.parse::() { + return Some(Number::from(signed)); + } + + trimmed.parse::().ok().and_then(Number::from_f64) +} + /// Inspect JSON-RPC error responses from agent processes and add helpful hints /// when we can infer the root cause from a known error pattern. fn annotate_agent_error(agent: AgentId, mut value: Value) -> Value { diff --git a/server/packages/sandbox-agent/tests/v1_api/acp_transport.rs b/server/packages/sandbox-agent/tests/v1_api/acp_transport.rs index 46b0198..147d844 100644 --- a/server/packages/sandbox-agent/tests/v1_api/acp_transport.rs +++ b/server/packages/sandbox-agent/tests/v1_api/acp_transport.rs @@ -33,6 +33,51 @@ done write_executable(path, &script); } +fn write_strict_pi_agent_process(path: &Path) { + // This stub intentionally mirrors the strict bootstrap validation behavior + // observed in pi-acp: + // - initialize.params.protocolVersion must be numeric + // - session/new.params.mcpServers must be present (array) + // + // The proxy normalization layer should adapt legacy/raw client payloads so + // requests still succeed against this stricter contract. 
+ let script = r#"#!/usr/bin/env sh +if [ "${1:-}" = "--help" ] || [ "${1:-}" = "--version" ] || [ "${1:-}" = "version" ] || [ "${1:-}" = "-V" ]; then + echo "pi-agent-process 0.0.1" + exit 0 +fi + +while IFS= read -r line; do + method=$(printf '%s\n' "$line" | sed -n 's/.*"method"[[:space:]]*:[[:space:]]*"\([^"]*\)".*/\1/p') + id=$(printf '%s\n' "$line" | sed -n 's/.*"id"[[:space:]]*:[[:space:]]*\([^,}]*\).*/\1/p') + + if [ "$method" = "initialize" ] && [ -n "$id" ]; then + if printf '%s\n' "$line" | grep -Eq '"protocolVersion"[[:space:]]*:[[:space:]]*"'; then + printf '{"jsonrpc":"2.0","id":%s,"error":{"code":-32603,"message":"Internal error","data":[{"expected":"number","code":"invalid_type","path":["protocolVersion"],"message":"Invalid input: expected number, received string"}]}}\n' "$id" + else + printf '{"jsonrpc":"2.0","id":%s,"result":{"ok":true,"echoedMethod":"initialize"}}\n' "$id" + fi + continue + fi + + if [ "$method" = "session/new" ] && [ -n "$id" ]; then + if printf '%s\n' "$line" | grep -Eq '"mcpServers"[[:space:]]*:[[:space:]]*\['; then + printf '{"jsonrpc":"2.0","id":%s,"result":{"sessionId":"pi-session","echoedMethod":"session/new"}}\n' "$id" + else + printf '{"jsonrpc":"2.0","id":%s,"error":{"code":-32603,"message":"Internal error","data":[{"expected":"array","code":"invalid_type","path":["mcpServers"],"message":"Invalid input: expected array, received undefined"}]}}\n' "$id" + fi + continue + fi + + if [ -n "$method" ] && [ -n "$id" ]; then + printf '{"jsonrpc":"2.0","id":%s,"result":{"ok":true,"echoedMethod":"%s"}}\n' "$id" "$method" + fi +done +"#; + + write_executable(path, script); +} + fn setup_stub_artifacts(install_dir: &Path, agent: &str) { let native = install_dir.join(agent); write_stub_native(&native, agent); @@ -47,6 +92,17 @@ fn setup_stub_artifacts(install_dir: &Path, agent: &str) { write_stub_agent_process(&launcher, agent); } +fn setup_strict_pi_agent_process_only(install_dir: &Path) { + let agent_processes = 
install_dir.join("agent_processes"); + fs::create_dir_all(&agent_processes).expect("create agent processes dir"); + let launcher = if cfg!(windows) { + agent_processes.join("pi-acp.cmd") + } else { + agent_processes.join("pi-acp") + }; + write_strict_pi_agent_process(&launcher); +} + #[tokio::test] async fn acp_bootstrap_requires_agent_query() { let test_app = TestApp::new(AuthConfig::disabled()); @@ -115,6 +171,44 @@ async fn acp_round_trip_and_replay() { assert!(second_event_id > first_event_id); } +#[cfg(unix)] +#[tokio::test] +async fn pi_initialize_and_session_new_are_normalized() { + let test_app = TestApp::with_setup(AuthConfig::disabled(), |install_dir| { + setup_strict_pi_agent_process_only(install_dir); + }); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/acp/server-pi?agent=pi", + Some(initialize_payload()), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + assert_eq!(parse_json(&body)["result"]["echoedMethod"], "initialize"); + + let session_new = json!({ + "jsonrpc": "2.0", + "id": 2, + "method": "session/new", + "params": { + "cwd": "/tmp" + } + }); + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/acp/server-pi", + Some(session_new), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + assert_eq!(parse_json(&body)["result"]["echoedMethod"], "session/new"); +} + #[cfg(unix)] #[tokio::test] async fn acp_agent_mismatch_returns_conflict() { From 3426cbc6ec9c94d026a55883768b02b4ff225b80 Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Sun, 15 Mar 2026 19:46:26 -0700 Subject: [PATCH 09/48] chore: update ACP SDK to 0.16.1 and add e2e testing guidance (#259) - Bump @agentclientprotocol/sdk from 0.14.1 to 0.16.1 in acp-http-client - Update adapters.json to reflect new SDK version - Migrate unstableListSessions to listSessions (stabilized in SDK 0.16.0) - Add CLAUDE.md guidance: request token location before e2e agent testing All 5 ACP adapters remain at their latest 
versions. E2E testing confirms Claude, Codex, Pi, and Cursor agents work end-to-end with credentials. Co-authored-by: Claude Haiku 4.5 --- CLAUDE.md | 13 ++ pnpm-lock.yaml | 203 ++++++++++-------- sdks/acp-http-client/package.json | 2 +- sdks/acp-http-client/src/index.ts | 4 +- .../packages/agent-management/src/agents.rs | 141 ++++++------ 5 files changed, 184 insertions(+), 179 deletions(-) diff --git a/CLAUDE.md b/CLAUDE.md index 624602a..d7e091b 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -22,6 +22,19 @@ - `server/packages/sandbox-agent/src/cli.rs` - Keep docs aligned to implemented endpoints/commands only (for example ACP under `/v1/acp`, not legacy `/v1/sessions` APIs). +## E2E Agent Testing + +- When asked to test agents e2e and you do not have the API tokens/credentials required, always stop and ask the user where to find the tokens before proceeding. + +## ACP Adapter Audit + +- `scripts/audit-acp-deps/adapters.json` is the single source of truth for ACP adapter npm packages, pinned versions, and the `@agentclientprotocol/sdk` pin. +- The Rust fallback install path in `server/packages/agent-management/src/agents.rs` reads adapter entries from `adapters.json` at compile time via `include_str!`. +- Run `cd scripts/audit-acp-deps && npx tsx audit.ts` to compare our pinned versions against the ACP registry and npm latest. +- When bumping an adapter version, update `adapters.json` only — the Rust code picks it up automatically. +- When adding a new agent, add an entry to `adapters.json` (the `_` fallback arm in `install_agent_process_fallback` handles it). +- When updating the `@agentclientprotocol/sdk` pin, update both `adapters.json` (sdkDeps) and `sdks/acp-http-client/package.json`. + ## Change Tracking - If the user asks to "push" changes, treat that as permission to commit and push all current workspace changes, not a hand-picked subset, unless the user explicitly scopes the push. 
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 14f8572..8396837 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -73,7 +73,7 @@ importers: devDependencies: '@cloudflare/workers-types': specifier: latest - version: 4.20260313.1 + version: 4.20260316.1 '@types/node': specifier: latest version: 25.5.0 @@ -97,7 +97,7 @@ importers: version: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) wrangler: specifier: latest - version: 4.73.0(@cloudflare/workers-types@4.20260313.1) + version: 4.73.0(@cloudflare/workers-types@4.20260316.1) examples/computesdk: dependencies: @@ -277,6 +277,31 @@ importers: specifier: latest version: 5.9.3 + examples/modal: + dependencies: + '@sandbox-agent/example-shared': + specifier: workspace:* + version: link:../shared + modal: + specifier: latest + version: 0.7.3 + sandbox-agent: + specifier: workspace:* + version: link:../../sdks/typescript + devDependencies: + '@types/node': + specifier: latest + version: 25.5.0 + tsx: + specifier: latest + version: 4.21.0 + typescript: + specifier: latest + version: 5.9.3 + vitest: + specifier: ^3.0.0 + version: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + examples/permissions: dependencies: commander: @@ -296,31 +321,6 @@ importers: specifier: latest version: 5.9.3 - examples/modal: - dependencies: - '@sandbox-agent/example-shared': - specifier: workspace:* - version: link:../shared - modal: - specifier: latest - version: 0.7.1 - sandbox-agent: - specifier: workspace:* - version: link:../../sdks/typescript - devDependencies: - '@types/node': - specifier: latest - version: 25.3.0 - tsx: - specifier: latest - version: 4.21.0 - typescript: - specifier: latest - version: 5.9.3 - vitest: - specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) - examples/persist-memory: dependencies: '@sandbox-agent/example-shared': @@ -497,7 +497,7 @@ importers: version: 
link:../../../sdks/persist-rivet better-auth: specifier: ^1.5.5 - version: 1.5.5(@cloudflare/workers-types@4.20260313.1)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(pg@8.20.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(solid-js@1.9.11)(vitest@3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) + version: 1.5.5(@cloudflare/workers-types@4.20260316.1)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(pg@8.20.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(solid-js@1.9.11)(vitest@3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) dockerode: specifier: ^4.0.9 version: 4.0.9 @@ -506,7 +506,7 @@ importers: version: 0.31.9 drizzle-orm: specifier: ^0.44.5 - version: 0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) + version: 0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) hono: specifier: ^4.11.9 version: 4.12.2 @@ -515,7 +515,7 @@ importers: version: 10.3.1 rivetkit: specifier: https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a - version: 
https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a(@e2b/code-interpreter@2.3.3)(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(dockerode@4.0.9)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0) + version: https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a(@e2b/code-interpreter@2.3.3)(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(dockerode@4.0.9)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0) sandbox-agent: specifier: workspace:* version: link:../../../sdks/typescript @@ -546,7 +546,7 @@ importers: version: 19.2.4 rivetkit: specifier: 2.1.6 - version: 2.1.6(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0) + version: 2.1.6(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0) sandbox-agent: specifier: workspace:* version: link:../../../sdks/typescript @@ -796,8 +796,8 @@ importers: sdks/acp-http-client: dependencies: 
'@agentclientprotocol/sdk': - specifier: ^0.14.1 - version: 0.14.1(zod@4.3.6) + specifier: ^0.16.1 + version: 0.16.1(zod@4.3.6) devDependencies: '@types/node': specifier: ^22.0.0 @@ -1070,6 +1070,11 @@ packages: peerDependencies: zod: ^3.25.0 || ^4.0.0 + '@agentclientprotocol/sdk@0.16.1': + resolution: {integrity: sha512-1ad+Sc/0sCtZGHthxxvgEUo5Wsbw16I+aF+YwdiLnPwkZG8KAGUEAPK6LM6Pf69lCyJPt1Aomk1d+8oE3C4ZEw==} + peerDependencies: + zod: ^3.25.0 || ^4.0.0 + '@alloc/quick-lru@5.2.0': resolution: {integrity: sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==} engines: {node: '>=10'} @@ -1661,8 +1666,8 @@ packages: cpu: [x64] os: [win32] - '@cloudflare/workers-types@4.20260313.1': - resolution: {integrity: sha512-jMEeX3RKfOSVqqXRKr/ulgglcTloeMzSH3FdzIfqJHtvc12/ELKd5Ldsg8ZHahKX/4eRxYdw3kbzb8jLXbq/jQ==} + '@cloudflare/workers-types@4.20260316.1': + resolution: {integrity: sha512-HUZ+vQD8/1A4Fz/8WAlzYWcS5W5u3Nu7Dv9adkIkmLfeKqMIRn01vc4nSUBar60KkmohyQHkPi8jtWV/zazvAg==} '@computesdk/cmd@0.4.1': resolution: {integrity: sha512-hhcYrwMnOpRSwWma3gkUeAVsDFG56nURwSaQx8vCepv0IuUv39bK4mMkgszolnUQrVjBDdW7b3lV+l5B2S8fRA==} @@ -5584,8 +5589,11 @@ packages: mlly@1.8.0: resolution: {integrity: sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g==} - modal@0.7.1: - resolution: {integrity: sha512-WFn5mfVD7BbdNytqDODjKXG+RkF4bubTKiu7gZvq/JITcLIU1JWYnZQSJ41cE1TlrBlxFADSx8d7Q2AXF1GT+A==} + mockdate@2.0.5: + resolution: {integrity: sha512-ST0PnThzWKcgSLyc+ugLVql45PvESt3Ul/wrdV/OPc/6Pr8dbLAIJsN1cIp41FLzbN+srVTNIRn+5Cju0nyV6A==} + + modal@0.7.3: + resolution: {integrity: sha512-4CliqNF15sZPBGpSoCj5Y9fd8fTp1ONrBLIJiC4amm/Qzc1rn8CH45SVzSu+1DokHCIRiZqQ1xMhRKpDvDCkBw==} module-details-from-path@1.0.4: resolution: {integrity: sha512-EGWKgxALGMgzvxYF1UyGTy0HXX/2vHLkw6+NvDKW2jypWbHpjQuj4UMcqQWXHERJhVGKikolT06G3bcKe4fi7w==} @@ -7400,6 +7408,10 @@ snapshots: dependencies: zod: 4.3.6 + 
'@agentclientprotocol/sdk@0.16.1(zod@4.3.6)': + dependencies: + zod: 4.3.6 + '@alloc/quick-lru@5.2.0': {} '@antfu/ni@0.23.2': {} @@ -8228,7 +8240,7 @@ snapshots: '@balena/dockerignore@1.0.2': {} - '@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1)': + '@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1)': dependencies: '@better-auth/utils': 0.3.1 '@better-fetch/fetch': 1.1.21 @@ -8239,39 +8251,39 @@ snapshots: nanostores: 1.1.1 zod: 4.3.6 optionalDependencies: - '@cloudflare/workers-types': 4.20260313.1 + '@cloudflare/workers-types': 4.20260316.1 - '@better-auth/drizzle-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))': + '@better-auth/drizzle-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))': dependencies: - '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) + '@better-auth/core': 
1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) '@better-auth/utils': 0.3.1 optionalDependencies: - drizzle-orm: 0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) + drizzle-orm: 0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) - '@better-auth/kysely-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(kysely@0.28.11)': + '@better-auth/kysely-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(kysely@0.28.11)': dependencies: - '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) + '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) '@better-auth/utils': 0.3.1 kysely: 0.28.11 - '@better-auth/memory-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)': + 
'@better-auth/memory-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)': dependencies: - '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) + '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) '@better-auth/utils': 0.3.1 - '@better-auth/mongo-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)': + '@better-auth/mongo-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)': dependencies: - '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) + '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) '@better-auth/utils': 0.3.1 - '@better-auth/prisma-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)': + 
'@better-auth/prisma-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)': dependencies: - '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) + '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) '@better-auth/utils': 0.3.1 - '@better-auth/telemetry@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))': + '@better-auth/telemetry@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))': dependencies: - '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) + '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) '@better-auth/utils': 0.3.1 '@better-fetch/fetch': 1.1.21 @@ -8381,7 +8393,7 @@ snapshots: '@cloudflare/workerd-windows-64@1.20260312.1': optional: true - '@cloudflare/workers-types@4.20260313.1': {} + '@cloudflare/workers-types@4.20260316.1': {} '@computesdk/cmd@0.4.1': {} @@ -10112,7 +10124,7 @@ snapshots: '@types/better-sqlite3@7.6.13': dependencies: - '@types/node': 25.3.0 + '@types/node': 24.10.9 '@types/bun@1.3.10': 
dependencies: @@ -10180,7 +10192,7 @@ snapshots: '@types/pg@8.16.0': dependencies: - '@types/node': 25.3.0 + '@types/node': 24.10.9 pg-protocol: 1.11.0 pg-types: 2.2.0 @@ -10209,7 +10221,7 @@ snapshots: '@types/sax@1.2.7': dependencies: - '@types/node': 25.3.0 + '@types/node': 24.10.9 '@types/semver@7.7.1': {} @@ -10286,21 +10298,29 @@ snapshots: chai: 5.3.3 tinyrainbow: 2.0.0 - '@vitest/mocker@3.2.4(vite@6.4.1(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2))': + '@vitest/mocker@3.2.4(vite@7.3.1(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2))': dependencies: '@vitest/spy': 3.2.4 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: - vite: 6.4.1(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + vite: 7.3.1(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) - '@vitest/mocker@3.2.4(vite@6.4.1(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2))': + '@vitest/mocker@3.2.4(vite@7.3.1(@types/node@24.10.9)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2))': dependencies: '@vitest/spy': 3.2.4 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: - vite: 6.4.1(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + vite: 7.3.1(@types/node@24.10.9)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + + '@vitest/mocker@3.2.4(vite@7.3.1(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2))': + dependencies: + '@vitest/spy': 3.2.4 + estree-walker: 3.0.3 + magic-string: 0.30.21 + optionalDependencies: + vite: 7.3.1(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) '@vitest/pretty-format@3.2.4': dependencies: @@ -10593,15 +10613,15 @@ snapshots: dependencies: tweetnacl: 0.14.5 - 
better-auth@1.5.5(@cloudflare/workers-types@4.20260313.1)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(pg@8.20.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(solid-js@1.9.11)(vitest@3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)): + better-auth@1.5.5(@cloudflare/workers-types@4.20260316.1)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(pg@8.20.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(solid-js@1.9.11)(vitest@3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)): dependencies: - '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) - '@better-auth/drizzle-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0)) - '@better-auth/kysely-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(kysely@0.28.11) - '@better-auth/memory-adapter': 
1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1) - '@better-auth/mongo-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1) - '@better-auth/prisma-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1) - '@better-auth/telemetry': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1)) + '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) + '@better-auth/drizzle-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0)) + '@better-auth/kysely-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(kysely@0.28.11) + '@better-auth/memory-adapter': 
1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1) + '@better-auth/mongo-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1) + '@better-auth/prisma-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1) + '@better-auth/telemetry': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1)) '@better-auth/utils': 0.3.1 '@better-fetch/fetch': 1.1.21 '@noble/ciphers': 2.1.1 @@ -10614,7 +10634,7 @@ snapshots: zod: 4.3.6 optionalDependencies: drizzle-kit: 0.31.9 - drizzle-orm: 0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) + drizzle-orm: 0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) pg: 8.20.0 react: 19.2.4 react-dom: 19.2.4(react@19.2.4) @@ -11223,9 +11243,9 @@ snapshots: transitivePeerDependencies: - supports-color - drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0): + 
drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0): optionalDependencies: - '@cloudflare/workers-types': 4.20260313.1 + '@cloudflare/workers-types': 4.20260316.1 '@opentelemetry/api': 1.9.0 '@types/better-sqlite3': 7.6.13 '@types/pg': 8.18.0 @@ -12533,9 +12553,11 @@ snapshots: pkg-types: 1.3.1 ufo: 1.6.3 - modal@0.7.1: + mockdate@2.0.5: {} + + modal@0.7.3: dependencies: - cbor-x: 1.6.0 + cbor-x: 1.6.3 long: 5.3.2 nice-grpc: 2.1.14 protobufjs: 7.5.4 @@ -13332,7 +13354,7 @@ snapshots: reusify@1.1.0: {} - rivetkit@2.1.6(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0): + rivetkit@2.1.6(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0): dependencies: '@hono/standard-validator': 0.1.5(@standard-schema/spec@1.1.0)(hono@4.12.2) '@hono/zod-openapi': 1.2.2(hono@4.12.2)(zod@4.3.6) @@ -13360,14 +13382,14 @@ snapshots: '@hono/node-server': 1.19.9(hono@4.12.2) '@hono/node-ws': 1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2) drizzle-kit: 0.31.9 - drizzle-orm: 0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) + drizzle-orm: 
0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) ws: 8.19.0 transitivePeerDependencies: - '@standard-schema/spec' - bufferutil - utf-8-validate - rivetkit@https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a(@e2b/code-interpreter@2.3.3)(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(dockerode@4.0.9)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0): + rivetkit@https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a(@e2b/code-interpreter@2.3.3)(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(dockerode@4.0.9)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0): dependencies: '@hono/standard-validator': 0.1.5(@standard-schema/spec@1.1.0)(hono@4.12.2) '@hono/zod-openapi': 1.2.2(hono@4.12.2)(zod@4.3.6) @@ -13398,7 +13420,7 @@ snapshots: '@hono/node-ws': 1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2) dockerode: 4.0.9 drizzle-kit: 0.31.9 - drizzle-orm: 0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) + drizzle-orm: 0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) ws: 8.19.0 transitivePeerDependencies: - '@standard-schema/spec' @@ -14225,15 +14247,6 @@ 
snapshots: - tsx - yaml - vite@5.4.21(@types/node@25.3.0): - dependencies: - esbuild: 0.21.5 - postcss: 8.5.6 - rollup: 4.56.0 - optionalDependencies: - '@types/node': 24.10.9 - fsevents: 2.3.3 - vite@5.4.21(@types/node@25.5.0): dependencies: esbuild: 0.21.5 @@ -14311,7 +14324,7 @@ snapshots: dependencies: '@types/chai': 5.2.3 '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(vite@6.4.1(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) + '@vitest/mocker': 3.2.4(vite@7.3.1(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) '@vitest/pretty-format': 3.2.4 '@vitest/runner': 3.2.4 '@vitest/snapshot': 3.2.4 @@ -14329,7 +14342,7 @@ snapshots: tinyglobby: 0.2.15 tinypool: 1.1.1 tinyrainbow: 2.0.0 - vite: 6.4.1(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + vite: 7.3.1(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) vite-node: 3.2.4(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) why-is-node-running: 2.3.0 optionalDependencies: @@ -14353,7 +14366,7 @@ snapshots: dependencies: '@types/chai': 5.2.3 '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(vite@5.4.21(@types/node@24.10.9)) + '@vitest/mocker': 3.2.4(vite@7.3.1(@types/node@24.10.9)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) '@vitest/pretty-format': 3.2.4 '@vitest/runner': 3.2.4 '@vitest/snapshot': 3.2.4 @@ -14371,7 +14384,7 @@ snapshots: tinyglobby: 0.2.15 tinypool: 1.1.1 tinyrainbow: 2.0.0 - vite: 5.4.21(@types/node@24.10.9) + vite: 7.3.1(@types/node@24.10.9)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) vite-node: 3.2.4(@types/node@24.10.9)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) why-is-node-running: 2.3.0 optionalDependencies: @@ -14395,7 +14408,7 @@ snapshots: dependencies: '@types/chai': 5.2.3 '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(vite@6.4.1(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) + '@vitest/mocker': 3.2.4(vite@7.3.1(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) '@vitest/pretty-format': 3.2.4 '@vitest/runner': 3.2.4 '@vitest/snapshot': 3.2.4 
@@ -14413,8 +14426,8 @@ snapshots: tinyglobby: 0.2.15 tinypool: 1.1.1 tinyrainbow: 2.0.0 - vite: 6.4.1(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) - vite-node: 3.2.4(@types/node@25.3.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + vite: 7.3.1(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + vite-node: 3.2.4(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) why-is-node-running: 2.3.0 optionalDependencies: '@types/debug': 4.1.12 @@ -14472,7 +14485,7 @@ snapshots: '@cloudflare/workerd-linux-arm64': 1.20260312.1 '@cloudflare/workerd-windows-64': 1.20260312.1 - wrangler@4.73.0(@cloudflare/workers-types@4.20260313.1): + wrangler@4.73.0(@cloudflare/workers-types@4.20260316.1): dependencies: '@cloudflare/kv-asset-handler': 0.4.2 '@cloudflare/unenv-preset': 2.15.0(unenv@2.0.0-rc.24)(workerd@1.20260312.1) @@ -14483,7 +14496,7 @@ snapshots: unenv: 2.0.0-rc.24 workerd: 1.20260312.1 optionalDependencies: - '@cloudflare/workers-types': 4.20260313.1 + '@cloudflare/workers-types': 4.20260316.1 fsevents: 2.3.3 transitivePeerDependencies: - bufferutil diff --git a/sdks/acp-http-client/package.json b/sdks/acp-http-client/package.json index 007d62e..3e64d8f 100644 --- a/sdks/acp-http-client/package.json +++ b/sdks/acp-http-client/package.json @@ -17,7 +17,7 @@ } }, "dependencies": { - "@agentclientprotocol/sdk": "^0.14.1" + "@agentclientprotocol/sdk": "^0.16.1" }, "files": [ "dist" diff --git a/sdks/acp-http-client/src/index.ts b/sdks/acp-http-client/src/index.ts index 0e8f201..f8ac668 100644 --- a/sdks/acp-http-client/src/index.ts +++ b/sdks/acp-http-client/src/index.ts @@ -199,8 +199,8 @@ export class AcpHttpClient { return wrapRpc(this.connection.setSessionConfigOption(request)); } - async unstableListSessions(request: ListSessionsRequest): Promise { - return wrapRpc(this.connection.unstable_listSessions(request)); + async listSessions(request: ListSessionsRequest): Promise { + return wrapRpc(this.connection.listSessions(request)); } async 
unstableForkSession(request: ForkSessionRequest): Promise { diff --git a/server/packages/agent-management/src/agents.rs b/server/packages/agent-management/src/agents.rs index 0796926..1ea051c 100644 --- a/server/packages/agent-management/src/agents.rs +++ b/server/packages/agent-management/src/agents.rs @@ -15,6 +15,8 @@ use url::Url; const DEFAULT_ACP_REGISTRY_URL: &str = "https://cdn.agentclientprotocol.com/registry/v1/latest/registry.json"; +const ADAPTERS_JSON: &str = include_str!("../../../../scripts/audit-acp-deps/adapters.json"); + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] #[serde(rename_all = "lowercase")] pub enum AgentId { @@ -818,40 +820,6 @@ impl AgentManager { ) -> Result { let started = Instant::now(); let artifact = match agent { - AgentId::Claude => { - let package = fallback_npx_package( - "@zed-industries/claude-agent-acp", - options.agent_process_version.as_deref(), - ); - self.install_npm_agent_process_package( - agent, - &package, - &[], - &HashMap::new(), - InstallSource::Fallback, - options - .agent_process_version - .clone() - .or(extract_npx_version(&package)), - )? - } - AgentId::Codex => { - let package = fallback_npx_package( - "@zed-industries/codex-acp", - options.agent_process_version.as_deref(), - ); - self.install_npm_agent_process_package( - agent, - &package, - &[], - &HashMap::new(), - InstallSource::Fallback, - options - .agent_process_version - .clone() - .or(extract_npx_version(&package)), - )? 
- } AgentId::Opencode => { let launcher = self.agent_process_path(agent); let native = self.resolve_binary(agent)?; @@ -869,53 +837,6 @@ impl AgentManager { source: InstallSource::Fallback, } } - AgentId::Amp => { - let package = - fallback_npx_package("amp-acp", options.agent_process_version.as_deref()); - self.install_npm_agent_process_package( - agent, - &package, - &[], - &HashMap::new(), - InstallSource::Fallback, - options - .agent_process_version - .clone() - .or(extract_npx_version(&package)), - )? - } - AgentId::Pi => { - let package = - fallback_npx_package("pi-acp", options.agent_process_version.as_deref()); - self.install_npm_agent_process_package( - agent, - &package, - &[], - &HashMap::new(), - InstallSource::Fallback, - options - .agent_process_version - .clone() - .or(extract_npx_version(&package)), - )? - } - AgentId::Cursor => { - let package = fallback_npx_package( - "@blowmage/cursor-agent-acp", - options.agent_process_version.as_deref(), - ); - self.install_npm_agent_process_package( - agent, - &package, - &[], - &HashMap::new(), - InstallSource::Fallback, - options - .agent_process_version - .clone() - .or(extract_npx_version(&package)), - )? - } AgentId::Mock => { let launcher = self.agent_process_path(agent); write_mock_agent_process_launcher(&launcher)?; @@ -926,6 +847,30 @@ impl AgentManager { source: InstallSource::Fallback, } } + _ => { + let (npm_package, pinned_version) = + adapter_entry(agent.as_str()).ok_or_else(|| { + AgentError::ExtractFailed(format!( + "no adapter entry in adapters.json for agent: {agent}" + )) + })?; + let version = options + .agent_process_version + .as_deref() + .or(Some(pinned_version)); + let package = fallback_npx_package(npm_package, version); + self.install_npm_agent_process_package( + agent, + &package, + &[], + &HashMap::new(), + InstallSource::Fallback, + options + .agent_process_version + .clone() + .or(extract_npx_version(&package)), + )? 
+ } }; tracing::info!( @@ -1018,6 +963,40 @@ pub enum AgentError { MissingNpm { agent: AgentId }, } +/// Looks up the pinned adapter entry from `adapters.json` for the given agent ID. +/// Returns `(npm_package, pinned_version)`. +fn adapter_entry(agent_id: &str) -> Option<(&'static str, &'static str)> { + use std::sync::OnceLock; + + #[derive(Deserialize)] + struct AdaptersConfig { + adapters: Vec, + } + #[derive(Deserialize)] + #[serde(rename_all = "camelCase")] + struct AdapterEntry { + agent_id: String, + npm_package: String, + pinned_version: String, + } + + static PARSED: OnceLock> = OnceLock::new(); + let entries = PARSED.get_or_init(|| { + let config: AdaptersConfig = + serde_json::from_str(ADAPTERS_JSON).expect("adapters.json is valid"); + config + .adapters + .into_iter() + .map(|e| (e.agent_id, e.npm_package, e.pinned_version)) + .collect() + }); + + entries + .iter() + .find(|(id, _, _)| id == agent_id) + .map(|(_, pkg, ver)| (pkg.as_str(), ver.as_str())) +} + fn fallback_npx_package(base: &str, version: Option<&str>) -> String { match version { Some(version) => format!("{base}@{version}"), From cf7e2a92c67d40952a654c0249c643400c297e9d Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Sun, 15 Mar 2026 20:29:28 -0700 Subject: [PATCH 10/48] SDK: Add ensureServer() for automatic server recovery (#260) * SDK sandbox provisioning: built-in providers, docs restructure, and quickstart overhaul - Add built-in sandbox providers (local, docker, e2b, daytona, vercel, cloudflare) to the TypeScript SDK so users import directly instead of passing client instances - Restructure docs: rename architecture to orchestration-architecture, add new architecture page for server overview, improve getting started flow - Rewrite quickstart to be TypeScript-first with provider CodeGroup and custom provider accordion - Update all examples to use new provider APIs - Update persist drivers and foundry for new SDK surface Co-Authored-By: Claude Opus 4.6 * Fix SDK typecheck errors 
and update persist drivers for insertEvent signature - Fix insertEvent call in client.ts to pass sessionId as first argument - Update Daytona provider create options to use Partial type (image has default) - Update StrictUniqueSessionPersistDriver in tests to match new insertEvent signature - Sync persist packages, openapi spec, and docs with upstream changes Co-Authored-By: Claude Opus 4.6 * Add Modal and ComputeSDK built-in providers, update examples and docs - Add `sandbox-agent/modal` provider using Modal SDK with node:22-slim image - Add `sandbox-agent/computesdk` provider using ComputeSDK's unified sandbox API - Update Modal and ComputeSDK examples to use new SDK providers - Update Modal and ComputeSDK deploy docs with provider-based examples - Add Modal to quickstart CodeGroup and docs.json navigation - Add provider test entries for Modal and ComputeSDK - Remove old standalone example files (modal.ts, computesdk.ts) Co-Authored-By: Claude Opus 4.6 * Fix Modal provider: pre-install agents in image, fire-and-forget exec for server - Pre-install agents in Dockerfile commands so they are cached across creates - Use fire-and-forget exec (no wait) to keep server alive in Modal sandbox - Add memoryMiB option (default 2GB) to avoid OOM during agent install Co-Authored-By: Claude Opus 4.6 * Sync upstream changes: multiplayer docs, logos, openapi spec, foundry config Co-Authored-By: Claude Opus 4.6 * SDK: Add ensureServer() for automatic server recovery Add ensureServer() to SandboxProvider interface to handle cases where the sandbox-agent server stops or goes to sleep. The SDK now calls this method after 3 consecutive health-check failures, allowing providers to restart the server if needed. Most built-in providers (E2B, Daytona, Vercel, Modal, ComputeSDK) implement this. Docker and Cloudflare manage server lifecycle differently, and Local uses managed child processes. 
Also update docs for quickstart, architecture, multiplayer, and session persistence; mark persist-* packages as deprecated; and add ensureServer implementations to all applicable providers. Co-Authored-By: Claude Haiku 4.5 * wip --------- Co-authored-by: Claude Opus 4.6 --- CLAUDE.md | 36 +- docker/release/linux-aarch64.Dockerfile | 5 +- docker/release/linux-x86_64.Dockerfile | 5 +- docker/release/macos-aarch64.Dockerfile | 5 +- docker/release/macos-x86_64.Dockerfile | 5 +- docker/release/windows.Dockerfile | 5 +- docker/runtime/Dockerfile | 3 - docker/runtime/Dockerfile.full | 3 - docs/agent-capabilities.mdx | 127 ---- docs/agent-sessions.mdx | 5 +- docs/agents/amp.mdx | 20 + docs/agents/claude.mdx | 49 ++ docs/agents/codex.mdx | 20 + docs/agents/cursor.mdx | 34 ++ docs/agents/opencode.mdx | 31 + docs/agents/pi.mdx | 20 + docs/architecture.mdx | 85 ++- docs/cli.mdx | 2 +- docs/custom-tools.mdx | 8 +- docs/deploy/cloudflare.mdx | 34 +- docs/deploy/computesdk.mdx | 212 ++----- docs/deploy/daytona.mdx | 47 +- docs/deploy/docker.mdx | 50 +- docs/deploy/e2b.mdx | 45 +- docs/deploy/local.mdx | 23 +- docs/deploy/modal.mdx | 109 +--- docs/deploy/vercel.mdx | 76 +-- docs/docs.json | 22 +- docs/mcp-config.mdx | 4 +- docs/multiplayer.mdx | 38 +- docs/orchestration-architecture.mdx | 43 ++ docs/quickstart.mdx | 549 +++++++++++------- docs/sdk-overview.mdx | 45 +- docs/security.mdx | 4 +- docs/session-persistence.mdx | 104 +--- docs/skills-config.mdx | 4 +- examples/boxlite/src/index.ts | 2 +- examples/cloudflare/tests/cloudflare.test.ts | 154 +++++ .../cloudflare}/vitest.config.ts | 2 +- examples/computesdk/package.json | 2 +- examples/computesdk/src/computesdk.ts | 151 ----- examples/computesdk/src/index.ts | 30 + examples/computesdk/tests/computesdk.test.ts | 25 +- examples/daytona/src/index.ts | 49 +- examples/docker/package.json | 2 +- examples/docker/src/index.ts | 80 +-- examples/docker/tests/docker.test.ts | 39 +- examples/e2b/src/index.ts | 51 +- 
examples/file-system/src/index.ts | 2 +- examples/modal/package.json | 2 +- examples/modal/src/index.ts | 30 + examples/modal/src/modal.ts | 123 ---- examples/modal/tests/modal.test.ts | 25 +- examples/permissions/src/index.ts | 11 +- examples/persist-postgres/package.json | 1 - examples/persist-postgres/src/index.ts | 2 +- examples/persist-postgres/src/persist.ts | 336 +++++++++++ examples/persist-sqlite/package.json | 3 +- examples/persist-sqlite/src/index.ts | 2 +- examples/persist-sqlite/src/persist.ts | 310 ++++++++++ examples/shared/src/docker.ts | 21 +- examples/skills-custom-tool/src/index.ts | 2 +- examples/skills/src/index.ts | 2 +- examples/vercel/src/index.ts | 68 +-- foundry/compose.dev.yaml | 2 - foundry/docker/backend.Dockerfile | 1 - foundry/packages/backend/package.json | 1 - frontend/packages/inspector/package.json | 7 +- frontend/packages/inspector/src/App.tsx | 2 +- .../inspector/src/persist-indexeddb.ts | 320 ++++++++++ .../website/public/logos/cloudflare.svg | 3 + .../website/public/logos/computesdk.svg | 3 + .../packages/website/public/logos/docker.svg | 3 + .../packages/website/public/logos/modal.svg | 3 + .../website/src/components/GetStarted.tsx | 20 +- pnpm-lock.yaml | 135 ++--- sdks/persist-indexeddb/README.md | 5 + sdks/persist-indexeddb/package.json | 13 +- sdks/persist-indexeddb/src/index.ts | 316 +--------- sdks/persist-indexeddb/tests/driver.test.ts | 96 --- .../tests/integration.test.ts | 129 ---- sdks/persist-postgres/README.md | 5 + sdks/persist-postgres/package.json | 14 +- sdks/persist-postgres/src/index.ts | 311 +--------- .../tests/integration.test.ts | 245 -------- sdks/persist-rivet/README.md | 5 + sdks/persist-rivet/package.json | 20 +- sdks/persist-rivet/src/index.ts | 173 +----- sdks/persist-rivet/tests/driver.test.ts | 236 -------- sdks/persist-sqlite/README.md | 5 + sdks/persist-sqlite/package.json | 15 +- sdks/persist-sqlite/src/index.ts | 289 +-------- sdks/persist-sqlite/tests/integration.test.ts | 131 ----- 
sdks/typescript/package.json | 77 +++ sdks/typescript/src/client.ts | 215 +++++-- sdks/typescript/src/index.ts | 1 + sdks/typescript/src/providers/cloudflare.ts | 79 +++ sdks/typescript/src/providers/computesdk.ts | 60 ++ sdks/typescript/src/providers/daytona.ts | 67 +++ sdks/typescript/src/providers/docker.ts | 85 +++ sdks/typescript/src/providers/e2b.ts | 62 ++ sdks/typescript/src/providers/local.ts | 84 +++ sdks/typescript/src/providers/modal.ts | 74 +++ sdks/typescript/src/providers/shared.ts | 7 + sdks/typescript/src/providers/types.ts | 31 + sdks/typescript/src/providers/vercel.ts | 65 +++ sdks/typescript/src/types.ts | 15 +- sdks/typescript/tests/integration.test.ts | 8 +- sdks/typescript/tests/providers.test.ts | 417 +++++++++++++ sdks/typescript/tsup.config.ts | 13 +- sdks/typescript/vitest.config.ts | 2 + .../sandbox-agent/src/acp_proxy_runtime.rs | 2 +- 112 files changed, 3739 insertions(+), 3537 deletions(-) delete mode 100644 docs/agent-capabilities.mdx create mode 100644 docs/agents/amp.mdx create mode 100644 docs/agents/claude.mdx create mode 100644 docs/agents/codex.mdx create mode 100644 docs/agents/cursor.mdx create mode 100644 docs/agents/opencode.mdx create mode 100644 docs/agents/pi.mdx create mode 100644 docs/orchestration-architecture.mdx create mode 100644 examples/cloudflare/tests/cloudflare.test.ts rename {sdks/persist-sqlite => examples/cloudflare}/vitest.config.ts (84%) delete mode 100644 examples/computesdk/src/computesdk.ts create mode 100644 examples/computesdk/src/index.ts create mode 100644 examples/modal/src/index.ts delete mode 100644 examples/modal/src/modal.ts create mode 100644 examples/persist-postgres/src/persist.ts create mode 100644 examples/persist-sqlite/src/persist.ts create mode 100644 frontend/packages/inspector/src/persist-indexeddb.ts create mode 100644 frontend/packages/website/public/logos/cloudflare.svg create mode 100644 frontend/packages/website/public/logos/computesdk.svg create mode 100644 
frontend/packages/website/public/logos/docker.svg create mode 100644 frontend/packages/website/public/logos/modal.svg create mode 100644 sdks/persist-indexeddb/README.md delete mode 100644 sdks/persist-indexeddb/tests/driver.test.ts delete mode 100644 sdks/persist-indexeddb/tests/integration.test.ts create mode 100644 sdks/persist-postgres/README.md delete mode 100644 sdks/persist-postgres/tests/integration.test.ts create mode 100644 sdks/persist-rivet/README.md delete mode 100644 sdks/persist-rivet/tests/driver.test.ts create mode 100644 sdks/persist-sqlite/README.md delete mode 100644 sdks/persist-sqlite/tests/integration.test.ts create mode 100644 sdks/typescript/src/providers/cloudflare.ts create mode 100644 sdks/typescript/src/providers/computesdk.ts create mode 100644 sdks/typescript/src/providers/daytona.ts create mode 100644 sdks/typescript/src/providers/docker.ts create mode 100644 sdks/typescript/src/providers/e2b.ts create mode 100644 sdks/typescript/src/providers/local.ts create mode 100644 sdks/typescript/src/providers/modal.ts create mode 100644 sdks/typescript/src/providers/shared.ts create mode 100644 sdks/typescript/src/providers/types.ts create mode 100644 sdks/typescript/src/providers/vercel.ts create mode 100644 sdks/typescript/tests/providers.test.ts diff --git a/CLAUDE.md b/CLAUDE.md index d7e091b..cfd28a4 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -43,9 +43,42 @@ - Regenerate `docs/openapi.json` when HTTP contracts change. - Keep `docs/inspector.mdx` and `docs/sdks/typescript.mdx` aligned with implementation. - Append blockers/decisions to `research/acp/friction.md` during ACP work. -- `docs/agent-capabilities.mdx` lists models/modes/thought levels per agent. Update it when adding a new agent or changing `fallback_config_options`. If its "Last updated" date is >2 weeks old, re-run `cd scripts/agent-configs && npx tsx dump.ts` and update the doc to match. 
Source data: `scripts/agent-configs/resources/*.json` and hardcoded entries in `server/packages/sandbox-agent/src/router/support.rs` (`fallback_config_options`). +- Each agent has its own doc page at `docs/agents/.mdx` listing models, modes, and thought levels. Update the relevant page when changing `fallback_config_options`. To regenerate capability data, run `cd scripts/agent-configs && npx tsx dump.ts`. Source data: `scripts/agent-configs/resources/*.json` and hardcoded entries in `server/packages/sandbox-agent/src/router/support.rs` (`fallback_config_options`). - Some agent models are gated by subscription (e.g. Claude `opus`). The live report only shows models available to the current credentials. The static doc and JSON resource files should list all known models regardless of subscription tier. +## Adding Providers + +When adding a new sandbox provider, update all of the following: + +- `sdks/typescript/src/providers/.ts` — provider implementation +- `sdks/typescript/package.json` — add `./` export, peerDependencies, peerDependenciesMeta, devDependencies +- `sdks/typescript/tsup.config.ts` — add entry point and external +- `sdks/typescript/tests/providers.test.ts` — add test entry +- `examples//` — create example with `src/index.ts` and `tests/.test.ts` +- `docs/deploy/.mdx` — create deploy guide +- `docs/docs.json` — add to Deploy pages navigation +- `docs/quickstart.mdx` — add tab in "Start the sandbox" step, add credentials entry in "Passing LLM credentials" accordion + +## Adding Agents + +When adding a new agent, update all of the following: + +- `docs/agents/.mdx` — create agent page with usage snippet and capabilities table +- `docs/docs.json` — add to the Agents group under Agent +- `docs/quickstart.mdx` — add tab in the "Create a session and send a prompt" CodeGroup + +## Persist Packages (Deprecated) + +- The `@sandbox-agent/persist-*` npm packages (`persist-sqlite`, `persist-postgres`, `persist-indexeddb`, `persist-rivet`) are deprecated stubs. 
They still publish to npm but throw a deprecation error at import time. +- Driver implementations now live inline in examples and consuming packages: + - SQLite: `examples/persist-sqlite/src/persist.ts` + - Postgres: `examples/persist-postgres/src/persist.ts` + - IndexedDB: `frontend/packages/inspector/src/persist-indexeddb.ts` + - Rivet: inlined in `docs/multiplayer.mdx` + - In-memory: built into the main `sandbox-agent` SDK (`InMemorySessionPersistDriver`) +- Docs (`docs/session-persistence.mdx`) link to the example implementations on GitHub instead of referencing the packages. +- Do not re-add `@sandbox-agent/persist-*` as dependencies anywhere. New persist drivers should be copied into the consuming project directly. + ## Install Version References - Channel policy: @@ -74,6 +107,7 @@ - `examples/docker/src/index.ts` - `examples/e2b/src/index.ts` - `examples/vercel/src/index.ts` + - `sdks/typescript/src/providers/shared.ts` - `scripts/release/main.ts` - `scripts/release/promote-artifacts.ts` - `scripts/release/sdk.ts` diff --git a/docker/release/linux-aarch64.Dockerfile b/docker/release/linux-aarch64.Dockerfile index 412e6c0..d5ff208 100644 --- a/docker/release/linux-aarch64.Dockerfile +++ b/docker/release/linux-aarch64.Dockerfile @@ -10,7 +10,6 @@ COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./ COPY frontend/packages/inspector/package.json ./frontend/packages/inspector/ COPY sdks/cli-shared/package.json ./sdks/cli-shared/ COPY sdks/acp-http-client/package.json ./sdks/acp-http-client/ -COPY sdks/persist-indexeddb/package.json ./sdks/persist-indexeddb/ COPY sdks/react/package.json ./sdks/react/ COPY sdks/typescript/package.json ./sdks/typescript/ @@ -21,15 +20,13 @@ RUN pnpm install --filter @sandbox-agent/inspector... 
COPY docs/openapi.json ./docs/ COPY sdks/cli-shared ./sdks/cli-shared COPY sdks/acp-http-client ./sdks/acp-http-client -COPY sdks/persist-indexeddb ./sdks/persist-indexeddb COPY sdks/react ./sdks/react COPY sdks/typescript ./sdks/typescript -# Build cli-shared, acp-http-client, SDK, then persist-indexeddb and react (depends on SDK) +# Build cli-shared, acp-http-client, SDK, then react (depends on SDK) RUN cd sdks/cli-shared && pnpm exec tsup RUN cd sdks/acp-http-client && pnpm exec tsup RUN cd sdks/typescript && SKIP_OPENAPI_GEN=1 pnpm exec tsup -RUN cd sdks/persist-indexeddb && pnpm exec tsup RUN cd sdks/react && pnpm exec tsup # Copy inspector source and build diff --git a/docker/release/linux-x86_64.Dockerfile b/docker/release/linux-x86_64.Dockerfile index 323e471..1c41711 100644 --- a/docker/release/linux-x86_64.Dockerfile +++ b/docker/release/linux-x86_64.Dockerfile @@ -10,7 +10,6 @@ COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./ COPY frontend/packages/inspector/package.json ./frontend/packages/inspector/ COPY sdks/cli-shared/package.json ./sdks/cli-shared/ COPY sdks/acp-http-client/package.json ./sdks/acp-http-client/ -COPY sdks/persist-indexeddb/package.json ./sdks/persist-indexeddb/ COPY sdks/react/package.json ./sdks/react/ COPY sdks/typescript/package.json ./sdks/typescript/ @@ -21,15 +20,13 @@ RUN pnpm install --filter @sandbox-agent/inspector... 
COPY docs/openapi.json ./docs/ COPY sdks/cli-shared ./sdks/cli-shared COPY sdks/acp-http-client ./sdks/acp-http-client -COPY sdks/persist-indexeddb ./sdks/persist-indexeddb COPY sdks/react ./sdks/react COPY sdks/typescript ./sdks/typescript -# Build cli-shared, acp-http-client, SDK, then persist-indexeddb and react (depends on SDK) +# Build cli-shared, acp-http-client, SDK, then react (depends on SDK) RUN cd sdks/cli-shared && pnpm exec tsup RUN cd sdks/acp-http-client && pnpm exec tsup RUN cd sdks/typescript && SKIP_OPENAPI_GEN=1 pnpm exec tsup -RUN cd sdks/persist-indexeddb && pnpm exec tsup RUN cd sdks/react && pnpm exec tsup # Copy inspector source and build diff --git a/docker/release/macos-aarch64.Dockerfile b/docker/release/macos-aarch64.Dockerfile index 000157e..5d918b2 100644 --- a/docker/release/macos-aarch64.Dockerfile +++ b/docker/release/macos-aarch64.Dockerfile @@ -10,7 +10,6 @@ COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./ COPY frontend/packages/inspector/package.json ./frontend/packages/inspector/ COPY sdks/cli-shared/package.json ./sdks/cli-shared/ COPY sdks/acp-http-client/package.json ./sdks/acp-http-client/ -COPY sdks/persist-indexeddb/package.json ./sdks/persist-indexeddb/ COPY sdks/react/package.json ./sdks/react/ COPY sdks/typescript/package.json ./sdks/typescript/ @@ -21,15 +20,13 @@ RUN pnpm install --filter @sandbox-agent/inspector... 
COPY docs/openapi.json ./docs/ COPY sdks/cli-shared ./sdks/cli-shared COPY sdks/acp-http-client ./sdks/acp-http-client -COPY sdks/persist-indexeddb ./sdks/persist-indexeddb COPY sdks/react ./sdks/react COPY sdks/typescript ./sdks/typescript -# Build cli-shared, acp-http-client, SDK, then persist-indexeddb and react (depends on SDK) +# Build cli-shared, acp-http-client, SDK, then react (depends on SDK) RUN cd sdks/cli-shared && pnpm exec tsup RUN cd sdks/acp-http-client && pnpm exec tsup RUN cd sdks/typescript && SKIP_OPENAPI_GEN=1 pnpm exec tsup -RUN cd sdks/persist-indexeddb && pnpm exec tsup RUN cd sdks/react && pnpm exec tsup # Copy inspector source and build diff --git a/docker/release/macos-x86_64.Dockerfile b/docker/release/macos-x86_64.Dockerfile index 9082018..9b52aa6 100644 --- a/docker/release/macos-x86_64.Dockerfile +++ b/docker/release/macos-x86_64.Dockerfile @@ -10,7 +10,6 @@ COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./ COPY frontend/packages/inspector/package.json ./frontend/packages/inspector/ COPY sdks/cli-shared/package.json ./sdks/cli-shared/ COPY sdks/acp-http-client/package.json ./sdks/acp-http-client/ -COPY sdks/persist-indexeddb/package.json ./sdks/persist-indexeddb/ COPY sdks/react/package.json ./sdks/react/ COPY sdks/typescript/package.json ./sdks/typescript/ @@ -21,15 +20,13 @@ RUN pnpm install --filter @sandbox-agent/inspector... 
COPY docs/openapi.json ./docs/ COPY sdks/cli-shared ./sdks/cli-shared COPY sdks/acp-http-client ./sdks/acp-http-client -COPY sdks/persist-indexeddb ./sdks/persist-indexeddb COPY sdks/react ./sdks/react COPY sdks/typescript ./sdks/typescript -# Build cli-shared, acp-http-client, SDK, then persist-indexeddb and react (depends on SDK) +# Build cli-shared, acp-http-client, SDK, then react (depends on SDK) RUN cd sdks/cli-shared && pnpm exec tsup RUN cd sdks/acp-http-client && pnpm exec tsup RUN cd sdks/typescript && SKIP_OPENAPI_GEN=1 pnpm exec tsup -RUN cd sdks/persist-indexeddb && pnpm exec tsup RUN cd sdks/react && pnpm exec tsup # Copy inspector source and build diff --git a/docker/release/windows.Dockerfile b/docker/release/windows.Dockerfile index 9c7694d..92067db 100644 --- a/docker/release/windows.Dockerfile +++ b/docker/release/windows.Dockerfile @@ -10,7 +10,6 @@ COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./ COPY frontend/packages/inspector/package.json ./frontend/packages/inspector/ COPY sdks/cli-shared/package.json ./sdks/cli-shared/ COPY sdks/acp-http-client/package.json ./sdks/acp-http-client/ -COPY sdks/persist-indexeddb/package.json ./sdks/persist-indexeddb/ COPY sdks/react/package.json ./sdks/react/ COPY sdks/typescript/package.json ./sdks/typescript/ @@ -21,15 +20,13 @@ RUN pnpm install --filter @sandbox-agent/inspector... 
COPY docs/openapi.json ./docs/ COPY sdks/cli-shared ./sdks/cli-shared COPY sdks/acp-http-client ./sdks/acp-http-client -COPY sdks/persist-indexeddb ./sdks/persist-indexeddb COPY sdks/react ./sdks/react COPY sdks/typescript ./sdks/typescript -# Build cli-shared, acp-http-client, SDK, then persist-indexeddb and react (depends on SDK) +# Build cli-shared, acp-http-client, SDK, then react (depends on SDK) RUN cd sdks/cli-shared && pnpm exec tsup RUN cd sdks/acp-http-client && pnpm exec tsup RUN cd sdks/typescript && SKIP_OPENAPI_GEN=1 pnpm exec tsup -RUN cd sdks/persist-indexeddb && pnpm exec tsup RUN cd sdks/react && pnpm exec tsup # Copy inspector source and build diff --git a/docker/runtime/Dockerfile b/docker/runtime/Dockerfile index bdd1a16..e0a3335 100644 --- a/docker/runtime/Dockerfile +++ b/docker/runtime/Dockerfile @@ -12,7 +12,6 @@ COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./ COPY frontend/packages/inspector/package.json ./frontend/packages/inspector/ COPY sdks/cli-shared/package.json ./sdks/cli-shared/ COPY sdks/acp-http-client/package.json ./sdks/acp-http-client/ -COPY sdks/persist-indexeddb/package.json ./sdks/persist-indexeddb/ COPY sdks/react/package.json ./sdks/react/ COPY sdks/typescript/package.json ./sdks/typescript/ @@ -23,7 +22,6 @@ RUN pnpm install --filter @sandbox-agent/inspector... 
COPY docs/openapi.json ./docs/ COPY sdks/cli-shared ./sdks/cli-shared COPY sdks/acp-http-client ./sdks/acp-http-client -COPY sdks/persist-indexeddb ./sdks/persist-indexeddb COPY sdks/react ./sdks/react COPY sdks/typescript ./sdks/typescript @@ -31,7 +29,6 @@ COPY sdks/typescript ./sdks/typescript RUN cd sdks/cli-shared && pnpm exec tsup RUN cd sdks/acp-http-client && pnpm exec tsup RUN cd sdks/typescript && SKIP_OPENAPI_GEN=1 pnpm exec tsup -RUN cd sdks/persist-indexeddb && pnpm exec tsup RUN cd sdks/react && pnpm exec tsup # Copy inspector source and build diff --git a/docker/runtime/Dockerfile.full b/docker/runtime/Dockerfile.full index beb1664..9ab4c0d 100644 --- a/docker/runtime/Dockerfile.full +++ b/docker/runtime/Dockerfile.full @@ -11,7 +11,6 @@ COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./ COPY frontend/packages/inspector/package.json ./frontend/packages/inspector/ COPY sdks/cli-shared/package.json ./sdks/cli-shared/ COPY sdks/acp-http-client/package.json ./sdks/acp-http-client/ -COPY sdks/persist-indexeddb/package.json ./sdks/persist-indexeddb/ COPY sdks/react/package.json ./sdks/react/ COPY sdks/typescript/package.json ./sdks/typescript/ @@ -20,14 +19,12 @@ RUN pnpm install --filter @sandbox-agent/inspector... 
COPY docs/openapi.json ./docs/ COPY sdks/cli-shared ./sdks/cli-shared COPY sdks/acp-http-client ./sdks/acp-http-client -COPY sdks/persist-indexeddb ./sdks/persist-indexeddb COPY sdks/react ./sdks/react COPY sdks/typescript ./sdks/typescript RUN cd sdks/cli-shared && pnpm exec tsup RUN cd sdks/acp-http-client && pnpm exec tsup RUN cd sdks/typescript && SKIP_OPENAPI_GEN=1 pnpm exec tsup -RUN cd sdks/persist-indexeddb && pnpm exec tsup RUN cd sdks/react && pnpm exec tsup COPY frontend/packages/inspector ./frontend/packages/inspector diff --git a/docs/agent-capabilities.mdx b/docs/agent-capabilities.mdx deleted file mode 100644 index 13f2723..0000000 --- a/docs/agent-capabilities.mdx +++ /dev/null @@ -1,127 +0,0 @@ ---- -title: "Agent Capabilities" -description: "Models, modes, and thought levels supported by each agent." ---- - -Capabilities are subject to change as the agents are updated. See [Agent Sessions](/agent-sessions) for full session configuration API details. - - - - _Last updated: March 5th, 2026. See [Generating a live report](#generating-a-live-report) for up-to-date reference._ - - -## Claude - -| Category | Values | -|----------|--------| -| **Models** | `default`, `sonnet`, `opus`, `haiku` | -| **Modes** | `default`, `acceptEdits`, `plan`, `dontAsk`, `bypassPermissions` | -| **Thought levels** | Unsupported | - -### Configuring Effort Level For Claude - -Claude does not natively support changing effort level after a session starts, so configure it in the filesystem before creating the session. 
- -```ts -import { mkdir, writeFile } from "node:fs/promises"; -import path from "node:path"; -import { SandboxAgent } from "sandbox-agent"; - -const cwd = "/path/to/workspace"; -await mkdir(path.join(cwd, ".claude"), { recursive: true }); -await writeFile( - path.join(cwd, ".claude", "settings.json"), - JSON.stringify({ effortLevel: "high" }, null, 2), -); - -const sdk = await SandboxAgent.connect({ baseUrl: "http://127.0.0.1:2468" }); -await sdk.createSession({ - agent: "claude", - sessionInit: { cwd, mcpServers: [] }, -}); -``` - - - -1. `~/.claude/settings.json` -2. `/.claude/settings.json` -3. `/.claude/settings.local.json` - - - -## Codex - -| Category | Values | -|----------|--------| -| **Models** | `gpt-5.3-codex` (default), `gpt-5.3-codex-spark`, `gpt-5.2-codex`, `gpt-5.1-codex-max`, `gpt-5.2`, `gpt-5.1-codex-mini` | -| **Modes** | `read-only` (default), `auto`, `full-access` | -| **Thought levels** | `low`, `medium`, `high` (default), `xhigh` | - -## OpenCode - -| Category | Values | -|----------|--------| -| **Models** | See below | -| **Modes** | `build` (default), `plan` | -| **Thought levels** | Unsupported | - - - -| Provider | Models | -|----------|--------| -| **Anthropic** | `anthropic/claude-3-5-haiku-20241022`, `anthropic/claude-3-5-haiku-latest`, `anthropic/claude-3-5-sonnet-20240620`, `anthropic/claude-3-5-sonnet-20241022`, `anthropic/claude-3-7-sonnet-20250219`, `anthropic/claude-3-7-sonnet-latest`, `anthropic/claude-3-haiku-20240307`, `anthropic/claude-3-opus-20240229`, `anthropic/claude-3-sonnet-20240229`, `anthropic/claude-haiku-4-5`, `anthropic/claude-haiku-4-5-20251001`, `anthropic/claude-opus-4-0`, `anthropic/claude-opus-4-1`, `anthropic/claude-opus-4-1-20250805`, `anthropic/claude-opus-4-20250514`, `anthropic/claude-opus-4-5`, `anthropic/claude-opus-4-5-20251101`, `anthropic/claude-opus-4-6`, `anthropic/claude-sonnet-4-0`, `anthropic/claude-sonnet-4-20250514`, `anthropic/claude-sonnet-4-5`, `anthropic/claude-sonnet-4-5-20250929` | -| 
**OpenAI** | `openai/gpt-5.1-codex`, `openai/gpt-5.1-codex-max`, `openai/gpt-5.1-codex-mini`, `openai/gpt-5.2`, `openai/gpt-5.2-codex`, `openai/gpt-5.3-codex` | -| **Cerebras** | `cerebras/gpt-oss-120b`, `cerebras/qwen-3-235b-a22b-instruct-2507`, `cerebras/zai-glm-4.7` | -| **OpenCode Zen** | `opencode/big-pickle`, `opencode/claude-3-5-haiku`, `opencode/claude-haiku-4-5`, `opencode/claude-opus-4-1`, `opencode/claude-opus-4-5`, `opencode/claude-opus-4-6`, `opencode/claude-sonnet-4`, `opencode/claude-sonnet-4-5`, `opencode/gemini-3-flash`, `opencode/gemini-3-pro` (default), `opencode/glm-4.6`, `opencode/glm-4.7`, `opencode/gpt-5`, `opencode/gpt-5-codex`, `opencode/gpt-5-nano`, `opencode/gpt-5.1`, `opencode/gpt-5.1-codex`, `opencode/gpt-5.1-codex-max`, `opencode/gpt-5.1-codex-mini`, `opencode/gpt-5.2`, `opencode/gpt-5.2-codex`, `opencode/kimi-k2`, `opencode/kimi-k2-thinking`, `opencode/kimi-k2.5`, `opencode/kimi-k2.5-free`, `opencode/minimax-m2.1`, `opencode/minimax-m2.1-free`, `opencode/trinity-large-preview-free` | - - - -## Cursor - -| Category | Values | -|----------|--------| -| **Models** | See below | -| **Modes** | Unsupported | -| **Thought levels** | Unsupported | - - - -| Group | Models | -|-------|--------| -| **Auto** | `auto` | -| **Composer** | `composer-1.5`, `composer-1` | -| **GPT-5.3 Codex** | `gpt-5.3-codex`, `gpt-5.3-codex-low`, `gpt-5.3-codex-high`, `gpt-5.3-codex-xhigh`, `gpt-5.3-codex-fast`, `gpt-5.3-codex-low-fast`, `gpt-5.3-codex-high-fast`, `gpt-5.3-codex-xhigh-fast` | -| **GPT-5.2** | `gpt-5.2`, `gpt-5.2-high`, `gpt-5.2-codex`, `gpt-5.2-codex-low`, `gpt-5.2-codex-high`, `gpt-5.2-codex-xhigh`, `gpt-5.2-codex-fast`, `gpt-5.2-codex-low-fast`, `gpt-5.2-codex-high-fast`, `gpt-5.2-codex-xhigh-fast` | -| **GPT-5.1** | `gpt-5.1-high`, `gpt-5.1-codex-max`, `gpt-5.1-codex-max-high` | -| **Claude** | `opus-4.6-thinking` (default), `opus-4.6`, `opus-4.5`, `opus-4.5-thinking`, `sonnet-4.5`, `sonnet-4.5-thinking` | -| **Other** | `gemini-3-pro`, 
`gemini-3-flash`, `grok` | - - - -## Amp - -| Category | Values | -|----------|--------| -| **Models** | `amp-default` | -| **Modes** | `default`, `bypass` | -| **Thought levels** | Unsupported | - -## Pi - -| Category | Values | -|----------|--------| -| **Models** | `default` | -| **Modes** | Unsupported | -| **Thought levels** | Unsupported | - -## Generating a live report - -Requires a running Sandbox Agent server. `--endpoint` defaults to `http://127.0.0.1:2468`. - -```bash -sandbox-agent api agents report -``` - - - The live report reflects what the agent adapter returns for the current credentials. Some models may be gated by subscription (e.g. Claude's `opus` requires a paid plan) and will not appear in the report if the credentials don't have access. - diff --git a/docs/agent-sessions.mdx b/docs/agent-sessions.mdx index cf56e9c..0f9e2ab 100644 --- a/docs/agent-sessions.mdx +++ b/docs/agent-sessions.mdx @@ -21,10 +21,7 @@ const sdk = await SandboxAgent.connect({ const session = await sdk.createSession({ agent: "codex", - sessionInit: { - cwd: "/", - mcpServers: [], - }, + cwd: "/", }); console.log(session.id, session.agentSessionId); diff --git a/docs/agents/amp.mdx b/docs/agents/amp.mdx new file mode 100644 index 0000000..f94e97d --- /dev/null +++ b/docs/agents/amp.mdx @@ -0,0 +1,20 @@ +--- +title: "Amp" +description: "Use Amp as a sandbox agent." +--- + +## Usage + +```typescript +const session = await client.createSession({ + agent: "amp", +}); +``` + +## Capabilities + +| Category | Values | +|----------|--------| +| **Models** | `amp-default` | +| **Modes** | `default`, `bypass` | +| **Thought levels** | Unsupported | diff --git a/docs/agents/claude.mdx b/docs/agents/claude.mdx new file mode 100644 index 0000000..2e4fd43 --- /dev/null +++ b/docs/agents/claude.mdx @@ -0,0 +1,49 @@ +--- +title: "Claude" +description: "Use Claude Code as a sandbox agent." 
+--- + +## Usage + +```typescript +const session = await client.createSession({ + agent: "claude", +}); +``` + +## Capabilities + +| Category | Values | +|----------|--------| +| **Models** | `default`, `sonnet`, `opus`, `haiku` | +| **Modes** | `default`, `acceptEdits`, `plan`, `dontAsk`, `bypassPermissions` | +| **Thought levels** | Unsupported | + +## Configuring effort level + +Claude does not support changing effort level after a session starts. Configure it in the filesystem before creating the session. + +```ts +import { mkdir, writeFile } from "node:fs/promises"; +import path from "node:path"; + +const cwd = "/path/to/workspace"; +await mkdir(path.join(cwd, ".claude"), { recursive: true }); +await writeFile( + path.join(cwd, ".claude", "settings.json"), + JSON.stringify({ effortLevel: "high" }, null, 2), +); + +const session = await client.createSession({ + agent: "claude", + cwd, +}); +``` + + + +1. `~/.claude/settings.json` +2. `/.claude/settings.json` +3. `/.claude/settings.local.json` + + diff --git a/docs/agents/codex.mdx b/docs/agents/codex.mdx new file mode 100644 index 0000000..d359beb --- /dev/null +++ b/docs/agents/codex.mdx @@ -0,0 +1,20 @@ +--- +title: "Codex" +description: "Use OpenAI Codex as a sandbox agent." +--- + +## Usage + +```typescript +const session = await client.createSession({ + agent: "codex", +}); +``` + +## Capabilities + +| Category | Values | +|----------|--------| +| **Models** | `gpt-5.3-codex` (default), `gpt-5.3-codex-spark`, `gpt-5.2-codex`, `gpt-5.1-codex-max`, `gpt-5.2`, `gpt-5.1-codex-mini` | +| **Modes** | `read-only` (default), `auto`, `full-access` | +| **Thought levels** | `low`, `medium`, `high` (default), `xhigh` | diff --git a/docs/agents/cursor.mdx b/docs/agents/cursor.mdx new file mode 100644 index 0000000..0905baa --- /dev/null +++ b/docs/agents/cursor.mdx @@ -0,0 +1,34 @@ +--- +title: "Cursor" +description: "Use Cursor as a sandbox agent." 
+--- + +## Usage + +```typescript +const session = await client.createSession({ + agent: "cursor", +}); +``` + +## Capabilities + +| Category | Values | +|----------|--------| +| **Models** | See below | +| **Modes** | Unsupported | +| **Thought levels** | Unsupported | + + + +| Group | Models | +|-------|--------| +| **Auto** | `auto` | +| **Composer** | `composer-1.5`, `composer-1` | +| **GPT-5.3 Codex** | `gpt-5.3-codex`, `gpt-5.3-codex-low`, `gpt-5.3-codex-high`, `gpt-5.3-codex-xhigh`, `gpt-5.3-codex-fast`, `gpt-5.3-codex-low-fast`, `gpt-5.3-codex-high-fast`, `gpt-5.3-codex-xhigh-fast` | +| **GPT-5.2** | `gpt-5.2`, `gpt-5.2-high`, `gpt-5.2-codex`, `gpt-5.2-codex-low`, `gpt-5.2-codex-high`, `gpt-5.2-codex-xhigh`, `gpt-5.2-codex-fast`, `gpt-5.2-codex-low-fast`, `gpt-5.2-codex-high-fast`, `gpt-5.2-codex-xhigh-fast` | +| **GPT-5.1** | `gpt-5.1-high`, `gpt-5.1-codex-max`, `gpt-5.1-codex-max-high` | +| **Claude** | `opus-4.6-thinking` (default), `opus-4.6`, `opus-4.5`, `opus-4.5-thinking`, `sonnet-4.5`, `sonnet-4.5-thinking` | +| **Other** | `gemini-3-pro`, `gemini-3-flash`, `grok` | + + diff --git a/docs/agents/opencode.mdx b/docs/agents/opencode.mdx new file mode 100644 index 0000000..db7b640 --- /dev/null +++ b/docs/agents/opencode.mdx @@ -0,0 +1,31 @@ +--- +title: "OpenCode" +description: "Use OpenCode as a sandbox agent." 
+--- + +## Usage + +```typescript +const session = await client.createSession({ + agent: "opencode", +}); +``` + +## Capabilities + +| Category | Values | +|----------|--------| +| **Models** | See below | +| **Modes** | `build` (default), `plan` | +| **Thought levels** | Unsupported | + + + +| Provider | Models | +|----------|--------| +| **Anthropic** | `anthropic/claude-3-5-haiku-20241022`, `anthropic/claude-3-5-haiku-latest`, `anthropic/claude-3-5-sonnet-20240620`, `anthropic/claude-3-5-sonnet-20241022`, `anthropic/claude-3-7-sonnet-20250219`, `anthropic/claude-3-7-sonnet-latest`, `anthropic/claude-3-haiku-20240307`, `anthropic/claude-3-opus-20240229`, `anthropic/claude-3-sonnet-20240229`, `anthropic/claude-haiku-4-5`, `anthropic/claude-haiku-4-5-20251001`, `anthropic/claude-opus-4-0`, `anthropic/claude-opus-4-1`, `anthropic/claude-opus-4-1-20250805`, `anthropic/claude-opus-4-20250514`, `anthropic/claude-opus-4-5`, `anthropic/claude-opus-4-5-20251101`, `anthropic/claude-opus-4-6`, `anthropic/claude-sonnet-4-0`, `anthropic/claude-sonnet-4-20250514`, `anthropic/claude-sonnet-4-5`, `anthropic/claude-sonnet-4-5-20250929` | +| **OpenAI** | `openai/gpt-5.1-codex`, `openai/gpt-5.1-codex-max`, `openai/gpt-5.1-codex-mini`, `openai/gpt-5.2`, `openai/gpt-5.2-codex`, `openai/gpt-5.3-codex` | +| **Cerebras** | `cerebras/gpt-oss-120b`, `cerebras/qwen-3-235b-a22b-instruct-2507`, `cerebras/zai-glm-4.7` | +| **OpenCode Zen** | `opencode/big-pickle`, `opencode/claude-3-5-haiku`, `opencode/claude-haiku-4-5`, `opencode/claude-opus-4-1`, `opencode/claude-opus-4-5`, `opencode/claude-opus-4-6`, `opencode/claude-sonnet-4`, `opencode/claude-sonnet-4-5`, `opencode/gemini-3-flash`, `opencode/gemini-3-pro` (default), `opencode/glm-4.6`, `opencode/glm-4.7`, `opencode/gpt-5`, `opencode/gpt-5-codex`, `opencode/gpt-5-nano`, `opencode/gpt-5.1`, `opencode/gpt-5.1-codex`, `opencode/gpt-5.1-codex-max`, `opencode/gpt-5.1-codex-mini`, `opencode/gpt-5.2`, `opencode/gpt-5.2-codex`, 
`opencode/kimi-k2`, `opencode/kimi-k2-thinking`, `opencode/kimi-k2.5`, `opencode/kimi-k2.5-free`, `opencode/minimax-m2.1`, `opencode/minimax-m2.1-free`, `opencode/trinity-large-preview-free` | + + diff --git a/docs/agents/pi.mdx b/docs/agents/pi.mdx new file mode 100644 index 0000000..1d56370 --- /dev/null +++ b/docs/agents/pi.mdx @@ -0,0 +1,20 @@ +--- +title: "Pi" +description: "Use Pi as a sandbox agent." +--- + +## Usage + +```typescript +const session = await client.createSession({ + agent: "pi", +}); +``` + +## Capabilities + +| Category | Values | +|----------|--------| +| **Models** | `default` | +| **Modes** | Unsupported | +| **Thought levels** | Unsupported | diff --git a/docs/architecture.mdx b/docs/architecture.mdx index 78585a2..a28f133 100644 --- a/docs/architecture.mdx +++ b/docs/architecture.mdx @@ -1,64 +1,63 @@ --- title: "Architecture" -description: "How the client, sandbox, server, and agent fit together." -icon: "microchip" +description: "How the Sandbox Agent server, SDK, and agent processes fit together." --- -Sandbox Agent runs as an HTTP server inside your sandbox. Your app talks to it remotely. +Sandbox Agent is a lightweight HTTP server that runs **inside** a sandbox. It: + +- **Agent management**: Installs, spawns, and stops coding agent processes +- **Sessions**: Routes prompts to agents and streams events back in real time +- **Sandbox APIs**: Filesystem, process, and terminal access for the sandbox environment ## Components -- `Your client`: your app code using the `sandbox-agent` SDK. -- `Sandbox`: isolated runtime (E2B, Daytona, Docker, etc.). -- `Sandbox Agent server`: process inside the sandbox exposing HTTP transport. -- `Agent`: Claude/Codex/OpenCode/Amp process managed by Sandbox Agent. 
- -```mermaid placement="top-right" - flowchart LR - CLIENT["Sandbox Agent SDK"] - SERVER["Sandbox Agent server"] - AGENT["Agent process"] +```mermaid +flowchart LR + CLIENT["Your App"] subgraph SANDBOX["Sandbox"] - direction TB - SERVER --> AGENT + direction TB + SERVER["Sandbox Agent Server"] + AGENT["Agent Process
(Claude, Codex, etc.)"] + SERVER --> AGENT end - CLIENT -->|HTTP| SERVER + CLIENT -->|"SDK (HTTP)"| SERVER ``` -## Suggested Topology +- **Your app**: Uses the `sandbox-agent` TypeScript SDK to talk to the server over HTTP. +- **Sandbox**: An isolated runtime (local process, Docker, E2B, Daytona, Vercel, Cloudflare). +- **Sandbox Agent server**: A single binary inside the sandbox that manages agent lifecycles, routes prompts, streams events, and exposes filesystem/process/terminal APIs. +- **Agent process**: A coding agent (Claude Code, Codex, etc.) spawned by the server. Each session maps to one agent process. -Run the SDK on your backend, then call it from your frontend. +## What `SandboxAgent.start()` does -This extra hop is recommended because it keeps auth/token logic on the backend and makes persistence simpler. +1. **Provision**: The provider creates a sandbox (starts a container, creates a VM, etc.) +2. **Install**: The Sandbox Agent binary is installed inside the sandbox +3. **Boot**: The server starts listening on an HTTP port +4. **Health check**: The SDK waits for `/v1/health` to respond +5. **Ready**: The SDK returns a connected client -```mermaid placement="top-right" - flowchart LR - BROWSER["Browser"] - subgraph BACKEND["Your backend"] - direction TB - SDK["Sandbox Agent SDK"] - end - subgraph SANDBOX_SIMPLE["Sandbox"] - SERVER_SIMPLE["Sandbox Agent server"] - end +For the `local` provider, provisioning is a no-op and the server runs as a local subprocess. - BROWSER --> BACKEND - BACKEND --> SDK --> SERVER_SIMPLE +### Server recovery + +If the server process stops, the SDK automatically calls the provider's `ensureServer()` after 3 consecutive health-check failures. Most built-in providers implement this. Custom providers can add `ensureServer(sandboxId)` to their `SandboxProvider` object. + +## Server HTTP API + +See the [HTTP API reference](/api-reference) for the full list of server endpoints. 
+ +## Agent installation + +Agents are installed lazily on first use. To avoid the cold-start delay, pre-install them: + +```bash +sandbox-agent install-agent --all ``` -### Backend requirements +The `rivetdev/sandbox-agent:0.3.2-full` Docker image ships with all agents pre-installed. -Your backend layer needs to handle: +## Production-ready agent orchestration -- **Long-running connections**: prompts can take minutes. -- **Session affinity**: follow-up messages must reach the same session. -- **State between requests**: session metadata and event history must persist across requests. -- **Graceful recovery**: sessions should resume after backend restarts. - -We recommend [Rivet](https://rivet.dev) over serverless because actors natively support the long-lived connections, session routing, and state persistence that agent workloads require. - -## Session persistence - -For storage driver options and replay behavior, see [Persisting Sessions](/session-persistence). +For production deployments, see [Orchestration Architecture](/orchestration-architecture) for recommended topology, backend requirements, and session persistence patterns. diff --git a/docs/cli.mdx b/docs/cli.mdx index 6177fb3..2ad3b08 100644 --- a/docs/cli.mdx +++ b/docs/cli.mdx @@ -259,7 +259,7 @@ Example output: } ``` -See [Agent Capabilities](/agent-capabilities) for a full reference of supported models, modes, and thought levels per agent. +See individual agent pages (e.g. [Claude](/agents/claude), [Codex](/agents/codex)) for supported models, modes, and thought levels. 
#### api agents install diff --git a/docs/custom-tools.mdx b/docs/custom-tools.mdx index 727fb02..2fb3e15 100644 --- a/docs/custom-tools.mdx +++ b/docs/custom-tools.mdx @@ -80,9 +80,7 @@ await sdk.setMcpConfig( const session = await sdk.createSession({ agent: "claude", - sessionInit: { - cwd: "/workspace", - }, + cwd: "/workspace", }); await session.prompt([ @@ -145,9 +143,7 @@ await sdk.writeFsFile({ path: "/opt/skills/random-number/SKILL.md" }, skill); ```ts const session = await sdk.createSession({ agent: "claude", - sessionInit: { - cwd: "/workspace", - }, + cwd: "/workspace", }); await session.prompt([ diff --git a/docs/deploy/cloudflare.mdx b/docs/deploy/cloudflare.mdx index deca490..1cecdd7 100644 --- a/docs/deploy/cloudflare.mdx +++ b/docs/deploy/cloudflare.mdx @@ -31,7 +31,38 @@ RUN sandbox-agent install-agent claude && sandbox-agent install-agent codex EXPOSE 8000 ``` -## TypeScript example +## TypeScript example (with provider) + +For standalone scripts, use the `cloudflare` provider: + +```bash +npm install sandbox-agent@0.3.x @cloudflare/sandbox +``` + +```typescript +import { SandboxAgent } from "sandbox-agent"; +import { cloudflare } from "sandbox-agent/cloudflare"; + +const sdk = await SandboxAgent.start({ + sandbox: cloudflare(), +}); + +try { + const session = await sdk.createSession({ agent: "codex" }); + const response = await session.prompt([ + { type: "text", text: "Summarize this repository" }, + ]); + console.log(response.stopReason); +} finally { + await sdk.destroySandbox(); +} +``` + +The `cloudflare` provider uses `containerFetch` under the hood, automatically stripping `AbortSignal` to avoid dropped streaming updates. 
+ +## TypeScript example (Durable Objects) + +For Workers with Durable Objects, use `SandboxAgent.connect(...)` with a custom `fetch` backed by `sandbox.containerFetch(...)`: ```typescript import { getSandbox, type Sandbox } from "@cloudflare/sandbox"; @@ -109,7 +140,6 @@ app.all("*", (c) => c.env.ASSETS.fetch(c.req.raw)); export default app; ``` -Create the SDK client inside the Worker using custom `fetch` backed by `sandbox.containerFetch(...)`. This keeps all Sandbox Agent calls inside the Cloudflare sandbox routing path and does not require a `baseUrl`. ## Troubleshooting streaming updates diff --git a/docs/deploy/computesdk.mdx b/docs/deploy/computesdk.mdx index 5e07da0..1adfffe 100644 --- a/docs/deploy/computesdk.mdx +++ b/docs/deploy/computesdk.mdx @@ -1,160 +1,61 @@ --- title: "ComputeSDK" -description: "Deploy the daemon using ComputeSDK's provider-agnostic sandbox API." +description: "Deploy Sandbox Agent using ComputeSDK's provider-agnostic sandbox API." --- -[ComputeSDK](https://computesdk.com) provides a unified interface for managing sandboxes across multiple providers. Write once, deploy anywhere—switch providers by changing environment variables. +[ComputeSDK](https://computesdk.com) provides a unified interface for managing sandboxes across multiple providers. Write once, deploy anywhere by changing environment variables. 
## Prerequisites - `COMPUTESDK_API_KEY` from [console.computesdk.com](https://console.computesdk.com) - Provider API key (one of: `E2B_API_KEY`, `DAYTONA_API_KEY`, `VERCEL_TOKEN`, `MODAL_TOKEN_ID` + `MODAL_TOKEN_SECRET`, `BLAXEL_API_KEY`, `CSB_API_KEY`) -- `ANTHROPIC_API_KEY` or `OPENAI_API_KEY` for the coding agents +- `ANTHROPIC_API_KEY` or `OPENAI_API_KEY` -## TypeScript Example +## TypeScript example + +```bash +npm install sandbox-agent@0.3.x computesdk +``` ```typescript -import { - compute, - detectProvider, - getMissingEnvVars, - getProviderConfigFromEnv, - isProviderAuthComplete, - isValidProvider, - PROVIDER_NAMES, - type ExplicitComputeConfig, - type ProviderName, -} from "computesdk"; import { SandboxAgent } from "sandbox-agent"; +import { computesdk } from "sandbox-agent/computesdk"; -const PORT = 3000; -const REQUEST_TIMEOUT_MS = - Number.parseInt(process.env.COMPUTESDK_TIMEOUT_MS || "", 10) || 120_000; - -/** - * Detects and validates the provider to use. - * Priority: COMPUTESDK_PROVIDER env var > auto-detection from API keys - */ -function resolveProvider(): ProviderName { - const providerOverride = process.env.COMPUTESDK_PROVIDER; - - if (providerOverride) { - if (!isValidProvider(providerOverride)) { - throw new Error( - `Unsupported provider "${providerOverride}". Supported: ${PROVIDER_NAMES.join(", ")}` - ); - } - if (!isProviderAuthComplete(providerOverride)) { - const missing = getMissingEnvVars(providerOverride); - throw new Error( - `Missing credentials for "${providerOverride}". Set: ${missing.join(", ")}` - ); - } - return providerOverride as ProviderName; - } - - const detected = detectProvider(); - if (!detected) { - throw new Error( - `No provider credentials found. 
Set one of: ${PROVIDER_NAMES.map((p) => getMissingEnvVars(p).join(", ")).join(" | ")}` - ); - } - return detected as ProviderName; -} - -function configureComputeSDK(): void { - const provider = resolveProvider(); - - const config: ExplicitComputeConfig = { - provider, - computesdkApiKey: process.env.COMPUTESDK_API_KEY, - requestTimeoutMs: REQUEST_TIMEOUT_MS, - }; - - // Add provider-specific config from environment - const providerConfig = getProviderConfigFromEnv(provider); - if (Object.keys(providerConfig).length > 0) { - (config as any)[provider] = providerConfig; - } - - compute.setConfig(config); -} - -configureComputeSDK(); - -// Build environment variables to pass to sandbox const envs: Record = {}; if (process.env.ANTHROPIC_API_KEY) envs.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; if (process.env.OPENAI_API_KEY) envs.OPENAI_API_KEY = process.env.OPENAI_API_KEY; -// Create sandbox -const sandbox = await compute.sandbox.create({ - envs: Object.keys(envs).length > 0 ? envs : undefined, +const sdk = await SandboxAgent.start({ + sandbox: computesdk({ + create: { envs }, + }), }); -// Helper to run commands with error handling -const run = async (cmd: string, options?: { background?: boolean }) => { - const result = await sandbox.runCommand(cmd, options); - if (typeof result?.exitCode === "number" && result.exitCode !== 0) { - throw new Error(`Command failed: ${cmd} (exit ${result.exitCode})\n${result.stderr || ""}`); - } - return result; -}; - -// Install sandbox-agent -await run("curl -fsSL https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh"); - -// Install agents conditionally based on available API keys -if (envs.ANTHROPIC_API_KEY) { - await run("sandbox-agent install-agent claude"); +try { + const session = await sdk.createSession({ agent: "claude" }); + const response = await session.prompt([ + { type: "text", text: "Summarize this repository" }, + ]); + console.log(response.stopReason); +} finally { + await sdk.destroySandbox(); } -if 
(envs.OPENAI_API_KEY) { - await run("sandbox-agent install-agent codex"); -} - -// Start the server in the background -await run(`sandbox-agent server --no-token --host 0.0.0.0 --port ${PORT}`, { background: true }); - -// Get the public URL for the sandbox -const baseUrl = await sandbox.getUrl({ port: PORT }); - -// Wait for server to be ready -const deadline = Date.now() + REQUEST_TIMEOUT_MS; -while (Date.now() < deadline) { - try { - const response = await fetch(`${baseUrl}/v1/health`); - if (response.ok) { - const data = await response.json(); - if (data?.status === "ok") break; - } - } catch { - // Server not ready yet - } - await new Promise((r) => setTimeout(r, 500)); -} - -// Connect to the server -const client = await SandboxAgent.connect({ baseUrl }); - -// Detect which agent to use based on available API keys -const agent = envs.ANTHROPIC_API_KEY ? "claude" : "codex"; - -// Create a session and start coding -await client.createSession("my-session", { agent }); - -await client.postMessage("my-session", { - message: "Summarize this repository", -}); - -for await (const event of client.streamEvents("my-session")) { - console.log(event.type, event.data); -} - -// Cleanup -await sandbox.destroy(); ``` -## Supported Providers +The `computesdk` provider handles sandbox creation, Sandbox Agent installation, agent setup, and server startup automatically. ComputeSDK routes to your configured provider behind the scenes. 
+ +Before calling `SandboxAgent.start()`, configure ComputeSDK with your provider: + +```typescript +import { compute } from "computesdk"; + +compute.setConfig({ + provider: "e2b", // or auto-detect via detectProvider() + computesdkApiKey: process.env.COMPUTESDK_API_KEY, +}); +``` + +## Supported providers ComputeSDK auto-detects your provider from environment variables: @@ -169,46 +70,7 @@ ComputeSDK auto-detects your provider from environment variables: ## Notes -- **Provider resolution order**: `COMPUTESDK_PROVIDER` env var takes priority, otherwise auto-detection from API keys. -- **Conditional agent installation**: Only agents with available API keys are installed, reducing setup time. -- **Command error handling**: The example validates exit codes and throws on failures for easier debugging. +- **Provider resolution**: Set `COMPUTESDK_PROVIDER` to force a specific provider, or let ComputeSDK auto-detect from API keys. - `sandbox.runCommand(..., { background: true })` keeps the server running while your app continues. - `sandbox.getUrl({ port })` returns a public URL for the sandbox port. -- Always destroy the sandbox when you are done to avoid leaking resources. -- If sandbox creation times out, set `COMPUTESDK_TIMEOUT_MS` to a higher value (default: 120000ms). 
- -## Explicit Provider Selection - -To force a specific provider instead of auto-detection, set the `COMPUTESDK_PROVIDER` environment variable: - -```bash -export COMPUTESDK_PROVIDER=e2b -``` - -Or configure programmatically using `getProviderConfigFromEnv()`: - -```typescript -import { compute, getProviderConfigFromEnv, type ExplicitComputeConfig } from "computesdk"; - -const config: ExplicitComputeConfig = { - provider: "e2b", - computesdkApiKey: process.env.COMPUTESDK_API_KEY, - requestTimeoutMs: 120_000, -}; - -// Automatically populate provider-specific config from environment -const providerConfig = getProviderConfigFromEnv("e2b"); -if (Object.keys(providerConfig).length > 0) { - (config as any).e2b = providerConfig; -} - -compute.setConfig(config); -``` - -## Direct Mode (No ComputeSDK API Key) - -To bypass the ComputeSDK gateway and use provider SDKs directly, see the provider-specific examples: - -- [E2B](/deploy/e2b) -- [Daytona](/deploy/daytona) -- [Vercel](/deploy/vercel) +- Always destroy the sandbox when done to avoid leaking resources. diff --git a/docs/deploy/daytona.mdx b/docs/deploy/daytona.mdx index 5eb8f5d..b65aec9 100644 --- a/docs/deploy/daytona.mdx +++ b/docs/deploy/daytona.mdx @@ -15,40 +15,37 @@ See [Daytona network limits](https://www.daytona.io/docs/en/network-limits/). 
## TypeScript example -```typescript -import { Daytona } from "@daytonaio/sdk"; -import { SandboxAgent } from "sandbox-agent"; +```bash +npm install sandbox-agent@0.3.x @daytonaio/sdk +``` -const daytona = new Daytona(); +```typescript +import { SandboxAgent } from "sandbox-agent"; +import { daytona } from "sandbox-agent/daytona"; const envVars: Record = {}; if (process.env.ANTHROPIC_API_KEY) envVars.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; if (process.env.OPENAI_API_KEY) envVars.OPENAI_API_KEY = process.env.OPENAI_API_KEY; -const sandbox = await daytona.create({ envVars }); +const sdk = await SandboxAgent.start({ + sandbox: daytona({ + create: { envVars }, + }), +}); -await sandbox.process.executeCommand( - "curl -fsSL https://releases.rivet.dev/sandbox-agent/0.3.x/install.sh | sh" -); - -await sandbox.process.executeCommand("sandbox-agent install-agent claude"); -await sandbox.process.executeCommand("sandbox-agent install-agent codex"); - -await sandbox.process.executeCommand( - "nohup sandbox-agent server --no-token --host 0.0.0.0 --port 3000 >/tmp/sandbox-agent.log 2>&1 &" -); - -await new Promise((r) => setTimeout(r, 2000)); - -const baseUrl = (await sandbox.getSignedPreviewUrl(3000, 4 * 60 * 60)).url; -const sdk = await SandboxAgent.connect({ baseUrl }); - -const session = await sdk.createSession({ agent: "claude" }); -await session.prompt([{ type: "text", text: "Summarize this repository" }]); - -await sandbox.delete(); +try { + const session = await sdk.createSession({ agent: "claude" }); + const response = await session.prompt([ + { type: "text", text: "Summarize this repository" }, + ]); + console.log(response.stopReason); +} finally { + await sdk.destroySandbox(); +} ``` +The `daytona` provider uses the `rivetdev/sandbox-agent:0.3.2-full` image by default and starts the server automatically. 
+ ## Using snapshots for faster startup ```typescript diff --git a/docs/deploy/docker.mdx b/docs/deploy/docker.mdx index 030ddc9..b674b7a 100644 --- a/docs/deploy/docker.mdx +++ b/docs/deploy/docker.mdx @@ -15,43 +15,43 @@ Run the published full image with all supported agents pre-installed: docker run --rm -p 3000:3000 \ -e ANTHROPIC_API_KEY="$ANTHROPIC_API_KEY" \ -e OPENAI_API_KEY="$OPENAI_API_KEY" \ - rivetdev/sandbox-agent:0.3.1-full \ + rivetdev/sandbox-agent:0.3.2-full \ server --no-token --host 0.0.0.0 --port 3000 ``` -The `0.3.1-full` tag pins the exact version. The moving `full` tag is also published for contributors who want the latest full image. +The `0.3.2-full` tag pins the exact version. The moving `full` tag is also published for contributors who want the latest full image. -## TypeScript with dockerode +## TypeScript with the Docker provider + +```bash +npm install sandbox-agent@0.3.x dockerode get-port +``` ```typescript -import Docker from "dockerode"; import { SandboxAgent } from "sandbox-agent"; +import { docker } from "sandbox-agent/docker"; -const docker = new Docker(); -const PORT = 3000; - -const container = await docker.createContainer({ - Image: "rivetdev/sandbox-agent:0.3.1-full", - Cmd: ["server", "--no-token", "--host", "0.0.0.0", "--port", `${PORT}`], - Env: [ - `ANTHROPIC_API_KEY=${process.env.ANTHROPIC_API_KEY}`, - `OPENAI_API_KEY=${process.env.OPENAI_API_KEY}`, - `CODEX_API_KEY=${process.env.CODEX_API_KEY}`, - ].filter(Boolean), - ExposedPorts: { [`${PORT}/tcp`]: {} }, - HostConfig: { - AutoRemove: true, - PortBindings: { [`${PORT}/tcp`]: [{ HostPort: `${PORT}` }] }, - }, +const sdk = await SandboxAgent.start({ + sandbox: docker({ + env: [ + `ANTHROPIC_API_KEY=${process.env.ANTHROPIC_API_KEY}`, + `OPENAI_API_KEY=${process.env.OPENAI_API_KEY}`, + ].filter(Boolean), + }), }); -await container.start(); +try { + const session = await sdk.createSession({ agent: "codex" }); + await session.prompt([{ type: "text", text: "Summarize this 
repository." }]); +} finally { + await sdk.destroySandbox(); +} +``` -const baseUrl = `http://127.0.0.1:${PORT}`; -const sdk = await SandboxAgent.connect({ baseUrl }); +The `docker` provider uses the `rivetdev/sandbox-agent:0.3.2-full` image by default. Override with `image`: -const session = await sdk.createSession({ agent: "codex" }); -await session.prompt([{ type: "text", text: "Summarize this repository." }]); +```typescript +docker({ image: "my-custom-image:latest" }) ``` ## Building a custom image with everything preinstalled diff --git a/docs/deploy/e2b.mdx b/docs/deploy/e2b.mdx index 8ea4c74..4e056ee 100644 --- a/docs/deploy/e2b.mdx +++ b/docs/deploy/e2b.mdx @@ -10,42 +10,37 @@ description: "Deploy Sandbox Agent inside an E2B sandbox." ## TypeScript example +```bash +npm install sandbox-agent@0.3.x @e2b/code-interpreter +``` + ```typescript -import { Sandbox } from "@e2b/code-interpreter"; import { SandboxAgent } from "sandbox-agent"; +import { e2b } from "sandbox-agent/e2b"; const envs: Record = {}; if (process.env.ANTHROPIC_API_KEY) envs.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; if (process.env.OPENAI_API_KEY) envs.OPENAI_API_KEY = process.env.OPENAI_API_KEY; -const sandbox = await Sandbox.create({ allowInternetAccess: true, envs }); - -await sandbox.commands.run( - "curl -fsSL https://releases.rivet.dev/sandbox-agent/0.3.x/install.sh | sh" -); - -await sandbox.commands.run("sandbox-agent install-agent claude"); -await sandbox.commands.run("sandbox-agent install-agent codex"); - -await sandbox.commands.run( - "sandbox-agent server --no-token --host 0.0.0.0 --port 3000", - { background: true, timeoutMs: 0 } -); - -const baseUrl = `https://${sandbox.getHost(3000)}`; -const sdk = await SandboxAgent.connect({ baseUrl }); - -const session = await sdk.createSession({ agent: "claude" }); -const off = session.onEvent((event) => { - console.log(event.sender, event.payload); +const sdk = await SandboxAgent.start({ + sandbox: e2b({ + create: { envs }, + }), 
}); -await session.prompt([{ type: "text", text: "Summarize this repository" }]); -off(); - -await sandbox.kill(); +try { + const session = await sdk.createSession({ agent: "claude" }); + const response = await session.prompt([ + { type: "text", text: "Summarize this repository" }, + ]); + console.log(response.stopReason); +} finally { + await sdk.destroySandbox(); +} ``` +The `e2b` provider handles sandbox creation, Sandbox Agent installation, agent setup, and server startup automatically. + ## Faster cold starts For faster startup, create a custom E2B template with Sandbox Agent and target agents pre-installed. diff --git a/docs/deploy/local.mdx b/docs/deploy/local.mdx index eab8f3f..90e2ba6 100644 --- a/docs/deploy/local.mdx +++ b/docs/deploy/local.mdx @@ -32,12 +32,15 @@ Or with npm/Bun: ## With the TypeScript SDK -The SDK can spawn and manage the server as a subprocess: +The SDK can spawn and manage the server as a subprocess using the `local` provider: ```typescript import { SandboxAgent } from "sandbox-agent"; +import { local } from "sandbox-agent/local"; -const sdk = await SandboxAgent.start(); +const sdk = await SandboxAgent.start({ + sandbox: local(), +}); const session = await sdk.createSession({ agent: "claude", @@ -47,7 +50,21 @@ await session.prompt([ { type: "text", text: "Summarize this repository." }, ]); -await sdk.dispose(); +await sdk.destroySandbox(); ``` This starts the server on an available local port and connects automatically. + +Pass options to customize the local provider: + +```typescript +const sdk = await SandboxAgent.start({ + sandbox: local({ + port: 3000, + log: "inherit", + env: { + ANTHROPIC_API_KEY: process.env.MY_ANTHROPIC_KEY, + }, + }), +}); +``` diff --git a/docs/deploy/modal.mdx b/docs/deploy/modal.mdx index cb081b0..02a3828 100644 --- a/docs/deploy/modal.mdx +++ b/docs/deploy/modal.mdx @@ -10,88 +10,43 @@ description: "Deploy Sandbox Agent inside a Modal sandbox." 
## TypeScript example -```typescript -import { ModalClient } from "modal"; -import { SandboxAgent } from "sandbox-agent"; - -const modal = new ModalClient(); -const app = await modal.apps.fromName("sandbox-agent", { createIfMissing: true }); - -const image = modal.images - .fromRegistry("ubuntu:22.04") - .dockerfileCommands([ - "RUN apt-get update && apt-get install -y curl ca-certificates", - "RUN curl -fsSL https://releases.rivet.dev/sandbox-agent/0.2.x/install.sh | sh", - ]); - -const envs: Record = {}; -if (process.env.ANTHROPIC_API_KEY) envs.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; -if (process.env.OPENAI_API_KEY) envs.OPENAI_API_KEY = process.env.OPENAI_API_KEY; - -const secrets = Object.keys(envs).length > 0 - ? [await modal.secrets.fromObject(envs)] - : []; - -const sb = await modal.sandboxes.create(app, image, { - encryptedPorts: [3000], - secrets, -}); - -const exec = async (cmd: string) => { - const p = await sb.exec(["bash", "-c", cmd], { stdout: "pipe", stderr: "pipe" }); - const exitCode = await p.wait(); - if (exitCode !== 0) { - const stderr = await p.stderr.readText(); - throw new Error(`Command failed (exit ${exitCode}): ${cmd}\n${stderr}`); - } -}; - -await exec("sandbox-agent install-agent claude"); -await exec("sandbox-agent install-agent codex"); - -await sb.exec( - ["bash", "-c", "sandbox-agent server --no-token --host 0.0.0.0 --port 3000 &"], -); - -const tunnels = await sb.tunnels(); -const baseUrl = tunnels[3000].url; - -const sdk = await SandboxAgent.connect({ baseUrl }); - -const session = await sdk.createSession({ agent: "claude" }); -const off = session.onEvent((event) => { - console.log(event.sender, event.payload); -}); - -await session.prompt([{ type: "text", text: "Summarize this repository" }]); -off(); - -await sb.terminate(); +```bash +npm install sandbox-agent@0.3.x modal ``` +```typescript +import { SandboxAgent } from "sandbox-agent"; +import { modal } from "sandbox-agent/modal"; + +const secrets: Record = {}; +if 
(process.env.ANTHROPIC_API_KEY) secrets.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; +if (process.env.OPENAI_API_KEY) secrets.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + +const sdk = await SandboxAgent.start({ + sandbox: modal({ + create: { secrets }, + }), +}); + +try { + const session = await sdk.createSession({ agent: "claude" }); + const response = await session.prompt([ + { type: "text", text: "Summarize this repository" }, + ]); + console.log(response.stopReason); +} finally { + await sdk.destroySandbox(); +} +``` + +The `modal` provider handles app creation, image building, sandbox provisioning, agent installation, server startup, and tunnel networking automatically. + ## Faster cold starts -Modal caches image layers, so the `dockerfileCommands` that install `curl` and `sandbox-agent` only run on the first build. Subsequent sandbox creates reuse the cached image. - -## Running the test - -The example includes a health-check test. First, build the SDK: - -```bash -pnpm --filter sandbox-agent build -``` - -Then run the test with your Modal credentials: - -```bash -MODAL_TOKEN_ID= MODAL_TOKEN_SECRET= npx vitest run -``` - -Run from `examples/modal/`. The test will skip if credentials are not set. +Modal caches image layers, so the Dockerfile commands that install `curl` and `sandbox-agent` only run on the first build. Subsequent sandbox creates reuse the cached image. ## Notes - Modal sandboxes use [gVisor](https://gvisor.dev/) for strong isolation. -- Ports are exposed via encrypted tunnels (`encryptedPorts`). Use `sb.tunnels()` to get the public HTTPS URL. -- Environment variables (API keys) are passed as Modal [Secrets](https://modal.com/docs/guide/secrets) rather than plain env vars for security. -- Always call `sb.terminate()` when done to avoid leaking sandbox resources. +- Ports are exposed via encrypted tunnels (`encryptedPorts`). The provider uses `sb.tunnels()` to get the public HTTPS URL. 
+- Environment variables (API keys) are passed as Modal [Secrets](https://modal.com/docs/guide/secrets) for security. diff --git a/docs/deploy/vercel.mdx b/docs/deploy/vercel.mdx index 2025d67..db97236 100644 --- a/docs/deploy/vercel.mdx +++ b/docs/deploy/vercel.mdx @@ -10,52 +10,40 @@ description: "Deploy Sandbox Agent inside a Vercel Sandbox." ## TypeScript example -```typescript -import { Sandbox } from "@vercel/sandbox"; -import { SandboxAgent } from "sandbox-agent"; - -const envs: Record = {}; -if (process.env.ANTHROPIC_API_KEY) envs.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; -if (process.env.OPENAI_API_KEY) envs.OPENAI_API_KEY = process.env.OPENAI_API_KEY; - -const sandbox = await Sandbox.create({ - runtime: "node24", - ports: [3000], -}); - -const run = async (cmd: string, args: string[] = []) => { - const result = await sandbox.runCommand({ cmd, args, env: envs }); - if (result.exitCode !== 0) { - throw new Error(`Command failed: ${cmd} ${args.join(" ")}`); - } -}; - -await run("sh", ["-c", "curl -fsSL https://releases.rivet.dev/sandbox-agent/0.3.x/install.sh | sh"]); -await run("sandbox-agent", ["install-agent", "claude"]); -await run("sandbox-agent", ["install-agent", "codex"]); - -await sandbox.runCommand({ - cmd: "sandbox-agent", - args: ["server", "--no-token", "--host", "0.0.0.0", "--port", "3000"], - env: envs, - detached: true, -}); - -const baseUrl = sandbox.domain(3000); -const sdk = await SandboxAgent.connect({ baseUrl }); - -const session = await sdk.createSession({ agent: "claude" }); - -const off = session.onEvent((event) => { - console.log(event.sender, event.payload); -}); - -await session.prompt([{ type: "text", text: "Summarize this repository" }]); -off(); - -await sandbox.stop(); +```bash +npm install sandbox-agent@0.3.x @vercel/sandbox ``` +```typescript +import { SandboxAgent } from "sandbox-agent"; +import { vercel } from "sandbox-agent/vercel"; + +const env: Record = {}; +if (process.env.ANTHROPIC_API_KEY) 
env.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; +if (process.env.OPENAI_API_KEY) env.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + +const sdk = await SandboxAgent.start({ + sandbox: vercel({ + create: { + runtime: "node24", + env, + }, + }), +}); + +try { + const session = await sdk.createSession({ agent: "claude" }); + const response = await session.prompt([ + { type: "text", text: "Summarize this repository" }, + ]); + console.log(response.stopReason); +} finally { + await sdk.destroySandbox(); +} +``` + +The `vercel` provider handles sandbox creation, Sandbox Agent installation, agent setup, and server startup automatically. + ## Authentication Vercel Sandboxes support OIDC token auth (recommended) and access-token auth. diff --git a/docs/docs.json b/docs/docs.json index 9ba082c..16620fe 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -58,20 +58,32 @@ "icon": "server", "pages": [ "deploy/local", - "deploy/computesdk", "deploy/e2b", "deploy/daytona", "deploy/vercel", "deploy/cloudflare", "deploy/docker", - "deploy/boxlite" + "deploy/modal", + "deploy/boxlite", + "deploy/computesdk" ] } ] }, { "group": "Agent", - "pages": ["agent-sessions", "attachments", "skills-config", "mcp-config", "custom-tools"] + "pages": [ + "agent-sessions", + { + "group": "Agents", + "icon": "robot", + "pages": ["agents/claude", "agents/codex", "agents/opencode", "agents/cursor", "agents/amp", "agents/pi"] + }, + "attachments", + "skills-config", + "mcp-config", + "custom-tools" + ] }, { "group": "System", @@ -79,12 +91,12 @@ }, { "group": "Orchestration", - "pages": ["architecture", "session-persistence", "observability", "multiplayer", "security"] + "pages": ["orchestration-architecture", "session-persistence", "observability", "multiplayer", "security"] }, { "group": "Reference", "pages": [ - "agent-capabilities", + "architecture", "cli", "inspector", "opencode-compatibility", diff --git a/docs/mcp-config.mdx b/docs/mcp-config.mdx index 71e8105..cc1c976 100644 --- 
a/docs/mcp-config.mdx +++ b/docs/mcp-config.mdx @@ -27,9 +27,7 @@ await sdk.setMcpConfig( // Create a session using the configured MCP servers const session = await sdk.createSession({ agent: "claude", - sessionInit: { - cwd: "/workspace", - }, + cwd: "/workspace", }); await session.prompt([ diff --git a/docs/multiplayer.mdx b/docs/multiplayer.mdx index 4f405ea..215bb1c 100644 --- a/docs/multiplayer.mdx +++ b/docs/multiplayer.mdx @@ -20,8 +20,40 @@ Use [actor keys](https://rivet.dev/docs/actors/keys) to map each workspace to on ```ts Actor (server) import { actor, setup } from "rivetkit"; -import { SandboxAgent } from "sandbox-agent"; -import { RivetSessionPersistDriver, type RivetPersistState } from "@sandbox-agent/persist-rivet"; +import { SandboxAgent, type SessionPersistDriver, type SessionRecord, type SessionEvent, type ListPageRequest, type ListPage, type ListEventsRequest } from "sandbox-agent"; + +interface RivetPersistData { sessions: Record; events: Record; } +type RivetPersistState = { _sandboxAgentPersist: RivetPersistData }; + +class RivetSessionPersistDriver implements SessionPersistDriver { + private readonly stateKey: string; + private readonly ctx: { state: Record }; + constructor(ctx: { state: Record }, options: { stateKey?: string } = {}) { + this.ctx = ctx; + this.stateKey = options.stateKey ?? "_sandboxAgentPersist"; + if (!this.ctx.state[this.stateKey]) { + this.ctx.state[this.stateKey] = { sessions: {}, events: {} }; + } + } + private get data(): RivetPersistData { return this.ctx.state[this.stateKey] as RivetPersistData; } + async getSession(id: string) { const s = this.data.sessions[id]; return s ? { ...s } : undefined; } + async listSessions(request: ListPageRequest = {}): Promise> { + const sorted = Object.values(this.data.sessions).sort((a, b) => a.createdAt - b.createdAt || a.id.localeCompare(b.id)); + const offset = Number(request.cursor ?? 0); + const limit = request.limit ?? 
100; + const slice = sorted.slice(offset, offset + limit); + return { items: slice, nextCursor: offset + slice.length < sorted.length ? String(offset + slice.length) : undefined }; + } + async updateSession(session: SessionRecord) { this.data.sessions[session.id] = { ...session }; if (!this.data.events[session.id]) this.data.events[session.id] = []; } + async listEvents(request: ListEventsRequest): Promise> { + const all = [...(this.data.events[request.sessionId] ?? [])].sort((a, b) => a.eventIndex - b.eventIndex || a.id.localeCompare(b.id)); + const offset = Number(request.cursor ?? 0); + const limit = request.limit ?? 100; + const slice = all.slice(offset, offset + limit); + return { items: slice, nextCursor: offset + slice.length < all.length ? String(offset + slice.length) : undefined }; + } + async insertEvent(sessionId: string, event: SessionEvent) { const events = this.data.events[sessionId] ?? []; events.push({ ...event, payload: JSON.parse(JSON.stringify(event.payload)) }); this.data.events[sessionId] = events; } +} type WorkspaceState = RivetPersistState & { sandboxId: string; @@ -111,5 +143,5 @@ await conn.prompt({ ## Notes - Keep sandbox calls actor-only. Browser clients should not call Sandbox Agent directly. -- Use `@sandbox-agent/persist-rivet` so session history persists in actor state. +- Copy the Rivet persist driver from the example above into your project so session history persists in actor state. - For client connection patterns, see [Rivet JavaScript client](https://rivet.dev/docs/clients/javascript). diff --git a/docs/orchestration-architecture.mdx b/docs/orchestration-architecture.mdx new file mode 100644 index 0000000..08c776c --- /dev/null +++ b/docs/orchestration-architecture.mdx @@ -0,0 +1,43 @@ +--- +title: "Orchestration Architecture" +description: "Production topology, backend requirements, and session persistence." +icon: "sitemap" +--- + +This page covers production topology and backend requirements. 
Read [Architecture](/architecture) first for an overview of how the server, SDK, and agent processes fit together. + +## Suggested Topology + +Run the SDK on your backend, then call it from your frontend. + +This extra hop is recommended because it keeps auth/token logic on the backend and makes persistence simpler. + +```mermaid placement="top-right" + flowchart LR + BROWSER["Browser"] + subgraph BACKEND["Your backend"] + direction TB + SDK["Sandbox Agent SDK"] + end + subgraph SANDBOX_SIMPLE["Sandbox"] + SERVER_SIMPLE["Sandbox Agent server"] + end + + BROWSER --> BACKEND + BACKEND --> SDK --> SERVER_SIMPLE +``` + +### Backend requirements + +Your backend layer needs to handle: + +- **Long-running connections**: prompts can take minutes. +- **Session affinity**: follow-up messages must reach the same session. +- **State between requests**: session metadata and event history must persist across requests. +- **Graceful recovery**: sessions should resume after backend restarts. + +We recommend [Rivet](https://rivet.dev) over serverless because actors natively support the long-lived connections, session routing, and state persistence that agent workloads require. + +## Session persistence + +For storage driver options and replay behavior, see [Persisting Sessions](/session-persistence). diff --git a/docs/quickstart.mdx b/docs/quickstart.mdx index caf2c21..19d9742 100644 --- a/docs/quickstart.mdx +++ b/docs/quickstart.mdx @@ -1,281 +1,370 @@ --- title: "Quickstart" -description: "Start the server and send your first message." +description: "Get a coding agent running in a sandbox in under a minute." icon: "rocket" --- - + - + ```bash - npx skills add rivet-dev/skills -s sandbox-agent + npm install sandbox-agent@0.3.x ``` - + ```bash - bunx skills add rivet-dev/skills -s sandbox-agent - ``` - - - - - - Each coding agent requires API keys to connect to their respective LLM providers. - - - - ```bash - export ANTHROPIC_API_KEY="sk-ant-..." - export OPENAI_API_KEY="sk-..." 
- ``` - - - - ```typescript - import { Sandbox } from "@e2b/code-interpreter"; - - const envs: Record = {}; - if (process.env.ANTHROPIC_API_KEY) envs.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; - if (process.env.OPENAI_API_KEY) envs.OPENAI_API_KEY = process.env.OPENAI_API_KEY; - - const sandbox = await Sandbox.create({ envs }); - ``` - - - - ```typescript - import { Daytona } from "@daytonaio/sdk"; - - const envVars: Record = {}; - if (process.env.ANTHROPIC_API_KEY) envVars.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; - if (process.env.OPENAI_API_KEY) envVars.OPENAI_API_KEY = process.env.OPENAI_API_KEY; - - const daytona = new Daytona(); - const sandbox = await daytona.create({ - snapshot: "sandbox-agent-ready", - envVars, - }); - ``` - - - - ```bash - docker run -p 2468:2468 \ - -e ANTHROPIC_API_KEY="sk-ant-..." \ - -e OPENAI_API_KEY="sk-..." \ - rivetdev/sandbox-agent:0.3.1-full \ - server --no-token --host 0.0.0.0 --port 2468 - ``` - - - - - - Use `sandbox-agent credentials extract-env --export` to extract your existing API keys (Anthropic, OpenAI, etc.) from local Claude Code or Codex config files. - - - Use the `mock` agent for SDK and integration testing without provider credentials. - - - For per-tenant token tracking, budget enforcement, or usage-based billing, see [LLM Credentials](/llm-credentials) for gateway options like OpenRouter, LiteLLM, and Portkey. - - - - - - - - Install and run the binary directly. - - ```bash - curl -fsSL https://releases.rivet.dev/sandbox-agent/0.3.x/install.sh | sh - sandbox-agent server --no-token --host 0.0.0.0 --port 2468 - ``` - - - - Run without installing globally. - - ```bash - npx @sandbox-agent/cli@0.3.x server --no-token --host 0.0.0.0 --port 2468 - ``` - - - - Run without installing globally. - - ```bash - bunx @sandbox-agent/cli@0.3.x server --no-token --host 0.0.0.0 --port 2468 - ``` - - - - Install globally, then run. 
- - ```bash - npm install -g @sandbox-agent/cli@0.3.x - sandbox-agent server --no-token --host 0.0.0.0 --port 2468 - ``` - - - - Install globally, then run. - - ```bash - bun add -g @sandbox-agent/cli@0.3.x + bun add sandbox-agent@0.3.x # Allow Bun to run postinstall scripts for native binaries (required for SandboxAgent.start()). - bun pm -g trust @sandbox-agent/cli-linux-x64 @sandbox-agent/cli-linux-arm64 @sandbox-agent/cli-darwin-arm64 @sandbox-agent/cli-darwin-x64 @sandbox-agent/cli-win32-x64 - sandbox-agent server --no-token --host 0.0.0.0 --port 2468 + bun pm trust @sandbox-agent/cli-linux-x64 @sandbox-agent/cli-linux-arm64 @sandbox-agent/cli-darwin-arm64 @sandbox-agent/cli-darwin-x64 @sandbox-agent/cli-win32-x64 ``` + + - - For local development, use `SandboxAgent.start()` to spawn and manage the server as a subprocess. + + `SandboxAgent.start()` provisions a sandbox, starts a lightweight [Sandbox Agent server](/architecture) inside it, and connects your SDK client. + + ```bash npm install sandbox-agent@0.3.x ``` ```typescript import { SandboxAgent } from "sandbox-agent"; + import { local } from "sandbox-agent/local"; - const sdk = await SandboxAgent.start(); + // Runs on your machine. Inherits process.env automatically. + const client = await SandboxAgent.start({ + sandbox: local(), + }); ``` + + See [Local deploy guide](/deploy/local) - - For local development, use `SandboxAgent.start()` to spawn and manage the server as a subprocess. - + ```bash - bun add sandbox-agent@0.3.x - # Allow Bun to run postinstall scripts for native binaries (required for SandboxAgent.start()). 
- bun pm trust @sandbox-agent/cli-linux-x64 @sandbox-agent/cli-linux-arm64 @sandbox-agent/cli-darwin-arm64 @sandbox-agent/cli-darwin-x64 @sandbox-agent/cli-win32-x64 + npm install sandbox-agent@0.3.x @e2b/code-interpreter ``` ```typescript import { SandboxAgent } from "sandbox-agent"; + import { e2b } from "sandbox-agent/e2b"; - const sdk = await SandboxAgent.start(); + // Provisions a cloud sandbox on E2B, installs the server, and connects. + const client = await SandboxAgent.start({ + sandbox: e2b(), + }); ``` + + See [E2B deploy guide](/deploy/e2b) - - If you're running from source instead of the installed CLI. - + ```bash - cargo run -p sandbox-agent -- server --no-token --host 0.0.0.0 --port 2468 + npm install sandbox-agent@0.3.x @daytonaio/sdk ``` + + ```typescript + import { SandboxAgent } from "sandbox-agent"; + import { daytona } from "sandbox-agent/daytona"; + + // Provisions a Daytona workspace with the server pre-installed. + const client = await SandboxAgent.start({ + sandbox: daytona(), + }); + ``` + + See [Daytona deploy guide](/deploy/daytona) + + + + ```bash + npm install sandbox-agent@0.3.x @vercel/sandbox + ``` + + ```typescript + import { SandboxAgent } from "sandbox-agent"; + import { vercel } from "sandbox-agent/vercel"; + + // Provisions a Vercel sandbox with the server installed on boot. + const client = await SandboxAgent.start({ + sandbox: vercel(), + }); + ``` + + See [Vercel deploy guide](/deploy/vercel) + + + + ```bash + npm install sandbox-agent@0.3.x modal + ``` + + ```typescript + import { SandboxAgent } from "sandbox-agent"; + import { modal } from "sandbox-agent/modal"; + + // Builds a container image with agents pre-installed (cached after first run), + // starts a Modal sandbox from that image, and connects. 
+ const client = await SandboxAgent.start({ + sandbox: modal(), + }); + ``` + + See [Modal deploy guide](/deploy/modal) + + + + ```bash + npm install sandbox-agent@0.3.x @cloudflare/sandbox + ``` + + ```typescript + import { SandboxAgent } from "sandbox-agent"; + import { cloudflare } from "sandbox-agent/cloudflare"; + import { SandboxClient } from "@cloudflare/sandbox"; + + // Uses the Cloudflare Sandbox SDK to provision and connect. + // The Cloudflare SDK handles server lifecycle internally. + const cfSandboxClient = new SandboxClient(); + const client = await SandboxAgent.start({ + sandbox: cloudflare({ sdk: cfSandboxClient }), + }); + ``` + + See [Cloudflare deploy guide](/deploy/cloudflare) + + + + ```bash + npm install sandbox-agent@0.3.x dockerode get-port + ``` + + ```typescript + import { SandboxAgent } from "sandbox-agent"; + import { docker } from "sandbox-agent/docker"; + + // Runs a Docker container locally. Good for testing. + const client = await SandboxAgent.start({ + sandbox: docker(), + }); + ``` + + See [Docker deploy guide](/deploy/docker) - Binding to `0.0.0.0` allows the server to accept connections from any network interface, which is required when running inside a sandbox where clients connect remotely. +
+ + **More info:** - - Tokens are usually not required. Most sandbox providers (E2B, Daytona, etc.) already secure networking at the infrastructure layer. + + Agents need API keys for their LLM provider. Each provider passes credentials differently: - If you expose the server publicly, use `--token "$SANDBOX_TOKEN"` to require authentication: + ```typescript + // Local — inherits process.env automatically - ```bash - sandbox-agent server --token "$SANDBOX_TOKEN" --host 0.0.0.0 --port 2468 + // E2B + e2b({ create: { envs: { ANTHROPIC_API_KEY: "..." } } }) + + // Daytona + daytona({ create: { envVars: { ANTHROPIC_API_KEY: "..." } } }) + + // Vercel + vercel({ create: { env: { ANTHROPIC_API_KEY: "..." } } }) + + // Modal + modal({ create: { secrets: { ANTHROPIC_API_KEY: "..." } } }) + + // Docker + docker({ env: ["ANTHROPIC_API_KEY=..."] }) ``` - Then pass the token when connecting: + For multi-tenant billing, per-user keys, and gateway options, see [LLM Credentials](/llm-credentials). + + + Implement the `SandboxProvider` interface to use any sandbox platform: + + ```typescript + import { SandboxAgent, type SandboxProvider } from "sandbox-agent"; + + const myProvider: SandboxProvider = { + name: "my-provider", + async create() { + // Provision a sandbox, install & start the server, return an ID + return "sandbox-123"; + }, + async destroy(sandboxId) { + // Tear down the sandbox + }, + async getUrl(sandboxId) { + // Return the Sandbox Agent server URL + return `https://${sandboxId}.my-platform.dev:3000`; + }, + }; + + const client = await SandboxAgent.start({ + sandbox: myProvider, + }); + ``` + + + + If you already have a Sandbox Agent server running, connect directly: + + ```typescript + const client = await SandboxAgent.connect({ + baseUrl: "http://127.0.0.1:2468", + }); + ``` + + + - - ```typescript - import { SandboxAgent } from "sandbox-agent"; - - const sdk = await SandboxAgent.connect({ - baseUrl: "http://your-server:2468", - token: process.env.SANDBOX_TOKEN, 
- }); - ``` - - ```bash - curl "http://your-server:2468/v1/health" \ - -H "Authorization: Bearer $SANDBOX_TOKEN" + curl -fsSL https://releases.rivet.dev/sandbox-agent/0.3.x/install.sh | sh + sandbox-agent server --no-token --host 0.0.0.0 --port 2468 ``` - - + ```bash - sandbox-agent --token "$SANDBOX_TOKEN" api agents list \ - --endpoint http://your-server:2468 + npx @sandbox-agent/cli@0.3.x server --no-token --host 0.0.0.0 --port 2468 + ``` + + + ```bash + docker run -p 2468:2468 \ + -e ANTHROPIC_API_KEY="sk-ant-..." \ + -e OPENAI_API_KEY="sk-..." \ + rivetdev/sandbox-agent:0.3.2-full \ + server --no-token --host 0.0.0.0 --port 2468 ``` - - If you're calling the server from a browser, see the [CORS configuration guide](/cors). - - - Supported agent IDs: `claude`, `codex`, `opencode`, `amp`, `pi`, `cursor`, `mock`. + + - To preinstall agents: + ```typescript Claude + const session = await client.createSession({ + agent: "claude", + }); - ```bash - sandbox-agent install-agent --all - ``` + session.onEvent((event) => { + console.log(event.sender, event.payload); + }); - If agents are not installed up front, they are lazily installed when creating a session. + const result = await session.prompt([ + { type: "text", text: "Summarize the repository and suggest next steps." }, + ]); + + console.log(result.stopReason); + ``` + + ```typescript Codex + const session = await client.createSession({ + agent: "codex", + }); + + session.onEvent((event) => { + console.log(event.sender, event.payload); + }); + + const result = await session.prompt([ + { type: "text", text: "Summarize the repository and suggest next steps." }, + ]); + + console.log(result.stopReason); + ``` + + ```typescript OpenCode + const session = await client.createSession({ + agent: "opencode", + }); + + session.onEvent((event) => { + console.log(event.sender, event.payload); + }); + + const result = await session.prompt([ + { type: "text", text: "Summarize the repository and suggest next steps." 
}, + ]); + + console.log(result.stopReason); + ``` + + ```typescript Cursor + const session = await client.createSession({ + agent: "cursor", + }); + + session.onEvent((event) => { + console.log(event.sender, event.payload); + }); + + const result = await session.prompt([ + { type: "text", text: "Summarize the repository and suggest next steps." }, + ]); + + console.log(result.stopReason); + ``` + + ```typescript Amp + const session = await client.createSession({ + agent: "amp", + }); + + session.onEvent((event) => { + console.log(event.sender, event.payload); + }); + + const result = await session.prompt([ + { type: "text", text: "Summarize the repository and suggest next steps." }, + ]); + + console.log(result.stopReason); + ``` + + ```typescript Pi + const session = await client.createSession({ + agent: "pi", + }); + + session.onEvent((event) => { + console.log(event.sender, event.payload); + }); + + const result = await session.prompt([ + { type: "text", text: "Summarize the repository and suggest next steps." }, + ]); + + console.log(result.stopReason); + ``` + + + + See [Agent Sessions](/agent-sessions) for the full sessions API. - + ```typescript - import { SandboxAgent } from "sandbox-agent"; - - const sdk = await SandboxAgent.connect({ - baseUrl: "http://127.0.0.1:2468", - }); - - const session = await sdk.createSession({ - agent: "claude", - sessionInit: { - cwd: "/", - mcpServers: [], - }, - }); - - console.log(session.id); + await client.destroySandbox(); // tears down the sandbox and disconnects ``` + + Use `client.dispose()` instead to disconnect without destroying the sandbox (for reconnecting later). - - ```typescript - const result = await session.prompt([ - { type: "text", text: "Summarize the repository and suggest next steps." 
}, - ]); - - console.log(result.stopReason); - ``` - - - - ```typescript - const off = session.onEvent((event) => { - console.log(event.sender, event.payload); - }); - - const page = await sdk.getEvents({ - sessionId: session.id, - limit: 50, - }); - - console.log(page.items.length); - off(); - ``` - - - - Open the Inspector UI at `/ui/` on your server (for example, `http://localhost:2468/ui/`) to inspect sessions and events in a GUI. + + Open the Inspector at `/ui/` on your server (e.g. `http://localhost:2468/ui/`) to view sessions and events in a GUI. Sandbox Agent Inspector @@ -283,16 +372,44 @@ icon: "rocket" +## Full example + +```typescript +import { SandboxAgent } from "sandbox-agent"; +import { e2b } from "sandbox-agent/e2b"; + +const client = await SandboxAgent.start({ + sandbox: e2b({ + create: { + envs: { ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY }, + }, + }), +}); + +try { + const session = await client.createSession({ agent: "claude" }); + + session.onEvent((event) => { + console.log(`[${event.sender}]`, JSON.stringify(event.payload)); + }); + + const result = await session.prompt([ + { type: "text", text: "Write a function that checks if a number is prime." }, + ]); + + console.log("Done:", result.stopReason); +} finally { + await client.destroySandbox(); +} +``` + ## Next steps - - - Configure in-memory, Rivet Actor state, IndexedDB, SQLite, and Postgres persistence. + + + Full TypeScript SDK API surface. - Deploy your agent to E2B, Daytona, Docker, Vercel, or Cloudflare. - - - Use the latest TypeScript SDK API. + Deploy to E2B, Daytona, Docker, Vercel, or Cloudflare. diff --git a/docs/sdk-overview.mdx b/docs/sdk-overview.mdx index fc4aee1..a0f9b84 100644 --- a/docs/sdk-overview.mdx +++ b/docs/sdk-overview.mdx @@ -23,12 +23,6 @@ The TypeScript SDK is centered on `sandbox-agent` and its `SandboxAgent` class. 
-## Optional persistence drivers - -```bash -npm install @sandbox-agent/persist-indexeddb@0.3.x @sandbox-agent/persist-sqlite@0.3.x @sandbox-agent/persist-postgres@0.3.x -``` - ## Optional React components ```bash @@ -68,15 +62,12 @@ const sdk = await SandboxAgent.connect({ controller.abort(); ``` -With persistence: +With persistence (see [Persisting Sessions](/session-persistence) for driver options): ```ts -import { SandboxAgent } from "sandbox-agent"; -import { SQLiteSessionPersistDriver } from "@sandbox-agent/persist-sqlite"; +import { SandboxAgent, InMemorySessionPersistDriver } from "sandbox-agent"; -const persist = new SQLiteSessionPersistDriver({ - filename: "./sessions.db", -}); +const persist = new InMemorySessionPersistDriver(); const sdk = await SandboxAgent.connect({ baseUrl: "http://127.0.0.1:2468", @@ -84,25 +75,40 @@ const sdk = await SandboxAgent.connect({ }); ``` -Local autospawn (Node.js only): +Local spawn with a sandbox provider: ```ts import { SandboxAgent } from "sandbox-agent"; +import { local } from "sandbox-agent/local"; -const localSdk = await SandboxAgent.start(); +const sdk = await SandboxAgent.start({ + sandbox: local(), +}); -await localSdk.dispose(); +// sdk.sandboxId — prefixed provider ID (e.g. "local/127.0.0.1:2468") + +await sdk.destroySandbox(); // tears down sandbox + disposes client ``` +`SandboxAgent.start(...)` requires a `sandbox` provider. Built-in providers: + +| Import | Provider | +|--------|----------| +| `sandbox-agent/local` | Local subprocess | +| `sandbox-agent/docker` | Docker container | +| `sandbox-agent/e2b` | E2B sandbox | +| `sandbox-agent/daytona` | Daytona workspace | +| `sandbox-agent/vercel` | Vercel Sandbox | +| `sandbox-agent/cloudflare` | Cloudflare Sandbox | + +Use `sdk.dispose()` to disconnect without destroying the sandbox, or `sdk.destroySandbox()` to tear down both. 
+ ## Session flow ```ts const session = await sdk.createSession({ agent: "mock", - sessionInit: { - cwd: "/", - mcpServers: [], - }, + cwd: "/", }); const prompt = await session.prompt([ @@ -223,6 +229,7 @@ Parameters: - `token` (optional): Bearer token for authenticated servers - `headers` (optional): Additional request headers - `fetch` (optional): Custom fetch implementation used by SDK HTTP and session calls +- `skipHealthCheck` (optional): set `true` to skip the startup `/v1/health` wait - `waitForHealth` (optional, defaults to enabled): waits for `/v1/health` before HTTP helpers and session setup proceed; pass `false` to disable or `{ timeoutMs }` to bound the wait - `signal` (optional): aborts the startup `/v1/health` wait used by `connect()` diff --git a/docs/security.mdx b/docs/security.mdx index ec00f49..c8b02ad 100644 --- a/docs/security.mdx +++ b/docs/security.mdx @@ -4,7 +4,7 @@ description: "Backend-first auth and access control patterns." icon: "shield" --- -As covered in [Architecture](/architecture), run the Sandbox Agent client on your backend, not in the browser. +As covered in [Orchestration Architecture](/orchestration-architecture), run the Sandbox Agent client on your backend, not in the browser. This keeps sandbox credentials private and gives you one place for authz, rate limiting, and audit logging. @@ -92,7 +92,7 @@ export const workspace = actor({ const session = await sdk.createSession({ agent: "claude", - sessionInit: { cwd: "/workspace" }, + cwd: "/workspace", }); session.onEvent((event) => { diff --git a/docs/session-persistence.mdx b/docs/session-persistence.mdx index eaa4de0..5505864 100644 --- a/docs/session-persistence.mdx +++ b/docs/session-persistence.mdx @@ -10,14 +10,22 @@ With persistence enabled, sessions can be restored after runtime/session loss. 
S Each driver stores: -- `SessionRecord` (`id`, `agent`, `agentSessionId`, `lastConnectionId`, `createdAt`, optional `destroyedAt`, optional `sessionInit`) +- `SessionRecord` (`id`, `agent`, `agentSessionId`, `lastConnectionId`, `createdAt`, optional `destroyedAt`, optional `sandboxId`, optional `sessionInit`, optional `configOptions`, optional `modes`) - `SessionEvent` (`id`, `eventIndex`, `sessionId`, `connectionId`, `sender`, `payload`, `createdAt`) ## Persistence drivers -### In-memory +### Rivet -Best for local dev and ephemeral workloads. +Recommended for sandbox orchestration with actor state. See [Multiplayer](/multiplayer) for a full Rivet actor example with persistence in actor state. + +### IndexedDB (browser) + +Best for browser apps that should survive reloads. See the [Inspector source](https://github.com/rivet-dev/sandbox-agent/tree/main/frontend/packages/inspector/src/persist-indexeddb.ts) for a complete IndexedDB driver you can copy into your project. + +### In-memory (built-in) + +Best for local dev and ephemeral workloads. No extra dependencies required. ```ts import { InMemorySessionPersistDriver, SandboxAgent } from "sandbox-agent"; @@ -33,91 +41,17 @@ const sdk = await SandboxAgent.connect({ }); ``` -### Rivet - -Recommended for sandbox orchestration with actor state. 
- -```bash -npm install @sandbox-agent/persist-rivet@0.3.x -``` - -```ts -import { actor } from "rivetkit"; -import { SandboxAgent } from "sandbox-agent"; -import { RivetSessionPersistDriver, type RivetPersistState } from "@sandbox-agent/persist-rivet"; - -type PersistedState = RivetPersistState & { - sandboxId: string; - baseUrl: string; -}; - -export default actor({ - createState: async () => { - return { - sandboxId: "sbx_123", - baseUrl: "http://127.0.0.1:2468", - } satisfies Partial; - }, - createVars: async (c) => { - const persist = new RivetSessionPersistDriver(c); - const sdk = await SandboxAgent.connect({ - baseUrl: c.state.baseUrl, - persist, - }); - - const session = await sdk.resumeOrCreateSession({ id: "default", agent: "codex" }); - - const unsubscribe = session.onEvent((event) => { - c.broadcast("session.event", event); - }); - - return { sdk, session, unsubscribe }; - }, - actions: { - sendMessage: async (c, message: string) => { - await c.vars.session.prompt([{ type: "text", text: message }]); - }, - }, - onSleep: async (c) => { - c.vars.unsubscribe?.(); - await c.vars.sdk.dispose(); - }, -}); -``` - -### IndexedDB - -Best for browser apps that should survive reloads. - -```bash -npm install @sandbox-agent/persist-indexeddb@0.3.x -``` - -```ts -import { SandboxAgent } from "sandbox-agent"; -import { IndexedDbSessionPersistDriver } from "@sandbox-agent/persist-indexeddb"; - -const persist = new IndexedDbSessionPersistDriver({ - databaseName: "sandbox-agent-session-store", -}); - -const sdk = await SandboxAgent.connect({ - baseUrl: "http://127.0.0.1:2468", - persist, -}); -``` - ### SQLite Best for local/server Node apps that need durable storage without a DB server. 
```bash -npm install @sandbox-agent/persist-sqlite@0.3.x +npm install better-sqlite3 ``` ```ts import { SandboxAgent } from "sandbox-agent"; -import { SQLiteSessionPersistDriver } from "@sandbox-agent/persist-sqlite"; +import { SQLiteSessionPersistDriver } from "./persist.ts"; const persist = new SQLiteSessionPersistDriver({ filename: "./sandbox-agent.db", @@ -129,17 +63,19 @@ const sdk = await SandboxAgent.connect({ }); ``` +See the [full SQLite example](https://github.com/rivet-dev/sandbox-agent/tree/main/examples/persist-sqlite) for the complete driver implementation you can copy into your project. + ### Postgres Use when you already run Postgres and want shared relational storage. ```bash -npm install @sandbox-agent/persist-postgres@0.3.x +npm install pg ``` ```ts import { SandboxAgent } from "sandbox-agent"; -import { PostgresSessionPersistDriver } from "@sandbox-agent/persist-postgres"; +import { PostgresSessionPersistDriver } from "./persist.ts"; const persist = new PostgresSessionPersistDriver({ connectionString: process.env.DATABASE_URL, @@ -152,6 +88,8 @@ const sdk = await SandboxAgent.connect({ }); ``` +See the [full Postgres example](https://github.com/rivet-dev/sandbox-agent/tree/main/examples/persist-postgres) for the complete driver implementation you can copy into your project. + ### Custom driver Implement `SessionPersistDriver` for custom backends. @@ -160,11 +98,11 @@ Implement `SessionPersistDriver` for custom backends. 
import type { SessionPersistDriver } from "sandbox-agent"; class MyDriver implements SessionPersistDriver { - async getSession(id) { return null; } + async getSession(id) { return undefined; } async listSessions(request) { return { items: [] }; } async updateSession(session) {} async listEvents(request) { return { items: [] }; } - async insertEvent(event) {} + async insertEvent(sessionId, event) {} } ``` diff --git a/docs/skills-config.mdx b/docs/skills-config.mdx index c85bc2c..c3145c2 100644 --- a/docs/skills-config.mdx +++ b/docs/skills-config.mdx @@ -35,9 +35,7 @@ await sdk.setSkillsConfig( // Create a session using the configured skills const session = await sdk.createSession({ agent: "claude", - sessionInit: { - cwd: "/workspace", - }, + cwd: "/workspace", }); await session.prompt([ diff --git a/examples/boxlite/src/index.ts b/examples/boxlite/src/index.ts index bdcd53a..171166b 100644 --- a/examples/boxlite/src/index.ts +++ b/examples/boxlite/src/index.ts @@ -25,7 +25,7 @@ const baseUrl = "http://localhost:3000"; console.log("Connecting to server..."); const client = await SandboxAgent.connect({ baseUrl }); -const session = await client.createSession({ agent: detectAgent(), sessionInit: { cwd: "/root", mcpServers: [] } }); +const session = await client.createSession({ agent: detectAgent(), cwd: "/root" }); const sessionId = session.id; console.log(` UI: ${buildInspectorUrl({ baseUrl, sessionId })}`); diff --git a/examples/cloudflare/tests/cloudflare.test.ts b/examples/cloudflare/tests/cloudflare.test.ts new file mode 100644 index 0000000..d00c2ce --- /dev/null +++ b/examples/cloudflare/tests/cloudflare.test.ts @@ -0,0 +1,154 @@ +import { describe, it, expect } from "vitest"; +import { spawn, type ChildProcess } from "node:child_process"; +import { resolve, dirname } from "node:path"; +import { fileURLToPath } from "node:url"; +import { execSync } from "node:child_process"; + +const __dirname = dirname(fileURLToPath(import.meta.url)); +const PROJECT_DIR = 
resolve(__dirname, ".."); + +/** + * Cloudflare Workers integration test. + * + * Set RUN_CLOUDFLARE_EXAMPLES=1 to enable. Requires wrangler and Docker. + * + * This starts `wrangler dev` which: + * 1. Builds the Dockerfile (cloudflare/sandbox base + sandbox-agent) + * 2. Starts a local Workers runtime with Durable Objects and containers + * 3. Exposes the app on a local port + * + * We then test through the proxy endpoint which forwards to sandbox-agent + * running inside the container. + */ +const shouldRun = process.env.RUN_CLOUDFLARE_EXAMPLES === "1"; +const timeoutMs = Number.parseInt(process.env.SANDBOX_TEST_TIMEOUT_MS || "", 10) || 600_000; + +const testFn = shouldRun ? it : it.skip; + +interface WranglerDev { + baseUrl: string; + cleanup: () => void; +} + +async function startWranglerDev(): Promise { + // Build frontend assets first (wrangler expects dist/ to exist) + execSync("npx vite build", { cwd: PROJECT_DIR, stdio: "pipe" }); + + return new Promise((resolve, reject) => { + const child: ChildProcess = spawn("npx", ["wrangler", "dev", "--port", "0"], { + cwd: PROJECT_DIR, + stdio: ["ignore", "pipe", "pipe"], + detached: true, + env: { + ...process.env, + // Ensure wrangler picks up API keys to pass to the container + NODE_ENV: "development", + }, + }); + + let stdout = ""; + let stderr = ""; + let resolved = false; + + const cleanup = () => { + if (child.pid) { + // Kill process group to ensure wrangler and its children are cleaned up + try { + process.kill(-child.pid, "SIGTERM"); + } catch { + try { + child.kill("SIGTERM"); + } catch {} + } + } + }; + + const timer = setTimeout(() => { + if (!resolved) { + resolved = true; + cleanup(); + reject(new Error(`wrangler dev did not start within 120s.\nstdout: ${stdout}\nstderr: ${stderr}`)); + } + }, 120_000); + + const onData = (chunk: Buffer) => { + const text = chunk.toString(); + stdout += text; + + // wrangler dev prints "Ready on http://localhost:XXXX" when ready + const match = stdout.match(/Ready on 
(https?:\/\/[^\s]+)/i) ?? stdout.match(/(https?:\/\/(?:localhost|127\.0\.0\.1):\d+)/); + if (match && !resolved) { + resolved = true; + clearTimeout(timer); + resolve({ baseUrl: match[1], cleanup }); + } + }; + + child.stdout?.on("data", onData); + child.stderr?.on("data", (chunk: Buffer) => { + const text = chunk.toString(); + stderr += text; + // Some wrangler versions print ready message to stderr + const match = text.match(/Ready on (https?:\/\/[^\s]+)/i) ?? text.match(/(https?:\/\/(?:localhost|127\.0\.0\.1):\d+)/); + if (match && !resolved) { + resolved = true; + clearTimeout(timer); + resolve({ baseUrl: match[1], cleanup }); + } + }); + + child.on("error", (err) => { + if (!resolved) { + resolved = true; + clearTimeout(timer); + reject(new Error(`wrangler dev failed to start: ${err.message}`)); + } + }); + + child.on("exit", (code) => { + if (!resolved) { + resolved = true; + clearTimeout(timer); + reject(new Error(`wrangler dev exited with code ${code}.\nstdout: ${stdout}\nstderr: ${stderr}`)); + } + }); + }); +} + +describe("cloudflare example", () => { + testFn( + "starts wrangler dev and sandbox-agent responds via proxy", + async () => { + const { baseUrl, cleanup } = await startWranglerDev(); + try { + // The Cloudflare example proxies requests through /sandbox/:name/proxy/* + // Wait for the container inside the Durable Object to start sandbox-agent + const healthUrl = `${baseUrl}/sandbox/test/proxy/v1/health`; + + let healthy = false; + for (let i = 0; i < 120; i++) { + try { + const res = await fetch(healthUrl); + if (res.ok) { + const data = await res.json(); + // The proxied health endpoint returns {name: "Sandbox Agent", ...} + if (data.status === "ok" || data.name === "Sandbox Agent") { + healthy = true; + break; + } + } + } catch {} + await new Promise((r) => setTimeout(r, 2000)); + } + expect(healthy).toBe(true); + + // Confirm a second request also works + const response = await fetch(healthUrl); + expect(response.ok).toBe(true); + } finally { 
+ cleanup(); + } + }, + timeoutMs, + ); +}); diff --git a/sdks/persist-sqlite/vitest.config.ts b/examples/cloudflare/vitest.config.ts similarity index 84% rename from sdks/persist-sqlite/vitest.config.ts rename to examples/cloudflare/vitest.config.ts index 8a85a83..52a3740 100644 --- a/sdks/persist-sqlite/vitest.config.ts +++ b/examples/cloudflare/vitest.config.ts @@ -2,7 +2,7 @@ import { defineConfig } from "vitest/config"; export default defineConfig({ test: { + root: ".", include: ["tests/**/*.test.ts"], - testTimeout: 60000, }, }); diff --git a/examples/computesdk/package.json b/examples/computesdk/package.json index e22b51b..243b3b1 100644 --- a/examples/computesdk/package.json +++ b/examples/computesdk/package.json @@ -3,7 +3,7 @@ "private": true, "type": "module", "scripts": { - "start": "tsx src/computesdk.ts", + "start": "tsx src/index.ts", "typecheck": "tsc --noEmit" }, "dependencies": { diff --git a/examples/computesdk/src/computesdk.ts b/examples/computesdk/src/computesdk.ts deleted file mode 100644 index 46f43d6..0000000 --- a/examples/computesdk/src/computesdk.ts +++ /dev/null @@ -1,151 +0,0 @@ -import { - compute, - detectProvider, - getMissingEnvVars, - getProviderConfigFromEnv, - isProviderAuthComplete, - isValidProvider, - PROVIDER_NAMES, - type ExplicitComputeConfig, - type ProviderName, -} from "computesdk"; -import { SandboxAgent } from "sandbox-agent"; -import { detectAgent, buildInspectorUrl } from "@sandbox-agent/example-shared"; -import { fileURLToPath } from "node:url"; -import { resolve } from "node:path"; - -const PORT = 3000; -const REQUEST_TIMEOUT_MS = Number.parseInt(process.env.COMPUTESDK_TIMEOUT_MS || "", 10) || 120_000; - -/** - * Detects and validates the provider to use. 
- * Priority: COMPUTESDK_PROVIDER env var > auto-detection from API keys - */ -function resolveProvider(): ProviderName { - const providerOverride = process.env.COMPUTESDK_PROVIDER; - - if (providerOverride) { - if (!isValidProvider(providerOverride)) { - throw new Error(`Unsupported ComputeSDK provider "${providerOverride}". Supported providers: ${PROVIDER_NAMES.join(", ")}`); - } - if (!isProviderAuthComplete(providerOverride)) { - const missing = getMissingEnvVars(providerOverride); - throw new Error(`Missing credentials for provider "${providerOverride}". Set: ${missing.join(", ")}`); - } - console.log(`Using ComputeSDK provider: ${providerOverride} (explicit)`); - return providerOverride as ProviderName; - } - - const detected = detectProvider(); - if (!detected) { - throw new Error(`No provider credentials found. Set one of: ${PROVIDER_NAMES.map((p) => getMissingEnvVars(p).join(", ")).join(" | ")}`); - } - console.log(`Using ComputeSDK provider: ${detected} (auto-detected)`); - return detected as ProviderName; -} - -function configureComputeSDK(): void { - const provider = resolveProvider(); - - const config: ExplicitComputeConfig = { - provider, - computesdkApiKey: process.env.COMPUTESDK_API_KEY, - requestTimeoutMs: REQUEST_TIMEOUT_MS, - }; - - const providerConfig = getProviderConfigFromEnv(provider); - if (Object.keys(providerConfig).length > 0) { - const configWithProvider = config as ExplicitComputeConfig & Record>; - configWithProvider[provider] = providerConfig; - } - - compute.setConfig(config); -} - -configureComputeSDK(); - -const buildEnv = (): Record => { - const env: Record = {}; - if (process.env.ANTHROPIC_API_KEY) env.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; - if (process.env.OPENAI_API_KEY) env.OPENAI_API_KEY = process.env.OPENAI_API_KEY; - return env; -}; - -export async function setupComputeSdkSandboxAgent(): Promise<{ - baseUrl: string; - cleanup: () => Promise; -}> { - const env = buildEnv(); - - console.log("Creating ComputeSDK 
sandbox..."); - const sandbox = await compute.sandbox.create({ - envs: Object.keys(env).length > 0 ? env : undefined, - }); - - const run = async (cmd: string, options?: { background?: boolean }) => { - const result = await sandbox.runCommand(cmd, options); - if (typeof result?.exitCode === "number" && result.exitCode !== 0) { - throw new Error(`Command failed: ${cmd} (exit ${result.exitCode})\n${result.stderr || ""}`); - } - return result; - }; - - console.log("Installing sandbox-agent..."); - await run("curl -fsSL https://releases.rivet.dev/sandbox-agent/latest/install.sh | sh"); - - if (env.ANTHROPIC_API_KEY) { - console.log("Installing Claude agent..."); - await run("sandbox-agent install-agent claude"); - } - - if (env.OPENAI_API_KEY) { - console.log("Installing Codex agent..."); - await run("sandbox-agent install-agent codex"); - } - - console.log("Starting server..."); - await run(`sandbox-agent server --no-token --host 0.0.0.0 --port ${PORT}`, { background: true }); - - const baseUrl = await sandbox.getUrl({ port: PORT }); - - const cleanup = async () => { - try { - await sandbox.destroy(); - } catch (error) { - console.warn("Cleanup failed:", error instanceof Error ? 
error.message : error); - } - }; - - return { baseUrl, cleanup }; -} - -export async function runComputeSdkExample(): Promise { - const { baseUrl, cleanup } = await setupComputeSdkSandboxAgent(); - - const handleExit = async () => { - await cleanup(); - process.exit(0); - }; - - process.once("SIGINT", handleExit); - process.once("SIGTERM", handleExit); - - const client = await SandboxAgent.connect({ baseUrl }); - const session = await client.createSession({ agent: detectAgent(), sessionInit: { cwd: "/home", mcpServers: [] } }); - const sessionId = session.id; - - console.log(` UI: ${buildInspectorUrl({ baseUrl, sessionId })}`); - console.log(" Press Ctrl+C to stop."); - - // Keep alive until SIGINT/SIGTERM triggers cleanup above - await new Promise(() => {}); -} - -const isDirectRun = Boolean(process.argv[1] && resolve(process.argv[1]) === fileURLToPath(import.meta.url)); - -if (isDirectRun) { - runComputeSdkExample().catch((error) => { - console.error(error instanceof Error ? error.message : error); - process.exit(1); - }); -} diff --git a/examples/computesdk/src/index.ts b/examples/computesdk/src/index.ts new file mode 100644 index 0000000..63d4aee --- /dev/null +++ b/examples/computesdk/src/index.ts @@ -0,0 +1,30 @@ +import { SandboxAgent } from "sandbox-agent"; +import { computesdk } from "sandbox-agent/computesdk"; +import { detectAgent } from "@sandbox-agent/example-shared"; + +const envs: Record = {}; +if (process.env.ANTHROPIC_API_KEY) envs.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; +if (process.env.OPENAI_API_KEY) envs.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + +const client = await SandboxAgent.start({ + sandbox: computesdk({ + create: { envs }, + }), +}); + +console.log(`UI: ${client.inspectorUrl}`); + +const session = await client.createSession({ + agent: detectAgent(), +}); + +session.onEvent((event) => { + console.log(`[${event.sender}]`, JSON.stringify(event.payload)); +}); + +session.prompt([{ type: "text", text: "Say hello from 
ComputeSDK in one sentence." }]); + +process.once("SIGINT", async () => { + await client.destroySandbox(); + process.exit(0); +}); diff --git a/examples/computesdk/tests/computesdk.test.ts b/examples/computesdk/tests/computesdk.test.ts index 0bbd24c..61ebb2c 100644 --- a/examples/computesdk/tests/computesdk.test.ts +++ b/examples/computesdk/tests/computesdk.test.ts @@ -1,6 +1,6 @@ import { describe, it, expect } from "vitest"; -import { buildHeaders } from "@sandbox-agent/example-shared"; -import { setupComputeSdkSandboxAgent } from "../src/computesdk.ts"; +import { SandboxAgent } from "sandbox-agent"; +import { computesdk } from "sandbox-agent/computesdk"; const hasModal = Boolean(process.env.MODAL_TOKEN_ID && process.env.MODAL_TOKEN_SECRET); const hasVercel = Boolean(process.env.VERCEL_TOKEN || process.env.VERCEL_OIDC_TOKEN); @@ -13,20 +13,23 @@ const timeoutMs = Number.parseInt(process.env.SANDBOX_TEST_TIMEOUT_MS || "", 10) const testFn = shouldRun ? it : it.skip; -describe("computesdk example", () => { +describe("computesdk provider", () => { testFn( "starts sandbox-agent and responds to /v1/health", async () => { - const { baseUrl, cleanup } = await setupComputeSdkSandboxAgent(); + const envs: Record = {}; + if (process.env.ANTHROPIC_API_KEY) envs.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; + if (process.env.OPENAI_API_KEY) envs.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + + const sdk = await SandboxAgent.start({ + sandbox: computesdk({ create: { envs } }), + }); + try { - const response = await fetch(`${baseUrl}/v1/health`, { - headers: buildHeaders({}), - }); - expect(response.ok).toBe(true); - const data = await response.json(); - expect(data.status).toBe("ok"); + const health = await sdk.getHealth(); + expect(health.status).toBe("ok"); } finally { - await cleanup(); + await sdk.destroySandbox(); } }, timeoutMs, diff --git a/examples/daytona/src/index.ts b/examples/daytona/src/index.ts index 09f4cff..b881113 100644 --- 
a/examples/daytona/src/index.ts +++ b/examples/daytona/src/index.ts @@ -1,42 +1,31 @@ -import { Daytona } from "@daytonaio/sdk"; import { SandboxAgent } from "sandbox-agent"; -import { detectAgent, buildInspectorUrl } from "@sandbox-agent/example-shared"; - -const daytona = new Daytona(); +import { daytona } from "sandbox-agent/daytona"; +import { detectAgent } from "@sandbox-agent/example-shared"; const envVars: Record = {}; if (process.env.ANTHROPIC_API_KEY) envVars.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; if (process.env.OPENAI_API_KEY) envVars.OPENAI_API_KEY = process.env.OPENAI_API_KEY; -// Use default image and install sandbox-agent at runtime (faster startup, no snapshot build) -console.log("Creating Daytona sandbox..."); -const sandbox = await daytona.create({ envVars, autoStopInterval: 0 }); +const client = await SandboxAgent.start({ + sandbox: daytona({ + create: { envVars }, + }), +}); -// Install sandbox-agent and start server -console.log("Installing sandbox-agent..."); -await sandbox.process.executeCommand("curl -fsSL https://releases.rivet.dev/sandbox-agent/0.3.x/install.sh | sh"); +console.log(`UI: ${client.inspectorUrl}`); -console.log("Installing agents..."); -await sandbox.process.executeCommand("sandbox-agent install-agent claude"); -await sandbox.process.executeCommand("sandbox-agent install-agent codex"); +const session = await client.createSession({ + agent: detectAgent(), + cwd: "/home/daytona", +}); -await sandbox.process.executeCommand("nohup sandbox-agent server --no-token --host 0.0.0.0 --port 3000 >/tmp/sandbox-agent.log 2>&1 &"); +session.onEvent((event) => { + console.log(`[${event.sender}]`, JSON.stringify(event.payload)); +}); -const baseUrl = (await sandbox.getSignedPreviewUrl(3000, 4 * 60 * 60)).url; +session.prompt([{ type: "text", text: "Say hello from Daytona in one sentence." 
}]); -console.log("Connecting to server..."); -const client = await SandboxAgent.connect({ baseUrl }); -const session = await client.createSession({ agent: detectAgent(), sessionInit: { cwd: "/home/daytona", mcpServers: [] } }); -const sessionId = session.id; - -console.log(` UI: ${buildInspectorUrl({ baseUrl, sessionId })}`); -console.log(" Press Ctrl+C to stop."); - -const keepAlive = setInterval(() => {}, 60_000); -const cleanup = async () => { - clearInterval(keepAlive); - await sandbox.delete(60); +process.once("SIGINT", async () => { + await client.destroySandbox(); process.exit(0); -}; -process.once("SIGINT", cleanup); -process.once("SIGTERM", cleanup); +}); diff --git a/examples/docker/package.json b/examples/docker/package.json index 2c29cfe..7b796c9 100644 --- a/examples/docker/package.json +++ b/examples/docker/package.json @@ -9,10 +9,10 @@ "dependencies": { "@sandbox-agent/example-shared": "workspace:*", "dockerode": "latest", + "get-port": "latest", "sandbox-agent": "workspace:*" }, "devDependencies": { - "@types/dockerode": "latest", "@types/node": "latest", "tsx": "latest", "typescript": "latest", diff --git a/examples/docker/src/index.ts b/examples/docker/src/index.ts index 74469f3..9f50859 100644 --- a/examples/docker/src/index.ts +++ b/examples/docker/src/index.ts @@ -1,68 +1,40 @@ -import Docker from "dockerode"; import fs from "node:fs"; import path from "node:path"; import { SandboxAgent } from "sandbox-agent"; -import { detectAgent, buildInspectorUrl } from "@sandbox-agent/example-shared"; +import { docker } from "sandbox-agent/docker"; +import { detectAgent } from "@sandbox-agent/example-shared"; import { FULL_IMAGE } from "@sandbox-agent/example-shared/docker"; -const IMAGE = FULL_IMAGE; -const PORT = 3000; -const agent = detectAgent(); const codexAuthPath = process.env.HOME ? path.join(process.env.HOME, ".codex", "auth.json") : null; const bindMounts = codexAuthPath && fs.existsSync(codexAuthPath) ? 
[`${codexAuthPath}:/home/sandbox/.codex/auth.json:ro`] : []; +const env = [ + process.env.ANTHROPIC_API_KEY ? `ANTHROPIC_API_KEY=${process.env.ANTHROPIC_API_KEY}` : "", + process.env.OPENAI_API_KEY ? `OPENAI_API_KEY=${process.env.OPENAI_API_KEY}` : "", + process.env.CODEX_API_KEY ? `CODEX_API_KEY=${process.env.CODEX_API_KEY}` : "", +].filter(Boolean); -const docker = new Docker({ socketPath: "/var/run/docker.sock" }); - -// Pull image if needed -try { - await docker.getImage(IMAGE).inspect(); -} catch { - console.log(`Pulling ${IMAGE}...`); - await new Promise((resolve, reject) => { - docker.pull(IMAGE, (err: Error | null, stream: NodeJS.ReadableStream) => { - if (err) return reject(err); - docker.modem.followProgress(stream, (err: Error | null) => (err ? reject(err) : resolve())); - }); - }); -} - -console.log("Starting container..."); -const container = await docker.createContainer({ - Image: IMAGE, - Cmd: ["server", "--no-token", "--host", "0.0.0.0", "--port", `${PORT}`], - Env: [ - process.env.ANTHROPIC_API_KEY ? `ANTHROPIC_API_KEY=${process.env.ANTHROPIC_API_KEY}` : "", - process.env.OPENAI_API_KEY ? `OPENAI_API_KEY=${process.env.OPENAI_API_KEY}` : "", - process.env.CODEX_API_KEY ? 
`CODEX_API_KEY=${process.env.CODEX_API_KEY}` : "", - ].filter(Boolean), - ExposedPorts: { [`${PORT}/tcp`]: {} }, - HostConfig: { - AutoRemove: true, - PortBindings: { [`${PORT}/tcp`]: [{ HostPort: `${PORT}` }] }, - Binds: bindMounts, - }, +const client = await SandboxAgent.start({ + sandbox: docker({ + image: FULL_IMAGE, + env, + binds: bindMounts, + }), }); -await container.start(); -const baseUrl = `http://127.0.0.1:${PORT}`; +console.log(`UI: ${client.inspectorUrl}`); -const client = await SandboxAgent.connect({ baseUrl }); -const session = await client.createSession({ agent, sessionInit: { cwd: "/home/sandbox", mcpServers: [] } }); -const sessionId = session.id; +const session = await client.createSession({ + agent: detectAgent(), + cwd: "/home/sandbox", +}); -console.log(` UI: ${buildInspectorUrl({ baseUrl, sessionId })}`); -console.log(" Press Ctrl+C to stop."); +session.onEvent((event) => { + console.log(`[${event.sender}]`, JSON.stringify(event.payload)); +}); -const keepAlive = setInterval(() => {}, 60_000); -const cleanup = async () => { - clearInterval(keepAlive); - try { - await container.stop({ t: 5 }); - } catch {} - try { - await container.remove({ force: true }); - } catch {} +session.prompt([{ type: "text", text: "Say hello from Docker in one sentence." 
}]); + +process.once("SIGINT", async () => { + await client.destroySandbox(); process.exit(0); -}; -process.once("SIGINT", cleanup); -process.once("SIGTERM", cleanup); +}); diff --git a/examples/docker/tests/docker.test.ts b/examples/docker/tests/docker.test.ts index 66730f0..683f033 100644 --- a/examples/docker/tests/docker.test.ts +++ b/examples/docker/tests/docker.test.ts @@ -1,8 +1,15 @@ import { describe, it, expect } from "vitest"; -import { buildHeaders } from "@sandbox-agent/example-shared"; -import { setupDockerSandboxAgent } from "../src/docker.ts"; +import { startDockerSandbox } from "@sandbox-agent/example-shared/docker"; -const shouldRun = process.env.RUN_DOCKER_EXAMPLES === "1"; +/** + * Docker integration test. + * + * Set SANDBOX_AGENT_DOCKER_IMAGE to the image tag to test (e.g. a locally-built + * full image). The test starts a container from that image, waits for + * sandbox-agent to become healthy, and validates the /v1/health endpoint. + */ +const image = process.env.SANDBOX_AGENT_DOCKER_IMAGE; +const shouldRun = Boolean(image); const timeoutMs = Number.parseInt(process.env.SANDBOX_TEST_TIMEOUT_MS || "", 10) || 300_000; const testFn = shouldRun ? 
it : it.skip; @@ -11,11 +18,29 @@ describe("docker example", () => { testFn( "starts sandbox-agent and responds to /v1/health", async () => { - const { baseUrl, token, cleanup } = await setupDockerSandboxAgent(); + const { baseUrl, cleanup } = await startDockerSandbox({ + port: 2468, + image: image!, + }); try { - const response = await fetch(`${baseUrl}/v1/health`, { - headers: buildHeaders({ token }), - }); + // Wait for health check + let healthy = false; + for (let i = 0; i < 60; i++) { + try { + const res = await fetch(`${baseUrl}/v1/health`); + if (res.ok) { + const data = await res.json(); + if (data.status === "ok") { + healthy = true; + break; + } + } + } catch {} + await new Promise((r) => setTimeout(r, 1000)); + } + expect(healthy).toBe(true); + + const response = await fetch(`${baseUrl}/v1/health`); expect(response.ok).toBe(true); const data = await response.json(); expect(data.status).toBe("ok"); diff --git a/examples/e2b/src/index.ts b/examples/e2b/src/index.ts index 7dd2882..c20ebaa 100644 --- a/examples/e2b/src/index.ts +++ b/examples/e2b/src/index.ts @@ -1,45 +1,28 @@ -import { Sandbox } from "@e2b/code-interpreter"; import { SandboxAgent } from "sandbox-agent"; -import { detectAgent, buildInspectorUrl } from "@sandbox-agent/example-shared"; +import { e2b } from "sandbox-agent/e2b"; +import { detectAgent } from "@sandbox-agent/example-shared"; const envs: Record = {}; if (process.env.ANTHROPIC_API_KEY) envs.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; if (process.env.OPENAI_API_KEY) envs.OPENAI_API_KEY = process.env.OPENAI_API_KEY; -console.log("Creating E2B sandbox..."); -const sandbox = await Sandbox.create({ allowInternetAccess: true, envs }); +const client = await SandboxAgent.start({ + // ✨ NEW ✨ + sandbox: e2b({ create: { envs } }), +}); -const run = async (cmd: string) => { - const result = await sandbox.commands.run(cmd); - if (result.exitCode !== 0) throw new Error(`Command failed: ${cmd}\n${result.stderr}`); - return result; -}; 
+const session = await client.createSession({ + agent: detectAgent(), + cwd: "/home/user", +}); -console.log("Installing sandbox-agent..."); -await run("curl -fsSL https://releases.rivet.dev/sandbox-agent/0.3.x/install.sh | sh"); +session.onEvent((event) => { + console.log(`[${event.sender}]`, JSON.stringify(event.payload)); +}); -console.log("Installing agents..."); -await run("sandbox-agent install-agent claude"); -await run("sandbox-agent install-agent codex"); +session.prompt([{ type: "text", text: "Say hello from E2B in one sentence." }]); -console.log("Starting server..."); -await sandbox.commands.run("sandbox-agent server --no-token --host 0.0.0.0 --port 3000", { background: true, timeoutMs: 0 }); - -const baseUrl = `https://${sandbox.getHost(3000)}`; - -console.log("Connecting to server..."); -const client = await SandboxAgent.connect({ baseUrl }); -const session = await client.createSession({ agent: detectAgent(), sessionInit: { cwd: "/home/user", mcpServers: [] } }); -const sessionId = session.id; - -console.log(` UI: ${buildInspectorUrl({ baseUrl, sessionId })}`); -console.log(" Press Ctrl+C to stop."); - -const keepAlive = setInterval(() => {}, 60_000); -const cleanup = async () => { - clearInterval(keepAlive); - await sandbox.kill(); +process.once("SIGINT", async () => { + await client.destroySandbox(); process.exit(0); -}; -process.once("SIGINT", cleanup); -process.once("SIGTERM", cleanup); +}); diff --git a/examples/file-system/src/index.ts b/examples/file-system/src/index.ts index abe4e08..71d65c0 100644 --- a/examples/file-system/src/index.ts +++ b/examples/file-system/src/index.ts @@ -44,7 +44,7 @@ const readmeText = new TextDecoder().decode(readmeBytes); console.log(` README.md content: ${readmeText.trim()}`); console.log("Creating session..."); -const session = await client.createSession({ agent: detectAgent(), sessionInit: { cwd: "/opt/my-project", mcpServers: [] } }); +const session = await client.createSession({ agent: detectAgent(), cwd: 
"/opt/my-project" }); const sessionId = session.id; console.log(` UI: ${buildInspectorUrl({ baseUrl, sessionId })}`); console.log(' Try: "read the README in /opt/my-project"'); diff --git a/examples/modal/package.json b/examples/modal/package.json index 61debbd..d3e51ec 100644 --- a/examples/modal/package.json +++ b/examples/modal/package.json @@ -3,7 +3,7 @@ "private": true, "type": "module", "scripts": { - "start": "tsx src/modal.ts", + "start": "tsx src/index.ts", "typecheck": "tsc --noEmit" }, "dependencies": { diff --git a/examples/modal/src/index.ts b/examples/modal/src/index.ts new file mode 100644 index 0000000..35eef8d --- /dev/null +++ b/examples/modal/src/index.ts @@ -0,0 +1,30 @@ +import { SandboxAgent } from "sandbox-agent"; +import { modal } from "sandbox-agent/modal"; +import { detectAgent } from "@sandbox-agent/example-shared"; + +const secrets: Record = {}; +if (process.env.ANTHROPIC_API_KEY) secrets.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; +if (process.env.OPENAI_API_KEY) secrets.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + +const client = await SandboxAgent.start({ + sandbox: modal({ + create: { secrets }, + }), +}); + +console.log(`UI: ${client.inspectorUrl}`); + +const session = await client.createSession({ + agent: detectAgent(), +}); + +session.onEvent((event) => { + console.log(`[${event.sender}]`, JSON.stringify(event.payload)); +}); + +session.prompt([{ type: "text", text: "Say hello from Modal in one sentence." 
}]); + +process.once("SIGINT", async () => { + await client.destroySandbox(); + process.exit(0); +}); diff --git a/examples/modal/src/modal.ts b/examples/modal/src/modal.ts deleted file mode 100644 index d525ad3..0000000 --- a/examples/modal/src/modal.ts +++ /dev/null @@ -1,123 +0,0 @@ -import { ModalClient } from "modal"; -import { SandboxAgent } from "sandbox-agent"; -import { detectAgent, buildInspectorUrl, waitForHealth } from "@sandbox-agent/example-shared"; -import { fileURLToPath } from "node:url"; -import { resolve } from "node:path"; -import { run } from "node:test"; - -const PORT = 3000; -const APP_NAME = "sandbox-agent"; - -async function buildSecrets(modal: ModalClient) { - const envVars: Record = {}; - if (process.env.ANTHROPIC_API_KEY) - envVars.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; - if (process.env.OPENAI_API_KEY) - envVars.OPENAI_API_KEY = process.env.OPENAI_API_KEY; - - if (Object.keys(envVars).length === 0) return []; - return [await modal.secrets.fromObject(envVars)]; -} - -export async function setupModalSandboxAgent(): Promise<{ - baseUrl: string; - cleanup: () => Promise; -}> { - const modal = new ModalClient(); - const app = await modal.apps.fromName(APP_NAME, { createIfMissing: true }); - - const image = modal.images - .fromRegistry("ubuntu:22.04") - .dockerfileCommands([ - "RUN apt-get update && apt-get install -y curl ca-certificates", - "RUN curl -fsSL https://releases.rivet.dev/sandbox-agent/0.2.x/install.sh | sh", - ]); - - const secrets = await buildSecrets(modal); - - console.log("Creating Modal sandbox!"); - const sb = await modal.sandboxes.create(app, image, { - secrets: secrets, - encryptedPorts: [PORT], - }); - console.log(`Sandbox created: ${sb.sandboxId}`); - - const exec = async (cmd: string) => { - const p = await sb.exec(["bash", "-c", cmd], { - stdout: "pipe", - stderr: "pipe", - }); - const exitCode = await p.wait(); - if (exitCode !== 0) { - const stderr = await p.stderr.readText(); - throw new Error(`Command 
failed (exit ${exitCode}): ${cmd}\n${stderr}`); - } - }; - - if (process.env.ANTHROPIC_API_KEY) { - console.log("Installing Claude agent..."); - await exec("sandbox-agent install-agent claude"); - } - if (process.env.OPENAI_API_KEY) { - console.log("Installing Codex agent..."); - await exec("sandbox-agent install-agent codex"); - } - - console.log("Starting server..."); - - await sb.exec( - ["bash", "-c", `sandbox-agent server --no-token --host 0.0.0.0 --port ${PORT} &`], - ); - - const tunnels = await sb.tunnels(); - const tunnel = tunnels[PORT]; - if (!tunnel) { - throw new Error(`No tunnel found for port ${PORT}`); - } - const baseUrl = tunnel.url; - - console.log("Waiting for server..."); - await waitForHealth({ baseUrl }); - - const cleanup = async () => { - try { - await sb.terminate(); - } catch (error) { - console.warn("Cleanup failed:", error instanceof Error ? error.message : error); - } - }; - - return { baseUrl, cleanup }; -} - -export async function runModalExample(): Promise { - const { baseUrl, cleanup } = await setupModalSandboxAgent(); - - const handleExit = async () => { - await cleanup(); - process.exit(0); - }; - - process.once("SIGINT", handleExit); - process.once("SIGTERM", handleExit); - - const client = await SandboxAgent.connect({ baseUrl }); - const session = await client.createSession({ agent: detectAgent(), sessionInit: { cwd: "/root", mcpServers: [] } }); - const sessionId = session.id; - - console.log(` UI: ${buildInspectorUrl({ baseUrl, sessionId })}`); - console.log(" Press Ctrl+C to stop."); - - await new Promise(() => {}); -} - -const isDirectRun = Boolean( - process.argv[1] && resolve(process.argv[1]) === fileURLToPath(import.meta.url), -); - -if (isDirectRun) { - runModalExample().catch((error) => { - console.error(error instanceof Error ? 
error.message : error); - process.exit(1); - }); -} diff --git a/examples/modal/tests/modal.test.ts b/examples/modal/tests/modal.test.ts index 9c27a21..010256a 100644 --- a/examples/modal/tests/modal.test.ts +++ b/examples/modal/tests/modal.test.ts @@ -1,26 +1,29 @@ import { describe, it, expect } from "vitest"; -import { buildHeaders } from "@sandbox-agent/example-shared"; -import { setupModalSandboxAgent } from "../src/modal.ts"; +import { SandboxAgent } from "sandbox-agent"; +import { modal } from "sandbox-agent/modal"; const shouldRun = Boolean(process.env.MODAL_TOKEN_ID && process.env.MODAL_TOKEN_SECRET); const timeoutMs = Number.parseInt(process.env.SANDBOX_TEST_TIMEOUT_MS || "", 10) || 300_000; const testFn = shouldRun ? it : it.skip; -describe("modal example", () => { +describe("modal provider", () => { testFn( "starts sandbox-agent and responds to /v1/health", async () => { - const { baseUrl, cleanup } = await setupModalSandboxAgent(); + const secrets: Record = {}; + if (process.env.ANTHROPIC_API_KEY) secrets.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; + if (process.env.OPENAI_API_KEY) secrets.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + + const sdk = await SandboxAgent.start({ + sandbox: modal({ create: { secrets } }), + }); + try { - const response = await fetch(`${baseUrl}/v1/health`, { - headers: buildHeaders({}), - }); - expect(response.ok).toBe(true); - const data = await response.json(); - expect(data.status).toBe("ok"); + const health = await sdk.getHealth(); + expect(health.status).toBe("ok"); } finally { - await cleanup(); + await sdk.destroySandbox(); } }, timeoutMs, diff --git a/examples/permissions/src/index.ts b/examples/permissions/src/index.ts index 811f65c..e684e34 100644 --- a/examples/permissions/src/index.ts +++ b/examples/permissions/src/index.ts @@ -2,6 +2,7 @@ import { createInterface } from "node:readline/promises"; import { stdin as input, stdout as output } from "node:process"; import { Command } from "commander"; import 
{ SandboxAgent, type PermissionReply, type SessionPermissionRequest } from "sandbox-agent"; +import { local } from "sandbox-agent/local"; const options = parseOptions(); const agent = options.agent.trim().toLowerCase(); @@ -9,10 +10,7 @@ const autoReply = parsePermissionReply(options.reply); const promptText = options.prompt?.trim() || `Create ./permission-example.txt with the text 'hello from the ${agent} permissions example'.`; const sdk = await SandboxAgent.start({ - spawn: { - enabled: true, - log: "inherit", - }, + sandbox: local({ log: "inherit" }), }); try { @@ -43,10 +41,7 @@ try { const session = await sdk.createSession({ agent, ...(mode ? { mode } : {}), - sessionInit: { - cwd: process.cwd(), - mcpServers: [], - }, + cwd: process.cwd(), }); const rl = autoReply diff --git a/examples/persist-postgres/package.json b/examples/persist-postgres/package.json index 8114ffb..8445516 100644 --- a/examples/persist-postgres/package.json +++ b/examples/persist-postgres/package.json @@ -8,7 +8,6 @@ }, "dependencies": { "@sandbox-agent/example-shared": "workspace:*", - "@sandbox-agent/persist-postgres": "workspace:*", "pg": "latest", "sandbox-agent": "workspace:*" }, diff --git a/examples/persist-postgres/src/index.ts b/examples/persist-postgres/src/index.ts index 73f9f04..43eecbd 100644 --- a/examples/persist-postgres/src/index.ts +++ b/examples/persist-postgres/src/index.ts @@ -3,7 +3,7 @@ import { randomUUID } from "node:crypto"; import { Client } from "pg"; import { setTimeout as delay } from "node:timers/promises"; import { SandboxAgent } from "sandbox-agent"; -import { PostgresSessionPersistDriver } from "@sandbox-agent/persist-postgres"; +import { PostgresSessionPersistDriver } from "./persist.ts"; import { startDockerSandbox } from "@sandbox-agent/example-shared/docker"; import { detectAgent } from "@sandbox-agent/example-shared"; diff --git a/examples/persist-postgres/src/persist.ts b/examples/persist-postgres/src/persist.ts new file mode 100644 index 
0000000..2a6ccff --- /dev/null +++ b/examples/persist-postgres/src/persist.ts @@ -0,0 +1,336 @@ +import { Pool, type PoolConfig } from "pg"; +import type { ListEventsRequest, ListPage, ListPageRequest, SessionEvent, SessionPersistDriver, SessionRecord } from "sandbox-agent"; + +const DEFAULT_LIST_LIMIT = 100; + +export interface PostgresSessionPersistDriverOptions { + connectionString?: string; + pool?: Pool; + poolConfig?: PoolConfig; + schema?: string; +} + +export class PostgresSessionPersistDriver implements SessionPersistDriver { + private readonly pool: Pool; + private readonly ownsPool: boolean; + private readonly schema: string; + private readonly initialized: Promise; + + constructor(options: PostgresSessionPersistDriverOptions = {}) { + this.schema = normalizeSchema(options.schema ?? "public"); + + if (options.pool) { + this.pool = options.pool; + this.ownsPool = false; + } else { + this.pool = new Pool({ + connectionString: options.connectionString, + ...options.poolConfig, + }); + this.ownsPool = true; + } + + this.initialized = this.initialize(); + } + + async getSession(id: string): Promise { + await this.ready(); + + const result = await this.pool.query( + `SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, sandbox_id, session_init_json, config_options_json, modes_json + FROM ${this.table("sessions")} + WHERE id = $1`, + [id], + ); + + if (result.rows.length === 0) { + return undefined; + } + + return decodeSessionRow(result.rows[0]); + } + + async listSessions(request: ListPageRequest = {}): Promise> { + await this.ready(); + + const offset = parseCursor(request.cursor); + const limit = normalizeLimit(request.limit); + + const rowsResult = await this.pool.query( + `SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, sandbox_id, session_init_json, config_options_json, modes_json + FROM ${this.table("sessions")} + ORDER BY created_at ASC, id ASC + LIMIT $1 OFFSET $2`, + [limit, offset], + ); 
+ + const countResult = await this.pool.query<{ count: string }>(`SELECT COUNT(*) AS count FROM ${this.table("sessions")}`); + const total = parseInteger(countResult.rows[0]?.count ?? "0"); + const nextOffset = offset + rowsResult.rows.length; + + return { + items: rowsResult.rows.map(decodeSessionRow), + nextCursor: nextOffset < total ? String(nextOffset) : undefined, + }; + } + + async updateSession(session: SessionRecord): Promise { + await this.ready(); + + await this.pool.query( + `INSERT INTO ${this.table("sessions")} ( + id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, sandbox_id, session_init_json, config_options_json, modes_json + ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) + ON CONFLICT(id) DO UPDATE SET + agent = EXCLUDED.agent, + agent_session_id = EXCLUDED.agent_session_id, + last_connection_id = EXCLUDED.last_connection_id, + created_at = EXCLUDED.created_at, + destroyed_at = EXCLUDED.destroyed_at, + sandbox_id = EXCLUDED.sandbox_id, + session_init_json = EXCLUDED.session_init_json, + config_options_json = EXCLUDED.config_options_json, + modes_json = EXCLUDED.modes_json`, + [ + session.id, + session.agent, + session.agentSessionId, + session.lastConnectionId, + session.createdAt, + session.destroyedAt ?? null, + session.sandboxId ?? null, + session.sessionInit ? JSON.stringify(session.sessionInit) : null, + session.configOptions ? JSON.stringify(session.configOptions) : null, + session.modes !== undefined ? 
JSON.stringify(session.modes) : null, + ], + ); + } + + async listEvents(request: ListEventsRequest): Promise> { + await this.ready(); + + const offset = parseCursor(request.cursor); + const limit = normalizeLimit(request.limit); + + const rowsResult = await this.pool.query( + `SELECT id, event_index, session_id, created_at, connection_id, sender, payload_json + FROM ${this.table("events")} + WHERE session_id = $1 + ORDER BY event_index ASC, id ASC + LIMIT $2 OFFSET $3`, + [request.sessionId, limit, offset], + ); + + const countResult = await this.pool.query<{ count: string }>(`SELECT COUNT(*) AS count FROM ${this.table("events")} WHERE session_id = $1`, [ + request.sessionId, + ]); + const total = parseInteger(countResult.rows[0]?.count ?? "0"); + const nextOffset = offset + rowsResult.rows.length; + + return { + items: rowsResult.rows.map(decodeEventRow), + nextCursor: nextOffset < total ? String(nextOffset) : undefined, + }; + } + + async insertEvent(_sessionId: string, event: SessionEvent): Promise { + await this.ready(); + + await this.pool.query( + `INSERT INTO ${this.table("events")} ( + id, event_index, session_id, created_at, connection_id, sender, payload_json + ) VALUES ($1, $2, $3, $4, $5, $6, $7) + ON CONFLICT(id) DO UPDATE SET + event_index = EXCLUDED.event_index, + session_id = EXCLUDED.session_id, + created_at = EXCLUDED.created_at, + connection_id = EXCLUDED.connection_id, + sender = EXCLUDED.sender, + payload_json = EXCLUDED.payload_json`, + [event.id, event.eventIndex, event.sessionId, event.createdAt, event.connectionId, event.sender, event.payload], + ); + } + + async close(): Promise { + if (!this.ownsPool) { + return; + } + await this.pool.end(); + } + + private async ready(): Promise { + await this.initialized; + } + + private table(name: "sessions" | "events"): string { + return `"${this.schema}"."${name}"`; + } + + private async initialize(): Promise { + await this.pool.query(`CREATE SCHEMA IF NOT EXISTS "${this.schema}"`); + + await 
this.pool.query(` + CREATE TABLE IF NOT EXISTS ${this.table("sessions")} ( + id TEXT PRIMARY KEY, + agent TEXT NOT NULL, + agent_session_id TEXT NOT NULL, + last_connection_id TEXT NOT NULL, + created_at BIGINT NOT NULL, + destroyed_at BIGINT, + sandbox_id TEXT, + session_init_json JSONB, + config_options_json JSONB, + modes_json JSONB + ) + `); + + await this.pool.query(` + ALTER TABLE ${this.table("sessions")} + ADD COLUMN IF NOT EXISTS sandbox_id TEXT + `); + + await this.pool.query(` + ALTER TABLE ${this.table("sessions")} + ADD COLUMN IF NOT EXISTS config_options_json JSONB + `); + + await this.pool.query(` + ALTER TABLE ${this.table("sessions")} + ADD COLUMN IF NOT EXISTS modes_json JSONB + `); + + await this.pool.query(` + CREATE TABLE IF NOT EXISTS ${this.table("events")} ( + id TEXT PRIMARY KEY, + event_index BIGINT NOT NULL, + session_id TEXT NOT NULL, + created_at BIGINT NOT NULL, + connection_id TEXT NOT NULL, + sender TEXT NOT NULL, + payload_json JSONB NOT NULL + ) + `); + + await this.pool.query(` + ALTER TABLE ${this.table("events")} + ALTER COLUMN id TYPE TEXT USING id::TEXT + `); + + await this.pool.query(` + ALTER TABLE ${this.table("events")} + ADD COLUMN IF NOT EXISTS event_index BIGINT + `); + + await this.pool.query(` + WITH ranked AS ( + SELECT id, ROW_NUMBER() OVER (PARTITION BY session_id ORDER BY created_at ASC, id ASC) AS ranked_index + FROM ${this.table("events")} + ) + UPDATE ${this.table("events")} AS current_events + SET event_index = ranked.ranked_index + FROM ranked + WHERE current_events.id = ranked.id + AND current_events.event_index IS NULL + `); + + await this.pool.query(` + ALTER TABLE ${this.table("events")} + ALTER COLUMN event_index SET NOT NULL + `); + + await this.pool.query(` + CREATE INDEX IF NOT EXISTS idx_events_session_order + ON ${this.table("events")}(session_id, event_index, id) + `); + } +} + +type SessionRow = { + id: string; + agent: string; + agent_session_id: string; + last_connection_id: string; + 
created_at: string | number; + destroyed_at: string | number | null; + sandbox_id: string | null; + session_init_json: unknown | null; + config_options_json: unknown | null; + modes_json: unknown | null; +}; + +type EventRow = { + id: string | number; + event_index: string | number; + session_id: string; + created_at: string | number; + connection_id: string; + sender: string; + payload_json: unknown; +}; + +function decodeSessionRow(row: SessionRow): SessionRecord { + return { + id: row.id, + agent: row.agent, + agentSessionId: row.agent_session_id, + lastConnectionId: row.last_connection_id, + createdAt: parseInteger(row.created_at), + destroyedAt: row.destroyed_at === null ? undefined : parseInteger(row.destroyed_at), + sandboxId: row.sandbox_id ?? undefined, + sessionInit: row.session_init_json ? (row.session_init_json as SessionRecord["sessionInit"]) : undefined, + configOptions: row.config_options_json ? (row.config_options_json as SessionRecord["configOptions"]) : undefined, + modes: row.modes_json ? (row.modes_json as SessionRecord["modes"]) : undefined, + }; +} + +function decodeEventRow(row: EventRow): SessionEvent { + return { + id: String(row.id), + eventIndex: parseInteger(row.event_index), + sessionId: row.session_id, + createdAt: parseInteger(row.created_at), + connectionId: row.connection_id, + sender: parseSender(row.sender), + payload: row.payload_json as SessionEvent["payload"], + }; +} + +function normalizeLimit(limit: number | undefined): number { + if (!Number.isFinite(limit) || (limit ?? 0) < 1) { + return DEFAULT_LIST_LIMIT; + } + return Math.floor(limit as number); +} + +function parseCursor(cursor: string | undefined): number { + if (!cursor) { + return 0; + } + const parsed = Number.parseInt(cursor, 10); + if (!Number.isFinite(parsed) || parsed < 0) { + return 0; + } + return parsed; +} + +function parseInteger(value: string | number): number { + const parsed = typeof value === "number" ? 
value : Number.parseInt(value, 10); + if (!Number.isFinite(parsed)) { + throw new Error(`Invalid integer value returned by postgres: ${String(value)}`); + } + return parsed; +} + +function parseSender(value: string): SessionEvent["sender"] { + if (value === "agent" || value === "client") { + return value; + } + throw new Error(`Invalid sender value returned by postgres: ${value}`); +} + +function normalizeSchema(schema: string): string { + if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(schema)) { + throw new Error(`Invalid schema name '${schema}'. Use letters, numbers, and underscores only.`); + } + return schema; +} diff --git a/examples/persist-sqlite/package.json b/examples/persist-sqlite/package.json index 8b7b822..be6bf0d 100644 --- a/examples/persist-sqlite/package.json +++ b/examples/persist-sqlite/package.json @@ -8,10 +8,11 @@ }, "dependencies": { "@sandbox-agent/example-shared": "workspace:*", - "@sandbox-agent/persist-sqlite": "workspace:*", + "better-sqlite3": "^11.0.0", "sandbox-agent": "workspace:*" }, "devDependencies": { + "@types/better-sqlite3": "^7.0.0", "@types/node": "latest", "tsx": "latest", "typescript": "latest" diff --git a/examples/persist-sqlite/src/index.ts b/examples/persist-sqlite/src/index.ts index d2c4ef2..943e902 100644 --- a/examples/persist-sqlite/src/index.ts +++ b/examples/persist-sqlite/src/index.ts @@ -1,5 +1,5 @@ import { SandboxAgent } from "sandbox-agent"; -import { SQLiteSessionPersistDriver } from "@sandbox-agent/persist-sqlite"; +import { SQLiteSessionPersistDriver } from "./persist.ts"; import { startDockerSandbox } from "@sandbox-agent/example-shared/docker"; import { detectAgent } from "@sandbox-agent/example-shared"; diff --git a/examples/persist-sqlite/src/persist.ts b/examples/persist-sqlite/src/persist.ts new file mode 100644 index 0000000..2292903 --- /dev/null +++ b/examples/persist-sqlite/src/persist.ts @@ -0,0 +1,310 @@ +import Database from "better-sqlite3"; +import type { ListEventsRequest, ListPage, ListPageRequest, 
SessionEvent, SessionPersistDriver, SessionRecord } from "sandbox-agent"; + +const DEFAULT_LIST_LIMIT = 100; + +export interface SQLiteSessionPersistDriverOptions { + filename?: string; +} + +export class SQLiteSessionPersistDriver implements SessionPersistDriver { + private readonly db: Database.Database; + + constructor(options: SQLiteSessionPersistDriverOptions = {}) { + this.db = new Database(options.filename ?? ":memory:"); + this.initialize(); + } + + async getSession(id: string): Promise { + const row = this.db + .prepare( + `SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, sandbox_id, session_init_json, config_options_json, modes_json + FROM sessions WHERE id = ?`, + ) + .get(id) as SessionRow | undefined; + + if (!row) { + return undefined; + } + + return decodeSessionRow(row); + } + + async listSessions(request: ListPageRequest = {}): Promise> { + const offset = parseCursor(request.cursor); + const limit = normalizeLimit(request.limit); + + const rows = this.db + .prepare( + `SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, sandbox_id, session_init_json, config_options_json, modes_json + FROM sessions + ORDER BY created_at ASC, id ASC + LIMIT ? OFFSET ?`, + ) + .all(limit, offset) as SessionRow[]; + + const countRow = this.db.prepare(`SELECT COUNT(*) as count FROM sessions`).get() as { count: number }; + const nextOffset = offset + rows.length; + + return { + items: rows.map(decodeSessionRow), + nextCursor: nextOffset < countRow.count ? String(nextOffset) : undefined, + }; + } + + async updateSession(session: SessionRecord): Promise { + this.db + .prepare( + `INSERT INTO sessions ( + id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, sandbox_id, session_init_json, config_options_json, modes_json + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ ON CONFLICT(id) DO UPDATE SET + agent = excluded.agent, + agent_session_id = excluded.agent_session_id, + last_connection_id = excluded.last_connection_id, + created_at = excluded.created_at, + destroyed_at = excluded.destroyed_at, + sandbox_id = excluded.sandbox_id, + session_init_json = excluded.session_init_json, + config_options_json = excluded.config_options_json, + modes_json = excluded.modes_json`, + ) + .run( + session.id, + session.agent, + session.agentSessionId, + session.lastConnectionId, + session.createdAt, + session.destroyedAt ?? null, + session.sandboxId ?? null, + session.sessionInit ? JSON.stringify(session.sessionInit) : null, + session.configOptions ? JSON.stringify(session.configOptions) : null, + session.modes !== undefined ? JSON.stringify(session.modes) : null, + ); + } + + async listEvents(request: ListEventsRequest): Promise> { + const offset = parseCursor(request.cursor); + const limit = normalizeLimit(request.limit); + + const rows = this.db + .prepare( + `SELECT id, event_index, session_id, created_at, connection_id, sender, payload_json + FROM events + WHERE session_id = ? + ORDER BY event_index ASC, id ASC + LIMIT ? OFFSET ?`, + ) + .all(request.sessionId, limit, offset) as EventRow[]; + + const countRow = this.db.prepare(`SELECT COUNT(*) as count FROM events WHERE session_id = ?`).get(request.sessionId) as { count: number }; + + const nextOffset = offset + rows.length; + + return { + items: rows.map(decodeEventRow), + nextCursor: nextOffset < countRow.count ? String(nextOffset) : undefined, + }; + } + + async insertEvent(_sessionId: string, event: SessionEvent): Promise { + this.db + .prepare( + `INSERT INTO events ( + id, event_index, session_id, created_at, connection_id, sender, payload_json + ) VALUES (?, ?, ?, ?, ?, ?, ?) 
+ ON CONFLICT(id) DO UPDATE SET + event_index = excluded.event_index, + session_id = excluded.session_id, + created_at = excluded.created_at, + connection_id = excluded.connection_id, + sender = excluded.sender, + payload_json = excluded.payload_json`, + ) + .run(event.id, event.eventIndex, event.sessionId, event.createdAt, event.connectionId, event.sender, JSON.stringify(event.payload)); + } + + close(): void { + this.db.close(); + } + + private initialize(): void { + this.db.exec(` + CREATE TABLE IF NOT EXISTS sessions ( + id TEXT PRIMARY KEY, + agent TEXT NOT NULL, + agent_session_id TEXT NOT NULL, + last_connection_id TEXT NOT NULL, + created_at INTEGER NOT NULL, + destroyed_at INTEGER, + sandbox_id TEXT, + session_init_json TEXT, + config_options_json TEXT, + modes_json TEXT + ) + `); + + const sessionColumns = this.db.prepare(`PRAGMA table_info(sessions)`).all() as TableInfoRow[]; + if (!sessionColumns.some((column) => column.name === "sandbox_id")) { + this.db.exec(`ALTER TABLE sessions ADD COLUMN sandbox_id TEXT`); + } + if (!sessionColumns.some((column) => column.name === "config_options_json")) { + this.db.exec(`ALTER TABLE sessions ADD COLUMN config_options_json TEXT`); + } + if (!sessionColumns.some((column) => column.name === "modes_json")) { + this.db.exec(`ALTER TABLE sessions ADD COLUMN modes_json TEXT`); + } + + this.ensureEventsTable(); + } + + private ensureEventsTable(): void { + const tableInfo = this.db.prepare(`PRAGMA table_info(events)`).all() as TableInfoRow[]; + if (tableInfo.length === 0) { + this.createEventsTable(); + return; + } + + const idColumn = tableInfo.find((column) => column.name === "id"); + const hasEventIndex = tableInfo.some((column) => column.name === "event_index"); + const idType = (idColumn?.type ?? 
"").trim().toUpperCase(); + const idIsText = idType === "TEXT"; + + if (!idIsText || !hasEventIndex) { + this.rebuildEventsTable(hasEventIndex); + } + + this.db.exec(` + CREATE INDEX IF NOT EXISTS idx_events_session_order + ON events(session_id, event_index, id) + `); + } + + private createEventsTable(): void { + this.db.exec(` + CREATE TABLE IF NOT EXISTS events ( + id TEXT PRIMARY KEY, + event_index INTEGER NOT NULL, + session_id TEXT NOT NULL, + created_at INTEGER NOT NULL, + connection_id TEXT NOT NULL, + sender TEXT NOT NULL, + payload_json TEXT NOT NULL + ); + + CREATE INDEX IF NOT EXISTS idx_events_session_order + ON events(session_id, event_index, id) + `); + } + + private rebuildEventsTable(hasEventIndex: boolean): void { + this.db.exec(` + ALTER TABLE events RENAME TO events_legacy; + `); + + this.createEventsTable(); + + if (hasEventIndex) { + this.db.exec(` + INSERT INTO events (id, event_index, session_id, created_at, connection_id, sender, payload_json) + SELECT + CAST(id AS TEXT), + COALESCE(event_index, ROW_NUMBER() OVER (PARTITION BY session_id ORDER BY created_at ASC, id ASC)), + session_id, + created_at, + connection_id, + sender, + payload_json + FROM events_legacy + `); + } else { + this.db.exec(` + INSERT INTO events (id, event_index, session_id, created_at, connection_id, sender, payload_json) + SELECT + CAST(id AS TEXT), + ROW_NUMBER() OVER (PARTITION BY session_id ORDER BY created_at ASC, id ASC), + session_id, + created_at, + connection_id, + sender, + payload_json + FROM events_legacy + `); + } + + this.db.exec(`DROP TABLE events_legacy`); + } +} + +type SessionRow = { + id: string; + agent: string; + agent_session_id: string; + last_connection_id: string; + created_at: number; + destroyed_at: number | null; + sandbox_id: string | null; + session_init_json: string | null; + config_options_json: string | null; + modes_json: string | null; +}; + +type EventRow = { + id: string; + event_index: number; + session_id: string; + created_at: 
number; + connection_id: string; + sender: "client" | "agent"; + payload_json: string; +}; + +type TableInfoRow = { + name: string; + type: string; +}; + +function decodeSessionRow(row: SessionRow): SessionRecord { + return { + id: row.id, + agent: row.agent, + agentSessionId: row.agent_session_id, + lastConnectionId: row.last_connection_id, + createdAt: row.created_at, + destroyedAt: row.destroyed_at ?? undefined, + sandboxId: row.sandbox_id ?? undefined, + sessionInit: row.session_init_json ? (JSON.parse(row.session_init_json) as SessionRecord["sessionInit"]) : undefined, + configOptions: row.config_options_json ? (JSON.parse(row.config_options_json) as SessionRecord["configOptions"]) : undefined, + modes: row.modes_json ? (JSON.parse(row.modes_json) as SessionRecord["modes"]) : undefined, + }; +} + +function decodeEventRow(row: EventRow): SessionEvent { + return { + id: row.id, + eventIndex: row.event_index, + sessionId: row.session_id, + createdAt: row.created_at, + connectionId: row.connection_id, + sender: row.sender, + payload: JSON.parse(row.payload_json), + }; +} + +function normalizeLimit(limit: number | undefined): number { + if (!Number.isFinite(limit) || (limit ?? 
0) < 1) { + return DEFAULT_LIST_LIMIT; + } + return Math.floor(limit as number); +} + +function parseCursor(cursor: string | undefined): number { + if (!cursor) { + return 0; + } + const parsed = Number.parseInt(cursor, 10); + if (!Number.isFinite(parsed) || parsed < 0) { + return 0; + } + return parsed; +} diff --git a/examples/shared/src/docker.ts b/examples/shared/src/docker.ts index 2feca37..8459535 100644 --- a/examples/shared/src/docker.ts +++ b/examples/shared/src/docker.ts @@ -78,11 +78,11 @@ function readClaudeCredentialFiles(): ClaudeCredentialFile[] { const candidates: Array<{ hostPath: string; containerPath: string }> = [ { hostPath: path.join(homeDir, ".claude", ".credentials.json"), - containerPath: "/root/.claude/.credentials.json", + containerPath: ".claude/.credentials.json", }, { hostPath: path.join(homeDir, ".claude-oauth-credentials.json"), - containerPath: "/root/.claude-oauth-credentials.json", + containerPath: ".claude-oauth-credentials.json", }, ]; @@ -180,10 +180,9 @@ export async function startDockerSandbox(opts: DockerSandboxOptions): Promise { const envKey = `SANDBOX_AGENT_CLAUDE_CREDENTIAL_${index}_B64`; bootstrapEnv[envKey] = file.base64Content; - return [ - `mkdir -p ${shellSingleQuotedLiteral(path.posix.dirname(file.containerPath))}`, - `printf %s "$${envKey}" | base64 -d > ${shellSingleQuotedLiteral(file.containerPath)}`, - ]; + // Use $HOME-relative paths so credentials work regardless of container user + const containerDir = path.posix.dirname(file.containerPath); + return [`mkdir -p "$HOME/${containerDir}"`, `printf %s "$${envKey}" | base64 -d > "$HOME/${file.containerPath}"`]; }); setupCommands.unshift(...credentialBootstrapCommands); } @@ -200,8 +199,9 @@ export async function startDockerSandbox(opts: DockerSandboxOptions): Promise `${key}=${value}`), ...Object.entries(bootstrapEnv).map(([key, value]) => `${key}=${value}`)], ExposedPorts: { [`${port}/tcp`]: {} }, HostConfig: { @@ -253,10 +253,13 @@ export async function 
startDockerSandbox(opts: DockerSandboxOptions): Promise { + await cleanup(); process.exit(0); }; - process.once("SIGINT", cleanup); - process.once("SIGTERM", cleanup); + process.once("SIGINT", signalCleanup); + process.once("SIGTERM", signalCleanup); return { baseUrl, cleanup }; } diff --git a/examples/skills-custom-tool/src/index.ts b/examples/skills-custom-tool/src/index.ts index 44b2161..490be64 100644 --- a/examples/skills-custom-tool/src/index.ts +++ b/examples/skills-custom-tool/src/index.ts @@ -36,7 +36,7 @@ await client.setSkillsConfig({ directory: "/", skillName: "random-number" }, { s // Create a session. console.log("Creating session with custom skill..."); -const session = await client.createSession({ agent: detectAgent(), sessionInit: { cwd: "/root", mcpServers: [] } }); +const session = await client.createSession({ agent: detectAgent(), cwd: "/root" }); const sessionId = session.id; console.log(` UI: ${buildInspectorUrl({ baseUrl, sessionId })}`); console.log(' Try: "generate a random number between 1 and 100"'); diff --git a/examples/skills/src/index.ts b/examples/skills/src/index.ts index c04815c..3087ecc 100644 --- a/examples/skills/src/index.ts +++ b/examples/skills/src/index.ts @@ -15,7 +15,7 @@ await client.setSkillsConfig( ); console.log("Creating session..."); -const session = await client.createSession({ agent: detectAgent(), sessionInit: { cwd: "/root", mcpServers: [] } }); +const session = await client.createSession({ agent: detectAgent(), cwd: "/root" }); const sessionId = session.id; console.log(` UI: ${buildInspectorUrl({ baseUrl, sessionId })}`); console.log(' Try: "How do I start sandbox-agent?"'); diff --git a/examples/vercel/src/index.ts b/examples/vercel/src/index.ts index 4a63bfc..9839893 100644 --- a/examples/vercel/src/index.ts +++ b/examples/vercel/src/index.ts @@ -1,56 +1,34 @@ -import { Sandbox } from "@vercel/sandbox"; import { SandboxAgent } from "sandbox-agent"; -import { detectAgent, buildInspectorUrl } from 
"@sandbox-agent/example-shared"; +import { vercel } from "sandbox-agent/vercel"; +import { detectAgent } from "@sandbox-agent/example-shared"; -const envs: Record = {}; -if (process.env.ANTHROPIC_API_KEY) envs.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; -if (process.env.OPENAI_API_KEY) envs.OPENAI_API_KEY = process.env.OPENAI_API_KEY; +const env: Record = {}; +if (process.env.ANTHROPIC_API_KEY) env.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; +if (process.env.OPENAI_API_KEY) env.OPENAI_API_KEY = process.env.OPENAI_API_KEY; -console.log("Creating Vercel sandbox..."); -const sandbox = await Sandbox.create({ - runtime: "node24", - ports: [3000], +const client = await SandboxAgent.start({ + sandbox: vercel({ + create: { + runtime: "node24", + env, + }, + }), }); -const run = async (cmd: string, args: string[] = []) => { - const result = await sandbox.runCommand({ cmd, args, env: envs }); - if (result.exitCode !== 0) { - const stderr = await result.stderr(); - throw new Error(`Command failed: ${cmd} ${args.join(" ")}\n${stderr}`); - } - return result; -}; +console.log(`UI: ${client.inspectorUrl}`); -console.log("Installing sandbox-agent..."); -await run("sh", ["-c", "curl -fsSL https://releases.rivet.dev/sandbox-agent/0.3.x/install.sh | sh"]); - -console.log("Installing agents..."); -await run("sandbox-agent", ["install-agent", "claude"]); -await run("sandbox-agent", ["install-agent", "codex"]); - -console.log("Starting server..."); -await sandbox.runCommand({ - cmd: "sandbox-agent", - args: ["server", "--no-token", "--host", "0.0.0.0", "--port", "3000"], - env: envs, - detached: true, +const session = await client.createSession({ + agent: detectAgent(), + cwd: "/home/vercel-sandbox", }); -const baseUrl = sandbox.domain(3000); +session.onEvent((event) => { + console.log(`[${event.sender}]`, JSON.stringify(event.payload)); +}); -console.log("Connecting to server..."); -const client = await SandboxAgent.connect({ baseUrl }); -const session = await 
client.createSession({ agent: detectAgent(), sessionInit: { cwd: "/home/vercel-sandbox", mcpServers: [] } }); -const sessionId = session.id; +session.prompt([{ type: "text", text: "Say hello from Vercel in one sentence." }]); -console.log(` UI: ${buildInspectorUrl({ baseUrl, sessionId })}`); -console.log(" Press Ctrl+C to stop."); - -const keepAlive = setInterval(() => {}, 60_000); -const cleanup = async () => { - clearInterval(keepAlive); - await sandbox.stop(); +process.once("SIGINT", async () => { + await client.destroySandbox(); process.exit(0); -}; -process.once("SIGINT", cleanup); -process.once("SIGTERM", cleanup); +}); diff --git a/foundry/compose.dev.yaml b/foundry/compose.dev.yaml index b96805e..c57d971 100644 --- a/foundry/compose.dev.yaml +++ b/foundry/compose.dev.yaml @@ -65,7 +65,6 @@ services: - "foundry_backend_root_node_modules:/app/node_modules" - "foundry_backend_backend_node_modules:/app/foundry/packages/backend/node_modules" - "foundry_backend_shared_node_modules:/app/foundry/packages/shared/node_modules" - - "foundry_backend_persist_rivet_node_modules:/app/sdks/persist-rivet/node_modules" - "foundry_backend_typescript_node_modules:/app/sdks/typescript/node_modules" - "foundry_backend_pnpm_store:/root/.local/share/pnpm/store" # Persist RivetKit local storage across container restarts. 
@@ -120,7 +119,6 @@ volumes: foundry_backend_root_node_modules: {} foundry_backend_backend_node_modules: {} foundry_backend_shared_node_modules: {} - foundry_backend_persist_rivet_node_modules: {} foundry_backend_typescript_node_modules: {} foundry_backend_pnpm_store: {} foundry_rivetkit_storage: {} diff --git a/foundry/docker/backend.Dockerfile b/foundry/docker/backend.Dockerfile index c41fd1f..3dc1c7d 100644 --- a/foundry/docker/backend.Dockerfile +++ b/foundry/docker/backend.Dockerfile @@ -13,7 +13,6 @@ RUN pnpm --filter @sandbox-agent/foundry-shared build RUN pnpm --filter acp-http-client build RUN pnpm --filter @sandbox-agent/cli-shared build RUN SKIP_OPENAPI_GEN=1 pnpm --filter sandbox-agent build -RUN pnpm --filter @sandbox-agent/persist-rivet build RUN pnpm --filter @sandbox-agent/foundry-backend build RUN pnpm --filter @sandbox-agent/foundry-backend deploy --prod /out diff --git a/foundry/packages/backend/package.json b/foundry/packages/backend/package.json index e11cd62..562bab7 100644 --- a/foundry/packages/backend/package.json +++ b/foundry/packages/backend/package.json @@ -18,7 +18,6 @@ "@hono/node-ws": "^1.3.0", "@iarna/toml": "^2.2.5", "@sandbox-agent/foundry-shared": "workspace:*", - "@sandbox-agent/persist-rivet": "workspace:*", "better-auth": "^1.5.5", "dockerode": "^4.0.9", "drizzle-kit": "^0.31.8", diff --git a/frontend/packages/inspector/package.json b/frontend/packages/inspector/package.json index 9671ecb..45b7224 100644 --- a/frontend/packages/inspector/package.json +++ b/frontend/packages/inspector/package.json @@ -6,10 +6,10 @@ "type": "module", "scripts": { "dev": "vite", - "build": "SKIP_OPENAPI_GEN=1 pnpm --filter @sandbox-agent/persist-indexeddb build && pnpm --filter @sandbox-agent/react build && vite build", + "build": "SKIP_OPENAPI_GEN=1 pnpm --filter @sandbox-agent/react build && vite build", "preview": "vite preview", - "typecheck": "SKIP_OPENAPI_GEN=1 pnpm --filter @sandbox-agent/persist-indexeddb build && pnpm --filter 
@sandbox-agent/react build && tsc --noEmit", - "test": "SKIP_OPENAPI_GEN=1 pnpm --filter @sandbox-agent/persist-indexeddb build && pnpm --filter @sandbox-agent/react build && vitest run" + "typecheck": "SKIP_OPENAPI_GEN=1 pnpm --filter @sandbox-agent/react build && tsc --noEmit", + "test": "SKIP_OPENAPI_GEN=1 pnpm --filter @sandbox-agent/react build && vitest run" }, "devDependencies": { "@sandbox-agent/react": "workspace:*", @@ -23,7 +23,6 @@ "vitest": "^3.0.0" }, "dependencies": { - "@sandbox-agent/persist-indexeddb": "workspace:*", "lucide-react": "^0.469.0", "react": "^18.3.1", "react-dom": "^18.3.1" diff --git a/frontend/packages/inspector/src/App.tsx b/frontend/packages/inspector/src/App.tsx index ac06904..f6e319c 100644 --- a/frontend/packages/inspector/src/App.tsx +++ b/frontend/packages/inspector/src/App.tsx @@ -24,7 +24,7 @@ type ConfigOption = { }; type AgentModeInfo = { id: string; name: string; description: string }; type AgentModelInfo = { id: string; name?: string }; -import { IndexedDbSessionPersistDriver } from "@sandbox-agent/persist-indexeddb"; +import { IndexedDbSessionPersistDriver } from "./persist-indexeddb"; import ChatPanel from "./components/chat/ChatPanel"; import ConnectScreen from "./components/ConnectScreen"; import DebugPanel, { type DebugTab } from "./components/debug/DebugPanel"; diff --git a/frontend/packages/inspector/src/persist-indexeddb.ts b/frontend/packages/inspector/src/persist-indexeddb.ts new file mode 100644 index 0000000..23475cb --- /dev/null +++ b/frontend/packages/inspector/src/persist-indexeddb.ts @@ -0,0 +1,320 @@ +import type { ListEventsRequest, ListPage, ListPageRequest, SessionEvent, SessionPersistDriver, SessionRecord } from "sandbox-agent"; + +const DEFAULT_DB_NAME = "sandbox-agent-session-store"; +const DEFAULT_DB_VERSION = 2; +const SESSIONS_STORE = "sessions"; +const EVENTS_STORE = "events"; +const EVENTS_BY_SESSION_INDEX = "by_session_index"; +const DEFAULT_LIST_LIMIT = 100; + +export interface 
IndexedDbSessionPersistDriverOptions { + databaseName?: string; + databaseVersion?: number; + indexedDb?: IDBFactory; +} + +export class IndexedDbSessionPersistDriver implements SessionPersistDriver { + private readonly indexedDb: IDBFactory; + private readonly dbName: string; + private readonly dbVersion: number; + private readonly dbPromise: Promise; + + constructor(options: IndexedDbSessionPersistDriverOptions = {}) { + const indexedDb = options.indexedDb ?? globalThis.indexedDB; + if (!indexedDb) { + throw new Error("IndexedDB is not available in this runtime."); + } + + this.indexedDb = indexedDb; + this.dbName = options.databaseName ?? DEFAULT_DB_NAME; + this.dbVersion = options.databaseVersion ?? DEFAULT_DB_VERSION; + this.dbPromise = this.openDatabase(); + } + + async getSession(id: string): Promise { + const db = await this.dbPromise; + const row = await requestToPromise(db.transaction(SESSIONS_STORE, "readonly").objectStore(SESSIONS_STORE).get(id)); + if (!row || typeof row !== "object") { + return undefined; + } + return decodeSessionRow(row as SessionRow); + } + + async listSessions(request: ListPageRequest = {}): Promise> { + const db = await this.dbPromise; + const rows = await getAllRows(db, SESSIONS_STORE); + + rows.sort((a, b) => { + if (a.createdAt !== b.createdAt) { + return a.createdAt - b.createdAt; + } + return a.id.localeCompare(b.id); + }); + + const offset = parseCursor(request.cursor); + const limit = normalizeLimit(request.limit); + const slice = rows.slice(offset, offset + limit).map(decodeSessionRow); + const nextOffset = offset + slice.length; + + return { + items: slice, + nextCursor: nextOffset < rows.length ? 
String(nextOffset) : undefined, + }; + } + + async updateSession(session: SessionRecord): Promise { + const db = await this.dbPromise; + await transactionPromise(db, [SESSIONS_STORE], "readwrite", (tx) => { + tx.objectStore(SESSIONS_STORE).put(encodeSessionRow(session)); + }); + } + + async listEvents(request: ListEventsRequest): Promise> { + const db = await this.dbPromise; + const rows = (await getAllRows(db, EVENTS_STORE)).filter((row) => row.sessionId === request.sessionId).sort(compareEventRowsByOrder); + + const offset = parseCursor(request.cursor); + const limit = normalizeLimit(request.limit); + const slice = rows.slice(offset, offset + limit).map(decodeEventRow); + const nextOffset = offset + slice.length; + + return { + items: slice, + nextCursor: nextOffset < rows.length ? String(nextOffset) : undefined, + }; + } + + async insertEvent(_sessionId: string, event: SessionEvent): Promise { + const db = await this.dbPromise; + await transactionPromise(db, [EVENTS_STORE], "readwrite", (tx) => { + tx.objectStore(EVENTS_STORE).put(encodeEventRow(event)); + }); + } + + async close(): Promise { + const db = await this.dbPromise; + db.close(); + } + + private openDatabase(): Promise { + return new Promise((resolve, reject) => { + const request = this.indexedDb.open(this.dbName, this.dbVersion); + + request.onupgradeneeded = () => { + const db = request.result; + + if (!db.objectStoreNames.contains(SESSIONS_STORE)) { + db.createObjectStore(SESSIONS_STORE, { keyPath: "id" }); + } + + if (!db.objectStoreNames.contains(EVENTS_STORE)) { + const events = db.createObjectStore(EVENTS_STORE, { keyPath: "id" }); + events.createIndex(EVENTS_BY_SESSION_INDEX, ["sessionId", "eventIndex", "id"], { + unique: false, + }); + } else { + const tx = request.transaction; + if (!tx) { + return; + } + const events = tx.objectStore(EVENTS_STORE); + if (!events.indexNames.contains(EVENTS_BY_SESSION_INDEX)) { + events.createIndex(EVENTS_BY_SESSION_INDEX, ["sessionId", "eventIndex", "id"], { 
+ unique: false, + }); + } + } + }; + + request.onsuccess = () => resolve(request.result); + request.onerror = () => reject(request.error ?? new Error("Unable to open IndexedDB")); + }); + } +} + +type SessionRow = { + id: string; + agent: string; + agentSessionId: string; + lastConnectionId: string; + createdAt: number; + destroyedAt?: number; + sandboxId?: string; + sessionInit?: SessionRecord["sessionInit"]; + configOptions?: SessionRecord["configOptions"]; + modes?: SessionRecord["modes"]; +}; + +type EventRow = { + id: number | string; + eventIndex?: number; + sessionId: string; + createdAt: number; + connectionId: string; + sender: "client" | "agent"; + payload: unknown; +}; + +function encodeSessionRow(session: SessionRecord): SessionRow { + return { + id: session.id, + agent: session.agent, + agentSessionId: session.agentSessionId, + lastConnectionId: session.lastConnectionId, + createdAt: session.createdAt, + destroyedAt: session.destroyedAt, + sandboxId: session.sandboxId, + sessionInit: session.sessionInit, + configOptions: session.configOptions, + modes: session.modes, + }; +} + +function decodeSessionRow(row: SessionRow): SessionRecord { + return { + id: row.id, + agent: row.agent, + agentSessionId: row.agentSessionId, + lastConnectionId: row.lastConnectionId, + createdAt: row.createdAt, + destroyedAt: row.destroyedAt, + sandboxId: row.sandboxId, + sessionInit: row.sessionInit, + configOptions: row.configOptions, + modes: row.modes, + }; +} + +function encodeEventRow(event: SessionEvent): EventRow { + return { + id: event.id, + eventIndex: event.eventIndex, + sessionId: event.sessionId, + createdAt: event.createdAt, + connectionId: event.connectionId, + sender: event.sender, + payload: event.payload, + }; +} + +function decodeEventRow(row: EventRow): SessionEvent { + return { + id: String(row.id), + eventIndex: parseEventIndex(row.eventIndex, row.id), + sessionId: row.sessionId, + createdAt: row.createdAt, + connectionId: row.connectionId, + sender: 
row.sender, + payload: row.payload as SessionEvent["payload"], + }; +} + +async function getAllRows(db: IDBDatabase, storeName: string): Promise { + return await transactionPromise(db, [storeName], "readonly", async (tx) => { + const request = tx.objectStore(storeName).getAll(); + return (await requestToPromise(request)) as T[]; + }); +} + +function normalizeLimit(limit: number | undefined): number { + if (!Number.isFinite(limit) || (limit ?? 0) < 1) { + return DEFAULT_LIST_LIMIT; + } + return Math.floor(limit as number); +} + +function parseCursor(cursor: string | undefined): number { + if (!cursor) { + return 0; + } + const parsed = Number.parseInt(cursor, 10); + if (!Number.isFinite(parsed) || parsed < 0) { + return 0; + } + return parsed; +} + +function compareEventRowsByOrder(a: EventRow, b: EventRow): number { + const indexA = parseEventIndex(a.eventIndex, a.id); + const indexB = parseEventIndex(b.eventIndex, b.id); + if (indexA !== indexB) { + return indexA - indexB; + } + return String(a.id).localeCompare(String(b.id)); +} + +function parseEventIndex(value: number | undefined, fallback: number | string): number { + if (typeof value === "number" && Number.isFinite(value)) { + return Math.max(0, Math.floor(value)); + } + + const parsed = Number.parseInt(String(fallback), 10); + if (!Number.isFinite(parsed) || parsed < 0) { + return 0; + } + return parsed; +} + +function requestToPromise(request: IDBRequest): Promise { + return new Promise((resolve, reject) => { + request.onsuccess = () => resolve(request.result); + request.onerror = () => reject(request.error ?? 
new Error("IndexedDB request failed")); + }); +} + +function transactionPromise(db: IDBDatabase, stores: string[], mode: IDBTransactionMode, run: (tx: IDBTransaction) => T | Promise): Promise { + return new Promise((resolve, reject) => { + const tx = db.transaction(stores, mode); + let settled = false; + let resultValue: T | undefined; + let runCompleted = false; + let txCompleted = false; + + function tryResolve() { + if (settled || !runCompleted || !txCompleted) { + return; + } + settled = true; + resolve(resultValue as T); + } + + tx.oncomplete = () => { + txCompleted = true; + tryResolve(); + }; + + tx.onerror = () => { + if (settled) { + return; + } + settled = true; + reject(tx.error ?? new Error("IndexedDB transaction failed")); + }; + + tx.onabort = () => { + if (settled) { + return; + } + settled = true; + reject(tx.error ?? new Error("IndexedDB transaction aborted")); + }; + + Promise.resolve(run(tx)) + .then((value) => { + resultValue = value; + runCompleted = true; + tryResolve(); + }) + .catch((error) => { + if (!settled) { + settled = true; + reject(error); + } + try { + tx.abort(); + } catch { + // no-op + } + }); + }); +} diff --git a/frontend/packages/website/public/logos/cloudflare.svg b/frontend/packages/website/public/logos/cloudflare.svg new file mode 100644 index 0000000..76a2e80 --- /dev/null +++ b/frontend/packages/website/public/logos/cloudflare.svg @@ -0,0 +1,3 @@ + + + diff --git a/frontend/packages/website/public/logos/computesdk.svg b/frontend/packages/website/public/logos/computesdk.svg new file mode 100644 index 0000000..45c6271 --- /dev/null +++ b/frontend/packages/website/public/logos/computesdk.svg @@ -0,0 +1,3 @@ + + + diff --git a/frontend/packages/website/public/logos/docker.svg b/frontend/packages/website/public/logos/docker.svg new file mode 100644 index 0000000..33582ef --- /dev/null +++ b/frontend/packages/website/public/logos/docker.svg @@ -0,0 +1,3 @@ + + + diff --git a/frontend/packages/website/public/logos/modal.svg 
b/frontend/packages/website/public/logos/modal.svg new file mode 100644 index 0000000..990b5bd --- /dev/null +++ b/frontend/packages/website/public/logos/modal.svg @@ -0,0 +1,3 @@ + + + diff --git a/frontend/packages/website/src/components/GetStarted.tsx b/frontend/packages/website/src/components/GetStarted.tsx index 57cccef..8a03b34 100644 --- a/frontend/packages/website/src/components/GetStarted.tsx +++ b/frontend/packages/website/src/components/GetStarted.tsx @@ -5,8 +5,11 @@ import { Code, Server, GitBranch } from "lucide-react"; import { CopyButton } from "./ui/CopyButton"; const sdkCodeRaw = `import { SandboxAgent } from "sandbox-agent"; +import { local } from "sandbox-agent/local"; -const client = await SandboxAgent.start(); +const client = await SandboxAgent.start({ + sandbox: local(), +}); await client.createSession("my-session", { agent: "claude-code", @@ -32,13 +35,26 @@ function SdkCodeHighlighted() { "sandbox-agent" ; + {"\n"} + import + {" { "} + local + {" } "} + from + + "sandbox-agent/local" + ; {"\n\n"} const client = await SandboxAgent. start - (); + {"({"} + {"\n"} + {" sandbox: local(),"} + {"\n"} + {"});"} {"\n\n"} await client. 
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8396837..e4e7838 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -154,13 +154,13 @@ importers: dockerode: specifier: latest version: 4.0.9 + get-port: + specifier: latest + version: 7.1.0 sandbox-agent: specifier: workspace:* version: link:../../sdks/typescript devDependencies: - '@types/dockerode': - specifier: latest - version: 4.0.1 '@types/node': specifier: latest version: 25.5.0 @@ -345,9 +345,6 @@ importers: '@sandbox-agent/example-shared': specifier: workspace:* version: link:../shared - '@sandbox-agent/persist-postgres': - specifier: workspace:* - version: link:../../sdks/persist-postgres pg: specifier: latest version: 8.20.0 @@ -373,13 +370,16 @@ importers: '@sandbox-agent/example-shared': specifier: workspace:* version: link:../shared - '@sandbox-agent/persist-sqlite': - specifier: workspace:* - version: link:../../sdks/persist-sqlite + better-sqlite3: + specifier: ^11.0.0 + version: 11.10.0 sandbox-agent: specifier: workspace:* version: link:../../sdks/typescript devDependencies: + '@types/better-sqlite3': + specifier: ^7.0.0 + version: 7.6.13 '@types/node': specifier: latest version: 25.5.0 @@ -640,9 +640,6 @@ importers: frontend/packages/inspector: dependencies: - '@sandbox-agent/persist-indexeddb': - specifier: workspace:* - version: link:../../../sdks/persist-indexeddb lucide-react: specifier: ^0.469.0 version: 0.469.0(react@18.3.1) @@ -897,57 +894,30 @@ importers: sdks/gigacode/platforms/win32-x64: {} sdks/persist-indexeddb: - dependencies: - sandbox-agent: - specifier: workspace:* - version: link:../typescript devDependencies: '@types/node': specifier: ^22.0.0 version: 22.19.7 - fake-indexeddb: - specifier: ^6.2.4 - version: 6.2.5 tsup: specifier: ^8.0.0 version: 8.5.1(jiti@1.21.7)(postcss@8.5.6)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.2) typescript: specifier: ^5.7.0 version: 5.9.3 - vitest: - specifier: ^3.0.0 - version: 
3.2.4(@types/debug@4.1.12)(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) sdks/persist-postgres: - dependencies: - pg: - specifier: ^8.16.3 - version: 8.18.0 - sandbox-agent: - specifier: workspace:* - version: link:../typescript devDependencies: '@types/node': specifier: ^22.0.0 version: 22.19.7 - '@types/pg': - specifier: ^8.15.6 - version: 8.16.0 tsup: specifier: ^8.0.0 version: 8.5.1(jiti@1.21.7)(postcss@8.5.6)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.2) typescript: specifier: ^5.7.0 version: 5.9.3 - vitest: - specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) sdks/persist-rivet: - dependencies: - sandbox-agent: - specifier: workspace:* - version: link:../typescript devDependencies: '@types/node': specifier: ^22.0.0 @@ -958,22 +928,9 @@ importers: typescript: specifier: ^5.7.0 version: 5.9.3 - vitest: - specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) sdks/persist-sqlite: - dependencies: - better-sqlite3: - specifier: ^11.0.0 - version: 11.10.0 - sandbox-agent: - specifier: workspace:* - version: link:../typescript devDependencies: - '@types/better-sqlite3': - specifier: ^7.0.0 - version: 7.6.13 '@types/node': specifier: ^22.0.0 version: 22.19.7 @@ -983,9 +940,6 @@ importers: typescript: specifier: ^5.7.0 version: 5.9.3 - vitest: - specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) sdks/react: dependencies: @@ -1025,12 +979,39 @@ importers: specifier: workspace:* version: link:../cli devDependencies: + '@cloudflare/sandbox': + specifier: '>=0.1.0' + version: 0.7.17(@opencode-ai/sdk@1.2.24) + '@daytonaio/sdk': + specifier: '>=0.12.0' + version: 0.151.0(ws@8.19.0) + '@e2b/code-interpreter': + specifier: '>=1.0.0' + version: 2.3.3 + '@types/dockerode': + specifier: ^4.0.0 + version: 4.0.1 '@types/node': specifier: ^22.0.0 version: 22.19.7 '@types/ws': specifier: 
^8.18.1 version: 8.18.1 + '@vercel/sandbox': + specifier: '>=0.1.0' + version: 1.8.1 + computesdk: + specifier: '>=0.1.0' + version: 2.5.0 + dockerode: + specifier: '>=4.0.0' + version: 4.0.9 + get-port: + specifier: '>=7.0.0' + version: 7.1.0 + modal: + specifier: '>=0.1.0' + version: 0.7.3 openapi-typescript: specifier: ^6.7.0 version: 6.7.6 @@ -3607,9 +3588,6 @@ packages: '@types/node@25.5.0': resolution: {integrity: sha512-jp2P3tQMSxWugkCUKLRPVUpGaL5MVFwF8RDuSRztfwgN1wmqJeMSbKlnEtQqU8UrhTmzEmZdu2I6v2dpp7XIxw==} - '@types/pg@8.16.0': - resolution: {integrity: sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ==} - '@types/pg@8.18.0': resolution: {integrity: sha512-gT+oueVQkqnj6ajGJXblFR4iavIXWsGAFCk3dP4Kki5+a9R4NMt0JARdk6s8cUKcfUoqP5dAtDSLU8xYUTFV+Q==} @@ -5823,9 +5801,6 @@ packages: pg-cloudflare@1.3.0: resolution: {integrity: sha512-6lswVVSztmHiRtD6I8hw4qP/nDm1EJbKMRhf3HCYaqud7frGysPv7FYJ5noZQdhQtN2xJnimfMtvQq21pdbzyQ==} - pg-connection-string@2.11.0: - resolution: {integrity: sha512-kecgoJwhOpxYU21rZjULrmrBJ698U2RxXofKVzOn5UDj61BPj/qMb7diYUR1nLScCDbrztQFl1TaQZT0t1EtzQ==} - pg-connection-string@2.12.0: resolution: {integrity: sha512-U7qg+bpswf3Cs5xLzRqbXbQl85ng0mfSV/J0nnA31MCLgvEaAo7CIhmeyrmJpOr7o+zm0rXK+hNnT5l9RHkCkQ==} @@ -5833,11 +5808,6 @@ packages: resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} engines: {node: '>=4.0.0'} - pg-pool@3.11.0: - resolution: {integrity: sha512-MJYfvHwtGp870aeusDh+hg9apvOe2zmpZJpyt+BMtzUWlVqbhFmMK6bOBXLBUPd7iRtIF9fZplDc7KrPN3PN7w==} - peerDependencies: - pg: '>=8.0' - pg-pool@3.13.0: resolution: {integrity: sha512-gB+R+Xud1gLFuRD/QgOIgGOBE2KCQPaPwkzBBGC9oG69pHTkhQeIuejVIk3/cnDyX39av2AxomQiyPT13WKHQA==} peerDependencies: @@ -5853,15 +5823,6 @@ packages: resolution: {integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==} engines: {node: '>=4'} - pg@8.18.0: - 
resolution: {integrity: sha512-xqrUDL1b9MbkydY/s+VZ6v+xiMUmOUk7SS9d/1kpyQxoJ6U9AO1oIJyUWVZojbfe5Cc/oluutcgFG4L9RDP1iQ==} - engines: {node: '>= 16.0.0'} - peerDependencies: - pg-native: '>=3.0.1' - peerDependenciesMeta: - pg-native: - optional: true - pg@8.20.0: resolution: {integrity: sha512-ldhMxz2r8fl/6QkXnBD3CR9/xg694oT6DZQ2s6c/RI28OjtSOpxnPrUCGOBJ46RCUxcWdx3p6kw/xnDHjKvaRA==} engines: {node: '>= 16.0.0'} @@ -10190,12 +10151,6 @@ snapshots: dependencies: undici-types: 7.18.2 - '@types/pg@8.16.0': - dependencies: - '@types/node': 24.10.9 - pg-protocol: 1.11.0 - pg-types: 2.2.0 - '@types/pg@8.18.0': dependencies: '@types/node': 24.10.9 @@ -11273,7 +11228,7 @@ snapshots: glob: 11.1.0 openapi-fetch: 0.14.1 platform: 1.3.6 - tar: 7.5.6 + tar: 7.5.7 earcut@2.2.4: {} @@ -12783,16 +12738,10 @@ snapshots: pg-cloudflare@1.3.0: optional: true - pg-connection-string@2.11.0: {} - pg-connection-string@2.12.0: {} pg-int8@1.0.1: {} - pg-pool@3.11.0(pg@8.18.0): - dependencies: - pg: 8.18.0 - pg-pool@3.13.0(pg@8.20.0): dependencies: pg: 8.20.0 @@ -12809,16 +12758,6 @@ snapshots: postgres-date: 1.0.7 postgres-interval: 1.2.0 - pg@8.18.0: - dependencies: - pg-connection-string: 2.11.0 - pg-pool: 3.11.0(pg@8.18.0) - pg-protocol: 1.11.0 - pg-types: 2.2.0 - pgpass: 1.0.5 - optionalDependencies: - pg-cloudflare: 1.3.0 - pg@8.20.0: dependencies: pg-connection-string: 2.12.0 diff --git a/sdks/persist-indexeddb/README.md b/sdks/persist-indexeddb/README.md new file mode 100644 index 0000000..02dc5c2 --- /dev/null +++ b/sdks/persist-indexeddb/README.md @@ -0,0 +1,5 @@ +# @sandbox-agent/persist-indexeddb + +> **Deprecated:** This package has been deprecated and removed. + +Copy the driver source into your project. See the [reference implementation](https://github.com/rivet-dev/sandbox-agent/tree/main/frontend/packages/inspector/src/persist-indexeddb.ts) and the [session persistence docs](https://sandboxagent.dev/session-persistence) for guidance. 
diff --git a/sdks/persist-indexeddb/package.json b/sdks/persist-indexeddb/package.json index 179e0be..da05325 100644 --- a/sdks/persist-indexeddb/package.json +++ b/sdks/persist-indexeddb/package.json @@ -1,7 +1,7 @@ { "name": "@sandbox-agent/persist-indexeddb", "version": "0.3.2", - "description": "IndexedDB persistence driver for the Sandbox Agent TypeScript SDK", + "description": "IndexedDB persistence driver for the Sandbox Agent TypeScript SDK (DEPRECATED)", "license": "Apache-2.0", "repository": { "type": "git", @@ -16,23 +16,16 @@ "import": "./dist/index.js" } }, - "dependencies": { - "sandbox-agent": "workspace:*" - }, "files": [ "dist" ], "scripts": { "build": "tsup", - "typecheck": "tsc --noEmit", - "test": "vitest run", - "test:watch": "vitest" + "typecheck": "tsc --noEmit" }, "devDependencies": { "@types/node": "^22.0.0", - "fake-indexeddb": "^6.2.4", "tsup": "^8.0.0", - "typescript": "^5.7.0", - "vitest": "^3.0.0" + "typescript": "^5.7.0" } } diff --git a/sdks/persist-indexeddb/src/index.ts b/sdks/persist-indexeddb/src/index.ts index 945e993..e388530 100644 --- a/sdks/persist-indexeddb/src/index.ts +++ b/sdks/persist-indexeddb/src/index.ts @@ -1,311 +1,5 @@ -import type { ListEventsRequest, ListPage, ListPageRequest, SessionEvent, SessionPersistDriver, SessionRecord } from "sandbox-agent"; - -const DEFAULT_DB_NAME = "sandbox-agent-session-store"; -const DEFAULT_DB_VERSION = 2; -const SESSIONS_STORE = "sessions"; -const EVENTS_STORE = "events"; -const EVENTS_BY_SESSION_INDEX = "by_session_index"; -const DEFAULT_LIST_LIMIT = 100; - -export interface IndexedDbSessionPersistDriverOptions { - databaseName?: string; - databaseVersion?: number; - indexedDb?: IDBFactory; -} - -export class IndexedDbSessionPersistDriver implements SessionPersistDriver { - private readonly indexedDb: IDBFactory; - private readonly dbName: string; - private readonly dbVersion: number; - private readonly dbPromise: Promise; - - constructor(options: 
IndexedDbSessionPersistDriverOptions = {}) { - const indexedDb = options.indexedDb ?? globalThis.indexedDB; - if (!indexedDb) { - throw new Error("IndexedDB is not available in this runtime."); - } - - this.indexedDb = indexedDb; - this.dbName = options.databaseName ?? DEFAULT_DB_NAME; - this.dbVersion = options.databaseVersion ?? DEFAULT_DB_VERSION; - this.dbPromise = this.openDatabase(); - } - - async getSession(id: string): Promise { - const db = await this.dbPromise; - const row = await requestToPromise(db.transaction(SESSIONS_STORE, "readonly").objectStore(SESSIONS_STORE).get(id)); - if (!row || typeof row !== "object") { - return null; - } - return decodeSessionRow(row as SessionRow); - } - - async listSessions(request: ListPageRequest = {}): Promise> { - const db = await this.dbPromise; - const rows = await getAllRows(db, SESSIONS_STORE); - - rows.sort((a, b) => { - if (a.createdAt !== b.createdAt) { - return a.createdAt - b.createdAt; - } - return a.id.localeCompare(b.id); - }); - - const offset = parseCursor(request.cursor); - const limit = normalizeLimit(request.limit); - const slice = rows.slice(offset, offset + limit).map(decodeSessionRow); - const nextOffset = offset + slice.length; - - return { - items: slice, - nextCursor: nextOffset < rows.length ? 
String(nextOffset) : undefined, - }; - } - - async updateSession(session: SessionRecord): Promise { - const db = await this.dbPromise; - await transactionPromise(db, [SESSIONS_STORE], "readwrite", (tx) => { - tx.objectStore(SESSIONS_STORE).put(encodeSessionRow(session)); - }); - } - - async listEvents(request: ListEventsRequest): Promise> { - const db = await this.dbPromise; - const rows = (await getAllRows(db, EVENTS_STORE)).filter((row) => row.sessionId === request.sessionId).sort(compareEventRowsByOrder); - - const offset = parseCursor(request.cursor); - const limit = normalizeLimit(request.limit); - const slice = rows.slice(offset, offset + limit).map(decodeEventRow); - const nextOffset = offset + slice.length; - - return { - items: slice, - nextCursor: nextOffset < rows.length ? String(nextOffset) : undefined, - }; - } - - async insertEvent(event: SessionEvent): Promise { - const db = await this.dbPromise; - await transactionPromise(db, [EVENTS_STORE], "readwrite", (tx) => { - tx.objectStore(EVENTS_STORE).put(encodeEventRow(event)); - }); - } - - async close(): Promise { - const db = await this.dbPromise; - db.close(); - } - - private openDatabase(): Promise { - return new Promise((resolve, reject) => { - const request = this.indexedDb.open(this.dbName, this.dbVersion); - - request.onupgradeneeded = () => { - const db = request.result; - - if (!db.objectStoreNames.contains(SESSIONS_STORE)) { - db.createObjectStore(SESSIONS_STORE, { keyPath: "id" }); - } - - if (!db.objectStoreNames.contains(EVENTS_STORE)) { - const events = db.createObjectStore(EVENTS_STORE, { keyPath: "id" }); - events.createIndex(EVENTS_BY_SESSION_INDEX, ["sessionId", "eventIndex", "id"], { - unique: false, - }); - } else { - const tx = request.transaction; - if (!tx) { - return; - } - const events = tx.objectStore(EVENTS_STORE); - if (!events.indexNames.contains(EVENTS_BY_SESSION_INDEX)) { - events.createIndex(EVENTS_BY_SESSION_INDEX, ["sessionId", "eventIndex", "id"], { - unique: false, - 
}); - } - } - }; - - request.onsuccess = () => resolve(request.result); - request.onerror = () => reject(request.error ?? new Error("Unable to open IndexedDB")); - }); - } -} - -type SessionRow = { - id: string; - agent: string; - agentSessionId: string; - lastConnectionId: string; - createdAt: number; - destroyedAt?: number; - sessionInit?: SessionRecord["sessionInit"]; -}; - -type EventRow = { - id: number | string; - eventIndex?: number; - sessionId: string; - createdAt: number; - connectionId: string; - sender: "client" | "agent"; - payload: unknown; -}; - -function encodeSessionRow(session: SessionRecord): SessionRow { - return { - id: session.id, - agent: session.agent, - agentSessionId: session.agentSessionId, - lastConnectionId: session.lastConnectionId, - createdAt: session.createdAt, - destroyedAt: session.destroyedAt, - sessionInit: session.sessionInit, - }; -} - -function decodeSessionRow(row: SessionRow): SessionRecord { - return { - id: row.id, - agent: row.agent, - agentSessionId: row.agentSessionId, - lastConnectionId: row.lastConnectionId, - createdAt: row.createdAt, - destroyedAt: row.destroyedAt, - sessionInit: row.sessionInit, - }; -} - -function encodeEventRow(event: SessionEvent): EventRow { - return { - id: event.id, - eventIndex: event.eventIndex, - sessionId: event.sessionId, - createdAt: event.createdAt, - connectionId: event.connectionId, - sender: event.sender, - payload: event.payload, - }; -} - -function decodeEventRow(row: EventRow): SessionEvent { - return { - id: String(row.id), - eventIndex: parseEventIndex(row.eventIndex, row.id), - sessionId: row.sessionId, - createdAt: row.createdAt, - connectionId: row.connectionId, - sender: row.sender, - payload: row.payload as SessionEvent["payload"], - }; -} - -async function getAllRows(db: IDBDatabase, storeName: string): Promise { - return await transactionPromise(db, [storeName], "readonly", async (tx) => { - const request = tx.objectStore(storeName).getAll(); - return (await 
requestToPromise(request)) as T[]; - }); -} - -function normalizeLimit(limit: number | undefined): number { - if (!Number.isFinite(limit) || (limit ?? 0) < 1) { - return DEFAULT_LIST_LIMIT; - } - return Math.floor(limit as number); -} - -function parseCursor(cursor: string | undefined): number { - if (!cursor) { - return 0; - } - const parsed = Number.parseInt(cursor, 10); - if (!Number.isFinite(parsed) || parsed < 0) { - return 0; - } - return parsed; -} - -function compareEventRowsByOrder(a: EventRow, b: EventRow): number { - const indexA = parseEventIndex(a.eventIndex, a.id); - const indexB = parseEventIndex(b.eventIndex, b.id); - if (indexA !== indexB) { - return indexA - indexB; - } - return String(a.id).localeCompare(String(b.id)); -} - -function parseEventIndex(value: number | undefined, fallback: number | string): number { - if (typeof value === "number" && Number.isFinite(value)) { - return Math.max(0, Math.floor(value)); - } - - const parsed = Number.parseInt(String(fallback), 10); - if (!Number.isFinite(parsed) || parsed < 0) { - return 0; - } - return parsed; -} - -function requestToPromise(request: IDBRequest): Promise { - return new Promise((resolve, reject) => { - request.onsuccess = () => resolve(request.result); - request.onerror = () => reject(request.error ?? 
new Error("IndexedDB request failed")); - }); -} - -function transactionPromise(db: IDBDatabase, stores: string[], mode: IDBTransactionMode, run: (tx: IDBTransaction) => T | Promise): Promise { - return new Promise((resolve, reject) => { - const tx = db.transaction(stores, mode); - let settled = false; - let resultValue: T | undefined; - let runCompleted = false; - let txCompleted = false; - - function tryResolve() { - if (settled || !runCompleted || !txCompleted) { - return; - } - settled = true; - resolve(resultValue as T); - } - - tx.oncomplete = () => { - txCompleted = true; - tryResolve(); - }; - - tx.onerror = () => { - if (settled) { - return; - } - settled = true; - reject(tx.error ?? new Error("IndexedDB transaction failed")); - }; - - tx.onabort = () => { - if (settled) { - return; - } - settled = true; - reject(tx.error ?? new Error("IndexedDB transaction aborted")); - }; - - Promise.resolve(run(tx)) - .then((value) => { - resultValue = value; - runCompleted = true; - tryResolve(); - }) - .catch((error) => { - if (!settled) { - settled = true; - reject(error); - } - try { - tx.abort(); - } catch { - // no-op - } - }); - }); -} +throw new Error( + "@sandbox-agent/persist-indexeddb has been deprecated and removed. " + + "Copy the reference implementation from frontend/packages/inspector/src/persist-indexeddb.ts into your project instead. 
" + + "See https://github.com/rivet-dev/sandbox-agent/tree/main/frontend/packages/inspector/src/persist-indexeddb.ts", +); diff --git a/sdks/persist-indexeddb/tests/driver.test.ts b/sdks/persist-indexeddb/tests/driver.test.ts deleted file mode 100644 index 78acbe1..0000000 --- a/sdks/persist-indexeddb/tests/driver.test.ts +++ /dev/null @@ -1,96 +0,0 @@ -import "fake-indexeddb/auto"; -import { describe, it, expect } from "vitest"; -import { IndexedDbSessionPersistDriver } from "../src/index.ts"; - -function uniqueDbName(prefix: string): string { - return `${prefix}-${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 10)}`; -} - -describe("IndexedDbSessionPersistDriver", () => { - it("stores and pages sessions and events", async () => { - const dbName = uniqueDbName("indexeddb-driver"); - const driver = new IndexedDbSessionPersistDriver({ databaseName: dbName }); - - await driver.updateSession({ - id: "s-1", - agent: "mock", - agentSessionId: "a-1", - lastConnectionId: "c-1", - createdAt: 100, - }); - - await driver.updateSession({ - id: "s-2", - agent: "mock", - agentSessionId: "a-2", - lastConnectionId: "c-2", - createdAt: 200, - destroyedAt: 300, - }); - - await driver.insertEvent({ - id: "evt-1", - eventIndex: 1, - sessionId: "s-1", - createdAt: 1, - connectionId: "c-1", - sender: "client", - payload: { jsonrpc: "2.0", method: "session/prompt", params: { sessionId: "a-1" } }, - }); - - await driver.insertEvent({ - id: "evt-2", - eventIndex: 2, - sessionId: "s-1", - createdAt: 2, - connectionId: "c-1", - sender: "agent", - payload: { jsonrpc: "2.0", method: "session/update", params: { sessionId: "a-1" } }, - }); - - const loaded = await driver.getSession("s-2"); - expect(loaded?.destroyedAt).toBe(300); - - const page1 = await driver.listSessions({ limit: 1 }); - expect(page1.items).toHaveLength(1); - expect(page1.items[0]?.id).toBe("s-1"); - expect(page1.nextCursor).toBeTruthy(); - - const page2 = await driver.listSessions({ cursor: page1.nextCursor, 
limit: 1 }); - expect(page2.items).toHaveLength(1); - expect(page2.items[0]?.id).toBe("s-2"); - expect(page2.nextCursor).toBeUndefined(); - - const eventsPage = await driver.listEvents({ sessionId: "s-1", limit: 10 }); - expect(eventsPage.items).toHaveLength(2); - expect(eventsPage.items[0]?.id).toBe("evt-1"); - expect(eventsPage.items[0]?.eventIndex).toBe(1); - expect(eventsPage.items[1]?.id).toBe("evt-2"); - expect(eventsPage.items[1]?.eventIndex).toBe(2); - - await driver.close(); - }); - - it("persists across driver instances for same database", async () => { - const dbName = uniqueDbName("indexeddb-reopen"); - - { - const driver = new IndexedDbSessionPersistDriver({ databaseName: dbName }); - await driver.updateSession({ - id: "s-1", - agent: "mock", - agentSessionId: "a-1", - lastConnectionId: "c-1", - createdAt: 1, - }); - await driver.close(); - } - - { - const driver = new IndexedDbSessionPersistDriver({ databaseName: dbName }); - const session = await driver.getSession("s-1"); - expect(session?.id).toBe("s-1"); - await driver.close(); - } - }); -}); diff --git a/sdks/persist-indexeddb/tests/integration.test.ts b/sdks/persist-indexeddb/tests/integration.test.ts deleted file mode 100644 index 4a27ac5..0000000 --- a/sdks/persist-indexeddb/tests/integration.test.ts +++ /dev/null @@ -1,129 +0,0 @@ -import "fake-indexeddb/auto"; -import { describe, it, expect, beforeAll, afterAll } from "vitest"; -import { existsSync, mkdtempSync, rmSync } from "node:fs"; -import { dirname, join, resolve } from "node:path"; -import { fileURLToPath } from "node:url"; -import { tmpdir } from "node:os"; -import { SandboxAgent } from "sandbox-agent"; -import { spawnSandboxAgent, type SandboxAgentSpawnHandle } from "../../typescript/src/spawn.ts"; -import { prepareMockAgentDataHome } from "../../typescript/tests/helpers/mock-agent.ts"; -import { IndexedDbSessionPersistDriver } from "../src/index.ts"; - -const __dirname = dirname(fileURLToPath(import.meta.url)); - -function 
findBinary(): string | null { - if (process.env.SANDBOX_AGENT_BIN) { - return process.env.SANDBOX_AGENT_BIN; - } - - const cargoPaths = [resolve(__dirname, "../../../target/debug/sandbox-agent"), resolve(__dirname, "../../../target/release/sandbox-agent")]; - - for (const p of cargoPaths) { - if (existsSync(p)) { - return p; - } - } - - return null; -} - -function uniqueDbName(prefix: string): string { - return `${prefix}-${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 10)}`; -} - -const BINARY_PATH = findBinary(); -if (!BINARY_PATH) { - throw new Error("sandbox-agent binary not found. Build it (cargo build -p sandbox-agent) or set SANDBOX_AGENT_BIN."); -} -if (!process.env.SANDBOX_AGENT_BIN) { - process.env.SANDBOX_AGENT_BIN = BINARY_PATH; -} - -describe("IndexedDB persistence end-to-end", () => { - let handle: SandboxAgentSpawnHandle; - let baseUrl: string; - let token: string; - let dataHome: string; - - beforeAll(async () => { - dataHome = mkdtempSync(join(tmpdir(), "indexeddb-integration-")); - prepareMockAgentDataHome(dataHome); - - handle = await spawnSandboxAgent({ - enabled: true, - log: "silent", - timeoutMs: 30000, - env: { - XDG_DATA_HOME: dataHome, - HOME: dataHome, - USERPROFILE: dataHome, - APPDATA: join(dataHome, "AppData", "Roaming"), - LOCALAPPDATA: join(dataHome, "AppData", "Local"), - }, - }); - baseUrl = handle.baseUrl; - token = handle.token; - }); - - afterAll(async () => { - await handle.dispose(); - rmSync(dataHome, { recursive: true, force: true }); - }); - - it("restores sessions/events across sdk instances", async () => { - const dbName = uniqueDbName("sandbox-agent-browser-e2e"); - - const persist1 = new IndexedDbSessionPersistDriver({ databaseName: dbName }); - const sdk1 = await SandboxAgent.connect({ - baseUrl, - token, - persist: persist1, - replayMaxEvents: 40, - replayMaxChars: 16000, - }); - - const created = await sdk1.createSession({ agent: "mock" }); - await created.prompt([{ type: "text", text: 
"indexeddb-first" }]); - const firstConnectionId = created.lastConnectionId; - - await sdk1.dispose(); - await persist1.close(); - - const persist2 = new IndexedDbSessionPersistDriver({ databaseName: dbName }); - const sdk2 = await SandboxAgent.connect({ - baseUrl, - token, - persist: persist2, - replayMaxEvents: 40, - replayMaxChars: 16000, - }); - - const restored = await sdk2.resumeSession(created.id); - expect(restored.lastConnectionId).not.toBe(firstConnectionId); - - await restored.prompt([{ type: "text", text: "indexeddb-second" }]); - - const sessions = await sdk2.listSessions({ limit: 20 }); - expect(sessions.items.some((entry) => entry.id === created.id)).toBe(true); - - const events = await sdk2.getEvents({ sessionId: created.id, limit: 1000 }); - expect(events.items.length).toBeGreaterThan(0); - - const replayInjected = events.items.find((event) => { - if (event.sender !== "client") { - return false; - } - const payload = event.payload as Record; - const method = payload.method; - const params = payload.params as Record | undefined; - const prompt = Array.isArray(params?.prompt) ? params?.prompt : []; - const firstBlock = prompt[0] as Record | undefined; - return method === "session/prompt" && typeof firstBlock?.text === "string" && firstBlock.text.includes("Previous session history is replayed below"); - }); - - expect(replayInjected).toBeTruthy(); - - await sdk2.dispose(); - await persist2.close(); - }); -}); diff --git a/sdks/persist-postgres/README.md b/sdks/persist-postgres/README.md new file mode 100644 index 0000000..5a3afba --- /dev/null +++ b/sdks/persist-postgres/README.md @@ -0,0 +1,5 @@ +# @sandbox-agent/persist-postgres + +> **Deprecated:** This package has been deprecated and removed. + +Install `pg` directly and copy the driver source into your project. 
See the [full example](https://github.com/rivet-dev/sandbox-agent/tree/main/examples/persist-postgres) and the [session persistence docs](https://sandboxagent.dev/session-persistence) for guidance. diff --git a/sdks/persist-postgres/package.json b/sdks/persist-postgres/package.json index 49bd9f1..caa49f6 100644 --- a/sdks/persist-postgres/package.json +++ b/sdks/persist-postgres/package.json @@ -1,7 +1,7 @@ { "name": "@sandbox-agent/persist-postgres", "version": "0.3.2", - "description": "PostgreSQL persistence driver for the Sandbox Agent TypeScript SDK", + "description": "PostgreSQL persistence driver for the Sandbox Agent TypeScript SDK (DEPRECATED)", "license": "Apache-2.0", "repository": { "type": "git", @@ -16,24 +16,16 @@ "import": "./dist/index.js" } }, - "dependencies": { - "pg": "^8.16.3", - "sandbox-agent": "workspace:*" - }, "files": [ "dist" ], "scripts": { "build": "tsup", - "typecheck": "tsc --noEmit", - "test": "vitest run", - "test:watch": "vitest" + "typecheck": "tsc --noEmit" }, "devDependencies": { "@types/node": "^22.0.0", - "@types/pg": "^8.15.6", "tsup": "^8.0.0", - "typescript": "^5.7.0", - "vitest": "^3.0.0" + "typescript": "^5.7.0" } } diff --git a/sdks/persist-postgres/src/index.ts b/sdks/persist-postgres/src/index.ts index 7c77827..ec76a53 100644 --- a/sdks/persist-postgres/src/index.ts +++ b/sdks/persist-postgres/src/index.ts @@ -1,306 +1,5 @@ -import { Pool, type PoolConfig } from "pg"; -import type { ListEventsRequest, ListPage, ListPageRequest, SessionEvent, SessionPersistDriver, SessionRecord } from "sandbox-agent"; - -const DEFAULT_LIST_LIMIT = 100; - -export interface PostgresSessionPersistDriverOptions { - connectionString?: string; - pool?: Pool; - poolConfig?: PoolConfig; - schema?: string; -} - -export class PostgresSessionPersistDriver implements SessionPersistDriver { - private readonly pool: Pool; - private readonly ownsPool: boolean; - private readonly schema: string; - private readonly initialized: Promise; - - 
constructor(options: PostgresSessionPersistDriverOptions = {}) { - this.schema = normalizeSchema(options.schema ?? "public"); - - if (options.pool) { - this.pool = options.pool; - this.ownsPool = false; - } else { - this.pool = new Pool({ - connectionString: options.connectionString, - ...options.poolConfig, - }); - this.ownsPool = true; - } - - this.initialized = this.initialize(); - } - - async getSession(id: string): Promise { - await this.ready(); - - const result = await this.pool.query( - `SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, session_init_json - FROM ${this.table("sessions")} - WHERE id = $1`, - [id], - ); - - if (result.rows.length === 0) { - return null; - } - - return decodeSessionRow(result.rows[0]); - } - - async listSessions(request: ListPageRequest = {}): Promise> { - await this.ready(); - - const offset = parseCursor(request.cursor); - const limit = normalizeLimit(request.limit); - - const rowsResult = await this.pool.query( - `SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, session_init_json - FROM ${this.table("sessions")} - ORDER BY created_at ASC, id ASC - LIMIT $1 OFFSET $2`, - [limit, offset], - ); - - const countResult = await this.pool.query<{ count: string }>(`SELECT COUNT(*) AS count FROM ${this.table("sessions")}`); - const total = parseInteger(countResult.rows[0]?.count ?? "0"); - const nextOffset = offset + rowsResult.rows.length; - - return { - items: rowsResult.rows.map(decodeSessionRow), - nextCursor: nextOffset < total ? 
String(nextOffset) : undefined, - }; - } - - async updateSession(session: SessionRecord): Promise { - await this.ready(); - - await this.pool.query( - `INSERT INTO ${this.table("sessions")} ( - id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, session_init_json - ) VALUES ($1, $2, $3, $4, $5, $6, $7) - ON CONFLICT(id) DO UPDATE SET - agent = EXCLUDED.agent, - agent_session_id = EXCLUDED.agent_session_id, - last_connection_id = EXCLUDED.last_connection_id, - created_at = EXCLUDED.created_at, - destroyed_at = EXCLUDED.destroyed_at, - session_init_json = EXCLUDED.session_init_json`, - [ - session.id, - session.agent, - session.agentSessionId, - session.lastConnectionId, - session.createdAt, - session.destroyedAt ?? null, - session.sessionInit ?? null, - ], - ); - } - - async listEvents(request: ListEventsRequest): Promise> { - await this.ready(); - - const offset = parseCursor(request.cursor); - const limit = normalizeLimit(request.limit); - - const rowsResult = await this.pool.query( - `SELECT id, event_index, session_id, created_at, connection_id, sender, payload_json - FROM ${this.table("events")} - WHERE session_id = $1 - ORDER BY event_index ASC, id ASC - LIMIT $2 OFFSET $3`, - [request.sessionId, limit, offset], - ); - - const countResult = await this.pool.query<{ count: string }>(`SELECT COUNT(*) AS count FROM ${this.table("events")} WHERE session_id = $1`, [ - request.sessionId, - ]); - const total = parseInteger(countResult.rows[0]?.count ?? "0"); - const nextOffset = offset + rowsResult.rows.length; - - return { - items: rowsResult.rows.map(decodeEventRow), - nextCursor: nextOffset < total ? 
String(nextOffset) : undefined, - }; - } - - async insertEvent(event: SessionEvent): Promise { - await this.ready(); - - await this.pool.query( - `INSERT INTO ${this.table("events")} ( - id, event_index, session_id, created_at, connection_id, sender, payload_json - ) VALUES ($1, $2, $3, $4, $5, $6, $7) - ON CONFLICT(id) DO UPDATE SET - event_index = EXCLUDED.event_index, - session_id = EXCLUDED.session_id, - created_at = EXCLUDED.created_at, - connection_id = EXCLUDED.connection_id, - sender = EXCLUDED.sender, - payload_json = EXCLUDED.payload_json`, - [event.id, event.eventIndex, event.sessionId, event.createdAt, event.connectionId, event.sender, event.payload], - ); - } - - async close(): Promise { - if (!this.ownsPool) { - return; - } - await this.pool.end(); - } - - private async ready(): Promise { - await this.initialized; - } - - private table(name: "sessions" | "events"): string { - return `"${this.schema}"."${name}"`; - } - - private async initialize(): Promise { - await this.pool.query(`CREATE SCHEMA IF NOT EXISTS "${this.schema}"`); - - await this.pool.query(` - CREATE TABLE IF NOT EXISTS ${this.table("sessions")} ( - id TEXT PRIMARY KEY, - agent TEXT NOT NULL, - agent_session_id TEXT NOT NULL, - last_connection_id TEXT NOT NULL, - created_at BIGINT NOT NULL, - destroyed_at BIGINT, - session_init_json JSONB - ) - `); - - await this.pool.query(` - CREATE TABLE IF NOT EXISTS ${this.table("events")} ( - id TEXT PRIMARY KEY, - event_index BIGINT NOT NULL, - session_id TEXT NOT NULL, - created_at BIGINT NOT NULL, - connection_id TEXT NOT NULL, - sender TEXT NOT NULL, - payload_json JSONB NOT NULL - ) - `); - - await this.pool.query(` - ALTER TABLE ${this.table("events")} - ALTER COLUMN id TYPE TEXT USING id::TEXT - `); - - await this.pool.query(` - ALTER TABLE ${this.table("events")} - ADD COLUMN IF NOT EXISTS event_index BIGINT - `); - - await this.pool.query(` - WITH ranked AS ( - SELECT id, ROW_NUMBER() OVER (PARTITION BY session_id ORDER BY created_at ASC, 
id ASC) AS ranked_index - FROM ${this.table("events")} - ) - UPDATE ${this.table("events")} AS current_events - SET event_index = ranked.ranked_index - FROM ranked - WHERE current_events.id = ranked.id - AND current_events.event_index IS NULL - `); - - await this.pool.query(` - ALTER TABLE ${this.table("events")} - ALTER COLUMN event_index SET NOT NULL - `); - - await this.pool.query(` - CREATE INDEX IF NOT EXISTS idx_events_session_order - ON ${this.table("events")}(session_id, event_index, id) - `); - } -} - -type SessionRow = { - id: string; - agent: string; - agent_session_id: string; - last_connection_id: string; - created_at: string | number; - destroyed_at: string | number | null; - session_init_json: unknown | null; -}; - -type EventRow = { - id: string | number; - event_index: string | number; - session_id: string; - created_at: string | number; - connection_id: string; - sender: string; - payload_json: unknown; -}; - -function decodeSessionRow(row: SessionRow): SessionRecord { - return { - id: row.id, - agent: row.agent, - agentSessionId: row.agent_session_id, - lastConnectionId: row.last_connection_id, - createdAt: parseInteger(row.created_at), - destroyedAt: row.destroyed_at === null ? undefined : parseInteger(row.destroyed_at), - sessionInit: row.session_init_json ? (row.session_init_json as SessionRecord["sessionInit"]) : undefined, - }; -} - -function decodeEventRow(row: EventRow): SessionEvent { - return { - id: String(row.id), - eventIndex: parseInteger(row.event_index), - sessionId: row.session_id, - createdAt: parseInteger(row.created_at), - connectionId: row.connection_id, - sender: parseSender(row.sender), - payload: row.payload_json as SessionEvent["payload"], - }; -} - -function normalizeLimit(limit: number | undefined): number { - if (!Number.isFinite(limit) || (limit ?? 
0) < 1) { - return DEFAULT_LIST_LIMIT; - } - return Math.floor(limit as number); -} - -function parseCursor(cursor: string | undefined): number { - if (!cursor) { - return 0; - } - const parsed = Number.parseInt(cursor, 10); - if (!Number.isFinite(parsed) || parsed < 0) { - return 0; - } - return parsed; -} - -function parseInteger(value: string | number): number { - const parsed = typeof value === "number" ? value : Number.parseInt(value, 10); - if (!Number.isFinite(parsed)) { - throw new Error(`Invalid integer value returned by postgres: ${String(value)}`); - } - return parsed; -} - -function parseSender(value: string): SessionEvent["sender"] { - if (value === "agent" || value === "client") { - return value; - } - throw new Error(`Invalid sender value returned by postgres: ${value}`); -} - -function normalizeSchema(schema: string): string { - if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(schema)) { - throw new Error(`Invalid schema name '${schema}'. Use letters, numbers, and underscores only.`); - } - return schema; -} +throw new Error( + "@sandbox-agent/persist-postgres has been deprecated and removed. " + + "Copy the reference implementation from examples/persist-postgres into your project instead. 
" + + "See https://github.com/rivet-dev/sandbox-agent/tree/main/examples/persist-postgres", +); diff --git a/sdks/persist-postgres/tests/integration.test.ts b/sdks/persist-postgres/tests/integration.test.ts deleted file mode 100644 index ddd4123..0000000 --- a/sdks/persist-postgres/tests/integration.test.ts +++ /dev/null @@ -1,245 +0,0 @@ -import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it } from "vitest"; -import { execFileSync } from "node:child_process"; -import { existsSync, mkdtempSync, rmSync } from "node:fs"; -import { dirname, join, resolve } from "node:path"; -import { fileURLToPath } from "node:url"; -import { tmpdir } from "node:os"; -import { randomUUID } from "node:crypto"; -import { Client } from "pg"; -import { SandboxAgent } from "sandbox-agent"; -import { spawnSandboxAgent, type SandboxAgentSpawnHandle } from "../../typescript/src/spawn.ts"; -import { prepareMockAgentDataHome } from "../../typescript/tests/helpers/mock-agent.ts"; -import { PostgresSessionPersistDriver } from "../src/index.ts"; - -const __dirname = dirname(fileURLToPath(import.meta.url)); - -function findBinary(): string | null { - if (process.env.SANDBOX_AGENT_BIN) { - return process.env.SANDBOX_AGENT_BIN; - } - - const cargoPaths = [resolve(__dirname, "../../../target/debug/sandbox-agent"), resolve(__dirname, "../../../target/release/sandbox-agent")]; - - for (const p of cargoPaths) { - if (existsSync(p)) { - return p; - } - } - - return null; -} - -const BINARY_PATH = findBinary(); -if (!BINARY_PATH) { - throw new Error("sandbox-agent binary not found. 
Build it (cargo build -p sandbox-agent) or set SANDBOX_AGENT_BIN."); -} -if (!process.env.SANDBOX_AGENT_BIN) { - process.env.SANDBOX_AGENT_BIN = BINARY_PATH; -} - -interface PostgresContainer { - containerId: string; - connectionString: string; -} - -describe("Postgres persistence driver", () => { - let handle: SandboxAgentSpawnHandle; - let baseUrl: string; - let token: string; - let dataHome: string; - let postgres: PostgresContainer | null = null; - - beforeAll(async () => { - dataHome = mkdtempSync(join(tmpdir(), "postgres-integration-")); - prepareMockAgentDataHome(dataHome); - - handle = await spawnSandboxAgent({ - enabled: true, - log: "silent", - timeoutMs: 30000, - env: { - XDG_DATA_HOME: dataHome, - HOME: dataHome, - USERPROFILE: dataHome, - APPDATA: join(dataHome, "AppData", "Roaming"), - LOCALAPPDATA: join(dataHome, "AppData", "Local"), - }, - }); - baseUrl = handle.baseUrl; - token = handle.token; - }); - - beforeEach(async () => { - postgres = await startPostgresContainer(); - }); - - afterEach(() => { - if (postgres) { - stopPostgresContainer(postgres.containerId); - postgres = null; - } - }); - - afterAll(async () => { - await handle.dispose(); - rmSync(dataHome, { recursive: true, force: true }); - }); - - it("persists session/event history across SDK instances and supports replay restore", async () => { - const connectionString = requirePostgres(postgres).connectionString; - - const persist1 = new PostgresSessionPersistDriver({ - connectionString, - }); - - const sdk1 = await SandboxAgent.connect({ - baseUrl, - token, - persist: persist1, - replayMaxEvents: 40, - replayMaxChars: 16000, - }); - - const created = await sdk1.createSession({ agent: "mock" }); - await created.prompt([{ type: "text", text: "postgres-first" }]); - const firstConnectionId = created.lastConnectionId; - - await sdk1.dispose(); - await persist1.close(); - - const persist2 = new PostgresSessionPersistDriver({ - connectionString, - }); - const sdk2 = await 
SandboxAgent.connect({ - baseUrl, - token, - persist: persist2, - replayMaxEvents: 40, - replayMaxChars: 16000, - }); - - const restored = await sdk2.resumeSession(created.id); - expect(restored.lastConnectionId).not.toBe(firstConnectionId); - - await restored.prompt([{ type: "text", text: "postgres-second" }]); - - const sessions = await sdk2.listSessions({ limit: 20 }); - expect(sessions.items.some((entry) => entry.id === created.id)).toBe(true); - - const events = await sdk2.getEvents({ sessionId: created.id, limit: 1000 }); - expect(events.items.length).toBeGreaterThan(0); - expect(events.items.every((event) => typeof event.id === "string")).toBe(true); - expect(events.items.every((event) => Number.isInteger(event.eventIndex))).toBe(true); - - for (let i = 1; i < events.items.length; i += 1) { - expect(events.items[i]!.eventIndex).toBeGreaterThanOrEqual(events.items[i - 1]!.eventIndex); - } - - const replayInjected = events.items.find((event) => { - if (event.sender !== "client") { - return false; - } - const payload = event.payload as Record; - const method = payload.method; - const params = payload.params as Record | undefined; - const prompt = Array.isArray(params?.prompt) ? 
params?.prompt : []; - const firstBlock = prompt[0] as Record | undefined; - return method === "session/prompt" && typeof firstBlock?.text === "string" && firstBlock.text.includes("Previous session history is replayed below"); - }); - expect(replayInjected).toBeTruthy(); - - await sdk2.dispose(); - await persist2.close(); - }); -}); - -async function startPostgresContainer(): Promise { - const name = `sandbox-agent-postgres-${randomUUID()}`; - const containerId = runDockerCommand([ - "run", - "-d", - "--rm", - "--name", - name, - "-e", - "POSTGRES_USER=postgres", - "-e", - "POSTGRES_PASSWORD=postgres", - "-e", - "POSTGRES_DB=sandboxagent", - "-p", - "127.0.0.1::5432", - "postgres:16-alpine", - ]); - - const portOutput = runDockerCommand(["port", containerId, "5432/tcp"]); - const port = parsePort(portOutput); - const connectionString = `postgres://postgres:postgres@127.0.0.1:${port}/sandboxagent`; - await waitForPostgres(connectionString); - - return { - containerId, - connectionString, - }; -} - -function stopPostgresContainer(containerId: string): void { - try { - runDockerCommand(["rm", "-f", containerId]); - } catch { - // Container may already be gone when test teardown runs. - } -} - -function runDockerCommand(args: string[]): string { - return execFileSync("docker", args, { - encoding: "utf8", - stdio: ["ignore", "pipe", "pipe"], - }).trim(); -} - -function parsePort(output: string): string { - const firstLine = output.split("\n")[0]?.trim() ?? 
""; - const match = firstLine.match(/:(\d+)$/); - if (!match) { - throw new Error(`Failed to parse docker port output: '${output}'`); - } - return match[1]; -} - -async function waitForPostgres(connectionString: string): Promise { - const timeoutMs = 30000; - const deadline = Date.now() + timeoutMs; - let lastError: unknown; - - while (Date.now() < deadline) { - const client = new Client({ connectionString }); - try { - await client.connect(); - await client.query("SELECT 1"); - await client.end(); - return; - } catch (error) { - lastError = error; - try { - await client.end(); - } catch { - // Ignore cleanup failures while retrying. - } - await delay(250); - } - } - - throw new Error(`Postgres container did not become ready: ${String(lastError)}`); -} - -function delay(ms: number): Promise { - return new Promise((resolvePromise) => setTimeout(resolvePromise, ms)); -} - -function requirePostgres(container: PostgresContainer | null): PostgresContainer { - if (!container) { - throw new Error("Postgres container was not initialized for this test."); - } - return container; -} diff --git a/sdks/persist-rivet/README.md b/sdks/persist-rivet/README.md new file mode 100644 index 0000000..ce93b8d --- /dev/null +++ b/sdks/persist-rivet/README.md @@ -0,0 +1,5 @@ +# @sandbox-agent/persist-rivet + +> **Deprecated:** This package has been deprecated and removed. + +Copy the driver source into your project. See the [multiplayer docs](https://github.com/rivet-dev/sandbox-agent/tree/main/docs/multiplayer.mdx) and the [session persistence docs](https://sandboxagent.dev/session-persistence) for guidance. 
diff --git a/sdks/persist-rivet/package.json b/sdks/persist-rivet/package.json index 723e3b4..047bea6 100644 --- a/sdks/persist-rivet/package.json +++ b/sdks/persist-rivet/package.json @@ -1,7 +1,7 @@ { "name": "@sandbox-agent/persist-rivet", "version": "0.3.2", - "description": "Rivet Actor persistence driver for the Sandbox Agent TypeScript SDK", + "description": "Rivet Actor persistence driver for the Sandbox Agent TypeScript SDK (DEPRECATED)", "license": "Apache-2.0", "repository": { "type": "git", @@ -16,30 +16,16 @@ "import": "./dist/index.js" } }, - "dependencies": { - "sandbox-agent": "workspace:*" - }, - "peerDependencies": { - "rivetkit": ">=0.5.0" - }, - "peerDependenciesMeta": { - "rivetkit": { - "optional": true - } - }, "files": [ "dist" ], "scripts": { "build": "tsup", - "typecheck": "tsc --noEmit", - "test": "vitest run", - "test:watch": "vitest" + "typecheck": "tsc --noEmit" }, "devDependencies": { "@types/node": "^22.0.0", "tsup": "^8.0.0", - "typescript": "^5.7.0", - "vitest": "^3.0.0" + "typescript": "^5.7.0" } } diff --git a/sdks/persist-rivet/src/index.ts b/sdks/persist-rivet/src/index.ts index d236040..87907c6 100644 --- a/sdks/persist-rivet/src/index.ts +++ b/sdks/persist-rivet/src/index.ts @@ -1,168 +1,5 @@ -import type { ListEventsRequest, ListPage, ListPageRequest, SessionEvent, SessionPersistDriver, SessionRecord } from "sandbox-agent"; - -/** Structural type compatible with rivetkit's ActorContext without importing it. */ -export interface ActorContextLike { - state: Record; -} - -export interface RivetPersistData { - sessions: Record; - events: Record; -} - -export type RivetPersistState = { - _sandboxAgentPersist: RivetPersistData; -}; - -export interface RivetSessionPersistDriverOptions { - /** Maximum number of sessions to retain. Oldest are evicted first. Default: 1024. */ - maxSessions?: number; - /** Maximum events per session. Oldest are trimmed first. Default: 500. 
*/ - maxEventsPerSession?: number; - /** Key on `c.state` where persist data is stored. Default: `"_sandboxAgentPersist"`. */ - stateKey?: string; -} - -const DEFAULT_MAX_SESSIONS = 1024; -const DEFAULT_MAX_EVENTS_PER_SESSION = 500; -const DEFAULT_LIST_LIMIT = 100; -const DEFAULT_STATE_KEY = "_sandboxAgentPersist"; - -export class RivetSessionPersistDriver implements SessionPersistDriver { - private readonly maxSessions: number; - private readonly maxEventsPerSession: number; - private readonly stateKey: string; - private readonly ctx: ActorContextLike; - - constructor(ctx: ActorContextLike, options: RivetSessionPersistDriverOptions = {}) { - this.ctx = ctx; - this.maxSessions = normalizeCap(options.maxSessions, DEFAULT_MAX_SESSIONS); - this.maxEventsPerSession = normalizeCap(options.maxEventsPerSession, DEFAULT_MAX_EVENTS_PER_SESSION); - this.stateKey = options.stateKey ?? DEFAULT_STATE_KEY; - - // Auto-initialize if absent; preserve existing data on actor wake. - if (!this.ctx.state[this.stateKey]) { - this.ctx.state[this.stateKey] = { sessions: {}, events: {} } satisfies RivetPersistData; - } - } - - private get data(): RivetPersistData { - return this.ctx.state[this.stateKey] as RivetPersistData; - } - - async getSession(id: string): Promise { - const session = this.data.sessions[id]; - return session ? 
cloneSessionRecord(session) : null; - } - - async listSessions(request: ListPageRequest = {}): Promise> { - const sorted = Object.values(this.data.sessions).sort((a, b) => { - if (a.createdAt !== b.createdAt) { - return a.createdAt - b.createdAt; - } - return a.id.localeCompare(b.id); - }); - const page = paginate(sorted, request); - return { - items: page.items.map(cloneSessionRecord), - nextCursor: page.nextCursor, - }; - } - - async updateSession(session: SessionRecord): Promise { - this.data.sessions[session.id] = { ...session }; - - if (!this.data.events[session.id]) { - this.data.events[session.id] = []; - } - - const ids = Object.keys(this.data.sessions); - if (ids.length <= this.maxSessions) { - return; - } - - const overflow = ids.length - this.maxSessions; - const removable = Object.values(this.data.sessions) - .sort((a, b) => { - if (a.createdAt !== b.createdAt) { - return a.createdAt - b.createdAt; - } - return a.id.localeCompare(b.id); - }) - .slice(0, overflow) - .map((s) => s.id); - - for (const sessionId of removable) { - delete this.data.sessions[sessionId]; - delete this.data.events[sessionId]; - } - } - - async listEvents(request: ListEventsRequest): Promise> { - const all = [...(this.data.events[request.sessionId] ?? [])].sort((a, b) => { - if (a.eventIndex !== b.eventIndex) { - return a.eventIndex - b.eventIndex; - } - return a.id.localeCompare(b.id); - }); - const page = paginate(all, request); - return { - items: page.items.map(cloneSessionEvent), - nextCursor: page.nextCursor, - }; - } - - async insertEvent(event: SessionEvent): Promise { - const events = this.data.events[event.sessionId] ?? []; - events.push(cloneSessionEvent(event)); - - if (events.length > this.maxEventsPerSession) { - events.splice(0, events.length - this.maxEventsPerSession); - } - - this.data.events[event.sessionId] = events; - } -} - -function cloneSessionRecord(session: SessionRecord): SessionRecord { - return { - ...session, - sessionInit: session.sessionInit ? 
(JSON.parse(JSON.stringify(session.sessionInit)) as SessionRecord["sessionInit"]) : undefined, - }; -} - -function cloneSessionEvent(event: SessionEvent): SessionEvent { - return { - ...event, - payload: JSON.parse(JSON.stringify(event.payload)) as SessionEvent["payload"], - }; -} - -function normalizeCap(value: number | undefined, fallback: number): number { - if (!Number.isFinite(value) || (value ?? 0) < 1) { - return fallback; - } - return Math.floor(value as number); -} - -function paginate(items: T[], request: ListPageRequest): ListPage { - const offset = parseCursor(request.cursor); - const limit = normalizeCap(request.limit, DEFAULT_LIST_LIMIT); - const slice = items.slice(offset, offset + limit); - const nextOffset = offset + slice.length; - return { - items: slice, - nextCursor: nextOffset < items.length ? String(nextOffset) : undefined, - }; -} - -function parseCursor(cursor: string | undefined): number { - if (!cursor) { - return 0; - } - const parsed = Number.parseInt(cursor, 10); - if (!Number.isFinite(parsed) || parsed < 0) { - return 0; - } - return parsed; -} +throw new Error( + "@sandbox-agent/persist-rivet has been deprecated and removed. " + + "Copy the reference implementation from docs/multiplayer.mdx into your project instead. 
" + + "See https://github.com/rivet-dev/sandbox-agent/tree/main/docs/multiplayer.mdx", +); diff --git a/sdks/persist-rivet/tests/driver.test.ts b/sdks/persist-rivet/tests/driver.test.ts deleted file mode 100644 index c16e733..0000000 --- a/sdks/persist-rivet/tests/driver.test.ts +++ /dev/null @@ -1,236 +0,0 @@ -import { describe, it, expect } from "vitest"; -import { RivetSessionPersistDriver } from "../src/index.ts"; -import type { RivetPersistData } from "../src/index.ts"; - -function makeCtx() { - return { state: {} as Record }; -} - -describe("RivetSessionPersistDriver", () => { - it("auto-initializes state on construction", () => { - const ctx = makeCtx(); - new RivetSessionPersistDriver(ctx); - const data = ctx.state._sandboxAgentPersist as RivetPersistData; - expect(data).toBeDefined(); - expect(data.sessions).toEqual({}); - expect(data.events).toEqual({}); - }); - - it("preserves existing state on construction (actor wake)", async () => { - const ctx = makeCtx(); - const driver1 = new RivetSessionPersistDriver(ctx); - - await driver1.updateSession({ - id: "s-1", - agent: "mock", - agentSessionId: "a-1", - lastConnectionId: "c-1", - createdAt: 100, - }); - - // Simulate actor wake: new driver instance, same state object - const driver2 = new RivetSessionPersistDriver(ctx); - const session = await driver2.getSession("s-1"); - expect(session?.id).toBe("s-1"); - expect(session?.createdAt).toBe(100); - }); - - it("stores and retrieves sessions", async () => { - const driver = new RivetSessionPersistDriver(makeCtx()); - - await driver.updateSession({ - id: "s-1", - agent: "mock", - agentSessionId: "a-1", - lastConnectionId: "c-1", - createdAt: 100, - }); - - await driver.updateSession({ - id: "s-2", - agent: "mock", - agentSessionId: "a-2", - lastConnectionId: "c-2", - createdAt: 200, - destroyedAt: 300, - }); - - const loaded = await driver.getSession("s-2"); - expect(loaded?.destroyedAt).toBe(300); - - const missing = await driver.getSession("s-nonexistent"); - 
expect(missing).toBeNull(); - }); - - it("pages sessions sorted by createdAt", async () => { - const driver = new RivetSessionPersistDriver(makeCtx()); - - await driver.updateSession({ - id: "s-1", - agent: "mock", - agentSessionId: "a-1", - lastConnectionId: "c-1", - createdAt: 100, - }); - - await driver.updateSession({ - id: "s-2", - agent: "mock", - agentSessionId: "a-2", - lastConnectionId: "c-2", - createdAt: 200, - }); - - const page1 = await driver.listSessions({ limit: 1 }); - expect(page1.items).toHaveLength(1); - expect(page1.items[0]?.id).toBe("s-1"); - expect(page1.nextCursor).toBeTruthy(); - - const page2 = await driver.listSessions({ cursor: page1.nextCursor, limit: 1 }); - expect(page2.items).toHaveLength(1); - expect(page2.items[0]?.id).toBe("s-2"); - expect(page2.nextCursor).toBeUndefined(); - }); - - it("stores and pages events", async () => { - const driver = new RivetSessionPersistDriver(makeCtx()); - - await driver.updateSession({ - id: "s-1", - agent: "mock", - agentSessionId: "a-1", - lastConnectionId: "c-1", - createdAt: 1, - }); - - await driver.insertEvent({ - id: "evt-1", - eventIndex: 1, - sessionId: "s-1", - createdAt: 1, - connectionId: "c-1", - sender: "client", - payload: { jsonrpc: "2.0", method: "session/prompt", params: { sessionId: "a-1" } }, - }); - - await driver.insertEvent({ - id: "evt-2", - eventIndex: 2, - sessionId: "s-1", - createdAt: 2, - connectionId: "c-1", - sender: "agent", - payload: { jsonrpc: "2.0", method: "session/update", params: { sessionId: "a-1" } }, - }); - - const eventsPage = await driver.listEvents({ sessionId: "s-1", limit: 10 }); - expect(eventsPage.items).toHaveLength(2); - expect(eventsPage.items[0]?.id).toBe("evt-1"); - expect(eventsPage.items[0]?.eventIndex).toBe(1); - expect(eventsPage.items[1]?.id).toBe("evt-2"); - expect(eventsPage.items[1]?.eventIndex).toBe(2); - }); - - it("evicts oldest sessions when maxSessions exceeded", async () => { - const driver = new 
RivetSessionPersistDriver(makeCtx(), { maxSessions: 2 }); - - await driver.updateSession({ - id: "s-1", - agent: "mock", - agentSessionId: "a-1", - lastConnectionId: "c-1", - createdAt: 100, - }); - - await driver.updateSession({ - id: "s-2", - agent: "mock", - agentSessionId: "a-2", - lastConnectionId: "c-2", - createdAt: 200, - }); - - // Adding a third session should evict the oldest (s-1) - await driver.updateSession({ - id: "s-3", - agent: "mock", - agentSessionId: "a-3", - lastConnectionId: "c-3", - createdAt: 300, - }); - - expect(await driver.getSession("s-1")).toBeNull(); - expect(await driver.getSession("s-2")).not.toBeNull(); - expect(await driver.getSession("s-3")).not.toBeNull(); - }); - - it("trims oldest events when maxEventsPerSession exceeded", async () => { - const driver = new RivetSessionPersistDriver(makeCtx(), { maxEventsPerSession: 2 }); - - await driver.updateSession({ - id: "s-1", - agent: "mock", - agentSessionId: "a-1", - lastConnectionId: "c-1", - createdAt: 1, - }); - - for (let i = 1; i <= 3; i++) { - await driver.insertEvent({ - id: `evt-${i}`, - eventIndex: i, - sessionId: "s-1", - createdAt: i, - connectionId: "c-1", - sender: "client", - payload: { jsonrpc: "2.0", method: "session/prompt", params: { sessionId: "a-1" } }, - }); - } - - const page = await driver.listEvents({ sessionId: "s-1" }); - expect(page.items).toHaveLength(2); - // Oldest event (evt-1) should be trimmed - expect(page.items[0]?.id).toBe("evt-2"); - expect(page.items[1]?.id).toBe("evt-3"); - }); - - it("clones data to prevent external mutation", async () => { - const driver = new RivetSessionPersistDriver(makeCtx()); - - await driver.updateSession({ - id: "s-1", - agent: "mock", - agentSessionId: "a-1", - lastConnectionId: "c-1", - createdAt: 1, - }); - - const s1 = await driver.getSession("s-1"); - const s2 = await driver.getSession("s-1"); - expect(s1).toEqual(s2); - expect(s1).not.toBe(s2); // Different object references - }); - - it("supports custom 
stateKey", async () => { - const ctx = makeCtx(); - const driver = new RivetSessionPersistDriver(ctx, { stateKey: "myPersist" }); - - await driver.updateSession({ - id: "s-1", - agent: "mock", - agentSessionId: "a-1", - lastConnectionId: "c-1", - createdAt: 1, - }); - - expect((ctx.state.myPersist as RivetPersistData).sessions["s-1"]).toBeDefined(); - expect(ctx.state._sandboxAgentPersist).toBeUndefined(); - }); - - it("returns empty results for unknown session events", async () => { - const driver = new RivetSessionPersistDriver(makeCtx()); - const page = await driver.listEvents({ sessionId: "nonexistent" }); - expect(page.items).toHaveLength(0); - expect(page.nextCursor).toBeUndefined(); - }); -}); diff --git a/sdks/persist-sqlite/README.md b/sdks/persist-sqlite/README.md new file mode 100644 index 0000000..07296fe --- /dev/null +++ b/sdks/persist-sqlite/README.md @@ -0,0 +1,5 @@ +# @sandbox-agent/persist-sqlite + +> **Deprecated:** This package has been deprecated and removed. + +Install `better-sqlite3` directly and copy the driver source into your project. See the [full example](https://github.com/rivet-dev/sandbox-agent/tree/main/examples/persist-sqlite) and the [session persistence docs](https://sandboxagent.dev/session-persistence) for guidance. 
diff --git a/sdks/persist-sqlite/package.json b/sdks/persist-sqlite/package.json index 852e384..6c08fec 100644 --- a/sdks/persist-sqlite/package.json +++ b/sdks/persist-sqlite/package.json @@ -1,7 +1,7 @@ { "name": "@sandbox-agent/persist-sqlite", "version": "0.3.2", - "description": "SQLite persistence driver for the Sandbox Agent TypeScript SDK", + "description": "SQLite persistence driver for the Sandbox Agent TypeScript SDK (DEPRECATED)", "license": "Apache-2.0", "repository": { "type": "git", @@ -16,24 +16,17 @@ "import": "./dist/index.js" } }, - "dependencies": { - "better-sqlite3": "^11.0.0", - "sandbox-agent": "workspace:*" - }, + "dependencies": {}, "files": [ "dist" ], "scripts": { "build": "tsup", - "typecheck": "tsc --noEmit", - "test": "vitest run", - "test:watch": "vitest" + "typecheck": "tsc --noEmit" }, "devDependencies": { - "@types/better-sqlite3": "^7.0.0", "@types/node": "^22.0.0", "tsup": "^8.0.0", - "typescript": "^5.7.0", - "vitest": "^3.0.0" + "typescript": "^5.7.0" } } diff --git a/sdks/persist-sqlite/src/index.ts b/sdks/persist-sqlite/src/index.ts index 379c4ef..fa76679 100644 --- a/sdks/persist-sqlite/src/index.ts +++ b/sdks/persist-sqlite/src/index.ts @@ -1,284 +1,5 @@ -import Database from "better-sqlite3"; -import type { ListEventsRequest, ListPage, ListPageRequest, SessionEvent, SessionPersistDriver, SessionRecord } from "sandbox-agent"; - -const DEFAULT_LIST_LIMIT = 100; - -export interface SQLiteSessionPersistDriverOptions { - filename?: string; -} - -export class SQLiteSessionPersistDriver implements SessionPersistDriver { - private readonly db: Database.Database; - - constructor(options: SQLiteSessionPersistDriverOptions = {}) { - this.db = new Database(options.filename ?? 
":memory:"); - this.initialize(); - } - - async getSession(id: string): Promise { - const row = this.db - .prepare( - `SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, session_init_json - FROM sessions WHERE id = ?`, - ) - .get(id) as SessionRow | undefined; - - if (!row) { - return null; - } - - return decodeSessionRow(row); - } - - async listSessions(request: ListPageRequest = {}): Promise> { - const offset = parseCursor(request.cursor); - const limit = normalizeLimit(request.limit); - - const rows = this.db - .prepare( - `SELECT id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, session_init_json - FROM sessions - ORDER BY created_at ASC, id ASC - LIMIT ? OFFSET ?`, - ) - .all(limit, offset) as SessionRow[]; - - const countRow = this.db.prepare(`SELECT COUNT(*) as count FROM sessions`).get() as { count: number }; - const nextOffset = offset + rows.length; - - return { - items: rows.map(decodeSessionRow), - nextCursor: nextOffset < countRow.count ? String(nextOffset) : undefined, - }; - } - - async updateSession(session: SessionRecord): Promise { - this.db - .prepare( - `INSERT INTO sessions ( - id, agent, agent_session_id, last_connection_id, created_at, destroyed_at, session_init_json - ) VALUES (?, ?, ?, ?, ?, ?, ?) - ON CONFLICT(id) DO UPDATE SET - agent = excluded.agent, - agent_session_id = excluded.agent_session_id, - last_connection_id = excluded.last_connection_id, - created_at = excluded.created_at, - destroyed_at = excluded.destroyed_at, - session_init_json = excluded.session_init_json`, - ) - .run( - session.id, - session.agent, - session.agentSessionId, - session.lastConnectionId, - session.createdAt, - session.destroyedAt ?? null, - session.sessionInit ? 
JSON.stringify(session.sessionInit) : null, - ); - } - - async listEvents(request: ListEventsRequest): Promise> { - const offset = parseCursor(request.cursor); - const limit = normalizeLimit(request.limit); - - const rows = this.db - .prepare( - `SELECT id, event_index, session_id, created_at, connection_id, sender, payload_json - FROM events - WHERE session_id = ? - ORDER BY event_index ASC, id ASC - LIMIT ? OFFSET ?`, - ) - .all(request.sessionId, limit, offset) as EventRow[]; - - const countRow = this.db.prepare(`SELECT COUNT(*) as count FROM events WHERE session_id = ?`).get(request.sessionId) as { count: number }; - - const nextOffset = offset + rows.length; - - return { - items: rows.map(decodeEventRow), - nextCursor: nextOffset < countRow.count ? String(nextOffset) : undefined, - }; - } - - async insertEvent(event: SessionEvent): Promise { - this.db - .prepare( - `INSERT INTO events ( - id, event_index, session_id, created_at, connection_id, sender, payload_json - ) VALUES (?, ?, ?, ?, ?, ?, ?) 
- ON CONFLICT(id) DO UPDATE SET - event_index = excluded.event_index, - session_id = excluded.session_id, - created_at = excluded.created_at, - connection_id = excluded.connection_id, - sender = excluded.sender, - payload_json = excluded.payload_json`, - ) - .run(event.id, event.eventIndex, event.sessionId, event.createdAt, event.connectionId, event.sender, JSON.stringify(event.payload)); - } - - close(): void { - this.db.close(); - } - - private initialize(): void { - this.db.exec(` - CREATE TABLE IF NOT EXISTS sessions ( - id TEXT PRIMARY KEY, - agent TEXT NOT NULL, - agent_session_id TEXT NOT NULL, - last_connection_id TEXT NOT NULL, - created_at INTEGER NOT NULL, - destroyed_at INTEGER, - session_init_json TEXT - ) - `); - - this.ensureEventsTable(); - } - - private ensureEventsTable(): void { - const tableInfo = this.db.prepare(`PRAGMA table_info(events)`).all() as TableInfoRow[]; - if (tableInfo.length === 0) { - this.createEventsTable(); - return; - } - - const idColumn = tableInfo.find((column) => column.name === "id"); - const hasEventIndex = tableInfo.some((column) => column.name === "event_index"); - const idType = (idColumn?.type ?? 
"").trim().toUpperCase(); - const idIsText = idType === "TEXT"; - - if (!idIsText || !hasEventIndex) { - this.rebuildEventsTable(hasEventIndex); - } - - this.db.exec(` - CREATE INDEX IF NOT EXISTS idx_events_session_order - ON events(session_id, event_index, id) - `); - } - - private createEventsTable(): void { - this.db.exec(` - CREATE TABLE IF NOT EXISTS events ( - id TEXT PRIMARY KEY, - event_index INTEGER NOT NULL, - session_id TEXT NOT NULL, - created_at INTEGER NOT NULL, - connection_id TEXT NOT NULL, - sender TEXT NOT NULL, - payload_json TEXT NOT NULL - ); - - CREATE INDEX IF NOT EXISTS idx_events_session_order - ON events(session_id, event_index, id) - `); - } - - private rebuildEventsTable(hasEventIndex: boolean): void { - this.db.exec(` - ALTER TABLE events RENAME TO events_legacy; - `); - - this.createEventsTable(); - - if (hasEventIndex) { - this.db.exec(` - INSERT INTO events (id, event_index, session_id, created_at, connection_id, sender, payload_json) - SELECT - CAST(id AS TEXT), - COALESCE(event_index, ROW_NUMBER() OVER (PARTITION BY session_id ORDER BY created_at ASC, id ASC)), - session_id, - created_at, - connection_id, - sender, - payload_json - FROM events_legacy - `); - } else { - this.db.exec(` - INSERT INTO events (id, event_index, session_id, created_at, connection_id, sender, payload_json) - SELECT - CAST(id AS TEXT), - ROW_NUMBER() OVER (PARTITION BY session_id ORDER BY created_at ASC, id ASC), - session_id, - created_at, - connection_id, - sender, - payload_json - FROM events_legacy - `); - } - - this.db.exec(`DROP TABLE events_legacy`); - } -} - -type SessionRow = { - id: string; - agent: string; - agent_session_id: string; - last_connection_id: string; - created_at: number; - destroyed_at: number | null; - session_init_json: string | null; -}; - -type EventRow = { - id: string; - event_index: number; - session_id: string; - created_at: number; - connection_id: string; - sender: "client" | "agent"; - payload_json: string; -}; - -type 
TableInfoRow = { - name: string; - type: string; -}; - -function decodeSessionRow(row: SessionRow): SessionRecord { - return { - id: row.id, - agent: row.agent, - agentSessionId: row.agent_session_id, - lastConnectionId: row.last_connection_id, - createdAt: row.created_at, - destroyedAt: row.destroyed_at ?? undefined, - sessionInit: row.session_init_json ? (JSON.parse(row.session_init_json) as SessionRecord["sessionInit"]) : undefined, - }; -} - -function decodeEventRow(row: EventRow): SessionEvent { - return { - id: row.id, - eventIndex: row.event_index, - sessionId: row.session_id, - createdAt: row.created_at, - connectionId: row.connection_id, - sender: row.sender, - payload: JSON.parse(row.payload_json), - }; -} - -function normalizeLimit(limit: number | undefined): number { - if (!Number.isFinite(limit) || (limit ?? 0) < 1) { - return DEFAULT_LIST_LIMIT; - } - return Math.floor(limit as number); -} - -function parseCursor(cursor: string | undefined): number { - if (!cursor) { - return 0; - } - const parsed = Number.parseInt(cursor, 10); - if (!Number.isFinite(parsed) || parsed < 0) { - return 0; - } - return parsed; -} +throw new Error( + "@sandbox-agent/persist-sqlite has been deprecated and removed. " + + "Copy the reference implementation from examples/persist-sqlite into your project instead. 
" + + "See https://github.com/rivet-dev/sandbox-agent/tree/main/examples/persist-sqlite", +); diff --git a/sdks/persist-sqlite/tests/integration.test.ts b/sdks/persist-sqlite/tests/integration.test.ts deleted file mode 100644 index 376406c..0000000 --- a/sdks/persist-sqlite/tests/integration.test.ts +++ /dev/null @@ -1,131 +0,0 @@ -import { describe, it, expect, beforeAll, afterAll } from "vitest"; -import { existsSync, mkdtempSync, rmSync } from "node:fs"; -import { dirname, join, resolve } from "node:path"; -import { fileURLToPath } from "node:url"; -import { tmpdir } from "node:os"; -import { SandboxAgent } from "sandbox-agent"; -import { spawnSandboxAgent, type SandboxAgentSpawnHandle } from "../../typescript/src/spawn.ts"; -import { prepareMockAgentDataHome } from "../../typescript/tests/helpers/mock-agent.ts"; -import { SQLiteSessionPersistDriver } from "../src/index.ts"; - -const __dirname = dirname(fileURLToPath(import.meta.url)); - -function findBinary(): string | null { - if (process.env.SANDBOX_AGENT_BIN) { - return process.env.SANDBOX_AGENT_BIN; - } - - const cargoPaths = [resolve(__dirname, "../../../target/debug/sandbox-agent"), resolve(__dirname, "../../../target/release/sandbox-agent")]; - - for (const p of cargoPaths) { - if (existsSync(p)) { - return p; - } - } - - return null; -} - -const BINARY_PATH = findBinary(); -if (!BINARY_PATH) { - throw new Error("sandbox-agent binary not found. 
Build it (cargo build -p sandbox-agent) or set SANDBOX_AGENT_BIN."); -} -if (!process.env.SANDBOX_AGENT_BIN) { - process.env.SANDBOX_AGENT_BIN = BINARY_PATH; -} - -describe("SQLite persistence driver", () => { - let handle: SandboxAgentSpawnHandle; - let baseUrl: string; - let token: string; - let dataHome: string; - - beforeAll(async () => { - dataHome = mkdtempSync(join(tmpdir(), "sqlite-integration-")); - prepareMockAgentDataHome(dataHome); - - handle = await spawnSandboxAgent({ - enabled: true, - log: "silent", - timeoutMs: 30000, - env: { - XDG_DATA_HOME: dataHome, - HOME: dataHome, - USERPROFILE: dataHome, - APPDATA: join(dataHome, "AppData", "Roaming"), - LOCALAPPDATA: join(dataHome, "AppData", "Local"), - }, - }); - baseUrl = handle.baseUrl; - token = handle.token; - }); - - afterAll(async () => { - await handle.dispose(); - rmSync(dataHome, { recursive: true, force: true }); - }); - - it("persists session/event history across SDK instances and supports replay restore", async () => { - const tempDir = mkdtempSync(join(tmpdir(), "sqlite-persist-")); - const dbPath = join(tempDir, "session-store.db"); - - const persist1 = new SQLiteSessionPersistDriver({ filename: dbPath }); - const sdk1 = await SandboxAgent.connect({ - baseUrl, - token, - persist: persist1, - replayMaxEvents: 40, - replayMaxChars: 16000, - }); - - const created = await sdk1.createSession({ agent: "mock" }); - await created.prompt([{ type: "text", text: "sqlite-first" }]); - const firstConnectionId = created.lastConnectionId; - - await sdk1.dispose(); - persist1.close(); - - const persist2 = new SQLiteSessionPersistDriver({ filename: dbPath }); - const sdk2 = await SandboxAgent.connect({ - baseUrl, - token, - persist: persist2, - replayMaxEvents: 40, - replayMaxChars: 16000, - }); - - const restored = await sdk2.resumeSession(created.id); - expect(restored.lastConnectionId).not.toBe(firstConnectionId); - - await restored.prompt([{ type: "text", text: "sqlite-second" }]); - - const sessions = 
await sdk2.listSessions({ limit: 20 }); - expect(sessions.items.some((entry) => entry.id === created.id)).toBe(true); - - const events = await sdk2.getEvents({ sessionId: created.id, limit: 1000 }); - expect(events.items.length).toBeGreaterThan(0); - expect(events.items.every((event) => typeof event.id === "string")).toBe(true); - expect(events.items.every((event) => Number.isInteger(event.eventIndex))).toBe(true); - - for (let i = 1; i < events.items.length; i += 1) { - expect(events.items[i]!.eventIndex).toBeGreaterThanOrEqual(events.items[i - 1]!.eventIndex); - } - - const replayInjected = events.items.find((event) => { - if (event.sender !== "client") { - return false; - } - const payload = event.payload as Record; - const method = payload.method; - const params = payload.params as Record | undefined; - const prompt = Array.isArray(params?.prompt) ? params?.prompt : []; - const firstBlock = prompt[0] as Record | undefined; - return method === "session/prompt" && typeof firstBlock?.text === "string" && firstBlock.text.includes("Previous session history is replayed below"); - }); - expect(replayInjected).toBeTruthy(); - - await sdk2.dispose(); - persist2.close(); - rmSync(tempDir, { recursive: true, force: true }); - }); -}); diff --git a/sdks/typescript/package.json b/sdks/typescript/package.json index 2c94592..9fd62fe 100644 --- a/sdks/typescript/package.json +++ b/sdks/typescript/package.json @@ -14,6 +14,74 @@ ".": { "types": "./dist/index.d.ts", "import": "./dist/index.js" + }, + "./local": { + "types": "./dist/providers/local.d.ts", + "import": "./dist/providers/local.js" + }, + "./e2b": { + "types": "./dist/providers/e2b.d.ts", + "import": "./dist/providers/e2b.js" + }, + "./daytona": { + "types": "./dist/providers/daytona.d.ts", + "import": "./dist/providers/daytona.js" + }, + "./docker": { + "types": "./dist/providers/docker.d.ts", + "import": "./dist/providers/docker.js" + }, + "./vercel": { + "types": "./dist/providers/vercel.d.ts", + "import": 
"./dist/providers/vercel.js" + }, + "./cloudflare": { + "types": "./dist/providers/cloudflare.d.ts", + "import": "./dist/providers/cloudflare.js" + }, + "./modal": { + "types": "./dist/providers/modal.d.ts", + "import": "./dist/providers/modal.js" + }, + "./computesdk": { + "types": "./dist/providers/computesdk.d.ts", + "import": "./dist/providers/computesdk.js" + } + }, + "peerDependencies": { + "@cloudflare/sandbox": ">=0.1.0", + "@daytonaio/sdk": ">=0.12.0", + "@e2b/code-interpreter": ">=1.0.0", + "@vercel/sandbox": ">=0.1.0", + "dockerode": ">=4.0.0", + "get-port": ">=7.0.0", + "modal": ">=0.1.0", + "computesdk": ">=0.1.0" + }, + "peerDependenciesMeta": { + "@cloudflare/sandbox": { + "optional": true + }, + "@daytonaio/sdk": { + "optional": true + }, + "@e2b/code-interpreter": { + "optional": true + }, + "@vercel/sandbox": { + "optional": true + }, + "dockerode": { + "optional": true + }, + "get-port": { + "optional": true + }, + "modal": { + "optional": true + }, + "computesdk": { + "optional": true } }, "dependencies": { @@ -33,8 +101,17 @@ "test:watch": "vitest" }, "devDependencies": { + "@cloudflare/sandbox": ">=0.1.0", + "@daytonaio/sdk": ">=0.12.0", + "@e2b/code-interpreter": ">=1.0.0", + "@types/dockerode": "^4.0.0", "@types/node": "^22.0.0", "@types/ws": "^8.18.1", + "@vercel/sandbox": ">=0.1.0", + "dockerode": ">=4.0.0", + "get-port": ">=7.0.0", + "modal": ">=0.1.0", + "computesdk": ">=0.1.0", "openapi-typescript": "^6.7.0", "tsup": "^8.0.0", "typescript": "^5.7.0", diff --git a/sdks/typescript/src/client.ts b/sdks/typescript/src/client.ts index 9945c0a..f64c833 100644 --- a/sdks/typescript/src/client.ts +++ b/sdks/typescript/src/client.ts @@ -22,7 +22,7 @@ import { type SetSessionModeResponse, type SetSessionModeRequest, } from "acp-http-client"; -import type { SandboxAgentSpawnHandle, SandboxAgentSpawnOptions } from "./spawn.ts"; +import type { SandboxProvider } from "./providers/types.ts"; import { type AcpServerListResponse, type AgentInfo, @@ 
-89,6 +89,7 @@ const HEALTH_WAIT_MIN_DELAY_MS = 500; const HEALTH_WAIT_MAX_DELAY_MS = 15_000; const HEALTH_WAIT_LOG_AFTER_MS = 5_000; const HEALTH_WAIT_LOG_EVERY_MS = 10_000; +const HEALTH_WAIT_ENSURE_SERVER_AFTER_FAILURES = 3; export interface SandboxAgentHealthWaitOptions { timeoutMs?: number; @@ -101,6 +102,8 @@ interface SandboxAgentConnectCommonOptions { replayMaxChars?: number; signal?: AbortSignal; token?: string; + skipHealthCheck?: boolean; + /** @deprecated Use skipHealthCheck instead. */ waitForHealth?: boolean | SandboxAgentHealthWaitOptions; } @@ -115,17 +118,24 @@ export type SandboxAgentConnectOptions = }); export interface SandboxAgentStartOptions { + sandbox: SandboxProvider; + sandboxId?: string; + skipHealthCheck?: boolean; fetch?: typeof fetch; headers?: HeadersInit; persist?: SessionPersistDriver; replayMaxEvents?: number; replayMaxChars?: number; - spawn?: SandboxAgentSpawnOptions | boolean; + signal?: AbortSignal; + token?: string; } export interface SessionCreateRequest { id?: string; agent: string; + /** Shorthand for `sessionInit.cwd`. Ignored when `sessionInit` is provided. */ + cwd?: string; + /** Full session init. When omitted, built from `cwd` (or default) with empty `mcpServers`. */ sessionInit?: Omit; model?: string; mode?: string; @@ -135,6 +145,9 @@ export interface SessionCreateRequest { export interface SessionResumeOrCreateRequest { id: string; agent: string; + /** Shorthand for `sessionInit.cwd`. Ignored when `sessionInit` is provided. */ + cwd?: string; + /** Full session init. When omitted, built from `cwd` (or default) with empty `mcpServers`. 
*/ sessionInit?: Omit; model?: string; mode?: string; @@ -824,12 +837,14 @@ export class SandboxAgent { private readonly defaultHeaders?: HeadersInit; private readonly healthWait: NormalizedHealthWaitOptions; private readonly healthWaitAbortController = new AbortController(); + private sandboxProvider?: SandboxProvider; + private sandboxProviderId?: string; + private sandboxProviderRawId?: string; private readonly persist: SessionPersistDriver; private readonly replayMaxEvents: number; private readonly replayMaxChars: number; - private spawnHandle?: SandboxAgentSpawnHandle; private healthPromise?: Promise; private healthError?: Error; private disposed = false; @@ -857,7 +872,7 @@ export class SandboxAgent { } this.fetcher = resolvedFetch; this.defaultHeaders = options.headers; - this.healthWait = normalizeHealthWaitOptions(options.waitForHealth, options.signal); + this.healthWait = normalizeHealthWaitOptions(options.skipHealthCheck, options.waitForHealth, options.signal); this.persist = options.persist ?? new InMemorySessionPersistDriver(); this.replayMaxEvents = normalizePositiveInt(options.replayMaxEvents, DEFAULT_REPLAY_MAX_EVENTS); @@ -870,29 +885,79 @@ export class SandboxAgent { return new SandboxAgent(options); } - static async start(options: SandboxAgentStartOptions = {}): Promise { - const spawnOptions = normalizeSpawnOptions(options.spawn, true); - if (!spawnOptions.enabled) { - throw new Error("SandboxAgent.start requires spawn to be enabled."); + static async start(options: SandboxAgentStartOptions): Promise { + const provider = options.sandbox; + if (!provider.getUrl && !provider.getFetch) { + throw new Error(`Sandbox provider '${provider.name}' must implement getUrl() or getFetch().`); } - const { spawnSandboxAgent } = await import("./spawn.js"); - const resolvedFetch = options.fetch ?? globalThis.fetch?.bind(globalThis); - const handle = await spawnSandboxAgent(spawnOptions, resolvedFetch); + const existingSandbox = options.sandboxId ? 
parseSandboxProviderId(options.sandboxId) : null; - const client = new SandboxAgent({ - baseUrl: handle.baseUrl, - token: handle.token, - fetch: options.fetch, - headers: options.headers, - waitForHealth: false, - persist: options.persist, - replayMaxEvents: options.replayMaxEvents, - replayMaxChars: options.replayMaxChars, - }); + if (existingSandbox && existingSandbox.provider !== provider.name) { + throw new Error( + `SandboxAgent.start received sandboxId '${options.sandboxId}' for provider '${existingSandbox.provider}', but the configured provider is '${provider.name}'.`, + ); + } - client.spawnHandle = handle; - return client; + const rawSandboxId = existingSandbox?.rawId ?? (await provider.create()); + const prefixedSandboxId = `${provider.name}/${rawSandboxId}`; + const createdSandbox = !existingSandbox; + + if (existingSandbox) { + await provider.ensureServer?.(rawSandboxId); + } + + try { + const fetcher = await resolveProviderFetch(provider, rawSandboxId); + const baseUrl = provider.getUrl ? await provider.getUrl(rawSandboxId) : undefined; + const providerFetch = options.fetch ?? fetcher; + const commonConnectOptions = { + headers: options.headers, + persist: options.persist, + replayMaxEvents: options.replayMaxEvents, + replayMaxChars: options.replayMaxChars, + signal: options.signal, + skipHealthCheck: options.skipHealthCheck, + token: options.token ?? (await resolveProviderToken(provider, rawSandboxId)), + }; + + const client = providerFetch + ? new SandboxAgent({ + ...commonConnectOptions, + baseUrl, + fetch: providerFetch, + }) + : new SandboxAgent({ + ...commonConnectOptions, + baseUrl: requireSandboxBaseUrl(baseUrl, provider.name), + }); + + client.sandboxProvider = provider; + client.sandboxProviderId = prefixedSandboxId; + client.sandboxProviderRawId = rawSandboxId; + return client; + } catch (error) { + if (createdSandbox) { + try { + await provider.destroy(rawSandboxId); + } catch { + // Best-effort cleanup if connect fails after provisioning. 
+ } + } + throw error; + } + } + + get sandboxId(): string | undefined { + return this.sandboxProviderId; + } + + get sandbox(): SandboxProvider | undefined { + return this.sandboxProvider; + } + + get inspectorUrl(): string { + return `${this.baseUrl.replace(/\/+$/, "")}/ui/`; } async dispose(): Promise { @@ -922,10 +987,23 @@ export class SandboxAgent { await connection.close(); }), ); + } - if (this.spawnHandle) { - await this.spawnHandle.dispose(); - this.spawnHandle = undefined; + async destroySandbox(): Promise { + const provider = this.sandboxProvider; + const rawSandboxId = this.sandboxProviderRawId; + + try { + if (provider && rawSandboxId) { + await provider.destroy(rawSandboxId); + } else if (!provider || !rawSandboxId) { + throw new Error("SandboxAgent is not attached to a provisioned sandbox."); + } + } finally { + await this.dispose(); + this.sandboxProvider = undefined; + this.sandboxProviderId = undefined; + this.sandboxProviderRawId = undefined; } } @@ -956,7 +1034,7 @@ export class SandboxAgent { const localSessionId = request.id?.trim() || randomId(); const live = await this.getLiveConnection(request.agent.trim()); - const sessionInit = normalizeSessionInit(request.sessionInit); + const sessionInit = normalizeSessionInit(request.sessionInit, request.cwd); const response = await live.createRemoteSession(localSessionId, sessionInit); @@ -966,6 +1044,7 @@ export class SandboxAgent { agentSessionId: response.sessionId, lastConnectionId: live.connectionId, createdAt: nowMs(), + sandboxId: this.sandboxProviderId, sessionInit, configOptions: cloneConfigOptions(response.configOptions), modes: cloneModes(response.modes), @@ -1692,7 +1771,7 @@ export class SandboxAgent { }; try { - await this.persist.insertEvent(event); + await this.persist.insertEvent(localSessionId, event); break; } catch (error) { if (!isSessionEventIndexConflict(error) || attempt === MAX_EVENT_INDEX_INSERT_RETRIES - 1) { @@ -2040,6 +2119,7 @@ export class SandboxAgent { let delayMs = 
HEALTH_WAIT_MIN_DELAY_MS; let nextLogAt = startedAt + HEALTH_WAIT_LOG_AFTER_MS; let lastError: unknown; + let consecutiveFailures = 0; while (!this.disposed && (deadline === undefined || Date.now() < deadline)) { throwIfAborted(signal); @@ -2050,11 +2130,22 @@ export class SandboxAgent { return; } lastError = new Error(`Unexpected health response: ${JSON.stringify(health)}`); + consecutiveFailures++; } catch (error) { if (isAbortError(error)) { throw error; } lastError = error; + consecutiveFailures++; + } + + if (consecutiveFailures >= HEALTH_WAIT_ENSURE_SERVER_AFTER_FAILURES && this.sandboxProvider?.ensureServer && this.sandboxProviderRawId) { + try { + await this.sandboxProvider.ensureServer(this.sandboxProviderRawId); + } catch { + // Best-effort; the next health check will determine if it worked. + } + consecutiveFailures = 0; } const now = Date.now(); @@ -2255,17 +2346,17 @@ function toAgentQuery(options: AgentQueryOptions | undefined): Record | undefined): Omit { +function normalizeSessionInit(value: Omit | undefined, cwdShorthand?: string): Omit { if (!value) { return { - cwd: defaultCwd(), + cwd: cwdShorthand ?? defaultCwd(), mcpServers: [], }; } return { ...value, - cwd: value.cwd ?? defaultCwd(), + cwd: value.cwd ?? cwdShorthand ?? defaultCwd(), mcpServers: value.mcpServers ?? 
[], }; } @@ -2405,16 +2496,23 @@ function normalizePositiveInt(value: number | undefined, fallback: number): numb return Math.floor(value as number); } -function normalizeHealthWaitOptions(value: boolean | SandboxAgentHealthWaitOptions | undefined, signal: AbortSignal | undefined): NormalizedHealthWaitOptions { - if (value === false) { +function normalizeHealthWaitOptions( + skipHealthCheck: boolean | undefined, + waitForHealth: boolean | SandboxAgentHealthWaitOptions | undefined, + signal: AbortSignal | undefined, +): NormalizedHealthWaitOptions { + if (skipHealthCheck === true || waitForHealth === false) { return { enabled: false }; } - if (value === true || value === undefined) { + if (waitForHealth === true || waitForHealth === undefined) { return { enabled: true, signal }; } - const timeoutMs = typeof value.timeoutMs === "number" && Number.isFinite(value.timeoutMs) && value.timeoutMs > 0 ? Math.floor(value.timeoutMs) : undefined; + const timeoutMs = + typeof waitForHealth.timeoutMs === "number" && Number.isFinite(waitForHealth.timeoutMs) && waitForHealth.timeoutMs > 0 + ? Math.floor(waitForHealth.timeoutMs) + : undefined; return { enabled: true, @@ -2423,24 +2521,47 @@ function normalizeHealthWaitOptions(value: boolean | SandboxAgentHealthWaitOptio }; } -function normalizeSpawnOptions( - spawn: SandboxAgentSpawnOptions | boolean | undefined, - defaultEnabled: boolean, -): SandboxAgentSpawnOptions & { enabled: boolean } { - if (spawn === false) { - return { enabled: false }; - } - - if (spawn === true || spawn === undefined) { - return { enabled: defaultEnabled }; +function parseSandboxProviderId(sandboxId: string): { provider: string; rawId: string } { + const slashIndex = sandboxId.indexOf("/"); + if (slashIndex < 1 || slashIndex === sandboxId.length - 1) { + throw new Error(`Sandbox IDs must be prefixed as "{provider}/{id}". Received '${sandboxId}'.`); } return { - ...spawn, - enabled: spawn.enabled ?? 
defaultEnabled, + provider: sandboxId.slice(0, slashIndex), + rawId: sandboxId.slice(slashIndex + 1), }; } +function requireSandboxBaseUrl(baseUrl: string | undefined, providerName: string): string { + if (!baseUrl) { + throw new Error(`Sandbox provider '${providerName}' did not return a base URL.`); + } + return baseUrl; +} + +async function resolveProviderFetch(provider: SandboxProvider, rawSandboxId: string): Promise { + if (provider.getFetch) { + return await provider.getFetch(rawSandboxId); + } + + return undefined; +} + +async function resolveProviderToken(provider: SandboxProvider, rawSandboxId: string): Promise { + const maybeGetToken = ( + provider as SandboxProvider & { + getToken?: (sandboxId: string) => string | undefined | Promise; + } + ).getToken; + if (typeof maybeGetToken !== "function") { + return undefined; + } + + const token = await maybeGetToken.call(provider, rawSandboxId); + return typeof token === "string" && token ? token : undefined; +} + async function readProblem(response: Response): Promise { try { const text = await response.clone().text(); diff --git a/sdks/typescript/src/index.ts b/sdks/typescript/src/index.ts index 99bc1b6..f0ebe2e 100644 --- a/sdks/typescript/src/index.ts +++ b/sdks/typescript/src/index.ts @@ -38,6 +38,7 @@ export type { export type { InspectorUrlOptions } from "./inspector.ts"; export { InMemorySessionPersistDriver } from "./types.ts"; +export type { SandboxProvider } from "./providers/types.ts"; export type { AcpEnvelope, diff --git a/sdks/typescript/src/providers/cloudflare.ts b/sdks/typescript/src/providers/cloudflare.ts new file mode 100644 index 0000000..c17adfc --- /dev/null +++ b/sdks/typescript/src/providers/cloudflare.ts @@ -0,0 +1,79 @@ +import type { SandboxProvider } from "./types.ts"; + +const DEFAULT_AGENT_PORT = 3000; + +export interface CloudflareSandboxClient { + create?(options?: Record): Promise<{ id?: string; sandboxId?: string }>; + connect?( + sandboxId: string, + options?: Record, + ): 
Promise<{ + close?(): Promise; + stop?(): Promise; + containerFetch(input: RequestInfo | URL, init?: RequestInit, port?: number): Promise; + }>; +} + +export interface CloudflareProviderOptions { + sdk: CloudflareSandboxClient; + create?: Record | (() => Record | Promise>); + agentPort?: number; +} + +async function resolveCreateOptions(value: CloudflareProviderOptions["create"]): Promise> { + if (!value) { + return {}; + } + if (typeof value === "function") { + return await value(); + } + return value; +} + +export function cloudflare(options: CloudflareProviderOptions): SandboxProvider { + const agentPort = options.agentPort ?? DEFAULT_AGENT_PORT; + const sdk = options.sdk; + + return { + name: "cloudflare", + async create(): Promise { + if (typeof sdk.create !== "function") { + throw new Error('sandbox provider "cloudflare" requires a sdk with a `create()` method.'); + } + const sandbox = await sdk.create(await resolveCreateOptions(options.create)); + const sandboxId = sandbox.sandboxId ?? sandbox.id; + if (!sandboxId) { + throw new Error("cloudflare sandbox did not return an id"); + } + return sandboxId; + }, + async destroy(sandboxId: string): Promise { + if (typeof sdk.connect !== "function") { + throw new Error('sandbox provider "cloudflare" requires a sdk with a `connect()` method.'); + } + const sandbox = await sdk.connect(sandboxId); + if (typeof sandbox.close === "function") { + await sandbox.close(); + return; + } + if (typeof sandbox.stop === "function") { + await sandbox.stop(); + } + }, + async getFetch(sandboxId: string): Promise { + if (typeof sdk.connect !== "function") { + throw new Error('sandbox provider "cloudflare" requires a sdk with a `connect()` method.'); + } + const sandbox = await sdk.connect(sandboxId); + return async (input, init) => + sandbox.containerFetch( + input, + { + ...(init ?? 
{}), + signal: undefined, + }, + agentPort, + ); + }, + }; +} diff --git a/sdks/typescript/src/providers/computesdk.ts b/sdks/typescript/src/providers/computesdk.ts new file mode 100644 index 0000000..7bca7ca --- /dev/null +++ b/sdks/typescript/src/providers/computesdk.ts @@ -0,0 +1,60 @@ +import { compute } from "computesdk"; +import type { SandboxProvider } from "./types.ts"; +import { DEFAULT_AGENTS, SANDBOX_AGENT_INSTALL_SCRIPT } from "./shared.ts"; + +const DEFAULT_AGENT_PORT = 3000; + +export interface ComputeSdkProviderOptions { + create?: { + envs?: Record; + }; + agentPort?: number; +} + +export function computesdk(options: ComputeSdkProviderOptions = {}): SandboxProvider { + const agentPort = options.agentPort ?? DEFAULT_AGENT_PORT; + + return { + name: "computesdk", + async create(): Promise { + const envs = options.create?.envs; + const sandbox = await compute.sandbox.create({ + envs: envs && Object.keys(envs).length > 0 ? envs : undefined, + }); + + const run = async (cmd: string, runOptions?: { background?: boolean }) => { + const result = await sandbox.runCommand(cmd, runOptions); + if (typeof result?.exitCode === "number" && result.exitCode !== 0) { + throw new Error(`computesdk command failed: ${cmd} (exit ${result.exitCode})\n${result.stderr || ""}`); + } + return result; + }; + + await run(`curl -fsSL ${SANDBOX_AGENT_INSTALL_SCRIPT} | sh`); + for (const agent of DEFAULT_AGENTS) { + await run(`sandbox-agent install-agent ${agent}`); + } + await run(`sandbox-agent server --no-token --host 0.0.0.0 --port ${agentPort}`, { + background: true, + }); + + return sandbox.sandboxId; + }, + async destroy(sandboxId: string): Promise { + const sandbox = await compute.sandbox.getById(sandboxId); + if (sandbox) await sandbox.destroy(); + }, + async getUrl(sandboxId: string): Promise { + const sandbox = await compute.sandbox.getById(sandboxId); + if (!sandbox) throw new Error(`computesdk sandbox not found: ${sandboxId}`); + return sandbox.getUrl({ port: 
agentPort }); + }, + async ensureServer(sandboxId: string): Promise { + const sandbox = await compute.sandbox.getById(sandboxId); + if (!sandbox) throw new Error(`computesdk sandbox not found: ${sandboxId}`); + await sandbox.runCommand(`sandbox-agent server --no-token --host 0.0.0.0 --port ${agentPort}`, { + background: true, + }); + }, + }; +} diff --git a/sdks/typescript/src/providers/daytona.ts b/sdks/typescript/src/providers/daytona.ts new file mode 100644 index 0000000..19026de --- /dev/null +++ b/sdks/typescript/src/providers/daytona.ts @@ -0,0 +1,67 @@ +import { Daytona } from "@daytonaio/sdk"; +import type { SandboxProvider } from "./types.ts"; +import { DEFAULT_SANDBOX_AGENT_IMAGE, buildServerStartCommand } from "./shared.ts"; + +const DEFAULT_AGENT_PORT = 3000; +const DEFAULT_PREVIEW_TTL_SECONDS = 4 * 60 * 60; + +type DaytonaCreateParams = NonNullable[0]>; + +type DaytonaCreateOverrides = Partial; + +export interface DaytonaProviderOptions { + create?: DaytonaCreateOverrides | (() => DaytonaCreateOverrides | Promise); + image?: string; + agentPort?: number; + previewTtlSeconds?: number; + deleteTimeoutSeconds?: number; +} + +async function resolveCreateOptions(value: DaytonaProviderOptions["create"]): Promise { + if (!value) return undefined; + if (typeof value === "function") return await value(); + return value; +} + +export function daytona(options: DaytonaProviderOptions = {}): SandboxProvider { + const agentPort = options.agentPort ?? DEFAULT_AGENT_PORT; + const image = options.image ?? DEFAULT_SANDBOX_AGENT_IMAGE; + const previewTtlSeconds = options.previewTtlSeconds ?? 
DEFAULT_PREVIEW_TTL_SECONDS; + const client = new Daytona(); + + return { + name: "daytona", + async create(): Promise { + const createOpts = await resolveCreateOptions(options.create); + const sandbox = await client.create({ + image, + autoStopInterval: 0, + ...createOpts, + } as DaytonaCreateParams); + await sandbox.process.executeCommand(buildServerStartCommand(agentPort)); + return sandbox.id; + }, + async destroy(sandboxId: string): Promise { + const sandbox = await client.get(sandboxId); + if (!sandbox) { + return; + } + await sandbox.delete(options.deleteTimeoutSeconds); + }, + async getUrl(sandboxId: string): Promise { + const sandbox = await client.get(sandboxId); + if (!sandbox) { + throw new Error(`daytona sandbox not found: ${sandboxId}`); + } + const preview = await sandbox.getSignedPreviewUrl(agentPort, previewTtlSeconds); + return typeof preview === "string" ? preview : preview.url; + }, + async ensureServer(sandboxId: string): Promise { + const sandbox = await client.get(sandboxId); + if (!sandbox) { + throw new Error(`daytona sandbox not found: ${sandboxId}`); + } + await sandbox.process.executeCommand(buildServerStartCommand(agentPort)); + }, + }; +} diff --git a/sdks/typescript/src/providers/docker.ts b/sdks/typescript/src/providers/docker.ts new file mode 100644 index 0000000..9e49687 --- /dev/null +++ b/sdks/typescript/src/providers/docker.ts @@ -0,0 +1,85 @@ +import Docker from "dockerode"; +import getPort from "get-port"; +import type { SandboxProvider } from "./types.ts"; +import { DEFAULT_SANDBOX_AGENT_IMAGE } from "./shared.ts"; + +const DEFAULT_HOST = "127.0.0.1"; +const DEFAULT_AGENT_PORT = 3000; + +export interface DockerProviderOptions { + image?: string; + host?: string; + agentPort?: number; + env?: string[] | (() => string[] | Promise); + binds?: string[] | (() => string[] | Promise); + createContainerOptions?: Record; +} + +async function resolveValue(value: T | (() => T | Promise) | undefined, fallback: T): Promise { + if (value 
=== undefined) { + return fallback; + } + if (typeof value === "function") { + return await (value as () => T | Promise)(); + } + return value; +} + +function extractMappedPort( + inspect: { NetworkSettings?: { Ports?: Record | null | undefined> } }, + containerPort: number, +): number { + const hostPort = inspect.NetworkSettings?.Ports?.[`${containerPort}/tcp`]?.[0]?.HostPort; + if (!hostPort) { + throw new Error(`docker sandbox-agent port ${containerPort} is not published`); + } + return Number(hostPort); +} + +export function docker(options: DockerProviderOptions = {}): SandboxProvider { + const image = options.image ?? DEFAULT_SANDBOX_AGENT_IMAGE; + const host = options.host ?? DEFAULT_HOST; + const agentPort = options.agentPort ?? DEFAULT_AGENT_PORT; + const client = new Docker({ socketPath: "/var/run/docker.sock" }); + + return { + name: "docker", + async create(): Promise { + const hostPort = await getPort(); + const env = await resolveValue(options.env, []); + const binds = await resolveValue(options.binds, []); + + const container = await client.createContainer({ + Image: image, + Cmd: ["server", "--no-token", "--host", "0.0.0.0", "--port", String(agentPort)], + Env: env, + ExposedPorts: { [`${agentPort}/tcp`]: {} }, + HostConfig: { + AutoRemove: true, + Binds: binds, + PortBindings: { + [`${agentPort}/tcp`]: [{ HostPort: String(hostPort) }], + }, + }, + ...(options.createContainerOptions ?? 
{}), + }); + + await container.start(); + return container.id; + }, + async destroy(sandboxId: string): Promise { + const container = client.getContainer(sandboxId); + try { + await container.stop({ t: 5 }); + } catch {} + try { + await container.remove({ force: true }); + } catch {} + }, + async getUrl(sandboxId: string): Promise { + const container = client.getContainer(sandboxId); + const hostPort = extractMappedPort(await container.inspect(), agentPort); + return `http://${host}:${hostPort}`; + }, + }; +} diff --git a/sdks/typescript/src/providers/e2b.ts b/sdks/typescript/src/providers/e2b.ts new file mode 100644 index 0000000..84d767c --- /dev/null +++ b/sdks/typescript/src/providers/e2b.ts @@ -0,0 +1,62 @@ +import { Sandbox } from "@e2b/code-interpreter"; +import type { SandboxProvider } from "./types.ts"; +import { DEFAULT_AGENTS, SANDBOX_AGENT_INSTALL_SCRIPT } from "./shared.ts"; + +const DEFAULT_AGENT_PORT = 3000; + +export interface E2BProviderOptions { + create?: Record | (() => Record | Promise>); + connect?: Record | ((sandboxId: string) => Record | Promise>); + agentPort?: number; +} + +async function resolveOptions(value: E2BProviderOptions["create"] | E2BProviderOptions["connect"], sandboxId?: string): Promise> { + if (!value) return {}; + if (typeof value === "function") { + if (sandboxId) { + return await (value as (id: string) => Record | Promise>)(sandboxId); + } + return await (value as () => Record | Promise>)(); + } + return value; +} + +export function e2b(options: E2BProviderOptions = {}): SandboxProvider { + const agentPort = options.agentPort ?? 
DEFAULT_AGENT_PORT; + + return { + name: "e2b", + async create(): Promise { + const createOpts = await resolveOptions(options.create); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const sandbox = await Sandbox.create({ allowInternetAccess: true, ...createOpts } as any); + + await sandbox.commands.run(`curl -fsSL ${SANDBOX_AGENT_INSTALL_SCRIPT} | sh`).then((r) => { + if (r.exitCode !== 0) throw new Error(`e2b install failed:\n${r.stderr}`); + }); + for (const agent of DEFAULT_AGENTS) { + await sandbox.commands.run(`sandbox-agent install-agent ${agent}`).then((r) => { + if (r.exitCode !== 0) throw new Error(`e2b agent install failed: ${agent}\n${r.stderr}`); + }); + } + await sandbox.commands.run(`sandbox-agent server --no-token --host 0.0.0.0 --port ${agentPort}`, { background: true, timeoutMs: 0 }); + + return sandbox.sandboxId; + }, + async destroy(sandboxId: string): Promise { + const connectOpts = await resolveOptions(options.connect, sandboxId); + const sandbox = await Sandbox.connect(sandboxId, connectOpts as any); + await sandbox.kill(); + }, + async getUrl(sandboxId: string): Promise { + const connectOpts = await resolveOptions(options.connect, sandboxId); + const sandbox = await Sandbox.connect(sandboxId, connectOpts as any); + return `https://${sandbox.getHost(agentPort)}`; + }, + async ensureServer(sandboxId: string): Promise { + const connectOpts = await resolveOptions(options.connect, sandboxId); + const sandbox = await Sandbox.connect(sandboxId, connectOpts as any); + await sandbox.commands.run(`sandbox-agent server --no-token --host 0.0.0.0 --port ${agentPort}`, { background: true, timeoutMs: 0 }); + }, + }; +} diff --git a/sdks/typescript/src/providers/local.ts b/sdks/typescript/src/providers/local.ts new file mode 100644 index 0000000..18fc3d4 --- /dev/null +++ b/sdks/typescript/src/providers/local.ts @@ -0,0 +1,84 @@ +import { spawnSandboxAgent, type SandboxAgentSpawnHandle, type SandboxAgentSpawnLogMode, type 
SandboxAgentSpawnOptions } from "../spawn.ts"; +import type { SandboxProvider } from "./types.ts"; + +export interface LocalProviderOptions { + host?: string; + port?: number; + token?: string; + binaryPath?: string; + log?: SandboxAgentSpawnLogMode; + env?: Record; +} + +const localSandboxes = new Map(); + +type LocalSandboxProvider = SandboxProvider & { + getToken(sandboxId: string): Promise; +}; + +export function local(options: LocalProviderOptions = {}): SandboxProvider { + const provider: LocalSandboxProvider = { + name: "local", + async create(): Promise { + const handle = await spawnSandboxAgent( + { + host: options.host, + port: options.port, + token: options.token, + binaryPath: options.binaryPath, + log: options.log, + env: options.env, + } satisfies SandboxAgentSpawnOptions, + globalThis.fetch?.bind(globalThis), + ); + + const rawSandboxId = baseUrlToSandboxId(handle.baseUrl); + localSandboxes.set(rawSandboxId, handle); + return rawSandboxId; + }, + async destroy(sandboxId: string): Promise { + const handle = localSandboxes.get(sandboxId); + if (!handle) { + return; + } + localSandboxes.delete(sandboxId); + await handle.dispose(); + }, + async getUrl(sandboxId: string): Promise { + return `http://${sandboxId}`; + }, + async getFetch(sandboxId: string): Promise { + const handle = localSandboxes.get(sandboxId); + const token = options.token ?? 
handle?.token; + const fetcher = globalThis.fetch?.bind(globalThis); + if (!fetcher) { + throw new Error("Fetch API is not available; provide a fetch implementation."); + } + + if (!token) { + return fetcher; + } + + return async (input, init) => { + const request = new Request(input, init); + const targetUrl = new URL(request.url); + targetUrl.protocol = "http:"; + targetUrl.host = sandboxId; + const headers = new Headers(request.headers); + if (!headers.has("authorization")) { + headers.set("authorization", `Bearer ${token}`); + } + const forwarded = new Request(targetUrl.toString(), request); + return fetcher(new Request(forwarded, { headers })); + }; + }, + async getToken(sandboxId: string): Promise { + return options.token ?? localSandboxes.get(sandboxId)?.token; + }, + }; + return provider; +} + +function baseUrlToSandboxId(baseUrl: string): string { + return new URL(baseUrl).host; +} diff --git a/sdks/typescript/src/providers/modal.ts b/sdks/typescript/src/providers/modal.ts new file mode 100644 index 0000000..394272b --- /dev/null +++ b/sdks/typescript/src/providers/modal.ts @@ -0,0 +1,74 @@ +import { ModalClient } from "modal"; +import type { SandboxProvider } from "./types.ts"; +import { DEFAULT_AGENTS, SANDBOX_AGENT_INSTALL_SCRIPT } from "./shared.ts"; + +const DEFAULT_AGENT_PORT = 3000; +const DEFAULT_APP_NAME = "sandbox-agent"; +const DEFAULT_MEMORY_MIB = 2048; + +export interface ModalProviderOptions { + create?: { + secrets?: Record; + appName?: string; + memoryMiB?: number; + }; + agentPort?: number; +} + +export function modal(options: ModalProviderOptions = {}): SandboxProvider { + const agentPort = options.agentPort ?? DEFAULT_AGENT_PORT; + const appName = options.create?.appName ?? DEFAULT_APP_NAME; + const memoryMiB = options.create?.memoryMiB ?? 
DEFAULT_MEMORY_MIB; + const client = new ModalClient(); + + return { + name: "modal", + async create(): Promise { + const app = await client.apps.fromName(appName, { createIfMissing: true }); + + // Pre-install sandbox-agent and agents in the image so they are cached + // across sandbox creates and don't need to be installed at runtime. + const installAgentCmds = DEFAULT_AGENTS.map((agent) => `RUN sandbox-agent install-agent ${agent}`); + const image = client.images + .fromRegistry("node:22-slim") + .dockerfileCommands([ + "RUN apt-get update && apt-get install -y curl ca-certificates && rm -rf /var/lib/apt/lists/*", + `RUN curl -fsSL ${SANDBOX_AGENT_INSTALL_SCRIPT} | sh`, + ...installAgentCmds, + ]); + + const envVars = options.create?.secrets ?? {}; + const secrets = Object.keys(envVars).length > 0 ? [await client.secrets.fromObject(envVars)] : []; + + const sb = await client.sandboxes.create(app, image, { + encryptedPorts: [agentPort], + secrets, + memoryMiB, + }); + + // Start the server as a long-running exec process. We intentionally + // do NOT await p.wait() — the process stays alive for the sandbox + // lifetime and keeps the port open for the tunnel. 
+ sb.exec(["sandbox-agent", "server", "--no-token", "--host", "0.0.0.0", "--port", String(agentPort)]); + + return sb.sandboxId; + }, + async destroy(sandboxId: string): Promise { + const sb = await client.sandboxes.fromId(sandboxId); + await sb.terminate(); + }, + async getUrl(sandboxId: string): Promise { + const sb = await client.sandboxes.fromId(sandboxId); + const tunnels = await sb.tunnels(); + const tunnel = tunnels[agentPort]; + if (!tunnel) { + throw new Error(`modal: no tunnel found for port ${agentPort}`); + } + return tunnel.url; + }, + async ensureServer(sandboxId: string): Promise { + const sb = await client.sandboxes.fromId(sandboxId); + sb.exec(["sandbox-agent", "server", "--no-token", "--host", "0.0.0.0", "--port", String(agentPort)]); + }, + }; +} diff --git a/sdks/typescript/src/providers/shared.ts b/sdks/typescript/src/providers/shared.ts new file mode 100644 index 0000000..d838a0a --- /dev/null +++ b/sdks/typescript/src/providers/shared.ts @@ -0,0 +1,7 @@ +export const DEFAULT_SANDBOX_AGENT_IMAGE = "rivetdev/sandbox-agent:0.3.2-full"; +export const SANDBOX_AGENT_INSTALL_SCRIPT = "https://releases.rivet.dev/sandbox-agent/0.3.x/install.sh"; +export const DEFAULT_AGENTS = ["claude", "codex"] as const; + +export function buildServerStartCommand(port: number): string { + return `nohup sandbox-agent server --no-token --host 0.0.0.0 --port ${port} >/tmp/sandbox-agent.log 2>&1 &`; +} diff --git a/sdks/typescript/src/providers/types.ts b/sdks/typescript/src/providers/types.ts new file mode 100644 index 0000000..ea778de --- /dev/null +++ b/sdks/typescript/src/providers/types.ts @@ -0,0 +1,31 @@ +export interface SandboxProvider { + /** Provider name. Must match the prefix in sandbox IDs (for example "e2b"). */ + name: string; + + /** Provision a new sandbox and return the provider-specific ID. */ + create(): Promise; + + /** Permanently tear down a sandbox. 
*/ + destroy(sandboxId: string): Promise; + + /** + * Return the sandbox-agent base URL for this sandbox. + * Providers that cannot expose a URL should implement `getFetch()` instead. + */ + getUrl?(sandboxId: string): Promise; + + /** + * Return a fetch implementation that routes requests to the sandbox. + * Providers that expose a URL can implement `getUrl()` instead. + */ + getFetch?(sandboxId: string): Promise; + + /** + * Ensure the sandbox-agent server process is running inside the sandbox. + * Called during health-wait after consecutive failures, and before + * reconnecting to an existing sandbox. Implementations should be + * idempotent — if the server is already running, this should be a no-op + * (e.g. the duplicate process exits on port conflict). + */ + ensureServer?(sandboxId: string): Promise; +} diff --git a/sdks/typescript/src/providers/vercel.ts b/sdks/typescript/src/providers/vercel.ts new file mode 100644 index 0000000..09d41cf --- /dev/null +++ b/sdks/typescript/src/providers/vercel.ts @@ -0,0 +1,65 @@ +import { Sandbox } from "@vercel/sandbox"; +import type { SandboxProvider } from "./types.ts"; +import { DEFAULT_AGENTS, SANDBOX_AGENT_INSTALL_SCRIPT } from "./shared.ts"; + +const DEFAULT_AGENT_PORT = 3000; + +export interface VercelProviderOptions { + create?: Record | (() => Record | Promise>); + agentPort?: number; +} + +async function resolveCreateOptions(value: VercelProviderOptions["create"], agentPort: number): Promise> { + const resolved = typeof value === "function" ? await value() : (value ?? 
{}); + return { + ports: [agentPort], + ...resolved, + }; +} + +async function runVercelCommand(sandbox: InstanceType, cmd: string, args: string[] = []): Promise { + const result = await sandbox.runCommand({ cmd, args }); + if (result.exitCode !== 0) { + const stderr = await result.stderr(); + throw new Error(`vercel command failed: ${cmd} ${args.join(" ")}\n${stderr}`); + } +} + +export function vercel(options: VercelProviderOptions = {}): SandboxProvider { + const agentPort = options.agentPort ?? DEFAULT_AGENT_PORT; + + return { + name: "vercel", + async create(): Promise { + const sandbox = await Sandbox.create((await resolveCreateOptions(options.create, agentPort)) as Parameters[0]); + + await runVercelCommand(sandbox, "sh", ["-c", `curl -fsSL ${SANDBOX_AGENT_INSTALL_SCRIPT} | sh`]); + for (const agent of DEFAULT_AGENTS) { + await runVercelCommand(sandbox, "sandbox-agent", ["install-agent", agent]); + } + await sandbox.runCommand({ + cmd: "sandbox-agent", + args: ["server", "--no-token", "--host", "0.0.0.0", "--port", String(agentPort)], + detached: true, + }); + + return sandbox.sandboxId; + }, + async destroy(sandboxId: string): Promise { + const sandbox = await Sandbox.get({ sandboxId }); + await sandbox.stop(); + }, + async getUrl(sandboxId: string): Promise { + const sandbox = await Sandbox.get({ sandboxId }); + return sandbox.domain(agentPort); + }, + async ensureServer(sandboxId: string): Promise { + const sandbox = await Sandbox.get({ sandboxId }); + await sandbox.runCommand({ + cmd: "sandbox-agent", + args: ["server", "--no-token", "--host", "0.0.0.0", "--port", String(agentPort)], + detached: true, + }); + }, + }; +} diff --git a/sdks/typescript/src/types.ts b/sdks/typescript/src/types.ts index 6865690..f2a7af3 100644 --- a/sdks/typescript/src/types.ts +++ b/sdks/typescript/src/types.ts @@ -98,6 +98,7 @@ export interface SessionRecord { lastConnectionId: string; createdAt: number; destroyedAt?: number; + sandboxId?: string; sessionInit?: Omit; 
configOptions?: SessionConfigOption[]; modes?: SessionModeState | null; @@ -131,11 +132,11 @@ export interface ListEventsRequest extends ListPageRequest { } export interface SessionPersistDriver { - getSession(id: string): Promise; + getSession(id: string): Promise; listSessions(request?: ListPageRequest): Promise>; updateSession(session: SessionRecord): Promise; listEvents(request: ListEventsRequest): Promise>; - insertEvent(event: SessionEvent): Promise; + insertEvent(sessionId: string, event: SessionEvent): Promise; } export interface InMemorySessionPersistDriverOptions { @@ -158,9 +159,9 @@ export class InMemorySessionPersistDriver implements SessionPersistDriver { this.maxEventsPerSession = normalizeCap(options.maxEventsPerSession, DEFAULT_MAX_EVENTS_PER_SESSION); } - async getSession(id: string): Promise { + async getSession(id: string): Promise { const session = this.sessions.get(id); - return session ? cloneSessionRecord(session) : null; + return session ? cloneSessionRecord(session) : undefined; } async listSessions(request: ListPageRequest = {}): Promise> { @@ -219,15 +220,15 @@ export class InMemorySessionPersistDriver implements SessionPersistDriver { }; } - async insertEvent(event: SessionEvent): Promise { - const events = this.eventsBySession.get(event.sessionId) ?? []; + async insertEvent(sessionId: string, event: SessionEvent): Promise { + const events = this.eventsBySession.get(sessionId) ?? 
[]; events.push(cloneSessionEvent(event)); if (events.length > this.maxEventsPerSession) { events.splice(0, events.length - this.maxEventsPerSession); } - this.eventsBySession.set(event.sessionId, events); + this.eventsBySession.set(sessionId, events); } } diff --git a/sdks/typescript/tests/integration.test.ts b/sdks/typescript/tests/integration.test.ts index 003b0dd..295e688 100644 --- a/sdks/typescript/tests/integration.test.ts +++ b/sdks/typescript/tests/integration.test.ts @@ -70,19 +70,19 @@ class StrictUniqueSessionPersistDriver implements SessionPersistDriver { return this.events.listEvents(request); } - async insertEvent(event: SessionEvent): Promise { + async insertEvent(sessionId: string, event: SessionEvent): Promise { await sleep(5); - const indexes = this.eventIndexesBySession.get(event.sessionId) ?? new Set(); + const indexes = this.eventIndexesBySession.get(sessionId) ?? new Set(); if (indexes.has(event.eventIndex)) { throw new Error("UNIQUE constraint failed: sandbox_agent_events.session_id, sandbox_agent_events.event_index"); } indexes.add(event.eventIndex); - this.eventIndexesBySession.set(event.sessionId, indexes); + this.eventIndexesBySession.set(sessionId, indexes); await sleep(5); - await this.events.insertEvent(event); + await this.events.insertEvent(sessionId, event); } } diff --git a/sdks/typescript/tests/providers.test.ts b/sdks/typescript/tests/providers.test.ts new file mode 100644 index 0000000..3376026 --- /dev/null +++ b/sdks/typescript/tests/providers.test.ts @@ -0,0 +1,417 @@ +import { describe, it, expect, beforeAll, afterAll, afterEach } from "vitest"; +import { createRequire } from "node:module"; +import { existsSync, mkdtempSync, rmSync } from "node:fs"; +import { dirname, join, resolve } from "node:path"; +import { fileURLToPath } from "node:url"; +import { tmpdir } from "node:os"; +import { execSync } from "node:child_process"; + +const _require = createRequire(import.meta.url); +import { InMemorySessionPersistDriver, 
SandboxAgent, type SandboxProvider } from "../src/index.ts"; +import { local } from "../src/providers/local.ts"; +import { docker } from "../src/providers/docker.ts"; +import { e2b } from "../src/providers/e2b.ts"; +import { daytona } from "../src/providers/daytona.ts"; +import { vercel } from "../src/providers/vercel.ts"; +import { modal } from "../src/providers/modal.ts"; +import { computesdk } from "../src/providers/computesdk.ts"; +import { prepareMockAgentDataHome } from "./helpers/mock-agent.ts"; + +const __dirname = dirname(fileURLToPath(import.meta.url)); + +function findBinary(): string | null { + if (process.env.SANDBOX_AGENT_BIN) { + return process.env.SANDBOX_AGENT_BIN; + } + + const cargoPaths = [resolve(__dirname, "../../../target/debug/sandbox-agent"), resolve(__dirname, "../../../target/release/sandbox-agent")]; + for (const candidate of cargoPaths) { + if (existsSync(candidate)) { + return candidate; + } + } + + return null; +} + +const BINARY_PATH = findBinary(); +if (!BINARY_PATH) { + throw new Error("sandbox-agent binary not found. Build it (cargo build -p sandbox-agent) or set SANDBOX_AGENT_BIN."); +} +if (!process.env.SANDBOX_AGENT_BIN) { + process.env.SANDBOX_AGENT_BIN = BINARY_PATH; +} + +function isModuleAvailable(name: string): boolean { + try { + _require.resolve(name); + return true; + } catch { + return false; + } +} + +function isDockerAvailable(): boolean { + try { + execSync("docker info", { stdio: "ignore", timeout: 5_000 }); + return true; + } catch { + return false; + } +} + +// --------------------------------------------------------------------------- +// Provider registry — each entry defines how to create a provider and +// what preconditions are required for it to run. +// --------------------------------------------------------------------------- + +interface ProviderEntry { + name: string; + /** Human-readable reasons this provider can't run, or empty if ready. 
*/ + skipReasons: string[]; + /** Return a fresh provider instance for a single test. */ + createProvider: () => SandboxProvider; + /** Optional per-provider setup (e.g. create temp dirs). Returns cleanup fn. */ + setup?: () => { cleanup: () => void }; + /** Agent to use for session tests. */ + agent: string; + /** Timeout for start() — remote providers need longer. */ + startTimeoutMs?: number; + /** Some providers (e.g. local) can verify the sandbox is gone after destroy. */ + canVerifyDestroyedSandbox?: boolean; + /** + * Whether session tests (createSession, prompt) should run. + * The mock agent only works with local provider (requires mock-acp process binary). + * Remote providers need a real agent (claude) which requires compatible server version + API keys. + */ + sessionTestsEnabled: boolean; +} + +function missingEnvVars(...vars: string[]): string[] { + const missing = vars.filter((v) => !process.env[v]); + return missing.length > 0 ? [`missing env: ${missing.join(", ")}`] : []; +} + +function missingModules(...modules: string[]): string[] { + const missing = modules.filter((m) => !isModuleAvailable(m)); + return missing.length > 0 ? [`missing npm packages: ${missing.join(", ")}`] : []; +} + +function collectApiKeys(): Record { + const keys: Record = {}; + if (process.env.ANTHROPIC_API_KEY) keys.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; + if (process.env.OPENAI_API_KEY) keys.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + return keys; +} + +function buildProviders(): ProviderEntry[] { + const entries: ProviderEntry[] = []; + + // --- local --- + // Uses the mock-acp process binary created by prepareMockAgentDataHome. 
+ { + let dataHome: string | undefined; + entries.push({ + name: "local", + skipReasons: [], + agent: "mock", + canVerifyDestroyedSandbox: true, + sessionTestsEnabled: true, + setup() { + dataHome = mkdtempSync(join(tmpdir(), "sdk-provider-local-")); + return { + cleanup: () => { + if (dataHome) rmSync(dataHome, { recursive: true, force: true }); + }, + }; + }, + createProvider() { + return local({ + log: "silent", + env: prepareMockAgentDataHome(dataHome!), + }); + }, + }); + } + + // --- docker --- + // Requires SANDBOX_AGENT_DOCKER_IMAGE (e.g. "sandbox-agent-dev:local"). + // Session tests disabled: released server images use a different ACP protocol + // version than the current SDK branch, causing "Query closed before response + // received" errors on session creation. + { + entries.push({ + name: "docker", + skipReasons: [ + ...missingEnvVars("SANDBOX_AGENT_DOCKER_IMAGE"), + ...missingModules("dockerode", "get-port"), + ...(isDockerAvailable() ? [] : ["Docker daemon not available"]), + ], + agent: "claude", + startTimeoutMs: 180_000, + canVerifyDestroyedSandbox: false, + sessionTestsEnabled: false, + createProvider() { + const apiKeys = [ + process.env.ANTHROPIC_API_KEY ? `ANTHROPIC_API_KEY=${process.env.ANTHROPIC_API_KEY}` : "", + process.env.OPENAI_API_KEY ? `OPENAI_API_KEY=${process.env.OPENAI_API_KEY}` : "", + ].filter(Boolean); + return docker({ + image: process.env.SANDBOX_AGENT_DOCKER_IMAGE, + env: apiKeys, + }); + }, + }); + } + + // --- e2b --- + // Session tests disabled: see docker comment above (ACP protocol mismatch). 
+ { + entries.push({ + name: "e2b", + skipReasons: [...missingEnvVars("E2B_API_KEY"), ...missingModules("@e2b/code-interpreter")], + agent: "claude", + startTimeoutMs: 300_000, + canVerifyDestroyedSandbox: false, + sessionTestsEnabled: false, + createProvider() { + return e2b({ + create: { envs: collectApiKeys() }, + }); + }, + }); + } + + // --- daytona --- + // Session tests disabled: see docker comment above (ACP protocol mismatch). + { + entries.push({ + name: "daytona", + skipReasons: [...missingEnvVars("DAYTONA_API_KEY"), ...missingModules("@daytonaio/sdk")], + agent: "claude", + startTimeoutMs: 300_000, + canVerifyDestroyedSandbox: false, + sessionTestsEnabled: false, + createProvider() { + return daytona({ + create: { envVars: collectApiKeys() }, + }); + }, + }); + } + + // --- vercel --- + // Session tests disabled: see docker comment above (ACP protocol mismatch). + { + entries.push({ + name: "vercel", + skipReasons: [...missingEnvVars("VERCEL_ACCESS_TOKEN"), ...missingModules("@vercel/sandbox")], + agent: "claude", + startTimeoutMs: 300_000, + canVerifyDestroyedSandbox: false, + sessionTestsEnabled: false, + createProvider() { + return vercel({ + create: { env: collectApiKeys() }, + }); + }, + }); + } + + // --- modal --- + // Session tests disabled: see docker comment above (ACP protocol mismatch). + { + entries.push({ + name: "modal", + skipReasons: [...missingEnvVars("MODAL_TOKEN_ID", "MODAL_TOKEN_SECRET"), ...missingModules("modal")], + agent: "claude", + startTimeoutMs: 300_000, + canVerifyDestroyedSandbox: false, + sessionTestsEnabled: false, + createProvider() { + return modal({ + create: { secrets: collectApiKeys() }, + }); + }, + }); + } + + // --- computesdk --- + // Session tests disabled: see docker comment above (ACP protocol mismatch). 
+ { + entries.push({ + name: "computesdk", + skipReasons: [...missingEnvVars("COMPUTESDK_API_KEY"), ...missingModules("computesdk")], + agent: "claude", + startTimeoutMs: 300_000, + canVerifyDestroyedSandbox: false, + sessionTestsEnabled: false, + createProvider() { + return computesdk({ + create: { envs: collectApiKeys() }, + }); + }, + }); + } + + return entries; +} + +// --------------------------------------------------------------------------- +// Shared test suite — runs the same assertions against every provider. +// +// Provider lifecycle tests (start, sandboxId, reconnect, destroy) use only +// listAgents() and never create sessions — these work regardless of which +// agents are installed or whether API keys are present. +// +// Session tests (createSession, prompt) are only enabled for providers where +// the agent is known to work. For local, the mock-acp process binary is +// created by test setup. For remote providers, a real agent (claude) is used +// which requires ANTHROPIC_API_KEY and a compatible server version. +// --------------------------------------------------------------------------- + +function providerSuite(entry: ProviderEntry) { + const skip = entry.skipReasons.length > 0; + + const descFn = skip ? 
describe.skip : describe; + + descFn(`SandboxProvider: ${entry.name}`, () => { + let sdk: SandboxAgent | undefined; + let cleanupFn: (() => void) | undefined; + + if (skip) { + it.skip(`skipped — ${entry.skipReasons.join("; ")}`, () => {}); + return; + } + + beforeAll(() => { + const result = entry.setup?.(); + cleanupFn = result?.cleanup; + }); + + afterEach(async () => { + if (!sdk) return; + await sdk.destroySandbox().catch(async () => { + await sdk?.dispose().catch(() => {}); + }); + sdk = undefined; + }, 30_000); + + afterAll(() => { + cleanupFn?.(); + }); + + // -- lifecycle tests (no session creation) -- + + it( + "starts with a prefixed sandboxId and passes health", + async () => { + sdk = await SandboxAgent.start({ sandbox: entry.createProvider() }); + expect(sdk.sandboxId).toMatch(new RegExp(`^${entry.name}/`)); + + // listAgents() awaits the internal health gate, confirming the server is ready. + const agents = await sdk.listAgents(); + expect(agents.agents.length).toBeGreaterThan(0); + }, + entry.startTimeoutMs, + ); + + it("rejects mismatched sandboxId prefixes", async () => { + await expect( + SandboxAgent.start({ + sandbox: entry.createProvider(), + sandboxId: "wrong-provider/example", + }), + ).rejects.toThrow(/provider/i); + }); + + it( + "reconnects after dispose without destroying the sandbox", + async () => { + sdk = await SandboxAgent.start({ sandbox: entry.createProvider() }); + const sandboxId = sdk.sandboxId; + expect(sandboxId).toBeTruthy(); + + await sdk.dispose(); + + const reconnected = await SandboxAgent.start({ + sandbox: entry.createProvider(), + sandboxId, + }); + + const agents = await reconnected.listAgents(); + expect(agents.agents.length).toBeGreaterThan(0); + sdk = reconnected; + }, + entry.startTimeoutMs ? 
entry.startTimeoutMs * 2 : undefined, + ); + + it( + "destroySandbox tears the sandbox down", + async () => { + sdk = await SandboxAgent.start({ sandbox: entry.createProvider() }); + const sandboxId = sdk.sandboxId; + expect(sandboxId).toBeTruthy(); + + await sdk.destroySandbox(); + sdk = undefined; + + if (entry.canVerifyDestroyedSandbox) { + const reconnected = await SandboxAgent.start({ + sandbox: entry.createProvider(), + sandboxId, + skipHealthCheck: true, + }); + await expect(reconnected.listAgents()).rejects.toThrow(); + } + }, + entry.startTimeoutMs, + ); + + // -- session tests (require working agent) -- + + const sessionIt = entry.sessionTestsEnabled ? it : it.skip; + + sessionIt( + "creates sessions with persisted sandboxId", + async () => { + const persist = new InMemorySessionPersistDriver(); + sdk = await SandboxAgent.start({ sandbox: entry.createProvider(), persist }); + + const session = await sdk.createSession({ agent: entry.agent }); + const record = await persist.getSession(session.id); + + expect(record?.sandboxId).toBe(sdk.sandboxId); + }, + entry.startTimeoutMs, + ); + + sessionIt( + "sends a prompt and receives a response", + async () => { + sdk = await SandboxAgent.start({ sandbox: entry.createProvider() }); + + const session = await sdk.createSession({ agent: entry.agent }); + const events: unknown[] = []; + const off = session.onEvent((event) => { + events.push(event); + }); + + const result = await session.prompt([{ type: "text", text: "Say hello in one word." }]); + off(); + + expect(result.stopReason).toBe("end_turn"); + expect(events.length).toBeGreaterThan(0); + }, + entry.startTimeoutMs ? 
entry.startTimeoutMs * 2 : 30_000, + ); + }); +} + +// --------------------------------------------------------------------------- +// Register all providers +// --------------------------------------------------------------------------- + +for (const entry of buildProviders()) { + providerSuite(entry); +} diff --git a/sdks/typescript/tsup.config.ts b/sdks/typescript/tsup.config.ts index faf3167..984eeb3 100644 --- a/sdks/typescript/tsup.config.ts +++ b/sdks/typescript/tsup.config.ts @@ -1,9 +1,20 @@ import { defineConfig } from "tsup"; export default defineConfig({ - entry: ["src/index.ts"], + entry: [ + "src/index.ts", + "src/providers/local.ts", + "src/providers/e2b.ts", + "src/providers/daytona.ts", + "src/providers/docker.ts", + "src/providers/vercel.ts", + "src/providers/cloudflare.ts", + "src/providers/modal.ts", + "src/providers/computesdk.ts", + ], format: ["esm"], dts: true, clean: true, sourcemap: true, + external: ["@cloudflare/sandbox", "@daytonaio/sdk", "@e2b/code-interpreter", "@vercel/sandbox", "dockerode", "get-port", "modal", "computesdk"], }); diff --git a/sdks/typescript/vitest.config.ts b/sdks/typescript/vitest.config.ts index 8676010..e83d10a 100644 --- a/sdks/typescript/vitest.config.ts +++ b/sdks/typescript/vitest.config.ts @@ -4,5 +4,7 @@ export default defineConfig({ test: { include: ["tests/**/*.test.ts"], testTimeout: 30000, + teardownTimeout: 10000, + pool: "forks", }, }); diff --git a/server/packages/sandbox-agent/src/acp_proxy_runtime.rs b/server/packages/sandbox-agent/src/acp_proxy_runtime.rs index 3710e2f..212356e 100644 --- a/server/packages/sandbox-agent/src/acp_proxy_runtime.rs +++ b/server/packages/sandbox-agent/src/acp_proxy_runtime.rs @@ -415,7 +415,7 @@ impl AcpProxyRuntime { async fn is_ready(&self, agent: AgentId) -> bool { if agent == AgentId::Mock { - return self.inner.agent_manager.agent_process_path(agent).exists(); + return true; } self.inner.agent_manager.is_installed(agent) } From 
2f9f25ae54148658f9cd9b6fca2b4f5206a5aefa Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Sun, 15 Mar 2026 20:34:01 -0700 Subject: [PATCH 11/48] chore(release): update version to 0.4.0-rc.1 --- Cargo.toml | 16 +++++----- docs/openapi.json | 2 +- .../packages/client/src/workbench-model.ts | 2 +- .../frontend/src/components/mock-layout.tsx | 2 +- .../components/mock-layout/message-list.tsx | 7 ++-- pnpm-lock.yaml | 3 -- scripts/audit-acp-deps/adapters.json | 32 +++++++++++++++++++ sdks/acp-http-client/package.json | 2 +- sdks/cli-shared/package.json | 2 +- sdks/cli/package.json | 2 +- sdks/cli/platforms/darwin-arm64/package.json | 2 +- sdks/cli/platforms/darwin-x64/package.json | 2 +- sdks/cli/platforms/linux-arm64/package.json | 2 +- sdks/cli/platforms/linux-x64/package.json | 2 +- sdks/cli/platforms/win32-x64/package.json | 2 +- sdks/gigacode/package.json | 2 +- .../platforms/darwin-arm64/package.json | 2 +- .../platforms/darwin-x64/package.json | 2 +- .../platforms/linux-arm64/package.json | 2 +- .../gigacode/platforms/linux-x64/package.json | 2 +- .../gigacode/platforms/win32-x64/package.json | 2 +- sdks/persist-indexeddb/package.json | 2 +- sdks/persist-postgres/package.json | 2 +- sdks/persist-rivet/package.json | 2 +- sdks/persist-sqlite/package.json | 2 +- sdks/react/package.json | 2 +- sdks/typescript/package.json | 2 +- sdks/typescript/src/client.ts | 8 ++--- 28 files changed, 72 insertions(+), 40 deletions(-) create mode 100644 scripts/audit-acp-deps/adapters.json diff --git a/Cargo.toml b/Cargo.toml index c353c2c..0865385 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,7 @@ members = ["server/packages/*", "gigacode"] exclude = ["factory/packages/desktop/src-tauri", "foundry/packages/desktop/src-tauri"] [workspace.package] -version = "0.3.2" +version = "0.4.0-rc.1" edition = "2021" authors = [ "Rivet Gaming, LLC " ] license = "Apache-2.0" @@ -13,13 +13,13 @@ description = "Universal API for automatic coding agents in sandboxes. 
Supports [workspace.dependencies] # Internal crates -sandbox-agent = { version = "0.3.2", path = "server/packages/sandbox-agent" } -sandbox-agent-error = { version = "0.3.2", path = "server/packages/error" } -sandbox-agent-agent-management = { version = "0.3.2", path = "server/packages/agent-management" } -sandbox-agent-agent-credentials = { version = "0.3.2", path = "server/packages/agent-credentials" } -sandbox-agent-opencode-adapter = { version = "0.3.2", path = "server/packages/opencode-adapter" } -sandbox-agent-opencode-server-manager = { version = "0.3.2", path = "server/packages/opencode-server-manager" } -acp-http-adapter = { version = "0.3.2", path = "server/packages/acp-http-adapter" } +sandbox-agent = { version = "0.4.0-rc.1", path = "server/packages/sandbox-agent" } +sandbox-agent-error = { version = "0.4.0-rc.1", path = "server/packages/error" } +sandbox-agent-agent-management = { version = "0.4.0-rc.1", path = "server/packages/agent-management" } +sandbox-agent-agent-credentials = { version = "0.4.0-rc.1", path = "server/packages/agent-credentials" } +sandbox-agent-opencode-adapter = { version = "0.4.0-rc.1", path = "server/packages/opencode-adapter" } +sandbox-agent-opencode-server-manager = { version = "0.4.0-rc.1", path = "server/packages/opencode-server-manager" } +acp-http-adapter = { version = "0.4.0-rc.1", path = "server/packages/acp-http-adapter" } # Serialization serde = { version = "1.0", features = ["derive"] } diff --git a/docs/openapi.json b/docs/openapi.json index f2bd640..1beeb2b 100644 --- a/docs/openapi.json +++ b/docs/openapi.json @@ -10,7 +10,7 @@ "license": { "name": "Apache-2.0" }, - "version": "0.3.2" + "version": "0.4.0-rc.1" }, "servers": [ { diff --git a/foundry/packages/client/src/workbench-model.ts b/foundry/packages/client/src/workbench-model.ts index d30407f..afe9e8b 100644 --- a/foundry/packages/client/src/workbench-model.ts +++ b/foundry/packages/client/src/workbench-model.ts @@ -1234,7 +1234,7 @@ export function 
buildInitialTasks(): Task[] { updatedAtMs: minutesAgo(40), branch: "perf/transcript-virtualizer", pullRequest: null, - tabs: [ + sessions: [ { id: "stress-transcript-tab", sessionId: "stress-transcript-session", diff --git a/foundry/packages/frontend/src/components/mock-layout.tsx b/foundry/packages/frontend/src/components/mock-layout.tsx index d922ce2..1ff4d35 100644 --- a/foundry/packages/frontend/src/components/mock-layout.tsx +++ b/foundry/packages/frontend/src/components/mock-layout.tsx @@ -939,7 +939,7 @@ const TranscriptPanel = memo(function TranscriptPanel({ messageRefs={messageRefs} historyEvents={historyEvents} onSelectHistoryEvent={jumpToHistoryEvent} - targetMessageId={pendingHistoryTarget && activeTabId === pendingHistoryTarget.tabId ? pendingHistoryTarget.messageId : null} + targetMessageId={pendingHistoryTarget && activeSessionId === pendingHistoryTarget.sessionId ? pendingHistoryTarget.messageId : null} onTargetMessageResolved={() => setPendingHistoryTarget(null)} copiedMessageId={copiedMessageId} onCopyMessage={(message) => { diff --git a/foundry/packages/frontend/src/components/mock-layout/message-list.tsx b/foundry/packages/frontend/src/components/mock-layout/message-list.tsx index 743c200..df6d10a 100644 --- a/foundry/packages/frontend/src/components/mock-layout/message-list.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/message-list.tsx @@ -1,4 +1,7 @@ -import { AgentTranscript, type AgentTranscriptClassNames, type TranscriptEntry } from "@sandbox-agent/react"; +import { AgentTranscript as AgentTranscript_, type AgentTranscriptClassNames, type TranscriptEntry } from "@sandbox-agent/react"; + +// Cast to work around React 18/19 type incompatibility between @sandbox-agent/react and foundry +const AgentTranscript = AgentTranscript_ as unknown as (props: Record) => JSX.Element; import { memo, useEffect, useMemo, type MutableRefObject, type RefObject } from "react"; import { useStyletron } from "baseui"; import { LabelSmall, 
LabelXSmall } from "baseui/typography"; @@ -291,7 +294,7 @@ export const MessageList = memo(function MessageList({ scrollRef={scrollRef} scrollToEntryId={targetMessageId} virtualize - renderMessageText={(entry) => { + renderMessageText={(entry: TranscriptEntry) => { if (entry.id === PENDING_MESSAGE_ID && pendingMessage) { const pendingMsg: Message = { id: PENDING_MESSAGE_ID, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e4e7838..46a59b7 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -492,9 +492,6 @@ importers: '@sandbox-agent/foundry-shared': specifier: workspace:* version: link:../shared - '@sandbox-agent/persist-rivet': - specifier: workspace:* - version: link:../../../sdks/persist-rivet better-auth: specifier: ^1.5.5 version: 1.5.5(@cloudflare/workers-types@4.20260316.1)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(pg@8.20.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(solid-js@1.9.11)(vitest@3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) diff --git a/scripts/audit-acp-deps/adapters.json b/scripts/audit-acp-deps/adapters.json new file mode 100644 index 0000000..3f7b1f4 --- /dev/null +++ b/scripts/audit-acp-deps/adapters.json @@ -0,0 +1,32 @@ +{ + "sdkDeps": { + "@agentclientprotocol/sdk": "^0.16.1" + }, + "adapters": [ + { + "agentId": "claude", + "npmPackage": "@zed-industries/claude-agent-acp", + "pinnedVersion": "0.20.0" + }, + { + "agentId": "codex", + "npmPackage": "@zed-industries/codex-acp", + "pinnedVersion": "0.1.0" + }, + { + "agentId": "amp", + "npmPackage": "amp-acp", + "pinnedVersion": "0.7.0" + }, + { + "agentId": "pi", + "npmPackage": "pi-acp", + "pinnedVersion": "0.0.23" + }, + { + "agentId": "cursor", + "npmPackage": "@blowmage/cursor-agent-acp", + "pinnedVersion": "0.1.0" + } + ] +} diff --git a/sdks/acp-http-client/package.json 
b/sdks/acp-http-client/package.json index 3e64d8f..b5d264f 100644 --- a/sdks/acp-http-client/package.json +++ b/sdks/acp-http-client/package.json @@ -1,6 +1,6 @@ { "name": "acp-http-client", - "version": "0.3.2", + "version": "0.4.0-rc.1", "description": "Protocol-faithful ACP JSON-RPC over streamable HTTP client.", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli-shared/package.json b/sdks/cli-shared/package.json index 756458a..b2825fb 100644 --- a/sdks/cli-shared/package.json +++ b/sdks/cli-shared/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-shared", - "version": "0.3.2", + "version": "0.4.0-rc.1", "description": "Shared helpers for sandbox-agent CLI and SDK", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/package.json b/sdks/cli/package.json index 79e1f9b..17c0401 100644 --- a/sdks/cli/package.json +++ b/sdks/cli/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli", - "version": "0.3.2", + "version": "0.4.0-rc.1", "description": "CLI for sandbox-agent - run AI coding agents in sandboxes", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/platforms/darwin-arm64/package.json b/sdks/cli/platforms/darwin-arm64/package.json index 20ca14b..2b5a7e8 100644 --- a/sdks/cli/platforms/darwin-arm64/package.json +++ b/sdks/cli/platforms/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-darwin-arm64", - "version": "0.3.2", + "version": "0.4.0-rc.1", "description": "sandbox-agent CLI binary for macOS ARM64", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/platforms/darwin-x64/package.json b/sdks/cli/platforms/darwin-x64/package.json index cd63213..72f3044 100644 --- a/sdks/cli/platforms/darwin-x64/package.json +++ b/sdks/cli/platforms/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-darwin-x64", - "version": "0.3.2", + "version": "0.4.0-rc.1", "description": "sandbox-agent CLI binary for macOS x64", "license": "Apache-2.0", "repository": { diff --git 
a/sdks/cli/platforms/linux-arm64/package.json b/sdks/cli/platforms/linux-arm64/package.json index 7fdc11e..0d8d92a 100644 --- a/sdks/cli/platforms/linux-arm64/package.json +++ b/sdks/cli/platforms/linux-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-linux-arm64", - "version": "0.3.2", + "version": "0.4.0-rc.1", "description": "sandbox-agent CLI binary for Linux arm64", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/platforms/linux-x64/package.json b/sdks/cli/platforms/linux-x64/package.json index 6568e75..5d70adb 100644 --- a/sdks/cli/platforms/linux-x64/package.json +++ b/sdks/cli/platforms/linux-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-linux-x64", - "version": "0.3.2", + "version": "0.4.0-rc.1", "description": "sandbox-agent CLI binary for Linux x64", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/platforms/win32-x64/package.json b/sdks/cli/platforms/win32-x64/package.json index 09938cf..e95b818 100644 --- a/sdks/cli/platforms/win32-x64/package.json +++ b/sdks/cli/platforms/win32-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-win32-x64", - "version": "0.3.2", + "version": "0.4.0-rc.1", "description": "sandbox-agent CLI binary for Windows x64", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/package.json b/sdks/gigacode/package.json index 8ec1ff8..2f22d77 100644 --- a/sdks/gigacode/package.json +++ b/sdks/gigacode/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode", - "version": "0.3.2", + "version": "0.4.0-rc.1", "description": "Gigacode CLI (sandbox-agent with OpenCode attach by default)", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/platforms/darwin-arm64/package.json b/sdks/gigacode/platforms/darwin-arm64/package.json index bd92a97..220ba65 100644 --- a/sdks/gigacode/platforms/darwin-arm64/package.json +++ b/sdks/gigacode/platforms/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": 
"@sandbox-agent/gigacode-darwin-arm64", - "version": "0.3.2", + "version": "0.4.0-rc.1", "description": "gigacode CLI binary for macOS arm64", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/platforms/darwin-x64/package.json b/sdks/gigacode/platforms/darwin-x64/package.json index 2723d10..8912854 100644 --- a/sdks/gigacode/platforms/darwin-x64/package.json +++ b/sdks/gigacode/platforms/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode-darwin-x64", - "version": "0.3.2", + "version": "0.4.0-rc.1", "description": "gigacode CLI binary for macOS x64", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/platforms/linux-arm64/package.json b/sdks/gigacode/platforms/linux-arm64/package.json index de416f8..5b042cf 100644 --- a/sdks/gigacode/platforms/linux-arm64/package.json +++ b/sdks/gigacode/platforms/linux-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode-linux-arm64", - "version": "0.3.2", + "version": "0.4.0-rc.1", "description": "gigacode CLI binary for Linux arm64", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/platforms/linux-x64/package.json b/sdks/gigacode/platforms/linux-x64/package.json index 83690d6..ea7c298 100644 --- a/sdks/gigacode/platforms/linux-x64/package.json +++ b/sdks/gigacode/platforms/linux-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode-linux-x64", - "version": "0.3.2", + "version": "0.4.0-rc.1", "description": "gigacode CLI binary for Linux x64", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/platforms/win32-x64/package.json b/sdks/gigacode/platforms/win32-x64/package.json index 81a8aaa..86997a1 100644 --- a/sdks/gigacode/platforms/win32-x64/package.json +++ b/sdks/gigacode/platforms/win32-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode-win32-x64", - "version": "0.3.2", + "version": "0.4.0-rc.1", "description": "gigacode CLI binary for Windows x64", "license": "Apache-2.0", 
"repository": { diff --git a/sdks/persist-indexeddb/package.json b/sdks/persist-indexeddb/package.json index da05325..86f30b6 100644 --- a/sdks/persist-indexeddb/package.json +++ b/sdks/persist-indexeddb/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/persist-indexeddb", - "version": "0.3.2", + "version": "0.4.0-rc.1", "description": "IndexedDB persistence driver for the Sandbox Agent TypeScript SDK (DEPRECATED)", "license": "Apache-2.0", "repository": { diff --git a/sdks/persist-postgres/package.json b/sdks/persist-postgres/package.json index caa49f6..27a3756 100644 --- a/sdks/persist-postgres/package.json +++ b/sdks/persist-postgres/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/persist-postgres", - "version": "0.3.2", + "version": "0.4.0-rc.1", "description": "PostgreSQL persistence driver for the Sandbox Agent TypeScript SDK (DEPRECATED)", "license": "Apache-2.0", "repository": { diff --git a/sdks/persist-rivet/package.json b/sdks/persist-rivet/package.json index 047bea6..3c64db2 100644 --- a/sdks/persist-rivet/package.json +++ b/sdks/persist-rivet/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/persist-rivet", - "version": "0.3.2", + "version": "0.4.0-rc.1", "description": "Rivet Actor persistence driver for the Sandbox Agent TypeScript SDK (DEPRECATED)", "license": "Apache-2.0", "repository": { diff --git a/sdks/persist-sqlite/package.json b/sdks/persist-sqlite/package.json index 6c08fec..e7f472c 100644 --- a/sdks/persist-sqlite/package.json +++ b/sdks/persist-sqlite/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/persist-sqlite", - "version": "0.3.2", + "version": "0.4.0-rc.1", "description": "SQLite persistence driver for the Sandbox Agent TypeScript SDK (DEPRECATED)", "license": "Apache-2.0", "repository": { diff --git a/sdks/react/package.json b/sdks/react/package.json index 8b2e1d4..6bc21ec 100644 --- a/sdks/react/package.json +++ b/sdks/react/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/react", - "version": 
"0.3.2", + "version": "0.4.0-rc.1", "description": "React components for Sandbox Agent frontend integrations", "license": "Apache-2.0", "repository": { diff --git a/sdks/typescript/package.json b/sdks/typescript/package.json index 9fd62fe..7e2dfe4 100644 --- a/sdks/typescript/package.json +++ b/sdks/typescript/package.json @@ -1,6 +1,6 @@ { "name": "sandbox-agent", - "version": "0.3.2", + "version": "0.4.0-rc.1", "description": "Universal API for automatic coding agents in sandboxes. Supports Claude Code, Codex, OpenCode, and Amp.", "license": "Apache-2.0", "repository": { diff --git a/sdks/typescript/src/client.ts b/sdks/typescript/src/client.ts index f64c833..4752c0a 100644 --- a/sdks/typescript/src/client.ts +++ b/sdks/typescript/src/client.ts @@ -2637,13 +2637,13 @@ function deriveModesFromConfigOptions(configOptions: SessionConfigOption[] | und } const modeOption = findConfigOptionByCategory(configOptions, "mode"); - if (!modeOption || !Array.isArray(modeOption.options)) { + if (!modeOption || modeOption.type !== "select" || !Array.isArray(modeOption.options)) { return null; } const availableModes = modeOption.options - .flatMap((entry) => flattenConfigOptions(entry)) - .map((entry) => ({ + .flatMap((entry: unknown) => flattenConfigOptions(entry)) + .map((entry: { value: string; name: string; description?: string }) => ({ id: entry.value, name: entry.name, description: entry.description ?? null, @@ -2674,7 +2674,7 @@ function applyConfigOptionValue(configOptions: SessionConfigOption[], configId: return null; } const updated = cloneConfigOptions(configOptions) ?? 
[]; - updated[idx] = { ...updated[idx]!, currentValue: value }; + updated[idx] = { ...updated[idx]!, currentValue: value } as SessionConfigOption; return updated; } From bf543d225d47c59b5bd3370b8a9cead5b1806d83 Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Sun, 15 Mar 2026 22:36:48 -0700 Subject: [PATCH 12/48] fix: mock agent process, React 18/19 types, release version refs - Add hidden `mock-agent-process` CLI subcommand implementing a stdio JSON-RPC echo agent (ported from examples/mock-acp-agent) - Update write_mock_agent_process_launcher() to exec the new subcommand instead of exiting with error - Update sdks/react to support React 18 and 19 peer dependencies - Update @types/react to v19 across workspace (pnpm override + inspector) - Fix RefObject compatibility for React 19 useRef() signatures - Add version reference replacement logic to release update_version.ts covering all docs, examples, and code files listed in CLAUDE.md - Add missing files to CLAUDE.md Install Version References list (architecture.mdx, boxlite, modal, computesdk docs and examples) Co-Authored-By: Claude Opus 4.6 (1M context) --- CLAUDE.md | 6 + .../components/mock-layout/message-list.tsx | 19 ++- frontend/packages/inspector/package.json | 4 +- .../src/components/chat/ChatPanel.tsx | 2 +- .../components/chat/InspectorConversation.tsx | 2 +- package.json | 4 +- pnpm-lock.yaml | 158 +++++++++--------- scripts/release/update_version.ts | 114 ++++++++++++- sdks/react/package.json | 6 +- sdks/react/src/AgentConversation.tsx | 2 +- sdks/react/src/AgentTranscript.tsx | 4 +- sdks/react/src/useTranscriptVirtualizer.ts | 2 +- .../packages/agent-management/src/agents.rs | 4 +- server/packages/sandbox-agent/src/cli.rs | 69 ++++++++ 14 files changed, 296 insertions(+), 100 deletions(-) diff --git a/CLAUDE.md b/CLAUDE.md index cfd28a4..4935aa5 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -93,15 +93,21 @@ When adding a new agent, update all of the following: - `docs/sdk-overview.mdx` - 
`docs/react-components.mdx` - `docs/session-persistence.mdx` + - `docs/architecture.mdx` - `docs/deploy/local.mdx` - `docs/deploy/cloudflare.mdx` - `docs/deploy/vercel.mdx` - `docs/deploy/daytona.mdx` - `docs/deploy/e2b.mdx` - `docs/deploy/docker.mdx` + - `docs/deploy/boxlite.mdx` + - `docs/deploy/modal.mdx` + - `docs/deploy/computesdk.mdx` - `frontend/packages/website/src/components/GetStarted.tsx` - `.claude/commands/post-release-testing.md` - `examples/cloudflare/Dockerfile` + - `examples/boxlite/Dockerfile` + - `examples/boxlite-python/Dockerfile` - `examples/daytona/src/index.ts` - `examples/shared/src/docker.ts` - `examples/docker/src/index.ts` diff --git a/foundry/packages/frontend/src/components/mock-layout/message-list.tsx b/foundry/packages/frontend/src/components/mock-layout/message-list.tsx index df6d10a..499e6cd 100644 --- a/foundry/packages/frontend/src/components/mock-layout/message-list.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/message-list.tsx @@ -1,8 +1,19 @@ import { AgentTranscript as AgentTranscript_, type AgentTranscriptClassNames, type TranscriptEntry } from "@sandbox-agent/react"; - -// Cast to work around React 18/19 type incompatibility between @sandbox-agent/react and foundry -const AgentTranscript = AgentTranscript_ as unknown as (props: Record) => JSX.Element; import { memo, useEffect, useMemo, type MutableRefObject, type RefObject } from "react"; + +// Cast needed: tsup-generated .d.ts returns react_jsx_runtime.JSX.Element which +// doesn't unify with the consumer's JSX.Element under Bundler moduleResolution. 
+// eslint-disable-next-line @typescript-eslint/no-explicit-any +const AgentTranscript = AgentTranscript_ as any as React.FC<{ + entries: TranscriptEntry[]; + classNames?: Partial; + scrollRef?: RefObject; + scrollToEntryId?: string | null; + virtualize?: boolean; + isThinking?: boolean; + renderMessageText?: (entry: TranscriptEntry) => React.ReactNode; + renderThinkingState?: () => React.ReactNode; +}>; import { useStyletron } from "baseui"; import { LabelSmall, LabelXSmall } from "baseui/typography"; import { Copy } from "lucide-react"; @@ -156,7 +167,7 @@ export const MessageList = memo(function MessageList({ pendingMessage, }: { session: AgentSession | null | undefined; - scrollRef: RefObject; + scrollRef: RefObject; messageRefs: MutableRefObject>; historyEvents: HistoryEvent[]; onSelectHistoryEvent: (event: HistoryEvent) => void; diff --git a/frontend/packages/inspector/package.json b/frontend/packages/inspector/package.json index 45b7224..d17c3a0 100644 --- a/frontend/packages/inspector/package.json +++ b/frontend/packages/inspector/package.json @@ -14,8 +14,8 @@ "devDependencies": { "@sandbox-agent/react": "workspace:*", "sandbox-agent": "workspace:*", - "@types/react": "^18.3.3", - "@types/react-dom": "^18.3.0", + "@types/react": "^19.1.12", + "@types/react-dom": "^19.1.6", "@vitejs/plugin-react": "^4.3.1", "fake-indexeddb": "^6.2.4", "typescript": "^5.7.3", diff --git a/frontend/packages/inspector/src/components/chat/ChatPanel.tsx b/frontend/packages/inspector/src/components/chat/ChatPanel.tsx index 5afc259..9203105 100644 --- a/frontend/packages/inspector/src/components/chat/ChatPanel.tsx +++ b/frontend/packages/inspector/src/components/chat/ChatPanel.tsx @@ -71,7 +71,7 @@ const ChatPanel = ({ agents: AgentInfo[]; agentsLoading: boolean; agentsError: string | null; - scrollRef: RefObject; + scrollRef: RefObject; agentLabel: string; modelLabel?: string | null; currentAgentVersion?: string | null; diff --git 
a/frontend/packages/inspector/src/components/chat/InspectorConversation.tsx b/frontend/packages/inspector/src/components/chat/InspectorConversation.tsx index 5d3c007..f14e39e 100644 --- a/frontend/packages/inspector/src/components/chat/InspectorConversation.tsx +++ b/frontend/packages/inspector/src/components/chat/InspectorConversation.tsx @@ -84,7 +84,7 @@ export interface InspectorConversationProps { entries: TranscriptEntry[]; sessionError: string | null; eventError?: string | null; - scrollRef: RefObject; + scrollRef: RefObject; onEventClick?: (eventId: string) => void; isThinking?: boolean; agentId?: string; diff --git a/package.json b/package.json index 41a699f..c10fd76 100644 --- a/package.json +++ b/package.json @@ -20,8 +20,8 @@ }, "pnpm": { "overrides": { - "@types/react": "^18.3.3", - "@types/react-dom": "^18.3.0" + "@types/react": "^19.1.12", + "@types/react-dom": "^19.1.6" } } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 46a59b7..8824736 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -5,8 +5,8 @@ settings: excludeLinksFromLockfile: false overrides: - '@types/react': ^18.3.3 - '@types/react-dom': ^18.3.0 + '@types/react': ^19.1.12 + '@types/react-dom': ^19.1.6 importers: @@ -78,11 +78,11 @@ importers: specifier: latest version: 25.5.0 '@types/react': - specifier: ^18.3.3 - version: 18.3.27 + specifier: ^19.1.12 + version: 19.1.12 '@types/react-dom': - specifier: ^18.3.0 - version: 18.3.7(@types/react@18.3.27) + specifier: ^19.1.6 + version: 19.2.3(@types/react@19.1.12) '@vitejs/plugin-react': specifier: ^4.5.0 version: 4.7.0(vite@6.4.1(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) @@ -549,8 +549,8 @@ importers: version: link:../../../sdks/typescript devDependencies: '@types/react': - specifier: ^18.3.3 - version: 18.3.27 + specifier: ^19.1.12 + version: 19.1.12 tsup: specifier: ^8.5.0 version: 8.5.1(jiti@1.21.7)(postcss@8.5.6)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.2) @@ -577,7 +577,7 @@ importers: version: 
3.13.22(react-dom@19.2.4(react@19.2.4))(react@19.2.4) baseui: specifier: ^16.1.1 - version: 16.1.1(@types/react@18.3.27)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(styletron-react@6.1.1(react@19.2.4)) + version: 16.1.1(@types/react@19.1.12)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(styletron-react@6.1.1(react@19.2.4)) lucide-react: specifier: ^0.542.0 version: 0.542.0(react@19.2.4) @@ -599,19 +599,19 @@ importers: devDependencies: '@react-grab/mcp': specifier: ^0.1.13 - version: 0.1.27(@types/react@18.3.27)(react@19.2.4) + version: 0.1.27(@types/react@19.1.12)(react@19.2.4) '@types/react': - specifier: ^18.3.3 - version: 18.3.27 + specifier: ^19.1.12 + version: 19.1.12 '@types/react-dom': - specifier: ^18.3.0 - version: 18.3.7(@types/react@18.3.27) + specifier: ^19.1.6 + version: 19.2.3(@types/react@19.1.12) '@vitejs/plugin-react': specifier: ^5.0.3 version: 5.1.4(vite@7.3.1(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) react-grab: specifier: ^0.1.13 - version: 0.1.27(@types/react@18.3.27)(react@19.2.4) + version: 0.1.27(@types/react@19.1.12)(react@19.2.4) tsup: specifier: ^8.5.0 version: 8.5.1(jiti@1.21.7)(postcss@8.5.6)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.2) @@ -651,11 +651,11 @@ importers: specifier: workspace:* version: link:../../../sdks/react '@types/react': - specifier: ^18.3.3 - version: 18.3.27 + specifier: ^19.1.12 + version: 19.1.12 '@types/react-dom': - specifier: ^18.3.0 - version: 18.3.7(@types/react@18.3.27) + specifier: ^19.1.6 + version: 19.2.3(@types/react@19.1.12) '@vitejs/plugin-react': specifier: ^4.3.1 version: 4.7.0(vite@5.4.21(@types/node@25.5.0)) @@ -679,7 +679,7 @@ importers: dependencies: '@astrojs/react': specifier: ^4.2.0 - version: 4.4.2(@types/node@25.5.0)(@types/react-dom@18.3.7(@types/react@18.3.27))(@types/react@18.3.27)(jiti@1.21.7)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(tsx@4.21.0)(yaml@2.8.2) + version: 
4.4.2(@types/node@25.5.0)(@types/react-dom@19.2.3(@types/react@19.1.12))(@types/react@19.1.12)(jiti@1.21.7)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(tsx@4.21.0)(yaml@2.8.2) '@astrojs/sitemap': specifier: ^3.2.0 version: 3.7.0 @@ -706,11 +706,11 @@ importers: version: 3.4.19(tsx@4.21.0)(yaml@2.8.2) devDependencies: '@types/react': - specifier: ^18.3.3 - version: 18.3.27 + specifier: ^19.1.12 + version: 19.1.12 '@types/react-dom': - specifier: ^18.3.0 - version: 18.3.7(@types/react@18.3.27) + specifier: ^19.1.6 + version: 19.2.3(@types/react@19.1.12) typescript: specifier: ^5.7.0 version: 5.9.3 @@ -942,17 +942,17 @@ importers: dependencies: '@tanstack/react-virtual': specifier: ^3.13.22 - version: 3.13.22(react-dom@19.2.4(react@18.3.1))(react@18.3.1) + version: 3.13.22(react-dom@19.2.4(react@19.1.1))(react@19.1.1) ghostty-web: specifier: ^0.4.0 version: 0.4.0 devDependencies: '@types/react': - specifier: ^18.3.3 - version: 18.3.27 + specifier: ^19.1.12 + version: 19.1.12 react: - specifier: ^18.3.1 - version: 18.3.1 + specifier: ^19.1.1 + version: 19.1.1 sandbox-agent: specifier: workspace:* version: link:../typescript @@ -1083,8 +1083,8 @@ packages: resolution: {integrity: sha512-1tl95bpGfuaDMDn8O3x/5Dxii1HPvzjvpL2YTuqOOrQehs60I2DKiDgh1jrKc7G8lv+LQT5H15V6QONQ+9waeQ==} engines: {node: 18.20.8 || ^20.3.0 || >=22.0.0} peerDependencies: - '@types/react': ^18.3.3 - '@types/react-dom': ^18.3.0 + '@types/react': ^19.1.12 + '@types/react-dom': ^19.1.6 react: ^17.0.2 || ^18.0.0 || ^19.0.0 react-dom: ^17.0.2 || ^18.0.0 || ^19.0.0 @@ -3588,21 +3588,18 @@ packages: '@types/pg@8.18.0': resolution: {integrity: sha512-gT+oueVQkqnj6ajGJXblFR4iavIXWsGAFCk3dP4Kki5+a9R4NMt0JARdk6s8cUKcfUoqP5dAtDSLU8xYUTFV+Q==} - '@types/prop-types@15.7.15': - resolution: {integrity: sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==} - - '@types/react-dom@18.3.7': - resolution: {integrity: 
sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==} + '@types/react-dom@19.2.3': + resolution: {integrity: sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==} peerDependencies: - '@types/react': ^18.3.3 + '@types/react': ^19.1.12 '@types/react-reconciler@0.28.9': resolution: {integrity: sha512-HHM3nxyUZ3zAylX8ZEyrDNd2XZOnQ0D5XfunJF5FLQnZbHHYq4UWvW1QfelQNXv1ICNkwYhfxjwfnqivYB6bFg==} peerDependencies: - '@types/react': ^18.3.3 + '@types/react': ^19.1.12 - '@types/react@18.3.27': - resolution: {integrity: sha512-cisd7gxkzjBKU2GgdYrTdtQx1SORymWyaAFhaxQPK9bYO9ot3Y5OikQRvY0VYQtvwjeQnizCINJAenh/V7MK2w==} + '@types/react@19.1.12': + resolution: {integrity: sha512-cMoR+FoAf/Jyq6+Df2/Z41jISvGZZ2eTlnsaJRptmZ76Caldwy1odD4xTr/gNV9VLj0AWgg/nmkevIyUfIIq5w==} '@types/retry@0.12.2': resolution: {integrity: sha512-XISRgDJ2Tc5q4TRqvgJtzsRkFYNJzZrhTdtMoGVBttwzzQJkPnS3WWTFc7kuDRoPtPakl+T+OfdEUjYJj7Jbow==} @@ -6076,7 +6073,7 @@ packages: react-focus-lock@2.13.7: resolution: {integrity: sha512-20lpZHEQrXPb+pp1tzd4ULL6DyO5D2KnR0G69tTDdydrmNhU7pdFmbQUYVyHUgp+xN29IuFR0PVuhOmvaZL9Og==} peerDependencies: - '@types/react': ^18.3.3 + '@types/react': ^19.1.12 react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc peerDependenciesMeta: '@types/react': @@ -6145,7 +6142,7 @@ packages: resolution: {integrity: sha512-tsPZ77GR0pISGYmpCLHAbZTabKXZ7zBniKPVqVMMfnXFyo39zq5g/psIlD5vLTKkjQEhWOO8JhqcHnxkwNu6eA==} engines: {node: '>=8.5.0'} peerDependencies: - '@types/react': ^18.3.3 + '@types/react': ^19.1.12 react: ^16.8.0 peerDependenciesMeta: '@types/react': @@ -6175,6 +6172,10 @@ packages: resolution: {integrity: sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==} engines: {node: '>=0.10.0'} + react@19.1.1: + resolution: {integrity: sha512-w8nqGImo45dmMIfljjMwOGtbmC/mk4CMYhWIicdSflH91J9TyCyczcPFXJzrZ/ZXcgGRFeP6BU0BEJTw6tZdfQ==} + engines: {node: '>=0.10.0'} + 
react@19.2.4: resolution: {integrity: sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ==} engines: {node: '>=0.10.0'} @@ -6980,7 +6981,7 @@ packages: resolution: {integrity: sha512-jQL3lRnocaFtu3V00JToYz/4QkNWswxijDaCVNZRiRTO3HQDLsdu1ZtmIUvV4yPp+rvWm5j0y0TG/S61cuijTg==} engines: {node: '>=10'} peerDependencies: - '@types/react': ^18.3.3 + '@types/react': ^19.1.12 react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc peerDependenciesMeta: '@types/react': @@ -6990,7 +6991,7 @@ packages: resolution: {integrity: sha512-Fedw0aZvkhynoPYlA5WXrMCAMm+nSWdZt6lzJQ7Ok8S6Q+VsHmHpRWndVRJ8Be0ZbkfPc5LRYH+5XrzXcEeLRQ==} engines: {node: '>=10'} peerDependencies: - '@types/react': ^18.3.3 + '@types/react': ^19.1.12 react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc peerDependenciesMeta: '@types/react': @@ -7413,10 +7414,10 @@ snapshots: dependencies: prismjs: 1.30.0 - '@astrojs/react@4.4.2(@types/node@25.5.0)(@types/react-dom@18.3.7(@types/react@18.3.27))(@types/react@18.3.27)(jiti@1.21.7)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(tsx@4.21.0)(yaml@2.8.2)': + '@astrojs/react@4.4.2(@types/node@25.5.0)(@types/react-dom@19.2.3(@types/react@19.1.12))(@types/react@19.1.12)(jiti@1.21.7)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(tsx@4.21.0)(yaml@2.8.2)': dependencies: - '@types/react': 18.3.27 - '@types/react-dom': 18.3.7(@types/react@18.3.27) + '@types/react': 19.1.12 + '@types/react-dom': 19.2.3(@types/react@19.1.12) '@vitejs/plugin-react': 4.7.0(vite@6.4.1(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) react: 19.2.4 react-dom: 19.2.4(react@19.2.4) @@ -9416,11 +9417,11 @@ snapshots: prompts: 2.4.2 smol-toml: 1.6.0 - '@react-grab/mcp@0.1.27(@types/react@18.3.27)(react@19.2.4)': + '@react-grab/mcp@0.1.27(@types/react@19.1.12)(react@19.2.4)': dependencies: '@modelcontextprotocol/sdk': 1.27.1(zod@3.25.76) fkill: 9.0.0 - react-grab: 0.1.27(@types/react@18.3.27)(react@19.2.4) + react-grab: 
0.1.27(@types/react@19.1.12)(react@19.2.4) zod: 3.25.76 transitivePeerDependencies: - '@cfworker/json-schema' @@ -10033,11 +10034,11 @@ snapshots: react-dom: 19.2.4(react@19.2.4) use-sync-external-store: 1.6.0(react@19.2.4) - '@tanstack/react-virtual@3.13.22(react-dom@19.2.4(react@18.3.1))(react@18.3.1)': + '@tanstack/react-virtual@3.13.22(react-dom@19.2.4(react@19.1.1))(react@19.1.1)': dependencies: '@tanstack/virtual-core': 3.13.22 - react: 18.3.1 - react-dom: 19.2.4(react@18.3.1) + react: 19.1.1 + react-dom: 19.2.4(react@19.1.1) '@tanstack/react-virtual@3.13.22(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: @@ -10154,19 +10155,16 @@ snapshots: pg-protocol: 1.11.0 pg-types: 2.2.0 - '@types/prop-types@15.7.15': {} - - '@types/react-dom@18.3.7(@types/react@18.3.27)': + '@types/react-dom@19.2.3(@types/react@19.1.12)': dependencies: - '@types/react': 18.3.27 + '@types/react': 19.1.12 - '@types/react-reconciler@0.28.9(@types/react@18.3.27)': + '@types/react-reconciler@0.28.9(@types/react@19.1.12)': dependencies: - '@types/react': 18.3.27 + '@types/react': 19.1.12 - '@types/react@18.3.27': + '@types/react@19.1.12': dependencies: - '@types/prop-types': 15.7.15 csstype: 3.2.3 '@types/retry@0.12.2': {} @@ -10523,7 +10521,7 @@ snapshots: baseline-browser-mapping@2.9.18: {} - baseui@16.1.1(@types/react@18.3.27)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(styletron-react@6.1.1(react@19.2.4)): + baseui@16.1.1(@types/react@19.1.12)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(styletron-react@6.1.1(react@19.2.4)): dependencies: '@date-io/date-fns': 2.17.0(date-fns@2.30.0) '@date-io/moment': 2.17.0(moment@2.30.1) @@ -10543,7 +10541,7 @@ snapshots: react: 19.2.4 react-dom: 19.2.4(react@19.2.4) react-dropzone: 9.0.0(react@19.2.4) - react-focus-lock: 2.13.7(@types/react@18.3.27)(react@19.2.4) + react-focus-lock: 2.13.7(@types/react@19.1.12)(react@19.2.4) react-hook-form: 7.71.2(react@19.2.4) react-input-mask: 2.0.4(react-dom@19.2.4(react@19.2.4))(react@19.2.4) 
react-is: 17.0.2 @@ -10551,7 +10549,7 @@ snapshots: react-movable: 3.4.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4) react-multi-ref: 1.0.2 react-range: 1.10.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4) - react-uid: 2.3.0(@types/react@18.3.27)(react@19.2.4) + react-uid: 2.3.0(@types/react@19.1.12)(react@19.2.4) react-virtualized: 9.22.6(react-dom@19.2.4(react@19.2.4))(react@19.2.4) react-virtualized-auto-sizer: 1.0.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4) react-window: 1.8.5(react-dom@19.2.4(react@19.2.4))(react@19.2.4) @@ -10615,9 +10613,9 @@ snapshots: dependencies: file-uri-to-path: 1.0.0 - bippy@0.5.32(@types/react@18.3.27)(react@19.2.4): + bippy@0.5.32(@types/react@19.1.12)(react@19.2.4): dependencies: - '@types/react-reconciler': 0.28.9(@types/react@18.3.27) + '@types/react-reconciler': 0.28.9(@types/react@19.1.12) react: 19.2.4 transitivePeerDependencies: - '@types/react' @@ -13009,9 +13007,9 @@ snapshots: react: 18.3.1 scheduler: 0.23.2 - react-dom@19.2.4(react@18.3.1): + react-dom@19.2.4(react@19.1.1): dependencies: - react: 18.3.1 + react: 19.1.1 scheduler: 0.27.0 react-dom@19.2.4(react@19.2.4): @@ -13027,23 +13025,23 @@ snapshots: prop-types-extra: 1.1.1(react@19.2.4) react: 19.2.4 - react-focus-lock@2.13.7(@types/react@18.3.27)(react@19.2.4): + react-focus-lock@2.13.7(@types/react@19.1.12)(react@19.2.4): dependencies: '@babel/runtime': 7.28.6 focus-lock: 1.3.6 prop-types: 15.8.1 react: 19.2.4 react-clientside-effect: 1.2.8(react@19.2.4) - use-callback-ref: 1.3.3(@types/react@18.3.27)(react@19.2.4) - use-sidecar: 1.1.3(@types/react@18.3.27)(react@19.2.4) + use-callback-ref: 1.3.3(@types/react@19.1.12)(react@19.2.4) + use-sidecar: 1.1.3(@types/react@19.1.12)(react@19.2.4) optionalDependencies: - '@types/react': 18.3.27 + '@types/react': 19.1.12 - react-grab@0.1.27(@types/react@18.3.27)(react@19.2.4): + react-grab@0.1.27(@types/react@19.1.12)(react@19.2.4): dependencies: '@medv/finder': 4.0.2 '@react-grab/cli': 0.1.27 - bippy: 
0.5.32(@types/react@18.3.27)(react@19.2.4) + bippy: 0.5.32(@types/react@19.1.12)(react@19.2.4) solid-js: 1.9.11 optionalDependencies: react: 19.2.4 @@ -13097,12 +13095,12 @@ snapshots: react-refresh@0.18.0: {} - react-uid@2.3.0(@types/react@18.3.27)(react@19.2.4): + react-uid@2.3.0(@types/react@19.1.12)(react@19.2.4): dependencies: react: 19.2.4 tslib: 1.14.1 optionalDependencies: - '@types/react': 18.3.27 + '@types/react': 19.1.12 react-virtualized-auto-sizer@1.0.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4): dependencies: @@ -13131,6 +13129,8 @@ snapshots: dependencies: loose-envify: 1.4.0 + react@19.1.1: {} + react@19.2.4: {} read-cache@1.0.0: @@ -14068,20 +14068,20 @@ snapshots: escalade: 3.2.0 picocolors: 1.1.1 - use-callback-ref@1.3.3(@types/react@18.3.27)(react@19.2.4): + use-callback-ref@1.3.3(@types/react@19.1.12)(react@19.2.4): dependencies: react: 19.2.4 tslib: 2.8.1 optionalDependencies: - '@types/react': 18.3.27 + '@types/react': 19.1.12 - use-sidecar@1.1.3(@types/react@18.3.27)(react@19.2.4): + use-sidecar@1.1.3(@types/react@19.1.12)(react@19.2.4): dependencies: detect-node-es: 1.1.0 react: 19.2.4 tslib: 2.8.1 optionalDependencies: - '@types/react': 18.3.27 + '@types/react': 19.1.12 use-sync-external-store@1.6.0(react@19.2.4): dependencies: diff --git a/scripts/release/update_version.ts b/scripts/release/update_version.ts index d603156..9d39beb 100644 --- a/scripts/release/update_version.ts +++ b/scripts/release/update_version.ts @@ -2,6 +2,7 @@ import * as fs from "node:fs/promises"; import { join } from "node:path"; import { $ } from "execa"; import { glob } from "glob"; +import * as semver from "semver"; import type { ReleaseOpts } from "./main"; function assert(condition: any, message?: string): asserts condition { @@ -10,13 +11,56 @@ function assert(condition: any, message?: string): asserts condition { } } +// Files containing version references that need channel/image tag updates. 
+// Keep in sync with CLAUDE.md "Install Version References" section. +const VERSION_REFERENCE_FILES = [ + "README.md", + "docs/acp-http-client.mdx", + "docs/cli.mdx", + "docs/quickstart.mdx", + "docs/sdk-overview.mdx", + "docs/react-components.mdx", + "docs/session-persistence.mdx", + "docs/architecture.mdx", + "docs/deploy/local.mdx", + "docs/deploy/cloudflare.mdx", + "docs/deploy/vercel.mdx", + "docs/deploy/daytona.mdx", + "docs/deploy/e2b.mdx", + "docs/deploy/docker.mdx", + "docs/deploy/boxlite.mdx", + "docs/deploy/modal.mdx", + "docs/deploy/computesdk.mdx", + "frontend/packages/website/src/components/GetStarted.tsx", + ".claude/commands/post-release-testing.md", + "examples/cloudflare/Dockerfile", + "examples/boxlite/Dockerfile", + "examples/boxlite-python/Dockerfile", + "examples/daytona/src/index.ts", + "examples/shared/src/docker.ts", + "examples/docker/src/index.ts", + "examples/e2b/src/index.ts", + "examples/vercel/src/index.ts", + "sdks/typescript/src/providers/shared.ts", + "scripts/release/main.ts", + "scripts/release/promote-artifacts.ts", + "scripts/release/sdk.ts", + "scripts/sandbox-testing/test-sandbox.ts", +]; + export async function updateVersion(opts: ReleaseOpts) { - // 1. Update workspace version and internal crate versions in root Cargo.toml + // 1. 
Read current version from Cargo.toml before overwriting const cargoTomlPath = join(opts.root, "Cargo.toml"); let cargoContent = await fs.readFile(cargoTomlPath, "utf-8"); + const oldVersionMatch = cargoContent.match(/\[workspace\.package\]\nversion = "([^"]+)"/); + assert(oldVersionMatch, "Could not find workspace.package version in Cargo.toml"); + const oldVersion = oldVersionMatch[1]; + const oldParsed = semver.parse(oldVersion); + assert(oldParsed, `Could not parse old version: ${oldVersion}`); + const oldMinorChannel = `${oldParsed.major}.${oldParsed.minor}.x`; + // Update [workspace.package] version - assert(/\[workspace\.package\]\nversion = ".*"/.test(cargoContent), "Could not find workspace.package version in Cargo.toml"); cargoContent = cargoContent.replace(/\[workspace\.package\]\nversion = ".*"/, `[workspace.package]\nversion = "${opts.version}"`); // Discover internal crates from [workspace.dependencies] by matching @@ -69,4 +113,70 @@ export async function updateVersion(opts: ReleaseOpts) { await fs.writeFile(fullPath, updated); await $({ cwd: opts.root })`git add ${relPath}`; } + + // 3. 
Update version references across docs, examples, and code + await updateVersionReferences(opts, oldVersion, oldMinorChannel); +} + +async function updateVersionReferences(opts: ReleaseOpts, oldVersion: string, oldMinorChannel: string) { + const newMinorChannel = opts.minorVersionChannel; + + // Find old Docker image tags by scanning for rivetdev/sandbox-agent:-full patterns + // The old version might be a different patch or RC, so we match any version-full tag + const oldDockerTagPattern = /rivetdev\/sandbox-agent:([0-9]+\.[0-9]+\.[0-9]+(?:-[a-zA-Z0-9.]+)?)-full/; + + console.log(`\nUpdating version references:`); + console.log(` Old minor channel: ${oldMinorChannel}`); + console.log(` New minor channel: ${newMinorChannel}`); + console.log(` New Docker tag: ${opts.version}-full`); + + const modifiedFiles: string[] = []; + + for (const relPath of VERSION_REFERENCE_FILES) { + const fullPath = join(opts.root, relPath); + + let content: string; + try { + content = await fs.readFile(fullPath, "utf-8"); + } catch (err: any) { + if (err.code === "ENOENT") { + console.log(` ⚠️ Skipping ${relPath} (file not found)`); + continue; + } + throw err; + } + + const original = content; + + // Replace minor channel references (e.g. 
sandbox-agent@0.3.x -> sandbox-agent@0.4.x) + content = content.replaceAll(`sandbox-agent@${oldMinorChannel}`, `sandbox-agent@${newMinorChannel}`); + content = content.replaceAll(`@sandbox-agent/cli@${oldMinorChannel}`, `@sandbox-agent/cli@${newMinorChannel}`); + content = content.replaceAll(`@sandbox-agent/react@${oldMinorChannel}`, `@sandbox-agent/react@${newMinorChannel}`); + + // Replace install script URL channel + content = content.replaceAll(`releases.rivet.dev/sandbox-agent/${oldMinorChannel}/`, `releases.rivet.dev/sandbox-agent/${newMinorChannel}/`); + + // Replace Docker image tags (rivetdev/sandbox-agent:-full -> rivetdev/sandbox-agent:-full) + content = content.replaceAll( + new RegExp(`rivetdev/sandbox-agent:[0-9]+\\.[0-9]+\\.[0-9]+(?:-[a-zA-Z0-9.]+)?-full`, "g"), + `rivetdev/sandbox-agent:${opts.version}-full`, + ); + + // Replace standalone version-full references in prose (e.g. "The `0.3.2-full` tag pins...") + // Match backtick-wrapped version-full patterns + content = content.replaceAll(new RegExp("`[0-9]+\\.[0-9]+\\.[0-9]+(?:-[a-zA-Z0-9.]+)?-full`", "g"), `\`${opts.version}-full\``); + + if (content !== original) { + await fs.writeFile(fullPath, content); + modifiedFiles.push(relPath); + console.log(` ✅ ${relPath}`); + } + } + + if (modifiedFiles.length > 0) { + await $({ cwd: opts.root })`git add ${modifiedFiles}`; + console.log(`\nUpdated ${modifiedFiles.length} files with version references.`); + } else { + console.log(`\nNo version reference files needed updates.`); + } } diff --git a/sdks/react/package.json b/sdks/react/package.json index 6bc21ec..b82892a 100644 --- a/sdks/react/package.json +++ b/sdks/react/package.json @@ -24,7 +24,7 @@ "typecheck": "tsc --noEmit" }, "peerDependencies": { - "react": "^18.3.1", + "react": "^18.3.1 || ^19.0.0", "sandbox-agent": "^0.2.2" }, "dependencies": { @@ -32,8 +32,8 @@ "ghostty-web": "^0.4.0" }, "devDependencies": { - "@types/react": "^18.3.3", - "react": "^18.3.1", + "@types/react": "^19.1.12", + 
"react": "^19.1.1", "sandbox-agent": "workspace:*", "tsup": "^8.0.0", "typescript": "^5.7.0" diff --git a/sdks/react/src/AgentConversation.tsx b/sdks/react/src/AgentConversation.tsx index acc9466..44dd0de 100644 --- a/sdks/react/src/AgentConversation.tsx +++ b/sdks/react/src/AgentConversation.tsx @@ -18,7 +18,7 @@ export interface AgentConversationProps { emptyState?: ReactNode; transcriptClassName?: string; transcriptClassNames?: Partial; - scrollRef?: RefObject; + scrollRef?: RefObject; composerClassName?: string; composerClassNames?: Partial; transcriptProps?: Omit; diff --git a/sdks/react/src/AgentTranscript.tsx b/sdks/react/src/AgentTranscript.tsx index b565081..03fa2dc 100644 --- a/sdks/react/src/AgentTranscript.tsx +++ b/sdks/react/src/AgentTranscript.tsx @@ -98,8 +98,8 @@ export interface AgentTranscriptProps { entries: TranscriptEntry[]; className?: string; classNames?: Partial; - endRef?: RefObject; - scrollRef?: RefObject; + endRef?: RefObject; + scrollRef?: RefObject; scrollToEntryId?: string | null; sessionError?: string | null; eventError?: string | null; diff --git a/sdks/react/src/useTranscriptVirtualizer.ts b/sdks/react/src/useTranscriptVirtualizer.ts index dc52717..a90d75f 100644 --- a/sdks/react/src/useTranscriptVirtualizer.ts +++ b/sdks/react/src/useTranscriptVirtualizer.ts @@ -4,7 +4,7 @@ import type { RefObject } from "react"; import { useEffect, useRef } from "react"; import { useVirtualizer } from "@tanstack/react-virtual"; -export function useTranscriptVirtualizer(items: T[], scrollElementRef?: RefObject, onAtBottomChange?: (atBottom: boolean) => void) { +export function useTranscriptVirtualizer(items: T[], scrollElementRef?: RefObject, onAtBottomChange?: (atBottom: boolean) => void) { const isFollowingRef = useRef(true); const virtualizer = useVirtualizer({ diff --git a/server/packages/agent-management/src/agents.rs b/server/packages/agent-management/src/agents.rs index 1ea051c..fa7e243 100644 --- 
a/server/packages/agent-management/src/agents.rs +++ b/server/packages/agent-management/src/agents.rs @@ -1090,9 +1090,9 @@ fn write_mock_agent_process_launcher(path: &Path) -> Result<(), AgentError> { fs::create_dir_all(parent)?; } let script = if cfg!(windows) { - "@echo off\r\necho mock agent process is in-process in sandbox-agent\r\nexit /b 1\r\n" + "@echo off\r\nsandbox-agent mock-agent-process %*\r\n" } else { - "#!/usr/bin/env sh\necho 'mock agent process is in-process in sandbox-agent'\nexit 1\n" + "#!/usr/bin/env sh\nexec sandbox-agent mock-agent-process \"$@\"\n" }; write_text_file(path, script) } diff --git a/server/packages/sandbox-agent/src/cli.rs b/server/packages/sandbox-agent/src/cli.rs index 1c12e4b..51757b6 100644 --- a/server/packages/sandbox-agent/src/cli.rs +++ b/server/packages/sandbox-agent/src/cli.rs @@ -83,6 +83,9 @@ pub enum Command { InstallAgent(InstallAgentArgs), /// Inspect locally discovered credentials. Credentials(CredentialsArgs), + /// Internal: stdio JSON-RPC echo agent for the mock agent process. 
+ #[command(hide = true)] + MockAgentProcess, } #[derive(Args, Debug)] @@ -406,6 +409,7 @@ pub fn run_command(command: &Command, cli: &CliConfig) -> Result<(), CliError> { Command::Daemon(subcommand) => run_daemon(&subcommand.command, cli), Command::InstallAgent(args) => install_agent_local(args), Command::Credentials(subcommand) => run_credentials(&subcommand.command), + Command::MockAgentProcess => run_mock_agent_process(), } } @@ -929,6 +933,71 @@ fn run_credentials(command: &CredentialsCommand) -> Result<(), CliError> { } } +fn run_mock_agent_process() -> Result<(), CliError> { + use std::io::BufRead; + + let stdin = std::io::stdin(); + let reader = stdin.lock(); + + for line in reader.lines() { + let line = line.map_err(|e| CliError::Server(format!("stdin read error: {}", e)))?; + if line.trim().is_empty() { + continue; + } + + let msg: Value = match serde_json::from_str(&line) { + Ok(v) => v, + Err(e) => { + let err_notification = json!({ + "jsonrpc": "2.0", + "method": "mock/parse_error", + "params": { + "error": e.to_string(), + "raw": line, + } + }); + write_stdout_line(&serde_json::to_string(&err_notification)?)?; + continue; + } + }; + + // Echo notification for every message + let echo = json!({ + "jsonrpc": "2.0", + "method": "mock/echo", + "params": { "message": msg } + }); + write_stdout_line(&serde_json::to_string(&echo)?)?; + + let has_method = msg.get("method").and_then(|v| v.as_str()).is_some(); + let has_id = msg.get("id").is_some(); + + if has_method && has_id { + // Request -> respond with echo result + let response = json!({ + "jsonrpc": "2.0", + "id": msg["id"], + "result": { "echoed": msg } + }); + write_stdout_line(&serde_json::to_string(&response)?)?; + } else if !has_method && has_id { + // Client response + let notification = json!({ + "jsonrpc": "2.0", + "method": "mock/client_response", + "params": { + "id": msg["id"], + "result": msg.get("result").unwrap_or(&Value::Null), + "error": msg.get("error").unwrap_or(&Value::Null), + } + }); 
+ write_stdout_line(&serde_json::to_string(&notification)?)?; + } + } + + Ok(()) +} + fn load_json_payload( json_inline: Option<&str>, json_file: Option<&std::path::Path>, From 56c80e6c9ebd69d7bb24676c1a4d830b141e9054 Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Sun, 15 Mar 2026 22:38:30 -0700 Subject: [PATCH 13/48] chore(release): update version to 0.4.0-rc.2 --- Cargo.toml | 16 ++++++++-------- docs/architecture.mdx | 2 +- docs/deploy/daytona.mdx | 2 +- docs/deploy/docker.mdx | 6 +++--- docs/openapi.json | 2 +- docs/quickstart.mdx | 2 +- examples/shared/src/docker.ts | 2 +- sdks/acp-http-client/package.json | 2 +- sdks/cli-shared/package.json | 2 +- sdks/cli/package.json | 2 +- sdks/cli/platforms/darwin-arm64/package.json | 2 +- sdks/cli/platforms/darwin-x64/package.json | 2 +- sdks/cli/platforms/linux-arm64/package.json | 2 +- sdks/cli/platforms/linux-x64/package.json | 2 +- sdks/cli/platforms/win32-x64/package.json | 2 +- sdks/gigacode/package.json | 2 +- .../gigacode/platforms/darwin-arm64/package.json | 2 +- sdks/gigacode/platforms/darwin-x64/package.json | 2 +- sdks/gigacode/platforms/linux-arm64/package.json | 2 +- sdks/gigacode/platforms/linux-x64/package.json | 2 +- sdks/gigacode/platforms/win32-x64/package.json | 2 +- sdks/persist-indexeddb/package.json | 2 +- sdks/persist-postgres/package.json | 2 +- sdks/persist-rivet/package.json | 2 +- sdks/persist-sqlite/package.json | 2 +- sdks/react/package.json | 2 +- sdks/typescript/package.json | 2 +- sdks/typescript/src/providers/shared.ts | 2 +- 28 files changed, 37 insertions(+), 37 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 0865385..561e502 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,7 @@ members = ["server/packages/*", "gigacode"] exclude = ["factory/packages/desktop/src-tauri", "foundry/packages/desktop/src-tauri"] [workspace.package] -version = "0.4.0-rc.1" +version = "0.4.0-rc.2" edition = "2021" authors = [ "Rivet Gaming, LLC " ] license = "Apache-2.0" @@ -13,13 +13,13 @@
description = "Universal API for automatic coding agents in sandboxes. Supports [workspace.dependencies] # Internal crates -sandbox-agent = { version = "0.4.0-rc.1", path = "server/packages/sandbox-agent" } -sandbox-agent-error = { version = "0.4.0-rc.1", path = "server/packages/error" } -sandbox-agent-agent-management = { version = "0.4.0-rc.1", path = "server/packages/agent-management" } -sandbox-agent-agent-credentials = { version = "0.4.0-rc.1", path = "server/packages/agent-credentials" } -sandbox-agent-opencode-adapter = { version = "0.4.0-rc.1", path = "server/packages/opencode-adapter" } -sandbox-agent-opencode-server-manager = { version = "0.4.0-rc.1", path = "server/packages/opencode-server-manager" } -acp-http-adapter = { version = "0.4.0-rc.1", path = "server/packages/acp-http-adapter" } +sandbox-agent = { version = "0.4.0-rc.2", path = "server/packages/sandbox-agent" } +sandbox-agent-error = { version = "0.4.0-rc.2", path = "server/packages/error" } +sandbox-agent-agent-management = { version = "0.4.0-rc.2", path = "server/packages/agent-management" } +sandbox-agent-agent-credentials = { version = "0.4.0-rc.2", path = "server/packages/agent-credentials" } +sandbox-agent-opencode-adapter = { version = "0.4.0-rc.2", path = "server/packages/opencode-adapter" } +sandbox-agent-opencode-server-manager = { version = "0.4.0-rc.2", path = "server/packages/opencode-server-manager" } +acp-http-adapter = { version = "0.4.0-rc.2", path = "server/packages/acp-http-adapter" } # Serialization serde = { version = "1.0", features = ["derive"] } diff --git a/docs/architecture.mdx b/docs/architecture.mdx index a28f133..ae00539 100644 --- a/docs/architecture.mdx +++ b/docs/architecture.mdx @@ -56,7 +56,7 @@ Agents are installed lazily on first use. To avoid the cold-start delay, pre-ins sandbox-agent install-agent --all ``` -The `rivetdev/sandbox-agent:0.3.2-full` Docker image ships with all agents pre-installed. 
+The `rivetdev/sandbox-agent:0.4.0-rc.2-full` Docker image ships with all agents pre-installed. ## Production-ready agent orchestration diff --git a/docs/deploy/daytona.mdx b/docs/deploy/daytona.mdx index b65aec9..1dc84ae 100644 --- a/docs/deploy/daytona.mdx +++ b/docs/deploy/daytona.mdx @@ -44,7 +44,7 @@ try { } ``` -The `daytona` provider uses the `rivetdev/sandbox-agent:0.3.2-full` image by default and starts the server automatically. +The `daytona` provider uses the `rivetdev/sandbox-agent:0.4.0-rc.2-full` image by default and starts the server automatically. ## Using snapshots for faster startup diff --git a/docs/deploy/docker.mdx b/docs/deploy/docker.mdx index b674b7a..232033b 100644 --- a/docs/deploy/docker.mdx +++ b/docs/deploy/docker.mdx @@ -15,11 +15,11 @@ Run the published full image with all supported agents pre-installed: docker run --rm -p 3000:3000 \ -e ANTHROPIC_API_KEY="$ANTHROPIC_API_KEY" \ -e OPENAI_API_KEY="$OPENAI_API_KEY" \ - rivetdev/sandbox-agent:0.3.2-full \ + rivetdev/sandbox-agent:0.4.0-rc.2-full \ server --no-token --host 0.0.0.0 --port 3000 ``` -The `0.3.2-full` tag pins the exact version. The moving `full` tag is also published for contributors who want the latest full image. +The `0.4.0-rc.2-full` tag pins the exact version. The moving `full` tag is also published for contributors who want the latest full image. ## TypeScript with the Docker provider @@ -48,7 +48,7 @@ try { } ``` -The `docker` provider uses the `rivetdev/sandbox-agent:0.3.2-full` image by default. Override with `image`: +The `docker` provider uses the `rivetdev/sandbox-agent:0.4.0-rc.2-full` image by default. 
Override with `image`: ```typescript docker({ image: "my-custom-image:latest" }) diff --git a/docs/openapi.json b/docs/openapi.json index 1beeb2b..58bec71 100644 --- a/docs/openapi.json +++ b/docs/openapi.json @@ -10,7 +10,7 @@ "license": { "name": "Apache-2.0" }, - "version": "0.4.0-rc.1" + "version": "0.4.0-rc.2" }, "servers": [ { diff --git a/docs/quickstart.mdx b/docs/quickstart.mdx index 19d9742..53f8f7d 100644 --- a/docs/quickstart.mdx +++ b/docs/quickstart.mdx @@ -242,7 +242,7 @@ icon: "rocket" docker run -p 2468:2468 \ -e ANTHROPIC_API_KEY="sk-ant-..." \ -e OPENAI_API_KEY="sk-..." \ - rivetdev/sandbox-agent:0.3.2-full \ + rivetdev/sandbox-agent:0.4.0-rc.2-full \ server --no-token --host 0.0.0.0 --port 2468 ``` diff --git a/examples/shared/src/docker.ts b/examples/shared/src/docker.ts index 8459535..89e95dd 100644 --- a/examples/shared/src/docker.ts +++ b/examples/shared/src/docker.ts @@ -9,7 +9,7 @@ const __dirname = path.dirname(fileURLToPath(import.meta.url)); const REPO_ROOT = path.resolve(__dirname, "..", "..", ".."); /** Pre-built Docker image with all agents installed. */ -export const FULL_IMAGE = "rivetdev/sandbox-agent:0.3.1-full"; +export const FULL_IMAGE = "rivetdev/sandbox-agent:0.4.0-rc.2-full"; export interface DockerSandboxOptions { /** Container port used by sandbox-agent inside Docker. 
*/ diff --git a/sdks/acp-http-client/package.json b/sdks/acp-http-client/package.json index b5d264f..22d5fb1 100644 --- a/sdks/acp-http-client/package.json +++ b/sdks/acp-http-client/package.json @@ -1,6 +1,6 @@ { "name": "acp-http-client", - "version": "0.4.0-rc.1", + "version": "0.4.0-rc.2", "description": "Protocol-faithful ACP JSON-RPC over streamable HTTP client.", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli-shared/package.json b/sdks/cli-shared/package.json index b2825fb..d028896 100644 --- a/sdks/cli-shared/package.json +++ b/sdks/cli-shared/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-shared", - "version": "0.4.0-rc.1", + "version": "0.4.0-rc.2", "description": "Shared helpers for sandbox-agent CLI and SDK", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/package.json b/sdks/cli/package.json index 17c0401..67a8b18 100644 --- a/sdks/cli/package.json +++ b/sdks/cli/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli", - "version": "0.4.0-rc.1", + "version": "0.4.0-rc.2", "description": "CLI for sandbox-agent - run AI coding agents in sandboxes", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/platforms/darwin-arm64/package.json b/sdks/cli/platforms/darwin-arm64/package.json index 2b5a7e8..2339503 100644 --- a/sdks/cli/platforms/darwin-arm64/package.json +++ b/sdks/cli/platforms/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-darwin-arm64", - "version": "0.4.0-rc.1", + "version": "0.4.0-rc.2", "description": "sandbox-agent CLI binary for macOS ARM64", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/platforms/darwin-x64/package.json b/sdks/cli/platforms/darwin-x64/package.json index 72f3044..f9c2fe7 100644 --- a/sdks/cli/platforms/darwin-x64/package.json +++ b/sdks/cli/platforms/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-darwin-x64", - "version": "0.4.0-rc.1", + "version": "0.4.0-rc.2", "description": "sandbox-agent CLI binary 
for macOS x64", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/platforms/linux-arm64/package.json b/sdks/cli/platforms/linux-arm64/package.json index 0d8d92a..ab100f9 100644 --- a/sdks/cli/platforms/linux-arm64/package.json +++ b/sdks/cli/platforms/linux-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-linux-arm64", - "version": "0.4.0-rc.1", + "version": "0.4.0-rc.2", "description": "sandbox-agent CLI binary for Linux arm64", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/platforms/linux-x64/package.json b/sdks/cli/platforms/linux-x64/package.json index 5d70adb..799650f 100644 --- a/sdks/cli/platforms/linux-x64/package.json +++ b/sdks/cli/platforms/linux-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-linux-x64", - "version": "0.4.0-rc.1", + "version": "0.4.0-rc.2", "description": "sandbox-agent CLI binary for Linux x64", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/platforms/win32-x64/package.json b/sdks/cli/platforms/win32-x64/package.json index e95b818..3c0728a 100644 --- a/sdks/cli/platforms/win32-x64/package.json +++ b/sdks/cli/platforms/win32-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-win32-x64", - "version": "0.4.0-rc.1", + "version": "0.4.0-rc.2", "description": "sandbox-agent CLI binary for Windows x64", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/package.json b/sdks/gigacode/package.json index 2f22d77..21b3fa9 100644 --- a/sdks/gigacode/package.json +++ b/sdks/gigacode/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode", - "version": "0.4.0-rc.1", + "version": "0.4.0-rc.2", "description": "Gigacode CLI (sandbox-agent with OpenCode attach by default)", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/platforms/darwin-arm64/package.json b/sdks/gigacode/platforms/darwin-arm64/package.json index 220ba65..5b2f54b 100644 --- a/sdks/gigacode/platforms/darwin-arm64/package.json +++ 
b/sdks/gigacode/platforms/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode-darwin-arm64", - "version": "0.4.0-rc.1", + "version": "0.4.0-rc.2", "description": "gigacode CLI binary for macOS arm64", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/platforms/darwin-x64/package.json b/sdks/gigacode/platforms/darwin-x64/package.json index 8912854..eadd657 100644 --- a/sdks/gigacode/platforms/darwin-x64/package.json +++ b/sdks/gigacode/platforms/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode-darwin-x64", - "version": "0.4.0-rc.1", + "version": "0.4.0-rc.2", "description": "gigacode CLI binary for macOS x64", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/platforms/linux-arm64/package.json b/sdks/gigacode/platforms/linux-arm64/package.json index 5b042cf..248fadf 100644 --- a/sdks/gigacode/platforms/linux-arm64/package.json +++ b/sdks/gigacode/platforms/linux-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode-linux-arm64", - "version": "0.4.0-rc.1", + "version": "0.4.0-rc.2", "description": "gigacode CLI binary for Linux arm64", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/platforms/linux-x64/package.json b/sdks/gigacode/platforms/linux-x64/package.json index ea7c298..4bf5742 100644 --- a/sdks/gigacode/platforms/linux-x64/package.json +++ b/sdks/gigacode/platforms/linux-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode-linux-x64", - "version": "0.4.0-rc.1", + "version": "0.4.0-rc.2", "description": "gigacode CLI binary for Linux x64", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/platforms/win32-x64/package.json b/sdks/gigacode/platforms/win32-x64/package.json index 86997a1..e6d2f5b 100644 --- a/sdks/gigacode/platforms/win32-x64/package.json +++ b/sdks/gigacode/platforms/win32-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode-win32-x64", - "version": "0.4.0-rc.1", + "version": 
"0.4.0-rc.2", "description": "gigacode CLI binary for Windows x64", "license": "Apache-2.0", "repository": { diff --git a/sdks/persist-indexeddb/package.json b/sdks/persist-indexeddb/package.json index 86f30b6..08e56c4 100644 --- a/sdks/persist-indexeddb/package.json +++ b/sdks/persist-indexeddb/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/persist-indexeddb", - "version": "0.4.0-rc.1", + "version": "0.4.0-rc.2", "description": "IndexedDB persistence driver for the Sandbox Agent TypeScript SDK (DEPRECATED)", "license": "Apache-2.0", "repository": { diff --git a/sdks/persist-postgres/package.json b/sdks/persist-postgres/package.json index 27a3756..a274843 100644 --- a/sdks/persist-postgres/package.json +++ b/sdks/persist-postgres/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/persist-postgres", - "version": "0.4.0-rc.1", + "version": "0.4.0-rc.2", "description": "PostgreSQL persistence driver for the Sandbox Agent TypeScript SDK (DEPRECATED)", "license": "Apache-2.0", "repository": { diff --git a/sdks/persist-rivet/package.json b/sdks/persist-rivet/package.json index 3c64db2..5a0f457 100644 --- a/sdks/persist-rivet/package.json +++ b/sdks/persist-rivet/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/persist-rivet", - "version": "0.4.0-rc.1", + "version": "0.4.0-rc.2", "description": "Rivet Actor persistence driver for the Sandbox Agent TypeScript SDK (DEPRECATED)", "license": "Apache-2.0", "repository": { diff --git a/sdks/persist-sqlite/package.json b/sdks/persist-sqlite/package.json index e7f472c..aaac3dd 100644 --- a/sdks/persist-sqlite/package.json +++ b/sdks/persist-sqlite/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/persist-sqlite", - "version": "0.4.0-rc.1", + "version": "0.4.0-rc.2", "description": "SQLite persistence driver for the Sandbox Agent TypeScript SDK (DEPRECATED)", "license": "Apache-2.0", "repository": { diff --git a/sdks/react/package.json b/sdks/react/package.json index b82892a..6b4fbb1 100644 --- 
a/sdks/react/package.json +++ b/sdks/react/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/react", - "version": "0.4.0-rc.1", + "version": "0.4.0-rc.2", "description": "React components for Sandbox Agent frontend integrations", "license": "Apache-2.0", "repository": { diff --git a/sdks/typescript/package.json b/sdks/typescript/package.json index 7e2dfe4..067f1bd 100644 --- a/sdks/typescript/package.json +++ b/sdks/typescript/package.json @@ -1,6 +1,6 @@ { "name": "sandbox-agent", - "version": "0.4.0-rc.1", + "version": "0.4.0-rc.2", "description": "Universal API for automatic coding agents in sandboxes. Supports Claude Code, Codex, OpenCode, and Amp.", "license": "Apache-2.0", "repository": { diff --git a/sdks/typescript/src/providers/shared.ts b/sdks/typescript/src/providers/shared.ts index d838a0a..53abc19 100644 --- a/sdks/typescript/src/providers/shared.ts +++ b/sdks/typescript/src/providers/shared.ts @@ -1,4 +1,4 @@ -export const DEFAULT_SANDBOX_AGENT_IMAGE = "rivetdev/sandbox-agent:0.3.2-full"; +export const DEFAULT_SANDBOX_AGENT_IMAGE = "rivetdev/sandbox-agent:0.4.0-rc.2-full"; export const SANDBOX_AGENT_INSTALL_SCRIPT = "https://releases.rivet.dev/sandbox-agent/0.3.x/install.sh"; export const DEFAULT_AGENTS = ["claude", "codex"] as const; From f0ec8e497b952b8ee43693b3c1feca58b819c52f Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Sun, 15 Mar 2026 23:17:52 -0700 Subject: [PATCH 14/48] fix: mock agent process launcher not written during install agent_process_status() for mock always returned Some(...) even when the launcher file did not exist. This caused install_agent_process() to short-circuit with "already installed" and never write the launcher script. Fix by checking that the launcher file exists before reporting the mock agent as installed. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- server/packages/agent-management/src/agents.rs | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/server/packages/agent-management/src/agents.rs b/server/packages/agent-management/src/agents.rs index fa7e243..785603d 100644 --- a/server/packages/agent-management/src/agents.rs +++ b/server/packages/agent-management/src/agents.rs @@ -678,14 +678,17 @@ impl AgentManager { } fn agent_process_status(&self, agent: AgentId) -> Option { - if agent == AgentId::Mock { - return Some(AgentProcessStatus { - source: InstallSource::Builtin, - version: Some("builtin".to_string()), - }); - } - let launcher = self.agent_process_path(agent); + + if agent == AgentId::Mock { + if launcher.exists() { + return Some(AgentProcessStatus { + source: InstallSource::Builtin, + version: Some("builtin".to_string()), + }); + } + return None; + } if launcher.exists() { return Some(AgentProcessStatus { source: InstallSource::LocalPath, From 7faed2f43a0d46ef5b4ef021bff165a53d3b5a04 Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Sun, 15 Mar 2026 23:26:42 -0700 Subject: [PATCH 15/48] chore(release): update version to 0.4.0-rc.3 --- Cargo.toml | 16 ++++++++-------- docs/architecture.mdx | 2 +- docs/deploy/daytona.mdx | 2 +- docs/deploy/docker.mdx | 6 +++--- docs/openapi.json | 2 +- docs/quickstart.mdx | 2 +- examples/shared/src/docker.ts | 2 +- sdks/acp-http-client/package.json | 2 +- sdks/cli-shared/package.json | 2 +- sdks/cli/package.json | 2 +- sdks/cli/platforms/darwin-arm64/package.json | 2 +- sdks/cli/platforms/darwin-x64/package.json | 2 +- sdks/cli/platforms/linux-arm64/package.json | 2 +- sdks/cli/platforms/linux-x64/package.json | 2 +- sdks/cli/platforms/win32-x64/package.json | 2 +- sdks/gigacode/package.json | 2 +- .../gigacode/platforms/darwin-arm64/package.json | 2 +- sdks/gigacode/platforms/darwin-x64/package.json | 2 +- sdks/gigacode/platforms/linux-arm64/package.json | 2 +- 
sdks/gigacode/platforms/linux-x64/package.json | 2 +- sdks/gigacode/platforms/win32-x64/package.json | 2 +- sdks/persist-indexeddb/package.json | 2 +- sdks/persist-postgres/package.json | 2 +- sdks/persist-rivet/package.json | 2 +- sdks/persist-sqlite/package.json | 2 +- sdks/react/package.json | 2 +- sdks/typescript/package.json | 2 +- sdks/typescript/src/providers/shared.ts | 2 +- 28 files changed, 37 insertions(+), 37 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 561e502..a02f922 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,7 @@ members = ["server/packages/*", "gigacode"] exclude = ["factory/packages/desktop/src-tauri", "foundry/packages/desktop/src-tauri"] [workspace.package] -version = "0.4.0-rc.2" +version = "0.4.0-rc.3" edition = "2021" authors = [ "Rivet Gaming, LLC " ] license = "Apache-2.0" @@ -13,13 +13,13 @@ description = "Universal API for automatic coding agents in sandboxes. Supports [workspace.dependencies] # Internal crates -sandbox-agent = { version = "0.4.0-rc.2", path = "server/packages/sandbox-agent" } -sandbox-agent-error = { version = "0.4.0-rc.2", path = "server/packages/error" } -sandbox-agent-agent-management = { version = "0.4.0-rc.2", path = "server/packages/agent-management" } -sandbox-agent-agent-credentials = { version = "0.4.0-rc.2", path = "server/packages/agent-credentials" } -sandbox-agent-opencode-adapter = { version = "0.4.0-rc.2", path = "server/packages/opencode-adapter" } -sandbox-agent-opencode-server-manager = { version = "0.4.0-rc.2", path = "server/packages/opencode-server-manager" } -acp-http-adapter = { version = "0.4.0-rc.2", path = "server/packages/acp-http-adapter" } +sandbox-agent = { version = "0.4.0-rc.3", path = "server/packages/sandbox-agent" } +sandbox-agent-error = { version = "0.4.0-rc.3", path = "server/packages/error" } +sandbox-agent-agent-management = { version = "0.4.0-rc.3", path = "server/packages/agent-management" } +sandbox-agent-agent-credentials = { version = "0.4.0-rc.3", path = 
"server/packages/agent-credentials" } +sandbox-agent-opencode-adapter = { version = "0.4.0-rc.3", path = "server/packages/opencode-adapter" } +sandbox-agent-opencode-server-manager = { version = "0.4.0-rc.3", path = "server/packages/opencode-server-manager" } +acp-http-adapter = { version = "0.4.0-rc.3", path = "server/packages/acp-http-adapter" } # Serialization serde = { version = "1.0", features = ["derive"] } diff --git a/docs/architecture.mdx b/docs/architecture.mdx index ae00539..cddfbe6 100644 --- a/docs/architecture.mdx +++ b/docs/architecture.mdx @@ -56,7 +56,7 @@ Agents are installed lazily on first use. To avoid the cold-start delay, pre-ins sandbox-agent install-agent --all ``` -The `rivetdev/sandbox-agent:0.4.0-rc.2-full` Docker image ships with all agents pre-installed. +The `rivetdev/sandbox-agent:0.4.0-rc.3-full` Docker image ships with all agents pre-installed. ## Production-ready agent orchestration diff --git a/docs/deploy/daytona.mdx b/docs/deploy/daytona.mdx index 1dc84ae..fa0fee2 100644 --- a/docs/deploy/daytona.mdx +++ b/docs/deploy/daytona.mdx @@ -44,7 +44,7 @@ try { } ``` -The `daytona` provider uses the `rivetdev/sandbox-agent:0.4.0-rc.2-full` image by default and starts the server automatically. +The `daytona` provider uses the `rivetdev/sandbox-agent:0.4.0-rc.3-full` image by default and starts the server automatically. ## Using snapshots for faster startup diff --git a/docs/deploy/docker.mdx b/docs/deploy/docker.mdx index 232033b..28fb737 100644 --- a/docs/deploy/docker.mdx +++ b/docs/deploy/docker.mdx @@ -15,11 +15,11 @@ Run the published full image with all supported agents pre-installed: docker run --rm -p 3000:3000 \ -e ANTHROPIC_API_KEY="$ANTHROPIC_API_KEY" \ -e OPENAI_API_KEY="$OPENAI_API_KEY" \ - rivetdev/sandbox-agent:0.4.0-rc.2-full \ + rivetdev/sandbox-agent:0.4.0-rc.3-full \ server --no-token --host 0.0.0.0 --port 3000 ``` -The `0.4.0-rc.2-full` tag pins the exact version. 
The moving `full` tag is also published for contributors who want the latest full image. +The `0.4.0-rc.3-full` tag pins the exact version. The moving `full` tag is also published for contributors who want the latest full image. ## TypeScript with the Docker provider @@ -48,7 +48,7 @@ try { } ``` -The `docker` provider uses the `rivetdev/sandbox-agent:0.4.0-rc.2-full` image by default. Override with `image`: +The `docker` provider uses the `rivetdev/sandbox-agent:0.4.0-rc.3-full` image by default. Override with `image`: ```typescript docker({ image: "my-custom-image:latest" }) diff --git a/docs/openapi.json b/docs/openapi.json index 58bec71..cdb1ca6 100644 --- a/docs/openapi.json +++ b/docs/openapi.json @@ -10,7 +10,7 @@ "license": { "name": "Apache-2.0" }, - "version": "0.4.0-rc.2" + "version": "0.4.0-rc.3" }, "servers": [ { diff --git a/docs/quickstart.mdx b/docs/quickstart.mdx index 53f8f7d..f0cfe27 100644 --- a/docs/quickstart.mdx +++ b/docs/quickstart.mdx @@ -242,7 +242,7 @@ icon: "rocket" docker run -p 2468:2468 \ -e ANTHROPIC_API_KEY="sk-ant-..." \ -e OPENAI_API_KEY="sk-..." \ - rivetdev/sandbox-agent:0.4.0-rc.2-full \ + rivetdev/sandbox-agent:0.4.0-rc.3-full \ server --no-token --host 0.0.0.0 --port 2468 ``` diff --git a/examples/shared/src/docker.ts b/examples/shared/src/docker.ts index 89e95dd..b161f35 100644 --- a/examples/shared/src/docker.ts +++ b/examples/shared/src/docker.ts @@ -9,7 +9,7 @@ const __dirname = path.dirname(fileURLToPath(import.meta.url)); const REPO_ROOT = path.resolve(__dirname, "..", "..", ".."); /** Pre-built Docker image with all agents installed. */ -export const FULL_IMAGE = "rivetdev/sandbox-agent:0.4.0-rc.2-full"; +export const FULL_IMAGE = "rivetdev/sandbox-agent:0.4.0-rc.3-full"; export interface DockerSandboxOptions { /** Container port used by sandbox-agent inside Docker. 
*/ diff --git a/sdks/acp-http-client/package.json b/sdks/acp-http-client/package.json index 22d5fb1..c7b8718 100644 --- a/sdks/acp-http-client/package.json +++ b/sdks/acp-http-client/package.json @@ -1,6 +1,6 @@ { "name": "acp-http-client", - "version": "0.4.0-rc.2", + "version": "0.4.0-rc.3", "description": "Protocol-faithful ACP JSON-RPC over streamable HTTP client.", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli-shared/package.json b/sdks/cli-shared/package.json index d028896..608d40e 100644 --- a/sdks/cli-shared/package.json +++ b/sdks/cli-shared/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-shared", - "version": "0.4.0-rc.2", + "version": "0.4.0-rc.3", "description": "Shared helpers for sandbox-agent CLI and SDK", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/package.json b/sdks/cli/package.json index 67a8b18..ae59b3d 100644 --- a/sdks/cli/package.json +++ b/sdks/cli/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli", - "version": "0.4.0-rc.2", + "version": "0.4.0-rc.3", "description": "CLI for sandbox-agent - run AI coding agents in sandboxes", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/platforms/darwin-arm64/package.json b/sdks/cli/platforms/darwin-arm64/package.json index 2339503..8b50eff 100644 --- a/sdks/cli/platforms/darwin-arm64/package.json +++ b/sdks/cli/platforms/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-darwin-arm64", - "version": "0.4.0-rc.2", + "version": "0.4.0-rc.3", "description": "sandbox-agent CLI binary for macOS ARM64", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/platforms/darwin-x64/package.json b/sdks/cli/platforms/darwin-x64/package.json index f9c2fe7..65d12fd 100644 --- a/sdks/cli/platforms/darwin-x64/package.json +++ b/sdks/cli/platforms/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-darwin-x64", - "version": "0.4.0-rc.2", + "version": "0.4.0-rc.3", "description": "sandbox-agent CLI binary 
for macOS x64", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/platforms/linux-arm64/package.json b/sdks/cli/platforms/linux-arm64/package.json index ab100f9..9bb50ef 100644 --- a/sdks/cli/platforms/linux-arm64/package.json +++ b/sdks/cli/platforms/linux-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-linux-arm64", - "version": "0.4.0-rc.2", + "version": "0.4.0-rc.3", "description": "sandbox-agent CLI binary for Linux arm64", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/platforms/linux-x64/package.json b/sdks/cli/platforms/linux-x64/package.json index 799650f..e4fb969 100644 --- a/sdks/cli/platforms/linux-x64/package.json +++ b/sdks/cli/platforms/linux-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-linux-x64", - "version": "0.4.0-rc.2", + "version": "0.4.0-rc.3", "description": "sandbox-agent CLI binary for Linux x64", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/platforms/win32-x64/package.json b/sdks/cli/platforms/win32-x64/package.json index 3c0728a..13230f0 100644 --- a/sdks/cli/platforms/win32-x64/package.json +++ b/sdks/cli/platforms/win32-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-win32-x64", - "version": "0.4.0-rc.2", + "version": "0.4.0-rc.3", "description": "sandbox-agent CLI binary for Windows x64", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/package.json b/sdks/gigacode/package.json index 21b3fa9..879a7ef 100644 --- a/sdks/gigacode/package.json +++ b/sdks/gigacode/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode", - "version": "0.4.0-rc.2", + "version": "0.4.0-rc.3", "description": "Gigacode CLI (sandbox-agent with OpenCode attach by default)", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/platforms/darwin-arm64/package.json b/sdks/gigacode/platforms/darwin-arm64/package.json index 5b2f54b..4ec9a96 100644 --- a/sdks/gigacode/platforms/darwin-arm64/package.json +++ 
b/sdks/gigacode/platforms/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode-darwin-arm64", - "version": "0.4.0-rc.2", + "version": "0.4.0-rc.3", "description": "gigacode CLI binary for macOS arm64", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/platforms/darwin-x64/package.json b/sdks/gigacode/platforms/darwin-x64/package.json index eadd657..bbbb01a 100644 --- a/sdks/gigacode/platforms/darwin-x64/package.json +++ b/sdks/gigacode/platforms/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode-darwin-x64", - "version": "0.4.0-rc.2", + "version": "0.4.0-rc.3", "description": "gigacode CLI binary for macOS x64", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/platforms/linux-arm64/package.json b/sdks/gigacode/platforms/linux-arm64/package.json index 248fadf..11547bc 100644 --- a/sdks/gigacode/platforms/linux-arm64/package.json +++ b/sdks/gigacode/platforms/linux-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode-linux-arm64", - "version": "0.4.0-rc.2", + "version": "0.4.0-rc.3", "description": "gigacode CLI binary for Linux arm64", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/platforms/linux-x64/package.json b/sdks/gigacode/platforms/linux-x64/package.json index 4bf5742..cd912b8 100644 --- a/sdks/gigacode/platforms/linux-x64/package.json +++ b/sdks/gigacode/platforms/linux-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode-linux-x64", - "version": "0.4.0-rc.2", + "version": "0.4.0-rc.3", "description": "gigacode CLI binary for Linux x64", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/platforms/win32-x64/package.json b/sdks/gigacode/platforms/win32-x64/package.json index e6d2f5b..8500b75 100644 --- a/sdks/gigacode/platforms/win32-x64/package.json +++ b/sdks/gigacode/platforms/win32-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode-win32-x64", - "version": "0.4.0-rc.2", + "version": 
"0.4.0-rc.3", "description": "gigacode CLI binary for Windows x64", "license": "Apache-2.0", "repository": { diff --git a/sdks/persist-indexeddb/package.json b/sdks/persist-indexeddb/package.json index 08e56c4..e50b10e 100644 --- a/sdks/persist-indexeddb/package.json +++ b/sdks/persist-indexeddb/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/persist-indexeddb", - "version": "0.4.0-rc.2", + "version": "0.4.0-rc.3", "description": "IndexedDB persistence driver for the Sandbox Agent TypeScript SDK (DEPRECATED)", "license": "Apache-2.0", "repository": { diff --git a/sdks/persist-postgres/package.json b/sdks/persist-postgres/package.json index a274843..bd3265a 100644 --- a/sdks/persist-postgres/package.json +++ b/sdks/persist-postgres/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/persist-postgres", - "version": "0.4.0-rc.2", + "version": "0.4.0-rc.3", "description": "PostgreSQL persistence driver for the Sandbox Agent TypeScript SDK (DEPRECATED)", "license": "Apache-2.0", "repository": { diff --git a/sdks/persist-rivet/package.json b/sdks/persist-rivet/package.json index 5a0f457..139a731 100644 --- a/sdks/persist-rivet/package.json +++ b/sdks/persist-rivet/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/persist-rivet", - "version": "0.4.0-rc.2", + "version": "0.4.0-rc.3", "description": "Rivet Actor persistence driver for the Sandbox Agent TypeScript SDK (DEPRECATED)", "license": "Apache-2.0", "repository": { diff --git a/sdks/persist-sqlite/package.json b/sdks/persist-sqlite/package.json index aaac3dd..29c12df 100644 --- a/sdks/persist-sqlite/package.json +++ b/sdks/persist-sqlite/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/persist-sqlite", - "version": "0.4.0-rc.2", + "version": "0.4.0-rc.3", "description": "SQLite persistence driver for the Sandbox Agent TypeScript SDK (DEPRECATED)", "license": "Apache-2.0", "repository": { diff --git a/sdks/react/package.json b/sdks/react/package.json index 6b4fbb1..81b8f45 100644 --- 
a/sdks/react/package.json +++ b/sdks/react/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/react", - "version": "0.4.0-rc.2", + "version": "0.4.0-rc.3", "description": "React components for Sandbox Agent frontend integrations", "license": "Apache-2.0", "repository": { diff --git a/sdks/typescript/package.json b/sdks/typescript/package.json index 067f1bd..703779f 100644 --- a/sdks/typescript/package.json +++ b/sdks/typescript/package.json @@ -1,6 +1,6 @@ { "name": "sandbox-agent", - "version": "0.4.0-rc.2", + "version": "0.4.0-rc.3", "description": "Universal API for automatic coding agents in sandboxes. Supports Claude Code, Codex, OpenCode, and Amp.", "license": "Apache-2.0", "repository": { diff --git a/sdks/typescript/src/providers/shared.ts b/sdks/typescript/src/providers/shared.ts index 53abc19..6034606 100644 --- a/sdks/typescript/src/providers/shared.ts +++ b/sdks/typescript/src/providers/shared.ts @@ -1,4 +1,4 @@ -export const DEFAULT_SANDBOX_AGENT_IMAGE = "rivetdev/sandbox-agent:0.4.0-rc.2-full"; +export const DEFAULT_SANDBOX_AGENT_IMAGE = "rivetdev/sandbox-agent:0.4.0-rc.3-full"; export const SANDBOX_AGENT_INSTALL_SCRIPT = "https://releases.rivet.dev/sandbox-agent/0.3.x/install.sh"; export const DEFAULT_AGENTS = ["claude", "codex"] as const; From 32f3c6c3bc327b67b5551ea5273997d5c7334ef0 Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Mon, 16 Mar 2026 00:48:05 -0700 Subject: [PATCH 16/48] chore(release): update version to 0.4.0 --- Cargo.toml | 16 ++++++++-------- docs/architecture.mdx | 2 +- docs/deploy/daytona.mdx | 2 +- docs/deploy/docker.mdx | 6 +++--- docs/openapi.json | 2 +- docs/quickstart.mdx | 2 +- examples/shared/src/docker.ts | 2 +- sdks/acp-http-client/package.json | 2 +- sdks/cli-shared/package.json | 2 +- sdks/cli/package.json | 2 +- sdks/cli/platforms/darwin-arm64/package.json | 2 +- sdks/cli/platforms/darwin-x64/package.json | 2 +- sdks/cli/platforms/linux-arm64/package.json | 2 +- sdks/cli/platforms/linux-x64/package.json | 2 +- 
sdks/cli/platforms/win32-x64/package.json | 2 +- sdks/gigacode/package.json | 2 +- .../gigacode/platforms/darwin-arm64/package.json | 2 +- sdks/gigacode/platforms/darwin-x64/package.json | 2 +- sdks/gigacode/platforms/linux-arm64/package.json | 2 +- sdks/gigacode/platforms/linux-x64/package.json | 2 +- sdks/gigacode/platforms/win32-x64/package.json | 2 +- sdks/persist-indexeddb/package.json | 2 +- sdks/persist-postgres/package.json | 2 +- sdks/persist-rivet/package.json | 2 +- sdks/persist-sqlite/package.json | 2 +- sdks/react/package.json | 2 +- sdks/typescript/package.json | 2 +- sdks/typescript/src/providers/shared.ts | 2 +- 28 files changed, 37 insertions(+), 37 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index a02f922..de0ff12 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,7 @@ members = ["server/packages/*", "gigacode"] exclude = ["factory/packages/desktop/src-tauri", "foundry/packages/desktop/src-tauri"] [workspace.package] -version = "0.4.0-rc.3" +version = "0.4.0" edition = "2021" authors = [ "Rivet Gaming, LLC " ] license = "Apache-2.0" @@ -13,13 +13,13 @@ description = "Universal API for automatic coding agents in sandboxes. 
Supports [workspace.dependencies] # Internal crates -sandbox-agent = { version = "0.4.0-rc.3", path = "server/packages/sandbox-agent" } -sandbox-agent-error = { version = "0.4.0-rc.3", path = "server/packages/error" } -sandbox-agent-agent-management = { version = "0.4.0-rc.3", path = "server/packages/agent-management" } -sandbox-agent-agent-credentials = { version = "0.4.0-rc.3", path = "server/packages/agent-credentials" } -sandbox-agent-opencode-adapter = { version = "0.4.0-rc.3", path = "server/packages/opencode-adapter" } -sandbox-agent-opencode-server-manager = { version = "0.4.0-rc.3", path = "server/packages/opencode-server-manager" } -acp-http-adapter = { version = "0.4.0-rc.3", path = "server/packages/acp-http-adapter" } +sandbox-agent = { version = "0.4.0", path = "server/packages/sandbox-agent" } +sandbox-agent-error = { version = "0.4.0", path = "server/packages/error" } +sandbox-agent-agent-management = { version = "0.4.0", path = "server/packages/agent-management" } +sandbox-agent-agent-credentials = { version = "0.4.0", path = "server/packages/agent-credentials" } +sandbox-agent-opencode-adapter = { version = "0.4.0", path = "server/packages/opencode-adapter" } +sandbox-agent-opencode-server-manager = { version = "0.4.0", path = "server/packages/opencode-server-manager" } +acp-http-adapter = { version = "0.4.0", path = "server/packages/acp-http-adapter" } # Serialization serde = { version = "1.0", features = ["derive"] } diff --git a/docs/architecture.mdx b/docs/architecture.mdx index cddfbe6..467a71f 100644 --- a/docs/architecture.mdx +++ b/docs/architecture.mdx @@ -56,7 +56,7 @@ Agents are installed lazily on first use. To avoid the cold-start delay, pre-ins sandbox-agent install-agent --all ``` -The `rivetdev/sandbox-agent:0.4.0-rc.3-full` Docker image ships with all agents pre-installed. +The `rivetdev/sandbox-agent:0.4.0-full` Docker image ships with all agents pre-installed. 
## Production-ready agent orchestration diff --git a/docs/deploy/daytona.mdx b/docs/deploy/daytona.mdx index fa0fee2..42dad40 100644 --- a/docs/deploy/daytona.mdx +++ b/docs/deploy/daytona.mdx @@ -44,7 +44,7 @@ try { } ``` -The `daytona` provider uses the `rivetdev/sandbox-agent:0.4.0-rc.3-full` image by default and starts the server automatically. +The `daytona` provider uses the `rivetdev/sandbox-agent:0.4.0-full` image by default and starts the server automatically. ## Using snapshots for faster startup diff --git a/docs/deploy/docker.mdx b/docs/deploy/docker.mdx index 28fb737..7c9d2e3 100644 --- a/docs/deploy/docker.mdx +++ b/docs/deploy/docker.mdx @@ -15,11 +15,11 @@ Run the published full image with all supported agents pre-installed: docker run --rm -p 3000:3000 \ -e ANTHROPIC_API_KEY="$ANTHROPIC_API_KEY" \ -e OPENAI_API_KEY="$OPENAI_API_KEY" \ - rivetdev/sandbox-agent:0.4.0-rc.3-full \ + rivetdev/sandbox-agent:0.4.0-full \ server --no-token --host 0.0.0.0 --port 3000 ``` -The `0.4.0-rc.3-full` tag pins the exact version. The moving `full` tag is also published for contributors who want the latest full image. +The `0.4.0-full` tag pins the exact version. The moving `full` tag is also published for contributors who want the latest full image. ## TypeScript with the Docker provider @@ -48,7 +48,7 @@ try { } ``` -The `docker` provider uses the `rivetdev/sandbox-agent:0.4.0-rc.3-full` image by default. Override with `image`: +The `docker` provider uses the `rivetdev/sandbox-agent:0.4.0-full` image by default. 
Override with `image`: ```typescript docker({ image: "my-custom-image:latest" }) diff --git a/docs/openapi.json b/docs/openapi.json index cdb1ca6..e432a84 100644 --- a/docs/openapi.json +++ b/docs/openapi.json @@ -10,7 +10,7 @@ "license": { "name": "Apache-2.0" }, - "version": "0.4.0-rc.3" + "version": "0.4.0" }, "servers": [ { diff --git a/docs/quickstart.mdx b/docs/quickstart.mdx index f0cfe27..5c299c3 100644 --- a/docs/quickstart.mdx +++ b/docs/quickstart.mdx @@ -242,7 +242,7 @@ icon: "rocket" docker run -p 2468:2468 \ -e ANTHROPIC_API_KEY="sk-ant-..." \ -e OPENAI_API_KEY="sk-..." \ - rivetdev/sandbox-agent:0.4.0-rc.3-full \ + rivetdev/sandbox-agent:0.4.0-full \ server --no-token --host 0.0.0.0 --port 2468 ``` diff --git a/examples/shared/src/docker.ts b/examples/shared/src/docker.ts index b161f35..6a8f40a 100644 --- a/examples/shared/src/docker.ts +++ b/examples/shared/src/docker.ts @@ -9,7 +9,7 @@ const __dirname = path.dirname(fileURLToPath(import.meta.url)); const REPO_ROOT = path.resolve(__dirname, "..", "..", ".."); /** Pre-built Docker image with all agents installed. */ -export const FULL_IMAGE = "rivetdev/sandbox-agent:0.4.0-rc.3-full"; +export const FULL_IMAGE = "rivetdev/sandbox-agent:0.4.0-full"; export interface DockerSandboxOptions { /** Container port used by sandbox-agent inside Docker. 
*/ diff --git a/sdks/acp-http-client/package.json b/sdks/acp-http-client/package.json index c7b8718..409ad8a 100644 --- a/sdks/acp-http-client/package.json +++ b/sdks/acp-http-client/package.json @@ -1,6 +1,6 @@ { "name": "acp-http-client", - "version": "0.4.0-rc.3", + "version": "0.4.0", "description": "Protocol-faithful ACP JSON-RPC over streamable HTTP client.", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli-shared/package.json b/sdks/cli-shared/package.json index 608d40e..7881c7b 100644 --- a/sdks/cli-shared/package.json +++ b/sdks/cli-shared/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-shared", - "version": "0.4.0-rc.3", + "version": "0.4.0", "description": "Shared helpers for sandbox-agent CLI and SDK", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/package.json b/sdks/cli/package.json index ae59b3d..e8ddd85 100644 --- a/sdks/cli/package.json +++ b/sdks/cli/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli", - "version": "0.4.0-rc.3", + "version": "0.4.0", "description": "CLI for sandbox-agent - run AI coding agents in sandboxes", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/platforms/darwin-arm64/package.json b/sdks/cli/platforms/darwin-arm64/package.json index 8b50eff..dbaa30c 100644 --- a/sdks/cli/platforms/darwin-arm64/package.json +++ b/sdks/cli/platforms/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-darwin-arm64", - "version": "0.4.0-rc.3", + "version": "0.4.0", "description": "sandbox-agent CLI binary for macOS ARM64", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/platforms/darwin-x64/package.json b/sdks/cli/platforms/darwin-x64/package.json index 65d12fd..efc4506 100644 --- a/sdks/cli/platforms/darwin-x64/package.json +++ b/sdks/cli/platforms/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-darwin-x64", - "version": "0.4.0-rc.3", + "version": "0.4.0", "description": "sandbox-agent CLI binary for macOS x64", 
"license": "Apache-2.0", "repository": { diff --git a/sdks/cli/platforms/linux-arm64/package.json b/sdks/cli/platforms/linux-arm64/package.json index 9bb50ef..ecc73b5 100644 --- a/sdks/cli/platforms/linux-arm64/package.json +++ b/sdks/cli/platforms/linux-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-linux-arm64", - "version": "0.4.0-rc.3", + "version": "0.4.0", "description": "sandbox-agent CLI binary for Linux arm64", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/platforms/linux-x64/package.json b/sdks/cli/platforms/linux-x64/package.json index e4fb969..a5e5879 100644 --- a/sdks/cli/platforms/linux-x64/package.json +++ b/sdks/cli/platforms/linux-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-linux-x64", - "version": "0.4.0-rc.3", + "version": "0.4.0", "description": "sandbox-agent CLI binary for Linux x64", "license": "Apache-2.0", "repository": { diff --git a/sdks/cli/platforms/win32-x64/package.json b/sdks/cli/platforms/win32-x64/package.json index 13230f0..8014d1d 100644 --- a/sdks/cli/platforms/win32-x64/package.json +++ b/sdks/cli/platforms/win32-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/cli-win32-x64", - "version": "0.4.0-rc.3", + "version": "0.4.0", "description": "sandbox-agent CLI binary for Windows x64", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/package.json b/sdks/gigacode/package.json index 879a7ef..bba83f7 100644 --- a/sdks/gigacode/package.json +++ b/sdks/gigacode/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode", - "version": "0.4.0-rc.3", + "version": "0.4.0", "description": "Gigacode CLI (sandbox-agent with OpenCode attach by default)", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/platforms/darwin-arm64/package.json b/sdks/gigacode/platforms/darwin-arm64/package.json index 4ec9a96..6e343af 100644 --- a/sdks/gigacode/platforms/darwin-arm64/package.json +++ b/sdks/gigacode/platforms/darwin-arm64/package.json @@ -1,6 
+1,6 @@ { "name": "@sandbox-agent/gigacode-darwin-arm64", - "version": "0.4.0-rc.3", + "version": "0.4.0", "description": "gigacode CLI binary for macOS arm64", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/platforms/darwin-x64/package.json b/sdks/gigacode/platforms/darwin-x64/package.json index bbbb01a..f61636d 100644 --- a/sdks/gigacode/platforms/darwin-x64/package.json +++ b/sdks/gigacode/platforms/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode-darwin-x64", - "version": "0.4.0-rc.3", + "version": "0.4.0", "description": "gigacode CLI binary for macOS x64", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/platforms/linux-arm64/package.json b/sdks/gigacode/platforms/linux-arm64/package.json index 11547bc..1070c84 100644 --- a/sdks/gigacode/platforms/linux-arm64/package.json +++ b/sdks/gigacode/platforms/linux-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode-linux-arm64", - "version": "0.4.0-rc.3", + "version": "0.4.0", "description": "gigacode CLI binary for Linux arm64", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/platforms/linux-x64/package.json b/sdks/gigacode/platforms/linux-x64/package.json index cd912b8..81dd26b 100644 --- a/sdks/gigacode/platforms/linux-x64/package.json +++ b/sdks/gigacode/platforms/linux-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode-linux-x64", - "version": "0.4.0-rc.3", + "version": "0.4.0", "description": "gigacode CLI binary for Linux x64", "license": "Apache-2.0", "repository": { diff --git a/sdks/gigacode/platforms/win32-x64/package.json b/sdks/gigacode/platforms/win32-x64/package.json index 8500b75..bc23895 100644 --- a/sdks/gigacode/platforms/win32-x64/package.json +++ b/sdks/gigacode/platforms/win32-x64/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/gigacode-win32-x64", - "version": "0.4.0-rc.3", + "version": "0.4.0", "description": "gigacode CLI binary for Windows x64", "license": 
"Apache-2.0", "repository": { diff --git a/sdks/persist-indexeddb/package.json b/sdks/persist-indexeddb/package.json index e50b10e..981c9cc 100644 --- a/sdks/persist-indexeddb/package.json +++ b/sdks/persist-indexeddb/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/persist-indexeddb", - "version": "0.4.0-rc.3", + "version": "0.4.0", "description": "IndexedDB persistence driver for the Sandbox Agent TypeScript SDK (DEPRECATED)", "license": "Apache-2.0", "repository": { diff --git a/sdks/persist-postgres/package.json b/sdks/persist-postgres/package.json index bd3265a..6f999b9 100644 --- a/sdks/persist-postgres/package.json +++ b/sdks/persist-postgres/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/persist-postgres", - "version": "0.4.0-rc.3", + "version": "0.4.0", "description": "PostgreSQL persistence driver for the Sandbox Agent TypeScript SDK (DEPRECATED)", "license": "Apache-2.0", "repository": { diff --git a/sdks/persist-rivet/package.json b/sdks/persist-rivet/package.json index 139a731..2a257f3 100644 --- a/sdks/persist-rivet/package.json +++ b/sdks/persist-rivet/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/persist-rivet", - "version": "0.4.0-rc.3", + "version": "0.4.0", "description": "Rivet Actor persistence driver for the Sandbox Agent TypeScript SDK (DEPRECATED)", "license": "Apache-2.0", "repository": { diff --git a/sdks/persist-sqlite/package.json b/sdks/persist-sqlite/package.json index 29c12df..d4254d7 100644 --- a/sdks/persist-sqlite/package.json +++ b/sdks/persist-sqlite/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/persist-sqlite", - "version": "0.4.0-rc.3", + "version": "0.4.0", "description": "SQLite persistence driver for the Sandbox Agent TypeScript SDK (DEPRECATED)", "license": "Apache-2.0", "repository": { diff --git a/sdks/react/package.json b/sdks/react/package.json index 81b8f45..8744b2f 100644 --- a/sdks/react/package.json +++ b/sdks/react/package.json @@ -1,6 +1,6 @@ { "name": "@sandbox-agent/react", - 
"version": "0.4.0-rc.3", + "version": "0.4.0", "description": "React components for Sandbox Agent frontend integrations", "license": "Apache-2.0", "repository": { diff --git a/sdks/typescript/package.json b/sdks/typescript/package.json index 703779f..093b1ad 100644 --- a/sdks/typescript/package.json +++ b/sdks/typescript/package.json @@ -1,6 +1,6 @@ { "name": "sandbox-agent", - "version": "0.4.0-rc.3", + "version": "0.4.0", "description": "Universal API for automatic coding agents in sandboxes. Supports Claude Code, Codex, OpenCode, and Amp.", "license": "Apache-2.0", "repository": { diff --git a/sdks/typescript/src/providers/shared.ts b/sdks/typescript/src/providers/shared.ts index 6034606..5ee80c0 100644 --- a/sdks/typescript/src/providers/shared.ts +++ b/sdks/typescript/src/providers/shared.ts @@ -1,4 +1,4 @@ -export const DEFAULT_SANDBOX_AGENT_IMAGE = "rivetdev/sandbox-agent:0.4.0-rc.3-full"; +export const DEFAULT_SANDBOX_AGENT_IMAGE = "rivetdev/sandbox-agent:0.4.0-full"; export const SANDBOX_AGENT_INSTALL_SCRIPT = "https://releases.rivet.dev/sandbox-agent/0.3.x/install.sh"; export const DEFAULT_AGENTS = ["claude", "codex"] as const; From 77c8f1e3f3f0ccf6db0b4d052cca8b580b932a4f Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Mon, 16 Mar 2026 14:57:49 -0700 Subject: [PATCH 17/48] feat: add E2B auto-pause support with pause/kill/reconnect provider lifecycle Add `pause()`, `kill()`, and `reconnect()` methods to the SandboxProvider interface so providers can support graceful suspension and permanent deletion as distinct operations. The E2B provider now uses `betaCreate` with `autoPause: true` by default, `betaPause()` for suspension, and surfaces `SandboxDestroyedError` on reconnect to a deleted sandbox. SDK exposes `pauseSandbox()` and `killSandbox()` alongside the existing `destroySandbox()`. 
Co-Authored-By: Claude Opus 4.6 (1M context) --- docs/deploy/e2b.mdx | 2 +- docs/quickstart.mdx | 4 +- docs/sdk-overview.mdx | 4 +- examples/daytona/src/daytona.ts | 33 +++ examples/e2b/src/e2b.ts | 32 +++ examples/vercel/src/vercel.ts | 35 ++++ sdks/typescript/src/client.ts | 57 ++++++ sdks/typescript/src/index.ts | 1 + sdks/typescript/src/providers/e2b.ts | 42 +++- sdks/typescript/src/providers/types.ts | 19 ++ .../tests/provider-lifecycle.test.ts | 193 ++++++++++++++++++ sdks/typescript/tests/providers.test.ts | 7 +- 12 files changed, 416 insertions(+), 13 deletions(-) create mode 100644 examples/daytona/src/daytona.ts create mode 100644 examples/e2b/src/e2b.ts create mode 100644 examples/vercel/src/vercel.ts create mode 100644 sdks/typescript/tests/provider-lifecycle.test.ts diff --git a/docs/deploy/e2b.mdx b/docs/deploy/e2b.mdx index 4e056ee..e6465f2 100644 --- a/docs/deploy/e2b.mdx +++ b/docs/deploy/e2b.mdx @@ -39,7 +39,7 @@ try { } ``` -The `e2b` provider handles sandbox creation, Sandbox Agent installation, agent setup, and server startup automatically. +The `e2b` provider handles sandbox creation, Sandbox Agent installation, agent setup, and server startup automatically. Sandboxes pause by default instead of being deleted, and reconnecting with the same `sandboxId` resumes them automatically. ## Faster cold starts diff --git a/docs/quickstart.mdx b/docs/quickstart.mdx index 5c299c3..3701c74 100644 --- a/docs/quickstart.mdx +++ b/docs/quickstart.mdx @@ -357,10 +357,10 @@ icon: "rocket" ```typescript - await client.destroySandbox(); // tears down the sandbox and disconnects + await client.destroySandbox(); // provider-defined cleanup and disconnect ``` - Use `client.dispose()` instead to disconnect without destroying the sandbox (for reconnecting later). + Use `client.dispose()` instead to disconnect without changing sandbox state. On E2B, `client.pauseSandbox()` pauses the sandbox and `client.killSandbox()` deletes it permanently. 
diff --git a/docs/sdk-overview.mdx b/docs/sdk-overview.mdx index a0f9b84..8e7c8f6 100644 --- a/docs/sdk-overview.mdx +++ b/docs/sdk-overview.mdx @@ -87,7 +87,7 @@ const sdk = await SandboxAgent.start({ // sdk.sandboxId — prefixed provider ID (e.g. "local/127.0.0.1:2468") -await sdk.destroySandbox(); // tears down sandbox + disposes client +await sdk.destroySandbox(); // provider-defined cleanup + disposes client ``` `SandboxAgent.start(...)` requires a `sandbox` provider. Built-in providers: @@ -101,7 +101,7 @@ await sdk.destroySandbox(); // tears down sandbox + disposes client | `sandbox-agent/vercel` | Vercel Sandbox | | `sandbox-agent/cloudflare` | Cloudflare Sandbox | -Use `sdk.dispose()` to disconnect without destroying the sandbox, or `sdk.destroySandbox()` to tear down both. +Use `sdk.dispose()` to disconnect without changing sandbox state, `sdk.pauseSandbox()` for graceful suspension when supported, or `sdk.killSandbox()` for permanent deletion. ## Session flow diff --git a/examples/daytona/src/daytona.ts b/examples/daytona/src/daytona.ts new file mode 100644 index 0000000..ccffc94 --- /dev/null +++ b/examples/daytona/src/daytona.ts @@ -0,0 +1,33 @@ +import { SandboxAgent } from "sandbox-agent"; +import { daytona } from "sandbox-agent/daytona"; + +function collectEnvVars(): Record { + const envVars: Record = {}; + if (process.env.ANTHROPIC_API_KEY) envVars.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; + if (process.env.OPENAI_API_KEY) envVars.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + return envVars; +} + +function inspectorUrlToBaseUrl(inspectorUrl: string): string { + return inspectorUrl.replace(/\/ui\/$/, ""); +} + +export async function setupDaytonaSandboxAgent(): Promise<{ + baseUrl: string; + token?: string; + extraHeaders?: Record; + cleanup: () => Promise; +}> { + const client = await SandboxAgent.start({ + sandbox: daytona({ + create: { envVars: collectEnvVars() }, + }), + }); + + return { + baseUrl: 
inspectorUrlToBaseUrl(client.inspectorUrl), + cleanup: async () => { + await client.killSandbox(); + }, + }; +} diff --git a/examples/e2b/src/e2b.ts b/examples/e2b/src/e2b.ts new file mode 100644 index 0000000..bfd5bda --- /dev/null +++ b/examples/e2b/src/e2b.ts @@ -0,0 +1,32 @@ +import { SandboxAgent } from "sandbox-agent"; +import { e2b } from "sandbox-agent/e2b"; + +function collectEnvVars(): Record { + const envs: Record = {}; + if (process.env.ANTHROPIC_API_KEY) envs.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; + if (process.env.OPENAI_API_KEY) envs.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + return envs; +} + +function inspectorUrlToBaseUrl(inspectorUrl: string): string { + return inspectorUrl.replace(/\/ui\/$/, ""); +} + +export async function setupE2BSandboxAgent(): Promise<{ + baseUrl: string; + token?: string; + cleanup: () => Promise; +}> { + const client = await SandboxAgent.start({ + sandbox: e2b({ + create: { envs: collectEnvVars() }, + }), + }); + + return { + baseUrl: inspectorUrlToBaseUrl(client.inspectorUrl), + cleanup: async () => { + await client.killSandbox(); + }, + }; +} diff --git a/examples/vercel/src/vercel.ts b/examples/vercel/src/vercel.ts new file mode 100644 index 0000000..742cd5a --- /dev/null +++ b/examples/vercel/src/vercel.ts @@ -0,0 +1,35 @@ +import { SandboxAgent } from "sandbox-agent"; +import { vercel } from "sandbox-agent/vercel"; + +function collectEnvVars(): Record { + const env: Record = {}; + if (process.env.ANTHROPIC_API_KEY) env.ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY; + if (process.env.OPENAI_API_KEY) env.OPENAI_API_KEY = process.env.OPENAI_API_KEY; + return env; +} + +function inspectorUrlToBaseUrl(inspectorUrl: string): string { + return inspectorUrl.replace(/\/ui\/$/, ""); +} + +export async function setupVercelSandboxAgent(): Promise<{ + baseUrl: string; + token?: string; + cleanup: () => Promise; +}> { + const client = await SandboxAgent.start({ + sandbox: vercel({ + create: { + runtime: 
"node24", + env: collectEnvVars(), + }, + }), + }); + + return { + baseUrl: inspectorUrlToBaseUrl(client.inspectorUrl), + cleanup: async () => { + await client.killSandbox(); + }, + }; +} diff --git a/sdks/typescript/src/client.ts b/sdks/typescript/src/client.ts index 4752c0a..10200bc 100644 --- a/sdks/typescript/src/client.ts +++ b/sdks/typescript/src/client.ts @@ -216,6 +216,18 @@ export class SandboxAgentError extends Error { } } +export class SandboxDestroyedError extends Error { + readonly sandboxId: string; + readonly provider: string; + + constructor(sandboxId: string, provider: string, options?: { cause?: unknown }) { + super(`Sandbox '${provider}/${sandboxId}' no longer exists and cannot be reconnected.`, options); + this.name = "SandboxDestroyedError"; + this.sandboxId = sandboxId; + this.provider = provider; + } +} + export class UnsupportedSessionCategoryError extends Error { readonly sessionId: string; readonly category: string; @@ -904,6 +916,7 @@ export class SandboxAgent { const createdSandbox = !existingSandbox; if (existingSandbox) { + await provider.reconnect?.(rawSandboxId); await provider.ensureServer?.(rawSandboxId); } @@ -1007,6 +1020,50 @@ export class SandboxAgent { } } + async pauseSandbox(): Promise { + const provider = this.sandboxProvider; + const rawSandboxId = this.sandboxProviderRawId; + + try { + if (provider && rawSandboxId) { + if (provider.pause) { + await provider.pause(rawSandboxId); + } else { + await provider.destroy(rawSandboxId); + } + } else if (!provider || !rawSandboxId) { + throw new Error("SandboxAgent is not attached to a provisioned sandbox."); + } + } finally { + await this.dispose(); + this.sandboxProvider = undefined; + this.sandboxProviderId = undefined; + this.sandboxProviderRawId = undefined; + } + } + + async killSandbox(): Promise { + const provider = this.sandboxProvider; + const rawSandboxId = this.sandboxProviderRawId; + + try { + if (provider && rawSandboxId) { + if (provider.kill) { + await 
provider.kill(rawSandboxId); + } else { + await provider.destroy(rawSandboxId); + } + } else if (!provider || !rawSandboxId) { + throw new Error("SandboxAgent is not attached to a provisioned sandbox."); + } + } finally { + await this.dispose(); + this.sandboxProvider = undefined; + this.sandboxProviderId = undefined; + this.sandboxProviderRawId = undefined; + } + } + async listSessions(request: ListPageRequest = {}): Promise> { const page = await this.persist.listSessions(request); return { diff --git a/sdks/typescript/src/index.ts b/sdks/typescript/src/index.ts index f0ebe2e..15537dd 100644 --- a/sdks/typescript/src/index.ts +++ b/sdks/typescript/src/index.ts @@ -3,6 +3,7 @@ export { ProcessTerminalSession, SandboxAgent, SandboxAgentError, + SandboxDestroyedError, Session, UnsupportedPermissionReplyError, UnsupportedSessionCategoryError, diff --git a/sdks/typescript/src/providers/e2b.ts b/sdks/typescript/src/providers/e2b.ts index 84d767c..8e99c64 100644 --- a/sdks/typescript/src/providers/e2b.ts +++ b/sdks/typescript/src/providers/e2b.ts @@ -1,13 +1,20 @@ -import { Sandbox } from "@e2b/code-interpreter"; +import { NotFoundError, Sandbox, type SandboxBetaCreateOpts, type SandboxConnectOpts } from "@e2b/code-interpreter"; +import { SandboxDestroyedError } from "../client.ts"; import type { SandboxProvider } from "./types.ts"; import { DEFAULT_AGENTS, SANDBOX_AGENT_INSTALL_SCRIPT } from "./shared.ts"; const DEFAULT_AGENT_PORT = 3000; +const DEFAULT_TIMEOUT_MS = 3_600_000; + +type E2BCreateOverrides = Omit, "timeoutMs" | "autoPause">; +type E2BConnectOverrides = Omit, "timeoutMs">; export interface E2BProviderOptions { - create?: Record | (() => Record | Promise>); - connect?: Record | ((sandboxId: string) => Record | Promise>); + create?: E2BCreateOverrides | (() => E2BCreateOverrides | Promise); + connect?: E2BConnectOverrides | ((sandboxId: string) => E2BConnectOverrides | Promise); agentPort?: number; + timeoutMs?: number; + autoPause?: boolean; } async function 
resolveOptions(value: E2BProviderOptions["create"] | E2BProviderOptions["connect"], sandboxId?: string): Promise> { @@ -23,13 +30,15 @@ async function resolveOptions(value: E2BProviderOptions["create"] | E2BProviderO export function e2b(options: E2BProviderOptions = {}): SandboxProvider { const agentPort = options.agentPort ?? DEFAULT_AGENT_PORT; + const timeoutMs = options.timeoutMs ?? DEFAULT_TIMEOUT_MS; + const autoPause = options.autoPause ?? true; return { name: "e2b", async create(): Promise { const createOpts = await resolveOptions(options.create); // eslint-disable-next-line @typescript-eslint/no-explicit-any - const sandbox = await Sandbox.create({ allowInternetAccess: true, ...createOpts } as any); + const sandbox = await Sandbox.betaCreate({ allowInternetAccess: true, ...createOpts, timeoutMs, autoPause } as any); await sandbox.commands.run(`curl -fsSL ${SANDBOX_AGENT_INSTALL_SCRIPT} | sh`).then((r) => { if (r.exitCode !== 0) throw new Error(`e2b install failed:\n${r.stderr}`); @@ -44,18 +53,37 @@ export function e2b(options: E2BProviderOptions = {}): SandboxProvider { return sandbox.sandboxId; }, async destroy(sandboxId: string): Promise { + await this.pause?.(sandboxId); + }, + async reconnect(sandboxId: string): Promise { const connectOpts = await resolveOptions(options.connect, sandboxId); - const sandbox = await Sandbox.connect(sandboxId, connectOpts as any); + try { + await Sandbox.connect(sandboxId, { ...connectOpts, timeoutMs } as SandboxConnectOpts); + } catch (error) { + if (error instanceof NotFoundError) { + throw new SandboxDestroyedError(sandboxId, "e2b", { cause: error }); + } + throw error; + } + }, + async pause(sandboxId: string): Promise { + const connectOpts = await resolveOptions(options.connect, sandboxId); + const sandbox = await Sandbox.connect(sandboxId, { ...connectOpts, timeoutMs } as SandboxConnectOpts); + await sandbox.betaPause(); + }, + async kill(sandboxId: string): Promise { + const connectOpts = await 
resolveOptions(options.connect, sandboxId); + const sandbox = await Sandbox.connect(sandboxId, { ...connectOpts, timeoutMs } as SandboxConnectOpts); await sandbox.kill(); }, async getUrl(sandboxId: string): Promise { const connectOpts = await resolveOptions(options.connect, sandboxId); - const sandbox = await Sandbox.connect(sandboxId, connectOpts as any); + const sandbox = await Sandbox.connect(sandboxId, { ...connectOpts, timeoutMs } as SandboxConnectOpts); return `https://${sandbox.getHost(agentPort)}`; }, async ensureServer(sandboxId: string): Promise { const connectOpts = await resolveOptions(options.connect, sandboxId); - const sandbox = await Sandbox.connect(sandboxId, connectOpts as any); + const sandbox = await Sandbox.connect(sandboxId, { ...connectOpts, timeoutMs } as SandboxConnectOpts); await sandbox.commands.run(`sandbox-agent server --no-token --host 0.0.0.0 --port ${agentPort}`, { background: true, timeoutMs: 0 }); }, }; diff --git a/sdks/typescript/src/providers/types.ts b/sdks/typescript/src/providers/types.ts index ea778de..ab996e1 100644 --- a/sdks/typescript/src/providers/types.ts +++ b/sdks/typescript/src/providers/types.ts @@ -8,6 +8,25 @@ export interface SandboxProvider { /** Permanently tear down a sandbox. */ destroy(sandboxId: string): Promise; + /** + * Reconnect to an existing sandbox before the SDK attempts health checks. + * Providers can use this to resume paused sandboxes or surface provider-specific + * reconnect errors. + */ + reconnect?(sandboxId: string): Promise; + + /** + * Gracefully stop or pause a sandbox without permanently deleting it. + * When omitted, callers should fall back to `destroy()`. + */ + pause?(sandboxId: string): Promise; + + /** + * Permanently delete a sandbox. When omitted, callers should fall back to + * `destroy()`. + */ + kill?(sandboxId: string): Promise; + /** * Return the sandbox-agent base URL for this sandbox. * Providers that cannot expose a URL should implement `getFetch()` instead. 
diff --git a/sdks/typescript/tests/provider-lifecycle.test.ts b/sdks/typescript/tests/provider-lifecycle.test.ts new file mode 100644 index 0000000..06c85f5 --- /dev/null +++ b/sdks/typescript/tests/provider-lifecycle.test.ts @@ -0,0 +1,193 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { SandboxAgent, SandboxDestroyedError, type SandboxProvider } from "../src/index.ts"; + +const e2bMocks = vi.hoisted(() => { + class MockNotFoundError extends Error { + constructor(message: string) { + super(message); + this.name = "NotFoundError"; + } + } + + return { + MockNotFoundError, + betaCreate: vi.fn(), + connect: vi.fn(), + }; +}); + +vi.mock("@e2b/code-interpreter", () => ({ + NotFoundError: e2bMocks.MockNotFoundError, + Sandbox: { + betaCreate: e2bMocks.betaCreate, + connect: e2bMocks.connect, + }, +})); + +import { e2b } from "../src/providers/e2b.ts"; + +function createFetch(): typeof fetch { + return async () => new Response(null, { status: 200 }); +} + +function createBaseProvider(overrides: Partial = {}): SandboxProvider { + return { + name: "mock", + async create(): Promise { + return "created"; + }, + async destroy(): Promise {}, + async getUrl(): Promise { + return "http://127.0.0.1:3000"; + }, + ...overrides, + }; +} + +function createMockSandbox() { + return { + sandboxId: "sbx-123", + getHost: vi.fn(() => "sandbox.example"), + betaPause: vi.fn(async () => true), + kill: vi.fn(async () => undefined), + commands: { + run: vi.fn(async () => ({ exitCode: 0, stderr: "" })), + }, + }; +} + +describe("SandboxAgent provider lifecycle", () => { + it("reconnects an existing sandbox before ensureServer", async () => { + const order: string[] = []; + const provider = createBaseProvider({ + reconnect: vi.fn(async () => { + order.push("reconnect"); + }), + ensureServer: vi.fn(async () => { + order.push("ensureServer"); + }), + }); + + const sdk = await SandboxAgent.start({ + sandbox: provider, + sandboxId: "mock/existing", + skipHealthCheck: 
true, + fetch: createFetch(), + }); + + expect(order).toEqual(["reconnect", "ensureServer"]); + + await sdk.killSandbox(); + }); + + it("surfaces SandboxDestroyedError from reconnect", async () => { + const provider = createBaseProvider({ + reconnect: vi.fn(async () => { + throw new SandboxDestroyedError("existing", "mock"); + }), + ensureServer: vi.fn(async () => undefined), + }); + + await expect( + SandboxAgent.start({ + sandbox: provider, + sandboxId: "mock/existing", + skipHealthCheck: true, + fetch: createFetch(), + }), + ).rejects.toBeInstanceOf(SandboxDestroyedError); + + expect(provider.ensureServer).not.toHaveBeenCalled(); + }); + + it("uses provider pause and kill hooks for explicit lifecycle control", async () => { + const pause = vi.fn(async () => undefined); + const kill = vi.fn(async () => undefined); + const provider = createBaseProvider({ pause, kill }); + + const paused = await SandboxAgent.start({ + sandbox: provider, + skipHealthCheck: true, + fetch: createFetch(), + }); + await paused.pauseSandbox(); + expect(pause).toHaveBeenCalledWith("created"); + + const killed = await SandboxAgent.start({ + sandbox: provider, + skipHealthCheck: true, + fetch: createFetch(), + }); + await killed.killSandbox(); + expect(kill).toHaveBeenCalledWith("created"); + }); +}); + +describe("e2b provider", () => { + beforeEach(() => { + e2bMocks.betaCreate.mockReset(); + e2bMocks.connect.mockReset(); + }); + + it("creates sandboxes with betaCreate, autoPause, and the default timeout", async () => { + const sandbox = createMockSandbox(); + e2bMocks.betaCreate.mockResolvedValue(sandbox); + + const provider = e2b({ + create: { + envs: { ANTHROPIC_API_KEY: "test" }, + }, + }); + + await expect(provider.create()).resolves.toBe("sbx-123"); + + expect(e2bMocks.betaCreate).toHaveBeenCalledWith( + expect.objectContaining({ + allowInternetAccess: true, + autoPause: true, + timeoutMs: 3_600_000, + envs: { ANTHROPIC_API_KEY: "test" }, + }), + ); + }); + + it("allows timeoutMs and 
autoPause to be overridden", async () => { + const sandbox = createMockSandbox(); + e2bMocks.betaCreate.mockResolvedValue(sandbox); + + const provider = e2b({ + timeoutMs: 123_456, + autoPause: false, + }); + + await provider.create(); + + expect(e2bMocks.betaCreate).toHaveBeenCalledWith( + expect.objectContaining({ + autoPause: false, + timeoutMs: 123_456, + }), + ); + }); + + it("pauses by default in destroy and uses kill for permanent deletion", async () => { + const sandbox = createMockSandbox(); + e2bMocks.connect.mockResolvedValue(sandbox); + const provider = e2b(); + + await provider.destroy("sbx-123"); + expect(e2bMocks.connect).toHaveBeenLastCalledWith("sbx-123", { timeoutMs: 3_600_000 }); + expect(sandbox.betaPause).toHaveBeenCalledTimes(1); + expect(sandbox.kill).not.toHaveBeenCalled(); + + await provider.kill?.("sbx-123"); + expect(sandbox.kill).toHaveBeenCalledTimes(1); + }); + + it("maps missing reconnect targets to SandboxDestroyedError", async () => { + e2bMocks.connect.mockRejectedValue(new e2bMocks.MockNotFoundError("gone")); + const provider = e2b(); + + await expect(provider.reconnect?.("missing-sandbox")).rejects.toBeInstanceOf(SandboxDestroyedError); + }); +}); diff --git a/sdks/typescript/tests/providers.test.ts b/sdks/typescript/tests/providers.test.ts index 3376026..d98672d 100644 --- a/sdks/typescript/tests/providers.test.ts +++ b/sdks/typescript/tests/providers.test.ts @@ -291,7 +291,7 @@ function providerSuite(entry: ProviderEntry) { afterEach(async () => { if (!sdk) return; - await sdk.destroySandbox().catch(async () => { + await sdk.killSandbox().catch(async () => { await sdk?.dispose().catch(() => {}); }); sdk = undefined; @@ -364,6 +364,11 @@ function providerSuite(entry: ProviderEntry) { }); await expect(reconnected.listAgents()).rejects.toThrow(); } + + if (entry.name === "e2b") { + const rawSandboxId = sandboxId?.slice(sandboxId.indexOf("/") + 1); + await entry.createProvider().kill?.(rawSandboxId!); + } }, entry.startTimeoutMs, 
); From f45a467484964b6517a511ebd7c13975012ef981 Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Mon, 16 Mar 2026 15:23:59 -0700 Subject: [PATCH 18/48] chore(foundry): migrate to actions (#262) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat(foundry): checkpoint actor and workspace refactor * docs(foundry): add agent handoff context * wip(foundry): continue actor refactor * wip(foundry): capture remaining local changes * Complete Foundry refactor checklist * Fix Foundry validation fallout * wip * wip: convert all actors from workflow to plain run handlers Workaround for RivetKit bug where c.queue.iter() never yields messages for actors created via getOrCreate from another actor's context. The queue accepts messages (visible in inspector) but the iterator hangs. Sleep/wake fixes it, but actors with active connections never sleep. Converted organization, github-data, task, and user actors from run: workflow(...) to plain run: async (c) => { for await ... }. Also fixes: - Missing auth tables in org migration (auth_verification etc) - default_model NOT NULL constraint on org profile upsert - Nested workflow step in github-data (HistoryDivergedError) - Removed --force from frontend Dockerfile pnpm install Co-Authored-By: Claude Opus 4.6 (1M context) * Convert all actors from queues/workflows to direct actions, lazy task creation Major refactor replacing all queue-based workflow communication with direct RivetKit action calls across all actors. This works around a RivetKit bug where c.queue.iter() deadlocks for actors created from another actor's context. Key changes: - All actors (organization, task, user, audit-log, github-data) converted from run: workflow(...) 
to actions-only (no run handler, no queues) - PR sync creates virtual task entries in org local DB instead of spawning task actors — prevents OOM from 200+ actors created simultaneously - Task actors created lazily on first user interaction via getOrCreate, self-initialize from org's getTaskIndexEntry data - Removed requireRepoExists cross-actor call (caused 500s), replaced with local resolveTaskRepoId from org's taskIndex table - Fixed getOrganizationContext to thread overrides through all sync phases - Fixed sandbox repo path (/home/user/repo for E2B compatibility) - Fixed buildSessionDetail to skip transcript fetch for pending sessions - Added process crash protection (uncaughtException/unhandledRejection) - Fixed React infinite render loop in mock-layout useEffect dependencies - Added sandbox listProcesses error handling for expired E2B sandboxes - Set E2B sandbox timeout to 1 hour (was 5 min default) - Updated CLAUDE.md with lazy task creation rules, no-silent-catch policy, React hook dependency safety rules Co-Authored-By: Claude Opus 4.6 (1M context) * Fix E2B sandbox timeout comment, frontend stability, and create-flow improvements - Add TEMPORARY comment on E2B timeoutMs with pointer to rivetkit sandbox resilience proposal for when autoPause lands - Fix React useEffect dependency stability in mock-layout and organization-dashboard to prevent infinite re-render loops - Fix terminal-pane ref handling - Improve create-flow service and tests Co-Authored-By: Claude Opus 4.6 (1M context) --------- Co-authored-by: Claude Opus 4.6 (1M context) --- docs/openapi.json | 368 ++++- foundry/AGENT-HANDOFF.md | 179 ++ foundry/CLAUDE.md | 36 +- foundry/FOUNDRY-CHANGES.md | 1456 +++++++++++++++++ foundry/docker/frontend.dev.Dockerfile | 2 +- foundry/packages/backend/CLAUDE.md | 173 +- .../actors/{auth-user => audit-log}/db/db.ts | 2 +- .../src/actors/audit-log/db/drizzle.config.ts | 6 + .../db/drizzle/0000_fluffy_kid_colt.sql | 0 
.../audit-log/db/drizzle/0001_add_repo_id.sql | 1 + .../db/drizzle/meta/0000_snapshot.json | 0 .../db/drizzle/meta/0001_snapshot.json} | 54 +- .../db/drizzle/meta/_journal.json | 7 + .../{history => audit-log}/db/migrations.ts | 8 + .../{history => audit-log}/db/schema.ts | 3 +- .../backend/src/actors/audit-log/index.ts | 98 ++ .../backend/src/actors/auth-user/db/schema.ts | 70 - .../backend/src/actors/auth-user/index.ts | 353 ---- foundry/packages/backend/src/actors/events.ts | 104 -- .../src/actors/github-data/db/migrations.ts | 25 +- .../src/actors/github-data/db/schema.ts | 35 +- .../backend/src/actors/github-data/index.ts | 1048 +++++++----- .../src/actors/github-data/workflow.ts | 81 + .../packages/backend/src/actors/handles.ts | 41 +- .../src/actors/history/db/drizzle.config.ts | 6 - .../backend/src/actors/history/index.ts | 115 -- foundry/packages/backend/src/actors/index.ts | 16 +- foundry/packages/backend/src/actors/keys.ts | 13 +- .../src/actors/organization/actions.ts | 989 ++--------- .../src/actors/organization/actions/app.ts | 1 + .../organization/actions/better-auth.ts | 323 ++++ .../src/actors/organization/actions/github.ts | 78 + .../actors/organization/actions/onboarding.ts | 82 + .../organization/actions/organization.ts | 55 + .../organization/actions/task-mutations.ts | 543 ++++++ .../src/actors/organization/actions/tasks.ts | 340 ++++ .../src/actors/organization/app-shell.ts | 1262 +++++--------- .../src/actors/organization/constants.ts | 1 + .../db/drizzle/0000_melted_viper.sql | 9 +- .../drizzle/0001_add_auth_and_task_tables.sql | 50 + .../db/drizzle/meta/0000_snapshot.json | 52 +- .../db/drizzle/meta/_journal.json | 7 + .../src/actors/organization/db/migrations.ts | 47 +- .../src/actors/organization/db/schema.ts | 115 +- .../backend/src/actors/organization/index.ts | 13 +- .../backend/src/actors/organization/queues.ts | 39 + .../src/actors/organization/workflow.ts | 163 ++ .../backend/src/actors/repository/actions.ts | 557 ------- 
.../backend/src/actors/repository/db/db.ts | 5 - .../actors/repository/db/drizzle.config.ts | 6 - .../db/drizzle/0000_useful_la_nuit.sql | 12 - .../repository/db/drizzle/meta/_journal.json | 13 - .../src/actors/repository/db/migrations.ts | 43 - .../src/actors/repository/db/schema.ts | 23 - .../backend/src/actors/repository/index.ts | 27 - .../backend/src/actors/sandbox/index.ts | 54 +- .../task/db/drizzle/0000_charming_maestro.sql | 23 +- .../task/db/drizzle/meta/0000_snapshot.json | 88 +- .../backend/src/actors/task/db/migrations.ts | 35 +- .../backend/src/actors/task/db/schema.ts | 17 +- .../packages/backend/src/actors/task/index.ts | 370 +---- .../src/actors/task/workflow/commands.ts | 36 +- .../src/actors/task/workflow/common.ts | 126 +- .../backend/src/actors/task/workflow/index.ts | 446 +++-- .../backend/src/actors/task/workflow/init.ts | 106 +- .../backend/src/actors/task/workflow/push.ts | 34 +- .../backend/src/actors/task/workflow/queue.ts | 37 +- .../task/{workbench.ts => workspace.ts} | 698 ++++---- .../src/actors/user/actions/better-auth.ts | 47 + .../backend/src/actors/user/actions/user.ts | 44 + .../src/actors/{history => user}/db/db.ts | 2 +- .../{auth-user => user}/db/migrations.ts | 34 +- .../backend/src/actors/user/db/schema.ts | 112 ++ .../packages/backend/src/actors/user/index.ts | 60 + .../backend/src/actors/user/query-helpers.ts | 197 +++ .../backend/src/actors/user/workflow.ts | 197 +++ foundry/packages/backend/src/index.ts | 15 +- .../backend/src/services/better-auth.ts | 148 +- .../src/services/branch-name-prefixes.ts | 584 +++++++ .../backend/src/services/create-flow.ts | 42 +- .../backend/src/services/github-auth.ts | 2 +- .../packages/backend/test/create-flow.test.ts | 43 +- foundry/packages/backend/test/keys.test.ts | 5 +- .../test/organization-isolation.test.ts | 5 +- ...nread.test.ts => workspace-unread.test.ts} | 10 +- foundry/packages/cli/src/tui.ts | 81 +- foundry/packages/cli/test/tui-format.test.ts | 19 +- 
foundry/packages/client/package.json | 4 +- foundry/packages/client/src/app-client.ts | 2 + foundry/packages/client/src/backend-client.ts | 458 +++--- foundry/packages/client/src/index.ts | 2 +- foundry/packages/client/src/keys.ts | 10 +- foundry/packages/client/src/mock-app.ts | 28 +- .../client/src/mock/backend-client.ts | 206 ++- ...orkbench-client.ts => workspace-client.ts} | 134 +- .../packages/client/src/remote/app-client.ts | 7 +- .../client/src/remote/workbench-client.ts | 198 --- .../client/src/remote/workspace-client.ts | 198 +++ .../client/src/subscription/remote-manager.ts | 35 +- .../client/src/subscription/topics.ts | 75 +- foundry/packages/client/src/view-model.ts | 2 +- .../packages/client/src/workbench-client.ts | 64 - .../packages/client/src/workspace-client.ts | 64 + ...{workbench-model.ts => workspace-model.ts} | 232 +-- .../test/e2e/full-integration-e2e.test.ts | 6 +- .../client/test/e2e/github-pr-e2e.test.ts | 48 +- ...ench-e2e.test.ts => workspace-e2e.test.ts} | 85 +- ...e2e.test.ts => workspace-load-e2e.test.ts} | 68 +- foundry/packages/client/test/keys.test.ts | 10 +- .../client/test/subscription-manager.test.ts | 62 +- .../packages/client/test/view-model.test.ts | 14 +- .../frontend/src/components/dev-panel.tsx | 64 +- .../frontend/src/components/mock-layout.tsx | 520 +++--- .../components/mock-layout/model-picker.tsx | 17 +- .../mock-layout/prompt-composer.tsx | 5 +- .../components/mock-layout/right-sidebar.tsx | 2 +- .../src/components/mock-layout/sidebar.tsx | 59 +- .../components/mock-layout/terminal-pane.tsx | 5 +- .../mock-layout/transcript-header.tsx | 74 +- .../src/components/mock-layout/ui.tsx | 2 + .../components/mock-layout/view-model.test.ts | 4 +- .../src/components/mock-layout/view-model.ts | 60 +- .../src/components/organization-dashboard.tsx | 153 +- .../frontend/src/features/tasks/model.test.ts | 32 +- .../frontend/src/features/tasks/model.ts | 11 - .../src/features/tasks/status.test.ts | 36 +- 
.../frontend/src/features/tasks/status.ts | 28 +- foundry/packages/frontend/src/lib/backend.ts | 1 + foundry/packages/frontend/src/lib/mock-app.ts | 20 +- .../frontend/src/sandbox-agent-react.d.ts | 12 + foundry/packages/shared/src/app-shell.ts | 14 +- foundry/packages/shared/src/contracts.ts | 43 +- foundry/packages/shared/src/index.ts | 3 +- foundry/packages/shared/src/logging.ts | 4 +- foundry/packages/shared/src/models.ts | 217 +++ .../packages/shared/src/realtime-events.ts | 15 +- foundry/packages/shared/src/workbench.ts | 296 ---- foundry/packages/shared/src/workspace.ts | 311 ++++ sdks/typescript/src/generated/openapi.ts | 80 +- 139 files changed, 9768 insertions(+), 7204 deletions(-) create mode 100644 foundry/AGENT-HANDOFF.md create mode 100644 foundry/FOUNDRY-CHANGES.md rename foundry/packages/backend/src/actors/{auth-user => audit-log}/db/db.ts (69%) create mode 100644 foundry/packages/backend/src/actors/audit-log/db/drizzle.config.ts rename foundry/packages/backend/src/actors/{history => audit-log}/db/drizzle/0000_fluffy_kid_colt.sql (100%) create mode 100644 foundry/packages/backend/src/actors/audit-log/db/drizzle/0001_add_repo_id.sql rename foundry/packages/backend/src/actors/{history => audit-log}/db/drizzle/meta/0000_snapshot.json (100%) rename foundry/packages/backend/src/actors/{repository/db/drizzle/meta/0000_snapshot.json => audit-log/db/drizzle/meta/0001_snapshot.json} (64%) rename foundry/packages/backend/src/actors/{history => audit-log}/db/drizzle/meta/_journal.json (59%) rename foundry/packages/backend/src/actors/{history => audit-log}/db/migrations.ts (78%) rename foundry/packages/backend/src/actors/{history => audit-log}/db/schema.ts (77%) create mode 100644 foundry/packages/backend/src/actors/audit-log/index.ts delete mode 100644 foundry/packages/backend/src/actors/auth-user/db/schema.ts delete mode 100644 foundry/packages/backend/src/actors/auth-user/index.ts delete mode 100644 foundry/packages/backend/src/actors/events.ts create mode 
100644 foundry/packages/backend/src/actors/github-data/workflow.ts delete mode 100644 foundry/packages/backend/src/actors/history/db/drizzle.config.ts delete mode 100644 foundry/packages/backend/src/actors/history/index.ts create mode 100644 foundry/packages/backend/src/actors/organization/actions/app.ts create mode 100644 foundry/packages/backend/src/actors/organization/actions/better-auth.ts create mode 100644 foundry/packages/backend/src/actors/organization/actions/github.ts create mode 100644 foundry/packages/backend/src/actors/organization/actions/onboarding.ts create mode 100644 foundry/packages/backend/src/actors/organization/actions/organization.ts create mode 100644 foundry/packages/backend/src/actors/organization/actions/task-mutations.ts create mode 100644 foundry/packages/backend/src/actors/organization/actions/tasks.ts create mode 100644 foundry/packages/backend/src/actors/organization/constants.ts create mode 100644 foundry/packages/backend/src/actors/organization/db/drizzle/0001_add_auth_and_task_tables.sql create mode 100644 foundry/packages/backend/src/actors/organization/queues.ts create mode 100644 foundry/packages/backend/src/actors/organization/workflow.ts delete mode 100644 foundry/packages/backend/src/actors/repository/actions.ts delete mode 100644 foundry/packages/backend/src/actors/repository/db/db.ts delete mode 100644 foundry/packages/backend/src/actors/repository/db/drizzle.config.ts delete mode 100644 foundry/packages/backend/src/actors/repository/db/drizzle/0000_useful_la_nuit.sql delete mode 100644 foundry/packages/backend/src/actors/repository/db/drizzle/meta/_journal.json delete mode 100644 foundry/packages/backend/src/actors/repository/db/migrations.ts delete mode 100644 foundry/packages/backend/src/actors/repository/db/schema.ts delete mode 100644 foundry/packages/backend/src/actors/repository/index.ts rename foundry/packages/backend/src/actors/task/{workbench.ts => workspace.ts} (66%) create mode 100644 
foundry/packages/backend/src/actors/user/actions/better-auth.ts create mode 100644 foundry/packages/backend/src/actors/user/actions/user.ts rename foundry/packages/backend/src/actors/{history => user}/db/db.ts (70%) rename foundry/packages/backend/src/actors/{auth-user => user}/db/migrations.ts (65%) create mode 100644 foundry/packages/backend/src/actors/user/db/schema.ts create mode 100644 foundry/packages/backend/src/actors/user/index.ts create mode 100644 foundry/packages/backend/src/actors/user/query-helpers.ts create mode 100644 foundry/packages/backend/src/actors/user/workflow.ts create mode 100644 foundry/packages/backend/src/services/branch-name-prefixes.ts rename foundry/packages/backend/test/{workbench-unread.test.ts => workspace-unread.test.ts} (92%) rename foundry/packages/client/src/mock/{workbench-client.ts => workspace-client.ts} (76%) delete mode 100644 foundry/packages/client/src/remote/workbench-client.ts create mode 100644 foundry/packages/client/src/remote/workspace-client.ts delete mode 100644 foundry/packages/client/src/workbench-client.ts create mode 100644 foundry/packages/client/src/workspace-client.ts rename foundry/packages/client/src/{workbench-model.ts => workspace-model.ts} (90%) rename foundry/packages/client/test/e2e/{workbench-e2e.test.ts => workspace-e2e.test.ts} (78%) rename foundry/packages/client/test/e2e/{workbench-load-e2e.test.ts => workspace-load-e2e.test.ts} (85%) create mode 100644 foundry/packages/shared/src/models.ts delete mode 100644 foundry/packages/shared/src/workbench.ts create mode 100644 foundry/packages/shared/src/workspace.ts diff --git a/docs/openapi.json b/docs/openapi.json index e432a84..1ab18a6 100644 --- a/docs/openapi.json +++ b/docs/openapi.json @@ -20,7 +20,9 @@ "paths": { "/v1/acp": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "get_v1_acp_servers", "responses": { "200": { @@ -38,7 +40,9 @@ }, "/v1/acp/{server_id}": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], 
"operationId": "get_v1_acp", "parameters": [ { @@ -88,7 +92,9 @@ } }, "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "post_v1_acp", "parameters": [ { @@ -198,7 +204,9 @@ } }, "delete": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "delete_v1_acp", "parameters": [ { @@ -220,7 +228,9 @@ }, "/v1/agents": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "get_v1_agents", "parameters": [ { @@ -270,7 +280,9 @@ }, "/v1/agents/{agent}": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "get_v1_agent", "parameters": [ { @@ -339,7 +351,9 @@ }, "/v1/agents/{agent}/install": { "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "post_v1_agent_install", "parameters": [ { @@ -398,7 +412,9 @@ }, "/v1/config/mcp": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "get_v1_config_mcp", "parameters": [ { @@ -444,7 +460,9 @@ } }, "put": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "put_v1_config_mcp", "parameters": [ { @@ -483,7 +501,9 @@ } }, "delete": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "delete_v1_config_mcp", "parameters": [ { @@ -514,7 +534,9 @@ }, "/v1/config/skills": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "get_v1_config_skills", "parameters": [ { @@ -560,7 +582,9 @@ } }, "put": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "put_v1_config_skills", "parameters": [ { @@ -599,7 +623,9 @@ } }, "delete": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "delete_v1_config_skills", "parameters": [ { @@ -630,7 +656,9 @@ }, "/v1/fs/entries": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "get_v1_fs_entries", "parameters": [ { @@ -663,7 +691,9 @@ }, "/v1/fs/entry": { "delete": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "delete_v1_fs_entry", "parameters": [ { @@ -702,7 +732,9 @@ }, "/v1/fs/file": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": 
"get_v1_fs_file", "parameters": [ { @@ -722,7 +754,9 @@ } }, "put": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "put_v1_fs_file", "parameters": [ { @@ -762,7 +796,9 @@ }, "/v1/fs/mkdir": { "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "post_v1_fs_mkdir", "parameters": [ { @@ -791,7 +827,9 @@ }, "/v1/fs/move": { "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "post_v1_fs_move", "requestBody": { "content": { @@ -819,7 +857,9 @@ }, "/v1/fs/stat": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "get_v1_fs_stat", "parameters": [ { @@ -848,7 +888,9 @@ }, "/v1/fs/upload-batch": { "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "post_v1_fs_upload_batch", "parameters": [ { @@ -889,7 +931,9 @@ }, "/v1/health": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "operationId": "get_v1_health", "responses": { "200": { @@ -907,7 +951,9 @@ }, "/v1/processes": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "List all managed processes.", "description": "Returns a list of all processes (running and exited) currently tracked\nby the runtime, sorted by process ID.", "operationId": "get_v1_processes", @@ -935,7 +981,9 @@ } }, "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Create a long-lived managed process.", "description": "Spawns a new process with the given command and arguments. Supports both\npipe-based and PTY (tty) modes. 
Returns the process descriptor on success.", "operationId": "post_v1_processes", @@ -995,7 +1043,9 @@ }, "/v1/processes/config": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Get process runtime configuration.", "description": "Returns the current runtime configuration for the process management API,\nincluding limits for concurrency, timeouts, and buffer sizes.", "operationId": "get_v1_processes_config", @@ -1023,7 +1073,9 @@ } }, "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Update process runtime configuration.", "description": "Replaces the runtime configuration for the process management API.\nValidates that all values are non-zero and clamps default timeout to max.", "operationId": "post_v1_processes_config", @@ -1073,7 +1125,9 @@ }, "/v1/processes/run": { "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Run a one-shot command.", "description": "Executes a command to completion and returns its stdout, stderr, exit code,\nand duration. Supports configurable timeout and output size limits.", "operationId": "post_v1_processes_run", @@ -1123,7 +1177,9 @@ }, "/v1/processes/{id}": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Get a single process by ID.", "description": "Returns the current state of a managed process including its status,\nPID, exit code, and creation/exit timestamps.", "operationId": "get_v1_process", @@ -1172,7 +1228,9 @@ } }, "delete": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Delete a process record.", "description": "Removes a stopped process from the runtime. Returns 409 if the process\nis still running; stop or kill it first.", "operationId": "delete_v1_process", @@ -1226,7 +1284,9 @@ }, "/v1/processes/{id}/input": { "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Write input to a process.", "description": "Sends data to a process's stdin (pipe mode) or PTY writer (tty mode).\nData can be encoded as base64, utf8, or text. 
Returns 413 if the decoded\npayload exceeds the configured `maxInputBytesPerRequest` limit.", "operationId": "post_v1_process_input", @@ -1307,7 +1367,9 @@ }, "/v1/processes/{id}/kill": { "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Send SIGKILL to a process.", "description": "Sends SIGKILL to the process and optionally waits up to `waitMs`\nmilliseconds for the process to exit before returning.", "operationId": "post_v1_process_kill", @@ -1370,7 +1432,9 @@ }, "/v1/processes/{id}/logs": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Fetch process logs.", "description": "Returns buffered log entries for a process. Supports filtering by stream\ntype, tail count, and sequence-based resumption. When `follow=true`,\nreturns an SSE stream that replays buffered entries then streams live output.", "operationId": "get_v1_process_logs", @@ -1468,7 +1532,9 @@ }, "/v1/processes/{id}/stop": { "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Send SIGTERM to a process.", "description": "Sends SIGTERM to the process and optionally waits up to `waitMs`\nmilliseconds for the process to exit before returning.", "operationId": "post_v1_process_stop", @@ -1531,7 +1597,9 @@ }, "/v1/processes/{id}/terminal/resize": { "post": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Resize a process terminal.", "description": "Sets the PTY window size (columns and rows) for a tty-mode process and\nsends SIGWINCH so the child process can adapt.", "operationId": "post_v1_process_terminal_resize", @@ -1612,7 +1680,9 @@ }, "/v1/processes/{id}/terminal/ws": { "get": { - "tags": ["v1"], + "tags": [ + "v1" + ], "summary": "Open an interactive WebSocket terminal session.", "description": "Upgrades the connection to a WebSocket for bidirectional PTY I/O. Accepts\n`access_token` query param for browser-based auth (WebSocket API cannot\nsend custom headers). 
Streams raw PTY output as binary frames and accepts\nJSON control frames for input, resize, and close.", "operationId": "get_v1_process_terminal_ws", @@ -1689,7 +1759,9 @@ "schemas": { "AcpEnvelope": { "type": "object", - "required": ["jsonrpc"], + "required": [ + "jsonrpc" + ], "properties": { "error": { "nullable": true @@ -1723,7 +1795,11 @@ }, "AcpServerInfo": { "type": "object", - "required": ["serverId", "agent", "createdAtMs"], + "required": [ + "serverId", + "agent", + "createdAtMs" + ], "properties": { "agent": { "type": "string" @@ -1739,7 +1815,9 @@ }, "AcpServerListResponse": { "type": "object", - "required": ["servers"], + "required": [ + "servers" + ], "properties": { "servers": { "type": "array", @@ -1830,7 +1908,12 @@ }, "AgentInfo": { "type": "object", - "required": ["id", "installed", "credentialsAvailable", "capabilities"], + "required": [ + "id", + "installed", + "credentialsAvailable", + "capabilities" + ], "properties": { "capabilities": { "$ref": "#/components/schemas/AgentCapabilities" @@ -1873,7 +1956,11 @@ }, "AgentInstallArtifact": { "type": "object", - "required": ["kind", "path", "source"], + "required": [ + "kind", + "path", + "source" + ], "properties": { "kind": { "type": "string" @@ -1909,7 +1996,10 @@ }, "AgentInstallResponse": { "type": "object", - "required": ["already_installed", "artifacts"], + "required": [ + "already_installed", + "artifacts" + ], "properties": { "already_installed": { "type": "boolean" @@ -1924,7 +2014,9 @@ }, "AgentListResponse": { "type": "object", - "required": ["agents"], + "required": [ + "agents" + ], "properties": { "agents": { "type": "array", @@ -1957,7 +2049,9 @@ }, "FsActionResponse": { "type": "object", - "required": ["path"], + "required": [ + "path" + ], "properties": { "path": { "type": "string" @@ -1966,7 +2060,9 @@ }, "FsDeleteQuery": { "type": "object", - "required": ["path"], + "required": [ + "path" + ], "properties": { "path": { "type": "string" @@ -1988,7 +2084,12 @@ }, "FsEntry": { 
"type": "object", - "required": ["name", "path", "entryType", "size"], + "required": [ + "name", + "path", + "entryType", + "size" + ], "properties": { "entryType": { "$ref": "#/components/schemas/FsEntryType" @@ -2012,11 +2113,17 @@ }, "FsEntryType": { "type": "string", - "enum": ["file", "directory"] + "enum": [ + "file", + "directory" + ] }, "FsMoveRequest": { "type": "object", - "required": ["from", "to"], + "required": [ + "from", + "to" + ], "properties": { "from": { "type": "string" @@ -2032,7 +2139,10 @@ }, "FsMoveResponse": { "type": "object", - "required": ["from", "to"], + "required": [ + "from", + "to" + ], "properties": { "from": { "type": "string" @@ -2044,7 +2154,9 @@ }, "FsPathQuery": { "type": "object", - "required": ["path"], + "required": [ + "path" + ], "properties": { "path": { "type": "string" @@ -2053,7 +2165,11 @@ }, "FsStat": { "type": "object", - "required": ["path", "entryType", "size"], + "required": [ + "path", + "entryType", + "size" + ], "properties": { "entryType": { "$ref": "#/components/schemas/FsEntryType" @@ -2083,7 +2199,10 @@ }, "FsUploadBatchResponse": { "type": "object", - "required": ["paths", "truncated"], + "required": [ + "paths", + "truncated" + ], "properties": { "paths": { "type": "array", @@ -2098,7 +2217,10 @@ }, "FsWriteResponse": { "type": "object", - "required": ["path", "bytesWritten"], + "required": [ + "path", + "bytesWritten" + ], "properties": { "bytesWritten": { "type": "integer", @@ -2112,7 +2234,9 @@ }, "HealthResponse": { "type": "object", - "required": ["status"], + "required": [ + "status" + ], "properties": { "status": { "type": "string" @@ -2121,7 +2245,10 @@ }, "McpConfigQuery": { "type": "object", - "required": ["directory", "mcpName"], + "required": [ + "directory", + "mcpName" + ], "properties": { "directory": { "type": "string" @@ -2135,7 +2262,10 @@ "oneOf": [ { "type": "object", - "required": ["command", "type"], + "required": [ + "command", + "type" + ], "properties": { "args": { "type": 
"array", @@ -2169,13 +2299,18 @@ }, "type": { "type": "string", - "enum": ["local"] + "enum": [ + "local" + ] } } }, { "type": "object", - "required": ["url", "type"], + "required": [ + "url", + "type" + ], "properties": { "bearerTokenEnvVar": { "type": "string", @@ -2223,7 +2358,9 @@ }, "type": { "type": "string", - "enum": ["remote"] + "enum": [ + "remote" + ] }, "url": { "type": "string" @@ -2237,7 +2374,11 @@ }, "ProblemDetails": { "type": "object", - "required": ["type", "title", "status"], + "required": [ + "type", + "title", + "status" + ], "properties": { "detail": { "type": "string", @@ -2263,7 +2404,14 @@ }, "ProcessConfig": { "type": "object", - "required": ["maxConcurrentProcesses", "defaultRunTimeoutMs", "maxRunTimeoutMs", "maxOutputBytes", "maxLogBytesPerProcess", "maxInputBytesPerRequest"], + "required": [ + "maxConcurrentProcesses", + "defaultRunTimeoutMs", + "maxRunTimeoutMs", + "maxOutputBytes", + "maxLogBytesPerProcess", + "maxInputBytesPerRequest" + ], "properties": { "defaultRunTimeoutMs": { "type": "integer", @@ -2295,7 +2443,9 @@ }, "ProcessCreateRequest": { "type": "object", - "required": ["command"], + "required": [ + "command" + ], "properties": { "args": { "type": "array", @@ -2326,7 +2476,15 @@ }, "ProcessInfo": { "type": "object", - "required": ["id", "command", "args", "tty", "interactive", "status", "createdAtMs"], + "required": [ + "id", + "command", + "args", + "tty", + "interactive", + "status", + "createdAtMs" + ], "properties": { "args": { "type": "array", @@ -2377,7 +2535,9 @@ }, "ProcessInputRequest": { "type": "object", - "required": ["data"], + "required": [ + "data" + ], "properties": { "data": { "type": "string" @@ -2390,7 +2550,9 @@ }, "ProcessInputResponse": { "type": "object", - "required": ["bytesWritten"], + "required": [ + "bytesWritten" + ], "properties": { "bytesWritten": { "type": "integer", @@ -2400,7 +2562,9 @@ }, "ProcessListResponse": { "type": "object", - "required": ["processes"], + "required": [ + 
"processes" + ], "properties": { "processes": { "type": "array", @@ -2412,7 +2576,13 @@ }, "ProcessLogEntry": { "type": "object", - "required": ["sequence", "stream", "timestampMs", "data", "encoding"], + "required": [ + "sequence", + "stream", + "timestampMs", + "data", + "encoding" + ], "properties": { "data": { "type": "string" @@ -2464,7 +2634,11 @@ }, "ProcessLogsResponse": { "type": "object", - "required": ["processId", "stream", "entries"], + "required": [ + "processId", + "stream", + "entries" + ], "properties": { "entries": { "type": "array", @@ -2482,11 +2656,18 @@ }, "ProcessLogsStream": { "type": "string", - "enum": ["stdout", "stderr", "combined", "pty"] + "enum": [ + "stdout", + "stderr", + "combined", + "pty" + ] }, "ProcessRunRequest": { "type": "object", - "required": ["command"], + "required": [ + "command" + ], "properties": { "args": { "type": "array", @@ -2522,7 +2703,14 @@ }, "ProcessRunResponse": { "type": "object", - "required": ["timedOut", "stdout", "stderr", "stdoutTruncated", "stderrTruncated", "durationMs"], + "required": [ + "timedOut", + "stdout", + "stderr", + "stdoutTruncated", + "stderrTruncated", + "durationMs" + ], "properties": { "durationMs": { "type": "integer", @@ -2564,11 +2752,17 @@ }, "ProcessState": { "type": "string", - "enum": ["running", "exited"] + "enum": [ + "running", + "exited" + ] }, "ProcessTerminalResizeRequest": { "type": "object", - "required": ["cols", "rows"], + "required": [ + "cols", + "rows" + ], "properties": { "cols": { "type": "integer", @@ -2584,7 +2778,10 @@ }, "ProcessTerminalResizeResponse": { "type": "object", - "required": ["cols", "rows"], + "required": [ + "cols", + "rows" + ], "properties": { "cols": { "type": "integer", @@ -2600,11 +2797,16 @@ }, "ServerStatus": { "type": "string", - "enum": ["running", "stopped"] + "enum": [ + "running", + "stopped" + ] }, "ServerStatusInfo": { "type": "object", - "required": ["status"], + "required": [ + "status" + ], "properties": { "status": { "$ref": 
"#/components/schemas/ServerStatus" @@ -2619,7 +2821,10 @@ }, "SkillSource": { "type": "object", - "required": ["type", "source"], + "required": [ + "type", + "source" + ], "properties": { "ref": { "type": "string", @@ -2646,7 +2851,9 @@ }, "SkillsConfig": { "type": "object", - "required": ["sources"], + "required": [ + "sources" + ], "properties": { "sources": { "type": "array", @@ -2658,7 +2865,10 @@ }, "SkillsConfigQuery": { "type": "object", - "required": ["directory", "skillName"], + "required": [ + "directory", + "skillName" + ], "properties": { "directory": { "type": "string" @@ -2676,4 +2886,4 @@ "description": "ACP proxy v1 API" } ] -} +} \ No newline at end of file diff --git a/foundry/AGENT-HANDOFF.md b/foundry/AGENT-HANDOFF.md new file mode 100644 index 0000000..20bade7 --- /dev/null +++ b/foundry/AGENT-HANDOFF.md @@ -0,0 +1,179 @@ +# Foundry Agent Handoff + +## Baseline + +- Repo: `rivet-dev/sandbox-agent` +- Branch: `columbus-v2` +- Last pushed commit: `3174fe73` (`feat(foundry): checkpoint actor and workspace refactor`) +- Progress/spec tracker: [FOUNDRY-CHANGES.md](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/FOUNDRY-CHANGES.md) + +## What is already landed + +These spec slices are already implemented and pushed: + +- Item `1`: backend actor rename `auth-user` -> `user` +- Item `2`: Better Auth mapping comments +- Item `5`: task raw SQL cleanup into migrations +- Item `6`: `history` -> `audit-log` +- Item `7`: default model moved to user-scoped app state +- Item `20`: admin action prefixing +- Item `23`: dead `getTaskEnriched` / `enrichTaskRecord` removal +- Item `25`: `Workbench` -> `Workspace` rename across backend/shared/client/frontend +- Item `26`: branch rename deleted +- Organization realtime was already collapsed to full-snapshot `organizationUpdated` +- Task realtime was already aligned to `taskUpdated` + +## Known blocker + +Spec item `3` is only partially done. 
The singleton constraint for the Better Auth `user` table is still blocked. + +- File: [foundry/packages/backend/src/actors/user/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/user/db/schema.ts) +- Reason: Better Auth still depends on external string `user.id`, so a literal singleton `CHECK (id = 1)` on that table is not a safe mechanical change. + +## Important current state + +There are uncommitted edits on top of the pushed checkpoint. Another agent should start from the current worktree, not just `origin/columbus-v2`. + +Current dirty files: + +- [foundry/packages/backend/src/actors/github-data/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/github-data/index.ts) +- [foundry/packages/backend/src/actors/organization/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/actions.ts) +- [foundry/packages/backend/src/actors/repository/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/repository/actions.ts) +- [foundry/packages/backend/src/actors/task/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workspace.ts) +- [foundry/packages/client/src/mock/backend-client.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/client/src/mock/backend-client.ts) + +These files are the current hot path for the unfinished structural work. 
+ +## What is partially in place but not finished + +### User-owned task UI state + +The user actor already has the schema and CRUD surface for per-user task/session UI state: + +- [foundry/packages/backend/src/actors/user/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/user/db/schema.ts) + `user_task_state` +- [foundry/packages/backend/src/actors/user/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/user/index.ts) + `getTaskState`, `upsertTaskState`, `deleteTaskState` + +But the task actor and UI are still reading/writing the old task-global fields: + +- [foundry/packages/backend/src/actors/task/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/db/schema.ts) + still contains `task_runtime.active_session_id` and session `unread` / `draft_*` +- [foundry/packages/backend/src/actors/task/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workspace.ts) + still derives unread/draft/active-session from task-local rows +- [foundry/packages/frontend/src/components/mock-layout.tsx](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/frontend/src/components/mock-layout.tsx) + still treats `activeSessionId` as frontend-local and uses task-level unread/draft state + +So items `21`, `22`, `24`, and part of `19` are only half-done. 
+ +### Coordinator ownership + +The current architecture still violates the intended coordinator pattern: + +- Organization still owns `taskLookup` and `taskSummaries` + - [foundry/packages/backend/src/actors/organization/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/db/schema.ts) +- Organization still resolves `taskId -> repoId` + - [foundry/packages/backend/src/actors/organization/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/actions.ts) +- Task still pushes summary updates to organization instead of repository + - [foundry/packages/backend/src/actors/task/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workspace.ts) +- Repository still does not own a `tasks` projection table yet + - [foundry/packages/backend/src/actors/repository/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/repository/db/schema.ts) + +So items `9`, `13`, and `15` are still open. + +### Queue-only mutations + +Task actor workspace commands already go through queue sends. 
Other actors still do not fully follow the queue-only mutation rule: + +- [foundry/packages/backend/src/actors/user/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/user/index.ts) +- [foundry/packages/backend/src/actors/github-data/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/github-data/index.ts) +- [foundry/packages/backend/src/actors/organization/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/actions.ts) +- [foundry/packages/backend/src/actors/organization/app-shell.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/app-shell.ts) + +So items `4`, `10`, and `11` are still open. + +### Dynamic model/agent data + +The frontend/client still hardcode model groups: + +- [foundry/packages/frontend/src/components/mock-layout/view-model.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/frontend/src/components/mock-layout/view-model.ts) +- [foundry/packages/client/src/workspace-model.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/client/src/workspace-model.ts) +- [foundry/packages/shared/src/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/shared/src/workspace.ts) + `WorkspaceModelId` is still a hardcoded union + +The repo already has the API source of truth available through the TypeScript SDK: + +- [sdks/typescript/src/client.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/sdks/typescript/src/client.ts) + `SandboxAgent.listAgents({ config: true })` +- [server/packages/sandbox-agent/src/router.rs](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/server/packages/sandbox-agent/src/router.rs) + `/v1/agents` +- 
[server/packages/sandbox-agent/src/router/support.rs](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/server/packages/sandbox-agent/src/router/support.rs) + `fallback_config_options` + +So item `8` is still open. + +### GitHub sync chunking/progress + +GitHub data sync is still a delete-and-replace flow: + +- [foundry/packages/backend/src/actors/github-data/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/github-data/index.ts) + `replaceRepositories`, `replaceBranches`, `replaceMembers`, `replacePullRequests`, and full-sync flow +- [foundry/packages/backend/src/actors/github-data/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/github-data/db/schema.ts) + no generation/progress columns yet +- [foundry/packages/shared/src/app-shell.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/shared/src/app-shell.ts) + no structured sync progress field yet + +So item `16` is still open. + +## Recommended next order + +If another agent picks this up, this is the safest order: + +1. Finish items `21`, `22`, `24`, `19` together. + Reason: user-owned task UI state is already half-wired, and task schema cleanup depends on the same files. + +2. Finish items `9`, `13`, `15` together. + Reason: coordinator ownership, repo-owned task projections, and PR/task unification are the same refactor seam. + +3. Finish item `16`. + Reason: GitHub sync chunking is mostly isolated to `github-data` plus app-shell/shared snapshot wiring. + +4. Finish item `8`. + Reason: dynamic model/agent data is largely independent once user default model is already user-scoped. + +5. Finish items `4`, `10`, `11`, `12`, `18`, final event audit. + +6. Do item `17` last. 
+ +## Concrete file hotspots for the next agent + +Backend: + +- [foundry/packages/backend/src/actors/task/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workspace.ts) +- [foundry/packages/backend/src/actors/task/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/db/schema.ts) +- [foundry/packages/backend/src/actors/task/workflow/common.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workflow/common.ts) +- [foundry/packages/backend/src/actors/task/workflow/commands.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workflow/commands.ts) +- [foundry/packages/backend/src/actors/task/workflow/init.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workflow/init.ts) +- [foundry/packages/backend/src/actors/repository/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/repository/actions.ts) +- [foundry/packages/backend/src/actors/repository/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/repository/db/schema.ts) +- [foundry/packages/backend/src/actors/organization/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/actions.ts) +- [foundry/packages/backend/src/actors/github-data/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/github-data/index.ts) +- [foundry/packages/backend/src/actors/user/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/user/index.ts) + +Shared/client/frontend: + +- 
[foundry/packages/shared/src/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/shared/src/workspace.ts)
+- [foundry/packages/shared/src/contracts.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/shared/src/contracts.ts)
+- [foundry/packages/shared/src/app-shell.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/shared/src/app-shell.ts)
+- [foundry/packages/client/src/backend-client.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/client/src/backend-client.ts)
+- [foundry/packages/client/src/workspace-model.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/client/src/workspace-model.ts)
+- [foundry/packages/frontend/src/components/mock-layout.tsx](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/frontend/src/components/mock-layout.tsx)
+- [foundry/packages/frontend/src/components/mock-layout/view-model.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/frontend/src/components/mock-layout/view-model.ts)
+- [foundry/packages/frontend/src/features/tasks/status.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/frontend/src/features/tasks/status.ts)
+
+## Notes that matter
+
+- The pushed checkpoint is useful, but it is not the full current state. There are uncommitted edits in the hot-path backend files listed above.
+- The current tree already contains a partially added `user_task_state` path. Do not duplicate that work; finish the migration by removing the old task-owned fields and rewiring readers/writers.
+- The current task actor still reads mutable fields from `c.state` such as `repoRemote`, `branchName`, `title`, `task`, `sandboxProviderId`, and `agentType`. That is part of item `19`.
+- The current frontend still synthesizes PR-only rows into fake tasks. 
That should go away as part of repo-owned task projection / PR unification. diff --git a/foundry/CLAUDE.md b/foundry/CLAUDE.md index e347a60..268b04c 100644 --- a/foundry/CLAUDE.md +++ b/foundry/CLAUDE.md @@ -56,6 +56,8 @@ Use `pnpm` workspaces and Turborepo. - mock frontend changes: `just foundry-mock` or restart with `just foundry-mock-down && just foundry-mock` - local frontend-only work outside Docker: restart `pnpm --filter @sandbox-agent/foundry-frontend dev` or `just foundry-dev-mock` as appropriate - The backend does **not** hot reload. Bun's `--hot` flag causes the server to re-bind on a different port (e.g. 6421 instead of 6420), breaking all client connections while the container still exposes the original port. After backend code changes, restart the backend container: `just foundry-dev-down && just foundry-dev`. +- The dev server has debug logging enabled by default (`RIVET_LOG_LEVEL=debug`, `FOUNDRY_LOG_LEVEL=debug`) via `compose.dev.yaml`. Error stacks and timestamps are also enabled. +- The frontend client uses JSON encoding for RivetKit in development (`import.meta.env.DEV`) for easier debugging. Production uses the default encoding. ## Railway Logs @@ -73,13 +75,14 @@ Use `pnpm` workspaces and Turborepo. - All backend interaction (actor calls, metadata/health checks, backend HTTP endpoint access) must go through the dedicated client library in `packages/client`. - Outside `packages/client`, do not call backend endpoints directly (for example `fetch(.../v1/rivet...)`), except in black-box E2E tests that intentionally exercise raw transport behavior. - GUI state should update in realtime (no manual refresh buttons). Prefer RivetKit push reactivity and actor-driven events; do not add polling/refetch for normal product flows. -- Keep the mock workbench types and mock client in `packages/shared` + `packages/client` up to date with the frontend contract. The mock is the UI testing reference implementation while backend functionality catches up. 
+- Keep the mock workspace types and mock client in `packages/shared` + `packages/client` up to date with the frontend contract. The mock is the UI testing reference implementation while backend functionality catches up. - Keep frontend route/state coverage current in code and tests; there is no separate page-inventory doc to maintain. - If Foundry uses a shared component from `@sandbox-agent/react`, make changes in `sdks/react` instead of copying or forking that component into Foundry. - When changing shared React components in `sdks/react` for Foundry, verify they still work in the Sandbox Agent Inspector before finishing. -- When making UI changes, verify the live flow with `agent-browser`, take screenshots of the updated UI, and offer to open those screenshots in Preview when you finish. +- When making UI changes, verify the live flow with the Chrome DevTools MCP or `agent-browser`, take screenshots of the updated UI, and offer to open those screenshots in Preview when you finish. - When asked for screenshots, capture all relevant affected screens and modal states, not just a single viewport. Include empty, populated, success, and blocked/error states when they are part of the changed flow. - If a screenshot catches a transition frame, blank modal, or otherwise misleading state, retake it before reporting it. +- When verifying UI in the browser, attempt to sign in by navigating to `/signin` and clicking "Continue with GitHub". If the browser lands on the GitHub login page (github.com/login) and you don't have credentials, stop and ask the user to complete the sign-in. Do not assume the session is invalid just because you see the Foundry sign-in page — always attempt the OAuth flow first. 
## Realtime Data Architecture @@ -99,7 +102,7 @@ Do not use polling (`refetchInterval`), empty "go re-fetch" broadcast events, or - **Organization actor** materializes sidebar-level data in its own SQLite: repo catalog, task summaries (title, status, branch, PR, updatedAt), repo summaries (overview/branch state), and session summaries (id, name, status, unread, model — no transcript). Task actors push summary changes to the organization actor when they mutate. The organization actor broadcasts the updated entity to connected clients. `getOrganizationSummary` reads from local tables only — no fan-out to child actors. - **Task actor** materializes its own detail state (session summaries, sandbox info, diffs, file tree). `getTaskDetail` reads from the task actor's own SQLite. The task actor broadcasts updates directly to clients connected to it. - **Session data** lives on the task actor but is a separate subscription topic. The task topic includes `sessions_summary` (list without content). The `session` topic provides full transcript and draft state. Clients subscribe to the `session` topic for whichever session is active, and filter `sessionUpdated` events by session ID (ignoring events for other sessions on the same actor). -- The expensive fan-out (querying every repository/task actor) only exists as a background reconciliation/rebuild path, never on the hot read path. +- There is no fan-out on the read path. The organization actor owns all task summaries locally. ### Subscription manager @@ -141,6 +144,15 @@ The client subscribes to `app` always, `organization` when entering an organizat - Do not add backend git clone paths, `git fetch`, `git for-each-ref`, or direct backend git CLI calls. If you need git data, either read stored GitHub metadata or run the command inside a sandbox. - The `BackendDriver` has no `GitDriver` or `StackDriver`. Only `GithubDriver` and `TmuxDriver` remain. 
+## React Hook Dependency Safety + +- **Never use unstable references as `useEffect`/`useMemo`/`useCallback` dependencies.** React compares dependencies by reference, not value. Expressions like `?? []`, `?? {}`, `.map(...)`, `.filter(...)`, or object/array literals create new references every render, causing infinite re-render loops when used as dependencies. +- If the upstream value may be `undefined`/`null` and you need a fallback, either: + - Use the raw upstream value as the dependency and apply the fallback inside the effect body: `useEffect(() => { doThing(value ?? []); }, [value]);` + - Derive a stable primitive key: `const key = JSON.stringify(value ?? []);` then depend on `key` + - Memoize: `const stable = useMemo(() => value ?? [], [value]);` +- When reviewing code, treat any `?? []`, `?? {}`, or inline `.map()/.filter()` in a dependency array as a bug. + ## UI System - Foundry's base UI system is `BaseUI` with `Styletron`, plus Foundry-specific theme/tokens on top. Treat that as the default UI foundation. @@ -165,6 +177,7 @@ The client subscribes to `app` always, `organization` when entering an organizat - If the system reaches an unexpected state, raise an explicit error with actionable context. - Do not fail silently, swallow errors, or auto-ignore inconsistent data. - Prefer fail-fast behavior over hidden degradation when correctness is uncertain. +- **Never use bare `catch {}` or `catch { }` blocks.** Every catch must at minimum log the error with `logActorWarning` or `console.warn`. Silent catches hide bugs and make debugging impossible. If a catch is intentionally degrading (e.g. returning empty data when a sandbox is expired), it must still log so operators can see what happened. Use `catch (error) { logActorWarning(..., { error: resolveErrorMessage(error) }); }` or equivalent. ## RivetKit Dependency Policy @@ -205,8 +218,9 @@ For all Rivet/RivetKit implementation: - Do not add custom backend REST endpoints (no `/v1/*` shim layer). 
- We own the sandbox-agent project; treat sandbox-agent defects as first-party bugs and fix them instead of working around them. - Keep strict single-writer ownership: each table/row has exactly one actor writer. -- Parent actors (`organization`, `repository`, `task`, `history`, `sandbox-instance`) use command-only loops with no timeout. +- Parent actors (`organization`, `task`, `sandbox-instance`) use command-only loops with no timeout. - Periodic syncing lives in dedicated child actors with one timeout cadence each. +- **Task actors must be created lazily** — never during sync or bulk operations. PR sync writes virtual entries to the org's local `taskIndex`/`taskSummaries` tables. The task actor is created on first user interaction via `getOrCreate`. See `packages/backend/CLAUDE.md` "Lazy Task Actor Creation" for details. - Do not build blocking flows that wait on external systems to become ready or complete. Prefer push-based progression driven by actor messages, events, webhooks, or queue/workflow state changes. - Use workflows/background commands for any repo sync, sandbox provisioning, agent install, branch restack/rebase, or other multi-step external work. Do not keep user-facing actions/requests open while that work runs. - `send` policy: always `await` the `send(...)` call itself so enqueue failures surface immediately, but default to `wait: false`. @@ -227,8 +241,8 @@ Action handlers must return fast. The pattern: Examples: - `createTask` → `wait: true` (returns `{ taskId }`), then enqueue provisioning with `wait: false`. Client sees task appear immediately with pending status, observes `ready` via organization events. -- `sendWorkbenchMessage` → validate session is `ready` (throw if not), enqueue with `wait: false`. Client observes session transition to `running` → `idle` via session events. -- `createWorkbenchSession` → `wait: true` (returns `{ tabId }`), enqueue sandbox provisioning with `wait: false`. 
Client observes `pending_provision` → `ready` via task events. +- `sendWorkspaceMessage` → validate session is `ready` (throw if not), enqueue with `wait: false`. Client observes session transition to `running` → `idle` via session events. +- `createWorkspaceSession` → `wait: true` (returns `{ sessionId }`), enqueue sandbox provisioning with `wait: false`. Client observes `pending_provision` → `ready` via task events. Never use `wait: true` for operations that depend on external readiness, sandbox I/O, agent responses, git network operations, polling loops, or long-running queue drains. Never hold an action open while waiting for an external system to become ready — that is a polling/retry loop in disguise. @@ -240,11 +254,11 @@ All `wait: true` sends must have an explicit `timeout`. Maximum timeout for any ### Task creation: resolve metadata before creating the actor -When creating a task, all deterministic metadata (title, branch name) must be resolved synchronously in the parent actor (repository) *before* the task actor is created. The task actor must never be created with null `branchName` or `title`. +When creating a task, all deterministic metadata (title, branch name) must be resolved synchronously in the organization actor *before* the task actor is created. The task actor must never be created with null `branchName` or `title`. - Title is derived from the task description via `deriveFallbackTitle()` — pure string manipulation, no external I/O. - Branch name is derived from the title via `sanitizeBranchName()` + conflict checking against the repository's task index. -- The repository actor already has the task index and GitHub-backed default branch metadata. Resolve the branch name there without local git fetches. +- The organization actor owns the task index and reads GitHub-backed default branch metadata from the github-data actor. Resolve the branch name there without local git fetches. - Do not defer naming to a background provision workflow. 
Do not poll for names to become available. - The `onBranch` path (attaching to an existing branch) and the new-task path should both produce a fully-named task record on return. - Actor handle policy: @@ -320,9 +334,9 @@ Each entry must include: - Friction/issue - Attempted fix/workaround and outcome -## History Events +## Audit Log Events -Log notable workflow changes to `events` so `hf history` remains complete: +Log notable workflow changes to `events` so the audit log remains complete: - create - attach @@ -331,6 +345,8 @@ Log notable workflow changes to `events` so `hf history` remains complete: - status transitions - PR state transitions +When adding new task/workspace commands, always add a corresponding audit log event. + ## Validation After Changes Always run and fix failures: diff --git a/foundry/FOUNDRY-CHANGES.md b/foundry/FOUNDRY-CHANGES.md new file mode 100644 index 0000000..2bd76d2 --- /dev/null +++ b/foundry/FOUNDRY-CHANGES.md @@ -0,0 +1,1456 @@ +# Foundry Planned Changes + +## How to use this document + +Work through items checking boxes as you go. Some items have dependencies — do not start an item until its dependencies are checked off. After each item, run `pnpm -w typecheck && pnpm -w build && pnpm -w test` to validate. If an item includes a "CLAUDE.md update" section, apply it in the same change. Commit after each item passes validation. + +## Progress Log + +- 2026-03-14 10: Initial architecture mapping complete. + - Confirmed the current hot spots match the spec: `auth-user` is still mutation-by-action, `history` is still a separate actor with an `append` action wrapper, organization still owns `taskLookup`/`taskSummaries`, and the `Workbench*` surface is still shared across backend/client/frontend. + - Started foundational rename and migration planning for items `1`, `6`, and `25` because they drive most of the later fallout. +- 2026-03-14 11: Audit-log rename slice landed. 
+ - Renamed the backend actor from `history` to `audit-log`, switched the queue name to `auditLog.command.append`, and removed the `append` action wrapper. + - Updated task/repository/organization call sites to send directly to the audit-log queue or read through the renamed audit-log handle. +- 2026-03-14 12: Foundational naming and dead-surface cleanup landed. + - Renamed the backend auth actor surface from `authUser` to `user`, including actor registration, key helpers, handles, and Better Auth service routing. + - Deleted the dead `getTaskEnriched` / `enrichTaskRecord` fan-out path and changed organization task reads to go straight to the task actor. + - Renamed admin-only GitHub rebuild/reload actions with the `admin*` prefix across backend, client, and frontend. + - Collapsed organization realtime to full-snapshot `organizationUpdated` events and aligned task events to `type: "taskUpdated"`. +- 2026-03-14 13: Task schema migration cleanup landed. + - Removed the task actor's runtime `CREATE TABLE IF NOT EXISTS` / `ALTER TABLE` helpers from `task/workbench.ts` and `task/workflow/init.ts`. + - Updated the checked-in task migration artifacts so the schema-defined task/session/runtime columns are created directly by migrations. +- 2026-03-14 14: Item 3 blocker documented. + - The spec's requested literal singleton `CHECK (id = 1)` on the Better Auth `user` table conflicts with the existing Better Auth adapter contract, which relies on external string `user.id`. + - Proceeding safely will require a design adjustment for that table rather than a straight mechanical migration. +- 2026-03-14 15: Better Auth mapping comments landed. + - Added Better Auth vs custom Foundry table/action comments in the user and organization actor schema/action surfaces so the adapter-constrained paths are explicit. +- 2026-03-15 09: Branch rename surface deleted and stale organization subscription fixed. 
+ - Removed the remaining branch-rename surface from the client, mock backend, frontend UI, and repository action layer. There are no remaining `renameBranch` / `renameWorkbenchBranch` references in Foundry. + - Fixed the remote backend client to listen for `organizationUpdated` on the organization connection instead of the dead `workspaceUpdated` event name. +- 2026-03-15 10: Backend workspace rename landed. + - Renamed the backend task UI/workflow surface from `workbench` to `workspace`, including the task actor file, queue topic family, organization proxy actions, and the task session table name (`task_workspace_sessions`). + - Backend actor code no longer contains `Workbench` / `workbench` references, so the remaining shared/client/frontend rename can align to a stable backend target. +- 2026-03-15 11: Default model moved to user-scoped app state. + - Removed `defaultModel` from the organization schema/snapshot and stored it on the user profile instead, exposed through the app snapshot as a user preference. + - Wired `setAppDefaultModel` through the backend/app clients and changed the model picker to persist the starred/default model instead of resetting local React state on reload. +- 2026-03-15 11: Workspace surface completed across Foundry packages. + - Renamed the shared/client/frontend surface from `Workbench` to `Workspace`, including `workspace.ts`, workspace client/model files, DTO/type names, backend-client method names, frontend view-model imports, and the affected e2e/test files. + - Verified that Foundry backend/shared/client/frontend packages no longer contain `Workbench` / `workbench` references. +- 2026-03-15 11: Singleton constraints tightened where safe. + - Added `CHECK (id = 1)` enforcement for `github_meta`, `repo_meta`, `organization_profile`, and `user_profiles`, and updated the affected code paths/migrations to use row id `1`. + - The Better Auth `user` table remains blocked by the adapter contract, so item `3` is still open overall. 
+- 2026-03-14 12: Confirmed blocker for later user-table singleton work. + - Item `3` conflicts with the current Better Auth adapter contract for the `user` table: the adapter depends on the external string `user.id`, while the spec also asks for a literal singleton `CHECK (id = 1)` on that same table. + - That cannot be applied mechanically without redesigning the Better Auth adapter contract or introducing a separate surrogate identity column. I have not forced that change yet. +- 2026-03-15 13: Task/repository durable-state cleanup and auth-scoped workspace reads landed. + - Removed the remaining task/repository actor durable-state duplication: task `createState` now holds only `(organizationId, repoId, taskId)`, repository `createState` now holds only `(organizationId, repoId)`, task initialization seeds SQLite from the initialize queue payload, and task record reads fetch `repoRemote` through repository metadata instead of stale actor state. + - Removed the repository creation-time `remoteUrl` dependency from actor handles/callers and changed repository metadata to backfill/persist `remoteUrl` from GitHub data when needed. + - Wired Better Auth session ids through the remote client workspace/task-detail reads and through the task workflow queue handlers so user-scoped workspace state is no longer dropped on the floor by the organization/task proxy path. +- 2026-03-15 14: Coordinator routing boundary tightened. + - Removed the organization actor's fallback `taskId -> repoId` scan across repositories; task proxy actions now require `repoId` and route directly to the repository/task coordinator path the client already uses. + - Updated backend architecture notes to reflect the live repo-owned task projection (`tasks`) and the removal of the old organization-owned `taskLookup` / `taskSummaries` indexes. +- 2026-03-15 15: Workspace session-selection and dead task-status cleanup landed. 
+ - Surfaced viewer-scoped `activeSessionId` through workspace task summary/detail DTOs, threaded it through the backend/client/mock surfaces, and added a dedicated workspace `select_session` mutation so session-tab selection now persists in `user_task_state` instead of living only in frontend local state. + - Removed dead task `diffStat` and sandbox `statusMessage` fields from the live workspace/task contracts and backend writes, and updated stale frontend/mock/e2e consumers to stop reading them. +- 2026-03-15 16: GitHub sync progress is now live on the organization topic. + - Added persisted GitHub sync phase/generation/progress fields to the github-data actor meta row and the organization profile projection, and exposed them through `organizationUpdated` snapshots so workspace consumers no longer wait on stale app-topic state during repo imports. + - Chunked branch and pull-request fetches by repository batches, added generation markers to imported GitHub rows, switched sync refreshes to upsert+sweep instead of delete-then-replace, and updated the workspace shell/dev panel to show live sync phase progress from the organization subscription. +- 2026-03-15 17: Foundry-local model lists now route through shared Sandbox Agent config resources. + - Removed the remaining duplicated hardcoded model tables from the frontend/client workspace view-model layer and switched backend default-model / agent-inference fallbacks to the shared catalog helpers in `shared/src/models.ts`. + - Updated mock/default app state to stop seeding deleted `claude-sonnet-4` / `claude-opus-4` ids, and aligned the user-profile default-model migration fallback with the shared catalog default. +- 2026-03-15 17: Shared model catalog moved off the old fixed union. 
+ - Replaced the shared `WorkspaceModelId` closed union with string ids, introduced a shared model catalog derived from the sandbox-agent agent-config resources, and switched the client/frontend picker label helpers to consume that catalog instead of maintaining separate hardcoded `MODEL_GROUPS` arrays. + - Updated backend default-model and model→agent fallback logic to use the shared catalog/default id, and relaxed e2e env parsing so new sandbox-agent model ids can flow through without patching Foundry first. +- 2026-03-15 18: Workspace task status collapsed to a single live field. + - Removed the duplicate `runtimeStatus` field from workspace task/detail DTOs and all current backend/client/frontend consumers, so workspace task `status` is now the only task-state field on that surface. + - Removed the remaining synthetic `"new"` task status from the live workspace path; mock task creation now starts in the first concrete init state instead of exposing a frontend-only status. +- 2026-03-15 19: GitHub sync now persists branch and PR batches as they are fetched. + - The branch and pull-request phases now upsert each fetched repository batch immediately and only sweep stale rows after the phase completes, instead of buffering the full dataset in memory until the end of the sync. + - This aligns chunked progress reporting with chunked persistence and tightens recovery behavior for large repository imports. +- 2026-03-15 20: Repository-owned task projection artifacts are now aligned with runtime. + - Removed the last stale `task_lookup` Drizzle artifacts from the organization actor so the checked-in schema snapshots match the live repository-owned `tasks` projection. + - There are no remaining org/repo runtime references to the old org-side task lookup table. +- 2026-03-15 21: Legacy task/runtime fields are fully gone from the live Foundry surface. 
+ - Confirmed the old task-table/runtime fields from item `21` are removed across backend/shared/client/frontend, and renamed the last leftover `agentTypeForModel()` helper to the neutral `sandboxAgentIdForModel()`. + - Deleted the final dead frontend diff-stat formatter/test that only referenced already-removed task diff state. +- 2026-03-15 22: Task status tracking is now fully collapsed to the canonical task status enum. + - With the earlier backend `statusMessage` removal plus this turn's workspace contract cleanup, the workspace/task surface now derives all task status UI from the canonical backend `status` enum. + - There are no remaining live workspace `runtimeStatus` or synthetic `"new"` task-state branches. +- 2026-03-15 23: Per-user workspace UI state is fully sourced from the user actor overlay. + - Confirmed the shared task actor no longer stores per-user `activeSessionId`, unread, or draft columns; those values are persisted in `user_task_state` and only projected back into workspace DTOs for the current viewer. + - The remaining active-session/unread/draft references in client/frontend code are consumer fields of that user-scoped overlay, not shared task-actor storage. +- 2026-03-15 24: Subscription topics are now fully normalized to single-snapshot events. + - Confirmed the shared realtime contracts now expose one full replacement event per topic (`appUpdated`, `organizationUpdated`, `taskUpdated`, `sessionUpdated`, `processesUpdated`) with matching wire event names and type fields. + - The client subscription manager already treats organization/task topics as full-snapshot refreshes, so there are no remaining multi-variant organization events or `taskDetailUpdated` name mismatches in live code. +- 2026-03-15 25: Sidebar PR/task split dead branches trimmed further. + - Removed the remaining dead `pr:`-id sidebar branch and switched the workspace sidebar to the real `pullRequest.isDraft` field instead of stale `pullRequest.status` reads. 
+ - This does not finish item `15`, but it reduces the remaining synthetic PR/task split surface in the frontend. +- 2026-03-15 26: User-actor mutations now flow through a dedicated workflow queue. + - Added [user/workflow.ts](/home/nathan/sandbox-agent/foundry/packages/backend/src/actors/user/workflow.ts) plus shared query helpers, wired the user actor up with explicit queue names, and moved auth/profile/session/task-state mutations behind workflow handlers instead of direct action bodies. +- 2026-03-15 27: Organization GitHub/shell/billing mutations now route through workflow queues. + - Added shared organization queue definitions in `organization/queues.ts`, taught the organization workflow to handle the remaining GitHub projection, org-profile, and billing mutation commands, and switched the app-shell, Better Auth, GitHub-data actor, and org-isolation test to send queue messages instead of calling direct org mutation actions. + - Deleted the dead organization shell mutation actions that no longer had callers (`applyOrganizationSyncCompleted`, `markOrganizationSyncFailed`, `applyGithubInstallationCreated`, `applyGithubInstallationRemoved`, `applyGithubRepositoryChanges`), which moves items `4`, `10`, and `12` forward even though the broader org action split is still open. +- 2026-03-15 28: Organization action split trimmed more of the monolith and removed dead event types. + - Moved `starSandboxAgentRepo` into `organization/actions/onboarding.ts` and the admin GitHub reload actions into `organization/actions/github.ts`, so `organization/actions.ts` is carrying fewer unrelated app-shell responsibilities. + - Deleted the dead backend-only `actors/events.ts` type file after confirming nothing in Foundry still imports those old task/PR event interfaces. +- 2026-03-15 29: Repo overview branch rows now carry a single PR object. 
+ - Replaced the repo-overview branch DTO's scalar PR fields (`prNumber`, `prState`, `prUrl`, `reviewStatus`, `reviewer`) with `pullRequest: WorkspacePullRequestSummary | null`, and updated repository overview assembly plus the organization dashboard to consume that unified PR shape. + - This does not finish item `15`, but it removes another synthetic PR-only read surface and makes the repo overview align better with the task summary PR model. +- 2026-03-15 30: Repo overview stopped falling back to raw GitHub PR rows. + - Changed repository overview assembly to read PR metadata only from the repo-owned task projection instead of rejoining live GitHub PR rows on read, so the dashboard is one step closer to treating PRs as task data rather than a separate UI entity. +- 2026-03-15 31: GitHub organization-shell repair now uses the org workflow queue. + - Converted `syncOrganizationShellFromGithub` from a direct org action into a workflow-backed mutation command and updated the GitHub org sync path to send `organization.command.github.organization_shell.sync_from_github` instead of calling the action directly. + - Updated Better Auth adapter writes and task user-overlay writes to send directly to the user workflow queue, which partially lands item `4` and sets up item `11` for the user actor. +- 2026-03-15 27: Workflow layout standardized and queue-only write paths expanded. + - Split the remaining inline actor workflows into dedicated files for `audit-log`, `repository`, `github-data`, and `organization`, and moved user read actions into `user/actions/*` with Better Auth-prefixed action names. + - Removed the task actor's public mutation action wrappers entirely, moved organization/repository/github-data/task coordination onto direct queue sends, and made repository metadata reads stop mutating `repo_meta` on cache misses. +- 2026-03-15 28: PR-only admin/UI seams trimmed and PR branches now claim real tasks. 
+ - Removed the remaining dedicated "reload pull requests" / "reload pull request" admin hooks from the backend/client/frontend surfaces and deleted the sidebar PR-only context action. + - Repository PR refresh now lazily creates a branch-owned task when a pull request arrives for an unclaimed branch, so PR-only branches stop living purely as a side table in GitHub sync flows. +- 2026-03-15 29: Organization Better Auth writes now use workflow queues. + - Split the organization actor's Better Auth routing and verification reads into `organization/actions/better-auth.ts`, moved `APP_SHELL_ORGANIZATION_ID` to `organization/constants.ts`, and renamed the org Better Auth read surface to the `betterAuth*` form. + - Added dedicated organization workflow queue handlers for session/email/account index writes plus verification CRUD, and updated `services/better-auth.ts` to send those mutations directly to organization queues instead of calling mutation actions. +- 2026-03-15 30: Shared model routing metadata is now centralized. + - Extended the shared model catalog with explicit `agentKind` and `sandboxAgentId` metadata, changed `WorkspaceAgentKind` to a dynamic string, and switched backend task session creation to resolve sandbox agent ids through the shared catalog instead of hardcoded `Codex` vs `Claude` branching. + - Updated the mock app/workspace and frontend model picker/new-task flows to consume the shared catalog/default model instead of forcing stale `Claude`/`Codex` fallbacks or a baked-in `gpt-5.3-codex` create-task default. +- 2026-03-15 31: Dead GitHub-data PR reload surface removed and fixture PR shapes aligned. + - Deleted the unused GitHub-data `reloadPullRequest` workflow command plus the dead `listOpenPullRequests` / `getPullRequestForBranch` action surface that no longer has live Foundry callers. 
+ - Fixed the stale client `workspace-model.ts` pull-request fixtures to use the live `WorkspacePullRequestSummary` shape, which removes the last targeted client type errors in the touched slice. +- 2026-03-15 32: Organization action splitting continued past Better Auth. + - Moved the app snapshot/default-model/org-profile actions into `organization/actions/organization.ts`, onboarding actions into `organization/actions/onboarding.ts`, and app-level GitHub token/import actions into `organization/actions/github.ts`, then composed those files at the actor boundary. + - `organization/app-shell.ts` now exports shared helpers for those domains and no longer directly defines the moved action handlers, shrinking the remaining monolith and advancing item `10`. +- 2026-03-15 33: Task PR detail now reads the repository-owned task projection. + - Removed duplicate scalar PR fields from `TaskRecord` and `WorkspaceTaskDetail`, switched the remaining frontend/client consumers to the canonical `pullRequest` object, and trimmed stale mock/test scaffolding that still populated those dead fields. + - Replaced the task actor's PR lookup path with a repository projection read (`getProjectedTaskSummary`) so task detail/summary no longer ask the repo actor to re-query GitHub PR rows by branch. +- 2026-03-15 34: Workspace model catalogs now come from the live sandbox-agent API. + - Added a shared normalizer for `/v1/agents?config=true` payloads, exposed sandbox-scoped `listWorkspaceModelGroups()` from the task sandbox actor, and switched backend workspace session creation to resolve sandbox agent ids from the live sandbox catalog instead of only the checked-in default tables. + - Updated the frontend workspace model picker to query the active sandbox for model groups and use that live catalog for labels/options, while keeping the shared default catalog only as a fallback when no sandbox is available yet or the sandbox-agent connection is unavailable. 
+- 2026-03-15 35: Backend-only organization snapshot refresh is now queue-backed. + - Added `organization.command.snapshot.broadcast` to the organization workflow, switched repository and app-import callers to send that queue message instead of calling the organization actor's `refreshOrganizationSnapshot` action directly, and removed the direct action wrapper. + - Deleted the dead `adminReconcileWorkspaceState` organization action/interface entry after confirming nothing in Foundry still calls it. +- 2026-03-15 36: Dead backend actor export cleanup continued. + - Removed the stale `export * from "./events.js"` line from `backend/src/actors/index.ts`, which was left behind after deleting the dead backend event type file. + - This keeps the backend actor barrel aligned with the live file set and advances the final dead-code/event audit. +- 2026-03-15 34: Item 17 removed from this checklist; do not leave started items half-finished. + - By request, item `17` (`Type all actor context parameters — remove c: any`) is deferred out of this Foundry task and should not block completion here. + - Process note for the remaining checklist work: once an item is started, finish that item to completion before opening a different partial seam. Item `15` is the current priority under that rule. +- 2026-03-15 35: Task/PR unification now routes live PR changes through repository-owned task summaries only. + - GitHub PR sync and webhook handling now send concrete PR summaries directly to the repository coordinator, which lazily creates a real branch-owned task when needed and persists PR metadata on the task projection instead of re-querying raw `github_pull_requests` rows from repository reads. + - Cleared the last stale scalar PR test references (`prUrl`, `reviewStatus`, `reviewer`) so the remaining Foundry surfaces consistently use the canonical `pullRequest` object. 
+- 2026-03-15 36: Organization action entrypoints are now fully organized under `actions/`, and the public mutation surface is queue-only. + - Moved organization task/workspace proxy actions plus `createTaskMutation` into `organization/actions/tasks.ts`, added `organization/actions/app.ts` so every composed org action bundle now lives under `organization/actions/*`, and removed dead `app-shell` exports that no longer had external callers. + - Audited the remaining public organization actor actions and confirmed the write paths go through organization/repository/task/github-data workflow queues instead of direct mutation actions, which closes item `4` and item `10`. +- 2026-03-15 37: Organization dead-code audit completed. + - Removed the leftover exported-only Better Auth predicate helper from `organization/actions/better-auth.ts`; it is now module-private because nothing outside that file uses it. + - Audited the remaining organization actor surface and confirmed the live public reads/writes still in use are the composed `actions/*` bundles plus workflow mutation helpers. There are no remaining dead org action exports from the pre-refactor monolith. +- 2026-03-15 38: Final dead-event and dead-surface audit completed for the in-scope Foundry refactor. + - Confirmed the live Foundry realtime topics each have a single event type (`appUpdated`, `organizationUpdated`, `taskUpdated`, `sessionUpdated`), and the deleted legacy event names (`workspaceUpdated`, `taskSummaryUpdated`, `taskDetailUpdated`, `pullRequestUpdated`, `pullRequestRemoved`) no longer exist in live Foundry code. + - Re-audited the major removed compatibility seams (`Workbench`, branch rename, PR-only sidebar ids, duplicate runtime task status, `getTaskEnriched`, organization-owned task lookup tables) and found no remaining live references beyond expected domain strings like GitHub webhook event names or CLI `pr` labels. 
+- 2026-03-15 39: Item 15 was finished for real by moving PR ownership into the task actor. + - Added task-local `pull_request_json` storage, switched task detail/summary reads to the task DB, and added `task.command.pull_request.sync` so GitHub/repository flows update PR metadata through the task coordinator instead of overlaying it in the repository projection. + - The mock right sidebar now trusts the canonical `task.pullRequest.url` field instead of rebuilding a PR URL from repo name + PR number. +- 2026-03-15 40: Better Auth user singleton constraint is now enforced without breaking the adapter contract. + - The user actor's `user` table now uses an integer singleton primary key with `CHECK (id = 1)` plus a separate `auth_user_id` column for Better Auth's external string identity. + - Updated the user actor query/join/mutation helpers so Better Auth still reads and writes logical `user.id` as the external string id while SQLite enforces the singleton row invariant locally. + +No backwards compatibility — delete old code, don't deprecate. If something is removed, remove it everywhere (backend, client, shared types, frontend, tests, mocks). + +### Suggested execution order (respects dependencies) + +**Wave 1 — no dependencies, can be done in any order:** +1, 2, 3, 4, 5, 6, 13, 16, 20, 21, 23, 25 + +**Wave 2 — depends on wave 1:** +7 (after 1), 9 (after 13), 10 (after 1+6), 11 (after 4), 22 (after 1), 24 (after 21), 26 (after 25) + +**Wave 3 — depends on wave 2:** +8 (after 7+25), 12 (after 10), 15 (after 9+13), 19 (after 21+24) + +**Wave 4 — depends on wave 3:** +14 (after 15) + +**Final:** +18 (after everything), final audit pass (after everything) + +### Index + +- [x] 1. Rename Auth User actor → User actor +- [x] 2. Add Better Auth mapping comments to user/org actor tables +- [x] 3. Enforce `id = 1` CHECK constraint on single-row tables +- [x] 4. Move all mutation actions to queue messages +- [x] 5. Migrate task actor raw SQL to Drizzle migrations +- [x] 6. 
Rename History actor → Audit Log actor +- [x] 7. Move starred/default model to user actor settings *(depends on: 1)* +- [x] 8. Replace hardcoded model/agent lists with sandbox-agent API data *(depends on: 7, 25)* +- [x] 9. Flatten `taskLookup` + `taskSummaries` into single `tasks` table *(depends on: 13)* +- [x] 10. Reorganize user and org actor actions into `actions/` folders *(depends on: 1, 6)* +- [x] 11. Standardize workflow file structure across all actors *(depends on: 4)* +- [x] 12. Audit and remove dead code in organization actor *(depends on: 10)* +- [x] 13. Enforce coordinator pattern and fix ownership violations +- [x] 14. Standardize one event per subscription topic *(depends on: 15)* +- [x] 15. Unify tasks and pull requests — PRs are just task data *(depends on: 9, 13)* +- [x] 16. Chunk GitHub data sync and publish progress +- [x] 18. Final pass: remove all dead code *(depends on: all other items)* +- [x] 19. Remove duplicate data between `c.state` and SQLite *(depends on: 21, 24)* +- [x] 20. Prefix admin/recovery actions with `admin` +- [x] 21. Remove legacy/session-scoped fields from task table +- [x] 22. Move per-user UI state from task actor to user actor *(depends on: 1)* +- [x] 23. Delete `getTaskEnriched` and `enrichTaskRecord` (dead code) +- [x] 24. Clean up task status tracking *(depends on: 21)* +- [x] 25. Remove "Workbench" prefix from all types, functions, files, tables +- [x] 26. Delete branch rename (branches immutable after creation) *(depends on: 25)* +- [x] Final audit pass: dead events scan *(depends on: all other items)* + +Deferred follow-up outside this checklist: + +- 17. Type all actor context parameters — remove `c: any` *(removed from this task's scope by request)* + +--- + +## [x] 1. Rename Auth User actor → User actor + +**Rationale:** The actor is already a single per-user actor storing all user data. The "Auth" prefix is unnecessary. 
+ +### Files to change + +- **`foundry/packages/backend/src/actors/auth-user/`** → rename directory to `user/` + - `index.ts` — rename export `authUser` → `user`, display name `"Auth User"` → `"User"` + - `db/schema.ts`, `db/db.ts`, `db/migrations.ts`, `db/drizzle.config.ts` — update any auth-prefixed references +- **`foundry/packages/backend/src/actors/keys.ts`** — `authUserKey()` → `userKey()` +- **`foundry/packages/backend/src/actors/handles.ts`** — `getOrCreateAuthUser` → `getOrCreateUser`, `getAuthUser` → `getUser`, `selfAuthUser` → `selfUser` +- **`foundry/packages/backend/src/actors/index.ts`** — update import path and registration +- **`foundry/packages/backend/src/services/better-auth.ts`** — update all `authUser` references +- **Action names** — consider dropping "Auth" prefix from `createAuthRecord`, `findOneAuthRecord`, `updateAuthRecord`, `deleteAuthRecord`, `countAuthRecords`, etc. + +--- + +## [x] 2. Add Better Auth mapping comments to user/org actor tables, actions, and queues + +**Rationale:** The user and organization actors contain a mix of Better Auth-driven and custom Foundry code. Tables, actions, and queues that exist to serve Better Auth's adapter need comments so developers know which pieces are constrained by Better Auth's schema/contract and which are ours to change freely. + +### Table mapping + +| Actor | Table | Better Auth? 
| Notes | +|---|---|---|---| +| user | `user` | Yes — 1:1 `user` model | All fields from Better Auth | +| user | `session` | Yes — 1:1 `session` model | All fields from Better Auth | +| user | `account` | Yes — 1:1 `account` model | All fields from Better Auth | +| user | `user_profiles` | No — custom Foundry | GitHub login, role, eligible orgs, starter repo status | +| user | `session_state` | No — custom Foundry | Active organization per session | +| org | `auth_verification` | Yes — Better Auth `verification` model | Lives on org actor because verification happens before user exists | +| org | `auth_session_index` | No — custom routing index | Maps session tokens → user actor IDs for Better Auth adapter routing | +| org | `auth_email_index` | No — custom routing index | Maps emails → user actor IDs for Better Auth adapter routing | +| org | `auth_account_index` | No — custom routing index | Maps OAuth accounts → user actor IDs for Better Auth adapter routing | + +### Action/queue mapping (user actor) + +| Action/Queue | Better Auth? 
| Notes | +|---|---|---| +| `createAuthRecord` | Yes — Better Auth adapter | Called by Better Auth adapter to create user/session/account records | +| `findOneAuthRecord` | Yes — Better Auth adapter | Called by Better Auth adapter for single-record lookups with joins | +| `findManyAuthRecords` | Yes — Better Auth adapter | Called by Better Auth adapter for multi-record queries | +| `updateAuthRecord` | Yes — Better Auth adapter | Called by Better Auth adapter to update records | +| `updateManyAuthRecords` | Yes — Better Auth adapter | Called by Better Auth adapter for bulk updates | +| `deleteAuthRecord` | Yes — Better Auth adapter | Called by Better Auth adapter to delete records | +| `deleteManyAuthRecords` | Yes — Better Auth adapter | Called by Better Auth adapter for bulk deletes | +| `countAuthRecords` | Yes — Better Auth adapter | Called by Better Auth adapter for count queries | +| `getAppAuthState` | No — custom Foundry | Aggregates auth state for frontend consumption | +| `upsertUserProfile` | No — custom Foundry | Manages Foundry-specific user profile data | +| `upsertSessionState` | No — custom Foundry | Manages Foundry-specific session state | + +### Action/queue mapping (organization actor app-shell) + +| Action/Queue | Better Auth? 
| Notes | +|---|---|---| +| App-shell auth index CRUD actions | Yes — Better Auth adapter routing | Maintain lookup indexes so the adapter can route by session/email/account to the correct user actor | +| `auth_verification` CRUD | Yes — Better Auth `verification` model | Used for email verification and password resets | + +### Files to change + +- **`foundry/packages/backend/src/actors/auth-user/db/schema.ts`** — add doc comments to each table: + - `user`, `session`, `account`: "Better Auth core model — schema defined at https://better-auth.com/docs/concepts/database" + - `user_profiles`, `session_state`: "Custom Foundry table — not part of Better Auth" +- **`foundry/packages/backend/src/actors/auth-user/index.ts`** — add doc comments to each action/queue: + - Better Auth adapter actions: "Better Auth adapter — called by the Better Auth adapter in better-auth.ts. Schema constrained by Better Auth." + - Custom actions: "Custom Foundry action — not part of Better Auth" +- **`foundry/packages/backend/src/actors/organization/db/schema.ts`** — add doc comments to `auth_verification` (Better Auth core), and the three index tables (Better Auth adapter routing) +- **`foundry/packages/backend/src/actors/organization/app-shell.ts`** — add doc comments to auth index actions marking them as Better Auth adapter routing infrastructure + +--- + +## [x] 3. Enforce `id = 1` CHECK constraint on all single-row actor tables + +**Rationale:** When an actor instance represents a single entity, tables that hold exactly one row should enforce this at the DB level with a `CHECK (id = 1)` constraint. The task actor already does this correctly; other actors don't. 
+ +### Tables needing the constraint + +| Actor | Table | Current enforcement | Fix needed | +|---|---|---|---| +| auth-user (→ user) | `user` | None | Add `CHECK (id = 1)`, use integer PK | +| auth-user (→ user) | `user_profiles` | None | Add `CHECK (id = 1)`, use integer PK | +| github-data | `github_meta` | Hardcoded `id=1` in code only | Add `CHECK (id = 1)` in schema | +| organization | `organization_profile` | None | Add `CHECK (id = 1)`, use integer PK | +| repository | `repo_meta` | Hardcoded `id=1` in code only | Add `CHECK (id = 1)` in schema | +| task | `task` | CHECK constraint | Already correct | +| task | `task_runtime` | CHECK constraint | Already correct | + +### Files to change + +- **`foundry/packages/backend/src/actors/auth-user/db/schema.ts`** — change `user` and `user_profiles` tables to integer PK with CHECK constraint +- **`foundry/packages/backend/src/actors/auth-user/index.ts`** — update queries to use `id = 1` pattern +- **`foundry/packages/backend/src/services/better-auth.ts`** — update adapter to use fixed `id = 1` +- **`foundry/packages/backend/src/actors/github-data/db/schema.ts`** — add CHECK constraint to `github_meta` (already uses `id=1` in code) +- **`foundry/packages/backend/src/actors/organization/db/schema.ts`** — change `organization_profile` to integer PK with CHECK constraint +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — update queries to use `id = 1` +- **`foundry/packages/backend/src/actors/repository/db/schema.ts`** — add CHECK constraint to `repo_meta` (already uses `id=1` in code) +- All affected actors — regenerate `db/migrations.ts` + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Single-row tables (tables that hold exactly one record per actor instance, e.g. metadata or profile tables) must use an integer primary key with a `CHECK (id = 1)` constraint to enforce the singleton invariant at the database level. 
Follow the pattern established in the task actor's `task` and `task_runtime` tables." + +--- + +## [x] 4. Move all mutation actions to queue messages + +**Rationale:** Actions should be read-only (queries). All mutations (INSERT/UPDATE/DELETE) should go through queue messages processed by workflow handlers. This ensures single-writer consistency and aligns with the actor model. No actor currently does this correctly — the history actor has the mutation in the workflow handler, but the `append` action wraps a `wait: true` queue send, which is the same anti-pattern (callers should send to the queue directly). + +### Violations by actor + +**User actor (auth-user)** — `auth-user/index.ts` — 7 mutation actions: +- `createAuthRecord` (INSERT, line 164) +- `updateAuthRecord` (UPDATE, line 205) +- `updateManyAuthRecords` (UPDATE, line 219) +- `deleteAuthRecord` (DELETE, line 234) +- `deleteManyAuthRecords` (DELETE, line 243) +- `upsertUserProfile` (UPSERT, line 283) +- `upsertSessionState` (UPSERT, line 331) + +**GitHub Data actor** — `github-data/index.ts` — 7 mutation actions: +- `fullSync` (batch INSERT/DELETE/UPDATE, line 686) +- `reloadOrganization` (batch, line 690) +- `reloadAllPullRequests` (batch, line 694) +- `reloadRepository` (INSERT/UPDATE, line 698) +- `reloadPullRequest` (INSERT/DELETE/UPDATE, line 763) +- `clearState` (batch DELETE, line 851) +- `handlePullRequestWebhook` (INSERT/UPDATE/DELETE, line 879) + +**Organization actor — `actions.ts`** — 5 mutation actions: +- `applyTaskSummaryUpdate` (UPSERT, line 464) +- `removeTaskSummary` (DELETE, line 476) +- `applyGithubRepositoryProjection` (UPSERT, line 521) +- `applyGithubDataProjection` (INSERT/UPDATE/DELETE, line 547) +- `recordGithubWebhookReceipt` (UPDATE, line 620) + +**Organization actor — `app-shell.ts`** — 38 mutation actions: + +Better Auth index mutations (11): +- `authUpsertSessionIndex` (UPSERT) +- `authDeleteSessionIndex` (DELETE) +- `authUpsertEmailIndex` (UPSERT) +- `authDeleteEmailIndex` 
(DELETE) +- `authUpsertAccountIndex` (UPSERT) +- `authDeleteAccountIndex` (DELETE) +- `authCreateVerification` (INSERT) +- `authUpdateVerification` (UPDATE) +- `authUpdateManyVerification` (UPDATE) +- `authDeleteVerification` (DELETE) +- `authDeleteManyVerification` (DELETE) + +Organization profile/state mutations (13): +- `updateOrganizationShellProfile` (UPDATE on organizationProfile) +- `markOrganizationSyncStarted` (UPDATE on organizationProfile) +- `applyOrganizationSyncCompleted` (UPDATE on organizationProfile) +- `markOrganizationSyncFailed` (UPDATE on organizationProfile) +- `applyOrganizationStripeCustomer` (UPDATE on organizationProfile) +- `applyOrganizationStripeSubscription` (UPSERT on organizationProfile) +- `applyOrganizationFreePlan` (UPDATE on organizationProfile) +- `setOrganizationBillingPaymentMethod` (UPDATE on organizationProfile) +- `setOrganizationBillingStatus` (UPDATE on organizationProfile) +- `upsertOrganizationInvoice` (UPSERT on invoices) +- `recordOrganizationSeatUsage` (UPSERT on seatAssignments) +- `applyGithubInstallationCreated` (UPDATE on organizationProfile) +- `applyGithubInstallationRemoved` (UPDATE on organizationProfile) + +App-level mutations that delegate + mutate (13): +- `skipAppStarterRepo` (calls upsertUserProfile) +- `starAppStarterRepo` (calls upsertUserProfile + child mutation) +- `selectAppOrganization` (calls setActiveOrganization) +- `triggerAppRepoImport` (calls markOrganizationSyncStarted) +- `createAppCheckoutSession` (calls applyOrganizationFreePlan + applyOrganizationStripeCustomer) +- `finalizeAppCheckoutSession` (calls applyOrganizationStripeCustomer) +- `cancelAppScheduledRenewal` (calls setOrganizationBillingStatus) +- `resumeAppSubscription` (calls setOrganizationBillingStatus) +- `recordAppSeatUsage` (calls recordOrganizationSeatUsage) +- `handleAppStripeWebhook` (calls multiple org mutations) +- `handleAppGithubWebhook` (calls org mutations + github-data mutations) +- `syncOrganizationShellFromGithub` 
(multiple DB operations) +- `applyGithubRepositoryChanges` (calls applyGithubRepositoryProjection) + +**Task actor workbench** — `task/workbench.ts` — 14 mutation actions: +- `renameWorkbenchTask` (UPDATE, line 970) +- `renameWorkbenchBranch` (UPDATE, line 988) +- `createWorkbenchSession` (INSERT, line 1039) +- `renameWorkbenchSession` (UPDATE, line 1125) +- `setWorkbenchSessionUnread` (UPDATE, line 1136) +- `updateWorkbenchDraft` (UPDATE, line 1143) +- `changeWorkbenchModel` (UPDATE, line 1152) +- `sendWorkbenchMessage` (UPDATE, line 1205) +- `stopWorkbenchSession` (UPDATE, line 1255) +- `syncWorkbenchSessionStatus` (UPDATE, line 1265) +- `closeWorkbenchSession` (UPDATE, line 1331) +- `markWorkbenchUnread` (UPDATE, line 1363) +- `publishWorkbenchPr` (UPDATE, line 1375) +- `revertWorkbenchFile` (UPDATE, line 1403) + +**Repository actor** — `repository/actions.ts` — 5 mutation actions/helpers: +- `createTask` → calls `createTaskMutation()` (INSERT on taskIndex + creates task actor) +- `registerTaskBranch` → calls `registerTaskBranchMutation()` (INSERT/UPDATE on taskIndex) +- `reinsertTaskIndexRow()` (INSERT/UPDATE, called from `getTaskEnriched`) +- `deleteStaleTaskIndexRow()` (DELETE) +- `persistRemoteUrl()` (INSERT/UPDATE on repoMeta, called from `getRepoOverview`) + +### History (audit log) actor — `append` action must also be removed + +The history actor's workflow handler is correct (mutation in queue handler), but the `append` action (line 77) is a `wait: true` wrapper around the queue send — same anti-pattern. Delete the `append` action. Callers (the `appendHistory()` helper in `task/workflow/common.ts`) should send directly to the `auditLog.command.append` queue with `wait: false` (audit log writes are fire-and-forget, no need to block the caller). 
+ +### Reference patterns (queue handlers only, no action wrappers) +- **Task actor core** — initialize, attach, push, sync, merge, archive, kill all use queue messages directly + +### Migration approach + +This is NOT about wrapping queue sends inside actions. The mutation actions must be **removed entirely** and replaced with queue messages that callers (including `packages/client`) send directly. + +Each actor needs: +1. Define queue message types for each mutation +2. Move mutation logic from action handlers into workflow/queue handlers +3. **Delete the mutation actions** — do not wrap them +4. Update `packages/client` to send queue messages directly to the actor instead of calling the old action +5. Update any inter-actor callers (e.g. `better-auth.ts`, `app-shell.ts`, other actors) to send queue messages instead of calling actions + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Actions must be read-only. All database mutations (INSERT, UPDATE, DELETE, UPSERT) must be queue messages processed by workflow handlers. Callers (client, other actors, services) send messages directly to the queue — do not wrap queue sends inside actions. Follow the pattern established in the task workflow actor's queue handlers." + +--- + +## [x] 5. Migrate task actor raw SQL to Drizzle migrations + +**Rationale:** The task actor uses raw `db.execute()` with `ALTER TABLE ... ADD COLUMN` in `workbench.ts` and `workflow/init.ts` instead of proper Drizzle migrations. All actor DBs should use the standard Drizzle migration pattern. 
+ +### Files to change + +- **`foundry/packages/backend/src/actors/task/workbench.ts`** (lines 24-56) — remove `ALTER TABLE` raw SQL, add columns to `db/schema.ts` and generate a proper migration +- **`foundry/packages/backend/src/actors/task/workflow/init.ts`** (lines 12-15) — same treatment +- **`foundry/packages/backend/src/actors/task/db/schema.ts`** — add the missing columns that are currently added via `ALTER TABLE` +- **`foundry/packages/backend/src/actors/task/db/migrations.ts`** — regenerate with new migration + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "All actor databases must use Drizzle ORM with proper schema definitions and generated migrations. No raw SQL (`db.execute()`, `ALTER TABLE`, etc.). Schema changes must go through `schema.ts` + migration generation." + +--- + +## [x] 6. Rename History actor → Audit Log actor + +**Rationale:** The actor functions as a comprehensive audit log tracking task lifecycle events. "Audit Log" better describes its purpose. 
+ +### Files to change + +- **`foundry/packages/backend/src/actors/history/`** → rename directory to `audit-log/` + - `index.ts` — rename export `history` → `auditLog`, display name `"History"` → `"Audit Log"`, queue `history.command.append` → `auditLog.command.append` + - Internal types: `HistoryInput` → `AuditLogInput`, `AppendHistoryCommand` → `AppendAuditLogCommand`, `ListHistoryParams` → `ListAuditLogParams` +- **`foundry/packages/backend/src/actors/keys.ts`** — `historyKey()` → `auditLogKey()` +- **`foundry/packages/backend/src/actors/handles.ts`** — `getOrCreateHistory` → `getOrCreateAuditLog`, `selfHistory` → `selfAuditLog` +- **`foundry/packages/backend/src/actors/index.ts`** — update import path and registration +- **`foundry/packages/shared/src/contracts.ts`** — `HistoryEvent` → `AuditLogEvent` +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — `history()` action → `auditLog()`, update imports +- **`foundry/packages/backend/src/actors/repository/actions.ts`** — update `getOrCreateHistory` calls +- **`foundry/packages/backend/src/actors/task/workflow/common.ts`** — `appendHistory()` → `appendAuditLog()` +- **`foundry/packages/backend/src/actors/task/workflow/init.ts`** — update imports and calls +- **`foundry/packages/backend/src/actors/task/workflow/commands.ts`** — update imports and calls +- **`foundry/packages/backend/src/actors/task/workflow/push.ts`** — update imports and calls + +### Coverage gaps to fix + +The audit log only covers 9 of ~24 significant events (37.5%). The entire `task/workbench.ts` file has zero logging. 
Add audit log calls for: + +**High priority (missing lifecycle events):** +- `task.switch` — in `task/workflow/index.ts` handleSwitchActivity +- `task.session.created` — in `task/workbench.ts` createWorkbenchSession +- `task.session.closed` — in `task/workbench.ts` closeWorkbenchSession +- `task.session.stopped` — in `task/workbench.ts` stopWorkbenchSession + +**Medium priority (missing user actions):** +- `task.session.renamed` — renameWorkbenchSession +- `task.message.sent` — sendWorkbenchMessage +- `task.model.changed` — changeWorkbenchModel +- `task.title.changed` — renameWorkbenchTask +- `task.branch.renamed` — renameWorkbenchBranch +- `task.pr.published` — publishWorkbenchPr +- `task.file.reverted` — revertWorkbenchFile + +**Low priority / debatable:** +- `task.draft.updated`, `task.session.unread`, `task.derived.refreshed`, `task.transcript.refreshed` + +### CLAUDE.md updates needed + +- **`foundry/packages/backend/CLAUDE.md`** — rename `HistoryActor` → `AuditLogActor` in actor hierarchy, add maintenance rule: "Every new action or command handler that represents a user-visible or workflow-significant event must append to the audit log actor. The audit log must remain a comprehensive record of all significant operations." +- **`foundry/CLAUDE.md`** — rename "History Events" section → "Audit Log Events", update the list to include all events above, add note: "When adding new task/workbench commands, always add a corresponding audit log event." + +--- + +## [ ] 7. Move starred/default model to user actor settings + +**Dependencies:** item 1 + +**Rationale:** The starred/default model preference is currently broken — the frontend stores it in local React state that resets on reload. The org actor's `organizationProfile` table has a `defaultModel` column but there's no action to update it and it's the wrong scope anyway. This is a per-user preference, not an org setting. 
+ +### Current state (broken) + +- **Frontend** (`mock-layout.tsx` line 313) — `useState("claude-sonnet-4")` — local state, lost on reload +- **Model picker UI** (`model-picker.tsx`) — has star icons + `onSetDefault` callback, but it only updates local state +- **Org actor** (`organization/db/schema.ts` line 43) — `defaultModel` column exists but nothing writes to it +- **No backend persistence** — starred model is not saved anywhere + +### Changes needed + +1. **Add `user_settings` table to user actor** (or add `defaultModel` column to `user_profiles`): + - `defaultModel` (text) — the user's starred/preferred model + - File: `foundry/packages/backend/src/actors/auth-user/db/schema.ts` + +2. **Add queue message to user actor** to update the default model: + - File: `foundry/packages/backend/src/actors/auth-user/index.ts` + +3. **Remove `defaultModel` from org actor** `organizationProfile` table (wrong scope): + - File: `foundry/packages/backend/src/actors/organization/db/schema.ts` + +4. **Update frontend** to read starred model from user settings (via `app` subscription) and send queue message on star click: + - File: `foundry/packages/frontend/src/components/mock-layout/model-picker.tsx` + - File: `foundry/packages/frontend/src/components/mock-layout.tsx` + +5. **Update shared types** — move `defaultModel` from `FoundryOrganizationSettings` to user settings type: + - File: `foundry/packages/shared/src/app-shell.ts` + +6. **Update client** to send the queue message to user actor: + - File: `foundry/packages/client/` + +--- + +## [ ] 8. Replace hardcoded model/agent lists with sandbox-agent API data + +**Dependencies:** items 7, 25 + +**Rationale:** The frontend hardcodes 8 models in a static list and ignores the sandbox-agent API's `GET /v1/agents` endpoint which already exposes the full agent config — models, modes, and reasoning/thought levels per agent. The frontend should consume this API 1:1 instead of maintaining its own stale copy. 
+ +### Current state (hardcoded) + +- **`foundry/packages/frontend/src/components/mock-layout/view-model.ts`** (lines 20-39) — hardcoded `MODEL_GROUPS` with 8 models +- **`foundry/packages/client/src/workbench-model.ts`** (lines 18-37) — identical hardcoded `MODEL_GROUPS` copy +- **`foundry/packages/shared/src/workbench.ts`** (lines 5-13) — `WorkbenchModelId` hardcoded union type +- No modes or thought/reasoning levels exposed in UI at all +- No API calls to discover available models + +### What the sandbox-agent API already provides (`GET /v1/agents`) + +Per agent, the API returns: +- **models** — full list with display names (Claude: 4, Codex: 6, Cursor: 35+, OpenCode: 239) +- **modes** — execution modes (Claude: 5, Codex: 3, OpenCode: 2) +- **thought_level** — reasoning levels (Codex: low/medium/high/xhigh, Mock: low/medium/high) +- **capabilities** — plan_mode, reasoning, status support +- **credentialsAvailable** / **installed** — agent availability + +### Changes needed + +1. **Remove hardcoded model lists** from: + - `foundry/packages/frontend/src/components/mock-layout/view-model.ts` — delete `MODEL_GROUPS` + - `foundry/packages/client/src/workbench-model.ts` — delete `MODEL_GROUPS` + - `foundry/packages/shared/src/workbench.ts` — replace `WorkbenchModelId` union type with `string` (dynamic from API) + +2. **Backend: fetch and cache agent config from sandbox-agent API** + - Add an action or startup flow that calls `GET /v1/agents?config=true` on the sandbox-agent API + - Cache the result (agent list + models + modes + thought levels) in the appropriate actor + - Expose it to the frontend via the existing subscription/event system + +3. 
**Frontend: consume API-driven config** + - Model picker reads available models from backend-provided agent config, not hardcoded list + - Expose modes selector per agent + - Expose thought/reasoning level selector for agents that support it (Codex, Mock) + - Group models by agent as the API does (not by arbitrary provider grouping) + +4. **Update shared types** — make model/mode/thought_level types dynamic strings rather than hardcoded unions: + - `foundry/packages/shared/src/workbench.ts` + +5. **No backwards compatibility needed** — we're cleaning up, not preserving old behavior + +--- + +## [ ] 9. Flatten `taskLookup` + `taskSummaries` into single `tasks` table on org actor + +**Dependencies:** item 13 + +**Rationale:** `taskLookup` (taskId → repoId) is a strict subset of `taskSummaries` (which also has repoId + title, status, branch, PR, sessions). There's no reason for two tables with the same primary key. Flatten into one `tasks` table. + +### Current state + +- **`taskLookup`** — `taskId` (PK), `repoId` — used only for taskId → repoId resolution +- **`taskSummaries`** — `taskId` (PK), `repoId`, `title`, `status`, `repoName`, `updatedAtMs`, `branch`, `pullRequestJson`, `sessionsSummaryJson` — materialized sidebar data + +### Changes needed + +1. **Merge into single `tasks` table** in `foundry/packages/backend/src/actors/organization/db/schema.ts`: + - Drop `taskLookup` table + - Rename `taskSummaries` → `tasks` + - Keep all columns from `taskSummaries` (already includes `repoId`) + +2. **Update all references**: + - `foundry/packages/backend/src/actors/organization/actions.ts` — replace `taskLookup` queries with `tasks` table lookups + - `foundry/packages/backend/src/actors/organization/app-shell.ts` — if it references either table + - Any imports of the old table names from schema + +3. **Regenerate migrations** — `foundry/packages/backend/src/actors/organization/db/migrations.ts` + +--- + +## [x] 10. 
Reorganize user and organization actor actions into `actions/` folders + +**Dependencies:** items 1, 6 + +**Rationale:** Both actors cram too many concerns into single files. The organization actor has `app-shell.ts` (1,947 lines) + `actions.ts` mixing Better Auth, Stripe, GitHub, onboarding, workbench proxying, and org state. The user actor mixes Better Auth adapter CRUD with custom Foundry actions. Split into `actions/` folders grouped by domain, with `betterAuth` prefix on all Better Auth actions. + +### User actor → `user/actions/` + +| File | Actions | Source | +|---|---|---| +| `actions/better-auth.ts` | `betterAuthCreateRecord`, `betterAuthFindOneRecord`, `betterAuthFindManyRecords`, `betterAuthUpdateRecord`, `betterAuthUpdateManyRecords`, `betterAuthDeleteRecord`, `betterAuthDeleteManyRecords`, `betterAuthCountRecords` + all helper functions (`tableFor`, `columnFor`, `normalizeValue`, `clauseToExpr`, `buildWhere`, `applyJoinToRow`, `applyJoinToRows`) | Currently in `index.ts` | +| `actions/user.ts` | `getAppAuthState`, `upsertUserProfile`, `upsertSessionState` | Currently in `index.ts` | + +### Organization actor → `organization/actions/` + +**Delete `app-shell.ts`** — split its ~50 actions + helpers across these files: + +| File | Actions | Source | +|---|---|---| +| `actions/better-auth.ts` | `betterAuthFindSessionIndex`, `betterAuthUpsertSessionIndex`, `betterAuthDeleteSessionIndex`, `betterAuthFindEmailIndex`, `betterAuthUpsertEmailIndex`, `betterAuthDeleteEmailIndex`, `betterAuthFindAccountIndex`, `betterAuthUpsertAccountIndex`, `betterAuthDeleteAccountIndex`, `betterAuthCreateVerification`, `betterAuthFindOneVerification`, `betterAuthFindManyVerification`, `betterAuthUpdateVerification`, `betterAuthUpdateManyVerification`, `betterAuthDeleteVerification`, `betterAuthDeleteManyVerification`, `betterAuthCountVerification` + auth clause builder helpers | Currently in `app-shell.ts` | +| `actions/stripe.ts` | `createAppCheckoutSession`, 
`finalizeAppCheckoutSession`, `createAppBillingPortalSession`, `cancelAppScheduledRenewal`, `resumeAppSubscription`, `recordAppSeatUsage`, `handleAppStripeWebhook`, `applyOrganizationStripeCustomer`, `applyOrganizationStripeSubscription`, `applyOrganizationFreePlan`, `setOrganizationBillingPaymentMethod`, `setOrganizationBillingStatus`, `upsertOrganizationInvoice`, `recordOrganizationSeatUsage` | Currently in `app-shell.ts` | +| `actions/github.ts` | `resolveAppGithubToken`, `beginAppGithubInstall`, `triggerAppRepoImport`, `handleAppGithubWebhook`, `syncOrganizationShellFromGithub`, `syncGithubOrganizations`, `applyGithubInstallationCreated`, `applyGithubInstallationRemoved`, `applyGithubRepositoryChanges`, `reloadGithubOrganization`, `reloadGithubPullRequests`, `reloadGithubRepository`, `reloadGithubPullRequest`, `applyGithubRepositoryProjection`, `applyGithubDataProjection`, `recordGithubWebhookReceipt`, `refreshTaskSummaryForGithubBranch` | Currently split across `app-shell.ts` and `actions.ts` | +| `actions/onboarding.ts` | `skipAppStarterRepo`, `starAppStarterRepo`, `starSandboxAgentRepo`, `selectAppOrganization` | Currently in `app-shell.ts` | +| `actions/organization.ts` | `getAppSnapshot`, `getOrganizationShellState`, `getOrganizationShellStateIfInitialized`, `updateOrganizationShellProfile`, `updateAppOrganizationProfile`, `markOrganizationSyncStarted`, `applyOrganizationSyncCompleted`, `markOrganizationSyncFailed`, `useOrganization`, `getOrganizationSummary`, `reconcileWorkbenchState` | Currently split across `app-shell.ts` and `actions.ts` | +| `actions/tasks.ts` | `createTask`, `createWorkbenchTask`, `listTasks`, `getTask`, `switchTask`, `applyTaskSummaryUpdate`, `removeTaskSummary`, `findTaskForGithubBranch`, `applyOpenPullRequestUpdate`, `removeOpenPullRequest`, `attachTask`, `pushTask`, `syncTask`, `mergeTask`, `archiveTask`, `killTask` | Currently in `actions.ts` | +| `actions/workbench.ts` | `markWorkbenchUnread`, `renameWorkbenchTask`, 
`renameWorkbenchBranch`, `createWorkbenchSession`, `renameWorkbenchSession`, `setWorkbenchSessionUnread`, `updateWorkbenchDraft`, `changeWorkbenchModel`, `sendWorkbenchMessage`, `stopWorkbenchSession`, `closeWorkbenchSession`, `publishWorkbenchPr`, `revertWorkbenchFile` | Currently in `actions.ts` (proxy calls to task actor) | +| `actions/repos.ts` | `listRepos`, `getRepoOverview` | Currently in `actions.ts` | +| `actions/history.ts` | `history` (→ `auditLog` after rename) | Currently in `actions.ts` | + +Also move: +- `APP_SHELL_ORGANIZATION_ID` constant → `organization/constants.ts` +- `runOrganizationWorkflow` → `organization/workflow.ts` +- Private helpers (`buildAppSnapshot`, `assertAppOrganization`, `collectAllTaskSummaries`, etc.) → colocate with the action file that uses them + +### Files to update + +- **`foundry/packages/backend/src/services/better-auth.ts`** — update all action name references to use `betterAuth` prefix +- **`foundry/packages/backend/src/actors/organization/index.ts`** — import and spread action objects from `actions/` files instead of `app-shell.ts` + `actions.ts` +- **`foundry/packages/backend/src/actors/auth-user/index.ts`** (or `user/index.ts`) — import actions from `actions/` files + +--- + +## [ ] 11. Standardize workflow file structure across all actors + +**Dependencies:** item 4 + +**Rationale:** Workflow logic is inconsistently placed — inline in `index.ts`, in `actions.ts`, or in a `workflow/` directory. Standardize: every actor with a workflow gets a `workflow.ts` file. If the workflow is large, use `workflow/{index,...}.ts`. 
+ +### Changes per actor + +| Actor | Current location | New location | Notes | +|---|---|---|---| +| user (auth-user) | None | `workflow.ts` (new) | Needs a workflow for mutations (item 4) | +| github-data | Inline in `index.ts` (~57 lines) | `workflow.ts` | Extract `runGithubDataWorkflow` + handler | +| history (→ audit-log) | Inline in `index.ts` (~18 lines) | `workflow.ts` | Extract `runHistoryWorkflow` + `appendHistoryRow` | +| organization | In `actions.ts` (~51 lines) | `workflow.ts` | Extract `runOrganizationWorkflow` + queue handlers | +| repository | In `actions.ts` (~42 lines) | `workflow.ts` | Extract `runRepositoryWorkflow` + queue handlers | +| task | `workflow/` directory (926 lines) | `workflow/` directory — already correct | Keep as-is: `workflow/index.ts`, `workflow/queue.ts`, `workflow/common.ts`, `workflow/init.ts`, `workflow/commands.ts`, `workflow/push.ts` | +| sandbox | None (wrapper) | N/A | No custom workflow needed | + +### Pattern + +- **Small workflows** (< ~200 lines): single `workflow.ts` file +- **Large workflows** (> ~200 lines): `workflow/index.ts` holds the main loop, other files hold step groups: + - `workflow/index.ts` — main loop + handler dispatch + - `workflow/queue.ts` — queue name definitions (if many) + - `workflow/{group}.ts` — step/activity functions grouped by domain + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Every actor with a message queue must have its workflow logic in a dedicated `workflow.ts` file (or `workflow/index.ts` for complex actors). Do not inline workflow logic in `index.ts` or `actions.ts`. Actions are read-only handlers; workflow handlers process queue messages and perform mutations." + +--- + +## [ ] 12. Audit and remove dead code in organization actor + +**Dependencies:** item 10 + +**Rationale:** The organization actor has ~50+ actions across `app-shell.ts` and `actions.ts`. Likely some are unused or vestigial. 
Audit all actions and queues for dead code and remove anything that has no callers. + +### Scope + +- All actions in `organization/actions.ts` and `organization/app-shell.ts` +- All queue message types and their handlers +- Helper functions that may no longer be called +- Shared types in `packages/shared` that only served removed actions + +### Approach + +- Trace each action/queue from caller → handler to confirm it's live +- Remove any action with no callers (client, other actors, services, HTTP endpoints) +- Remove any queue handler with no senders +- Remove associated types and helpers + +--- + +## [ ] 13. Enforce coordinator pattern and fix ownership violations + +**Rationale:** The actor hierarchy follows a coordinator pattern: org → repo → task → session. The coordinator owns the index/summary of its children, handles create/destroy, and children push updates up to their coordinator. Several violations exist where levels are skipped. + +### Coordinator hierarchy (add to CLAUDE.md) + +``` +Organization (coordinator for repos) +├── Repository (coordinator for tasks) +│ └── Task (coordinator for sessions) +│ └── Session +``` + +**Rules:** +- The coordinator owns the index/summary table for its direct children +- The coordinator handles create/destroy of its direct children +- Children push summary updates UP to their direct coordinator (not skipping levels) +- Read paths go through the coordinator, not direct cross-level access +- No backwards compatibility needed — we're cleaning up + +### Violations to fix + +#### V1: Task index tables on wrong actor (HIGH) + +`taskLookup` and `taskSummaries` (item 9 merges these into `tasks`) are on the **organization** actor but should be on the **repository** actor, since repo is the coordinator for tasks. 
+ +**Fix:** +- Move the merged `tasks` table (from item 9) to `repository/db/schema.ts` +- Repository owns task summaries, not organization +- Organization gets a `repoSummaries` table instead (repo count, latest activity, etc.) — the repo pushes its summary up to org + +#### V2: Tasks push summaries directly to org, skipping repo (HIGH) + +Task actors call `organization.applyTaskSummaryUpdate()` directly (line 464 in `actions.ts`), bypassing the repository coordinator. + +**Fix:** +- Task pushes summary to `repository.applyTaskSummaryUpdate()` instead +- Repository updates its `tasks` table, then pushes a repo summary up to organization +- Organization never receives task-level updates directly + +#### V3: Org resolves taskId → repoId from its own table (MEDIUM) + +`resolveRepoId(c, taskId)` in `organization/actions.ts` queries `taskLookup` directly. Used by `switchTask`, `attachTask`, `pushTask`, `syncTask`, `mergeTask`, `archiveTask`, `killTask` (7 actions). + +**Fix:** +- Remove `resolveRepoId()` from org actor +- Org must know the `repoId` from the caller (frontend already knows which repo a task belongs to) or query the repo actor +- Update all 7 proxy actions to require `repoId` in their input instead of looking it up + +#### V4: Duplicate task creation bookkeeping at org level (MEDIUM) + +`createTaskMutation` in org actor calls `repository.createTask()`, then independently inserts `taskLookup` and seeds `taskSummaries`. Repository already inserts its own `taskIndex` row. 
+ +**Fix:** +- Org calls `repository.createTask()` — that's it +- Repository handles all task index bookkeeping internally +- Repository pushes the new task summary back up to org as part of its repo summary update + +### Files to change + +- **`foundry/packages/backend/src/actors/organization/db/schema.ts`** — remove `taskLookup` and `taskSummaries`, add `repoSummaries` if needed +- **`foundry/packages/backend/src/actors/repository/db/schema.ts`** — add merged `tasks` table (task summaries) +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — remove `resolveRepoId()`, `applyTaskSummaryUpdate`, `removeTaskSummary`, `findTaskForGithubBranch`, `refreshTaskSummaryForGithubBranch`; update proxy actions to require `repoId` in input +- **`foundry/packages/backend/src/actors/repository/actions.ts`** — add `applyTaskSummaryUpdate` action (receives from task), push repo summary to org +- **`foundry/packages/backend/src/actors/task/workflow/common.ts`** — change summary push target from org → repo +- **`foundry/packages/shared/src/contracts.ts`** — update input types to include `repoId` where needed +- **`foundry/packages/client/`** — update calls to pass `repoId` + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add coordinator pattern rules: + ``` + ## Coordinator Pattern + + The actor hierarchy follows a strict coordinator pattern: + - Organization = coordinator for repositories + - Repository = coordinator for tasks + - Task = coordinator for sessions + + Rules: + - Each coordinator owns the index/summary table for its direct children. + - Only the coordinator handles create/destroy of its direct children. + - Children push summary updates to their direct coordinator only (never skip levels). + - Cross-level access (e.g. org directly querying task state) is not allowed — go through the coordinator. + - Proxy actions at higher levels (e.g. org.pushTask) must delegate to the correct coordinator, not bypass it. 
+ ``` + +--- + +## [ ] 14. Standardize one event per subscription topic across all actors + +**Dependencies:** item 15 + +**Rationale:** Each subscription topic should have exactly one event type carrying the full replacement snapshot. The organization topic currently violates this with 7 subtypes. Additionally, event naming is inconsistent across actors. Standardize all of them. + +### Current state + +| Topic | Wire event name | Event type field | Subtypes | Issue | +|---|---|---|---|---| +| `app` | `appUpdated` | `type: "appUpdated"` | 1 | Name is fine | +| `organization` | `organizationUpdated` | 7 variants | **7** | Needs consolidation | +| `task` | `taskUpdated` | `type: "taskDetailUpdated"` | 1 | Wire name ≠ type name | +| `session` | `sessionUpdated` | `type: "sessionUpdated"` | 1 | Fine | +| `sandboxProcesses` | `processesUpdated` | `type: "processesUpdated"` | 1 | Fine | + +### Target state + +Every topic gets exactly one event. Wire event name = type field = `{topic}Updated`. Each carries the full snapshot for that topic. + +| Topic | Event name | Payload | +|---|---|---| +| `app` | `appUpdated` | `FoundryAppSnapshot` | +| `organization` | `organizationUpdated` | `OrganizationSummarySnapshot` | +| `task` | `taskUpdated` | `WorkbenchTaskDetail` | +| `session` | `sessionUpdated` | `WorkbenchSessionDetail` | +| `sandboxProcesses` | `processesUpdated` | `SandboxProcessSnapshot[]` | + +### Organization — consolidate 7 subtypes into 1 + +Remove the discriminated union. Replace all 7 subtypes: +- `taskSummaryUpdated`, `taskRemoved`, `repoAdded`, `repoUpdated`, `repoRemoved`, `pullRequestUpdated`, `pullRequestRemoved` + +With a single `organizationUpdated` event carrying the full `OrganizationSummarySnapshot`. The client replaces its cached state — same pattern as every other topic. + +### Task — fix event type name mismatch + +Wire event is `taskUpdated` but the type field says `taskDetailUpdated`. Rename to `taskUpdated` everywhere for consistency. 
+ +### Files to change + +- **`foundry/packages/shared/src/realtime-events.ts`** — replace `OrganizationEvent` union with single event type; rename `TaskEvent.type` from `taskDetailUpdated` → `taskUpdated` +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — update all 7 `c.broadcast("organizationUpdated", { type: "taskSummaryUpdated", ... })` calls to emit single event with full snapshot +- **`foundry/packages/backend/src/actors/organization/app-shell.ts`** — same for any broadcasts here +- **`foundry/packages/backend/src/actors/task/workbench.ts`** — rename `taskDetailUpdated` → `taskUpdated` in broadcast calls +- **`foundry/packages/client/src/subscription/topics.ts`** — simplify `applyEvent` for organization topic (no more discriminated union handling); update task event type name +- **`foundry/packages/client/src/subscription/mock-manager.ts`** — update mock event handling +- **`foundry/packages/frontend/`** — update any direct references to event type names + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Each subscription topic must have exactly one event type. The event carries the full replacement snapshot for that topic — no discriminated unions, no partial patches, no subtypes. Event name must match the pattern `{topic}Updated` (e.g. `organizationUpdated`, `taskUpdated`). When state changes, broadcast the full snapshot; the client replaces its cached state." + +--- + +## [x] 15. Unify tasks and pull requests — PRs are just task data + +**Dependencies:** items 9, 13 + +**Rationale:** From the client's perspective, tasks and PRs are the same thing — a branch with work on it. The frontend already merges them into one sorted list, converting PRs to synthetic task objects with `pr:{prId}` IDs. The distinction is artificial. A "task" should represent any branch, and the task actor lazily wraps it. PR metadata is just data the task holds. 
+ +### Current state (separate entities) + +- **Tasks**: stored in task actor SQLite, surfaced via `WorkbenchTaskSummary`, events via `taskSummaryUpdated` +- **PRs**: stored in GitHub data actor (`githubPullRequests` table), surfaced via `WorkbenchOpenPrSummary`, events via `pullRequestUpdated`/`pullRequestRemoved` +- **Frontend hack**: converts PRs to fake task objects with `pr:{prId}` IDs, merges into one list +- **Filtering logic**: org actor silently swallows `pullRequestUpdated` if a task claims the same branch — fragile coupling +- **Two separate types**: `WorkbenchTaskSummary` and `WorkbenchOpenPrSummary` with overlapping fields + +### Target state (unified) + +- **One entity**: a "task" represents a branch. Task actors are lazily created when needed (user creates one, or a PR arrives for an unclaimed branch). +- **PR data lives on the task**: the task actor stores PR metadata (number, title, state, url, isDraft, authorLogin, etc.) as part of its state, not as a separate entity +- **One type**: `WorkbenchTaskSummary` includes full PR fields (nullable). No separate `WorkbenchOpenPrSummary`. +- **One event**: `organizationUpdated` carries task summaries that include PR data. No separate PR events. +- **No synthetic IDs**: every item in the sidebar is a real task with a real taskId + +### Changes needed + +1. **Remove `WorkbenchOpenPrSummary` type** from `packages/shared/src/workbench.ts` — merge its fields into `WorkbenchTaskSummary` +2. **Expand task's `pullRequest` field** from `{ number, status }` to full PR metadata (number, title, state, url, headRefName, baseRefName, isDraft, authorLogin, updatedAtMs) +3. **Remove `openPullRequests` from `OrganizationSummarySnapshot`** — all items are tasks now +4. **Remove PR-specific events** from `realtime-events.ts`: `pullRequestUpdated`, `pullRequestRemoved` +5. **Remove PR-specific actions** from organization actor: `applyOpenPullRequestUpdate`, `removeOpenPullRequest` +6. 
**Remove branch-claiming filter logic** in org actor (the `if task claims branch, skip PR` check) +7. **GitHub data actor PR sync**: when PRs arrive (webhook or sync), create/update a task for that branch lazily via the repository coordinator +8. **Task actor**: store PR metadata in its DB (new columns or table), update when GitHub data pushes changes +9. **Frontend**: remove `toOpenPrTaskModel` conversion, remove `pr:` ID prefix hack, remove separate `openPullRequests` state — sidebar is just tasks +10. **Repository actor**: when a PR arrives for a branch with no task, lazily create a task actor for it (lightweight, no sandbox needed) + +### Implications for coordinator pattern (item 13) + +This reinforces: repo is the coordinator for tasks. When GitHub data detects a new PR for a branch, it tells the repo coordinator, which creates/updates the task. The task holds the PR data and pushes its summary to the repo coordinator. + +### No backwards compatibility needed + +The `authSessionIndex`, `authEmailIndex`, `authAccountIndex`, and `authVerification` tables stay on the org actor. They're routing indexes needed by the Better Auth adapter to resolve user identity before the user actor can be accessed (e.g. session token → userId lookup). Already covered in item 2 for adding comments explaining this. + +--- + +## [ ] 16. Chunk GitHub data sync and publish progress + +**Rationale:** `runFullSync` in the github-data actor fetches everything at once (all repos, branches, members, PRs), replaces all tables atomically, and has a 5-minute timeout. For large orgs this will timeout or lose all data mid-sync (replace pattern deletes everything first). Needs to be chunked with incremental progress. + +### Current state (broken for large orgs) + +- `runFullSync()` (`github-data/index.ts` line 486-538): + 1. Fetches ALL repos, branches, members, PRs in 4 sequential calls + 2. `replaceRepositories/Branches/Members/PullRequests` — deletes all rows then inserts all new rows + 3. 
Single 5-minute timeout wraps the entire operation + 4. No progress reporting to the client — just "Syncing GitHub data..." → "Synced N repositories" + 5. If it fails mid-sync, data is partially deleted with no recovery + +### Changes needed + +1. **Chunk the sync by repository** — sync repos first (paginated from GitHub API), then for each repo chunk, sync its branches and PRs. Members can be a separate chunk. + +2. **Incremental upsert, not replace** — don't delete-then-insert. Use upsert per row so partial sync doesn't lose data. Mark rows with a sync generation ID; after full sync completes, delete rows from previous generations. + +3. **Run in a loop, not a single step** — each chunk is a separate workflow step with its own timeout. If one chunk fails, previous chunks are persisted. + +4. **Publish progress per chunk** — after each chunk completes: + - Update `github_meta` with progress (e.g. `syncedRepos: 15/42`) + - Push progress to the organization actor + - Organization broadcasts to clients so the UI shows progress (e.g. "Syncing repositories... 15/42") + +5. **Initial sync uses the same chunked approach** — `github-data-initial-sync` step should kick off the chunked loop, not call `runFullSync` directly + +### Files to change + +- **`foundry/packages/backend/src/actors/github-data/index.ts`**: + - Refactor `runFullSync` into chunked loop + - Replace `replaceRepositories/Branches/Members/PullRequests` with upsert + generation sweep + - Add progress metadata to `github_meta` table + - Publish progress to org actor after each chunk +- **`foundry/packages/backend/src/actors/github-data/db/schema.ts`** — add sync generation column to all tables, add progress fields to `github_meta` +- **`foundry/packages/backend/src/actors/organization/actions.ts`** (or `app-shell.ts`) — handle sync progress updates and broadcast to clients +- **`foundry/packages/shared/src/app-shell.ts`** — add sync progress fields to `FoundryGithubState` (e.g. 
`syncProgress: { current: number; total: number } | null`) +- **`foundry/packages/frontend/`** — show sync progress in UI (e.g. "Syncing repositories... 15/42") + +--- + +# Deferred follow-up outside this task + +## [ ] 17. Type all actor context parameters — remove `c: any` + +**Rationale:** 272+ instances of `c: any`, `ctx: any`, `loopCtx: any` across all actor code. This eliminates type safety for DB access, state access, broadcasts, and queue operations. All context parameters should use RivetKit's proper context types. + +### Scope (by file, approximate count) + +| File | `any` contexts | +|---|---| +| `organization/app-shell.ts` | ~108 | +| `organization/actions.ts` | ~56 | +| `task/workbench.ts` | ~53 | +| `github-data/index.ts` | ~23 | +| `repository/actions.ts` | ~22 | +| `sandbox/index.ts` | ~21 | +| `handles.ts` | ~19 | +| `task/workflow/commands.ts` | ~10 | +| `task/workflow/init.ts` | ~4 | +| `auth-user/index.ts` | ~2 | +| `history/index.ts` | ~2 | +| `task/workflow/index.ts` | ~2 | +| `task/workflow/common.ts` | ~2 | +| `task/workflow/push.ts` | ~1 | +| `polling.ts` | ~1 | + +### Changes needed + +1. **Determine correct RivetKit context types** — check RivetKit exports for `ActionContext`, `ActorContextOf`, `WorkflowContext`, `LoopContext`, or equivalent. Reference `polling.ts` which already defines typed contexts (`PollingActorContext`, `WorkflowPollingActorContext`). + +2. **Define per-actor context types** — each actor has its own state shape and DB schema, so the context type should be specific (e.g. `ActionContext` or similar). + +3. **Replace all `c: any`** with the proper typed context across every file listed above. + +4. **Type workflow/loop contexts** — `ctx: any` in workflow functions and `loopCtx: any` in loop callbacks need proper types too. + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "All actor context parameters (`c`, `ctx`, `loopCtx`) must be properly typed using RivetKit's context types. 
Never use `any` for actor contexts. Each actor should define or derive its context type from the actor definition." + +--- + +## [ ] 18. Final pass: remove all dead code + +**Dependencies:** all other items (do this last, after 17) + +**Rationale:** After completing all changes above, many actions, queues, SQLite tables, workflow steps, shared types, and helper functions will be orphaned. Do a full scan to find and remove everything that's dead. + +### Scope + +Scan the entire foundry codebase for: +- **Dead actions** — actions with no callers (client, other actors, services, HTTP endpoints) +- **Dead queues** — queue message types with no senders +- **Dead SQLite tables** — tables with no reads or writes +- **Dead workflow steps** — step names that are no longer referenced +- **Dead shared types** — types in `packages/shared` that are no longer imported +- **Dead helper functions** — private functions with no callers +- **Dead imports** — unused imports across all files + +### When to do this + +After all items 1–17 are complete. Not before — removing code while other items are in progress will create conflicts. + +--- + +## [ ] 19. Remove duplicate data between `c.state` and SQLite + +**Dependencies:** items 21, 24 + +**Rationale:** Several actors store the same data in both `c.state` (RivetKit durable state) and their SQLite tables. Mutable fields that exist in both can silently diverge — `c.state` becomes stale when the SQLite copy is updated. Per the existing CLAUDE.md rule, `c.state` should hold only small scalars/identifiers; anything queryable or mutable belongs in SQLite. + +### Duplicates found + +**Task actor** — `c.state` (`createState` in `task/index.ts` lines 124-139) vs `task`/`taskRuntime` tables: + +| Field | In SQLite? | Mutable? 
| Verdict | +|---|---|---|---| +| `organizationId` | No | No | **KEEP** — identity field | +| `repoId` | No | No | **KEEP** — identity field | +| `taskId` | No | No | **KEEP** — identity field | +| `repoRemote` | No (but org `repos` table has it) | No | **DELETE** — not needed on task, read from repo/org | +| `branchName` | Yes (`task.branch_name`) | Yes | **REMOVE from c.state** — HIGH risk, goes stale on rename | +| `title` | Yes (`task.title`) | Yes | **REMOVE from c.state** — HIGH risk, goes stale on rename | +| `task` (description) | Yes (`task.task`) | No | **REMOVE from c.state** — redundant | +| `sandboxProviderId` | Yes (`task.sandbox_provider_id`) | No | **REMOVE from c.state** — redundant | +| `agentType` | Yes (`task.agent_type`) | Yes | **DELETE entirely** — session-specific (item 21) | +| `explicitTitle` | No | No | **MOVE to SQLite** — creation metadata | +| `explicitBranchName` | No | No | **MOVE to SQLite** — creation metadata | +| `initialPrompt` | No | No | **DELETE entirely** — dead code, session-specific (item 21) | +| `initialized` | No | Yes | **DELETE entirely** — dead code, `status` already tracks init progress | +| `previousStatus` | No | No | **DELETE entirely** — never set, never read | + +**Repository actor** — `c.state` (`createState` in `repository/index.ts`) vs `repoMeta` table: + +| Field | Mutable? | Risk | +|---|---|---| +| `remoteUrl` | No | Low — redundant but safe | + +### Fix + +Remove all duplicated fields from `c.state`. Keep only identity fields needed for actor key resolution (e.g. `organizationId`, `repoId`, `taskId`). Read mutable data from SQLite. + +**Task actor `c.state` should become:** +```typescript +createState: (_c, input) => ({ + organizationId: input.organizationId, + repoId: input.repoId, + taskId: input.taskId, +}) +``` + +Fields already in SQLite (`branchName`, `title`, `task`, `sandboxProviderId`) — remove from `c.state`, read from SQLite only. 
Fields not yet in SQLite (`explicitTitle`, `explicitBranchName`) — add to `task` table, remove from `c.state`. Dead code to delete entirely: `agentType`, `initialPrompt` (item 21), `initialized`, `previousStatus`, `repoRemote`. + +**Repository actor `c.state` should become:** +```typescript +createState: (_c, input) => ({ + organizationId: input.organizationId, + repoId: input.repoId, +}) +``` + +`remoteUrl` is removed from repo actor `c.state` entirely. The repo actor reads `remoteUrl` from its own `repoMeta` SQLite table when needed. The org actor already stores `remoteUrl` in its `repos` table (source of truth from GitHub data). The `getOrCreateRepository()` helper in `handles.ts` currently requires `remoteUrl` as a parameter and passes it as `createWithInput` — this parameter must be removed. Every call site in `organization/actions.ts` and `organization/app-shell.ts` currently does a DB lookup for `remoteUrl` just to pass it to `getOrCreateRepository()` — all of those lookups go away. On actor creation, the repo actor should populate its `repoMeta.remoteUrl` by querying the org actor or github-data actor, not by receiving it as a create input. 
+ +### Files to change + +- **`foundry/packages/backend/src/actors/task/index.ts`** — trim `createState`, update all `c.state.*` reads for removed fields to read from SQLite instead +- **`foundry/packages/backend/src/actors/task/workbench.ts`** — update `c.state.*` reads +- **`foundry/packages/backend/src/actors/task/workflow/*.ts`** — update `c.state.*` reads +- **`foundry/packages/backend/src/actors/repository/index.ts`** — trim `createState`, remove `remoteUrl` from input type +- **`foundry/packages/backend/src/actors/repository/actions.ts`** — update all `c.state.remoteUrl` reads to query `repoMeta` table; remove `persistRemoteUrl()` helper +- **`foundry/packages/backend/src/actors/handles.ts`** — remove `remoteUrl` parameter from `getOrCreateRepository()` +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — remove all `remoteUrl` lookups done solely to pass to `getOrCreateRepository()` (~10 call sites) +- **`foundry/packages/backend/src/actors/organization/app-shell.ts`** — same cleanup for app-shell call sites + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Never duplicate data between `c.state` and SQLite. `c.state` holds only immutable identity fields needed for actor key resolution (e.g. `organizationId`, `repoId`, `taskId`). All mutable data and anything queryable must live exclusively in SQLite. If a field can change after actor creation, it must not be in `c.state`." + +--- + +## [ ] 20. Prefix all admin/recovery actions with `admin` + +**Rationale:** Several actions are admin-only recovery/rebuild operations but their names don't distinguish them from normal product flows. Prefix with `admin` so it's immediately clear these are not part of regular user flows. 
+ +### Actions to rename + +**Organization actor:** + +| Current name | New name | Why it's admin | +|---|---|---| +| `reconcileWorkbenchState` | `adminReconcileWorkbenchState` | Full fan-out rebuild of task summary projection | +| `reloadGithubOrganization` | `adminReloadGithubOrganization` | Manual trigger to refetch all org GitHub data | +| `reloadGithubPullRequests` | `adminReloadGithubPullRequests` | Manual trigger to refetch all PR data | +| `reloadGithubRepository` | `adminReloadGithubRepository` | Manual trigger to refetch single repo | +| `reloadGithubPullRequest` | `adminReloadGithubPullRequest` | Manual trigger to refetch single PR | + +**GitHub Data actor:** + +| Current name | New name | Why it's admin | +|---|---|---| +| `fullSync` | `adminFullSync` | Full replace of all GitHub data — recovery operation | +| `reloadOrganization` | `adminReloadOrganization` | Triggers full sync manually | +| `reloadAllPullRequests` | `adminReloadAllPullRequests` | Triggers full sync manually | +| `clearState` | `adminClearState` | Deletes all GitHub data — recovery from lost access | + +**NOT renamed** (these are triggered by webhooks/normal flows, not manual admin actions): +- `reloadRepository` — called by push/create/delete webhooks (incremental, normal flow) +- `reloadPullRequest` — called by PR webhooks (incremental, normal flow) +- `handlePullRequestWebhook` — webhook handler (normal flow) +- `syncGithubOrganizations` — called during OAuth callback (normal flow, though also used for repair) + +### Files to change + +- **`foundry/packages/backend/src/actors/github-data/index.ts`** — rename actions +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — rename actions +- **`foundry/packages/client/src/backend-client.ts`** — update method names +- **`foundry/packages/frontend/`** — update any references to renamed actions + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Admin-only actions (recovery, rebuild, 
manual resync, state reset) must be prefixed with `admin` (e.g. `adminReconcileState`, `adminClearState`). This makes it clear they are not part of normal product flows and should not be called from regular client code paths." + +--- + +## [ ] 21. Remove legacy/session-scoped fields from task table + +**Rationale:** The `task` table has fields that either belong on the session, are redundant with data from other actors, or are dead code from the removed local git clone. These should be cleaned up. + +### Fields to remove from `task` table and `c.state` + +**`agentType`** — Legacy from when task = 1 session. Only used for `defaultModelForAgent(c.state.agentType)` to pick the default model when creating a new session. Sessions already have their own `model` column in `taskWorkbenchSessions`. The default model for new sessions should come from user settings (see item 16 — starred model stored in user actor). Remove `agentType` from task table, `c.state`, `createState`, `TaskRecord`, and all `defaultModelForAgent()` call sites. Replace with user settings lookup. + +**`initialPrompt`** — Stored on `c.state` at task creation but **never read anywhere**. Completely dead code. This is also session-specific, not task-specific — the initial prompt belongs on the first session, not the task. Remove from `c.state`, `createState` input type, and `CreateTaskCommand`/`CreateTaskInput` types. Remove from `repository/actions.ts` create flow. + +**`prSubmitted`** — Redundant boolean set when `submitPullRequest` runs. PR state already flows from GitHub webhooks → github-data actor → branch name lookup. This boolean can go stale (PR closed and reopened, PR deleted, etc.). Remove entirely — PR existence is derivable from github-data by branch name (already how `enrichTaskRecord` and `buildTaskSummary` work). + +### Dead fields on `taskRuntime` table + +**`provisionStage`** — Values: `"queued"`, `"ready"`, `"error"`. 
Redundant with `status` — `init_complete` implies ready, `error` implies error. Never read in business logic. Delete. + +**`provisionStageUpdatedAt`** — Timestamp for `provisionStage` changes. Never read anywhere. Delete. + +### Dead fields on `TaskRecord` (in `workflow/common.ts`) + +These are always hardcoded to `null` — remnants of the removed local git clone: + +- `diffStat` — was populated from `branches` table (deleted) +- `hasUnpushed` — was populated from `branches` table (deleted) +- `conflictsWithMain` — was populated from `branches` table (deleted) +- `parentBranch` — was populated from `branches` table (deleted) + +Remove from `TaskRecord` type, `getCurrentRecord()`, and all consumers (contracts, mock client, tests, frontend). + +### Files to change + +- **`foundry/packages/backend/src/actors/task/db/schema.ts`** — remove `agentType` and `prSubmitted` columns from `task` table; remove `provisionStage` and `provisionStageUpdatedAt` from `taskRuntime` table +- **`foundry/packages/backend/src/actors/task/index.ts`** — remove `agentType`, `initialPrompt`, `initialized`, `previousStatus`, `repoRemote` from `createState` and input type +- **`foundry/packages/backend/src/actors/task/workbench.ts`** — remove `defaultModelForAgent()`, `agentTypeForModel()`, update session creation to use user settings for default model; remove `prSubmitted` set in `submitPullRequest` +- **`foundry/packages/backend/src/actors/task/workflow/common.ts`** — remove `agentType`, `prSubmitted`, `diffStat`, `hasUnpushed`, `conflictsWithMain`, `parentBranch` from `getCurrentRecord()` and `TaskRecord` construction +- **`foundry/packages/backend/src/actors/task/workflow/init.ts`** — remove `agentType` from task row inserts +- **`foundry/packages/shared/src/contracts.ts`** — remove `agentType`, `prSubmitted`, `diffStat`, `prUrl`, `hasUnpushed`, `conflictsWithMain`, `parentBranch` from `TaskRecord` schema (note: `prUrl` and `prAuthor` should stay if still populated by `enrichTaskRecord`, or 
move to the unified task/PR model from item 15) +- **`foundry/packages/client/src/mock/backend-client.ts`** — update mock to remove dead fields +- **`foundry/packages/client/test/view-model.test.ts`** — update test fixtures +- **`foundry/packages/frontend/src/features/tasks/model.test.ts`** — update test fixtures +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — remove any references to `agentType` in task creation input +- **`foundry/packages/backend/src/actors/repository/actions.ts`** — update `enrichTaskRecord()` to stop setting dead fields + +--- + +## [ ] 22. Move per-user UI state from task actor to user actor + +**Dependencies:** item 1 + +**Rationale:** The task actor stores UI-facing state that is user-specific, not task-global. With multiplayer (multiple users viewing the same task), this breaks — each user has their own active session, their own unread state, their own drafts. These must live on the user actor, keyed by `(taskId, sessionId)`, not on the shared task actor. + +### Per-user state currently on the task actor (wrong) + +**`taskRuntime.activeSessionId`** — Which session the user is "looking at." Used to: +- Determine which session's status drives the task-level status (running/idle) — this is wrong, the task status should reflect ALL sessions, not one user's active tab +- Return a "current" session in `attachTask` responses — this is per-user +- Migration path for legacy single-session tasks in `ensureWorkbenchSeeded` + +This should move to the user actor as `activeSessionId` per `(userId, taskId)`. + +**`taskWorkbenchSessions.unread`** — Per-user unread state stored globally on the session. If user A reads a session, user B's unread state is also cleared. Move to user actor keyed by `(userId, taskId, sessionId)`. + +**`taskWorkbenchSessions.draftText` / `draftAttachmentsJson` / `draftUpdatedAt`** — Per-user draft state stored globally. If user A starts typing a draft, it overwrites user B's draft. 
Move to user actor keyed by `(userId, taskId, sessionId)`. + +### What stays on the task actor (correct — task-global state) + +- `taskRuntime.activeSandboxId` — which sandbox is running (global to the task) +- `taskRuntime.activeSwitchTarget` / `activeCwd` — sandbox connection state (global) +- `taskRuntime.statusMessage` — provisioning/runtime status (global) +- `taskWorkbenchSessions.model` — which model the session uses (global) +- `taskWorkbenchSessions.status` — session runtime status (global) +- `taskWorkbenchSessions.transcriptJson` — session transcript (global) + +### Fix + +Add a `userTaskState` table to the user actor: + +```typescript +export const userTaskState = sqliteTable("user_task_state", { + taskId: text("task_id").notNull(), + sessionId: text("session_id").notNull(), + activeSessionId: text("active_session_id"), // per-user active tab + unread: integer("unread").notNull().default(0), + draftText: text("draft_text").notNull().default(""), + draftAttachmentsJson: text("draft_attachments_json").notNull().default("[]"), + draftUpdatedAt: integer("draft_updated_at"), + updatedAt: integer("updated_at").notNull(), +}, (table) => ({ + pk: primaryKey(table.taskId, table.sessionId), +})); +``` + +Remove `activeSessionId` from `taskRuntime`. Remove `unread`, `draftText`, `draftAttachmentsJson`, `draftUpdatedAt` from `taskWorkbenchSessions`. + +The task-level status should be derived from ALL sessions (e.g., task is "running" if ANY session is running), not from one user's `activeSessionId`. 
+ +### Files to change + +- **`foundry/packages/backend/src/actors/auth-user/db/schema.ts`** — add `userTaskState` table +- **`foundry/packages/backend/src/actors/task/db/schema.ts`** — remove `activeSessionId` from `taskRuntime`; remove `unread`, `draftText`, `draftAttachmentsJson`, `draftUpdatedAt` from `taskWorkbenchSessions` +- **`foundry/packages/backend/src/actors/task/workbench.ts`** — remove all `activeSessionId` reads/writes; remove draft/unread mutation functions; task status derivation should check all sessions +- **`foundry/packages/backend/src/actors/task/workflow/common.ts`** — remove `activeSessionId` from `getCurrentRecord()` +- **`foundry/packages/backend/src/actors/task/workflow/commands.ts`** — remove `activeSessionId` references in `attachTask` +- **`foundry/packages/backend/src/actors/task/workflow/init.ts`** — remove `activeSessionId` initialization +- **`foundry/packages/client/`** — draft/unread/activeSession operations route to user actor instead of task actor +- **`foundry/packages/frontend/`** — update subscription to fetch per-user state from user actor + +### CLAUDE.md update + +- **`foundry/packages/backend/CLAUDE.md`** — add constraint: "Per-user UI state (active session tab, unread counts, draft text, draft attachments) must live on the user actor, not on shared task/session actors. Task actors hold only task-global state visible to all users. This is critical for multiplayer correctness — multiple users may view the same task simultaneously with different active sessions, unread states, and in-progress drafts." + +--- + +## [ ] 23. Delete `getTaskEnriched` and `enrichTaskRecord` (dead code) + +**Rationale:** `getTaskEnriched` is dead code with zero callers from the client. It's also the worst fan-out pattern in the codebase: org → repo actor → task actor (`.get()`) → github-data actor (`listPullRequestsForRepository` fetches ALL PRs, then `.find()`s by branch name). 
This is exactly the pattern the coordinator model eliminates — task detail comes from `getTaskDetail` on the task actor, sidebar data comes from materialized `taskSummaries` on the org actor. + +### What to delete + +- **`enrichTaskRecord()`** — `repository/actions.ts:117-143`. Fetches all PRs for a repo to find one by branch name. Dead code. +- **`getTaskEnriched` action** — `repository/actions.ts:432-450`. Only caller of `enrichTaskRecord`. Dead code. +- **`getTaskEnriched` org proxy** — `organization/actions.ts:838-849`. Only caller of the repo action. Dead code. +- **`GetTaskEnrichedCommand` type** — wherever defined. + +### Files to change + +- **`foundry/packages/backend/src/actors/repository/actions.ts`** — delete `enrichTaskRecord()` and `getTaskEnriched` action +- **`foundry/packages/backend/src/actors/organization/actions.ts`** — delete `getTaskEnriched` proxy action + +--- + +## [ ] 24. Clean up task status tracking + +**Dependencies:** item 21 + +**Rationale:** Task status tracking is spread across `c.state`, the `task` SQLite table, and the `taskRuntime` table with redundant and dead fields. Consolidate to a single `status` enum on the `task` table. Remove `statusMessage` — human-readable status text should be derived on the client from the `status` enum, not stored on the backend. + +### Fields to delete + +| Field | Location | Why | +|---|---|---| +| `initialized` | `c.state` | Dead code — never read. `status` already tracks init progress. | +| `previousStatus` | `c.state` | Dead code — never set, never read. | +| `statusMessage` | `taskRuntime` table | Client concern — the client should derive display text from the `status` enum. The backend should not store UI copy. | +| `provisionStage` | `taskRuntime` table | Redundant — `status` already encodes provision progress (`init_bootstrap_db` → `init_enqueue_provision` → `init_complete`). | +| `provisionStageUpdatedAt` | `taskRuntime` table | Dead — never read. 
| + +### What remains + +- **`status`** on the `task` table — the single canonical state machine enum. Values: `init_bootstrap_db`, `init_enqueue_provision`, `init_complete`, `running`, `idle`, `error`, `archive_*`, `kill_*`, `archived`, `killed`. + +### Files to change + +- **`foundry/packages/backend/src/actors/task/db/schema.ts`** — remove `statusMessage`, `provisionStage`, `provisionStageUpdatedAt` from `taskRuntime` table +- **`foundry/packages/backend/src/actors/task/index.ts`** — remove `initialized`, `previousStatus` from `createState` +- **`foundry/packages/backend/src/actors/task/workflow/common.ts`** — remove `statusMessage` parameter from `setTaskState()`, remove it from `getCurrentRecord()` query +- **`foundry/packages/backend/src/actors/task/workflow/init.ts`** — remove `statusMessage`, `provisionStage`, `provisionStageUpdatedAt` from taskRuntime inserts/updates; remove `ensureTaskRuntimeCacheColumns()` raw ALTER TABLE for these columns +- **`foundry/packages/backend/src/actors/task/workflow/commands.ts`** — remove `statusMessage` from handler updates +- **`foundry/packages/backend/src/actors/task/workflow/push.ts`** — remove `statusMessage` updates +- **`foundry/packages/backend/src/actors/task/workbench.ts`** — remove `statusMessage` from `buildTaskDetail()`, remove `ensureTaskRuntimeCacheColumns()` for these columns +- **`foundry/packages/shared/src/workbench.ts`** — remove `statusMessage` from `WorkbenchTaskDetail` +- **`foundry/packages/frontend/`** — derive display text from `status` enum instead of reading `statusMessage` + +--- + +## [ ] 25. Remove "Workbench" prefix from all types, functions, files, and tables + +**Rationale:** "Workbench" is not a real concept in the system. It's a namespace prefix applied to every type, function, file, and table name. The actual entities are Task, Session, Repository, Sandbox, Transcript, Draft, etc. — "Workbench" adds zero information and obscures what things actually are. 
+ +### Rename strategy + +Drop "Workbench" everywhere. If the result collides with an existing name (e.g., auth `Session`), use the domain prefix (e.g., `TaskSession` vs auth `Session`). + +### Type renames (`shared/src/workbench.ts`) + +| Before | After | +|---|---| +| `WorkbenchTaskStatus` | `TaskStatus` (already exists as base, merge) | +| `WorkbenchAgentKind` | `AgentKind` | +| `WorkbenchModelId` | `ModelId` | +| `WorkbenchSessionStatus` | `SessionStatus` | +| `WorkbenchTranscriptEvent` | `TranscriptEvent` | +| `WorkbenchComposerDraft` | `ComposerDraft` | +| `WorkbenchSessionSummary` | `SessionSummary` | +| `WorkbenchSessionDetail` | `SessionDetail` | +| `WorkbenchFileChange` | `FileChange` | +| `WorkbenchFileTreeNode` | `FileTreeNode` | +| `WorkbenchLineAttachment` | `LineAttachment` | +| `WorkbenchHistoryEvent` | `HistoryEvent` | +| `WorkbenchDiffLineKind` | `DiffLineKind` | +| `WorkbenchParsedDiffLine` | `ParsedDiffLine` | +| `WorkbenchPullRequestSummary` | `PullRequestSummary` | +| `WorkbenchOpenPrSummary` | `OpenPrSummary` | +| `WorkbenchSandboxSummary` | `SandboxSummary` | +| `WorkbenchTaskSummary` | `TaskSummary` | +| `WorkbenchTaskDetail` | `TaskDetail` | +| `WorkbenchRepositorySummary` | `RepositorySummary` | +| `WorkbenchSession` | `TaskSession` (avoids auth `Session` collision) | +| `WorkbenchTask` | `TaskSnapshot` (avoids `task` table collision) | +| `WorkbenchRepo` | `RepoSnapshot` | +| `WorkbenchRepositorySection` | `RepositorySection` | +| `TaskWorkbenchSnapshot` | `DashboardSnapshot` | +| `WorkbenchModelOption` | `ModelOption` | +| `WorkbenchModelGroup` | `ModelGroup` | +| `TaskWorkbenchSelectInput` | `SelectTaskInput` | +| `TaskWorkbenchCreateTaskInput` | `CreateTaskInput` | +| `TaskWorkbenchRenameInput` | `RenameTaskInput` | +| `TaskWorkbenchSendMessageInput` | `SendMessageInput` | +| `TaskWorkbenchSessionInput` | `SessionInput` | +| `TaskWorkbenchRenameSessionInput` | `RenameSessionInput` | +| `TaskWorkbenchChangeModelInput` | 
`ChangeModelInput` | +| `TaskWorkbenchUpdateDraftInput` | `UpdateDraftInput` | +| `TaskWorkbenchSetSessionUnreadInput` | `SetSessionUnreadInput` | +| `TaskWorkbenchDiffInput` | `DiffInput` | +| `TaskWorkbenchCreateTaskResponse` | `CreateTaskResponse` | +| `TaskWorkbenchAddSessionResponse` | `AddSessionResponse` | + +### File renames + +| Before | After | +|---|---| +| `shared/src/workbench.ts` | `shared/src/types.ts` (or split into `task.ts`, `session.ts`, etc.) | +| `backend/src/actors/task/workbench.ts` | `backend/src/actors/task/sessions.ts` (already planned in item 7) | +| `client/src/workbench-client.ts` | `client/src/task-client.ts` | +| `client/src/workbench-model.ts` | `client/src/model.ts` | +| `client/src/remote/workbench-client.ts` | `client/src/remote/task-client.ts` | +| `client/src/mock/workbench-client.ts` | `client/src/mock/task-client.ts` | + +### Table rename + +| Before | After | +|---|---| +| `task_workbench_sessions` | `task_sessions` | + +### Function renames (backend — drop "Workbench" infix) + +All functions in `backend/src/actors/task/workbench.ts`: +- `createWorkbenchSession` → `createSession` +- `closeWorkbenchSession` → `closeSession` +- `changeWorkbenchModel` → `changeModel` +- `sendWorkbenchMessage` → `sendMessage` +- `stopWorkbenchSession` → `stopSession` +- `renameWorkbenchBranch` → deleted (see item 26) +- `renameWorkbenchTask` → `renameTask` +- `renameWorkbenchSession` → `renameSession` +- `revertWorkbenchFile` → `revertFile` +- `publishWorkbenchPr` → `publishPr` +- `updateWorkbenchDraft` → `updateDraft` +- `setWorkbenchSessionUnread` → `setSessionUnread` +- `markWorkbenchUnread` → `markUnread` +- `syncWorkbenchSessionStatus` → `syncSessionStatus` +- `ensureWorkbenchSeeded` → `ensureSessionSeeded` + +### Queue/command type renames (backend) + +- `TaskWorkbenchValueCommand` → `TaskValueCommand` +- `TaskWorkbenchSessionTitleCommand` → `SessionTitleCommand` +- `TaskWorkbenchSessionUnreadCommand` → `SessionUnreadCommand` + +### Scope + 
+~420 occurrences across shared (35+ types), backend (200+ refs), client (324 refs), frontend (96 refs). Mechanical find-and-replace once the rename map is settled. + +### Files to change + +- **`foundry/packages/shared/src/workbench.ts`** — rename file, rename all exported types +- **`foundry/packages/shared/src/index.ts`** — update re-export path +- **`foundry/packages/shared/src/app-shell.ts`** — update `WorkbenchModelId` → `ModelId` import +- **`foundry/packages/shared/src/realtime-events.ts`** — update all `Workbench*` type imports +- **`foundry/packages/backend/src/actors/task/workbench.ts`** — rename file + all functions +- **`foundry/packages/backend/src/actors/task/index.ts`** — update imports and action registrations +- **`foundry/packages/backend/src/actors/task/db/schema.ts`** — rename `taskWorkbenchSessions` → `taskSessions` +- **`foundry/packages/backend/src/actors/task/workflow/`** — update all workbench references +- **`foundry/packages/backend/src/actors/organization/`** — update type imports and action names +- **`foundry/packages/backend/src/actors/repository/`** — update type imports +- **`foundry/packages/client/src/`** — rename files + update all type/function references +- **`foundry/packages/frontend/src/`** — update all type imports + +### CLAUDE.md update + +Update `foundry/packages/backend/CLAUDE.md` coordinator hierarchy diagram: `taskWorkbenchSessions` → `taskSessions`. + +--- + +## [ ] 26. Delete branch rename (branches immutable after creation) + +**Dependencies:** item 25 + +**Rationale:** Branch name is assigned once at task creation and never changes. Branch rename is unused in the frontend UI and SDK, adds ~80 lines of code, and creates a transactional consistency risk (git rename succeeds but index update fails). 
+ +### Delete + +- **`task/workbench.ts`** — delete `renameWorkbenchBranch()` (~50 lines) +- **`task/index.ts`** — delete `renameWorkbenchBranch` action +- **`task/workflow/queue.ts`** — remove `"task.command.workbench.rename_branch"` queue type +- **`task/workflow/index.ts`** — remove `"task.command.workbench.rename_branch"` handler +- **`organization/actions.ts`** — delete `renameWorkbenchBranch` proxy action +- **`repository/actions.ts`** — delete `registerTaskBranch` action (only caller was rename flow) +- **`client/src/workbench-client.ts`** — remove `renameBranch` from interface +- **`client/src/remote/workbench-client.ts`** — delete `renameBranch()` method +- **`client/src/mock/workbench-client.ts`** — delete `renameBranch()` method +- **`client/src/backend-client.ts`** — delete `renameWorkbenchBranch` from interface + implementation +- **`client/src/mock/backend-client.ts`** — delete `renameWorkbenchBranch` implementation +- **`frontend/src/components/mock-layout.tsx`** — remove `renameBranch` from client interface, delete `onRenameBranch` callbacks and all `renameBranch` wiring (~8 refs) +- **`shared/src/workbench.ts`** — delete `TaskWorkbenchRenameInput` (if only used by branch rename; check if task title rename shares it) + +### Keep + +- `deriveFallbackTitle()` + `sanitizeBranchName()` + `resolveCreateFlowDecision()` — initial branch derivation at creation +- `registerTaskBranchMutation()` — used during task creation for `onBranch` path +- `renameWorkbenchTask()` — title rename is independent, stays +- `taskIndex` table — still the coordinator index for branch→task mapping + +--- + +## [ ] Final audit pass (run after all items above are complete) + +### Dead code scan + +Already tracked in item 18: once all changes are complete, do a full scan to find dead actions, queues, SQLite tables, and workflow steps that need to be removed. 
+ +### Dead events audit + +Scan all event types emitted by actors (in `packages/shared/src/realtime-events.ts` and anywhere actors call `c.broadcast()` or similar). Cross-reference against all client subscribers (in `packages/client/` and `packages/frontend/`). Remove any events that are emitted but never subscribed to by any client. This includes events that may have been superseded by the consolidated single-topic-per-actor pattern (item 14). diff --git a/foundry/docker/frontend.dev.Dockerfile b/foundry/docker/frontend.dev.Dockerfile index 3b0d8e4..dd74dd0 100644 --- a/foundry/docker/frontend.dev.Dockerfile +++ b/foundry/docker/frontend.dev.Dockerfile @@ -8,4 +8,4 @@ RUN npm install -g pnpm@10.28.2 WORKDIR /app -CMD ["bash", "-lc", "pnpm install --force --frozen-lockfile --filter @sandbox-agent/foundry-frontend... && cd foundry/packages/frontend && exec pnpm vite --host 0.0.0.0 --port 4173"] +CMD ["bash", "-lc", "pnpm install --frozen-lockfile --filter @sandbox-agent/foundry-frontend... 
&& cd foundry/packages/frontend && exec pnpm vite --host 0.0.0.0 --port 4173"] diff --git a/foundry/packages/backend/CLAUDE.md b/foundry/packages/backend/CLAUDE.md index 432bc85..ae4257e 100644 --- a/foundry/packages/backend/CLAUDE.md +++ b/foundry/packages/backend/CLAUDE.md @@ -5,14 +5,12 @@ Keep the backend actor tree aligned with this shape unless we explicitly decide to change it: ```text -OrganizationActor -├─ HistoryActor(organization-scoped global feed) +OrganizationActor (direct coordinator for tasks) +├─ AuditLogActor (organization-scoped global feed) ├─ GithubDataActor -├─ RepositoryActor(repo) -│ └─ TaskActor(task) -│ ├─ TaskSessionActor(session) × N -│ │ └─ SessionStatusSyncActor(session) × 0..1 -│ └─ Task-local workbench state +├─ TaskActor(task) +│ ├─ taskSessions → session metadata/transcripts +│ └─ taskSandboxes → sandbox instance index └─ SandboxInstanceActor(sandboxProviderId, sandboxId) × N ``` @@ -28,53 +26,125 @@ Children push updates **up** to their direct coordinator only. 
Coordinators broa ### Coordinator hierarchy and index tables ```text -OrganizationActor (coordinator for repos + auth users) +OrganizationActor (coordinator for tasks + auth users) │ │ Index tables: -│ ├─ repos → RepositoryActor index (repo catalog) -│ ├─ taskLookup → TaskActor index (taskId → repoId routing) -│ ├─ taskSummaries → TaskActor index (materialized sidebar projection) -│ ├─ authSessionIndex → AuthUserActor index (session token → userId) -│ ├─ authEmailIndex → AuthUserActor index (email → userId) -│ └─ authAccountIndex → AuthUserActor index (OAuth account → userId) +│ ├─ taskIndex → TaskActor index (taskId → repoId + branchName) +│ ├─ taskSummaries → TaskActor materialized sidebar projection +│ ├─ authSessionIndex → UserActor index (session token → userId) +│ ├─ authEmailIndex → UserActor index (email → userId) +│ └─ authAccountIndex → UserActor index (OAuth account → userId) │ -├─ RepositoryActor (coordinator for tasks) +├─ TaskActor (coordinator for sessions + sandboxes) │ │ │ │ Index tables: -│ │ └─ taskIndex → TaskActor index (taskId → branchName) +│ │ ├─ taskWorkspaceSessions → Session index (session metadata + transcript) +│ │ └─ taskSandboxes → SandboxInstanceActor index (sandbox history) │ │ -│ └─ TaskActor (coordinator for sessions + sandboxes) -│ │ -│ │ Index tables: -│ │ ├─ taskWorkbenchSessions → Session index (session metadata, transcript, draft) -│ │ └─ taskSandboxes → SandboxInstanceActor index (sandbox history) -│ │ -│ └─ SandboxInstanceActor (leaf) +│ └─ SandboxInstanceActor (leaf) │ -├─ HistoryActor (organization-scoped audit log, not a coordinator) +├─ AuditLogActor (organization-scoped audit log, not a coordinator) └─ GithubDataActor (GitHub API cache, not a coordinator) ``` When adding a new index table, annotate it in the schema file with a doc comment identifying it as a coordinator index and which child actor it indexes (see existing examples). 
+## Lazy Task Actor Creation — CRITICAL + +**Task actors must NEVER be created during GitHub sync or bulk operations.** Creating hundreds of task actors simultaneously causes OOM crashes. An org can have 200+ PRs; spawning an actor per PR kills the process. + +### The two creation points + +There are exactly **two** places that may create a task actor: + +1. **`createTaskMutation`** in `task-mutations.ts` — the only backend code that calls `getOrCreateTask`. Triggered by explicit user action ("New Task" button). One actor at a time. + +2. **`backend-client.ts` client helper** — calls `client.task.getOrCreate(...)`. This is the lazy materialization point: when a user clicks a virtual task in the sidebar, the client creates the actor, and it self-initializes in `getCurrentRecord()` (`workflow/common.ts`) by reading branch/title from the org's `getTaskIndexEntry` action. + +### The rule + +### The rule + +**Never use `getOrCreateTask` inside a sync loop, webhook handler, or any bulk operation.** That's what caused the OOM — 186 actors spawned simultaneously during PR sync. + +`getOrCreateTask` IS allowed in: +- `createTaskMutation` — explicit user "New Task" action +- `requireWorkspaceTask` — user-initiated actions (createSession, sendMessage, etc.) that may hit a virtual task +- `getTask` action on the org — called by sandbox actor and client, needs to materialize virtual tasks +- `backend-client.ts` client helper — lazy materialization when user views a task + +### Virtual tasks (PR-driven) + +During PR sync, `refreshTaskSummaryForBranchMutation` is called for every changed PR (via github-data's `emitPullRequestChangeEvents`). It writes **virtual task entries** to the org actor's local `taskIndex` + `taskSummaries` tables only. No task actor is spawned. No cross-actor calls to task actors. + +When the user interacts with a virtual task (clicks it, creates a session): +1. 
Client or org actor calls `getOrCreate` on the task actor key → actor is created with empty DB +2. Any action on the actor calls `getCurrentRecord()` → sees empty DB → reads branch/title from org's `getTaskIndexEntry` → calls `initBootstrapDbActivity` + `initCompleteActivity` → task is now real + +### Call sites to watch + +- `refreshTaskSummaryForBranchMutation` — called in bulk during sync. Must ONLY write to org local tables. Never create task actors or call task actor actions. +- `emitPullRequestChangeEvents` in github-data — iterates all changed PRs. Must remain fire-and-forget with no actor fan-out. + ## Ownership Rules -- `OrganizationActor` is the organization coordinator and lookup/index owner. -- `HistoryActor` is organization-scoped. There is one organization-level history feed. -- `RepositoryActor` is the repo coordinator and owns repo-local caches/indexes. +- `OrganizationActor` is the organization coordinator, direct coordinator for tasks, and lookup/index owner. It owns the task index, task summaries, and repo catalog. +- `AuditLogActor` is organization-scoped. There is one organization-level audit log feed. - `TaskActor` is one branch. Treat `1 task = 1 branch` once branch assignment is finalized. - `TaskActor` can have many sessions. - `TaskActor` can reference many sandbox instances historically, but should have only one active sandbox/session at a time. -- Session unread state and draft prompts are backend-owned workbench state, not frontend-local state. -- Branch rename is a real git operation, not just metadata. +- Session unread state and draft prompts are backend-owned workspace state, not frontend-local state. +- Branch names are immutable after task creation. Do not implement branch-rename flows. - `SandboxInstanceActor` stays separate from `TaskActor`; tasks/sessions reference it by identity. - The backend stores no local git state. No clones, no refs, no working trees, and no git-spice. 
Repository metadata comes from GitHub API data and webhook events. Any working-tree git operation runs inside a sandbox via `executeInSandbox()`. - When a backend request path must aggregate multiple independent actor calls or reads, prefer bounded parallelism over sequential fan-out when correctness permits. Do not serialize independent work by default. - Only a coordinator creates/destroys its children. Do not create child actors from outside the coordinator. -- Children push state changes up to their direct coordinator only — never skip levels (e.g., task pushes to repo, not directly to org, unless org is the direct coordinator for that index). +- Children push state changes up to their direct coordinator only. Task actors push summary updates directly to the organization actor. - Read paths must use the coordinator's local index tables. Do not fan out to child actors on the hot read path. - Never build "enriched" read actions that chain through multiple actors (e.g., coordinator → child actor → sibling actor). If data from multiple actors is needed for a read, it should already be materialized in the coordinator's index tables via push updates. If it's not there, fix the write path to push it — do not add a fan-out read path. +## Drizzle Migration Maintenance + +After changing any actor's `db/schema.ts`, you **must** regenerate the corresponding migration so the runtime creates the tables that match the schema. Forgetting this step causes `no such table` errors at runtime. + +1. **Generate a new drizzle migration.** Run from `packages/backend`: + ```bash + npx drizzle-kit generate --config=./src/actors//db/drizzle.config.ts + ``` + If the interactive prompt is unavailable (e.g. in a non-TTY), manually create a new `.sql` file under `./src/actors//db/drizzle/` and add the corresponding entry to `meta/_journal.json`. + +2. 
**Regenerate the compiled `migrations.ts`.** Run from the foundry root: + ```bash + npx tsx packages/backend/src/actors/_scripts/generate-actor-migrations.ts + ``` + +3. **Verify insert/upsert calls.** Every column with `.notNull()` (and no `.default(...)`) must be provided a value in all `insert()` and `onConflictDoUpdate()` calls. Missing a NOT NULL column causes a runtime constraint violation, not a type error. + +4. **Nuke RivetKit state in dev** after migration changes to start fresh: + ```bash + docker compose -f compose.dev.yaml down + docker volume rm foundry_foundry_rivetkit_storage + docker compose -f compose.dev.yaml up -d + ``` + +Actors with drizzle migrations: `organization`, `audit-log`, `task`. Other actors (`user`, `github-data`) use inline migrations without drizzle. + +## Workflow Step Nesting — FORBIDDEN + +**Never call `c.step()` / `ctx.step()` from inside another step's `run` callback.** RivetKit workflow steps cannot be nested. Doing so causes the runtime error: *"Cannot start a new workflow entry while another is in progress."* + +This means: +- Functions called from within a step `run` callback must NOT use `c.step()`, `c.loop()`, `c.sleep()`, or `c.queue.next()`. +- If a mutation function needs to be called both from a step and standalone, it must only do plain DB/API work — no workflow primitives. The workflow step wrapping belongs in the workflow file, not in the mutation. +- Helper wrappers that conditionally call `c.step()` (like a `runSyncStep` pattern) are dangerous — if the caller is already inside a step, the nested `c.step()` will crash at runtime with no compile-time warning. + +**Rule of thumb:** Workflow primitives (`step`, `loop`, `sleep`, `queue.next`) may only appear at the top level of a workflow function or inside a `loop` callback — never inside a step's `run`. + +## SQLite Constraints + +- Single-row tables must use an integer primary key with `CHECK (id = 1)` to enforce the singleton invariant at the database level. 
+- Follow the task actor pattern for metadata/profile rows and keep the fixed row id in code as `1`, not a string sentinel. + ## Multiplayer Correctness Per-user UI state must live on the user actor, not on shared task/session actors. This is critical for multiplayer — multiple users may view the same task simultaneously with different active sessions, unread states, and in-progress drafts. @@ -85,6 +155,49 @@ Per-user UI state must live on the user actor, not on shared task/session actors Do not store per-user preferences, selections, or ephemeral UI state on shared actors. If a field's value should differ between two users looking at the same task, it belongs on the user actor. +## Audit Log Maintenance + +Every new action or command handler that represents a user-visible or workflow-significant event must append to the audit log actor. The audit log must remain a comprehensive record of significant operations. + +## Debugging Actors + +### RivetKit Inspector UI + +The RivetKit inspector UI at `http://localhost:6420/ui/` is the most reliable way to debug actor state in local development. The inspector HTTP API (`/inspector/workflow-history`) has a known bug where it returns empty `{}` even when the workflow has entries — always cross-check with the UI. + +**Useful inspector URL pattern:** +``` +http://localhost:6420/ui/?u=http%3A%2F%2F127.0.0.1%3A6420&ns=default&r=default&n=[%22%22]&actorId=&tab= +``` + +Tabs: `workflow`, `database`, `state`, `queue`, `connections`, `metadata`. 
+ +**To find actor IDs:** +```bash +curl -s 'http://127.0.0.1:6420/actors?name=organization' +``` + +**To query actor DB via bun (inside container):** +```bash +docker compose -f compose.dev.yaml exec -T backend bun -e ' + var Database = require("bun:sqlite"); + var db = new Database("/root/.local/share/foundry/rivetkit/databases/.db", { readonly: true }); + console.log(JSON.stringify(db.query("SELECT name FROM sqlite_master WHERE type=?").all("table"))); +' +``` + +**To call actor actions via inspector:** +```bash +curl -s -X POST 'http://127.0.0.1:6420/gateway//inspector/action/' \ + -H 'Content-Type: application/json' -d '{"args":[{}]}' +``` + +### Known inspector API bugs + +- `GET /inspector/workflow-history` may return `{"history":{}}` even when workflow has run. Use the UI's Workflow tab instead. +- `GET /inspector/queue` is reliable for checking pending messages. +- `GET /inspector/state` is reliable for checking actor state. + ## Maintenance - Keep this file up to date whenever actor ownership, hierarchy, or lifecycle responsibilities change. 
diff --git a/foundry/packages/backend/src/actors/auth-user/db/db.ts b/foundry/packages/backend/src/actors/audit-log/db/db.ts similarity index 69% rename from foundry/packages/backend/src/actors/auth-user/db/db.ts rename to foundry/packages/backend/src/actors/audit-log/db/db.ts index b434338..d808ec0 100644 --- a/foundry/packages/backend/src/actors/auth-user/db/db.ts +++ b/foundry/packages/backend/src/actors/audit-log/db/db.ts @@ -2,4 +2,4 @@ import { db } from "rivetkit/db/drizzle"; import * as schema from "./schema.js"; import migrations from "./migrations.js"; -export const authUserDb = db({ schema, migrations }); +export const auditLogDb = db({ schema, migrations }); diff --git a/foundry/packages/backend/src/actors/audit-log/db/drizzle.config.ts b/foundry/packages/backend/src/actors/audit-log/db/drizzle.config.ts new file mode 100644 index 0000000..da5e904 --- /dev/null +++ b/foundry/packages/backend/src/actors/audit-log/db/drizzle.config.ts @@ -0,0 +1,6 @@ +import { defineConfig } from "rivetkit/db/drizzle"; + +export default defineConfig({ + out: "./src/actors/audit-log/db/drizzle", + schema: "./src/actors/audit-log/db/schema.ts", +}); diff --git a/foundry/packages/backend/src/actors/history/db/drizzle/0000_fluffy_kid_colt.sql b/foundry/packages/backend/src/actors/audit-log/db/drizzle/0000_fluffy_kid_colt.sql similarity index 100% rename from foundry/packages/backend/src/actors/history/db/drizzle/0000_fluffy_kid_colt.sql rename to foundry/packages/backend/src/actors/audit-log/db/drizzle/0000_fluffy_kid_colt.sql diff --git a/foundry/packages/backend/src/actors/audit-log/db/drizzle/0001_add_repo_id.sql b/foundry/packages/backend/src/actors/audit-log/db/drizzle/0001_add_repo_id.sql new file mode 100644 index 0000000..9ada559 --- /dev/null +++ b/foundry/packages/backend/src/actors/audit-log/db/drizzle/0001_add_repo_id.sql @@ -0,0 +1 @@ +ALTER TABLE `events` ADD COLUMN `repo_id` text; diff --git 
a/foundry/packages/backend/src/actors/history/db/drizzle/meta/0000_snapshot.json b/foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/0000_snapshot.json similarity index 100% rename from foundry/packages/backend/src/actors/history/db/drizzle/meta/0000_snapshot.json rename to foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/0000_snapshot.json diff --git a/foundry/packages/backend/src/actors/repository/db/drizzle/meta/0000_snapshot.json b/foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/0001_snapshot.json similarity index 64% rename from foundry/packages/backend/src/actors/repository/db/drizzle/meta/0000_snapshot.json rename to foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/0001_snapshot.json index 940b4e6..cf2910c 100644 --- a/foundry/packages/backend/src/actors/repository/db/drizzle/meta/0000_snapshot.json +++ b/foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/0001_snapshot.json @@ -1,48 +1,31 @@ { "version": "6", "dialect": "sqlite", - "id": "6ffd6acb-e737-46ee-a8fe-fcfddcdd6ea9", - "prevId": "00000000-0000-0000-0000-000000000000", + "id": "a1b2c3d4-0001-4000-8000-000000000001", + "prevId": "e592c829-141f-4740-88b7-09cf957a4405", "tables": { - "repo_meta": { - "name": "repo_meta", + "events": { + "name": "events", "columns": { "id": { "name": "id", "type": "integer", "primaryKey": true, "notNull": true, - "autoincrement": false + "autoincrement": true }, - "remote_url": { - "name": "remote_url", + "repo_id": { + "name": "repo_id", "type": "text", "primaryKey": false, - "notNull": true, + "notNull": false, "autoincrement": false }, - "updated_at": { - "name": "updated_at", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} - }, - "task_index": { - "name": "task_index", - "columns": { "task_id": { "name": "task_id", "type": "text", - 
"primaryKey": true, - "notNull": true, + "primaryKey": false, + "notNull": false, "autoincrement": false }, "branch_name": { @@ -52,15 +35,22 @@ "notNull": false, "autoincrement": false }, - "created_at": { - "name": "created_at", - "type": "integer", + "kind": { + "name": "kind", + "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false }, - "updated_at": { - "name": "updated_at", + "payload_json": { + "name": "payload_json", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "created_at": { + "name": "created_at", "type": "integer", "primaryKey": false, "notNull": true, diff --git a/foundry/packages/backend/src/actors/history/db/drizzle/meta/_journal.json b/foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/_journal.json similarity index 59% rename from foundry/packages/backend/src/actors/history/db/drizzle/meta/_journal.json rename to foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/_journal.json index 93cf8ce..0393be2 100644 --- a/foundry/packages/backend/src/actors/history/db/drizzle/meta/_journal.json +++ b/foundry/packages/backend/src/actors/audit-log/db/drizzle/meta/_journal.json @@ -8,6 +8,13 @@ "when": 1773376223815, "tag": "0000_fluffy_kid_colt", "breakpoints": true + }, + { + "idx": 1, + "version": "6", + "when": 1773376223816, + "tag": "0001_add_repo_id", + "breakpoints": true } ] } diff --git a/foundry/packages/backend/src/actors/history/db/migrations.ts b/foundry/packages/backend/src/actors/audit-log/db/migrations.ts similarity index 78% rename from foundry/packages/backend/src/actors/history/db/migrations.ts rename to foundry/packages/backend/src/actors/audit-log/db/migrations.ts index 766c225..5bf9b5a 100644 --- a/foundry/packages/backend/src/actors/history/db/migrations.ts +++ b/foundry/packages/backend/src/actors/audit-log/db/migrations.ts @@ -10,6 +10,12 @@ const journal = { tag: "0000_fluffy_kid_colt", breakpoints: true, }, + { + idx: 1, + when: 1773376223816, 
+ tag: "0001_add_repo_id", + breakpoints: true, + }, ], } as const; @@ -24,6 +30,8 @@ export default { \`payload_json\` text NOT NULL, \`created_at\` integer NOT NULL ); +`, + m0001: `ALTER TABLE \`events\` ADD COLUMN \`repo_id\` text; `, } as const, }; diff --git a/foundry/packages/backend/src/actors/history/db/schema.ts b/foundry/packages/backend/src/actors/audit-log/db/schema.ts similarity index 77% rename from foundry/packages/backend/src/actors/history/db/schema.ts rename to foundry/packages/backend/src/actors/audit-log/db/schema.ts index 80eb7f4..d275dd4 100644 --- a/foundry/packages/backend/src/actors/history/db/schema.ts +++ b/foundry/packages/backend/src/actors/audit-log/db/schema.ts @@ -2,10 +2,11 @@ import { integer, sqliteTable, text } from "rivetkit/db/drizzle"; export const events = sqliteTable("events", { id: integer("id").primaryKey({ autoIncrement: true }), + repoId: text("repo_id"), taskId: text("task_id"), branchName: text("branch_name"), kind: text("kind").notNull(), - // Structured by the history event kind definitions in application code. + // Structured by the audit-log event kind definitions in application code. 
payloadJson: text("payload_json").notNull(), createdAt: integer("created_at").notNull(), }); diff --git a/foundry/packages/backend/src/actors/audit-log/index.ts b/foundry/packages/backend/src/actors/audit-log/index.ts new file mode 100644 index 0000000..e189011 --- /dev/null +++ b/foundry/packages/backend/src/actors/audit-log/index.ts @@ -0,0 +1,98 @@ +// @ts-nocheck +import { and, desc, eq } from "drizzle-orm"; +import { actor } from "rivetkit"; +import type { AuditLogEvent } from "@sandbox-agent/foundry-shared"; +import { auditLogDb } from "./db/db.js"; +import { events } from "./db/schema.js"; + +export interface AuditLogInput { + organizationId: string; +} + +export interface AppendAuditLogCommand { + kind: string; + repoId?: string; + taskId?: string; + branchName?: string; + payload: Record; +} + +export interface ListAuditLogParams { + repoId?: string; + branch?: string; + taskId?: string; + limit?: number; +} + +/** + * Organization-scoped audit log. One per org, not one per repo. + * + * The org is the coordinator for all tasks across repos, and we frequently need + * to query the full audit trail across repos (e.g. org-wide activity feed, + * compliance). A per-repo audit log would require fan-out reads every time. + * Keeping it org-scoped gives us a single queryable feed with optional repoId + * filtering when callers want a narrower view. + */ +export const auditLog = actor({ + db: auditLogDb, + options: { + name: "Audit Log", + icon: "database", + }, + createState: (_c, input: AuditLogInput) => ({ + organizationId: input.organizationId, + }), + actions: { + async append(c, body: AppendAuditLogCommand): Promise<{ ok: true }> { + const now = Date.now(); + await c.db + .insert(events) + .values({ + repoId: body.repoId ?? null, + taskId: body.taskId ?? null, + branchName: body.branchName ?? 
null, + kind: body.kind, + payloadJson: JSON.stringify(body.payload), + createdAt: now, + }) + .run(); + return { ok: true }; + }, + + async list(c, params?: ListAuditLogParams): Promise { + const whereParts = []; + if (params?.repoId) { + whereParts.push(eq(events.repoId, params.repoId)); + } + if (params?.taskId) { + whereParts.push(eq(events.taskId, params.taskId)); + } + if (params?.branch) { + whereParts.push(eq(events.branchName, params.branch)); + } + + const base = c.db + .select({ + id: events.id, + repoId: events.repoId, + taskId: events.taskId, + branchName: events.branchName, + kind: events.kind, + payloadJson: events.payloadJson, + createdAt: events.createdAt, + }) + .from(events); + + const rows = await (whereParts.length > 0 ? base.where(and(...whereParts)) : base) + .orderBy(desc(events.createdAt)) + .limit(params?.limit ?? 100) + .all(); + + return rows.map((row) => ({ + ...row, + organizationId: c.state.organizationId, + repoId: row.repoId ?? null, + })); + }, + }, +}); diff --git a/foundry/packages/backend/src/actors/auth-user/db/schema.ts b/foundry/packages/backend/src/actors/auth-user/db/schema.ts deleted file mode 100644 index b87567a..0000000 --- a/foundry/packages/backend/src/actors/auth-user/db/schema.ts +++ /dev/null @@ -1,70 +0,0 @@ -import { integer, sqliteTable, text, uniqueIndex } from "drizzle-orm/sqlite-core"; - -export const authUsers = sqliteTable("user", { - id: text("id").notNull().primaryKey(), - name: text("name").notNull(), - email: text("email").notNull(), - emailVerified: integer("email_verified").notNull(), - image: text("image"), - createdAt: integer("created_at").notNull(), - updatedAt: integer("updated_at").notNull(), -}); - -export const authSessions = sqliteTable( - "session", - { - id: text("id").notNull().primaryKey(), - token: text("token").notNull(), - userId: text("user_id").notNull(), - expiresAt: integer("expires_at").notNull(), - ipAddress: text("ip_address"), - userAgent: text("user_agent"), - createdAt: 
integer("created_at").notNull(), - updatedAt: integer("updated_at").notNull(), - }, - (table) => ({ - tokenIdx: uniqueIndex("session_token_idx").on(table.token), - }), -); - -export const authAccounts = sqliteTable( - "account", - { - id: text("id").notNull().primaryKey(), - accountId: text("account_id").notNull(), - providerId: text("provider_id").notNull(), - userId: text("user_id").notNull(), - accessToken: text("access_token"), - refreshToken: text("refresh_token"), - idToken: text("id_token"), - accessTokenExpiresAt: integer("access_token_expires_at"), - refreshTokenExpiresAt: integer("refresh_token_expires_at"), - scope: text("scope"), - password: text("password"), - createdAt: integer("created_at").notNull(), - updatedAt: integer("updated_at").notNull(), - }, - (table) => ({ - providerAccountIdx: uniqueIndex("account_provider_account_idx").on(table.providerId, table.accountId), - }), -); - -export const userProfiles = sqliteTable("user_profiles", { - userId: text("user_id").notNull().primaryKey(), - githubAccountId: text("github_account_id"), - githubLogin: text("github_login"), - roleLabel: text("role_label").notNull(), - eligibleOrganizationIdsJson: text("eligible_organization_ids_json").notNull(), - starterRepoStatus: text("starter_repo_status").notNull(), - starterRepoStarredAt: integer("starter_repo_starred_at"), - starterRepoSkippedAt: integer("starter_repo_skipped_at"), - createdAt: integer("created_at").notNull(), - updatedAt: integer("updated_at").notNull(), -}); - -export const sessionState = sqliteTable("session_state", { - sessionId: text("session_id").notNull().primaryKey(), - activeOrganizationId: text("active_organization_id"), - createdAt: integer("created_at").notNull(), - updatedAt: integer("updated_at").notNull(), -}); diff --git a/foundry/packages/backend/src/actors/auth-user/index.ts b/foundry/packages/backend/src/actors/auth-user/index.ts deleted file mode 100644 index a77635a..0000000 --- 
a/foundry/packages/backend/src/actors/auth-user/index.ts +++ /dev/null @@ -1,353 +0,0 @@ -import { and, asc, count as sqlCount, desc, eq, gt, gte, inArray, isNotNull, isNull, like, lt, lte, ne, notInArray, or } from "drizzle-orm"; -import { actor } from "rivetkit"; -import { authUserDb } from "./db/db.js"; -import { authAccounts, authSessions, authUsers, sessionState, userProfiles } from "./db/schema.js"; - -const tables = { - user: authUsers, - session: authSessions, - account: authAccounts, - userProfiles, - sessionState, -} as const; - -function tableFor(model: string) { - const table = tables[model as keyof typeof tables]; - if (!table) { - throw new Error(`Unsupported auth user model: ${model}`); - } - return table as any; -} - -function columnFor(table: any, field: string) { - const column = table[field]; - if (!column) { - throw new Error(`Unsupported auth user field: ${field}`); - } - return column; -} - -function normalizeValue(value: unknown): unknown { - if (value instanceof Date) { - return value.getTime(); - } - if (Array.isArray(value)) { - return value.map((entry) => normalizeValue(entry)); - } - return value; -} - -function clauseToExpr(table: any, clause: any) { - const column = columnFor(table, clause.field); - const value = normalizeValue(clause.value); - - switch (clause.operator) { - case "ne": - return value === null ? isNotNull(column) : ne(column, value as any); - case "lt": - return lt(column, value as any); - case "lte": - return lte(column, value as any); - case "gt": - return gt(column, value as any); - case "gte": - return gte(column, value as any); - case "in": - return inArray(column, Array.isArray(value) ? (value as any[]) : [value as any]); - case "not_in": - return notInArray(column, Array.isArray(value) ? (value as any[]) : [value as any]); - case "contains": - return like(column, `%${String(value ?? "")}%`); - case "starts_with": - return like(column, `${String(value ?? 
"")}%`); - case "ends_with": - return like(column, `%${String(value ?? "")}`); - case "eq": - default: - return value === null ? isNull(column) : eq(column, value as any); - } -} - -function buildWhere(table: any, where: any[] | undefined) { - if (!where || where.length === 0) { - return undefined; - } - - let expr = clauseToExpr(table, where[0]); - for (const clause of where.slice(1)) { - const next = clauseToExpr(table, clause); - expr = clause.connector === "OR" ? or(expr, next) : and(expr, next); - } - return expr; -} - -function applyJoinToRow(c: any, model: string, row: any, join: any) { - if (!row || !join) { - return row; - } - - if (model === "session" && join.user) { - return c.db - .select() - .from(authUsers) - .where(eq(authUsers.id, row.userId)) - .get() - .then((user: any) => ({ ...row, user: user ?? null })); - } - - if (model === "account" && join.user) { - return c.db - .select() - .from(authUsers) - .where(eq(authUsers.id, row.userId)) - .get() - .then((user: any) => ({ ...row, user: user ?? null })); - } - - if (model === "user" && join.account) { - return c.db - .select() - .from(authAccounts) - .where(eq(authAccounts.userId, row.id)) - .all() - .then((accounts: any[]) => ({ ...row, account: accounts })); - } - - return Promise.resolve(row); -} - -async function applyJoinToRows(c: any, model: string, rows: any[], join: any) { - if (!join || rows.length === 0) { - return rows; - } - - if (model === "session" && join.user) { - const userIds = [...new Set(rows.map((row) => row.userId).filter(Boolean))]; - const users = userIds.length > 0 ? await c.db.select().from(authUsers).where(inArray(authUsers.id, userIds)).all() : []; - const userMap = new Map(users.map((user: any) => [user.id, user])); - return rows.map((row) => ({ ...row, user: userMap.get(row.userId) ?? null })); - } - - if (model === "account" && join.user) { - const userIds = [...new Set(rows.map((row) => row.userId).filter(Boolean))]; - const users = userIds.length > 0 ? 
await c.db.select().from(authUsers).where(inArray(authUsers.id, userIds)).all() : []; - const userMap = new Map(users.map((user: any) => [user.id, user])); - return rows.map((row) => ({ ...row, user: userMap.get(row.userId) ?? null })); - } - - if (model === "user" && join.account) { - const userIds = rows.map((row) => row.id); - const accounts = userIds.length > 0 ? await c.db.select().from(authAccounts).where(inArray(authAccounts.userId, userIds)).all() : []; - const accountsByUserId = new Map(); - for (const account of accounts) { - const entries = accountsByUserId.get(account.userId) ?? []; - entries.push(account); - accountsByUserId.set(account.userId, entries); - } - return rows.map((row) => ({ ...row, account: accountsByUserId.get(row.id) ?? [] })); - } - - return rows; -} - -export const authUser = actor({ - db: authUserDb, - options: { - name: "Auth User", - icon: "shield", - actionTimeout: 60_000, - }, - createState: (_c, input: { userId: string }) => ({ - userId: input.userId, - }), - actions: { - async createAuthRecord(c, input: { model: string; data: Record }) { - const table = tableFor(input.model); - await c.db - .insert(table) - .values(input.data as any) - .run(); - return await c.db - .select() - .from(table) - .where(eq(columnFor(table, "id"), input.data.id as any)) - .get(); - }, - - async findOneAuthRecord(c, input: { model: string; where: any[]; join?: any }) { - const table = tableFor(input.model); - const predicate = buildWhere(table, input.where); - const row = predicate ? await c.db.select().from(table).where(predicate).get() : await c.db.select().from(table).get(); - return await applyJoinToRow(c, input.model, row ?? 
null, input.join); - }, - - async findManyAuthRecords(c, input: { model: string; where?: any[]; limit?: number; offset?: number; sortBy?: any; join?: any }) { - const table = tableFor(input.model); - const predicate = buildWhere(table, input.where); - let query: any = c.db.select().from(table); - if (predicate) { - query = query.where(predicate); - } - if (input.sortBy?.field) { - const column = columnFor(table, input.sortBy.field); - query = query.orderBy(input.sortBy.direction === "asc" ? asc(column) : desc(column)); - } - if (typeof input.limit === "number") { - query = query.limit(input.limit); - } - if (typeof input.offset === "number") { - query = query.offset(input.offset); - } - const rows = await query.all(); - return await applyJoinToRows(c, input.model, rows, input.join); - }, - - async updateAuthRecord(c, input: { model: string; where: any[]; update: Record }) { - const table = tableFor(input.model); - const predicate = buildWhere(table, input.where); - if (!predicate) { - throw new Error("updateAuthRecord requires a where clause"); - } - await c.db - .update(table) - .set(input.update as any) - .where(predicate) - .run(); - return await c.db.select().from(table).where(predicate).get(); - }, - - async updateManyAuthRecords(c, input: { model: string; where: any[]; update: Record }) { - const table = tableFor(input.model); - const predicate = buildWhere(table, input.where); - if (!predicate) { - throw new Error("updateManyAuthRecords requires a where clause"); - } - await c.db - .update(table) - .set(input.update as any) - .where(predicate) - .run(); - const row = await c.db.select({ value: sqlCount() }).from(table).where(predicate).get(); - return row?.value ?? 
0; - }, - - async deleteAuthRecord(c, input: { model: string; where: any[] }) { - const table = tableFor(input.model); - const predicate = buildWhere(table, input.where); - if (!predicate) { - throw new Error("deleteAuthRecord requires a where clause"); - } - await c.db.delete(table).where(predicate).run(); - }, - - async deleteManyAuthRecords(c, input: { model: string; where: any[] }) { - const table = tableFor(input.model); - const predicate = buildWhere(table, input.where); - if (!predicate) { - throw new Error("deleteManyAuthRecords requires a where clause"); - } - const rows = await c.db.select().from(table).where(predicate).all(); - await c.db.delete(table).where(predicate).run(); - return rows.length; - }, - - async countAuthRecords(c, input: { model: string; where?: any[] }) { - const table = tableFor(input.model); - const predicate = buildWhere(table, input.where); - const row = predicate - ? await c.db.select({ value: sqlCount() }).from(table).where(predicate).get() - : await c.db.select({ value: sqlCount() }).from(table).get(); - return row?.value ?? 0; - }, - - async getAppAuthState(c, input: { sessionId: string }) { - const session = await c.db.select().from(authSessions).where(eq(authSessions.id, input.sessionId)).get(); - if (!session) { - return null; - } - const [user, profile, currentSessionState, accounts] = await Promise.all([ - c.db.select().from(authUsers).where(eq(authUsers.id, session.userId)).get(), - c.db.select().from(userProfiles).where(eq(userProfiles.userId, session.userId)).get(), - c.db.select().from(sessionState).where(eq(sessionState.sessionId, input.sessionId)).get(), - c.db.select().from(authAccounts).where(eq(authAccounts.userId, session.userId)).all(), - ]); - return { - session, - user, - profile: profile ?? null, - sessionState: currentSessionState ?? 
null, - accounts, - }; - }, - - async upsertUserProfile( - c, - input: { - userId: string; - patch: { - githubAccountId?: string | null; - githubLogin?: string | null; - roleLabel?: string; - eligibleOrganizationIdsJson?: string; - starterRepoStatus?: string; - starterRepoStarredAt?: number | null; - starterRepoSkippedAt?: number | null; - }; - }, - ) { - const now = Date.now(); - await c.db - .insert(userProfiles) - .values({ - userId: input.userId, - githubAccountId: input.patch.githubAccountId ?? null, - githubLogin: input.patch.githubLogin ?? null, - roleLabel: input.patch.roleLabel ?? "GitHub user", - eligibleOrganizationIdsJson: input.patch.eligibleOrganizationIdsJson ?? "[]", - starterRepoStatus: input.patch.starterRepoStatus ?? "pending", - starterRepoStarredAt: input.patch.starterRepoStarredAt ?? null, - starterRepoSkippedAt: input.patch.starterRepoSkippedAt ?? null, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: userProfiles.userId, - set: { - ...(input.patch.githubAccountId !== undefined ? { githubAccountId: input.patch.githubAccountId } : {}), - ...(input.patch.githubLogin !== undefined ? { githubLogin: input.patch.githubLogin } : {}), - ...(input.patch.roleLabel !== undefined ? { roleLabel: input.patch.roleLabel } : {}), - ...(input.patch.eligibleOrganizationIdsJson !== undefined ? { eligibleOrganizationIdsJson: input.patch.eligibleOrganizationIdsJson } : {}), - ...(input.patch.starterRepoStatus !== undefined ? { starterRepoStatus: input.patch.starterRepoStatus } : {}), - ...(input.patch.starterRepoStarredAt !== undefined ? { starterRepoStarredAt: input.patch.starterRepoStarredAt } : {}), - ...(input.patch.starterRepoSkippedAt !== undefined ? 
{ starterRepoSkippedAt: input.patch.starterRepoSkippedAt } : {}), - updatedAt: now, - }, - }) - .run(); - - return await c.db.select().from(userProfiles).where(eq(userProfiles.userId, input.userId)).get(); - }, - - async upsertSessionState(c, input: { sessionId: string; activeOrganizationId: string | null }) { - const now = Date.now(); - await c.db - .insert(sessionState) - .values({ - sessionId: input.sessionId, - activeOrganizationId: input.activeOrganizationId, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: sessionState.sessionId, - set: { - activeOrganizationId: input.activeOrganizationId, - updatedAt: now, - }, - }) - .run(); - - return await c.db.select().from(sessionState).where(eq(sessionState.sessionId, input.sessionId)).get(); - }, - }, -}); diff --git a/foundry/packages/backend/src/actors/events.ts b/foundry/packages/backend/src/actors/events.ts deleted file mode 100644 index 4a514ad..0000000 --- a/foundry/packages/backend/src/actors/events.ts +++ /dev/null @@ -1,104 +0,0 @@ -import type { TaskStatus, SandboxProviderId } from "@sandbox-agent/foundry-shared"; - -export interface TaskCreatedEvent { - organizationId: string; - repoId: string; - taskId: string; - sandboxProviderId: SandboxProviderId; - branchName: string; - title: string; -} - -export interface TaskStatusEvent { - organizationId: string; - repoId: string; - taskId: string; - status: TaskStatus; - message: string; -} - -export interface RepositorySnapshotEvent { - organizationId: string; - repoId: string; - updatedAt: number; -} - -export interface AgentStartedEvent { - organizationId: string; - repoId: string; - taskId: string; - sessionId: string; -} - -export interface AgentIdleEvent { - organizationId: string; - repoId: string; - taskId: string; - sessionId: string; -} - -export interface AgentErrorEvent { - organizationId: string; - repoId: string; - taskId: string; - message: string; -} - -export interface PrCreatedEvent { - organizationId: string; - repoId: 
string; - taskId: string; - prNumber: number; - url: string; -} - -export interface PrClosedEvent { - organizationId: string; - repoId: string; - taskId: string; - prNumber: number; - merged: boolean; -} - -export interface PrReviewEvent { - organizationId: string; - repoId: string; - taskId: string; - prNumber: number; - reviewer: string; - status: string; -} - -export interface CiStatusChangedEvent { - organizationId: string; - repoId: string; - taskId: string; - prNumber: number; - status: string; -} - -export type TaskStepName = "auto_commit" | "push" | "pr_submit"; -export type TaskStepStatus = "started" | "completed" | "skipped" | "failed"; - -export interface TaskStepEvent { - organizationId: string; - repoId: string; - taskId: string; - step: TaskStepName; - status: TaskStepStatus; - message: string; -} - -export interface BranchSwitchedEvent { - organizationId: string; - repoId: string; - taskId: string; - branchName: string; -} - -export interface SessionAttachedEvent { - organizationId: string; - repoId: string; - taskId: string; - sessionId: string; -} diff --git a/foundry/packages/backend/src/actors/github-data/db/migrations.ts b/foundry/packages/backend/src/actors/github-data/db/migrations.ts index 87cc76f..6584968 100644 --- a/foundry/packages/backend/src/actors/github-data/db/migrations.ts +++ b/foundry/packages/backend/src/actors/github-data/db/migrations.ts @@ -18,6 +18,12 @@ const journal = { tag: "0002_github_branches", breakpoints: true, }, + { + idx: 3, + when: 1773907200000, + tag: "0003_sync_progress", + breakpoints: true, + }, ], } as const; @@ -32,7 +38,8 @@ export default { \`installation_id\` integer, \`last_sync_label\` text NOT NULL, \`last_sync_at\` integer, - \`updated_at\` integer NOT NULL + \`updated_at\` integer NOT NULL, + CONSTRAINT \`github_meta_singleton_id_check\` CHECK(\`id\` = 1) ); --> statement-breakpoint CREATE TABLE \`github_repositories\` ( @@ -78,6 +85,22 @@ CREATE TABLE \`github_pull_requests\` ( \`commit_sha\` text 
NOT NULL, \`updated_at\` integer NOT NULL ); +`, + m0003: `ALTER TABLE \`github_meta\` ADD \`sync_generation\` integer NOT NULL DEFAULT 0; +--> statement-breakpoint +ALTER TABLE \`github_meta\` ADD \`sync_phase\` text; +--> statement-breakpoint +ALTER TABLE \`github_meta\` ADD \`processed_repository_count\` integer NOT NULL DEFAULT 0; +--> statement-breakpoint +ALTER TABLE \`github_meta\` ADD \`total_repository_count\` integer NOT NULL DEFAULT 0; +--> statement-breakpoint +ALTER TABLE \`github_repositories\` ADD \`sync_generation\` integer NOT NULL DEFAULT 0; +--> statement-breakpoint +ALTER TABLE \`github_members\` ADD \`sync_generation\` integer NOT NULL DEFAULT 0; +--> statement-breakpoint +ALTER TABLE \`github_pull_requests\` ADD \`sync_generation\` integer NOT NULL DEFAULT 0; +--> statement-breakpoint +ALTER TABLE \`github_branches\` ADD \`sync_generation\` integer NOT NULL DEFAULT 0; `, } as const, }; diff --git a/foundry/packages/backend/src/actors/github-data/db/schema.ts b/foundry/packages/backend/src/actors/github-data/db/schema.ts index fe37863..a11ac9a 100644 --- a/foundry/packages/backend/src/actors/github-data/db/schema.ts +++ b/foundry/packages/backend/src/actors/github-data/db/schema.ts @@ -1,15 +1,24 @@ -import { integer, sqliteTable, text } from "rivetkit/db/drizzle"; +import { check, integer, sqliteTable, text } from "rivetkit/db/drizzle"; +import { sql } from "drizzle-orm"; -export const githubMeta = sqliteTable("github_meta", { - id: integer("id").primaryKey(), - connectedAccount: text("connected_account").notNull(), - installationStatus: text("installation_status").notNull(), - syncStatus: text("sync_status").notNull(), - installationId: integer("installation_id"), - lastSyncLabel: text("last_sync_label").notNull(), - lastSyncAt: integer("last_sync_at"), - updatedAt: integer("updated_at").notNull(), -}); +export const githubMeta = sqliteTable( + "github_meta", + { + id: integer("id").primaryKey(), + connectedAccount: 
text("connected_account").notNull(), + installationStatus: text("installation_status").notNull(), + syncStatus: text("sync_status").notNull(), + installationId: integer("installation_id"), + lastSyncLabel: text("last_sync_label").notNull(), + lastSyncAt: integer("last_sync_at"), + syncGeneration: integer("sync_generation").notNull(), + syncPhase: text("sync_phase"), + processedRepositoryCount: integer("processed_repository_count").notNull(), + totalRepositoryCount: integer("total_repository_count").notNull(), + updatedAt: integer("updated_at").notNull(), + }, + (table) => [check("github_meta_singleton_id_check", sql`${table.id} = 1`)], +); export const githubRepositories = sqliteTable("github_repositories", { repoId: text("repo_id").notNull().primaryKey(), @@ -17,6 +26,7 @@ export const githubRepositories = sqliteTable("github_repositories", { cloneUrl: text("clone_url").notNull(), private: integer("private").notNull(), defaultBranch: text("default_branch").notNull(), + syncGeneration: integer("sync_generation").notNull(), updatedAt: integer("updated_at").notNull(), }); @@ -25,6 +35,7 @@ export const githubBranches = sqliteTable("github_branches", { repoId: text("repo_id").notNull(), branchName: text("branch_name").notNull(), commitSha: text("commit_sha").notNull(), + syncGeneration: integer("sync_generation").notNull(), updatedAt: integer("updated_at").notNull(), }); @@ -35,6 +46,7 @@ export const githubMembers = sqliteTable("github_members", { email: text("email"), role: text("role"), state: text("state").notNull(), + syncGeneration: integer("sync_generation").notNull(), updatedAt: integer("updated_at").notNull(), }); @@ -51,5 +63,6 @@ export const githubPullRequests = sqliteTable("github_pull_requests", { baseRefName: text("base_ref_name").notNull(), authorLogin: text("author_login"), isDraft: integer("is_draft").notNull(), + syncGeneration: integer("sync_generation").notNull(), updatedAt: integer("updated_at").notNull(), }); diff --git 
a/foundry/packages/backend/src/actors/github-data/index.ts b/foundry/packages/backend/src/actors/github-data/index.ts index 08c815d..a7d65a0 100644 --- a/foundry/packages/backend/src/actors/github-data/index.ts +++ b/foundry/packages/backend/src/actors/github-data/index.ts @@ -1,16 +1,20 @@ // @ts-nocheck -import { eq } from "drizzle-orm"; -import { actor, queue } from "rivetkit"; -import { workflow, Loop } from "rivetkit/workflow"; +import { eq, inArray } from "drizzle-orm"; +import { actor } from "rivetkit"; import type { FoundryOrganization } from "@sandbox-agent/foundry-shared"; import { getActorRuntimeContext } from "../context.js"; import { getOrCreateOrganization, getTask } from "../handles.js"; import { repoIdFromRemote } from "../../services/repo.js"; import { resolveOrganizationGithubAuth } from "../../services/github-auth.js"; +// actions called directly (no queue) import { githubDataDb } from "./db/db.js"; import { githubBranches, githubMembers, githubMeta, githubPullRequests, githubRepositories } from "./db/schema.js"; +// workflow.ts is no longer used — commands are actions now const META_ROW_ID = 1; +const SYNC_REPOSITORY_BATCH_SIZE = 10; + +type GithubSyncPhase = "discovering_repositories" | "syncing_repositories" | "syncing_branches" | "syncing_members" | "syncing_pull_requests"; interface GithubDataInput { organizationId: string; @@ -70,6 +74,8 @@ interface ClearStateInput { label: string; } +// sendOrganizationCommand removed — org actions called directly + interface PullRequestWebhookInput { connectedAccount: string; installationStatus: FoundryOrganization["github"]["installationStatus"]; @@ -93,6 +99,19 @@ interface PullRequestWebhookInput { }; } +interface GithubMetaState { + connectedAccount: string; + installationStatus: FoundryOrganization["github"]["installationStatus"]; + syncStatus: FoundryOrganization["github"]["syncStatus"]; + installationId: number | null; + lastSyncLabel: string; + lastSyncAt: number | null; + syncGeneration: number; 
+ syncPhase: GithubSyncPhase | null; + processedRepositoryCount: number; + totalRepositoryCount: number; +} + function normalizePrStatus(input: { state: string; isDraft?: boolean; merged?: boolean }): "OPEN" | "DRAFT" | "CLOSED" | "MERGED" { const state = input.state.trim().toUpperCase(); if (input.merged || state === "MERGED") return "MERGED"; @@ -106,6 +125,7 @@ function pullRequestSummaryFromRow(row: any) { repoId: row.repoId, repoFullName: row.repoFullName, number: row.number, + status: Boolean(row.isDraft) ? "draft" : "ready", title: row.title, state: row.state, url: row.url, @@ -117,7 +137,18 @@ function pullRequestSummaryFromRow(row: any) { }; } -async function readMeta(c: any) { +function chunkItems(items: T[], size: number): T[][] { + if (items.length === 0) { + return []; + } + const chunks: T[][] = []; + for (let index = 0; index < items.length; index += size) { + chunks.push(items.slice(index, index + size)); + } + return chunks; +} + +export async function readMeta(c: any): Promise { const row = await c.db.select().from(githubMeta).where(eq(githubMeta.id, META_ROW_ID)).get(); return { connectedAccount: row?.connectedAccount ?? "", @@ -126,10 +157,14 @@ async function readMeta(c: any) { installationId: row?.installationId ?? null, lastSyncLabel: row?.lastSyncLabel ?? "Waiting for first import", lastSyncAt: row?.lastSyncAt ?? null, + syncGeneration: row?.syncGeneration ?? 0, + syncPhase: (row?.syncPhase ?? null) as GithubSyncPhase | null, + processedRepositoryCount: row?.processedRepositoryCount ?? 0, + totalRepositoryCount: row?.totalRepositoryCount ?? 
0, }; } -async function writeMeta(c: any, patch: Partial>>) { +async function writeMeta(c: any, patch: Partial) { const current = await readMeta(c); const next = { ...current, @@ -145,6 +180,10 @@ async function writeMeta(c: any, patch: Partial): Promise { + const meta = await writeMeta(c, patch); + const organization = await getOrCreateOrganization(c, c.state.organizationId); + await organization.commandApplyGithubSyncProgress({ + connectedAccount: meta.connectedAccount, + installationStatus: meta.installationStatus, + installationId: meta.installationId, + syncStatus: meta.syncStatus, + lastSyncLabel: meta.lastSyncLabel, + lastSyncAt: meta.lastSyncAt, + syncGeneration: meta.syncGeneration, + syncPhase: meta.syncPhase, + processedRepositoryCount: meta.processedRepositoryCount, + totalRepositoryCount: meta.totalRepositoryCount, + }); + return meta; +} + async function getOrganizationContext(c: any, overrides?: FullSyncInput) { + // Try to read the org profile for fallback values, but don't require it. + // Webhook-triggered syncs can arrive before the user signs in and creates the + // org profile row. The webhook callers already pass the necessary overrides + // (connectedAccount, installationId, githubLogin, kind), so we can proceed + // without the profile as long as overrides cover the required fields. const organizationHandle = await getOrCreateOrganization(c, c.state.organizationId); const organizationState = await organizationHandle.getOrganizationShellStateIfInitialized({}); - if (!organizationState) { - throw new Error(`Organization ${c.state.organizationId} is not initialized`); + + // If the org profile doesn't exist and overrides don't provide enough context, fail. + if (!organizationState && !overrides?.connectedAccount) { + throw new Error(`Organization ${c.state.organizationId} is not initialized and no override context was provided`); } + const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId); return { - kind: overrides?.kind ?? 
organizationState.snapshot.kind, - githubLogin: overrides?.githubLogin ?? organizationState.githubLogin, - connectedAccount: overrides?.connectedAccount ?? organizationState.snapshot.github.connectedAccount ?? organizationState.githubLogin, - installationId: overrides?.installationId ?? organizationState.githubInstallationId ?? null, + kind: overrides?.kind ?? organizationState?.snapshot.kind, + githubLogin: overrides?.githubLogin ?? organizationState?.githubLogin, + connectedAccount: overrides?.connectedAccount ?? organizationState?.snapshot.github.connectedAccount ?? organizationState?.githubLogin, + installationId: overrides?.installationId ?? organizationState?.githubInstallationId ?? null, installationStatus: overrides?.installationStatus ?? - organizationState.snapshot.github.installationStatus ?? - (organizationState.snapshot.kind === "personal" ? "connected" : "reconnect_required"), + organizationState?.snapshot.github.installationStatus ?? + (organizationState?.snapshot.kind === "personal" ? "connected" : "reconnect_required"), accessToken: overrides?.accessToken ?? auth?.githubToken ?? null, }; } -async function replaceRepositories(c: any, repositories: GithubRepositoryRecord[], updatedAt: number) { - await c.db.delete(githubRepositories).run(); +async function upsertRepositories(c: any, repositories: GithubRepositoryRecord[], updatedAt: number, syncGeneration: number) { for (const repository of repositories) { await c.db .insert(githubRepositories) @@ -194,14 +262,35 @@ async function replaceRepositories(c: any, repositories: GithubRepositoryRecord[ cloneUrl: repository.cloneUrl, private: repository.private ? 1 : 0, defaultBranch: repository.defaultBranch, + syncGeneration, updatedAt, }) + .onConflictDoUpdate({ + target: githubRepositories.repoId, + set: { + fullName: repository.fullName, + cloneUrl: repository.cloneUrl, + private: repository.private ? 
1 : 0, + defaultBranch: repository.defaultBranch, + syncGeneration, + updatedAt, + }, + }) .run(); } } -async function replaceBranches(c: any, branches: GithubBranchRecord[], updatedAt: number) { - await c.db.delete(githubBranches).run(); +async function sweepRepositories(c: any, syncGeneration: number) { + const rows = await c.db.select({ repoId: githubRepositories.repoId, syncGeneration: githubRepositories.syncGeneration }).from(githubRepositories).all(); + for (const row of rows) { + if (row.syncGeneration === syncGeneration) { + continue; + } + await c.db.delete(githubRepositories).where(eq(githubRepositories.repoId, row.repoId)).run(); + } +} + +async function upsertBranches(c: any, branches: GithubBranchRecord[], updatedAt: number, syncGeneration: number) { for (const branch of branches) { await c.db .insert(githubBranches) @@ -210,14 +299,34 @@ async function replaceBranches(c: any, branches: GithubBranchRecord[], updatedAt repoId: branch.repoId, branchName: branch.branchName, commitSha: branch.commitSha, + syncGeneration, updatedAt, }) + .onConflictDoUpdate({ + target: githubBranches.branchId, + set: { + repoId: branch.repoId, + branchName: branch.branchName, + commitSha: branch.commitSha, + syncGeneration, + updatedAt, + }, + }) .run(); } } -async function replaceMembers(c: any, members: GithubMemberRecord[], updatedAt: number) { - await c.db.delete(githubMembers).run(); +async function sweepBranches(c: any, syncGeneration: number) { + const rows = await c.db.select({ branchId: githubBranches.branchId, syncGeneration: githubBranches.syncGeneration }).from(githubBranches).all(); + for (const row of rows) { + if (row.syncGeneration === syncGeneration) { + continue; + } + await c.db.delete(githubBranches).where(eq(githubBranches.branchId, row.branchId)).run(); + } +} + +async function upsertMembers(c: any, members: GithubMemberRecord[], updatedAt: number, syncGeneration: number) { for (const member of members) { await c.db .insert(githubMembers) @@ -228,14 
+337,36 @@ async function replaceMembers(c: any, members: GithubMemberRecord[], updatedAt: email: member.email ?? null, role: member.role ?? null, state: member.state ?? "active", + syncGeneration, updatedAt, }) + .onConflictDoUpdate({ + target: githubMembers.memberId, + set: { + login: member.login, + displayName: member.name || member.login, + email: member.email ?? null, + role: member.role ?? null, + state: member.state ?? "active", + syncGeneration, + updatedAt, + }, + }) .run(); } } -async function replacePullRequests(c: any, pullRequests: GithubPullRequestRecord[]) { - await c.db.delete(githubPullRequests).run(); +async function sweepMembers(c: any, syncGeneration: number) { + const rows = await c.db.select({ memberId: githubMembers.memberId, syncGeneration: githubMembers.syncGeneration }).from(githubMembers).all(); + for (const row of rows) { + if (row.syncGeneration === syncGeneration) { + continue; + } + await c.db.delete(githubMembers).where(eq(githubMembers.memberId, row.memberId)).run(); + } +} + +async function upsertPullRequests(c: any, pullRequests: GithubPullRequestRecord[], syncGeneration: number) { for (const pullRequest of pullRequests) { await c.db .insert(githubPullRequests) @@ -252,19 +383,51 @@ async function replacePullRequests(c: any, pullRequests: GithubPullRequestRecord baseRefName: pullRequest.baseRefName, authorLogin: pullRequest.authorLogin ?? null, isDraft: pullRequest.isDraft ? 1 : 0, + syncGeneration, updatedAt: pullRequest.updatedAt, }) + .onConflictDoUpdate({ + target: githubPullRequests.prId, + set: { + repoId: pullRequest.repoId, + repoFullName: pullRequest.repoFullName, + number: pullRequest.number, + title: pullRequest.title, + body: pullRequest.body ?? null, + state: pullRequest.state, + url: pullRequest.url, + headRefName: pullRequest.headRefName, + baseRefName: pullRequest.baseRefName, + authorLogin: pullRequest.authorLogin ?? null, + isDraft: pullRequest.isDraft ? 
1 : 0, + syncGeneration, + updatedAt: pullRequest.updatedAt, + }, + }) .run(); } } -async function refreshTaskSummaryForBranch(c: any, repoId: string, branchName: string) { +async function sweepPullRequests(c: any, syncGeneration: number) { + const rows = await c.db.select({ prId: githubPullRequests.prId, syncGeneration: githubPullRequests.syncGeneration }).from(githubPullRequests).all(); + for (const row of rows) { + if (row.syncGeneration === syncGeneration) { + continue; + } + await c.db.delete(githubPullRequests).where(eq(githubPullRequests.prId, row.prId)).run(); + } +} + +async function refreshTaskSummaryForBranch(c: any, repoId: string, branchName: string, pullRequest: ReturnType | null) { + const repositoryRecord = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, repoId)).get(); + if (!repositoryRecord) { + return; + } const organization = await getOrCreateOrganization(c, c.state.organizationId); - await organization.refreshTaskSummaryForGithubBranch({ repoId, branchName }); + void organization.commandRefreshTaskSummaryForBranch({ repoId, branchName, pullRequest, repoName: repositoryRecord.fullName ?? 
undefined }).catch(() => {}); } async function emitPullRequestChangeEvents(c: any, beforeRows: any[], afterRows: any[]) { - const organization = await getOrCreateOrganization(c, c.state.organizationId); const beforeById = new Map(beforeRows.map((row) => [row.prId, row])); const afterById = new Map(afterRows.map((row) => [row.prId, row])); @@ -283,24 +446,24 @@ async function emitPullRequestChangeEvents(c: any, beforeRows: any[], afterRows: if (!changed) { continue; } - await organization.applyOpenPullRequestUpdate({ - pullRequest: pullRequestSummaryFromRow(row), - }); - await refreshTaskSummaryForBranch(c, row.repoId, row.headRefName); + await refreshTaskSummaryForBranch(c, row.repoId, row.headRefName, pullRequestSummaryFromRow(row)); } for (const [prId, row] of beforeById) { if (afterById.has(prId)) { continue; } - await organization.removeOpenPullRequest({ prId }); - await refreshTaskSummaryForBranch(c, row.repoId, row.headRefName); + await refreshTaskSummaryForBranch(c, row.repoId, row.headRefName, null); } } async function autoArchiveTaskForClosedPullRequest(c: any, row: any) { + const repositoryRecord = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, row.repoId)).get(); + if (!repositoryRecord) { + return; + } const organization = await getOrCreateOrganization(c, c.state.organizationId); - const match = await organization.findTaskForGithubBranch({ + const match = await organization.findTaskForBranch({ repoId: row.repoId, branchName: row.headRefName, }); @@ -309,7 +472,7 @@ async function autoArchiveTaskForClosedPullRequest(c: any, row: any) { } try { const task = getTask(c, c.state.organizationId, row.repoId, match.taskId); - await task.archive({ reason: `PR ${String(row.state).toLowerCase()}` }); + void task.archive({ reason: `PR ${String(row.state).toLowerCase()}` }).catch(() => {}); } catch { // Best-effort only. Task summary refresh will still clear the PR state. 
} @@ -361,8 +524,7 @@ async function resolveMembers(c: any, context: Awaited>, repositories: GithubRepositoryRecord[], ): Promise { @@ -445,20 +607,13 @@ async function listRepositoryBranchesForContext( })); } -async function resolveBranches( - _c: any, - context: Awaited>, - repositories: GithubRepositoryRecord[], -): Promise { - return (await Promise.all(repositories.map((repository) => listRepositoryBranchesForContext(context, repository)))).flat(); -} - async function refreshRepositoryBranches( c: any, context: Awaited>, repository: GithubRepositoryRecord, updatedAt: number, ): Promise { + const currentMeta = await readMeta(c); const nextBranches = await listRepositoryBranchesForContext(context, repository); await c.db .delete(githubBranches) @@ -473,6 +628,7 @@ async function refreshRepositoryBranches( repoId: branch.repoId, branchName: branch.branchName, commitSha: branch.commitSha, + syncGeneration: currentMeta.syncGeneration, updatedAt, }) .run(); @@ -483,132 +639,254 @@ async function readAllPullRequestRows(c: any) { return await c.db.select().from(githubPullRequests).all(); } -async function runFullSync(c: any, input: FullSyncInput = {}) { - const startedAt = Date.now(); - const beforeRows = await readAllPullRequestRows(c); - const context = await getOrganizationContext(c, input); +/** Config returned by fullSyncSetup, passed to subsequent sync phases. */ +export interface FullSyncConfig { + syncGeneration: number; + startedAt: number; + totalRepositoryCount: number; + connectedAccount: string; + installationStatus: string; + installationId: number | null; + beforePrRows: any[]; +} - await writeMeta(c, { +async function readRepositoriesFromDb(c: any): Promise { + const rows = await c.db.select().from(githubRepositories).all(); + return rows.map((r: any) => ({ + fullName: r.fullName, + cloneUrl: r.cloneUrl, + private: Boolean(r.private), + defaultBranch: r.defaultBranch, + })); +} + +/** + * Phase 1: Discover repositories and persist them. 
+ * Returns the config needed by all subsequent phases, or null if nothing to do. + */ +export async function fullSyncSetup(c: any, input: FullSyncInput = {}): Promise { + const startedAt = Date.now(); + const beforePrRows = await readAllPullRequestRows(c); + const currentMeta = await readMeta(c); + const context = await getOrganizationContext(c, input); + const syncGeneration = currentMeta.syncGeneration + 1; + + await publishSyncProgress(c, { connectedAccount: context.connectedAccount, installationStatus: context.installationStatus, installationId: context.installationId, syncStatus: "syncing", lastSyncLabel: input.label?.trim() || "Syncing GitHub data...", + syncGeneration, + syncPhase: "discovering_repositories", + processedRepositoryCount: 0, + totalRepositoryCount: 0, }); const repositories = await resolveRepositories(c, context); - const branches = await resolveBranches(c, context, repositories); - const members = await resolveMembers(c, context); - const pullRequests = await resolvePullRequests(c, context, repositories); + const totalRepositoryCount = repositories.length; - await replaceRepositories(c, repositories, startedAt); - await replaceBranches(c, branches, startedAt); - await replaceMembers(c, members, startedAt); - await replacePullRequests(c, pullRequests); - - const organization = await getOrCreateOrganization(c, c.state.organizationId); - await organization.applyGithubDataProjection({ + await publishSyncProgress(c, { connectedAccount: context.connectedAccount, installationStatus: context.installationStatus, installationId: context.installationId, - syncStatus: "synced", - lastSyncLabel: repositories.length > 0 ? `Synced ${repositories.length} repositories` : "No repositories available", - lastSyncAt: startedAt, - repositories, + syncStatus: "syncing", + lastSyncLabel: totalRepositoryCount > 0 ? 
`Importing ${totalRepositoryCount} repositories...` : "No repositories available", + syncGeneration, + syncPhase: "syncing_repositories", + processedRepositoryCount: totalRepositoryCount, + totalRepositoryCount, }); - const meta = await writeMeta(c, { - connectedAccount: context.connectedAccount, - installationStatus: context.installationStatus, - installationId: context.installationId, - syncStatus: "synced", - lastSyncLabel: repositories.length > 0 ? `Synced ${repositories.length} repositories` : "No repositories available", - lastSyncAt: startedAt, - }); - - const afterRows = await readAllPullRequestRows(c); - await emitPullRequestChangeEvents(c, beforeRows, afterRows); + await upsertRepositories(c, repositories, startedAt, syncGeneration); return { - ...meta, - repositoryCount: repositories.length, - memberCount: members.length, - pullRequestCount: afterRows.length, + syncGeneration, + startedAt, + totalRepositoryCount, + connectedAccount: context.connectedAccount, + installationStatus: context.installationStatus, + installationId: context.installationId, + beforePrRows, }; } -const GITHUB_DATA_QUEUE_NAMES = ["githubData.command.syncRepos"] as const; +/** + * Phase 2 (per-batch): Fetch and upsert branches for one batch of repos. + * Returns true when all batches have been processed. + */ +export async function fullSyncBranchBatch(c: any, config: FullSyncConfig, batchIndex: number): Promise { + const repos = await readRepositoriesFromDb(c); + const batches = chunkItems(repos, SYNC_REPOSITORY_BATCH_SIZE); + if (batchIndex >= batches.length) return true; -async function runGithubDataWorkflow(ctx: any): Promise { - // Initial sync: if this actor was just created and has never synced, - // kick off the first full sync automatically. 
- await ctx.step({ - name: "github-data-initial-sync", - timeout: 5 * 60_000, - run: async () => { - const meta = await readMeta(ctx); - if (meta.syncStatus !== "pending") { - return; // Already synced or syncing — skip initial sync - } - try { - await runFullSync(ctx, { label: "Importing repository catalog..." }); - } catch (error) { - // Best-effort initial sync. Write the error to meta so the client - // sees the failure and can trigger a manual retry. - const currentMeta = await readMeta(ctx); - const organization = await getOrCreateOrganization(ctx, ctx.state.organizationId); - await organization.markOrganizationSyncFailed({ - message: error instanceof Error ? error.message : "GitHub import failed", - installationStatus: currentMeta.installationStatus, - }); - } - }, + const batch = batches[batchIndex]!; + const context = await getOrganizationContext(c, { + connectedAccount: config.connectedAccount, + installationStatus: config.installationStatus as any, + installationId: config.installationId, + }); + const batchBranches = (await Promise.all(batch.map((repo) => listRepositoryBranchesForContext(context, repo)))).flat(); + await upsertBranches(c, batchBranches, config.startedAt, config.syncGeneration); + + const processedCount = Math.min((batchIndex + 1) * SYNC_REPOSITORY_BATCH_SIZE, repos.length); + await publishSyncProgress(c, { + connectedAccount: config.connectedAccount, + installationStatus: config.installationStatus, + installationId: config.installationId, + syncStatus: "syncing", + lastSyncLabel: `Synced branches for ${processedCount} of ${repos.length} repositories`, + syncGeneration: config.syncGeneration, + syncPhase: "syncing_branches", + processedRepositoryCount: processedCount, + totalRepositoryCount: repos.length, }); - // Command loop for explicit sync requests (reload, re-import, etc.) 
- await ctx.loop("github-data-command-loop", async (loopCtx: any) => { - const msg = await loopCtx.queue.next("next-github-data-command", { - names: [...GITHUB_DATA_QUEUE_NAMES], - completable: true, - }); - if (!msg) { - return Loop.continue(undefined); - } + return false; +} - try { - if (msg.name === "githubData.command.syncRepos") { - await loopCtx.step({ - name: "github-data-sync-repos", - timeout: 5 * 60_000, - run: async () => { - const body = msg.body as FullSyncInput; - await runFullSync(loopCtx, body); - }, - }); - await msg.complete({ ok: true }); - return Loop.continue(undefined); - } - } catch (error) { - const message = error instanceof Error ? error.message : String(error); - await msg.complete({ error: message }).catch(() => {}); - } +/** + * Phase 3: Resolve, upsert, and sweep members. + */ +export async function fullSyncMembers(c: any, config: FullSyncConfig): Promise { + await publishSyncProgress(c, { + connectedAccount: config.connectedAccount, + installationStatus: config.installationStatus, + installationId: config.installationId, + syncStatus: "syncing", + lastSyncLabel: "Syncing GitHub members...", + syncGeneration: config.syncGeneration, + syncPhase: "syncing_members", + processedRepositoryCount: config.totalRepositoryCount, + totalRepositoryCount: config.totalRepositoryCount, + }); - return Loop.continue(undefined); + const context = await getOrganizationContext(c, { + connectedAccount: config.connectedAccount, + installationStatus: config.installationStatus as any, + installationId: config.installationId, + }); + const members = await resolveMembers(c, context); + await upsertMembers(c, members, config.startedAt, config.syncGeneration); + await sweepMembers(c, config.syncGeneration); +} + +/** + * Phase 4 (per-batch): Fetch and upsert pull requests for one batch of repos. + * Returns true when all batches have been processed. 
+ */ +export async function fullSyncPullRequestBatch(c: any, config: FullSyncConfig, batchIndex: number): Promise { + const repos = await readRepositoriesFromDb(c); + const batches = chunkItems(repos, SYNC_REPOSITORY_BATCH_SIZE); + if (batchIndex >= batches.length) return true; + + const batch = batches[batchIndex]!; + const context = await getOrganizationContext(c, { + connectedAccount: config.connectedAccount, + installationStatus: config.installationStatus as any, + installationId: config.installationId, + }); + const batchPRs = await listPullRequestsForRepositories(context, batch); + await upsertPullRequests(c, batchPRs, config.syncGeneration); + + const processedCount = Math.min((batchIndex + 1) * SYNC_REPOSITORY_BATCH_SIZE, repos.length); + await publishSyncProgress(c, { + connectedAccount: config.connectedAccount, + installationStatus: config.installationStatus, + installationId: config.installationId, + syncStatus: "syncing", + lastSyncLabel: `Synced pull requests for ${processedCount} of ${repos.length} repositories`, + syncGeneration: config.syncGeneration, + syncPhase: "syncing_pull_requests", + processedRepositoryCount: processedCount, + totalRepositoryCount: repos.length, + }); + + return false; +} + +/** + * Phase 5: Sweep stale data, publish final state, emit PR change events. + */ +export async function fullSyncFinalize(c: any, config: FullSyncConfig): Promise { + await sweepBranches(c, config.syncGeneration); + await sweepPullRequests(c, config.syncGeneration); + await sweepRepositories(c, config.syncGeneration); + + await publishSyncProgress(c, { + connectedAccount: config.connectedAccount, + installationStatus: config.installationStatus, + installationId: config.installationId, + syncStatus: "synced", + lastSyncLabel: config.totalRepositoryCount > 0 ? 
`Synced ${config.totalRepositoryCount} repositories` : "No repositories available", + lastSyncAt: config.startedAt, + syncGeneration: config.syncGeneration, + syncPhase: null, + processedRepositoryCount: config.totalRepositoryCount, + totalRepositoryCount: config.totalRepositoryCount, + }); + + const afterRows = await readAllPullRequestRows(c); + await emitPullRequestChangeEvents(c, config.beforePrRows, afterRows); +} + +/** + * Error handler: publish error sync state when a full sync fails. + */ +/** + * Single-shot full sync: runs all phases (setup, branches, members, PRs, finalize) + * using native JS loops. This must NOT use workflow primitives (step/loop/sleep) + * because it runs inside a workflow step. See workflow.ts for context on why + * sub-loops cause HistoryDivergedError. + */ +export async function runFullSync(c: any, input: FullSyncInput = {}): Promise { + const config = await fullSyncSetup(c, input); + + // Branches — native loop over batches + for (let i = 0; ; i++) { + const done = await fullSyncBranchBatch(c, config, i); + if (done) break; + } + + // Members + await fullSyncMembers(c, config); + + // Pull requests — native loop over batches + for (let i = 0; ; i++) { + const done = await fullSyncPullRequestBatch(c, config, i); + if (done) break; + } + + // Finalize + await fullSyncFinalize(c, config); +} + +export async function fullSyncError(c: any, error: unknown): Promise { + const currentMeta = await readMeta(c); + const message = error instanceof Error ? 
error.message : "GitHub import failed"; + await publishSyncProgress(c, { + connectedAccount: currentMeta.connectedAccount, + installationStatus: currentMeta.installationStatus, + installationId: currentMeta.installationId, + syncStatus: "error", + lastSyncLabel: message, + syncGeneration: currentMeta.syncGeneration, + syncPhase: null, + processedRepositoryCount: 0, + totalRepositoryCount: 0, }); } export const githubData = actor({ db: githubDataDb, - queues: Object.fromEntries(GITHUB_DATA_QUEUE_NAMES.map((name) => [name, queue()])), options: { name: "GitHub Data", icon: "github", - actionTimeout: 5 * 60_000, + actionTimeout: 10 * 60_000, }, createState: (_c, input: GithubDataInput) => ({ organizationId: input.organizationId, }), - run: workflow(runGithubDataWorkflow), actions: { async getSummary(c) { const repositories = await c.db.select().from(githubRepositories).all(); @@ -649,9 +927,13 @@ export const githubData = actor({ }; }, - async listPullRequestsForRepository(c, input: { repoId: string }) { - const rows = await c.db.select().from(githubPullRequests).where(eq(githubPullRequests.repoId, input.repoId)).all(); - return rows.map(pullRequestSummaryFromRow); + async listOpenPullRequests(c) { + const rows = await c.db + .select() + .from(githubPullRequests) + .where(inArray(githubPullRequests.state, ["OPEN", "DRAFT"])) + .all(); + return rows.map((row) => pullRequestSummaryFromRow(row)); }, async listBranchesForRepository(c, input: { repoId: string }) { @@ -664,309 +946,215 @@ export const githubData = actor({ .sort((left, right) => left.branchName.localeCompare(right.branchName)); }, - async listOpenPullRequests(c) { - const rows = await c.db.select().from(githubPullRequests).all(); - return rows.map(pullRequestSummaryFromRow).sort((left, right) => right.updatedAtMs - left.updatedAtMs); - }, - - async getPullRequestForBranch(c, input: { repoId: string; branchName: string }) { - const rows = await 
c.db.select().from(githubPullRequests).where(eq(githubPullRequests.repoId, input.repoId)).all(); - const match = rows.find((candidate) => candidate.headRefName === input.branchName) ?? null; - if (!match) { - return null; - } - return { - number: match.number, - status: match.isDraft ? ("draft" as const) : ("ready" as const), - }; - }, - - async fullSync(c, input: FullSyncInput = {}) { - return await runFullSync(c, input); - }, - - async reloadOrganization(c) { - return await runFullSync(c, { label: "Reloading GitHub organization..." }); - }, - - async reloadAllPullRequests(c) { - return await runFullSync(c, { label: "Reloading GitHub pull requests..." }); - }, - - async reloadRepository(c, input: { repoId: string }) { - const context = await getOrganizationContext(c); - const current = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, input.repoId)).get(); - if (!current) { - throw new Error(`Unknown GitHub repository: ${input.repoId}`); - } - const { appShell } = getActorRuntimeContext(); - const repository = - context.installationId != null - ? await appShell.github.getInstallationRepository(context.installationId, current.fullName) - : context.accessToken - ? await appShell.github.getUserRepository(context.accessToken, current.fullName) - : null; - if (!repository) { - throw new Error(`Unable to reload repository: ${current.fullName}`); - } - - const updatedAt = Date.now(); - await c.db - .insert(githubRepositories) - .values({ - repoId: input.repoId, - fullName: repository.fullName, - cloneUrl: repository.cloneUrl, - private: repository.private ? 1 : 0, - defaultBranch: repository.defaultBranch, - updatedAt, - }) - .onConflictDoUpdate({ - target: githubRepositories.repoId, - set: { - fullName: repository.fullName, - cloneUrl: repository.cloneUrl, - private: repository.private ? 
1 : 0, - defaultBranch: repository.defaultBranch, - updatedAt, - }, - }) - .run(); - await refreshRepositoryBranches( - c, - context, - { - fullName: repository.fullName, - cloneUrl: repository.cloneUrl, - private: repository.private, - defaultBranch: repository.defaultBranch, - }, - updatedAt, - ); - - const organization = await getOrCreateOrganization(c, c.state.organizationId); - await organization.applyGithubRepositoryProjection({ - repoId: input.repoId, - remoteUrl: repository.cloneUrl, - }); - return { - repoId: input.repoId, - fullName: repository.fullName, - cloneUrl: repository.cloneUrl, - private: repository.private, - defaultBranch: repository.defaultBranch, - }; - }, - - async reloadPullRequest(c, input: { repoId: string; prNumber: number }) { - const repository = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, input.repoId)).get(); - if (!repository) { - throw new Error(`Unknown GitHub repository: ${input.repoId}`); - } - const context = await getOrganizationContext(c); - const { appShell } = getActorRuntimeContext(); - const pullRequest = - context.installationId != null - ? await appShell.github.getInstallationPullRequest(context.installationId, repository.fullName, input.prNumber) - : context.accessToken - ? 
await appShell.github.getUserPullRequest(context.accessToken, repository.fullName, input.prNumber) - : null; - if (!pullRequest) { - throw new Error(`Unable to reload pull request #${input.prNumber} for ${repository.fullName}`); - } - - const beforeRows = await readAllPullRequestRows(c); - const updatedAt = Date.now(); - const nextState = normalizePrStatus(pullRequest); - const prId = `${input.repoId}#${input.prNumber}`; - if (nextState === "CLOSED" || nextState === "MERGED") { - await c.db.delete(githubPullRequests).where(eq(githubPullRequests.prId, prId)).run(); - } else { - await c.db - .insert(githubPullRequests) - .values({ - prId, - repoId: input.repoId, - repoFullName: repository.fullName, - number: pullRequest.number, - title: pullRequest.title, - body: pullRequest.body ?? null, - state: nextState, - url: pullRequest.url, - headRefName: pullRequest.headRefName, - baseRefName: pullRequest.baseRefName, - authorLogin: pullRequest.authorLogin ?? null, - isDraft: pullRequest.isDraft ? 1 : 0, - updatedAt, - }) - .onConflictDoUpdate({ - target: githubPullRequests.prId, - set: { - title: pullRequest.title, - body: pullRequest.body ?? null, - state: nextState, - url: pullRequest.url, - headRefName: pullRequest.headRefName, - baseRefName: pullRequest.baseRefName, - authorLogin: pullRequest.authorLogin ?? null, - isDraft: pullRequest.isDraft ? 
1 : 0, - updatedAt, - }, - }) - .run(); - } - - const afterRows = await readAllPullRequestRows(c); - await emitPullRequestChangeEvents(c, beforeRows, afterRows); - const closed = afterRows.find((row) => row.prId === prId); - if (!closed && (nextState === "CLOSED" || nextState === "MERGED")) { - const previous = beforeRows.find((row) => row.prId === prId); - if (previous) { - await autoArchiveTaskForClosedPullRequest(c, { - ...previous, - state: nextState, - }); + async syncRepos(c, body: any) { + try { + await runFullSync(c, body); + return { ok: true }; + } catch (error) { + try { + await fullSyncError(c, error); + } catch { + /* best effort */ } + throw error; } - return pullRequestSummaryFromRow( - afterRows.find((row) => row.prId === prId) ?? { - prId, - repoId: input.repoId, - repoFullName: repository.fullName, - number: input.prNumber, - title: pullRequest.title, - state: nextState, - url: pullRequest.url, - headRefName: pullRequest.headRefName, - baseRefName: pullRequest.baseRefName, - authorLogin: pullRequest.authorLogin ?? null, - isDraft: pullRequest.isDraft ? 
1 : 0, - updatedAt, - }, - ); }, - async clearState(c, input: ClearStateInput) { - const beforeRows = await readAllPullRequestRows(c); - await c.db.delete(githubPullRequests).run(); - await c.db.delete(githubBranches).run(); - await c.db.delete(githubRepositories).run(); - await c.db.delete(githubMembers).run(); - await writeMeta(c, { - connectedAccount: input.connectedAccount, - installationStatus: input.installationStatus, - installationId: input.installationId, - syncStatus: "pending", - lastSyncLabel: input.label, - lastSyncAt: null, - }); - - const organization = await getOrCreateOrganization(c, c.state.organizationId); - await organization.applyGithubDataProjection({ - connectedAccount: input.connectedAccount, - installationStatus: input.installationStatus, - installationId: input.installationId, - syncStatus: "pending", - lastSyncLabel: input.label, - lastSyncAt: null, - repositories: [], - }); - await emitPullRequestChangeEvents(c, beforeRows, []); + async reloadRepository(c, body: { repoId: string }) { + return await reloadRepositoryMutation(c, body); }, - async handlePullRequestWebhook(c, input: PullRequestWebhookInput) { - const beforeRows = await readAllPullRequestRows(c); - const repoId = repoIdFromRemote(input.repository.cloneUrl); - const currentRepository = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, repoId)).get(); - const updatedAt = Date.now(); - const state = normalizePrStatus(input.pullRequest); - const prId = `${repoId}#${input.pullRequest.number}`; + async clearState(c, body: any) { + await clearStateMutation(c, body); + return { ok: true }; + }, - await c.db - .insert(githubRepositories) - .values({ - repoId, - fullName: input.repository.fullName, - cloneUrl: input.repository.cloneUrl, - private: input.repository.private ? 1 : 0, - defaultBranch: currentRepository?.defaultBranch ?? input.pullRequest.baseRefName ?? 
"main", - updatedAt, - }) - .onConflictDoUpdate({ - target: githubRepositories.repoId, - set: { - fullName: input.repository.fullName, - cloneUrl: input.repository.cloneUrl, - private: input.repository.private ? 1 : 0, - defaultBranch: currentRepository?.defaultBranch ?? input.pullRequest.baseRefName ?? "main", - updatedAt, - }, - }) - .run(); - - if (state === "CLOSED" || state === "MERGED") { - await c.db.delete(githubPullRequests).where(eq(githubPullRequests.prId, prId)).run(); - } else { - await c.db - .insert(githubPullRequests) - .values({ - prId, - repoId, - repoFullName: input.repository.fullName, - number: input.pullRequest.number, - title: input.pullRequest.title, - body: input.pullRequest.body ?? null, - state, - url: input.pullRequest.url, - headRefName: input.pullRequest.headRefName, - baseRefName: input.pullRequest.baseRefName, - authorLogin: input.pullRequest.authorLogin ?? null, - isDraft: input.pullRequest.isDraft ? 1 : 0, - updatedAt, - }) - .onConflictDoUpdate({ - target: githubPullRequests.prId, - set: { - title: input.pullRequest.title, - body: input.pullRequest.body ?? null, - state, - url: input.pullRequest.url, - headRefName: input.pullRequest.headRefName, - baseRefName: input.pullRequest.baseRefName, - authorLogin: input.pullRequest.authorLogin ?? null, - isDraft: input.pullRequest.isDraft ? 
1 : 0, - updatedAt, - }, - }) - .run(); - } - - await writeMeta(c, { - connectedAccount: input.connectedAccount, - installationStatus: input.installationStatus, - installationId: input.installationId, - syncStatus: "synced", - lastSyncLabel: "GitHub webhook received", - lastSyncAt: updatedAt, - }); - - const organization = await getOrCreateOrganization(c, c.state.organizationId); - await organization.applyGithubRepositoryProjection({ - repoId, - remoteUrl: input.repository.cloneUrl, - }); - - const afterRows = await readAllPullRequestRows(c); - await emitPullRequestChangeEvents(c, beforeRows, afterRows); - if (state === "CLOSED" || state === "MERGED") { - const previous = beforeRows.find((row) => row.prId === prId); - if (previous) { - await autoArchiveTaskForClosedPullRequest(c, { - ...previous, - state, - }); - } - } + async handlePullRequestWebhook(c, body: any) { + await handlePullRequestWebhookMutation(c, body); + return { ok: true }; }, }, }); + +export async function reloadRepositoryMutation(c: any, input: { repoId: string }) { + const context = await getOrganizationContext(c); + const current = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, input.repoId)).get(); + if (!current) { + throw new Error(`Unknown GitHub repository: ${input.repoId}`); + } + const { appShell } = getActorRuntimeContext(); + const repository = + context.installationId != null + ? await appShell.github.getInstallationRepository(context.installationId, current.fullName) + : context.accessToken + ? await appShell.github.getUserRepository(context.accessToken, current.fullName) + : null; + if (!repository) { + throw new Error(`Unable to reload repository: ${current.fullName}`); + } + + const updatedAt = Date.now(); + const currentMeta = await readMeta(c); + await c.db + .insert(githubRepositories) + .values({ + repoId: input.repoId, + fullName: repository.fullName, + cloneUrl: repository.cloneUrl, + private: repository.private ? 
1 : 0, + defaultBranch: repository.defaultBranch, + syncGeneration: currentMeta.syncGeneration, + updatedAt, + }) + .onConflictDoUpdate({ + target: githubRepositories.repoId, + set: { + fullName: repository.fullName, + cloneUrl: repository.cloneUrl, + private: repository.private ? 1 : 0, + defaultBranch: repository.defaultBranch, + syncGeneration: currentMeta.syncGeneration, + updatedAt, + }, + }) + .run(); + await refreshRepositoryBranches( + c, + context, + { + fullName: repository.fullName, + cloneUrl: repository.cloneUrl, + private: repository.private, + defaultBranch: repository.defaultBranch, + }, + updatedAt, + ); + + return { + repoId: input.repoId, + fullName: repository.fullName, + cloneUrl: repository.cloneUrl, + private: repository.private, + defaultBranch: repository.defaultBranch, + }; +} + +export async function clearStateMutation(c: any, input: ClearStateInput) { + const beforeRows = await readAllPullRequestRows(c); + const currentMeta = await readMeta(c); + await c.db.delete(githubPullRequests).run(); + await c.db.delete(githubBranches).run(); + await c.db.delete(githubRepositories).run(); + await c.db.delete(githubMembers).run(); + await writeMeta(c, { + connectedAccount: input.connectedAccount, + installationStatus: input.installationStatus, + installationId: input.installationId, + syncStatus: "pending", + lastSyncLabel: input.label, + lastSyncAt: null, + syncGeneration: currentMeta.syncGeneration, + syncPhase: null, + processedRepositoryCount: 0, + totalRepositoryCount: 0, + }); + + await emitPullRequestChangeEvents(c, beforeRows, []); +} + +export async function handlePullRequestWebhookMutation(c: any, input: PullRequestWebhookInput) { + const beforeRows = await readAllPullRequestRows(c); + const repoId = repoIdFromRemote(input.repository.cloneUrl); + const currentRepository = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, repoId)).get(); + const updatedAt = Date.now(); + const currentMeta = await readMeta(c); 
+ const state = normalizePrStatus(input.pullRequest); + const prId = `${repoId}#${input.pullRequest.number}`; + + await c.db + .insert(githubRepositories) + .values({ + repoId, + fullName: input.repository.fullName, + cloneUrl: input.repository.cloneUrl, + private: input.repository.private ? 1 : 0, + defaultBranch: currentRepository?.defaultBranch ?? input.pullRequest.baseRefName ?? "main", + syncGeneration: currentMeta.syncGeneration, + updatedAt, + }) + .onConflictDoUpdate({ + target: githubRepositories.repoId, + set: { + fullName: input.repository.fullName, + cloneUrl: input.repository.cloneUrl, + private: input.repository.private ? 1 : 0, + defaultBranch: currentRepository?.defaultBranch ?? input.pullRequest.baseRefName ?? "main", + syncGeneration: currentMeta.syncGeneration, + updatedAt, + }, + }) + .run(); + + if (state === "CLOSED" || state === "MERGED") { + await c.db.delete(githubPullRequests).where(eq(githubPullRequests.prId, prId)).run(); + } else { + await c.db + .insert(githubPullRequests) + .values({ + prId, + repoId, + repoFullName: input.repository.fullName, + number: input.pullRequest.number, + title: input.pullRequest.title, + body: input.pullRequest.body ?? null, + state, + url: input.pullRequest.url, + headRefName: input.pullRequest.headRefName, + baseRefName: input.pullRequest.baseRefName, + authorLogin: input.pullRequest.authorLogin ?? null, + isDraft: input.pullRequest.isDraft ? 1 : 0, + syncGeneration: currentMeta.syncGeneration, + updatedAt, + }) + .onConflictDoUpdate({ + target: githubPullRequests.prId, + set: { + title: input.pullRequest.title, + body: input.pullRequest.body ?? null, + state, + url: input.pullRequest.url, + headRefName: input.pullRequest.headRefName, + baseRefName: input.pullRequest.baseRefName, + authorLogin: input.pullRequest.authorLogin ?? null, + isDraft: input.pullRequest.isDraft ? 
1 : 0, + syncGeneration: currentMeta.syncGeneration, + updatedAt, + }, + }) + .run(); + } + + await publishSyncProgress(c, { + connectedAccount: input.connectedAccount, + installationStatus: input.installationStatus, + installationId: input.installationId, + syncStatus: "synced", + lastSyncLabel: "GitHub webhook received", + lastSyncAt: updatedAt, + syncPhase: null, + processedRepositoryCount: 0, + totalRepositoryCount: 0, + }); + + const afterRows = await readAllPullRequestRows(c); + await emitPullRequestChangeEvents(c, beforeRows, afterRows); + if (state === "CLOSED" || state === "MERGED") { + const previous = beforeRows.find((row) => row.prId === prId); + if (previous) { + await autoArchiveTaskForClosedPullRequest(c, { + ...previous, + state, + }); + } + } +} diff --git a/foundry/packages/backend/src/actors/github-data/workflow.ts b/foundry/packages/backend/src/actors/github-data/workflow.ts new file mode 100644 index 0000000..3497381 --- /dev/null +++ b/foundry/packages/backend/src/actors/github-data/workflow.ts @@ -0,0 +1,81 @@ +// @ts-nocheck +import { logActorWarning, resolveErrorMessage } from "../logging.js"; + +// Dynamic imports to break circular dependency: index.ts imports workflow.ts, +// and workflow.ts needs functions from index.ts. +async function getIndexModule() { + return await import("./index.js"); +} + +export const GITHUB_DATA_QUEUE_NAMES = [ + "githubData.command.syncRepos", + "githubData.command.reloadRepository", + "githubData.command.clearState", + "githubData.command.handlePullRequestWebhook", +] as const; + +export type GithubDataQueueName = (typeof GITHUB_DATA_QUEUE_NAMES)[number]; + +export function githubDataWorkflowQueueName(name: GithubDataQueueName): GithubDataQueueName { + return name; +} + +/** + * Plain run handler (no workflow engine). Drains the queue using `c.queue.iter()` + * with completable messages. 
This avoids the RivetKit bug where actors created + * from another actor's workflow context never start their `run: workflow(...)`. + */ +export async function runGithubDataCommandLoop(c: any): Promise { + for await (const msg of c.queue.iter({ names: [...GITHUB_DATA_QUEUE_NAMES], completable: true })) { + try { + if (msg.name === "githubData.command.syncRepos") { + try { + const { runFullSync } = await getIndexModule(); + await runFullSync(c, msg.body); + await msg.complete({ ok: true }); + } catch (error) { + const { fullSyncError } = await getIndexModule(); + try { + await fullSyncError(c, error); + } catch { + /* best effort */ + } + const message = error instanceof Error ? error.message : String(error); + await msg.complete({ error: message }).catch(() => {}); + } + continue; + } + + if (msg.name === "githubData.command.reloadRepository") { + const { reloadRepositoryMutation } = await getIndexModule(); + const result = await reloadRepositoryMutation(c, msg.body); + await msg.complete(result); + continue; + } + + if (msg.name === "githubData.command.clearState") { + const { clearStateMutation } = await getIndexModule(); + await clearStateMutation(c, msg.body); + await msg.complete({ ok: true }); + continue; + } + + if (msg.name === "githubData.command.handlePullRequestWebhook") { + const { handlePullRequestWebhookMutation } = await getIndexModule(); + await handlePullRequestWebhookMutation(c, msg.body); + await msg.complete({ ok: true }); + continue; + } + + logActorWarning("githubData", "unknown queue message", { queueName: msg.name }); + await msg.complete({ error: `Unknown command: ${msg.name}` }); + } catch (error) { + const message = resolveErrorMessage(error); + logActorWarning("githubData", "github-data command failed", { + queueName: msg.name, + error: message, + }); + await msg.complete({ error: message }).catch(() => {}); + } + } +} diff --git a/foundry/packages/backend/src/actors/handles.ts b/foundry/packages/backend/src/actors/handles.ts index 
bd17fb0..2cc83d9 100644 --- a/foundry/packages/backend/src/actors/handles.ts +++ b/foundry/packages/backend/src/actors/handles.ts @@ -1,4 +1,4 @@ -import { authUserKey, githubDataKey, historyKey, organizationKey, repositoryKey, taskKey, taskSandboxKey } from "./keys.js"; +import { auditLogKey, githubDataKey, organizationKey, taskKey, taskSandboxKey, userKey } from "./keys.js"; export function actorClient(c: any) { return c.client(); @@ -10,28 +10,14 @@ export async function getOrCreateOrganization(c: any, organizationId: string) { }); } -export async function getOrCreateAuthUser(c: any, userId: string) { - return await actorClient(c).authUser.getOrCreate(authUserKey(userId), { +export async function getOrCreateUser(c: any, userId: string) { + return await actorClient(c).user.getOrCreate(userKey(userId), { createWithInput: { userId }, }); } -export function getAuthUser(c: any, userId: string) { - return actorClient(c).authUser.get(authUserKey(userId)); -} - -export async function getOrCreateRepository(c: any, organizationId: string, repoId: string, remoteUrl: string) { - return await actorClient(c).repository.getOrCreate(repositoryKey(organizationId, repoId), { - createWithInput: { - organizationId, - repoId, - remoteUrl, - }, - }); -} - -export function getRepository(c: any, organizationId: string, repoId: string) { - return actorClient(c).repository.get(repositoryKey(organizationId, repoId)); +export function getUser(c: any, userId: string) { + return actorClient(c).user.get(userKey(userId)); } export function getTask(c: any, organizationId: string, repoId: string, taskId: string) { @@ -44,11 +30,10 @@ export async function getOrCreateTask(c: any, organizationId: string, repoId: st }); } -export async function getOrCreateHistory(c: any, organizationId: string, repoId: string) { - return await actorClient(c).history.getOrCreate(historyKey(organizationId, repoId), { +export async function getOrCreateAuditLog(c: any, organizationId: string) { + return await 
actorClient(c).auditLog.getOrCreate(auditLogKey(organizationId), { createWithInput: { organizationId, - repoId, }, }); } @@ -75,8 +60,8 @@ export async function getOrCreateTaskSandbox(c: any, organizationId: string, san }); } -export function selfHistory(c: any) { - return actorClient(c).history.getForId(c.actorId); +export function selfAuditLog(c: any) { + return actorClient(c).auditLog.getForId(c.actorId); } export function selfTask(c: any) { @@ -87,12 +72,8 @@ export function selfOrganization(c: any) { return actorClient(c).organization.getForId(c.actorId); } -export function selfRepository(c: any) { - return actorClient(c).repository.getForId(c.actorId); -} - -export function selfAuthUser(c: any) { - return actorClient(c).authUser.getForId(c.actorId); +export function selfUser(c: any) { + return actorClient(c).user.getForId(c.actorId); } export function selfGithubData(c: any) { diff --git a/foundry/packages/backend/src/actors/history/db/drizzle.config.ts b/foundry/packages/backend/src/actors/history/db/drizzle.config.ts deleted file mode 100644 index 3b1d8bd..0000000 --- a/foundry/packages/backend/src/actors/history/db/drizzle.config.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { defineConfig } from "rivetkit/db/drizzle"; - -export default defineConfig({ - out: "./src/actors/history/db/drizzle", - schema: "./src/actors/history/db/schema.ts", -}); diff --git a/foundry/packages/backend/src/actors/history/index.ts b/foundry/packages/backend/src/actors/history/index.ts deleted file mode 100644 index fa1373b..0000000 --- a/foundry/packages/backend/src/actors/history/index.ts +++ /dev/null @@ -1,115 +0,0 @@ -// @ts-nocheck -import { and, desc, eq } from "drizzle-orm"; -import { actor, queue } from "rivetkit"; -import { Loop, workflow } from "rivetkit/workflow"; -import type { HistoryEvent } from "@sandbox-agent/foundry-shared"; -import { selfHistory } from "../handles.js"; -import { historyDb } from "./db/db.js"; -import { events } from "./db/schema.js"; - -export 
interface HistoryInput { - organizationId: string; - repoId: string; -} - -export interface AppendHistoryCommand { - kind: string; - taskId?: string; - branchName?: string; - payload: Record; -} - -export interface ListHistoryParams { - branch?: string; - taskId?: string; - limit?: number; -} - -const HISTORY_QUEUE_NAMES = ["history.command.append"] as const; - -async function appendHistoryRow(loopCtx: any, body: AppendHistoryCommand): Promise { - const now = Date.now(); - await loopCtx.db - .insert(events) - .values({ - taskId: body.taskId ?? null, - branchName: body.branchName ?? null, - kind: body.kind, - payloadJson: JSON.stringify(body.payload), - createdAt: now, - }) - .run(); -} - -async function runHistoryWorkflow(ctx: any): Promise { - await ctx.loop("history-command-loop", async (loopCtx: any) => { - const msg = await loopCtx.queue.next("next-history-command", { - names: [...HISTORY_QUEUE_NAMES], - completable: true, - }); - if (!msg) { - return Loop.continue(undefined); - } - - if (msg.name === "history.command.append") { - await loopCtx.step("append-history-row", async () => appendHistoryRow(loopCtx, msg.body as AppendHistoryCommand)); - await msg.complete({ ok: true }); - } - - return Loop.continue(undefined); - }); -} - -export const history = actor({ - db: historyDb, - queues: { - "history.command.append": queue(), - }, - options: { - name: "History", - icon: "database", - }, - createState: (_c, input: HistoryInput) => ({ - organizationId: input.organizationId, - repoId: input.repoId, - }), - actions: { - async append(c, command: AppendHistoryCommand): Promise { - const self = selfHistory(c); - await self.send("history.command.append", command, { wait: true, timeout: 15_000 }); - }, - - async list(c, params?: ListHistoryParams): Promise { - const whereParts = []; - if (params?.taskId) { - whereParts.push(eq(events.taskId, params.taskId)); - } - if (params?.branch) { - whereParts.push(eq(events.branchName, params.branch)); - } - - const base = c.db - 
.select({ - id: events.id, - taskId: events.taskId, - branchName: events.branchName, - kind: events.kind, - payloadJson: events.payloadJson, - createdAt: events.createdAt, - }) - .from(events); - - const rows = await (whereParts.length > 0 ? base.where(and(...whereParts)) : base) - .orderBy(desc(events.createdAt)) - .limit(params?.limit ?? 100) - .all(); - - return rows.map((row) => ({ - ...row, - organizationId: c.state.organizationId, - repoId: c.state.repoId, - })); - }, - }, - run: workflow(runHistoryWorkflow), -}); diff --git a/foundry/packages/backend/src/actors/index.ts b/foundry/packages/backend/src/actors/index.ts index 2f9e566..52bb914 100644 --- a/foundry/packages/backend/src/actors/index.ts +++ b/foundry/packages/backend/src/actors/index.ts @@ -1,9 +1,8 @@ -import { authUser } from "./auth-user/index.js"; +import { user } from "./user/index.js"; import { setup } from "rivetkit"; import { githubData } from "./github-data/index.js"; import { task } from "./task/index.js"; -import { history } from "./history/index.js"; -import { repository } from "./repository/index.js"; +import { auditLog } from "./audit-log/index.js"; import { taskSandbox } from "./sandbox/index.js"; import { organization } from "./organization/index.js"; import { logger } from "../logging.js"; @@ -21,23 +20,20 @@ export const registry = setup({ baseLogger: logger, }, use: { - authUser, + user, organization, - repository, task, taskSandbox, - history, + auditLog, githubData, }, }); export * from "./context.js"; -export * from "./events.js"; -export * from "./auth-user/index.js"; +export * from "./audit-log/index.js"; +export * from "./user/index.js"; export * from "./github-data/index.js"; export * from "./task/index.js"; -export * from "./history/index.js"; export * from "./keys.js"; -export * from "./repository/index.js"; export * from "./sandbox/index.js"; export * from "./organization/index.js"; diff --git a/foundry/packages/backend/src/actors/keys.ts 
b/foundry/packages/backend/src/actors/keys.ts index 59e669e..03bd014 100644 --- a/foundry/packages/backend/src/actors/keys.ts +++ b/foundry/packages/backend/src/actors/keys.ts @@ -4,24 +4,21 @@ export function organizationKey(organizationId: string): ActorKey { return ["org", organizationId]; } -export function authUserKey(userId: string): ActorKey { +export function userKey(userId: string): ActorKey { return ["org", "app", "user", userId]; } -export function repositoryKey(organizationId: string, repoId: string): ActorKey { - return ["org", organizationId, "repository", repoId]; -} - export function taskKey(organizationId: string, repoId: string, taskId: string): ActorKey { - return ["org", organizationId, "repository", repoId, "task", taskId]; + return ["org", organizationId, "task", repoId, taskId]; } export function taskSandboxKey(organizationId: string, sandboxId: string): ActorKey { return ["org", organizationId, "sandbox", sandboxId]; } -export function historyKey(organizationId: string, repoId: string): ActorKey { - return ["org", organizationId, "repository", repoId, "history"]; +/** One audit log per org (not per repo) — see audit-log/index.ts for rationale. 
*/ +export function auditLogKey(organizationId: string): ActorKey { + return ["org", organizationId, "audit-log"]; } export function githubDataKey(organizationId: string): ActorKey { diff --git a/foundry/packages/backend/src/actors/organization/actions.ts b/foundry/packages/backend/src/actors/organization/actions.ts index 70da62b..436765c 100644 --- a/foundry/packages/backend/src/actors/organization/actions.ts +++ b/foundry/packages/backend/src/actors/organization/actions.ts @@ -1,78 +1,29 @@ // @ts-nocheck import { desc, eq } from "drizzle-orm"; -import { Loop } from "rivetkit/workflow"; import type { - CreateTaskInput, - HistoryEvent, - HistoryQueryInput, - ListTasksInput, - SandboxProviderId, - RepoOverview, RepoRecord, - StarSandboxAgentRepoInput, - StarSandboxAgentRepoResult, - SwitchResult, - TaskRecord, - TaskSummary, - TaskWorkbenchChangeModelInput, - TaskWorkbenchCreateTaskInput, - TaskWorkbenchDiffInput, - TaskWorkbenchRenameInput, - TaskWorkbenchRenameSessionInput, - TaskWorkbenchSelectInput, - TaskWorkbenchSetSessionUnreadInput, - TaskWorkbenchSendMessageInput, - TaskWorkbenchSessionInput, - TaskWorkbenchUpdateDraftInput, - WorkbenchOpenPrSummary, - WorkbenchRepositorySummary, - WorkbenchSessionSummary, - WorkbenchTaskSummary, + WorkspaceRepositorySummary, + WorkspaceTaskSummary, OrganizationEvent, + OrganizationGithubSummary, OrganizationSummarySnapshot, OrganizationUseInput, } from "@sandbox-agent/foundry-shared"; -import { getActorRuntimeContext } from "../context.js"; -import { getGithubData, getOrCreateGithubData, getTask, getOrCreateHistory, getOrCreateRepository, selfOrganization } from "../handles.js"; import { logActorWarning, resolveErrorMessage } from "../logging.js"; -import { defaultSandboxProviderId } from "../../sandbox-config.js"; -import { repoIdFromRemote } from "../../services/repo.js"; -import { resolveOrganizationGithubAuth } from "../../services/github-auth.js"; -import { organizationProfile, taskLookup, repos, taskSummaries } from 
"./db/schema.js"; -import { agentTypeForModel } from "../task/workbench.js"; -import { expectQueueResponse } from "../../services/queue.js"; -import { organizationAppActions } from "./app-shell.js"; +import { getOrCreateGithubData } from "../handles.js"; +import { organizationProfile, taskSummaries } from "./db/schema.js"; +import { organizationAppActions } from "./actions/app.js"; +import { organizationBetterAuthActions } from "./actions/better-auth.js"; +import { organizationOnboardingActions } from "./actions/onboarding.js"; +import { organizationGithubActions } from "./actions/github.js"; +import { organizationShellActions } from "./actions/organization.js"; +import { organizationTaskActions } from "./actions/tasks.js"; interface OrganizationState { organizationId: string; } -interface GetTaskInput { - organizationId: string; - taskId: string; -} - -interface TaskProxyActionInput extends GetTaskInput { - reason?: string; -} - -interface RepoOverviewInput { - organizationId: string; - repoId: string; -} - -const ORGANIZATION_QUEUE_NAMES = ["organization.command.createTask", "organization.command.syncGithubSession"] as const; -const SANDBOX_AGENT_REPO = "rivet-dev/sandbox-agent"; - -type OrganizationQueueName = (typeof ORGANIZATION_QUEUE_NAMES)[number]; - -export { ORGANIZATION_QUEUE_NAMES }; - -export function organizationWorkflowQueueName(name: OrganizationQueueName): OrganizationQueueName { - return name; -} - -const ORGANIZATION_PROFILE_ROW_ID = "profile"; +const ORGANIZATION_PROFILE_ROW_ID = 1; function assertOrganization(c: { state: OrganizationState }, organizationId: string): void { if (organizationId !== c.state.organizationId) { @@ -80,64 +31,6 @@ function assertOrganization(c: { state: OrganizationState }, organizationId: str } } -async function resolveRepoId(c: any, taskId: string): Promise { - const row = await c.db.select({ repoId: taskLookup.repoId }).from(taskLookup).where(eq(taskLookup.taskId, taskId)).get(); - - if (!row) { - throw new 
Error(`Unknown task: ${taskId} (not in lookup)`); - } - - return row.repoId; -} - -async function upsertTaskLookupRow(c: any, taskId: string, repoId: string): Promise { - await c.db - .insert(taskLookup) - .values({ - taskId, - repoId, - }) - .onConflictDoUpdate({ - target: taskLookup.taskId, - set: { repoId }, - }) - .run(); -} - -function parseJsonValue(value: string | null | undefined, fallback: T): T { - if (!value) { - return fallback; - } - - try { - return JSON.parse(value) as T; - } catch { - return fallback; - } -} - -async function collectAllTaskSummaries(c: any): Promise { - const repoRows = await c.db.select({ repoId: repos.repoId, remoteUrl: repos.remoteUrl }).from(repos).orderBy(desc(repos.updatedAt)).all(); - - const all: TaskSummary[] = []; - for (const row of repoRows) { - try { - const repository = await getOrCreateRepository(c, c.state.organizationId, row.repoId, row.remoteUrl); - const snapshot = await repository.listTaskSummaries({ includeArchived: true }); - all.push(...snapshot); - } catch (error) { - logActorWarning("organization", "failed collecting tasks for repo", { - organizationId: c.state.organizationId, - repoId: row.repoId, - error: resolveErrorMessage(error), - }); - } - } - - all.sort((a, b) => b.updatedAt - a.updatedAt); - return all; -} - function repoLabelFromRemote(remoteUrl: string): string { try { const url = new URL(remoteUrl.startsWith("http") ? 
remoteUrl : `https://${remoteUrl}`); @@ -152,34 +45,43 @@ function repoLabelFromRemote(remoteUrl: string): string { return remoteUrl; } -function buildRepoSummary(repoRow: { repoId: string; remoteUrl: string; updatedAt: number }, taskRows: WorkbenchTaskSummary[]): WorkbenchRepositorySummary { - const repoTasks = taskRows.filter((task) => task.repoId === repoRow.repoId); - const latestActivityMs = repoTasks.reduce((latest, task) => Math.max(latest, task.updatedAtMs), repoRow.updatedAt); - +function buildGithubSummary(profile: any, importedRepoCount: number): OrganizationGithubSummary { return { - id: repoRow.repoId, - label: repoLabelFromRemote(repoRow.remoteUrl), - taskCount: repoTasks.length, - latestActivityMs, + connectedAccount: profile?.githubConnectedAccount ?? "", + installationStatus: profile?.githubInstallationStatus ?? "install_required", + syncStatus: profile?.githubSyncStatus ?? "pending", + importedRepoCount, + lastSyncLabel: profile?.githubLastSyncLabel ?? "Waiting for first import", + lastSyncAt: profile?.githubLastSyncAt ?? null, + lastWebhookAt: profile?.githubLastWebhookAt ?? null, + lastWebhookEvent: profile?.githubLastWebhookEvent ?? "", + syncGeneration: profile?.githubSyncGeneration ?? 0, + syncPhase: profile?.githubSyncPhase ?? null, + processedRepositoryCount: profile?.githubProcessedRepositoryCount ?? 0, + totalRepositoryCount: profile?.githubTotalRepositoryCount ?? 0, }; } -function taskSummaryRowFromSummary(taskSummary: WorkbenchTaskSummary) { - return { - taskId: taskSummary.id, - repoId: taskSummary.repoId, - title: taskSummary.title, - status: taskSummary.status, - repoName: taskSummary.repoName, - updatedAtMs: taskSummary.updatedAtMs, - branch: taskSummary.branch, - pullRequestJson: JSON.stringify(taskSummary.pullRequest), - sessionsSummaryJson: JSON.stringify(taskSummary.sessionsSummary), - }; -} +/** + * Reads the organization sidebar snapshot from local tables only — no fan-out + * to child actors. 
Task summaries are organization-owned and updated via push + * from task actors. + */ +async function getOrganizationSummarySnapshot(c: any): Promise { + const profile = await c.db.select().from(organizationProfile).where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)).get(); -function taskSummaryFromRow(row: any): WorkbenchTaskSummary { - return { + // Fetch repos + open PRs from github-data actor (single actor, not fan-out) + let repoRows: Array<{ repoId: string; fullName: string; cloneUrl: string; private: boolean; defaultBranch: string }> = []; + let openPullRequests: any[] = []; + try { + const githubData = await getOrCreateGithubData(c, c.state.organizationId); + [repoRows, openPullRequests] = await Promise.all([githubData.listRepositories({}), githubData.listOpenPullRequests({})]); + } catch { + // github-data actor may not exist yet + } + + const summaryRows = await c.db.select().from(taskSummaries).orderBy(desc(taskSummaries.updatedAtMs)).all(); + const summaries = summaryRows.map((row) => ({ id: row.taskId, repoId: row.repoId, title: row.title, @@ -187,219 +89,60 @@ function taskSummaryFromRow(row: any): WorkbenchTaskSummary { repoName: row.repoName, updatedAtMs: row.updatedAtMs, branch: row.branch ?? 
null, - pullRequest: parseJsonValue(row.pullRequestJson, null), - sessionsSummary: parseJsonValue(row.sessionsSummaryJson, []), - }; -} - -async function listOpenPullRequestsSnapshot(c: any, taskRows: WorkbenchTaskSummary[]): Promise { - const githubData = getGithubData(c, c.state.organizationId); - const openPullRequests = await githubData.listOpenPullRequests({}).catch(() => []); - const claimedBranches = new Set(taskRows.filter((task) => task.branch).map((task) => `${task.repoId}:${task.branch}`)); - - return openPullRequests.filter((pullRequest: WorkbenchOpenPrSummary) => !claimedBranches.has(`${pullRequest.repoId}:${pullRequest.headRefName}`)); -} - -async function reconcileWorkbenchProjection(c: any): Promise { - const repoRows = await c.db - .select({ repoId: repos.repoId, remoteUrl: repos.remoteUrl, updatedAt: repos.updatedAt }) - .from(repos) - .orderBy(desc(repos.updatedAt)) - .all(); - - const taskRows: WorkbenchTaskSummary[] = []; - for (const row of repoRows) { - try { - const repository = await getOrCreateRepository(c, c.state.organizationId, row.repoId, row.remoteUrl); - const summaries = await repository.listTaskSummaries({ includeArchived: true }); - for (const summary of summaries) { - try { - await upsertTaskLookupRow(c, summary.taskId, row.repoId); - const task = getTask(c, c.state.organizationId, row.repoId, summary.taskId); - const taskSummary = await task.getTaskSummary({}); - taskRows.push(taskSummary); - await c.db - .insert(taskSummaries) - .values(taskSummaryRowFromSummary(taskSummary)) - .onConflictDoUpdate({ - target: taskSummaries.taskId, - set: taskSummaryRowFromSummary(taskSummary), - }) - .run(); - } catch (error) { - logActorWarning("organization", "failed collecting task summary during reconciliation", { - organizationId: c.state.organizationId, - repoId: row.repoId, - taskId: summary.taskId, - error: resolveErrorMessage(error), - }); - } - } - } catch (error) { - logActorWarning("organization", "failed collecting repo during 
workbench reconciliation", { - organizationId: c.state.organizationId, - repoId: row.repoId, - error: resolveErrorMessage(error), - }); - } - } - - taskRows.sort((left, right) => right.updatedAtMs - left.updatedAtMs); - return { - organizationId: c.state.organizationId, - repos: repoRows.map((row) => buildRepoSummary(row, taskRows)).sort((left, right) => right.latestActivityMs - left.latestActivityMs), - taskSummaries: taskRows, - openPullRequests: await listOpenPullRequestsSnapshot(c, taskRows), - }; -} - -async function requireWorkbenchTask(c: any, taskId: string) { - const repoId = await resolveRepoId(c, taskId); - return getTask(c, c.state.organizationId, repoId, taskId); -} - -/** - * Reads the organization sidebar snapshot from the organization actor's local SQLite - * plus the org-scoped GitHub actor for open PRs. Task actors still push - * summary updates into `task_summaries`, so the hot read path stays bounded. - */ -async function getOrganizationSummarySnapshot(c: any): Promise { - const repoRows = await c.db - .select({ - repoId: repos.repoId, - remoteUrl: repos.remoteUrl, - updatedAt: repos.updatedAt, - }) - .from(repos) - .orderBy(desc(repos.updatedAt)) - .all(); - const taskRows = await c.db.select().from(taskSummaries).orderBy(desc(taskSummaries.updatedAtMs)).all(); - const summaries = taskRows.map(taskSummaryFromRow); + pullRequest: row.pullRequestJson + ? (() => { + try { + return JSON.parse(row.pullRequestJson); + } catch { + return null; + } + })() + : null, + sessionsSummary: row.sessionsSummaryJson + ? 
(() => { + try { + return JSON.parse(row.sessionsSummaryJson); + } catch { + return []; + } + })() + : [], + })); return { organizationId: c.state.organizationId, - repos: repoRows.map((row) => buildRepoSummary(row, summaries)).sort((left, right) => right.latestActivityMs - left.latestActivityMs), + github: buildGithubSummary(profile, repoRows.length), + repos: repoRows + .map((repo) => { + const repoTasks = summaries.filter((t) => t.repoId === repo.repoId); + const latestTaskMs = repoTasks.reduce((latest, t) => Math.max(latest, t.updatedAtMs), 0); + return { + id: repo.repoId, + label: repoLabelFromRemote(repo.cloneUrl), + taskCount: repoTasks.length, + latestActivityMs: latestTaskMs || Date.now(), + }; + }) + .sort((a, b) => b.latestActivityMs - a.latestActivityMs), taskSummaries: summaries, - openPullRequests: await listOpenPullRequestsSnapshot(c, summaries), + openPullRequests, }; } -async function broadcastRepoSummary( - c: any, - type: "repoAdded" | "repoUpdated", - repoRow: { repoId: string; remoteUrl: string; updatedAt: number }, -): Promise { - const matchingTaskRows = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, repoRow.repoId)).all(); - const repo = buildRepoSummary(repoRow, matchingTaskRows.map(taskSummaryFromRow)); - c.broadcast("organizationUpdated", { type, repo } satisfies OrganizationEvent); -} - -async function createTaskMutation(c: any, input: CreateTaskInput): Promise { - assertOrganization(c, input.organizationId); - - const { config } = getActorRuntimeContext(); - const sandboxProviderId = input.sandboxProviderId ?? 
defaultSandboxProviderId(config); - - const repoId = input.repoId; - const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, repoId)).get(); - if (!repoRow) { - throw new Error(`Unknown repo: ${repoId}`); - } - const remoteUrl = repoRow.remoteUrl; - - const repository = await getOrCreateRepository(c, c.state.organizationId, repoId, remoteUrl); - - const created = await repository.createTask({ - task: input.task, - sandboxProviderId, - agentType: input.agentType ?? null, - explicitTitle: input.explicitTitle ?? null, - explicitBranchName: input.explicitBranchName ?? null, - onBranch: input.onBranch ?? null, - }); - - await c.db - .insert(taskLookup) - .values({ - taskId: created.taskId, - repoId, - }) - .onConflictDoUpdate({ - target: taskLookup.taskId, - set: { repoId }, - }) - .run(); - - try { - const task = getTask(c, c.state.organizationId, repoId, created.taskId); - await organizationActions.applyTaskSummaryUpdate(c, { - taskSummary: await task.getTaskSummary({}), - }); - } catch (error) { - logActorWarning("organization", "failed seeding task summary after task creation", { - organizationId: c.state.organizationId, - repoId, - taskId: created.taskId, - error: resolveErrorMessage(error), - }); - } - - return created; -} - -export async function runOrganizationWorkflow(ctx: any): Promise { - await ctx.loop("organization-command-loop", async (loopCtx: any) => { - const msg = await loopCtx.queue.next("next-organization-command", { - names: [...ORGANIZATION_QUEUE_NAMES], - completable: true, - }); - if (!msg) { - return Loop.continue(undefined); - } - - try { - if (msg.name === "organization.command.createTask") { - const result = await loopCtx.step({ - name: "organization-create-task", - timeout: 5 * 60_000, - run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskInput), - }); - await msg.complete(result); - return Loop.continue(undefined); - } - - if (msg.name === "organization.command.syncGithubSession") 
{ - await loopCtx.step({ - name: "organization-sync-github-session", - timeout: 60_000, - run: async () => { - const { syncGithubOrganizations } = await import("./app-shell.js"); - await syncGithubOrganizations(loopCtx, msg.body as { sessionId: string; accessToken: string }); - }, - }); - await msg.complete({ ok: true }); - return Loop.continue(undefined); - } - } catch (error) { - const message = resolveErrorMessage(error); - logActorWarning("organization", "organization workflow command failed", { - queueName: msg.name, - error: message, - }); - await msg.complete({ error: message }).catch((completeError: unknown) => { - logActorWarning("organization", "organization workflow failed completing error response", { - queueName: msg.name, - error: resolveErrorMessage(completeError), - }); - }); - } - - return Loop.continue(undefined); - }); +export async function refreshOrganizationSnapshotMutation(c: any): Promise { + c.broadcast("organizationUpdated", { + type: "organizationUpdated", + snapshot: await getOrganizationSummarySnapshot(c), + } satisfies OrganizationEvent); } export const organizationActions = { + ...organizationBetterAuthActions, + ...organizationGithubActions, + ...organizationOnboardingActions, + ...organizationShellActions, ...organizationAppActions, + ...organizationTaskActions, async useOrganization(c: any, input: OrganizationUseInput): Promise<{ organizationId: string }> { assertOrganization(c, input.organizationId); return { organizationId: c.state.organizationId }; @@ -407,482 +150,98 @@ export const organizationActions = { async listRepos(c: any, input: OrganizationUseInput): Promise { assertOrganization(c, input.organizationId); - - const rows = await c.db - .select({ - repoId: repos.repoId, - remoteUrl: repos.remoteUrl, - createdAt: repos.createdAt, - updatedAt: repos.updatedAt, - }) - .from(repos) - .orderBy(desc(repos.updatedAt)) - .all(); - - return rows.map((row) => ({ - organizationId: c.state.organizationId, - repoId: row.repoId, - 
remoteUrl: row.remoteUrl, - createdAt: row.createdAt, - updatedAt: row.updatedAt, - })); - }, - - async createTask(c: any, input: CreateTaskInput): Promise { - const self = selfOrganization(c); - return expectQueueResponse( - await self.send(organizationWorkflowQueueName("organization.command.createTask"), input, { - wait: true, - timeout: 10_000, - }), - ); - }, - - async starSandboxAgentRepo(c: any, input: StarSandboxAgentRepoInput): Promise { - assertOrganization(c, input.organizationId); - const { driver } = getActorRuntimeContext(); - const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId); - await driver.github.starRepository(SANDBOX_AGENT_REPO, { - githubToken: auth?.githubToken ?? null, - }); - return { - repo: SANDBOX_AGENT_REPO, - starredAt: Date.now(), - }; - }, - - /** - * Called by task actors when their summary-level state changes. - * This is the write path for the local materialized projection; clients read - * the projection via `getOrganizationSummary`, but only task actors should push - * rows into it. 
- */ - async applyTaskSummaryUpdate(c: any, input: { taskSummary: WorkbenchTaskSummary }): Promise { - await c.db - .insert(taskSummaries) - .values(taskSummaryRowFromSummary(input.taskSummary)) - .onConflictDoUpdate({ - target: taskSummaries.taskId, - set: taskSummaryRowFromSummary(input.taskSummary), - }) - .run(); - c.broadcast("organizationUpdated", { type: "taskSummaryUpdated", taskSummary: input.taskSummary } satisfies OrganizationEvent); - }, - - async removeTaskSummary(c: any, input: { taskId: string }): Promise { - await c.db.delete(taskSummaries).where(eq(taskSummaries.taskId, input.taskId)).run(); - c.broadcast("organizationUpdated", { type: "taskRemoved", taskId: input.taskId } satisfies OrganizationEvent); - }, - - async findTaskForGithubBranch(c: any, input: { repoId: string; branchName: string }): Promise<{ taskId: string | null }> { - const summaries = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, input.repoId)).all(); - const existing = summaries.find((summary) => summary.branch === input.branchName); - return { taskId: existing?.taskId ?? 
null }; - }, - - async refreshTaskSummaryForGithubBranch(c: any, input: { repoId: string; branchName: string }): Promise { - const summaries = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, input.repoId)).all(); - const matches = summaries.filter((summary) => summary.branch === input.branchName); - - for (const summary of matches) { - try { - const task = getTask(c, c.state.organizationId, input.repoId, summary.taskId); - await organizationActions.applyTaskSummaryUpdate(c, { - taskSummary: await task.getTaskSummary({}), - }); - } catch (error) { - logActorWarning("organization", "failed refreshing task summary for GitHub branch", { - organizationId: c.state.organizationId, - repoId: input.repoId, - branchName: input.branchName, - taskId: summary.taskId, - error: resolveErrorMessage(error), - }); - } + try { + const githubData = await getOrCreateGithubData(c, c.state.organizationId); + const rows = await githubData.listRepositories({}); + return rows.map((row: any) => ({ + organizationId: c.state.organizationId, + repoId: row.repoId, + remoteUrl: row.cloneUrl, + createdAt: row.updatedAt ?? Date.now(), + updatedAt: row.updatedAt ?? 
Date.now(), + })); + } catch { + return []; } }, - async applyOpenPullRequestUpdate(c: any, input: { pullRequest: WorkbenchOpenPrSummary }): Promise { - const summaries = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, input.pullRequest.repoId)).all(); - if (summaries.some((summary) => summary.branch === input.pullRequest.headRefName)) { - return; - } - c.broadcast("organizationUpdated", { type: "pullRequestUpdated", pullRequest: input.pullRequest } satisfies OrganizationEvent); - }, - - async removeOpenPullRequest(c: any, input: { prId: string }): Promise { - c.broadcast("organizationUpdated", { type: "pullRequestRemoved", prId: input.prId } satisfies OrganizationEvent); - }, - - async applyGithubRepositoryProjection(c: any, input: { repoId: string; remoteUrl: string }): Promise { - const now = Date.now(); - const existing = await c.db.select({ repoId: repos.repoId }).from(repos).where(eq(repos.repoId, input.repoId)).get(); - await c.db - .insert(repos) - .values({ - repoId: input.repoId, - remoteUrl: input.remoteUrl, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: repos.repoId, - set: { - remoteUrl: input.remoteUrl, - updatedAt: now, - }, - }) - .run(); - await broadcastRepoSummary(c, existing ? 
"repoUpdated" : "repoAdded", { - repoId: input.repoId, - remoteUrl: input.remoteUrl, - updatedAt: now, - }); - }, - - async applyGithubDataProjection( - c: any, - input: { - connectedAccount: string; - installationStatus: string; - installationId: number | null; - syncStatus: string; - lastSyncLabel: string; - lastSyncAt: number | null; - repositories: Array<{ fullName: string; cloneUrl: string; private: boolean }>; - }, - ): Promise { - const existingRepos = await c.db.select({ repoId: repos.repoId, remoteUrl: repos.remoteUrl, updatedAt: repos.updatedAt }).from(repos).all(); - const existingById = new Map(existingRepos.map((repo) => [repo.repoId, repo])); - const nextRepoIds = new Set(); - const now = Date.now(); - - for (const repository of input.repositories) { - const repoId = repoIdFromRemote(repository.cloneUrl); - nextRepoIds.add(repoId); - await c.db - .insert(repos) - .values({ - repoId, - remoteUrl: repository.cloneUrl, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: repos.repoId, - set: { - remoteUrl: repository.cloneUrl, - updatedAt: now, - }, - }) - .run(); - await broadcastRepoSummary(c, existingById.has(repoId) ? 
"repoUpdated" : "repoAdded", { - repoId, - remoteUrl: repository.cloneUrl, - updatedAt: now, - }); - } - - for (const repo of existingRepos) { - if (nextRepoIds.has(repo.repoId)) { - continue; - } - await c.db.delete(repos).where(eq(repos.repoId, repo.repoId)).run(); - c.broadcast("organizationUpdated", { type: "repoRemoved", repoId: repo.repoId } satisfies OrganizationEvent); - } - - const profile = await c.db - .select({ id: organizationProfile.id }) - .from(organizationProfile) - .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) - .get(); - if (profile) { - await c.db - .update(organizationProfile) - .set({ - githubConnectedAccount: input.connectedAccount, - githubInstallationStatus: input.installationStatus, - githubSyncStatus: input.syncStatus, - githubInstallationId: input.installationId, - githubLastSyncLabel: input.lastSyncLabel, - githubLastSyncAt: input.lastSyncAt, - updatedAt: now, - }) - .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) - .run(); - } - }, - - async recordGithubWebhookReceipt( - c: any, - input: { - organizationId: string; - event: string; - action?: string | null; - receivedAt?: number; - }, - ): Promise { - assertOrganization(c, input.organizationId); - - const profile = await c.db - .select({ id: organizationProfile.id }) - .from(organizationProfile) - .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) - .get(); - if (!profile) { - return; - } - - await c.db - .update(organizationProfile) - .set({ - githubLastWebhookAt: input.receivedAt ?? Date.now(), - githubLastWebhookEvent: input.action ? 
`${input.event}.${input.action}` : input.event, - }) - .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) - .run(); - }, - async getOrganizationSummary(c: any, input: OrganizationUseInput): Promise { assertOrganization(c, input.organizationId); return await getOrganizationSummarySnapshot(c); }, - - async reconcileWorkbenchState(c: any, input: OrganizationUseInput): Promise { - assertOrganization(c, input.organizationId); - return await reconcileWorkbenchProjection(c); - }, - - async createWorkbenchTask(c: any, input: TaskWorkbenchCreateTaskInput): Promise<{ taskId: string; sessionId?: string }> { - // Step 1: Create the task record (wait: true — local state mutations only). - const created = await organizationActions.createTask(c, { - organizationId: c.state.organizationId, - repoId: input.repoId, - task: input.task, - ...(input.title ? { explicitTitle: input.title } : {}), - ...(input.onBranch ? { onBranch: input.onBranch } : input.branch ? { explicitBranchName: input.branch } : {}), - ...(input.model ? { agentType: agentTypeForModel(input.model) } : {}), - }); - - // Step 2: Enqueue session creation + initial message (wait: false). - // The task workflow creates the session record and sends the message in - // the background. The client observes progress via push events on the - // task subscription topic. 
- const task = await requireWorkbenchTask(c, created.taskId); - await task.createWorkbenchSessionAndSend({ - model: input.model, - text: input.task, - }); - - return { taskId: created.taskId }; - }, - - async markWorkbenchUnread(c: any, input: TaskWorkbenchSelectInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.markWorkbenchUnread({}); - }, - - async renameWorkbenchTask(c: any, input: TaskWorkbenchRenameInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.renameWorkbenchTask(input); - }, - - async renameWorkbenchBranch(c: any, input: TaskWorkbenchRenameInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.renameWorkbenchBranch(input); - }, - - async createWorkbenchSession(c: any, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }> { - const task = await requireWorkbenchTask(c, input.taskId); - return await task.createWorkbenchSession({ ...(input.model ? 
{ model: input.model } : {}) }); - }, - - async renameWorkbenchSession(c: any, input: TaskWorkbenchRenameSessionInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.renameWorkbenchSession(input); - }, - - async setWorkbenchSessionUnread(c: any, input: TaskWorkbenchSetSessionUnreadInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.setWorkbenchSessionUnread(input); - }, - - async updateWorkbenchDraft(c: any, input: TaskWorkbenchUpdateDraftInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.updateWorkbenchDraft(input); - }, - - async changeWorkbenchModel(c: any, input: TaskWorkbenchChangeModelInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.changeWorkbenchModel(input); - }, - - async sendWorkbenchMessage(c: any, input: TaskWorkbenchSendMessageInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.sendWorkbenchMessage(input); - }, - - async stopWorkbenchSession(c: any, input: TaskWorkbenchSessionInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.stopWorkbenchSession(input); - }, - - async closeWorkbenchSession(c: any, input: TaskWorkbenchSessionInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.closeWorkbenchSession(input); - }, - - async publishWorkbenchPr(c: any, input: TaskWorkbenchSelectInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.publishWorkbenchPr({}); - }, - - async revertWorkbenchFile(c: any, input: TaskWorkbenchDiffInput): Promise { - const task = await requireWorkbenchTask(c, input.taskId); - await task.revertWorkbenchFile(input); - }, - - async reloadGithubOrganization(c: any): Promise { - await getOrCreateGithubData(c, c.state.organizationId).reloadOrganization({}); - }, - - async reloadGithubPullRequests(c: any): Promise { - await 
getOrCreateGithubData(c, c.state.organizationId).reloadAllPullRequests({}); - }, - - async reloadGithubRepository(c: any, input: { repoId: string }): Promise { - await getOrCreateGithubData(c, c.state.organizationId).reloadRepository(input); - }, - - async reloadGithubPullRequest(c: any, input: { repoId: string; prNumber: number }): Promise { - await getOrCreateGithubData(c, c.state.organizationId).reloadPullRequest(input); - }, - - async listTasks(c: any, input: ListTasksInput): Promise { - assertOrganization(c, input.organizationId); - - if (input.repoId) { - const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, input.repoId)).get(); - if (!repoRow) { - throw new Error(`Unknown repo: ${input.repoId}`); - } - - const repository = await getOrCreateRepository(c, c.state.organizationId, input.repoId, repoRow.remoteUrl); - return await repository.listTaskSummaries({ includeArchived: true }); - } - - return await collectAllTaskSummaries(c); - }, - - async getRepoOverview(c: any, input: RepoOverviewInput): Promise { - assertOrganization(c, input.organizationId); - - const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, input.repoId)).get(); - if (!repoRow) { - throw new Error(`Unknown repo: ${input.repoId}`); - } - - const repository = await getOrCreateRepository(c, c.state.organizationId, input.repoId, repoRow.remoteUrl); - return await repository.getRepoOverview({}); - }, - - async switchTask(c: any, taskId: string): Promise { - const repoId = await resolveRepoId(c, taskId); - const h = getTask(c, c.state.organizationId, repoId, taskId); - const record = await h.get(); - const switched = await h.switch(); - - return { - organizationId: c.state.organizationId, - taskId, - sandboxProviderId: record.sandboxProviderId, - switchTarget: switched.switchTarget, - }; - }, - - async history(c: any, input: HistoryQueryInput): Promise { - assertOrganization(c, input.organizationId); - - 
const limit = input.limit ?? 20; - const repoRows = await c.db.select({ repoId: repos.repoId }).from(repos).all(); - - const allEvents: HistoryEvent[] = []; - - for (const row of repoRows) { - try { - const hist = await getOrCreateHistory(c, c.state.organizationId, row.repoId); - const items = await hist.list({ - branch: input.branch, - taskId: input.taskId, - limit, - }); - allEvents.push(...items); - } catch (error) { - logActorWarning("organization", "history lookup failed for repo", { - organizationId: c.state.organizationId, - repoId: row.repoId, - error: resolveErrorMessage(error), - }); - } - } - - allEvents.sort((a, b) => b.createdAt - a.createdAt); - return allEvents.slice(0, limit); - }, - - async getTask(c: any, input: GetTaskInput): Promise { - assertOrganization(c, input.organizationId); - - const repoId = await resolveRepoId(c, input.taskId); - - const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, repoId)).get(); - if (!repoRow) { - throw new Error(`Unknown repo: ${repoId}`); - } - - const repository = await getOrCreateRepository(c, c.state.organizationId, repoId, repoRow.remoteUrl); - return await repository.getTaskEnriched({ taskId: input.taskId }); - }, - - async attachTask(c: any, input: TaskProxyActionInput): Promise<{ target: string; sessionId: string | null }> { - assertOrganization(c, input.organizationId); - const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.organizationId, repoId, input.taskId); - return await h.attach({ reason: input.reason }); - }, - - async pushTask(c: any, input: TaskProxyActionInput): Promise { - assertOrganization(c, input.organizationId); - const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.organizationId, repoId, input.taskId); - await h.push({ reason: input.reason }); - }, - - async syncTask(c: any, input: TaskProxyActionInput): Promise { - assertOrganization(c, input.organizationId); - const repoId = 
await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.organizationId, repoId, input.taskId); - await h.sync({ reason: input.reason }); - }, - - async mergeTask(c: any, input: TaskProxyActionInput): Promise { - assertOrganization(c, input.organizationId); - const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.organizationId, repoId, input.taskId); - await h.merge({ reason: input.reason }); - }, - - async archiveTask(c: any, input: TaskProxyActionInput): Promise { - assertOrganization(c, input.organizationId); - const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.organizationId, repoId, input.taskId); - await h.archive({ reason: input.reason }); - }, - - async killTask(c: any, input: TaskProxyActionInput): Promise { - assertOrganization(c, input.organizationId); - const repoId = await resolveRepoId(c, input.taskId); - const h = getTask(c, c.state.organizationId, repoId, input.taskId); - await h.kill({ reason: input.reason }); - }, }; + +export async function applyGithubSyncProgressMutation( + c: any, + input: { + connectedAccount: string; + installationStatus: string; + installationId: number | null; + syncStatus: string; + lastSyncLabel: string; + lastSyncAt: number | null; + syncGeneration: number; + syncPhase: string | null; + processedRepositoryCount: number; + totalRepositoryCount: number; + }, +): Promise { + const profile = await c.db + .select({ id: organizationProfile.id }) + .from(organizationProfile) + .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) + .get(); + if (!profile) { + return; + } + + await c.db + .update(organizationProfile) + .set({ + githubConnectedAccount: input.connectedAccount, + githubInstallationStatus: input.installationStatus, + githubSyncStatus: input.syncStatus, + githubInstallationId: input.installationId, + githubLastSyncLabel: input.lastSyncLabel, + githubLastSyncAt: input.lastSyncAt, + githubSyncGeneration: input.syncGeneration, + 
githubSyncPhase: input.syncPhase, + githubProcessedRepositoryCount: input.processedRepositoryCount, + githubTotalRepositoryCount: input.totalRepositoryCount, + updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) + .run(); + + await refreshOrganizationSnapshotMutation(c); +} + +export async function recordGithubWebhookReceiptMutation( + c: any, + input: { + organizationId: string; + event: string; + action?: string | null; + receivedAt?: number; + }, +): Promise { + assertOrganization(c, input.organizationId); + + const profile = await c.db + .select({ id: organizationProfile.id }) + .from(organizationProfile) + .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) + .get(); + if (!profile) { + return; + } + + await c.db + .update(organizationProfile) + .set({ + githubLastWebhookAt: input.receivedAt ?? Date.now(), + githubLastWebhookEvent: input.action ? `${input.event}.${input.action}` : input.event, + }) + .where(eq(organizationProfile.id, ORGANIZATION_PROFILE_ROW_ID)) + .run(); +} diff --git a/foundry/packages/backend/src/actors/organization/actions/app.ts b/foundry/packages/backend/src/actors/organization/actions/app.ts new file mode 100644 index 0000000..d3cc329 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/app.ts @@ -0,0 +1 @@ +export { organizationAppActions } from "../app-shell.js"; diff --git a/foundry/packages/backend/src/actors/organization/actions/better-auth.ts b/foundry/packages/backend/src/actors/organization/actions/better-auth.ts new file mode 100644 index 0000000..37f34b4 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/better-auth.ts @@ -0,0 +1,323 @@ +import { + and, + asc, + count as sqlCount, + desc, + eq, + gt, + gte, + inArray, + isNotNull, + isNull, + like, + lt, + lte, + ne, + notInArray, + or, +} from "drizzle-orm"; +import { authAccountIndex, authEmailIndex, authSessionIndex, authVerification } from "../db/schema.js"; +import { 
APP_SHELL_ORGANIZATION_ID } from "../constants.js"; + +function assertAppOrganization(c: any): void { + if (c.state.organizationId !== APP_SHELL_ORGANIZATION_ID) { + throw new Error(`App shell action requires organization ${APP_SHELL_ORGANIZATION_ID}, got ${c.state.organizationId}`); + } +} + +function organizationAuthColumn(table: any, field: string): any { + const column = table[field]; + if (!column) { + throw new Error(`Unknown auth table field: ${field}`); + } + return column; +} + +function normalizeAuthValue(value: unknown): unknown { + if (value instanceof Date) { + return value.getTime(); + } + if (Array.isArray(value)) { + return value.map((entry) => normalizeAuthValue(entry)); + } + return value; +} + +function organizationAuthClause(table: any, clause: { field: string; value: unknown; operator?: string }): any { + const column = organizationAuthColumn(table, clause.field); + const value = normalizeAuthValue(clause.value); + switch (clause.operator) { + case "ne": + return value === null ? isNotNull(column) : ne(column, value as any); + case "lt": + return lt(column, value as any); + case "lte": + return lte(column, value as any); + case "gt": + return gt(column, value as any); + case "gte": + return gte(column, value as any); + case "in": + return inArray(column, Array.isArray(value) ? (value as any[]) : [value as any]); + case "not_in": + return notInArray(column, Array.isArray(value) ? (value as any[]) : [value as any]); + case "contains": + return like(column, `%${String(value ?? "")}%`); + case "starts_with": + return like(column, `${String(value ?? "")}%`); + case "ends_with": + return like(column, `%${String(value ?? "")}`); + case "eq": + default: + return value === null ? 
isNull(column) : eq(column, value as any); + } +} + +function organizationBetterAuthWhere(table: any, clauses: any[] | undefined): any { + if (!clauses || clauses.length === 0) { + return undefined; + } + let expr = organizationAuthClause(table, clauses[0]); + for (const clause of clauses.slice(1)) { + const next = organizationAuthClause(table, clause); + expr = clause.connector === "OR" ? or(expr, next) : and(expr, next); + } + return expr; +} + +export async function betterAuthUpsertSessionIndexMutation(c: any, input: { sessionId: string; sessionToken: string; userId: string }) { + assertAppOrganization(c); + + const now = Date.now(); + await c.db + .insert(authSessionIndex) + .values({ + sessionId: input.sessionId, + sessionToken: input.sessionToken, + userId: input.userId, + createdAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: authSessionIndex.sessionId, + set: { + sessionToken: input.sessionToken, + userId: input.userId, + updatedAt: now, + }, + }) + .run(); + return await c.db.select().from(authSessionIndex).where(eq(authSessionIndex.sessionId, input.sessionId)).get(); +} + +export async function betterAuthDeleteSessionIndexMutation(c: any, input: { sessionId?: string; sessionToken?: string }) { + assertAppOrganization(c); + + const clauses = [ + ...(input.sessionId ? [{ field: "sessionId", value: input.sessionId }] : []), + ...(input.sessionToken ? 
[{ field: "sessionToken", value: input.sessionToken }] : []), + ]; + if (clauses.length === 0) { + return; + } + const predicate = organizationBetterAuthWhere(authSessionIndex, clauses); + await c.db.delete(authSessionIndex).where(predicate!).run(); +} + +export async function betterAuthUpsertEmailIndexMutation(c: any, input: { email: string; userId: string }) { + assertAppOrganization(c); + + const now = Date.now(); + await c.db + .insert(authEmailIndex) + .values({ + email: input.email, + userId: input.userId, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: authEmailIndex.email, + set: { + userId: input.userId, + updatedAt: now, + }, + }) + .run(); + return await c.db.select().from(authEmailIndex).where(eq(authEmailIndex.email, input.email)).get(); +} + +export async function betterAuthDeleteEmailIndexMutation(c: any, input: { email: string }) { + assertAppOrganization(c); + await c.db.delete(authEmailIndex).where(eq(authEmailIndex.email, input.email)).run(); +} + +export async function betterAuthUpsertAccountIndexMutation( + c: any, + input: { id: string; providerId: string; accountId: string; userId: string }, +) { + assertAppOrganization(c); + + const now = Date.now(); + await c.db + .insert(authAccountIndex) + .values({ + id: input.id, + providerId: input.providerId, + accountId: input.accountId, + userId: input.userId, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: authAccountIndex.id, + set: { + providerId: input.providerId, + accountId: input.accountId, + userId: input.userId, + updatedAt: now, + }, + }) + .run(); + return await c.db.select().from(authAccountIndex).where(eq(authAccountIndex.id, input.id)).get(); +} + +export async function betterAuthDeleteAccountIndexMutation(c: any, input: { id?: string; providerId?: string; accountId?: string }) { + assertAppOrganization(c); + + if (input.id) { + await c.db.delete(authAccountIndex).where(eq(authAccountIndex.id, input.id)).run(); + return; + } + if (input.providerId && input.accountId) 
{ + await c.db + .delete(authAccountIndex) + .where(and(eq(authAccountIndex.providerId, input.providerId), eq(authAccountIndex.accountId, input.accountId))) + .run(); + } +} + +export async function betterAuthCreateVerificationMutation(c: any, input: { data: Record }) { + assertAppOrganization(c); + + await c.db.insert(authVerification).values(input.data as any).run(); + return await c.db.select().from(authVerification).where(eq(authVerification.id, input.data.id as string)).get(); +} + +export async function betterAuthUpdateVerificationMutation(c: any, input: { where: any[]; update: Record }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + if (!predicate) { + return null; + } + await c.db.update(authVerification).set(input.update as any).where(predicate).run(); + return await c.db.select().from(authVerification).where(predicate).get(); +} + +export async function betterAuthUpdateManyVerificationMutation(c: any, input: { where: any[]; update: Record }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + if (!predicate) { + return 0; + } + await c.db.update(authVerification).set(input.update as any).where(predicate).run(); + const row = await c.db.select({ value: sqlCount() }).from(authVerification).where(predicate).get(); + return row?.value ?? 
0; +} + +export async function betterAuthDeleteVerificationMutation(c: any, input: { where: any[] }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + if (!predicate) { + return; + } + await c.db.delete(authVerification).where(predicate).run(); +} + +export async function betterAuthDeleteManyVerificationMutation(c: any, input: { where: any[] }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + if (!predicate) { + return 0; + } + const rows = await c.db.select().from(authVerification).where(predicate).all(); + await c.db.delete(authVerification).where(predicate).run(); + return rows.length; +} + +export const organizationBetterAuthActions = { + async betterAuthFindSessionIndex(c: any, input: { sessionId?: string; sessionToken?: string }) { + assertAppOrganization(c); + + const clauses = [ + ...(input.sessionId ? [{ field: "sessionId", value: input.sessionId }] : []), + ...(input.sessionToken ? 
[{ field: "sessionToken", value: input.sessionToken }] : []), + ]; + if (clauses.length === 0) { + return null; + } + const predicate = organizationBetterAuthWhere(authSessionIndex, clauses); + return await c.db.select().from(authSessionIndex).where(predicate!).get(); + }, + + async betterAuthFindEmailIndex(c: any, input: { email: string }) { + assertAppOrganization(c); + return await c.db.select().from(authEmailIndex).where(eq(authEmailIndex.email, input.email)).get(); + }, + + async betterAuthFindAccountIndex(c: any, input: { id?: string; providerId?: string; accountId?: string }) { + assertAppOrganization(c); + + if (input.id) { + return await c.db.select().from(authAccountIndex).where(eq(authAccountIndex.id, input.id)).get(); + } + if (!input.providerId || !input.accountId) { + return null; + } + return await c.db + .select() + .from(authAccountIndex) + .where(and(eq(authAccountIndex.providerId, input.providerId), eq(authAccountIndex.accountId, input.accountId))) + .get(); + }, + + async betterAuthFindOneVerification(c: any, input: { where: any[] }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + return predicate ? await c.db.select().from(authVerification).where(predicate).get() : null; + }, + + async betterAuthFindManyVerification(c: any, input: { where?: any[]; limit?: number; sortBy?: any; offset?: number }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + let query = c.db.select().from(authVerification); + if (predicate) { + query = query.where(predicate); + } + if (input.sortBy?.field) { + const column = organizationAuthColumn(authVerification, input.sortBy.field); + query = query.orderBy(input.sortBy.direction === "asc" ? 
asc(column) : desc(column)); + } + if (typeof input.limit === "number") { + query = query.limit(input.limit); + } + if (typeof input.offset === "number") { + query = query.offset(input.offset); + } + return await query.all(); + }, + + async betterAuthCountVerification(c: any, input: { where?: any[] }) { + assertAppOrganization(c); + + const predicate = organizationBetterAuthWhere(authVerification, input.where); + const row = predicate + ? await c.db.select({ value: sqlCount() }).from(authVerification).where(predicate).get() + : await c.db.select({ value: sqlCount() }).from(authVerification).get(); + return row?.value ?? 0; + }, +}; diff --git a/foundry/packages/backend/src/actors/organization/actions/github.ts b/foundry/packages/backend/src/actors/organization/actions/github.ts new file mode 100644 index 0000000..ff14d7e --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/github.ts @@ -0,0 +1,78 @@ +import { desc } from "drizzle-orm"; +import type { FoundryAppSnapshot } from "@sandbox-agent/foundry-shared"; +import { getOrCreateGithubData, getOrCreateOrganization } from "../../handles.js"; +import { authSessionIndex } from "../db/schema.js"; +import { + assertAppOrganization, + buildAppSnapshot, + requireEligibleOrganization, + requireSignedInSession, + markOrganizationSyncStartedMutation, +} from "../app-shell.js"; +import { getBetterAuthService } from "../../../services/better-auth.js"; +import { refreshOrganizationSnapshotMutation } from "../actions.js"; + +export const organizationGithubActions = { + async resolveAppGithubToken( + c: any, + input: { organizationId: string; requireRepoScope?: boolean }, + ): Promise<{ accessToken: string; scopes: string[] } | null> { + assertAppOrganization(c); + const auth = getBetterAuthService(); + const rows = await c.db.select().from(authSessionIndex).orderBy(desc(authSessionIndex.updatedAt)).all(); + + for (const row of rows) { + const authState = await auth.getAuthState(row.sessionId); + if 
(authState?.sessionState?.activeOrganizationId !== input.organizationId) { + continue; + } + + const token = await auth.getAccessTokenForSession(row.sessionId); + if (!token?.accessToken) { + continue; + } + + const scopes = token.scopes; + if (input.requireRepoScope !== false && scopes.length > 0 && !scopes.some((scope) => scope === "repo" || scope.startsWith("repo:"))) { + continue; + } + + return { + accessToken: token.accessToken, + scopes, + }; + } + + return null; + }, + + async triggerAppRepoImport(c: any, input: { sessionId: string; organizationId: string }): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + requireEligibleOrganization(session, input.organizationId); + + const githubData = await getOrCreateGithubData(c, input.organizationId); + const summary = await githubData.getSummary({}); + if (summary.syncStatus === "syncing") { + return await buildAppSnapshot(c, input.sessionId); + } + + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + await organizationHandle.commandMarkSyncStarted({ label: "Importing repository catalog..." }); + await organizationHandle.commandBroadcastSnapshot({}); + + void githubData.syncRepos({ label: "Importing repository catalog..." }).catch(() => {}); + + return await buildAppSnapshot(c, input.sessionId); + }, + + async adminReloadGithubOrganization(c: any): Promise { + const githubData = await getOrCreateGithubData(c, c.state.organizationId); + await githubData.syncRepos({ label: "Reloading GitHub organization..." 
}); + }, + + async adminReloadGithubRepository(c: any, input: { repoId: string }): Promise { + const githubData = await getOrCreateGithubData(c, c.state.organizationId); + await githubData.reloadRepository(input); + }, +}; diff --git a/foundry/packages/backend/src/actors/organization/actions/onboarding.ts b/foundry/packages/backend/src/actors/organization/actions/onboarding.ts new file mode 100644 index 0000000..22153f4 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/onboarding.ts @@ -0,0 +1,82 @@ +import { randomUUID } from "node:crypto"; +import type { FoundryAppSnapshot, StarSandboxAgentRepoInput, StarSandboxAgentRepoResult } from "@sandbox-agent/foundry-shared"; +import { getOrCreateGithubData, getOrCreateOrganization } from "../../handles.js"; +import { + assertAppOrganization, + buildAppSnapshot, + getOrganizationState, + requireEligibleOrganization, + requireSignedInSession, +} from "../app-shell.js"; +import { getBetterAuthService } from "../../../services/better-auth.js"; +import { getActorRuntimeContext } from "../../context.js"; +import { resolveOrganizationGithubAuth } from "../../../services/github-auth.js"; + +const SANDBOX_AGENT_REPO = "rivet-dev/sandbox-agent"; + +export const organizationOnboardingActions = { + async skipAppStarterRepo(c: any, input: { sessionId: string }): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + await getBetterAuthService().upsertUserProfile(session.authUserId, { + starterRepoStatus: "skipped", + starterRepoSkippedAt: Date.now(), + starterRepoStarredAt: null, + }); + return await buildAppSnapshot(c, input.sessionId); + }, + + async starAppStarterRepo(c: any, input: { sessionId: string; organizationId: string }): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + requireEligibleOrganization(session, input.organizationId); + const organization = await getOrCreateOrganization(c, 
input.organizationId); + await organization.starSandboxAgentRepo({ + organizationId: input.organizationId, + }); + await getBetterAuthService().upsertUserProfile(session.authUserId, { + starterRepoStatus: "starred", + starterRepoStarredAt: Date.now(), + starterRepoSkippedAt: null, + }); + return await buildAppSnapshot(c, input.sessionId); + }, + + async selectAppOrganization(c: any, input: { sessionId: string; organizationId: string }): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + requireEligibleOrganization(session, input.organizationId); + await getBetterAuthService().setActiveOrganization(input.sessionId, input.organizationId); + await getOrCreateGithubData(c, input.organizationId); + return await buildAppSnapshot(c, input.sessionId); + }, + + async beginAppGithubInstall(c: any, input: { sessionId: string; organizationId: string }): Promise<{ url: string }> { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + requireEligibleOrganization(session, input.organizationId); + const { appShell } = getActorRuntimeContext(); + const organizationHandle = await getOrCreateOrganization(c, input.organizationId); + const organizationState = await getOrganizationState(organizationHandle); + if (organizationState.snapshot.kind !== "organization") { + return { + url: `${appShell.appUrl}/organizations/${input.organizationId}`, + }; + } + return { + url: await appShell.github.buildInstallationUrl(organizationState.githubLogin, randomUUID()), + }; + }, + + async starSandboxAgentRepo(c: any, input: StarSandboxAgentRepoInput): Promise { + const { driver } = getActorRuntimeContext(); + const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId); + await driver.github.starRepository(SANDBOX_AGENT_REPO, { + githubToken: auth?.githubToken ?? 
null, + }); + return { + repo: SANDBOX_AGENT_REPO, + starredAt: Date.now(), + }; + }, +}; diff --git a/foundry/packages/backend/src/actors/organization/actions/organization.ts b/foundry/packages/backend/src/actors/organization/actions/organization.ts new file mode 100644 index 0000000..d38e113 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/organization.ts @@ -0,0 +1,55 @@ +import type { FoundryAppSnapshot, UpdateFoundryOrganizationProfileInput, WorkspaceModelId } from "@sandbox-agent/foundry-shared"; +import { getBetterAuthService } from "../../../services/better-auth.js"; +import { getOrCreateOrganization } from "../../handles.js"; +// actions called directly (no queue) +import { + assertAppOrganization, + assertOrganizationShell, + buildAppSnapshot, + buildOrganizationState, + buildOrganizationStateIfInitialized, + requireEligibleOrganization, + requireSignedInSession, +} from "../app-shell.js"; +// org queue names removed — using direct actions + +export const organizationShellActions = { + async getAppSnapshot(c: any, input: { sessionId: string }): Promise { + return await buildAppSnapshot(c, input.sessionId); + }, + + async setAppDefaultModel(c: any, input: { sessionId: string; defaultModel: WorkspaceModelId }): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + await getBetterAuthService().upsertUserProfile(session.authUserId, { + defaultModel: input.defaultModel, + }); + return await buildAppSnapshot(c, input.sessionId); + }, + + async updateAppOrganizationProfile( + c: any, + input: { sessionId: string; organizationId: string } & UpdateFoundryOrganizationProfileInput, + ): Promise { + assertAppOrganization(c); + const session = await requireSignedInSession(c, input.sessionId); + requireEligibleOrganization(session, input.organizationId); + const organization = await getOrCreateOrganization(c, input.organizationId); + await organization.commandUpdateShellProfile({ + 
displayName: input.displayName, + slug: input.slug, + primaryDomain: input.primaryDomain, + }); + return await buildAppSnapshot(c, input.sessionId); + }, + + async getOrganizationShellState(c: any): Promise { + assertOrganizationShell(c); + return await buildOrganizationState(c); + }, + + async getOrganizationShellStateIfInitialized(c: any): Promise { + assertOrganizationShell(c); + return await buildOrganizationStateIfInitialized(c); + }, +}; diff --git a/foundry/packages/backend/src/actors/organization/actions/task-mutations.ts b/foundry/packages/backend/src/actors/organization/actions/task-mutations.ts new file mode 100644 index 0000000..73abea2 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/task-mutations.ts @@ -0,0 +1,543 @@ +// @ts-nocheck +import { randomUUID } from "node:crypto"; +import { and, desc, eq, isNotNull, ne } from "drizzle-orm"; +import type { + RepoOverview, + SandboxProviderId, + TaskRecord, + TaskSummary, + WorkspacePullRequestSummary, + WorkspaceSessionSummary, + WorkspaceTaskSummary, +} from "@sandbox-agent/foundry-shared"; +import { getActorRuntimeContext } from "../../context.js"; +import { getGithubData, getOrCreateAuditLog, getOrCreateTask, getTask } from "../../handles.js"; +// task actions called directly (no queue) +import { deriveFallbackTitle, resolveCreateFlowDecision } from "../../../services/create-flow.js"; +// actions return directly (no queue response unwrapping) +import { isActorNotFoundError, logActorWarning, resolveErrorMessage } from "../../logging.js"; +import { defaultSandboxProviderId } from "../../../sandbox-config.js"; +import { taskIndex, taskSummaries } from "../db/schema.js"; +import { refreshOrganizationSnapshotMutation } from "../actions.js"; + +interface CreateTaskCommand { + repoId: string; + task: string; + sandboxProviderId: SandboxProviderId; + explicitTitle: string | null; + explicitBranchName: string | null; + onBranch: string | null; +} + +interface RegisterTaskBranchCommand 
{ + repoId: string; + taskId: string; + branchName: string; + requireExistingRemote?: boolean; +} + +function isStaleTaskReferenceError(error: unknown): boolean { + const message = resolveErrorMessage(error); + return isActorNotFoundError(error) || message.startsWith("Task not found:"); +} + +function parseJsonValue(value: string | null | undefined, fallback: T): T { + if (!value) { + return fallback; + } + + try { + return JSON.parse(value) as T; + } catch { + return fallback; + } +} + +function taskSummaryRowFromSummary(taskSummary: WorkspaceTaskSummary) { + return { + taskId: taskSummary.id, + repoId: taskSummary.repoId, + title: taskSummary.title, + status: taskSummary.status, + repoName: taskSummary.repoName, + updatedAtMs: taskSummary.updatedAtMs, + branch: taskSummary.branch, + pullRequestJson: JSON.stringify(taskSummary.pullRequest), + sessionsSummaryJson: JSON.stringify(taskSummary.sessionsSummary), + }; +} + +export function taskSummaryFromRow(repoId: string, row: any): WorkspaceTaskSummary { + return { + id: row.taskId, + repoId, + title: row.title, + status: row.status, + repoName: row.repoName, + updatedAtMs: row.updatedAtMs, + branch: row.branch ?? null, + pullRequest: parseJsonValue(row.pullRequestJson, null), + sessionsSummary: parseJsonValue(row.sessionsSummaryJson, []), + }; +} + +export async function upsertTaskSummary(c: any, taskSummary: WorkspaceTaskSummary): Promise { + await c.db + .insert(taskSummaries) + .values(taskSummaryRowFromSummary(taskSummary)) + .onConflictDoUpdate({ + target: taskSummaries.taskId, + set: taskSummaryRowFromSummary(taskSummary), + }) + .run(); +} + +async function deleteStaleTaskIndexRow(c: any, taskId: string): Promise { + try { + await c.db.delete(taskIndex).where(eq(taskIndex.taskId, taskId)).run(); + } catch { + // Best effort cleanup only. 
+ } +} + +async function listKnownTaskBranches(c: any, repoId: string): Promise { + const rows = await c.db + .select({ branchName: taskIndex.branchName }) + .from(taskIndex) + .where(and(eq(taskIndex.repoId, repoId), isNotNull(taskIndex.branchName))) + .all(); + return rows.map((row) => row.branchName).filter((value): value is string => typeof value === "string" && value.trim().length > 0); +} + +async function resolveGitHubRepository(c: any, repoId: string) { + const githubData = getGithubData(c, c.state.organizationId); + return await githubData.getRepository({ repoId }).catch(() => null); +} + +async function listGitHubBranches(c: any, repoId: string): Promise> { + const githubData = getGithubData(c, c.state.organizationId); + return await githubData.listBranchesForRepository({ repoId }).catch(() => []); +} + +async function resolveRepositoryRemoteUrl(c: any, repoId: string): Promise { + const repository = await resolveGitHubRepository(c, repoId); + const remoteUrl = repository?.cloneUrl?.trim(); + if (!remoteUrl) { + throw new Error(`Missing remote URL for repo ${repoId}`); + } + return remoteUrl; +} + +/** + * The ONLY backend code path that creates a task actor via getOrCreateTask. + * Called when a user explicitly creates a new task (not during sync/webhooks). + * + * All other code must use getTask (handles.ts) which calls .get() and will + * error if the actor doesn't exist. Virtual tasks created during PR sync + * are materialized lazily by the client's getOrCreate in backend-client.ts. + * + * NEVER call this from a sync loop or webhook handler. 
+ */ +export async function createTaskMutation(c: any, cmd: CreateTaskCommand): Promise { + const organizationId = c.state.organizationId; + const repoId = cmd.repoId; + await resolveRepositoryRemoteUrl(c, repoId); + const onBranch = cmd.onBranch?.trim() || null; + const taskId = randomUUID(); + let initialBranchName: string | null = null; + let initialTitle: string | null = null; + + if (onBranch) { + initialBranchName = onBranch; + initialTitle = deriveFallbackTitle(cmd.task, cmd.explicitTitle ?? undefined); + + await registerTaskBranchMutation(c, { + repoId, + taskId, + branchName: onBranch, + requireExistingRemote: true, + }); + } else { + const reservedBranches = await listKnownTaskBranches(c, repoId); + const resolved = resolveCreateFlowDecision({ + task: cmd.task, + explicitTitle: cmd.explicitTitle ?? undefined, + explicitBranchName: cmd.explicitBranchName ?? undefined, + localBranches: [], + taskBranches: reservedBranches, + }); + + initialBranchName = resolved.branchName; + initialTitle = resolved.title; + + const now = Date.now(); + await c.db + .insert(taskIndex) + .values({ + taskId, + repoId, + branchName: resolved.branchName, + createdAt: now, + updatedAt: now, + }) + .onConflictDoNothing() + .run(); + } + + let taskHandle: Awaited>; + try { + taskHandle = await getOrCreateTask(c, organizationId, repoId, taskId, { + organizationId, + repoId, + taskId, + }); + } catch (error) { + if (initialBranchName) { + await deleteStaleTaskIndexRow(c, taskId); + } + throw error; + } + + const created = await taskHandle.initialize({ + sandboxProviderId: cmd.sandboxProviderId, + branchName: initialBranchName, + title: initialTitle, + task: cmd.task, + }); + + try { + await upsertTaskSummary(c, await taskHandle.getTaskSummary({})); + await refreshOrganizationSnapshotMutation(c); + } catch (error) { + logActorWarning("organization", "failed seeding task summary after task creation", { + organizationId, + repoId, + taskId, + error: resolveErrorMessage(error), + }); + } 
+ + const auditLog = await getOrCreateAuditLog(c, organizationId); + void auditLog.append({ + kind: "task.created", + repoId, + taskId, + payload: { + repoId, + sandboxProviderId: cmd.sandboxProviderId, + }, + }); + + try { + const taskSummary = await taskHandle.getTaskSummary({}); + await upsertTaskSummary(c, taskSummary); + } catch (error) { + logActorWarning("organization", "failed seeding organization task projection", { + organizationId, + repoId, + taskId, + error: resolveErrorMessage(error), + }); + } + + return created; +} + +export async function registerTaskBranchMutation(c: any, cmd: RegisterTaskBranchCommand): Promise<{ branchName: string; headSha: string }> { + const branchName = cmd.branchName.trim(); + if (!branchName) { + throw new Error("branchName is required"); + } + + const existingOwner = await c.db + .select({ taskId: taskIndex.taskId }) + .from(taskIndex) + .where(and(eq(taskIndex.branchName, branchName), eq(taskIndex.repoId, cmd.repoId), ne(taskIndex.taskId, cmd.taskId))) + .get(); + + if (existingOwner) { + let ownerMissing = false; + try { + await getTask(c, c.state.organizationId, cmd.repoId, existingOwner.taskId).get(); + } catch (error) { + if (isStaleTaskReferenceError(error)) { + ownerMissing = true; + await deleteStaleTaskIndexRow(c, existingOwner.taskId); + } else { + throw error; + } + } + if (!ownerMissing) { + throw new Error(`branch is already assigned to a different task: ${branchName}`); + } + } + + const branches = await listGitHubBranches(c, cmd.repoId); + const branchMatch = branches.find((branch) => branch.branchName === branchName) ?? null; + if (cmd.requireExistingRemote && !branchMatch) { + throw new Error(`Remote branch not found: ${branchName}`); + } + + const repository = await resolveGitHubRepository(c, cmd.repoId); + const defaultBranch = repository?.defaultBranch ?? "main"; + const headSha = branchMatch?.commitSha ?? branches.find((branch) => branch.branchName === defaultBranch)?.commitSha ?? 
""; + + const now = Date.now(); + await c.db + .insert(taskIndex) + .values({ + taskId: cmd.taskId, + repoId: cmd.repoId, + branchName, + createdAt: now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: taskIndex.taskId, + set: { + branchName, + updatedAt: now, + }, + }) + .run(); + + return { branchName, headSha }; +} + +export async function applyTaskSummaryUpdateMutation(c: any, input: { taskSummary: WorkspaceTaskSummary }): Promise { + await upsertTaskSummary(c, input.taskSummary); + await refreshOrganizationSnapshotMutation(c); +} + +export async function removeTaskSummaryMutation(c: any, input: { taskId: string }): Promise { + await c.db.delete(taskSummaries).where(eq(taskSummaries.taskId, input.taskId)).run(); + await refreshOrganizationSnapshotMutation(c); +} + +/** + * Called for every changed PR during sync and on webhook PR events. + * Runs in a bulk loop — MUST NOT create task actors or make cross-actor calls + * to task actors. Only writes to the org's local taskIndex/taskSummaries tables. + * Task actors are created lazily when the user views the task. + */ +export async function refreshTaskSummaryForBranchMutation( + c: any, + input: { repoId: string; branchName: string; pullRequest?: WorkspacePullRequestSummary | null; repoName?: string }, +): Promise { + const pullRequest = input.pullRequest ?? null; + let rows = await c.db + .select({ taskId: taskSummaries.taskId }) + .from(taskSummaries) + .where(and(eq(taskSummaries.branch, input.branchName), eq(taskSummaries.repoId, input.repoId))) + .all(); + + if (rows.length === 0 && pullRequest) { + // Create a virtual task entry in the org's local tables only. + // No task actor is spawned — it will be created lazily when the user + // clicks on the task in the sidebar (the "materialize" path). + const taskId = randomUUID(); + const now = Date.now(); + const title = pullRequest.title?.trim() || input.branchName; + const repoName = input.repoName ?? 
`${c.state.organizationId}/${input.repoId}`; + + await c.db + .insert(taskIndex) + .values({ taskId, repoId: input.repoId, branchName: input.branchName, createdAt: now, updatedAt: now }) + .onConflictDoNothing() + .run(); + + await c.db + .insert(taskSummaries) + .values({ + taskId, + repoId: input.repoId, + title, + status: "init_complete", + repoName, + updatedAtMs: pullRequest.updatedAtMs ?? now, + branch: input.branchName, + pullRequestJson: JSON.stringify(pullRequest), + sessionsSummaryJson: "[]", + }) + .onConflictDoNothing() + .run(); + + rows = [{ taskId }]; + } else { + // Update PR data on existing task summaries locally. + // If a real task actor exists, also notify it. + for (const row of rows) { + // Update the local summary with the new PR data + await c.db + .update(taskSummaries) + .set({ + pullRequestJson: pullRequest ? JSON.stringify(pullRequest) : null, + updatedAtMs: pullRequest?.updatedAtMs ?? Date.now(), + }) + .where(eq(taskSummaries.taskId, row.taskId)) + .run(); + + // Best-effort notify the task actor if it exists (fire-and-forget) + try { + const task = getTask(c, c.state.organizationId, input.repoId, row.taskId); + void task.pullRequestSync({ pullRequest }).catch(() => {}); + } catch { + // Task actor doesn't exist yet — that's fine, it's virtual + } + } + } + + await refreshOrganizationSnapshotMutation(c); +} + +export function sortOverviewBranches( + branches: Array<{ + branchName: string; + commitSha: string; + taskId: string | null; + taskTitle: string | null; + taskStatus: TaskRecord["status"] | null; + pullRequest: WorkspacePullRequestSummary | null; + ciStatus: string | null; + updatedAt: number; + }>, + defaultBranch: string | null, +) { + return [...branches].sort((left, right) => { + if (defaultBranch) { + if (left.branchName === defaultBranch && right.branchName !== defaultBranch) return -1; + if (right.branchName === defaultBranch && left.branchName !== defaultBranch) return 1; + } + if (Boolean(left.taskId) !== 
Boolean(right.taskId)) { + return left.taskId ? -1 : 1; + } + if (left.updatedAt !== right.updatedAt) { + return right.updatedAt - left.updatedAt; + } + return left.branchName.localeCompare(right.branchName); + }); +} + +export async function listTaskSummariesForRepo(c: any, repoId: string, includeArchived = false): Promise { + const rows = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, repoId)).orderBy(desc(taskSummaries.updatedAtMs)).all(); + return rows + .map((row) => ({ + organizationId: c.state.organizationId, + repoId, + taskId: row.taskId, + branchName: row.branch ?? null, + title: row.title, + status: row.status, + updatedAt: row.updatedAtMs, + pullRequest: parseJsonValue(row.pullRequestJson, null), + })) + .filter((row) => includeArchived || row.status !== "archived"); +} + +export async function listAllTaskSummaries(c: any, includeArchived = false): Promise { + const rows = await c.db.select().from(taskSummaries).orderBy(desc(taskSummaries.updatedAtMs)).all(); + return rows + .map((row) => ({ + organizationId: c.state.organizationId, + repoId: row.repoId, + taskId: row.taskId, + branchName: row.branch ?? 
null, + title: row.title, + status: row.status, + updatedAt: row.updatedAtMs, + pullRequest: parseJsonValue(row.pullRequestJson, null), + })) + .filter((row) => includeArchived || row.status !== "archived"); +} + +export async function listWorkspaceTaskSummaries(c: any): Promise { + const rows = await c.db.select().from(taskSummaries).orderBy(desc(taskSummaries.updatedAtMs)).all(); + return rows.map((row) => taskSummaryFromRow(row.repoId, row)); +} + +export async function getRepoOverviewFromOrg(c: any, repoId: string): Promise { + const now = Date.now(); + const repository = await resolveGitHubRepository(c, repoId); + const remoteUrl = await resolveRepositoryRemoteUrl(c, repoId); + const githubBranches = await listGitHubBranches(c, repoId).catch(() => []); + const taskRows = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, repoId)).all(); + + const taskMetaByBranch = new Map< + string, + { taskId: string; title: string | null; status: TaskRecord["status"] | null; updatedAt: number; pullRequest: WorkspacePullRequestSummary | null } + >(); + for (const row of taskRows) { + if (!row.branch) { + continue; + } + taskMetaByBranch.set(row.branch, { + taskId: row.taskId, + title: row.title ?? 
null, + status: row.status, + updatedAt: row.updatedAtMs, + pullRequest: parseJsonValue(row.pullRequestJson, null), + }); + } + + const branchMap = new Map(); + for (const branch of githubBranches) { + branchMap.set(branch.branchName, branch); + } + for (const branchName of taskMetaByBranch.keys()) { + if (!branchMap.has(branchName)) { + branchMap.set(branchName, { branchName, commitSha: "" }); + } + } + if (repository?.defaultBranch && !branchMap.has(repository.defaultBranch)) { + branchMap.set(repository.defaultBranch, { branchName: repository.defaultBranch, commitSha: "" }); + } + + const branches = sortOverviewBranches( + [...branchMap.values()].map((branch) => { + const taskMeta = taskMetaByBranch.get(branch.branchName); + const pr = taskMeta?.pullRequest ?? null; + return { + branchName: branch.branchName, + commitSha: branch.commitSha, + taskId: taskMeta?.taskId ?? null, + taskTitle: taskMeta?.title ?? null, + taskStatus: taskMeta?.status ?? null, + pullRequest: pr, + ciStatus: null, + updatedAt: Math.max(taskMeta?.updatedAt ?? 0, pr?.updatedAtMs ?? 0, now), + }; + }), + repository?.defaultBranch ?? null, + ); + + return { + organizationId: c.state.organizationId, + repoId, + remoteUrl, + baseRef: repository?.defaultBranch ?? null, + fetchedAt: now, + branches, + }; +} + +export async function getRepositoryMetadataFromOrg( + c: any, + repoId: string, +): Promise<{ defaultBranch: string | null; fullName: string | null; remoteUrl: string }> { + const repository = await resolveGitHubRepository(c, repoId); + const remoteUrl = await resolveRepositoryRemoteUrl(c, repoId); + return { + defaultBranch: repository?.defaultBranch ?? null, + fullName: repository?.fullName ?? 
null, + remoteUrl, + }; +} + +export async function findTaskForBranch(c: any, repoId: string, branchName: string): Promise<{ taskId: string | null }> { + const row = await c.db + .select({ taskId: taskSummaries.taskId }) + .from(taskSummaries) + .where(and(eq(taskSummaries.branch, branchName), eq(taskSummaries.repoId, repoId))) + .get(); + return { taskId: row?.taskId ?? null }; +} diff --git a/foundry/packages/backend/src/actors/organization/actions/tasks.ts b/foundry/packages/backend/src/actors/organization/actions/tasks.ts new file mode 100644 index 0000000..118ff15 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/actions/tasks.ts @@ -0,0 +1,340 @@ +// @ts-nocheck +import { desc, eq } from "drizzle-orm"; +import type { + AuditLogEvent, + CreateTaskInput, + HistoryQueryInput, + ListTasksInput, + RepoOverview, + SwitchResult, + TaskRecord, + TaskSummary, + TaskWorkspaceChangeModelInput, + TaskWorkspaceCreateTaskInput, + TaskWorkspaceDiffInput, + TaskWorkspaceRenameInput, + TaskWorkspaceRenameSessionInput, + TaskWorkspaceSelectInput, + TaskWorkspaceSetSessionUnreadInput, + TaskWorkspaceSendMessageInput, + TaskWorkspaceSessionInput, + TaskWorkspaceUpdateDraftInput, +} from "@sandbox-agent/foundry-shared"; +import { getActorRuntimeContext } from "../../context.js"; +import { getOrCreateAuditLog, getOrCreateTask, getTask as getTaskHandle } from "../../handles.js"; +import { defaultSandboxProviderId } from "../../../sandbox-config.js"; +import { logActorWarning, resolveErrorMessage } from "../../logging.js"; +import { taskIndex, taskSummaries } from "../db/schema.js"; +import { + createTaskMutation, + getRepoOverviewFromOrg, + getRepositoryMetadataFromOrg, + findTaskForBranch, + listTaskSummariesForRepo, + listAllTaskSummaries, +} from "./task-mutations.js"; + +function assertOrganization(c: { state: { organizationId: string } }, organizationId: string): void { + if (organizationId !== c.state.organizationId) { + throw new Error(`Organization actor 
mismatch: actor=${c.state.organizationId} command=${organizationId}`); + } +} + +/** + * Look up the repoId for a task from the local task index. + * Used when callers (e.g. sandbox actor) only have taskId but need repoId + * to construct the task actor key. + */ +async function resolveTaskRepoId(c: any, taskId: string): Promise { + const row = await c.db.select({ repoId: taskIndex.repoId }).from(taskIndex).where(eq(taskIndex.taskId, taskId)).get(); + if (!row) { + throw new Error(`Task ${taskId} not found in task index`); + } + return row.repoId; +} + +/** + * Get or lazily create a task actor for a user-initiated action. + * Uses getOrCreate because the user may be interacting with a virtual task + * (PR-driven) that has no actor yet. The task actor self-initializes in + * getCurrentRecord() from the org's getTaskIndexEntry data. + * + * This is safe because requireWorkspaceTask is only called from user-initiated + * actions (createSession, sendMessage, etc.), never from sync loops. + * See CLAUDE.md "Lazy Task Actor Creation". + */ +async function requireWorkspaceTask(c: any, repoId: string, taskId: string) { + return getOrCreateTask(c, c.state.organizationId, repoId, taskId, { + organizationId: c.state.organizationId, + repoId, + taskId, + }); +} + +interface GetTaskInput { + organizationId: string; + repoId: string; + taskId: string; +} + +interface TaskProxyActionInput extends GetTaskInput { + reason?: string; +} + +interface RepoOverviewInput { + organizationId: string; + repoId: string; +} + +export { createTaskMutation }; + +export const organizationTaskActions = { + async createTask(c: any, input: CreateTaskInput): Promise { + assertOrganization(c, input.organizationId); + const { config } = getActorRuntimeContext(); + const sandboxProviderId = input.sandboxProviderId ?? 
defaultSandboxProviderId(config); + + // Self-call: call the mutation directly since we're inside the org actor + return await createTaskMutation(c, { + repoId: input.repoId, + task: input.task, + sandboxProviderId, + explicitTitle: input.explicitTitle ?? null, + explicitBranchName: input.explicitBranchName ?? null, + onBranch: input.onBranch ?? null, + }); + }, + + async materializeTask(c: any, input: { organizationId: string; repoId: string; virtualTaskId: string }): Promise { + assertOrganization(c, input.organizationId); + const { config } = getActorRuntimeContext(); + // Self-call: call the mutation directly + return await createTaskMutation(c, { + repoId: input.repoId, + task: input.virtualTaskId, + sandboxProviderId: defaultSandboxProviderId(config), + explicitTitle: null, + explicitBranchName: null, + onBranch: null, + }); + }, + + async createWorkspaceTask(c: any, input: TaskWorkspaceCreateTaskInput): Promise<{ taskId: string; sessionId?: string }> { + const created = await organizationTaskActions.createTask(c, { + organizationId: c.state.organizationId, + repoId: input.repoId, + task: input.task, + ...(input.title ? { explicitTitle: input.title } : {}), + ...(input.onBranch ? { onBranch: input.onBranch } : input.branch ? 
{ explicitBranchName: input.branch } : {}), + }); + + const task = await requireWorkspaceTask(c, input.repoId, created.taskId); + void task + .createSessionAndSend({ + model: input.model, + text: input.task, + authSessionId: input.authSessionId, + }) + .catch(() => {}); + + return { taskId: created.taskId }; + }, + + async markWorkspaceUnread(c: any, input: TaskWorkspaceSelectInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + await task.markUnread({ authSessionId: input.authSessionId }); + }, + + async renameWorkspaceTask(c: any, input: TaskWorkspaceRenameInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + await task.renameTask({ value: input.value }); + }, + + async createWorkspaceSession(c: any, input: TaskWorkspaceSelectInput & { model?: string }): Promise<{ sessionId: string }> { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + return await task.createSession({ + ...(input.model ? { model: input.model } : {}), + ...(input.authSessionId ? 
{ authSessionId: input.authSessionId } : {}), + }); + }, + + async renameWorkspaceSession(c: any, input: TaskWorkspaceRenameSessionInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + await task.renameSession({ sessionId: input.sessionId, title: input.title, authSessionId: input.authSessionId }); + }, + + async selectWorkspaceSession(c: any, input: TaskWorkspaceSessionInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + await task.selectSession({ sessionId: input.sessionId, authSessionId: input.authSessionId }); + }, + + async setWorkspaceSessionUnread(c: any, input: TaskWorkspaceSetSessionUnreadInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + await task.setSessionUnread({ sessionId: input.sessionId, unread: input.unread, authSessionId: input.authSessionId }); + }, + + async updateWorkspaceDraft(c: any, input: TaskWorkspaceUpdateDraftInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + void task + .updateDraft({ + sessionId: input.sessionId, + text: input.text, + attachments: input.attachments, + authSessionId: input.authSessionId, + }) + .catch(() => {}); + }, + + async changeWorkspaceModel(c: any, input: TaskWorkspaceChangeModelInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + await task.changeModel({ sessionId: input.sessionId, model: input.model, authSessionId: input.authSessionId }); + }, + + async sendWorkspaceMessage(c: any, input: TaskWorkspaceSendMessageInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + void task + .sendMessage({ + sessionId: input.sessionId, + text: input.text, + attachments: input.attachments, + authSessionId: input.authSessionId, + }) + .catch(() => {}); + }, + + async stopWorkspaceSession(c: any, input: TaskWorkspaceSessionInput): Promise { + const task = await requireWorkspaceTask(c, 
input.repoId, input.taskId); + void task.stopSession({ sessionId: input.sessionId, authSessionId: input.authSessionId }).catch(() => {}); + }, + + async closeWorkspaceSession(c: any, input: TaskWorkspaceSessionInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + void task.closeSession({ sessionId: input.sessionId, authSessionId: input.authSessionId }).catch(() => {}); + }, + + async publishWorkspacePr(c: any, input: TaskWorkspaceSelectInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + void task.publishPr({}).catch(() => {}); + }, + + async revertWorkspaceFile(c: any, input: TaskWorkspaceDiffInput): Promise { + const task = await requireWorkspaceTask(c, input.repoId, input.taskId); + void task.revertFile(input).catch(() => {}); + }, + + async getRepoOverview(c: any, input: RepoOverviewInput): Promise { + assertOrganization(c, input.organizationId); + + return await getRepoOverviewFromOrg(c, input.repoId); + }, + + async listTasks(c: any, input: ListTasksInput): Promise { + assertOrganization(c, input.organizationId); + if (input.repoId) { + return await listTaskSummariesForRepo(c, input.repoId, true); + } + return await listAllTaskSummaries(c, true); + }, + + async switchTask(c: any, input: { repoId: string; taskId: string }): Promise { + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + const record = await h.get(); + const switched = await h.switchTask({}); + return { + organizationId: c.state.organizationId, + taskId: input.taskId, + sandboxProviderId: record.sandboxProviderId, + switchTarget: switched.switchTarget, + }; + }, + + async auditLog(c: any, input: HistoryQueryInput): Promise { + assertOrganization(c, input.organizationId); + const auditLog = await getOrCreateAuditLog(c, c.state.organizationId); + return await auditLog.list({ + repoId: input.repoId, + branch: input.branch, + taskId: input.taskId, + limit: input.limit ?? 
20, + }); + }, + + async getTask(c: any, input: GetTaskInput): Promise { + assertOrganization(c, input.organizationId); + // Resolve repoId from local task index if not provided (e.g. sandbox actor only has taskId) + const repoId = input.repoId || (await resolveTaskRepoId(c, input.taskId)); + // Use getOrCreate — the task may be virtual (PR-driven, no actor yet). + // The task actor self-initializes in getCurrentRecord(). + const handle = await getOrCreateTask(c, c.state.organizationId, repoId, input.taskId, { + organizationId: c.state.organizationId, + repoId, + taskId: input.taskId, + }); + return await handle.get(); + }, + + async attachTask(c: any, input: TaskProxyActionInput): Promise<{ target: string; sessionId: string | null }> { + assertOrganization(c, input.organizationId); + + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + return await h.attach({ reason: input.reason }); + }, + + async pushTask(c: any, input: TaskProxyActionInput): Promise { + assertOrganization(c, input.organizationId); + + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + void h.push({ reason: input.reason }).catch(() => {}); + }, + + async syncTask(c: any, input: TaskProxyActionInput): Promise { + assertOrganization(c, input.organizationId); + + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + void h.sync({ reason: input.reason }).catch(() => {}); + }, + + async mergeTask(c: any, input: TaskProxyActionInput): Promise { + assertOrganization(c, input.organizationId); + + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + void h.merge({ reason: input.reason }).catch(() => {}); + }, + + async archiveTask(c: any, input: TaskProxyActionInput): Promise { + assertOrganization(c, input.organizationId); + + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + void h.archive({ reason: input.reason }).catch(() => {}); + }, + + async 
killTask(c: any, input: TaskProxyActionInput): Promise { + assertOrganization(c, input.organizationId); + + const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId); + void h.kill({ reason: input.reason }).catch(() => {}); + }, + + async getRepositoryMetadata(c: any, input: { repoId: string }): Promise<{ defaultBranch: string | null; fullName: string | null; remoteUrl: string }> { + return await getRepositoryMetadataFromOrg(c, input.repoId); + }, + + async findTaskForBranch(c: any, input: { repoId: string; branchName: string }): Promise<{ taskId: string | null }> { + return await findTaskForBranch(c, input.repoId, input.branchName); + }, + + /** + * Lightweight read of task index + summary data. Used by the task actor + * to self-initialize when lazily materialized from a virtual task. + * Does NOT trigger materialization — no circular dependency. + */ + async getTaskIndexEntry(c: any, input: { taskId: string }): Promise<{ branchName: string | null; title: string | null } | null> { + const idx = await c.db.select({ branchName: taskIndex.branchName }).from(taskIndex).where(eq(taskIndex.taskId, input.taskId)).get(); + const summary = await c.db.select({ title: taskSummaries.title }).from(taskSummaries).where(eq(taskSummaries.taskId, input.taskId)).get(); + if (!idx && !summary) return null; + return { + branchName: idx?.branchName ?? null, + title: summary?.title ?? 
null, + }; + }, +}; diff --git a/foundry/packages/backend/src/actors/organization/app-shell.ts b/foundry/packages/backend/src/actors/organization/app-shell.ts index 3339590..dce5855 100644 --- a/foundry/packages/backend/src/actors/organization/app-shell.ts +++ b/foundry/packages/backend/src/actors/organization/app-shell.ts @@ -1,4 +1,4 @@ -import { and, asc, count as sqlCount, desc, eq, gt, gte, inArray, isNotNull, isNull, like, lt, lte, ne, notInArray, or } from "drizzle-orm"; +import { desc, eq } from "drizzle-orm"; import { randomUUID } from "node:crypto"; import type { FoundryAppSnapshot, @@ -8,109 +8,35 @@ import type { FoundryOrganizationMember, FoundryUser, UpdateFoundryOrganizationProfileInput, + WorkspaceModelId, } from "@sandbox-agent/foundry-shared"; +import { DEFAULT_WORKSPACE_MODEL_ID } from "@sandbox-agent/foundry-shared"; import { getActorRuntimeContext } from "../context.js"; import { getOrCreateGithubData, getOrCreateOrganization, selfOrganization } from "../handles.js"; import { GitHubAppError } from "../../services/app-github.js"; import { getBetterAuthService } from "../../services/better-auth.js"; import { repoIdFromRemote, repoLabelFromRemote } from "../../services/repo.js"; import { logger } from "../../logging.js"; -import { - authAccountIndex, - authEmailIndex, - authSessionIndex, - authVerification, - invoices, - organizationMembers, - organizationProfile, - repos, - seatAssignments, - stripeLookup, -} from "./db/schema.js"; - -export const APP_SHELL_ORGANIZATION_ID = "app"; - -// ── Better Auth adapter where-clause helpers ── -// These convert the adapter's `{ field, value, operator }` clause arrays into -// Drizzle predicates for organization-level auth index / verification tables. 
- -function organizationAuthColumn(table: any, field: string): any { - const column = table[field]; - if (!column) { - throw new Error(`Unknown auth table field: ${field}`); - } - return column; -} - -function normalizeAuthValue(value: unknown): unknown { - if (value instanceof Date) { - return value.getTime(); - } - if (Array.isArray(value)) { - return value.map((entry) => normalizeAuthValue(entry)); - } - return value; -} - -function organizationAuthClause(table: any, clause: { field: string; value: unknown; operator?: string }): any { - const column = organizationAuthColumn(table, clause.field); - const value = normalizeAuthValue(clause.value); - switch (clause.operator) { - case "ne": - return value === null ? isNotNull(column) : ne(column, value as any); - case "lt": - return lt(column, value as any); - case "lte": - return lte(column, value as any); - case "gt": - return gt(column, value as any); - case "gte": - return gte(column, value as any); - case "in": - return inArray(column, Array.isArray(value) ? (value as any[]) : [value as any]); - case "not_in": - return notInArray(column, Array.isArray(value) ? (value as any[]) : [value as any]); - case "contains": - return like(column, `%${String(value ?? "")}%`); - case "starts_with": - return like(column, `${String(value ?? "")}%`); - case "ends_with": - return like(column, `%${String(value ?? "")}`); - case "eq": - default: - return value === null ? isNull(column) : eq(column, value as any); - } -} - -function organizationAuthWhere(table: any, clauses: any[] | undefined): any { - if (!clauses || clauses.length === 0) { - return undefined; - } - let expr = organizationAuthClause(table, clauses[0]); - for (const clause of clauses.slice(1)) { - const next = organizationAuthClause(table, clause); - expr = clause.connector === "OR" ? 
or(expr, next) : and(expr, next); - } - return expr; -} +import { invoices, organizationMembers, organizationProfile, seatAssignments, stripeLookup } from "./db/schema.js"; +import { APP_SHELL_ORGANIZATION_ID } from "./constants.js"; const githubWebhookLogger = logger.child({ scope: "github-webhook", }); -const PROFILE_ROW_ID = "profile"; +const PROFILE_ROW_ID = 1; function roundDurationMs(start: number): number { return Math.round((performance.now() - start) * 100) / 100; } -function assertAppOrganization(c: any): void { +export function assertAppOrganization(c: any): void { if (c.state.organizationId !== APP_SHELL_ORGANIZATION_ID) { throw new Error(`App shell action requires organization ${APP_SHELL_ORGANIZATION_ID}, got ${c.state.organizationId}`); } } -function assertOrganizationShell(c: any): void { +export function assertOrganizationShell(c: any): void { if (c.state.organizationId === APP_SHELL_ORGANIZATION_ID) { throw new Error("Organization action cannot run on the reserved app organization"); } @@ -132,10 +58,6 @@ function organizationOrganizationId(kind: FoundryOrganization["kind"], login: st return kind === "personal" ? 
personalOrganizationId(login) : slugify(login); } -function hasRepoScope(scopes: string[]): boolean { - return scopes.some((scope) => scope === "repo" || scope.startsWith("repo:")); -} - function parseEligibleOrganizationIds(value: string): string[] { try { const parsed = JSON.parse(value); @@ -217,7 +139,9 @@ function stripeWebhookSubscription(event: any) { }; } -async function getOrganizationState(organization: any) { +// sendOrganizationCommand removed — org actions called directly + +export async function getOrganizationState(organization: any) { return await organization.getOrganizationShellState({}); } @@ -290,7 +214,7 @@ async function listSnapshotOrganizations(c: any, sessionId: string, organization }; } -async function buildAppSnapshot(c: any, sessionId: string, allowOrganizationRepair = true): Promise { +export async function buildAppSnapshot(c: any, sessionId: string, allowOrganizationRepair = true): Promise { assertAppOrganization(c); const startedAt = performance.now(); const auth = getBetterAuthService(); @@ -359,6 +283,7 @@ async function buildAppSnapshot(c: any, sessionId: string, allowOrganizationRepa githubLogin: profile?.githubLogin ?? "", roleLabel: profile?.roleLabel ?? "GitHub user", eligibleOrganizationIds, + defaultModel: profile?.defaultModel ?? DEFAULT_WORKSPACE_MODEL_ID, } : null; @@ -404,7 +329,7 @@ async function buildAppSnapshot(c: any, sessionId: string, allowOrganizationRepa return snapshot; } -async function requireSignedInSession(c: any, sessionId: string) { +export async function requireSignedInSession(c: any, sessionId: string) { const auth = getBetterAuthService(); const authState = await auth.getAuthState(sessionId); const user = authState?.user ?? 
null; @@ -431,7 +356,7 @@ async function requireSignedInSession(c: any, sessionId: string) { }; } -function requireEligibleOrganization(session: any, organizationId: string): void { +export function requireEligibleOrganization(session: any, organizationId: string): void { const eligibleOrganizationIds = parseEligibleOrganizationIds(session.eligibleOrganizationIdsJson); if (!eligibleOrganizationIds.includes(organizationId)) { throw new Error(`Organization ${organizationId} is not available in this app session`); @@ -557,7 +482,7 @@ async function syncGithubOrganizationsInternal(c: any, input: { sessionId: strin const organizationId = organizationOrganizationId(account.kind, account.githubLogin); const installation = installations.find((candidate) => candidate.accountLogin === account.githubLogin) ?? null; const organization = await getOrCreateOrganization(c, organizationId); - await organization.syncOrganizationShellFromGithub({ + await organization.commandSyncOrganizationShellFromGithub({ userId: githubUserId, userName: viewer.name || viewer.login, userEmail: viewer.email ?? 
`${viewer.login}@users.noreply.github.com`, @@ -641,17 +566,22 @@ async function listOrganizationInvoices(c: any): Promise { assertOrganizationShell(c); - const rows = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).orderBy(desc(repos.updatedAt)).all(); - return rows.map((row) => repoLabelFromRemote(row.remoteUrl)).sort((left, right) => left.localeCompare(right)); + try { + const githubData = await getOrCreateGithubData(c, c.state.organizationId); + const rows = await githubData.listRepositories({}); + return rows.map((row: any) => repoLabelFromRemote(row.cloneUrl)).sort((a: string, b: string) => a.localeCompare(b)); + } catch { + return []; + } } -async function buildOrganizationState(c: any) { +export async function buildOrganizationState(c: any) { const startedAt = performance.now(); const row = await requireOrganizationProfileRow(c); return await buildOrganizationStateFromRow(c, row, startedAt); } -async function buildOrganizationStateIfInitialized(c: any) { +export async function buildOrganizationStateIfInitialized(c: any) { const startedAt = performance.now(); const row = await readOrganizationProfileRow(c); if (!row) { @@ -685,7 +615,6 @@ async function buildOrganizationStateFromRow(c: any, row: any, startedAt: number slug: row.slug, primaryDomain: row.primaryDomain, seatAccrualMode: "first_prompt", - defaultModel: row.defaultModel, autoImportRepos: row.autoImportRepos === 1, }, github: { @@ -697,6 +626,10 @@ async function buildOrganizationStateFromRow(c: any, row: any, startedAt: number lastSyncAt: row.githubLastSyncAt ?? null, lastWebhookAt: row.githubLastWebhookAt ?? null, lastWebhookEvent: row.githubLastWebhookEvent ?? "", + syncGeneration: row.githubSyncGeneration ?? 0, + syncPhase: row.githubSyncPhase ?? null, + processedRepositoryCount: row.githubProcessedRepositoryCount ?? 0, + totalRepositoryCount: row.githubTotalRepositoryCount ?? 
0, }, billing: { planId: row.billingPlanId, @@ -744,396 +677,13 @@ async function applySubscriptionState( }, fallbackPlanId: FoundryBillingPlanId, ): Promise { - await organization.applyOrganizationStripeSubscription({ + await organization.commandApplyStripeSubscription({ subscription, fallbackPlanId, }); } export const organizationAppActions = { - async authFindSessionIndex(c: any, input: { sessionId?: string; sessionToken?: string }) { - assertAppOrganization(c); - - const clauses = [ - ...(input.sessionId ? [{ field: "sessionId", value: input.sessionId }] : []), - ...(input.sessionToken ? [{ field: "sessionToken", value: input.sessionToken }] : []), - ]; - if (clauses.length === 0) { - return null; - } - const predicate = organizationAuthWhere(authSessionIndex, clauses); - return await c.db.select().from(authSessionIndex).where(predicate!).get(); - }, - - async authUpsertSessionIndex(c: any, input: { sessionId: string; sessionToken: string; userId: string }) { - assertAppOrganization(c); - - const now = Date.now(); - await c.db - .insert(authSessionIndex) - .values({ - sessionId: input.sessionId, - sessionToken: input.sessionToken, - userId: input.userId, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: authSessionIndex.sessionId, - set: { - sessionToken: input.sessionToken, - userId: input.userId, - updatedAt: now, - }, - }) - .run(); - return await c.db.select().from(authSessionIndex).where(eq(authSessionIndex.sessionId, input.sessionId)).get(); - }, - - async authDeleteSessionIndex(c: any, input: { sessionId?: string; sessionToken?: string }) { - assertAppOrganization(c); - - const clauses = [ - ...(input.sessionId ? [{ field: "sessionId", value: input.sessionId }] : []), - ...(input.sessionToken ? 
[{ field: "sessionToken", value: input.sessionToken }] : []), - ]; - if (clauses.length === 0) { - return; - } - const predicate = organizationAuthWhere(authSessionIndex, clauses); - await c.db.delete(authSessionIndex).where(predicate!).run(); - }, - - async authFindEmailIndex(c: any, input: { email: string }) { - assertAppOrganization(c); - - return await c.db.select().from(authEmailIndex).where(eq(authEmailIndex.email, input.email)).get(); - }, - - async authUpsertEmailIndex(c: any, input: { email: string; userId: string }) { - assertAppOrganization(c); - - const now = Date.now(); - await c.db - .insert(authEmailIndex) - .values({ - email: input.email, - userId: input.userId, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: authEmailIndex.email, - set: { - userId: input.userId, - updatedAt: now, - }, - }) - .run(); - return await c.db.select().from(authEmailIndex).where(eq(authEmailIndex.email, input.email)).get(); - }, - - async authDeleteEmailIndex(c: any, input: { email: string }) { - assertAppOrganization(c); - - await c.db.delete(authEmailIndex).where(eq(authEmailIndex.email, input.email)).run(); - }, - - async authFindAccountIndex(c: any, input: { id?: string; providerId?: string; accountId?: string }) { - assertAppOrganization(c); - - if (input.id) { - return await c.db.select().from(authAccountIndex).where(eq(authAccountIndex.id, input.id)).get(); - } - if (!input.providerId || !input.accountId) { - return null; - } - return await c.db - .select() - .from(authAccountIndex) - .where(and(eq(authAccountIndex.providerId, input.providerId), eq(authAccountIndex.accountId, input.accountId))) - .get(); - }, - - async authUpsertAccountIndex(c: any, input: { id: string; providerId: string; accountId: string; userId: string }) { - assertAppOrganization(c); - - const now = Date.now(); - await c.db - .insert(authAccountIndex) - .values({ - id: input.id, - providerId: input.providerId, - accountId: input.accountId, - userId: input.userId, - updatedAt: now, - }) 
- .onConflictDoUpdate({ - target: authAccountIndex.id, - set: { - providerId: input.providerId, - accountId: input.accountId, - userId: input.userId, - updatedAt: now, - }, - }) - .run(); - return await c.db.select().from(authAccountIndex).where(eq(authAccountIndex.id, input.id)).get(); - }, - - async authDeleteAccountIndex(c: any, input: { id?: string; providerId?: string; accountId?: string }) { - assertAppOrganization(c); - - if (input.id) { - await c.db.delete(authAccountIndex).where(eq(authAccountIndex.id, input.id)).run(); - return; - } - if (input.providerId && input.accountId) { - await c.db - .delete(authAccountIndex) - .where(and(eq(authAccountIndex.providerId, input.providerId), eq(authAccountIndex.accountId, input.accountId))) - .run(); - } - }, - - async authCreateVerification(c: any, input: { data: Record }) { - assertAppOrganization(c); - - await c.db - .insert(authVerification) - .values(input.data as any) - .run(); - return await c.db - .select() - .from(authVerification) - .where(eq(authVerification.id, input.data.id as string)) - .get(); - }, - - async authFindOneVerification(c: any, input: { where: any[] }) { - assertAppOrganization(c); - - const predicate = organizationAuthWhere(authVerification, input.where); - return predicate ? await c.db.select().from(authVerification).where(predicate).get() : null; - }, - - async authFindManyVerification(c: any, input: { where?: any[]; limit?: number; sortBy?: any; offset?: number }) { - assertAppOrganization(c); - - const predicate = organizationAuthWhere(authVerification, input.where); - let query = c.db.select().from(authVerification); - if (predicate) { - query = query.where(predicate); - } - if (input.sortBy?.field) { - const column = organizationAuthColumn(authVerification, input.sortBy.field); - query = query.orderBy(input.sortBy.direction === "asc" ? 
asc(column) : desc(column)); - } - if (typeof input.limit === "number") { - query = query.limit(input.limit); - } - if (typeof input.offset === "number") { - query = query.offset(input.offset); - } - return await query.all(); - }, - - async authUpdateVerification(c: any, input: { where: any[]; update: Record }) { - assertAppOrganization(c); - - const predicate = organizationAuthWhere(authVerification, input.where); - if (!predicate) { - return null; - } - await c.db - .update(authVerification) - .set(input.update as any) - .where(predicate) - .run(); - return await c.db.select().from(authVerification).where(predicate).get(); - }, - - async authUpdateManyVerification(c: any, input: { where: any[]; update: Record }) { - assertAppOrganization(c); - - const predicate = organizationAuthWhere(authVerification, input.where); - if (!predicate) { - return 0; - } - await c.db - .update(authVerification) - .set(input.update as any) - .where(predicate) - .run(); - const row = await c.db.select({ value: sqlCount() }).from(authVerification).where(predicate).get(); - return row?.value ?? 0; - }, - - async authDeleteVerification(c: any, input: { where: any[] }) { - assertAppOrganization(c); - - const predicate = organizationAuthWhere(authVerification, input.where); - if (!predicate) { - return; - } - await c.db.delete(authVerification).where(predicate).run(); - }, - - async authDeleteManyVerification(c: any, input: { where: any[] }) { - assertAppOrganization(c); - - const predicate = organizationAuthWhere(authVerification, input.where); - if (!predicate) { - return 0; - } - const rows = await c.db.select().from(authVerification).where(predicate).all(); - await c.db.delete(authVerification).where(predicate).run(); - return rows.length; - }, - - async authCountVerification(c: any, input: { where?: any[] }) { - assertAppOrganization(c); - - const predicate = organizationAuthWhere(authVerification, input.where); - const row = predicate - ? 
await c.db.select({ value: sqlCount() }).from(authVerification).where(predicate).get() - : await c.db.select({ value: sqlCount() }).from(authVerification).get(); - return row?.value ?? 0; - }, - - async getAppSnapshot(c: any, input: { sessionId: string }): Promise { - return await buildAppSnapshot(c, input.sessionId); - }, - - async resolveAppGithubToken( - c: any, - input: { organizationId: string; requireRepoScope?: boolean }, - ): Promise<{ accessToken: string; scopes: string[] } | null> { - assertAppOrganization(c); - const auth = getBetterAuthService(); - const rows = await c.db.select().from(authSessionIndex).orderBy(desc(authSessionIndex.updatedAt)).all(); - - for (const row of rows) { - const authState = await auth.getAuthState(row.sessionId); - if (authState?.sessionState?.activeOrganizationId !== input.organizationId) { - continue; - } - - const token = await auth.getAccessTokenForSession(row.sessionId); - if (!token?.accessToken) { - continue; - } - - const scopes = token.scopes; - if (input.requireRepoScope !== false && scopes.length > 0 && !hasRepoScope(scopes)) { - continue; - } - - return { - accessToken: token.accessToken, - scopes, - }; - } - - return null; - }, - - async skipAppStarterRepo(c: any, input: { sessionId: string }): Promise { - assertAppOrganization(c); - const session = await requireSignedInSession(c, input.sessionId); - await getBetterAuthService().upsertUserProfile(session.authUserId, { - starterRepoStatus: "skipped", - starterRepoSkippedAt: Date.now(), - starterRepoStarredAt: null, - }); - return await buildAppSnapshot(c, input.sessionId); - }, - - async starAppStarterRepo(c: any, input: { sessionId: string; organizationId: string }): Promise { - assertAppOrganization(c); - const session = await requireSignedInSession(c, input.sessionId); - requireEligibleOrganization(session, input.organizationId); - const organization = await getOrCreateOrganization(c, input.organizationId); - await organization.starSandboxAgentRepo({ - 
organizationId: input.organizationId, - }); - await getBetterAuthService().upsertUserProfile(session.authUserId, { - starterRepoStatus: "starred", - starterRepoStarredAt: Date.now(), - starterRepoSkippedAt: null, - }); - return await buildAppSnapshot(c, input.sessionId); - }, - - async selectAppOrganization(c: any, input: { sessionId: string; organizationId: string }): Promise { - assertAppOrganization(c); - const session = await requireSignedInSession(c, input.sessionId); - requireEligibleOrganization(session, input.organizationId); - await getBetterAuthService().setActiveOrganization(input.sessionId, input.organizationId); - - // Ensure the GitHub data actor exists. If it's newly created, its own - // workflow will detect the pending sync status and run the initial - // full sync automatically — no orchestration needed here. - await getOrCreateGithubData(c, input.organizationId); - - return await buildAppSnapshot(c, input.sessionId); - }, - - async updateAppOrganizationProfile( - c: any, - input: { sessionId: string; organizationId: string } & UpdateFoundryOrganizationProfileInput, - ): Promise { - assertAppOrganization(c); - const session = await requireSignedInSession(c, input.sessionId); - requireEligibleOrganization(session, input.organizationId); - const organization = await getOrCreateOrganization(c, input.organizationId); - await organization.updateOrganizationShellProfile({ - displayName: input.displayName, - slug: input.slug, - primaryDomain: input.primaryDomain, - }); - return await buildAppSnapshot(c, input.sessionId); - }, - - async triggerAppRepoImport(c: any, input: { sessionId: string; organizationId: string }): Promise { - assertAppOrganization(c); - const session = await requireSignedInSession(c, input.sessionId); - requireEligibleOrganization(session, input.organizationId); - - const githubData = await getOrCreateGithubData(c, input.organizationId); - const summary = await githubData.getSummary({}); - if (summary.syncStatus === "syncing") { - 
return await buildAppSnapshot(c, input.sessionId); - } - - // Mark sync started on the organization, then send directly to the - // GitHub data actor's own workflow queue. - const organizationHandle = await getOrCreateOrganization(c, input.organizationId); - await organizationHandle.markOrganizationSyncStarted({ - label: "Importing repository catalog...", - }); - - await githubData.send("githubData.command.syncRepos", { label: "Importing repository catalog..." }, { wait: false }); - - return await buildAppSnapshot(c, input.sessionId); - }, - - async beginAppGithubInstall(c: any, input: { sessionId: string; organizationId: string }): Promise<{ url: string }> { - assertAppOrganization(c); - const session = await requireSignedInSession(c, input.sessionId); - requireEligibleOrganization(session, input.organizationId); - const { appShell } = getActorRuntimeContext(); - const organizationHandle = await getOrCreateOrganization(c, input.organizationId); - const organizationState = await getOrganizationState(organizationHandle); - if (organizationState.snapshot.kind !== "organization") { - return { - url: `${appShell.appUrl}/organizations/${input.organizationId}`, - }; - } - return { - url: await appShell.github.buildInstallationUrl(organizationState.githubLogin, randomUUID()), - }; - }, - async createAppCheckoutSession(c: any, input: { sessionId: string; organizationId: string; planId: FoundryBillingPlanId }): Promise<{ url: string }> { assertAppOrganization(c); const session = await requireSignedInSession(c, input.sessionId); @@ -1143,7 +693,9 @@ export const organizationAppActions = { const organizationState = await getOrganizationState(organizationHandle); if (input.planId === "free") { - await organizationHandle.applyOrganizationFreePlan({ clearSubscription: false }); + await organizationHandle.commandApplyFreePlan({ + clearSubscription: false, + }); return { url: `${appShell.appUrl}/organizations/${input.organizationId}/billing`, }; @@ -1162,7 +714,9 @@ export const 
organizationAppActions = { email: session.currentUserEmail, }) ).id; - await organizationHandle.applyOrganizationStripeCustomer({ customerId }); + await organizationHandle.commandApplyStripeCustomer({ + customerId, + }); await upsertStripeLookupEntries(c, input.organizationId, customerId, null); } @@ -1190,7 +744,9 @@ export const organizationAppActions = { const completion = await appShell.stripe.retrieveCheckoutCompletion(input.checkoutSessionId); if (completion.customerId) { - await organizationHandle.applyOrganizationStripeCustomer({ customerId: completion.customerId }); + await organizationHandle.commandApplyStripeCustomer({ + customerId: completion.customerId, + }); } await upsertStripeLookupEntries(c, input.organizationId, completion.customerId, completion.subscriptionId); @@ -1200,7 +756,7 @@ export const organizationAppActions = { } if (completion.paymentMethodLabel) { - await organizationHandle.setOrganizationBillingPaymentMethod({ + await organizationHandle.commandSetPaymentMethod({ label: completion.paymentMethodLabel, }); } @@ -1240,7 +796,9 @@ export const organizationAppActions = { await applySubscriptionState(organizationHandle, subscription, organizationState.billingPlanId); await upsertStripeLookupEntries(c, input.organizationId, subscription.customerId ?? organizationState.stripeCustomerId, subscription.id); } else { - await organizationHandle.setOrganizationBillingStatus({ status: "scheduled_cancel" }); + await organizationHandle.commandSetBillingStatus({ + status: "scheduled_cancel", + }); } return await buildAppSnapshot(c, input.sessionId); @@ -1259,7 +817,9 @@ export const organizationAppActions = { await applySubscriptionState(organizationHandle, subscription, organizationState.billingPlanId); await upsertStripeLookupEntries(c, input.organizationId, subscription.customerId ?? 
organizationState.stripeCustomerId, subscription.id); } else { - await organizationHandle.setOrganizationBillingStatus({ status: "active" }); + await organizationHandle.commandSetBillingStatus({ + status: "active", + }); } return await buildAppSnapshot(c, input.sessionId); @@ -1270,7 +830,7 @@ export const organizationAppActions = { const session = await requireSignedInSession(c, input.sessionId); requireEligibleOrganization(session, input.organizationId); const organization = await getOrCreateOrganization(c, input.organizationId); - await organization.recordOrganizationSeatUsage({ + await organization.commandRecordSeatUsage({ email: session.currentUserEmail, }); return await buildAppSnapshot(c, input.sessionId); @@ -1293,7 +853,9 @@ export const organizationAppActions = { if (organizationId) { const organization = await getOrCreateOrganization(c, organizationId); if (typeof object.customer === "string") { - await organization.applyOrganizationStripeCustomer({ customerId: object.customer }); + await organization.commandApplyStripeCustomer({ + customerId: object.customer, + }); } await upsertStripeLookupEntries( c, @@ -1326,7 +888,9 @@ export const organizationAppActions = { const organizationId = await findOrganizationIdForStripeEvent(c, subscription.customerId, subscription.id); if (organizationId) { const organization = await getOrCreateOrganization(c, organizationId); - await organization.applyOrganizationFreePlan({ clearSubscription: true }); + await organization.commandApplyFreePlan({ + clearSubscription: true, + }); } return { ok: true }; } @@ -1338,7 +902,7 @@ export const organizationAppActions = { const organization = await getOrCreateOrganization(c, organizationId); const rawAmount = typeof invoice.amount_paid === "number" ? invoice.amount_paid : invoice.amount_due; const amountUsd = Math.round((typeof rawAmount === "number" ? 
rawAmount : 0) / 100); - await organization.upsertOrganizationInvoice({ + await organization.commandUpsertInvoice({ id: String(invoice.id), label: typeof invoice.number === "string" ? `Invoice ${invoice.number}` : "Stripe invoice", issuedAt: formatUnixDate(typeof invoice.created === "number" ? invoice.created : Math.floor(Date.now() / 1000)), @@ -1374,7 +938,7 @@ export const organizationAppActions = { const organizationId = organizationOrganizationId(kind, accountLogin); const receivedAt = Date.now(); const organization = await getOrCreateOrganization(c, organizationId); - await organization.recordGithubWebhookReceipt({ + await organization.commandRecordGithubWebhookReceipt({ organizationId: organizationId, event, action: body.action ?? null, @@ -1400,14 +964,16 @@ export const organizationAppActions = { label: "GitHub App installation removed", }); } else if (body.action === "created") { - await githubData.fullSync({ - connectedAccount: accountLogin, - installationStatus: "connected", - installationId: body.installation?.id ?? null, - githubLogin: accountLogin, - kind, - label: "Syncing GitHub data from installation webhook...", - }); + void githubData + .syncRepos({ + connectedAccount: accountLogin, + installationStatus: "connected", + installationId: body.installation?.id ?? null, + githubLogin: accountLogin, + kind, + label: "Syncing GitHub data from installation webhook...", + }) + .catch(() => {}); } else if (body.action === "suspend") { await githubData.clearState({ connectedAccount: accountLogin, @@ -1416,14 +982,16 @@ export const organizationAppActions = { label: "GitHub App installation suspended", }); } else if (body.action === "unsuspend") { - await githubData.fullSync({ - connectedAccount: accountLogin, - installationStatus: "connected", - installationId: body.installation?.id ?? 
null, - githubLogin: accountLogin, - kind, - label: "Resyncing GitHub data after unsuspend...", - }); + void githubData + .syncRepos({ + connectedAccount: accountLogin, + installationStatus: "connected", + installationId: body.installation?.id ?? null, + githubLogin: accountLogin, + kind, + label: "Resyncing GitHub data after unsuspend...", + }) + .catch(() => {}); } return { ok: true }; } @@ -1440,14 +1008,16 @@ export const organizationAppActions = { }, "repository_membership_changed", ); - await githubData.fullSync({ - connectedAccount: accountLogin, - installationStatus: "connected", - installationId: body.installation?.id ?? null, - githubLogin: accountLogin, - kind, - label: "Resyncing GitHub data after repository access change...", - }); + void githubData + .syncRepos({ + connectedAccount: accountLogin, + installationStatus: "connected", + installationId: body.installation?.id ?? null, + githubLogin: accountLogin, + kind, + label: "Resyncing GitHub data after repository access change...", + }) + .catch(() => {}); return { ok: true }; } @@ -1486,6 +1056,7 @@ export const organizationAppActions = { }, pullRequest: { number: body.pull_request.number, + status: body.pull_request.draft ? "draft" : "ready", title: body.pull_request.title ?? "", body: body.pull_request.body ?? null, state: body.pull_request.state ?? "open", @@ -1520,422 +1091,321 @@ export const organizationAppActions = { ); return { ok: true }; }, +}; - async syncOrganizationShellFromGithub( - c: any, - input: { - userId: string; - userName: string; - userEmail: string; - githubUserLogin: string; - githubAccountId: string; - githubLogin: string; - githubAccountType: string; - kind: FoundryOrganization["kind"]; - displayName: string; - installationId: number | null; - appConfigured: boolean; - }, - ): Promise<{ organizationId: string }> { - assertOrganizationShell(c); - const now = Date.now(); - const existing = await readOrganizationProfileRow(c); - const slug = existing?.slug ?? 
slugify(input.githubLogin); - const organizationId = organizationOrganizationId(input.kind, input.githubLogin); - if (organizationId !== c.state.organizationId) { - throw new Error(`Organization actor mismatch: actor=${c.state.organizationId} github=${organizationId}`); - } +export async function syncOrganizationShellFromGithubMutation( + c: any, + input: { + userId: string; + userName: string; + userEmail: string; + githubUserLogin: string; + githubAccountId: string; + githubLogin: string; + githubAccountType: string; + kind: FoundryOrganization["kind"]; + displayName: string; + installationId: number | null; + appConfigured: boolean; + }, +): Promise<{ organizationId: string }> { + assertOrganizationShell(c); + const now = Date.now(); + const existing = await readOrganizationProfileRow(c); + const slug = existing?.slug ?? slugify(input.githubLogin); + const organizationId = organizationOrganizationId(input.kind, input.githubLogin); + if (organizationId !== c.state.organizationId) { + throw new Error(`Organization actor mismatch: actor=${c.state.organizationId} github=${organizationId}`); + } - const installationStatus = - input.kind === "personal" ? "connected" : input.installationId ? "connected" : input.appConfigured ? "install_required" : "reconnect_required"; - const syncStatus = existing?.githubSyncStatus ?? legacyRepoImportStatusToGithubSyncStatus(existing?.repoImportStatus); - const lastSyncLabel = - syncStatus === "synced" - ? existing.githubLastSyncLabel - : installationStatus === "connected" - ? "Waiting for first import" - : installationStatus === "install_required" - ? "GitHub App installation required" - : "GitHub App configuration incomplete"; - const hasStripeBillingState = Boolean(existing?.stripeCustomerId || existing?.stripeSubscriptionId || existing?.stripePriceId); - const defaultBillingPlanId = input.kind === "personal" || !hasStripeBillingState ? "free" : (existing?.billingPlanId ?? 
"team"); - const defaultSeatsIncluded = input.kind === "personal" || !hasStripeBillingState ? 1 : (existing?.billingSeatsIncluded ?? 5); - const defaultPaymentMethodLabel = - input.kind === "personal" - ? "No card required" - : hasStripeBillingState - ? (existing?.billingPaymentMethodLabel ?? "Payment method on file") - : "No payment method on file"; + const installationStatus = + input.kind === "personal" ? "connected" : input.installationId ? "connected" : input.appConfigured ? "install_required" : "reconnect_required"; + const syncStatus = existing?.githubSyncStatus ?? legacyRepoImportStatusToGithubSyncStatus(existing?.repoImportStatus); + const lastSyncLabel = + syncStatus === "synced" + ? existing.githubLastSyncLabel + : installationStatus === "connected" + ? "Waiting for first import" + : installationStatus === "install_required" + ? "GitHub App installation required" + : "GitHub App configuration incomplete"; + const hasStripeBillingState = Boolean(existing?.stripeCustomerId || existing?.stripeSubscriptionId || existing?.stripePriceId); + const defaultBillingPlanId = input.kind === "personal" || !hasStripeBillingState ? "free" : (existing?.billingPlanId ?? "team"); + const defaultSeatsIncluded = input.kind === "personal" || !hasStripeBillingState ? 1 : (existing?.billingSeatsIncluded ?? 5); + const defaultPaymentMethodLabel = + input.kind === "personal" + ? "No card required" + : hasStripeBillingState + ? (existing?.billingPaymentMethodLabel ?? "Payment method on file") + : "No payment method on file"; - await c.db - .insert(organizationProfile) - .values({ - id: PROFILE_ROW_ID, + await c.db + .insert(organizationProfile) + .values({ + id: PROFILE_ROW_ID, + kind: input.kind, + githubAccountId: input.githubAccountId, + githubLogin: input.githubLogin, + githubAccountType: input.githubAccountType, + displayName: input.displayName, + slug, + defaultModel: existing?.defaultModel ?? DEFAULT_WORKSPACE_MODEL_ID, + primaryDomain: existing?.primaryDomain ?? 
(input.kind === "personal" ? "personal" : `${slug}.github`), + autoImportRepos: existing?.autoImportRepos ?? 1, + repoImportStatus: existing?.repoImportStatus ?? "not_started", + githubConnectedAccount: input.githubLogin, + githubInstallationStatus: installationStatus, + githubSyncStatus: syncStatus, + githubInstallationId: input.installationId, + githubLastSyncLabel: lastSyncLabel, + githubLastSyncAt: existing?.githubLastSyncAt ?? null, + githubSyncGeneration: existing?.githubSyncGeneration ?? 0, + githubSyncPhase: existing?.githubSyncPhase ?? null, + githubProcessedRepositoryCount: existing?.githubProcessedRepositoryCount ?? 0, + githubTotalRepositoryCount: existing?.githubTotalRepositoryCount ?? 0, + stripeCustomerId: existing?.stripeCustomerId ?? null, + stripeSubscriptionId: existing?.stripeSubscriptionId ?? null, + stripePriceId: existing?.stripePriceId ?? null, + billingPlanId: defaultBillingPlanId, + billingStatus: existing?.billingStatus ?? "active", + billingSeatsIncluded: defaultSeatsIncluded, + billingTrialEndsAt: existing?.billingTrialEndsAt ?? null, + billingRenewalAt: existing?.billingRenewalAt ?? null, + billingPaymentMethodLabel: defaultPaymentMethodLabel, + createdAt: existing?.createdAt ?? now, + updatedAt: now, + }) + .onConflictDoUpdate({ + target: organizationProfile.id, + set: { kind: input.kind, githubAccountId: input.githubAccountId, githubLogin: input.githubLogin, githubAccountType: input.githubAccountType, displayName: input.displayName, - slug, - primaryDomain: existing?.primaryDomain ?? (input.kind === "personal" ? "personal" : `${slug}.github`), - defaultModel: existing?.defaultModel ?? "claude-sonnet-4", - autoImportRepos: existing?.autoImportRepos ?? 1, - repoImportStatus: existing?.repoImportStatus ?? 
"not_started", githubConnectedAccount: input.githubLogin, githubInstallationStatus: installationStatus, githubSyncStatus: syncStatus, githubInstallationId: input.installationId, githubLastSyncLabel: lastSyncLabel, githubLastSyncAt: existing?.githubLastSyncAt ?? null, - stripeCustomerId: existing?.stripeCustomerId ?? null, - stripeSubscriptionId: existing?.stripeSubscriptionId ?? null, - stripePriceId: existing?.stripePriceId ?? null, + githubSyncGeneration: existing?.githubSyncGeneration ?? 0, + githubSyncPhase: existing?.githubSyncPhase ?? null, + githubProcessedRepositoryCount: existing?.githubProcessedRepositoryCount ?? 0, + githubTotalRepositoryCount: existing?.githubTotalRepositoryCount ?? 0, billingPlanId: defaultBillingPlanId, - billingStatus: existing?.billingStatus ?? "active", billingSeatsIncluded: defaultSeatsIncluded, - billingTrialEndsAt: existing?.billingTrialEndsAt ?? null, - billingRenewalAt: existing?.billingRenewalAt ?? null, billingPaymentMethodLabel: defaultPaymentMethodLabel, - createdAt: existing?.createdAt ?? now, updatedAt: now, - }) - .onConflictDoUpdate({ - target: organizationProfile.id, - set: { - kind: input.kind, - githubAccountId: input.githubAccountId, - githubLogin: input.githubLogin, - githubAccountType: input.githubAccountType, - displayName: input.displayName, - githubConnectedAccount: input.githubLogin, - githubInstallationStatus: installationStatus, - githubSyncStatus: syncStatus, - githubInstallationId: input.installationId, - githubLastSyncLabel: lastSyncLabel, - githubLastSyncAt: existing?.githubLastSyncAt ?? 
null, - billingPlanId: defaultBillingPlanId, - billingSeatsIncluded: defaultSeatsIncluded, - billingPaymentMethodLabel: defaultPaymentMethodLabel, - updatedAt: now, - }, - }) - .run(); + }, + }) + .run(); - await c.db - .insert(organizationMembers) - .values({ - id: input.userId, + await c.db + .insert(organizationMembers) + .values({ + id: input.userId, + name: input.userName, + email: input.userEmail, + role: input.kind === "personal" ? "owner" : "admin", + state: "active", + updatedAt: now, + }) + .onConflictDoUpdate({ + target: organizationMembers.id, + set: { name: input.userName, email: input.userEmail, role: input.kind === "personal" ? "owner" : "admin", state: "active", updatedAt: now, + }, + }) + .run(); + + // Auto-trigger github-data sync when the org has a connected installation + // but hasn't synced yet. This handles the common case where a personal + // account or an org with an existing GitHub App installation signs in for + // the first time on a fresh DB — the installation webhook already fired + // before the org actor existed, so we kick off the sync here instead. + const needsInitialSync = installationStatus === "connected" && syncStatus === "pending"; + if (needsInitialSync) { + const githubData = await getOrCreateGithubData(c, organizationId); + void githubData + .syncRepos({ + connectedAccount: input.githubLogin, + installationStatus: "connected", + installationId: input.installationId, + githubLogin: input.githubLogin, + kind: input.kind, + label: "Initial repository sync...", }) - .onConflictDoUpdate({ - target: organizationMembers.id, - set: { - name: input.userName, - email: input.userEmail, - role: input.kind === "personal" ? 
"owner" : "admin", - state: "active", - updatedAt: now, - }, - }) - .run(); + .catch(() => {}); + } - return { organizationId }; - }, + return { organizationId }; +} - async getOrganizationShellState(c: any): Promise { - assertOrganizationShell(c); - return await buildOrganizationState(c); - }, - - async getOrganizationShellStateIfInitialized(c: any): Promise { - assertOrganizationShell(c); - return await buildOrganizationStateIfInitialized(c); - }, - - async updateOrganizationShellProfile(c: any, input: Pick): Promise { - assertOrganizationShell(c); - const existing = await requireOrganizationProfileRow(c); - await c.db - .update(organizationProfile) - .set({ - displayName: input.displayName.trim() || existing.displayName, - slug: input.slug.trim() || existing.slug, - primaryDomain: input.primaryDomain.trim() || existing.primaryDomain, - updatedAt: Date.now(), - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, - - async markOrganizationSyncStarted(c: any, input: { label: string }): Promise { - assertOrganizationShell(c); - await c.db - .update(organizationProfile) - .set({ - githubSyncStatus: "syncing", - githubLastSyncLabel: input.label, - updatedAt: Date.now(), - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, - - async applyOrganizationSyncCompleted( - c: any, - input: { - repositories: Array<{ fullName: string; cloneUrl: string; private: boolean }>; - installationStatus: FoundryOrganization["github"]["installationStatus"]; - lastSyncLabel: string; - }, - ): Promise { - assertOrganizationShell(c); - const now = Date.now(); - for (const repository of input.repositories) { - const remoteUrl = repository.cloneUrl; - await c.db - .insert(repos) - .values({ - repoId: repoIdFromRemote(remoteUrl), - remoteUrl, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: repos.repoId, - set: { - remoteUrl, - updatedAt: now, - }, - }) - .run(); - } - await c.db - .update(organizationProfile) - .set({ - 
githubInstallationStatus: input.installationStatus, - githubSyncStatus: "synced", - githubLastSyncLabel: input.lastSyncLabel, - githubLastSyncAt: now, - updatedAt: now, - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, - - async markOrganizationSyncFailed(c: any, input: { message: string; installationStatus: FoundryOrganization["github"]["installationStatus"] }): Promise { - assertOrganizationShell(c); - await c.db - .update(organizationProfile) - .set({ - githubInstallationStatus: input.installationStatus, - githubSyncStatus: "error", - githubLastSyncLabel: input.message, - updatedAt: Date.now(), - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, - - async applyOrganizationStripeCustomer(c: any, input: { customerId: string }): Promise { - assertOrganizationShell(c); - await c.db - .update(organizationProfile) - .set({ - stripeCustomerId: input.customerId, - updatedAt: Date.now(), - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, - - async applyOrganizationStripeSubscription( - c: any, - input: { - subscription: { - id: string; - customerId: string; - priceId: string | null; - status: string; - cancelAtPeriodEnd: boolean; - currentPeriodEnd: number | null; - trialEnd: number | null; - defaultPaymentMethodLabel: string; - }; - fallbackPlanId: FoundryBillingPlanId; - }, - ): Promise { - assertOrganizationShell(c); - const { appShell } = getActorRuntimeContext(); - const planId = appShell.stripe.planIdForPriceId(input.subscription.priceId ?? "") ?? 
input.fallbackPlanId; - await c.db - .update(organizationProfile) - .set({ - stripeCustomerId: input.subscription.customerId || null, - stripeSubscriptionId: input.subscription.id || null, - stripePriceId: input.subscription.priceId, - billingPlanId: planId, - billingStatus: stripeStatusToBillingStatus(input.subscription.status, input.subscription.cancelAtPeriodEnd), - billingSeatsIncluded: seatsIncludedForPlan(planId), - billingTrialEndsAt: input.subscription.trialEnd ? new Date(input.subscription.trialEnd * 1000).toISOString() : null, - billingRenewalAt: input.subscription.currentPeriodEnd ? new Date(input.subscription.currentPeriodEnd * 1000).toISOString() : null, - billingPaymentMethodLabel: input.subscription.defaultPaymentMethodLabel || "Payment method on file", - updatedAt: Date.now(), - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, - - async applyOrganizationFreePlan(c: any, input: { clearSubscription: boolean }): Promise { - assertOrganizationShell(c); - const patch: Record = { - billingPlanId: "free", - billingStatus: "active", - billingSeatsIncluded: 1, - billingTrialEndsAt: null, - billingRenewalAt: null, - billingPaymentMethodLabel: "No card required", +export async function updateOrganizationShellProfileMutation( + c: any, + input: Pick, +): Promise { + assertOrganizationShell(c); + const existing = await requireOrganizationProfileRow(c); + await c.db + .update(organizationProfile) + .set({ + displayName: input.displayName.trim() || existing.displayName, + slug: input.slug.trim() || existing.slug, + primaryDomain: input.primaryDomain.trim() || existing.primaryDomain, updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, PROFILE_ROW_ID)) + .run(); +} + +export async function markOrganizationSyncStartedMutation(c: any, input: { label: string }): Promise { + assertOrganizationShell(c); + await c.db + .update(organizationProfile) + .set({ + githubSyncStatus: "syncing", + githubLastSyncLabel: input.label, + 
githubSyncPhase: "discovering_repositories", + githubProcessedRepositoryCount: 0, + githubTotalRepositoryCount: 0, + updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, PROFILE_ROW_ID)) + .run(); +} + +export async function applyOrganizationStripeCustomerMutation(c: any, input: { customerId: string }): Promise { + assertOrganizationShell(c); + await c.db + .update(organizationProfile) + .set({ + stripeCustomerId: input.customerId, + updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, PROFILE_ROW_ID)) + .run(); +} + +export async function applyOrganizationStripeSubscriptionMutation( + c: any, + input: { + subscription: { + id: string; + customerId: string; + priceId: string | null; + status: string; + cancelAtPeriodEnd: boolean; + currentPeriodEnd: number | null; + trialEnd: number | null; + defaultPaymentMethodLabel: string; }; - if (input.clearSubscription) { - patch.stripeSubscriptionId = null; - patch.stripePriceId = null; - } - await c.db.update(organizationProfile).set(patch).where(eq(organizationProfile.id, PROFILE_ROW_ID)).run(); + fallbackPlanId: FoundryBillingPlanId; }, +): Promise { + assertOrganizationShell(c); + const { appShell } = getActorRuntimeContext(); + const planId = appShell.stripe.planIdForPriceId(input.subscription.priceId ?? "") ?? input.fallbackPlanId; + await c.db + .update(organizationProfile) + .set({ + stripeCustomerId: input.subscription.customerId || null, + stripeSubscriptionId: input.subscription.id || null, + stripePriceId: input.subscription.priceId, + billingPlanId: planId, + billingStatus: stripeStatusToBillingStatus(input.subscription.status, input.subscription.cancelAtPeriodEnd), + billingSeatsIncluded: seatsIncludedForPlan(planId), + billingTrialEndsAt: input.subscription.trialEnd ? new Date(input.subscription.trialEnd * 1000).toISOString() : null, + billingRenewalAt: input.subscription.currentPeriodEnd ? 
new Date(input.subscription.currentPeriodEnd * 1000).toISOString() : null, + billingPaymentMethodLabel: input.subscription.defaultPaymentMethodLabel || "Payment method on file", + updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, PROFILE_ROW_ID)) + .run(); +} - async setOrganizationBillingPaymentMethod(c: any, input: { label: string }): Promise { - assertOrganizationShell(c); - await c.db - .update(organizationProfile) - .set({ - billingPaymentMethodLabel: input.label, - updatedAt: Date.now(), - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, +export async function applyOrganizationFreePlanMutation(c: any, input: { clearSubscription: boolean }): Promise { + assertOrganizationShell(c); + const patch: Record = { + billingPlanId: "free", + billingStatus: "active", + billingSeatsIncluded: 1, + billingTrialEndsAt: null, + billingRenewalAt: null, + billingPaymentMethodLabel: "No card required", + updatedAt: Date.now(), + }; + if (input.clearSubscription) { + patch.stripeSubscriptionId = null; + patch.stripePriceId = null; + } + await c.db.update(organizationProfile).set(patch).where(eq(organizationProfile.id, PROFILE_ROW_ID)).run(); +} - async setOrganizationBillingStatus(c: any, input: { status: FoundryBillingState["status"] }): Promise { - assertOrganizationShell(c); - await c.db - .update(organizationProfile) - .set({ - billingStatus: input.status, - updatedAt: Date.now(), - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, +export async function setOrganizationBillingPaymentMethodMutation(c: any, input: { label: string }): Promise { + assertOrganizationShell(c); + await c.db + .update(organizationProfile) + .set({ + billingPaymentMethodLabel: input.label, + updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, PROFILE_ROW_ID)) + .run(); +} - async upsertOrganizationInvoice(c: any, input: { id: string; label: string; issuedAt: string; amountUsd: number; status: "paid" | "open" }): Promise { - 
assertOrganizationShell(c); - await c.db - .insert(invoices) - .values({ - id: input.id, +export async function setOrganizationBillingStatusMutation(c: any, input: { status: FoundryBillingState["status"] }): Promise { + assertOrganizationShell(c); + await c.db + .update(organizationProfile) + .set({ + billingStatus: input.status, + updatedAt: Date.now(), + }) + .where(eq(organizationProfile.id, PROFILE_ROW_ID)) + .run(); +} + +export async function upsertOrganizationInvoiceMutation( + c: any, + input: { id: string; label: string; issuedAt: string; amountUsd: number; status: "paid" | "open" }, +): Promise { + assertOrganizationShell(c); + await c.db + .insert(invoices) + .values({ + id: input.id, + label: input.label, + issuedAt: input.issuedAt, + amountUsd: input.amountUsd, + status: input.status, + createdAt: Date.now(), + }) + .onConflictDoUpdate({ + target: invoices.id, + set: { label: input.label, issuedAt: input.issuedAt, amountUsd: input.amountUsd, status: input.status, - createdAt: Date.now(), - }) - .onConflictDoUpdate({ - target: invoices.id, - set: { - label: input.label, - issuedAt: input.issuedAt, - amountUsd: input.amountUsd, - status: input.status, - }, - }) - .run(); - }, + }, + }) + .run(); +} - async recordOrganizationSeatUsage(c: any, input: { email: string }): Promise { - assertOrganizationShell(c); - await c.db - .insert(seatAssignments) - .values({ - email: input.email, - createdAt: Date.now(), - }) - .onConflictDoNothing() - .run(); - }, - - async applyGithubInstallationCreated(c: any, input: { installationId: number }): Promise { - assertOrganizationShell(c); - await c.db - .update(organizationProfile) - .set({ - githubInstallationId: input.installationId, - githubInstallationStatus: "connected", - updatedAt: Date.now(), - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, - - async applyGithubInstallationRemoved(c: any, _input: {}): Promise { - assertOrganizationShell(c); - await c.db - .update(organizationProfile) - 
.set({ - githubInstallationId: null, - githubInstallationStatus: "install_required", - githubSyncStatus: "pending", - githubLastSyncLabel: "GitHub App installation removed", - updatedAt: Date.now(), - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, - - async applyGithubRepositoryChanges(c: any, input: { added: Array<{ fullName: string; private: boolean }>; removed: string[] }): Promise { - assertOrganizationShell(c); - const now = Date.now(); - - for (const repo of input.added) { - const remoteUrl = `https://github.com/${repo.fullName}.git`; - const repoId = repoIdFromRemote(remoteUrl); - await c.db - .insert(repos) - .values({ - repoId, - remoteUrl, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: repos.repoId, - set: { - remoteUrl, - updatedAt: now, - }, - }) - .run(); - } - - for (const fullName of input.removed) { - const remoteUrl = `https://github.com/${fullName}.git`; - const repoId = repoIdFromRemote(remoteUrl); - await c.db.delete(repos).where(eq(repos.repoId, repoId)).run(); - } - - const repoCount = (await c.db.select().from(repos).all()).length; - await c.db - .update(organizationProfile) - .set({ - githubSyncStatus: "synced", - githubLastSyncLabel: `${repoCount} repositories synced`, - githubLastSyncAt: now, - updatedAt: now, - }) - .where(eq(organizationProfile.id, PROFILE_ROW_ID)) - .run(); - }, -}; +export async function recordOrganizationSeatUsageMutation(c: any, input: { email: string }): Promise { + assertOrganizationShell(c); + await c.db + .insert(seatAssignments) + .values({ + email: input.email, + createdAt: Date.now(), + }) + .onConflictDoNothing() + .run(); +} diff --git a/foundry/packages/backend/src/actors/organization/constants.ts b/foundry/packages/backend/src/actors/organization/constants.ts new file mode 100644 index 0000000..0b8e3c0 --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/constants.ts @@ -0,0 +1 @@ +export const APP_SHELL_ORGANIZATION_ID = "app"; diff --git 
a/foundry/packages/backend/src/actors/organization/db/drizzle/0000_melted_viper.sql b/foundry/packages/backend/src/actors/organization/db/drizzle/0000_melted_viper.sql index 09b77f9..80be04f 100644 --- a/foundry/packages/backend/src/actors/organization/db/drizzle/0000_melted_viper.sql +++ b/foundry/packages/backend/src/actors/organization/db/drizzle/0000_melted_viper.sql @@ -56,6 +56,10 @@ CREATE TABLE `organization_profile` ( `github_last_sync_at` integer, `github_last_webhook_at` integer, `github_last_webhook_event` text, + `github_sync_generation` integer NOT NULL, + `github_sync_phase` text, + `github_processed_repository_count` integer NOT NULL, + `github_total_repository_count` integer NOT NULL, `stripe_customer_id` text, `stripe_subscription_id` text, `stripe_price_id` text, @@ -86,8 +90,3 @@ CREATE TABLE `stripe_lookup` ( `organization_id` text NOT NULL, `updated_at` integer NOT NULL ); ---> statement-breakpoint -CREATE TABLE `task_lookup` ( - `task_id` text PRIMARY KEY NOT NULL, - `repo_id` text NOT NULL -); diff --git a/foundry/packages/backend/src/actors/organization/db/drizzle/0001_add_auth_and_task_tables.sql b/foundry/packages/backend/src/actors/organization/db/drizzle/0001_add_auth_and_task_tables.sql new file mode 100644 index 0000000..74d63ef --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/db/drizzle/0001_add_auth_and_task_tables.sql @@ -0,0 +1,50 @@ +CREATE TABLE `auth_session_index` ( + `session_id` text PRIMARY KEY NOT NULL, + `session_token` text NOT NULL, + `user_id` text NOT NULL, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE `auth_email_index` ( + `email` text PRIMARY KEY NOT NULL, + `user_id` text NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE `auth_account_index` ( + `id` text PRIMARY KEY NOT NULL, + `provider_id` text NOT NULL, + `account_id` text NOT NULL, + `user_id` text NOT NULL, + `updated_at` integer NOT NULL 
+); +--> statement-breakpoint +CREATE TABLE `auth_verification` ( + `id` text PRIMARY KEY NOT NULL, + `identifier` text NOT NULL, + `value` text NOT NULL, + `expires_at` integer NOT NULL, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE `task_index` ( + `task_id` text PRIMARY KEY NOT NULL, + `repo_id` text NOT NULL, + `branch_name` text, + `created_at` integer NOT NULL, + `updated_at` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE `task_summaries` ( + `task_id` text PRIMARY KEY NOT NULL, + `repo_id` text NOT NULL, + `title` text NOT NULL, + `status` text NOT NULL, + `repo_name` text NOT NULL, + `updated_at_ms` integer NOT NULL, + `branch` text, + `pull_request_json` text, + `sessions_summary_json` text DEFAULT '[]' NOT NULL +); diff --git a/foundry/packages/backend/src/actors/organization/db/drizzle/meta/0000_snapshot.json b/foundry/packages/backend/src/actors/organization/db/drizzle/meta/0000_snapshot.json index cdcc44c..a29c546 100644 --- a/foundry/packages/backend/src/actors/organization/db/drizzle/meta/0000_snapshot.json +++ b/foundry/packages/backend/src/actors/organization/db/drizzle/meta/0000_snapshot.json @@ -373,6 +373,34 @@ "notNull": false, "autoincrement": false }, + "github_sync_generation": { + "name": "github_sync_generation", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "github_sync_phase": { + "name": "github_sync_phase", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "github_processed_repository_count": { + "name": "github_processed_repository_count", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "github_total_repository_count": { + "name": "github_total_repository_count", + "type": "integer", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, "stripe_customer_id": { "name": "stripe_customer_id", "type": "text", 
@@ -549,30 +577,6 @@ "compositePrimaryKeys": {}, "uniqueConstraints": {}, "checkConstraints": {} - }, - "task_lookup": { - "name": "task_lookup", - "columns": { - "task_id": { - "name": "task_id", - "type": "text", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "repo_id": { - "name": "repo_id", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {}, - "checkConstraints": {} } }, "views": {}, diff --git a/foundry/packages/backend/src/actors/organization/db/drizzle/meta/_journal.json b/foundry/packages/backend/src/actors/organization/db/drizzle/meta/_journal.json index e3668a1..41ea23b 100644 --- a/foundry/packages/backend/src/actors/organization/db/drizzle/meta/_journal.json +++ b/foundry/packages/backend/src/actors/organization/db/drizzle/meta/_journal.json @@ -8,6 +8,13 @@ "when": 1773376221152, "tag": "0000_melted_viper", "breakpoints": true + }, + { + "idx": 1, + "version": "6", + "when": 1773840000000, + "tag": "0001_add_auth_and_task_tables", + "breakpoints": true } ] } diff --git a/foundry/packages/backend/src/actors/organization/db/migrations.ts b/foundry/packages/backend/src/actors/organization/db/migrations.ts index b3e09f1..a7e8abc 100644 --- a/foundry/packages/backend/src/actors/organization/db/migrations.ts +++ b/foundry/packages/backend/src/actors/organization/db/migrations.ts @@ -12,20 +12,8 @@ const journal = { }, { idx: 1, - when: 1773638400000, - tag: "0001_auth_index_tables", - breakpoints: true, - }, - { - idx: 2, - when: 1773720000000, - tag: "0002_task_summaries", - breakpoints: true, - }, - { - idx: 3, - when: 1773810001000, - tag: "0003_drop_provider_profiles", + when: 1773840000000, + tag: "0001_add_auth_and_task_tables", breakpoints: true, }, ], @@ -92,6 +80,10 @@ CREATE TABLE \`organization_profile\` ( \`github_last_sync_at\` integer, \`github_last_webhook_at\` integer, 
\`github_last_webhook_event\` text, + \`github_sync_generation\` integer NOT NULL, + \`github_sync_phase\` text, + \`github_processed_repository_count\` integer NOT NULL, + \`github_total_repository_count\` integer NOT NULL, \`stripe_customer_id\` text, \`stripe_subscription_id\` text, \`stripe_price_id\` text, @@ -122,13 +114,8 @@ CREATE TABLE \`stripe_lookup\` ( \`organization_id\` text NOT NULL, \`updated_at\` integer NOT NULL ); ---> statement-breakpoint -CREATE TABLE \`task_lookup\` ( - \`task_id\` text PRIMARY KEY NOT NULL, - \`repo_id\` text NOT NULL -); `, - m0001: `CREATE TABLE IF NOT EXISTS \`auth_session_index\` ( + m0001: `CREATE TABLE \`auth_session_index\` ( \`session_id\` text PRIMARY KEY NOT NULL, \`session_token\` text NOT NULL, \`user_id\` text NOT NULL, @@ -136,13 +123,13 @@ CREATE TABLE \`task_lookup\` ( \`updated_at\` integer NOT NULL ); --> statement-breakpoint -CREATE TABLE IF NOT EXISTS \`auth_email_index\` ( +CREATE TABLE \`auth_email_index\` ( \`email\` text PRIMARY KEY NOT NULL, \`user_id\` text NOT NULL, \`updated_at\` integer NOT NULL ); --> statement-breakpoint -CREATE TABLE IF NOT EXISTS \`auth_account_index\` ( +CREATE TABLE \`auth_account_index\` ( \`id\` text PRIMARY KEY NOT NULL, \`provider_id\` text NOT NULL, \`account_id\` text NOT NULL, @@ -150,7 +137,7 @@ CREATE TABLE IF NOT EXISTS \`auth_account_index\` ( \`updated_at\` integer NOT NULL ); --> statement-breakpoint -CREATE TABLE IF NOT EXISTS \`auth_verification\` ( +CREATE TABLE \`auth_verification\` ( \`id\` text PRIMARY KEY NOT NULL, \`identifier\` text NOT NULL, \`value\` text NOT NULL, @@ -158,8 +145,16 @@ CREATE TABLE IF NOT EXISTS \`auth_verification\` ( \`created_at\` integer NOT NULL, \`updated_at\` integer NOT NULL ); -`, - m0002: `CREATE TABLE IF NOT EXISTS \`task_summaries\` ( +--> statement-breakpoint +CREATE TABLE \`task_index\` ( + \`task_id\` text PRIMARY KEY NOT NULL, + \`repo_id\` text NOT NULL, + \`branch_name\` text, + \`created_at\` integer NOT NULL, + 
\`updated_at\` integer NOT NULL +); +--> statement-breakpoint +CREATE TABLE \`task_summaries\` ( \`task_id\` text PRIMARY KEY NOT NULL, \`repo_id\` text NOT NULL, \`title\` text NOT NULL, @@ -170,8 +165,6 @@ CREATE TABLE IF NOT EXISTS \`auth_verification\` ( \`pull_request_json\` text, \`sessions_summary_json\` text DEFAULT '[]' NOT NULL ); -`, - m0003: `DROP TABLE IF EXISTS \`provider_profiles\`; `, } as const, }; diff --git a/foundry/packages/backend/src/actors/organization/db/schema.ts b/foundry/packages/backend/src/actors/organization/db/schema.ts index dd4fa40..5071a25 100644 --- a/foundry/packages/backend/src/actors/organization/db/schema.ts +++ b/foundry/packages/backend/src/actors/organization/db/schema.ts @@ -1,34 +1,34 @@ -import { integer, sqliteTable, text } from "rivetkit/db/drizzle"; +import { check, integer, sqliteTable, text } from "rivetkit/db/drizzle"; +import { sql } from "drizzle-orm"; +import { DEFAULT_WORKSPACE_MODEL_ID } from "@sandbox-agent/foundry-shared"; // SQLite is per organization actor instance, so no organizationId column needed. /** - * Coordinator index of RepositoryActor instances. - * The organization actor is the coordinator for repositories. - * Rows are created/removed when repos are added/removed from the organization. + * Coordinator index of TaskActor instances. + * The organization actor is the direct coordinator for tasks (not a per-repo + * actor) because the sidebar needs to query all tasks across all repos on + * every snapshot. With many repos, fanning out to N repo actors on the hot + * read path is too expensive — owning the index here keeps that a single + * local table scan. Each row maps a taskId to its repo and immutable branch + * name. Used for branch conflict checking (scoped by repoId) and + * task-by-branch lookups. 
*/ -export const repos = sqliteTable("repos", { - repoId: text("repo_id").notNull().primaryKey(), - remoteUrl: text("remote_url").notNull(), +export const taskIndex = sqliteTable("task_index", { + taskId: text("task_id").notNull().primaryKey(), + repoId: text("repo_id").notNull(), + branchName: text("branch_name"), createdAt: integer("created_at").notNull(), updatedAt: integer("updated_at").notNull(), }); /** - * Coordinator index of TaskActor instances. - * Fast taskId → repoId lookup so the organization can route requests - * to the correct RepositoryActor without scanning all repos. - */ -export const taskLookup = sqliteTable("task_lookup", { - taskId: text("task_id").notNull().primaryKey(), - repoId: text("repo_id").notNull(), -}); - -/** - * Coordinator index of TaskActor instances — materialized sidebar projection. - * Task actors push summary updates to the organization actor via - * applyTaskSummaryUpdate(). Source of truth lives on each TaskActor; - * this table exists so organization reads stay local without fan-out. + * Organization-owned materialized task summary projection. + * Task actors push summary updates directly to the organization coordinator, + * which keeps this table local for fast list/lookups without fan-out. + * Same rationale as taskIndex: the sidebar repeatedly reads all tasks across + * all repos, so the org must own the materialized view to avoid O(repos) + * actor fan-out on the hot read path. 
*/ export const taskSummaries = sqliteTable("task_summaries", { taskId: text("task_id").notNull().primaryKey(), @@ -42,38 +42,46 @@ export const taskSummaries = sqliteTable("task_summaries", { sessionsSummaryJson: text("sessions_summary_json").notNull().default("[]"), }); -export const organizationProfile = sqliteTable("organization_profile", { - id: text("id").notNull().primaryKey(), - kind: text("kind").notNull(), - githubAccountId: text("github_account_id").notNull(), - githubLogin: text("github_login").notNull(), - githubAccountType: text("github_account_type").notNull(), - displayName: text("display_name").notNull(), - slug: text("slug").notNull(), - primaryDomain: text("primary_domain").notNull(), - defaultModel: text("default_model").notNull(), - autoImportRepos: integer("auto_import_repos").notNull(), - repoImportStatus: text("repo_import_status").notNull(), - githubConnectedAccount: text("github_connected_account").notNull(), - githubInstallationStatus: text("github_installation_status").notNull(), - githubSyncStatus: text("github_sync_status").notNull(), - githubInstallationId: integer("github_installation_id"), - githubLastSyncLabel: text("github_last_sync_label").notNull(), - githubLastSyncAt: integer("github_last_sync_at"), - githubLastWebhookAt: integer("github_last_webhook_at"), - githubLastWebhookEvent: text("github_last_webhook_event"), - stripeCustomerId: text("stripe_customer_id"), - stripeSubscriptionId: text("stripe_subscription_id"), - stripePriceId: text("stripe_price_id"), - billingPlanId: text("billing_plan_id").notNull(), - billingStatus: text("billing_status").notNull(), - billingSeatsIncluded: integer("billing_seats_included").notNull(), - billingTrialEndsAt: text("billing_trial_ends_at"), - billingRenewalAt: text("billing_renewal_at"), - billingPaymentMethodLabel: text("billing_payment_method_label").notNull(), - createdAt: integer("created_at").notNull(), - updatedAt: integer("updated_at").notNull(), -}); +export const 
organizationProfile = sqliteTable( + "organization_profile", + { + id: integer("id").primaryKey(), + kind: text("kind").notNull(), + githubAccountId: text("github_account_id").notNull(), + githubLogin: text("github_login").notNull(), + githubAccountType: text("github_account_type").notNull(), + displayName: text("display_name").notNull(), + slug: text("slug").notNull(), + defaultModel: text("default_model").notNull().default(DEFAULT_WORKSPACE_MODEL_ID), + primaryDomain: text("primary_domain").notNull(), + autoImportRepos: integer("auto_import_repos").notNull(), + repoImportStatus: text("repo_import_status").notNull(), + githubConnectedAccount: text("github_connected_account").notNull(), + githubInstallationStatus: text("github_installation_status").notNull(), + githubSyncStatus: text("github_sync_status").notNull(), + githubInstallationId: integer("github_installation_id"), + githubLastSyncLabel: text("github_last_sync_label").notNull(), + githubLastSyncAt: integer("github_last_sync_at"), + githubLastWebhookAt: integer("github_last_webhook_at"), + githubLastWebhookEvent: text("github_last_webhook_event"), + githubSyncGeneration: integer("github_sync_generation").notNull(), + githubSyncPhase: text("github_sync_phase"), + githubProcessedRepositoryCount: integer("github_processed_repository_count").notNull(), + githubTotalRepositoryCount: integer("github_total_repository_count").notNull(), + stripeCustomerId: text("stripe_customer_id"), + stripeSubscriptionId: text("stripe_subscription_id"), + stripePriceId: text("stripe_price_id"), + billingPlanId: text("billing_plan_id").notNull(), + billingStatus: text("billing_status").notNull(), + billingSeatsIncluded: integer("billing_seats_included").notNull(), + billingTrialEndsAt: text("billing_trial_ends_at"), + billingRenewalAt: text("billing_renewal_at"), + billingPaymentMethodLabel: text("billing_payment_method_label").notNull(), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), + 
}, + (table) => [check("organization_profile_singleton_id_check", sql`${table.id} = 1`)], +); export const organizationMembers = sqliteTable("organization_members", { id: text("id").notNull().primaryKey(), @@ -133,6 +141,7 @@ export const authAccountIndex = sqliteTable("auth_account_index", { updatedAt: integer("updated_at").notNull(), }); +/** Better Auth core model — schema defined at https://better-auth.com/docs/concepts/database */ export const authVerification = sqliteTable("auth_verification", { id: text("id").notNull().primaryKey(), identifier: text("identifier").notNull(), diff --git a/foundry/packages/backend/src/actors/organization/index.ts b/foundry/packages/backend/src/actors/organization/index.ts index 1ea0196..1bd8896 100644 --- a/foundry/packages/backend/src/actors/organization/index.ts +++ b/foundry/packages/backend/src/actors/organization/index.ts @@ -1,11 +1,10 @@ -import { actor, queue } from "rivetkit"; -import { workflow } from "rivetkit/workflow"; +import { actor } from "rivetkit"; import { organizationDb } from "./db/db.js"; -import { runOrganizationWorkflow, ORGANIZATION_QUEUE_NAMES, organizationActions } from "./actions.js"; +import { organizationActions } from "./actions.js"; +import { organizationCommandActions } from "./workflow.js"; export const organization = actor({ db: organizationDb, - queues: Object.fromEntries(ORGANIZATION_QUEUE_NAMES.map((name) => [name, queue()])), options: { name: "Organization", icon: "compass", @@ -14,6 +13,8 @@ export const organization = actor({ createState: (_c, organizationId: string) => ({ organizationId, }), - actions: organizationActions, - run: workflow(runOrganizationWorkflow), + actions: { + ...organizationActions, + ...organizationCommandActions, + }, }); diff --git a/foundry/packages/backend/src/actors/organization/queues.ts b/foundry/packages/backend/src/actors/organization/queues.ts new file mode 100644 index 0000000..f84e818 --- /dev/null +++ 
b/foundry/packages/backend/src/actors/organization/queues.ts @@ -0,0 +1,39 @@ +export const ORGANIZATION_QUEUE_NAMES = [ + "organization.command.createTask", + "organization.command.materializeTask", + "organization.command.registerTaskBranch", + "organization.command.applyTaskSummaryUpdate", + "organization.command.removeTaskSummary", + "organization.command.refreshTaskSummaryForBranch", + "organization.command.snapshot.broadcast", + "organization.command.syncGithubSession", + "organization.command.better_auth.session_index.upsert", + "organization.command.better_auth.session_index.delete", + "organization.command.better_auth.email_index.upsert", + "organization.command.better_auth.email_index.delete", + "organization.command.better_auth.account_index.upsert", + "organization.command.better_auth.account_index.delete", + "organization.command.better_auth.verification.create", + "organization.command.better_auth.verification.update", + "organization.command.better_auth.verification.update_many", + "organization.command.better_auth.verification.delete", + "organization.command.better_auth.verification.delete_many", + "organization.command.github.sync_progress.apply", + "organization.command.github.webhook_receipt.record", + "organization.command.github.organization_shell.sync_from_github", + "organization.command.shell.profile.update", + "organization.command.shell.sync_started.mark", + "organization.command.billing.stripe_customer.apply", + "organization.command.billing.stripe_subscription.apply", + "organization.command.billing.free_plan.apply", + "organization.command.billing.payment_method.set", + "organization.command.billing.status.set", + "organization.command.billing.invoice.upsert", + "organization.command.billing.seat_usage.record", +] as const; + +export type OrganizationQueueName = (typeof ORGANIZATION_QUEUE_NAMES)[number]; + +export function organizationWorkflowQueueName(name: OrganizationQueueName): OrganizationQueueName { + return name; +} diff --git 
a/foundry/packages/backend/src/actors/organization/workflow.ts b/foundry/packages/backend/src/actors/organization/workflow.ts new file mode 100644 index 0000000..189225b --- /dev/null +++ b/foundry/packages/backend/src/actors/organization/workflow.ts @@ -0,0 +1,163 @@ +// @ts-nocheck +/** + * Organization command actions — converted from queue handlers to direct actions. + * Each export becomes an action on the organization actor. + */ +import { applyGithubSyncProgressMutation, recordGithubWebhookReceiptMutation, refreshOrganizationSnapshotMutation } from "./actions.js"; +import { + applyTaskSummaryUpdateMutation, + createTaskMutation, + refreshTaskSummaryForBranchMutation, + registerTaskBranchMutation, + removeTaskSummaryMutation, +} from "./actions/task-mutations.js"; +import { + betterAuthCreateVerificationMutation, + betterAuthDeleteAccountIndexMutation, + betterAuthDeleteEmailIndexMutation, + betterAuthDeleteManyVerificationMutation, + betterAuthDeleteSessionIndexMutation, + betterAuthDeleteVerificationMutation, + betterAuthUpdateManyVerificationMutation, + betterAuthUpdateVerificationMutation, + betterAuthUpsertAccountIndexMutation, + betterAuthUpsertEmailIndexMutation, + betterAuthUpsertSessionIndexMutation, +} from "./actions/better-auth.js"; +import { + applyOrganizationFreePlanMutation, + applyOrganizationStripeCustomerMutation, + applyOrganizationStripeSubscriptionMutation, + markOrganizationSyncStartedMutation, + recordOrganizationSeatUsageMutation, + setOrganizationBillingPaymentMethodMutation, + setOrganizationBillingStatusMutation, + syncOrganizationShellFromGithubMutation, + updateOrganizationShellProfileMutation, + upsertOrganizationInvoiceMutation, +} from "./app-shell.js"; + +export const organizationCommandActions = { + async commandCreateTask(c: any, body: any) { + return await createTaskMutation(c, body); + }, + async commandMaterializeTask(c: any, body: any) { + return await createTaskMutation(c, body); + }, + async 
commandRegisterTaskBranch(c: any, body: any) { + return await registerTaskBranchMutation(c, body); + }, + async commandApplyTaskSummaryUpdate(c: any, body: any) { + await applyTaskSummaryUpdateMutation(c, body); + return { ok: true }; + }, + async commandRemoveTaskSummary(c: any, body: any) { + await removeTaskSummaryMutation(c, body); + return { ok: true }; + }, + async commandRefreshTaskSummaryForBranch(c: any, body: any) { + await refreshTaskSummaryForBranchMutation(c, body); + return { ok: true }; + }, + async commandBroadcastSnapshot(c: any, _body: any) { + await refreshOrganizationSnapshotMutation(c); + return { ok: true }; + }, + async commandSyncGithubSession(c: any, body: any) { + const { syncGithubOrganizations } = await import("./app-shell.js"); + await syncGithubOrganizations(c, body); + return { ok: true }; + }, + + // Better Auth index actions + async commandBetterAuthSessionIndexUpsert(c: any, body: any) { + return await betterAuthUpsertSessionIndexMutation(c, body); + }, + async commandBetterAuthSessionIndexDelete(c: any, body: any) { + await betterAuthDeleteSessionIndexMutation(c, body); + return { ok: true }; + }, + async commandBetterAuthEmailIndexUpsert(c: any, body: any) { + return await betterAuthUpsertEmailIndexMutation(c, body); + }, + async commandBetterAuthEmailIndexDelete(c: any, body: any) { + await betterAuthDeleteEmailIndexMutation(c, body); + return { ok: true }; + }, + async commandBetterAuthAccountIndexUpsert(c: any, body: any) { + return await betterAuthUpsertAccountIndexMutation(c, body); + }, + async commandBetterAuthAccountIndexDelete(c: any, body: any) { + await betterAuthDeleteAccountIndexMutation(c, body); + return { ok: true }; + }, + async commandBetterAuthVerificationCreate(c: any, body: any) { + return await betterAuthCreateVerificationMutation(c, body); + }, + async commandBetterAuthVerificationUpdate(c: any, body: any) { + return await betterAuthUpdateVerificationMutation(c, body); + }, + async 
commandBetterAuthVerificationUpdateMany(c: any, body: any) { + return await betterAuthUpdateManyVerificationMutation(c, body); + }, + async commandBetterAuthVerificationDelete(c: any, body: any) { + await betterAuthDeleteVerificationMutation(c, body); + return { ok: true }; + }, + async commandBetterAuthVerificationDeleteMany(c: any, body: any) { + return await betterAuthDeleteManyVerificationMutation(c, body); + }, + + // GitHub sync actions + async commandApplyGithubSyncProgress(c: any, body: any) { + await applyGithubSyncProgressMutation(c, body); + return { ok: true }; + }, + async commandRecordGithubWebhookReceipt(c: any, body: any) { + await recordGithubWebhookReceiptMutation(c, body); + return { ok: true }; + }, + async commandSyncOrganizationShellFromGithub(c: any, body: any) { + return await syncOrganizationShellFromGithubMutation(c, body); + }, + + // Shell/profile actions + async commandUpdateShellProfile(c: any, body: any) { + await updateOrganizationShellProfileMutation(c, body); + return { ok: true }; + }, + async commandMarkSyncStarted(c: any, body: any) { + await markOrganizationSyncStartedMutation(c, body); + return { ok: true }; + }, + + // Billing actions + async commandApplyStripeCustomer(c: any, body: any) { + await applyOrganizationStripeCustomerMutation(c, body); + return { ok: true }; + }, + async commandApplyStripeSubscription(c: any, body: any) { + await applyOrganizationStripeSubscriptionMutation(c, body); + return { ok: true }; + }, + async commandApplyFreePlan(c: any, body: any) { + await applyOrganizationFreePlanMutation(c, body); + return { ok: true }; + }, + async commandSetPaymentMethod(c: any, body: any) { + await setOrganizationBillingPaymentMethodMutation(c, body); + return { ok: true }; + }, + async commandSetBillingStatus(c: any, body: any) { + await setOrganizationBillingStatusMutation(c, body); + return { ok: true }; + }, + async commandUpsertInvoice(c: any, body: any) { + await upsertOrganizationInvoiceMutation(c, body); + 
return { ok: true }; + }, + async commandRecordSeatUsage(c: any, body: any) { + await recordOrganizationSeatUsageMutation(c, body); + return { ok: true }; + }, +}; diff --git a/foundry/packages/backend/src/actors/repository/actions.ts b/foundry/packages/backend/src/actors/repository/actions.ts deleted file mode 100644 index 9ef8e75..0000000 --- a/foundry/packages/backend/src/actors/repository/actions.ts +++ /dev/null @@ -1,557 +0,0 @@ -// @ts-nocheck -import { randomUUID } from "node:crypto"; -import { and, desc, eq, isNotNull, ne } from "drizzle-orm"; -import { Loop } from "rivetkit/workflow"; -import type { AgentType, RepoOverview, SandboxProviderId, TaskRecord, TaskSummary } from "@sandbox-agent/foundry-shared"; -import { getGithubData, getOrCreateHistory, getOrCreateTask, getTask, selfRepository } from "../handles.js"; -import { deriveFallbackTitle, resolveCreateFlowDecision } from "../../services/create-flow.js"; -import { expectQueueResponse } from "../../services/queue.js"; -import { isActorNotFoundError, logActorWarning, resolveErrorMessage } from "../logging.js"; -import { repoMeta, taskIndex } from "./db/schema.js"; - -interface CreateTaskCommand { - task: string; - sandboxProviderId: SandboxProviderId; - agentType: AgentType | null; - explicitTitle: string | null; - explicitBranchName: string | null; - initialPrompt: string | null; - onBranch: string | null; -} - -interface RegisterTaskBranchCommand { - taskId: string; - branchName: string; - requireExistingRemote?: boolean; -} - -interface ListTaskSummariesCommand { - includeArchived?: boolean; -} - -interface GetTaskEnrichedCommand { - taskId: string; -} - -interface GetPullRequestForBranchCommand { - branchName: string; -} - -const REPOSITORY_QUEUE_NAMES = ["repository.command.createTask", "repository.command.registerTaskBranch"] as const; - -type RepositoryQueueName = (typeof REPOSITORY_QUEUE_NAMES)[number]; - -export { REPOSITORY_QUEUE_NAMES }; - -export function repositoryWorkflowQueueName(name: 
RepositoryQueueName): RepositoryQueueName { - return name; -} - -function isStaleTaskReferenceError(error: unknown): boolean { - const message = resolveErrorMessage(error); - return isActorNotFoundError(error) || message.startsWith("Task not found:"); -} - -async function persistRemoteUrl(c: any, remoteUrl: string): Promise { - c.state.remoteUrl = remoteUrl; - await c.db - .insert(repoMeta) - .values({ - id: 1, - remoteUrl, - updatedAt: Date.now(), - }) - .onConflictDoUpdate({ - target: repoMeta.id, - set: { - remoteUrl, - updatedAt: Date.now(), - }, - }) - .run(); -} - -async function deleteStaleTaskIndexRow(c: any, taskId: string): Promise { - try { - await c.db.delete(taskIndex).where(eq(taskIndex.taskId, taskId)).run(); - } catch { - // Best effort cleanup only. - } -} - -async function reinsertTaskIndexRow(c: any, taskId: string, branchName: string | null, updatedAt: number): Promise { - const now = Date.now(); - await c.db - .insert(taskIndex) - .values({ - taskId, - branchName, - createdAt: updatedAt || now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: taskIndex.taskId, - set: { - branchName, - updatedAt: now, - }, - }) - .run(); -} - -async function listKnownTaskBranches(c: any): Promise { - const rows = await c.db.select({ branchName: taskIndex.branchName }).from(taskIndex).where(isNotNull(taskIndex.branchName)).all(); - return rows.map((row) => row.branchName).filter((value): value is string => typeof value === "string" && value.trim().length > 0); -} - -async function resolveGitHubRepository(c: any) { - const githubData = getGithubData(c, c.state.organizationId); - return await githubData.getRepository({ repoId: c.state.repoId }).catch(() => null); -} - -async function listGitHubBranches(c: any): Promise> { - const githubData = getGithubData(c, c.state.organizationId); - return await githubData.listBranchesForRepository({ repoId: c.state.repoId }).catch(() => []); -} - -async function enrichTaskRecord(c: any, record: TaskRecord): Promise { - 
const branchName = record.branchName?.trim() || null; - if (!branchName) { - return record; - } - - const pr = - branchName != null - ? await getGithubData(c, c.state.organizationId) - .listPullRequestsForRepository({ repoId: c.state.repoId }) - .then((rows: any[]) => rows.find((row) => row.headRefName === branchName) ?? null) - .catch(() => null) - : null; - - return { - ...record, - prUrl: pr?.url ?? null, - prAuthor: pr?.authorLogin ?? null, - ciStatus: null, - reviewStatus: null, - reviewer: pr?.authorLogin ?? null, - diffStat: record.diffStat ?? null, - hasUnpushed: record.hasUnpushed ?? null, - conflictsWithMain: record.conflictsWithMain ?? null, - parentBranch: record.parentBranch ?? null, - }; -} - -async function createTaskMutation(c: any, cmd: CreateTaskCommand): Promise { - const organizationId = c.state.organizationId; - const repoId = c.state.repoId; - const repoRemote = c.state.remoteUrl; - const onBranch = cmd.onBranch?.trim() || null; - const taskId = randomUUID(); - let initialBranchName: string | null = null; - let initialTitle: string | null = null; - - await persistRemoteUrl(c, repoRemote); - - if (onBranch) { - initialBranchName = onBranch; - initialTitle = deriveFallbackTitle(cmd.task, cmd.explicitTitle ?? undefined); - - await registerTaskBranchMutation(c, { - taskId, - branchName: onBranch, - requireExistingRemote: true, - }); - } else { - const reservedBranches = await listKnownTaskBranches(c); - const resolved = resolveCreateFlowDecision({ - task: cmd.task, - explicitTitle: cmd.explicitTitle ?? undefined, - explicitBranchName: cmd.explicitBranchName ?? 
undefined, - localBranches: [], - taskBranches: reservedBranches, - }); - - initialBranchName = resolved.branchName; - initialTitle = resolved.title; - - const now = Date.now(); - await c.db - .insert(taskIndex) - .values({ - taskId, - branchName: resolved.branchName, - createdAt: now, - updatedAt: now, - }) - .onConflictDoNothing() - .run(); - } - - let taskHandle: Awaited>; - try { - taskHandle = await getOrCreateTask(c, organizationId, repoId, taskId, { - organizationId, - repoId, - taskId, - repoRemote, - branchName: initialBranchName, - title: initialTitle, - task: cmd.task, - sandboxProviderId: cmd.sandboxProviderId, - agentType: cmd.agentType, - explicitTitle: null, - explicitBranchName: null, - initialPrompt: cmd.initialPrompt, - }); - } catch (error) { - if (initialBranchName) { - await deleteStaleTaskIndexRow(c, taskId); - } - throw error; - } - - const created = await taskHandle.initialize({ sandboxProviderId: cmd.sandboxProviderId }); - - const history = await getOrCreateHistory(c, organizationId, repoId); - await history.append({ - kind: "task.created", - taskId, - payload: { - repoId, - sandboxProviderId: cmd.sandboxProviderId, - }, - }); - - return created; -} - -async function registerTaskBranchMutation(c: any, cmd: RegisterTaskBranchCommand): Promise<{ branchName: string; headSha: string }> { - const branchName = cmd.branchName.trim(); - if (!branchName) { - throw new Error("branchName is required"); - } - - await persistRemoteUrl(c, c.state.remoteUrl); - - const existingOwner = await c.db - .select({ taskId: taskIndex.taskId }) - .from(taskIndex) - .where(and(eq(taskIndex.branchName, branchName), ne(taskIndex.taskId, cmd.taskId))) - .get(); - - if (existingOwner) { - let ownerMissing = false; - try { - await getTask(c, c.state.organizationId, c.state.repoId, existingOwner.taskId).get(); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - ownerMissing = true; - await deleteStaleTaskIndexRow(c, existingOwner.taskId); - } else { - throw 
error; - } - } - if (!ownerMissing) { - throw new Error(`branch is already assigned to a different task: ${branchName}`); - } - } - - const branches = await listGitHubBranches(c); - const branchMatch = branches.find((branch) => branch.branchName === branchName) ?? null; - if (cmd.requireExistingRemote && !branchMatch) { - throw new Error(`Remote branch not found: ${branchName}`); - } - - const repository = await resolveGitHubRepository(c); - const defaultBranch = repository?.defaultBranch ?? "main"; - const headSha = branchMatch?.commitSha ?? branches.find((branch) => branch.branchName === defaultBranch)?.commitSha ?? ""; - - const now = Date.now(); - await c.db - .insert(taskIndex) - .values({ - taskId: cmd.taskId, - branchName, - createdAt: now, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: taskIndex.taskId, - set: { - branchName, - updatedAt: now, - }, - }) - .run(); - - return { branchName, headSha }; -} - -async function listTaskSummaries(c: any, includeArchived = false): Promise { - const taskRows = await c.db.select({ taskId: taskIndex.taskId }).from(taskIndex).orderBy(desc(taskIndex.updatedAt)).all(); - const records: TaskSummary[] = []; - - for (const row of taskRows) { - try { - const record = await getTask(c, c.state.organizationId, c.state.repoId, row.taskId).get(); - if (!includeArchived && record.status === "archived") { - continue; - } - records.push({ - organizationId: record.organizationId, - repoId: record.repoId, - taskId: record.taskId, - branchName: record.branchName, - title: record.title, - status: record.status, - updatedAt: record.updatedAt, - }); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - await deleteStaleTaskIndexRow(c, row.taskId); - continue; - } - logActorWarning("repository", "failed loading task summary row", { - organizationId: c.state.organizationId, - repoId: c.state.repoId, - taskId: row.taskId, - error: resolveErrorMessage(error), - }); - } - } - - records.sort((a, b) => b.updatedAt - 
a.updatedAt); - return records; -} - -function sortOverviewBranches( - branches: Array<{ - branchName: string; - commitSha: string; - taskId: string | null; - taskTitle: string | null; - taskStatus: TaskRecord["status"] | null; - prNumber: number | null; - prState: string | null; - prUrl: string | null; - ciStatus: string | null; - reviewStatus: string | null; - reviewer: string | null; - updatedAt: number; - }>, - defaultBranch: string | null, -) { - return [...branches].sort((left, right) => { - if (defaultBranch) { - if (left.branchName === defaultBranch && right.branchName !== defaultBranch) return -1; - if (right.branchName === defaultBranch && left.branchName !== defaultBranch) return 1; - } - if (Boolean(left.taskId) !== Boolean(right.taskId)) { - return left.taskId ? -1 : 1; - } - if (left.updatedAt !== right.updatedAt) { - return right.updatedAt - left.updatedAt; - } - return left.branchName.localeCompare(right.branchName); - }); -} - -export async function runRepositoryWorkflow(ctx: any): Promise { - await ctx.loop("repository-command-loop", async (loopCtx: any) => { - const msg = await loopCtx.queue.next("next-repository-command", { - names: [...REPOSITORY_QUEUE_NAMES], - completable: true, - }); - if (!msg) { - return Loop.continue(undefined); - } - - try { - if (msg.name === "repository.command.createTask") { - const result = await loopCtx.step({ - name: "repository-create-task", - timeout: 5 * 60_000, - run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskCommand), - }); - await msg.complete(result); - return Loop.continue(undefined); - } - - if (msg.name === "repository.command.registerTaskBranch") { - const result = await loopCtx.step({ - name: "repository-register-task-branch", - timeout: 60_000, - run: async () => registerTaskBranchMutation(loopCtx, msg.body as RegisterTaskBranchCommand), - }); - await msg.complete(result); - return Loop.continue(undefined); - } - } catch (error) { - const message = resolveErrorMessage(error); - 
logActorWarning("repository", "repository workflow command failed", { - queueName: msg.name, - error: message, - }); - await msg.complete({ error: message }).catch(() => {}); - } - - return Loop.continue(undefined); - }); -} - -export const repositoryActions = { - async createTask(c: any, cmd: CreateTaskCommand): Promise { - const self = selfRepository(c); - return expectQueueResponse( - await self.send(repositoryWorkflowQueueName("repository.command.createTask"), cmd, { - wait: true, - timeout: 10_000, - }), - ); - }, - - async listReservedBranches(c: any): Promise { - return await listKnownTaskBranches(c); - }, - - async registerTaskBranch(c: any, cmd: RegisterTaskBranchCommand): Promise<{ branchName: string; headSha: string }> { - const self = selfRepository(c); - return expectQueueResponse<{ branchName: string; headSha: string }>( - await self.send(repositoryWorkflowQueueName("repository.command.registerTaskBranch"), cmd, { - wait: true, - timeout: 10_000, - }), - ); - }, - - async listTaskSummaries(c: any, cmd?: ListTaskSummariesCommand): Promise { - return await listTaskSummaries(c, cmd?.includeArchived === true); - }, - - async getTaskEnriched(c: any, cmd: GetTaskEnrichedCommand): Promise { - const row = await c.db.select({ taskId: taskIndex.taskId }).from(taskIndex).where(eq(taskIndex.taskId, cmd.taskId)).get(); - if (!row) { - const record = await getTask(c, c.state.organizationId, c.state.repoId, cmd.taskId).get(); - await reinsertTaskIndexRow(c, cmd.taskId, record.branchName ?? null, record.updatedAt ?? 
Date.now()); - return await enrichTaskRecord(c, record); - } - - try { - const record = await getTask(c, c.state.organizationId, c.state.repoId, cmd.taskId).get(); - return await enrichTaskRecord(c, record); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - await deleteStaleTaskIndexRow(c, cmd.taskId); - throw new Error(`Unknown task in repo ${c.state.repoId}: ${cmd.taskId}`); - } - throw error; - } - }, - - async getRepositoryMetadata(c: any): Promise<{ defaultBranch: string | null; fullName: string | null; remoteUrl: string }> { - const repository = await resolveGitHubRepository(c); - return { - defaultBranch: repository?.defaultBranch ?? null, - fullName: repository?.fullName ?? null, - remoteUrl: c.state.remoteUrl, - }; - }, - - async getRepoOverview(c: any): Promise { - await persistRemoteUrl(c, c.state.remoteUrl); - - const now = Date.now(); - const repository = await resolveGitHubRepository(c); - const githubBranches = await listGitHubBranches(c).catch(() => []); - const githubData = getGithubData(c, c.state.organizationId); - const prRows = await githubData.listPullRequestsForRepository({ repoId: c.state.repoId }).catch(() => []); - const prByBranch = new Map(prRows.map((row) => [row.headRefName, row])); - - const taskRows = await c.db - .select({ - taskId: taskIndex.taskId, - branchName: taskIndex.branchName, - updatedAt: taskIndex.updatedAt, - }) - .from(taskIndex) - .all(); - - const taskMetaByBranch = new Map(); - for (const row of taskRows) { - if (!row.branchName) { - continue; - } - try { - const record = await getTask(c, c.state.organizationId, c.state.repoId, row.taskId).get(); - taskMetaByBranch.set(row.branchName, { - taskId: row.taskId, - title: record.title ?? 
null, - status: record.status, - updatedAt: record.updatedAt, - }); - } catch (error) { - if (isStaleTaskReferenceError(error)) { - await deleteStaleTaskIndexRow(c, row.taskId); - continue; - } - } - } - - const branchMap = new Map(); - for (const branch of githubBranches) { - branchMap.set(branch.branchName, branch); - } - for (const branchName of taskMetaByBranch.keys()) { - if (!branchMap.has(branchName)) { - branchMap.set(branchName, { branchName, commitSha: "" }); - } - } - if (repository?.defaultBranch && !branchMap.has(repository.defaultBranch)) { - branchMap.set(repository.defaultBranch, { branchName: repository.defaultBranch, commitSha: "" }); - } - - const branches = sortOverviewBranches( - [...branchMap.values()].map((branch) => { - const taskMeta = taskMetaByBranch.get(branch.branchName); - const pr = prByBranch.get(branch.branchName); - return { - branchName: branch.branchName, - commitSha: branch.commitSha, - taskId: taskMeta?.taskId ?? null, - taskTitle: taskMeta?.title ?? null, - taskStatus: taskMeta?.status ?? null, - prNumber: pr?.number ?? null, - prState: pr?.state ?? null, - prUrl: pr?.url ?? null, - ciStatus: null, - reviewStatus: null, - reviewer: pr?.authorLogin ?? null, - updatedAt: Math.max(taskMeta?.updatedAt ?? 0, pr?.updatedAtMs ?? 0, now), - }; - }), - repository?.defaultBranch ?? null, - ); - - return { - organizationId: c.state.organizationId, - repoId: c.state.repoId, - remoteUrl: c.state.remoteUrl, - baseRef: repository?.defaultBranch ?? 
null, - fetchedAt: now, - branches, - }; - }, - - async getPullRequestForBranch(c: any, cmd: GetPullRequestForBranchCommand): Promise<{ number: number; status: "draft" | "ready" } | null> { - const branchName = cmd.branchName?.trim(); - if (!branchName) { - return null; - } - const githubData = getGithubData(c, c.state.organizationId); - return await githubData.getPullRequestForBranch({ - repoId: c.state.repoId, - branchName, - }); - }, -}; diff --git a/foundry/packages/backend/src/actors/repository/db/db.ts b/foundry/packages/backend/src/actors/repository/db/db.ts deleted file mode 100644 index 79bed8e..0000000 --- a/foundry/packages/backend/src/actors/repository/db/db.ts +++ /dev/null @@ -1,5 +0,0 @@ -import { db } from "rivetkit/db/drizzle"; -import * as schema from "./schema.js"; -import migrations from "./migrations.js"; - -export const repositoryDb = db({ schema, migrations }); diff --git a/foundry/packages/backend/src/actors/repository/db/drizzle.config.ts b/foundry/packages/backend/src/actors/repository/db/drizzle.config.ts deleted file mode 100644 index 8b9a1b9..0000000 --- a/foundry/packages/backend/src/actors/repository/db/drizzle.config.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { defineConfig } from "rivetkit/db/drizzle"; - -export default defineConfig({ - out: "./src/actors/repository/db/drizzle", - schema: "./src/actors/repository/db/schema.ts", -}); diff --git a/foundry/packages/backend/src/actors/repository/db/drizzle/0000_useful_la_nuit.sql b/foundry/packages/backend/src/actors/repository/db/drizzle/0000_useful_la_nuit.sql deleted file mode 100644 index 14bc071..0000000 --- a/foundry/packages/backend/src/actors/repository/db/drizzle/0000_useful_la_nuit.sql +++ /dev/null @@ -1,12 +0,0 @@ -CREATE TABLE `repo_meta` ( - `id` integer PRIMARY KEY NOT NULL, - `remote_url` text NOT NULL, - `updated_at` integer NOT NULL -); ---> statement-breakpoint -CREATE TABLE `task_index` ( - `task_id` text PRIMARY KEY NOT NULL, - `branch_name` text, - `created_at` 
integer NOT NULL, - `updated_at` integer NOT NULL -); diff --git a/foundry/packages/backend/src/actors/repository/db/drizzle/meta/_journal.json b/foundry/packages/backend/src/actors/repository/db/drizzle/meta/_journal.json deleted file mode 100644 index deebd86..0000000 --- a/foundry/packages/backend/src/actors/repository/db/drizzle/meta/_journal.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "version": "7", - "dialect": "sqlite", - "entries": [ - { - "idx": 0, - "version": "6", - "when": 1773376221848, - "tag": "0000_useful_la_nuit", - "breakpoints": true - } - ] -} diff --git a/foundry/packages/backend/src/actors/repository/db/migrations.ts b/foundry/packages/backend/src/actors/repository/db/migrations.ts deleted file mode 100644 index ebdb167..0000000 --- a/foundry/packages/backend/src/actors/repository/db/migrations.ts +++ /dev/null @@ -1,43 +0,0 @@ -// This file is generated by src/actors/_scripts/generate-actor-migrations.ts. -// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql). -// Do not hand-edit this file. 
- -const journal = { - entries: [ - { - idx: 0, - when: 1773376221848, - tag: "0000_useful_la_nuit", - breakpoints: true, - }, - { - idx: 1, - when: 1778900000000, - tag: "0001_remove_local_git_state", - breakpoints: true, - }, - ], -} as const; - -export default { - journal, - migrations: { - m0000: `CREATE TABLE \`repo_meta\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`remote_url\` text NOT NULL, -\t\`updated_at\` integer NOT NULL -); ---> statement-breakpoint -CREATE TABLE \`task_index\` ( -\t\`task_id\` text PRIMARY KEY NOT NULL, -\t\`branch_name\` text, -\t\`created_at\` integer NOT NULL, -\t\`updated_at\` integer NOT NULL -); -`, - m0001: `DROP TABLE IF EXISTS \`branches\`; ---> statement-breakpoint -DROP TABLE IF EXISTS \`repo_action_jobs\`; -`, - } as const, -}; diff --git a/foundry/packages/backend/src/actors/repository/db/schema.ts b/foundry/packages/backend/src/actors/repository/db/schema.ts deleted file mode 100644 index 2f597e8..0000000 --- a/foundry/packages/backend/src/actors/repository/db/schema.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { integer, sqliteTable, text } from "rivetkit/db/drizzle"; - -// SQLite is per repository actor instance (organizationId+repoId). - -export const repoMeta = sqliteTable("repo_meta", { - id: integer("id").primaryKey(), - remoteUrl: text("remote_url").notNull(), - updatedAt: integer("updated_at").notNull(), -}); - -/** - * Coordinator index of TaskActor instances. - * The repository actor is the coordinator for tasks. Each row maps a - * taskId to its branch name. Used for branch conflict checking and - * task-by-branch lookups. Rows are inserted at task creation and - * updated on branch rename. 
- */ -export const taskIndex = sqliteTable("task_index", { - taskId: text("task_id").notNull().primaryKey(), - branchName: text("branch_name"), - createdAt: integer("created_at").notNull(), - updatedAt: integer("updated_at").notNull(), -}); diff --git a/foundry/packages/backend/src/actors/repository/index.ts b/foundry/packages/backend/src/actors/repository/index.ts deleted file mode 100644 index 4253a90..0000000 --- a/foundry/packages/backend/src/actors/repository/index.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { actor, queue } from "rivetkit"; -import { workflow } from "rivetkit/workflow"; -import { repositoryDb } from "./db/db.js"; -import { REPOSITORY_QUEUE_NAMES, repositoryActions, runRepositoryWorkflow } from "./actions.js"; - -export interface RepositoryInput { - organizationId: string; - repoId: string; - remoteUrl: string; -} - -export const repository = actor({ - db: repositoryDb, - queues: Object.fromEntries(REPOSITORY_QUEUE_NAMES.map((name) => [name, queue()])), - options: { - name: "Repository", - icon: "folder", - actionTimeout: 5 * 60_000, - }, - createState: (_c, input: RepositoryInput) => ({ - organizationId: input.organizationId, - repoId: input.repoId, - remoteUrl: input.remoteUrl, - }), - actions: repositoryActions, - run: workflow(runRepositoryWorkflow), -}); diff --git a/foundry/packages/backend/src/actors/sandbox/index.ts b/foundry/packages/backend/src/actors/sandbox/index.ts index 2e2087b..a35a149 100644 --- a/foundry/packages/backend/src/actors/sandbox/index.ts +++ b/foundry/packages/backend/src/actors/sandbox/index.ts @@ -2,12 +2,14 @@ import { actor } from "rivetkit"; import { e2b, sandboxActor } from "rivetkit/sandbox"; import { existsSync } from "node:fs"; import Dockerode from "dockerode"; +import { DEFAULT_WORKSPACE_MODEL_GROUPS, workspaceModelGroupsFromSandboxAgents, type WorkspaceModelGroup } from "@sandbox-agent/foundry-shared"; import { SandboxAgent } from "sandbox-agent"; import { getActorRuntimeContext } from "../context.js"; 
import { organizationKey } from "../keys.js"; +import { logActorWarning, resolveErrorMessage } from "../logging.js"; import { resolveSandboxProviderId } from "../../sandbox-config.js"; -const SANDBOX_REPO_CWD = "/home/sandbox/organization/repo"; +const SANDBOX_REPO_CWD = "/home/user/repo"; const DEFAULT_LOCAL_SANDBOX_IMAGE = "rivetdev/sandbox-agent:full"; const DEFAULT_LOCAL_SANDBOX_PORT = 2468; const dockerClient = new Dockerode({ socketPath: "/var/run/docker.sock" }); @@ -203,6 +205,13 @@ const baseTaskSandbox = sandboxActor({ create: () => ({ template: config.sandboxProviders.e2b.template ?? "sandbox-agent-full-0.3.x", envs: sandboxEnvObject(), + // TEMPORARY: Default E2B timeout is 5 minutes which is too short. + // Set to 1 hour as a stopgap. Remove this once the E2B provider in + // sandbox-agent uses betaCreate + autoPause (see + // .context/proposal-rivetkit-sandbox-resilience.md). At that point + // the provider handles timeout/pause lifecycle and this override is + // unnecessary. + timeoutMs: 60 * 60 * 1000, }), installAgents: ["claude", "codex"], }); @@ -219,8 +228,12 @@ async function broadcastProcesses(c: any, actions: Record { return provider; } +async function listWorkspaceModelGroupsForSandbox(c: any): Promise { + const provider = await providerForConnection(c); + if (!provider || !c.state.sandboxId || typeof provider.connectAgent !== "function") { + return DEFAULT_WORKSPACE_MODEL_GROUPS; + } + + try { + const client = await provider.connectAgent(c.state.sandboxId, { + waitForHealth: { + timeoutMs: 15_000, + }, + }); + const listed = await client.listAgents({ config: true }); + const groups = workspaceModelGroupsFromSandboxAgents(Array.isArray(listed?.agents) ? listed.agents : []); + return groups.length > 0 ? 
groups : DEFAULT_WORKSPACE_MODEL_GROUPS; + } catch { + return DEFAULT_WORKSPACE_MODEL_GROUPS; + } +} + const baseActions = baseTaskSandbox.config.actions as Record Promise>; export const taskSandbox = actor({ @@ -316,6 +349,19 @@ export const taskSandbox = actor({ return sanitizeActorResult(await session.prompt([{ type: "text", text }])); }, + async listProcesses(c: any): Promise { + try { + return await baseActions.listProcesses(c); + } catch (error) { + // Sandbox may be gone (E2B timeout, destroyed, etc.) — degrade to empty + logActorWarning("taskSandbox", "listProcesses failed, sandbox may be expired", { + sandboxId: c.state.sandboxId, + error: resolveErrorMessage(error), + }); + return { processes: [] }; + } + }, + async createProcess(c: any, request: any): Promise { const created = await baseActions.createProcess(c, request); await broadcastProcesses(c, baseActions); @@ -360,6 +406,10 @@ export const taskSandbox = actor({ } }, + async listWorkspaceModelGroups(c: any): Promise { + return await listWorkspaceModelGroupsForSandbox(c); + }, + async providerState(c: any): Promise<{ sandboxProviderId: "e2b" | "local"; sandboxId: string; state: string; at: number }> { const { config } = getActorRuntimeContext(); const { taskId } = parseTaskSandboxKey(c.key); diff --git a/foundry/packages/backend/src/actors/task/db/drizzle/0000_charming_maestro.sql b/foundry/packages/backend/src/actors/task/db/drizzle/0000_charming_maestro.sql index b9ef95a..c6a346a 100644 --- a/foundry/packages/backend/src/actors/task/db/drizzle/0000_charming_maestro.sql +++ b/foundry/packages/backend/src/actors/task/db/drizzle/0000_charming_maestro.sql @@ -3,10 +3,9 @@ CREATE TABLE `task` ( `branch_name` text, `title` text, `task` text NOT NULL, - `provider_id` text NOT NULL, + `sandbox_provider_id` text NOT NULL, `status` text NOT NULL, - `agent_type` text DEFAULT 'claude', - `pr_submitted` integer DEFAULT 0, + `pull_request_json` text, `created_at` integer NOT NULL, `updated_at` integer NOT NULL, 
CONSTRAINT "task_singleton_id_check" CHECK("task"."id" = 1) @@ -15,33 +14,33 @@ CREATE TABLE `task` ( CREATE TABLE `task_runtime` ( `id` integer PRIMARY KEY NOT NULL, `active_sandbox_id` text, - `active_session_id` text, `active_switch_target` text, `active_cwd` text, - `status_message` text, + `git_state_json` text, + `git_state_updated_at` integer, `updated_at` integer NOT NULL, CONSTRAINT "task_runtime_singleton_id_check" CHECK("task_runtime"."id" = 1) ); --> statement-breakpoint CREATE TABLE `task_sandboxes` ( `sandbox_id` text PRIMARY KEY NOT NULL, - `provider_id` text NOT NULL, + `sandbox_provider_id` text NOT NULL, `sandbox_actor_id` text, `switch_target` text NOT NULL, `cwd` text, - `status_message` text, `created_at` integer NOT NULL, `updated_at` integer NOT NULL ); --> statement-breakpoint -CREATE TABLE `task_workbench_sessions` ( +CREATE TABLE `task_workspace_sessions` ( `session_id` text PRIMARY KEY NOT NULL, + `sandbox_session_id` text, `session_name` text NOT NULL, `model` text NOT NULL, - `unread` integer DEFAULT 0 NOT NULL, - `draft_text` text DEFAULT '' NOT NULL, - `draft_attachments_json` text DEFAULT '[]' NOT NULL, - `draft_updated_at` integer, + `status` text DEFAULT 'ready' NOT NULL, + `error_message` text, + `transcript_json` text DEFAULT '[]' NOT NULL, + `transcript_updated_at` integer, `created` integer DEFAULT 1 NOT NULL, `closed` integer DEFAULT 0 NOT NULL, `thinking_since_ms` integer, diff --git a/foundry/packages/backend/src/actors/task/db/drizzle/meta/0000_snapshot.json b/foundry/packages/backend/src/actors/task/db/drizzle/meta/0000_snapshot.json index b8a5879..7397b89 100644 --- a/foundry/packages/backend/src/actors/task/db/drizzle/meta/0000_snapshot.json +++ b/foundry/packages/backend/src/actors/task/db/drizzle/meta/0000_snapshot.json @@ -35,8 +35,8 @@ "notNull": true, "autoincrement": false }, - "provider_id": { - "name": "provider_id", + "sandbox_provider_id": { + "name": "sandbox_provider_id", "type": "text", "primaryKey": false, 
"notNull": true, @@ -49,21 +49,12 @@ "notNull": true, "autoincrement": false }, - "agent_type": { - "name": "agent_type", + "pull_request_json": { + "name": "pull_request_json", "type": "text", "primaryKey": false, "notNull": false, - "autoincrement": false, - "default": "'claude'" - }, - "pr_submitted": { - "name": "pr_submitted", - "type": "integer", - "primaryKey": false, - "notNull": false, - "autoincrement": false, - "default": 0 + "autoincrement": false }, "created_at": { "name": "created_at", @@ -108,13 +99,6 @@ "notNull": false, "autoincrement": false }, - "active_session_id": { - "name": "active_session_id", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, "active_switch_target": { "name": "active_switch_target", "type": "text", @@ -129,13 +113,20 @@ "notNull": false, "autoincrement": false }, - "status_message": { - "name": "status_message", + "git_state_json": { + "name": "git_state_json", "type": "text", "primaryKey": false, "notNull": false, "autoincrement": false }, + "git_state_updated_at": { + "name": "git_state_updated_at", + "type": "integer", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, "updated_at": { "name": "updated_at", "type": "integer", @@ -165,8 +156,8 @@ "notNull": true, "autoincrement": false }, - "provider_id": { - "name": "provider_id", + "sandbox_provider_id": { + "name": "sandbox_provider_id", "type": "text", "primaryKey": false, "notNull": true, @@ -193,13 +184,6 @@ "notNull": false, "autoincrement": false }, - "status_message": { - "name": "status_message", - "type": "text", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, "created_at": { "name": "created_at", "type": "integer", @@ -221,8 +205,8 @@ "uniqueConstraints": {}, "checkConstraints": {} }, - "task_workbench_sessions": { - "name": "task_workbench_sessions", + "task_workspace_sessions": { + "name": "task_workspace_sessions", "columns": { "session_id": { "name": "session_id", @@ 
-231,6 +215,13 @@ "notNull": true, "autoincrement": false }, + "sandbox_session_id": { + "name": "sandbox_session_id", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, "session_name": { "name": "session_name", "type": "text", @@ -245,32 +236,31 @@ "notNull": true, "autoincrement": false }, - "unread": { - "name": "unread", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false, - "default": 0 - }, - "draft_text": { - "name": "draft_text", + "status": { + "name": "status", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false, - "default": "''" + "default": "'ready'" }, - "draft_attachments_json": { - "name": "draft_attachments_json", + "error_message": { + "name": "error_message", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "transcript_json": { + "name": "transcript_json", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false, "default": "'[]'" }, - "draft_updated_at": { - "name": "draft_updated_at", + "transcript_updated_at": { + "name": "transcript_updated_at", "type": "integer", "primaryKey": false, "notNull": false, diff --git a/foundry/packages/backend/src/actors/task/db/migrations.ts b/foundry/packages/backend/src/actors/task/db/migrations.ts index dc3193e..1e6ff76 100644 --- a/foundry/packages/backend/src/actors/task/db/migrations.ts +++ b/foundry/packages/backend/src/actors/task/db/migrations.ts @@ -10,12 +10,6 @@ const journal = { tag: "0000_charming_maestro", breakpoints: true, }, - { - idx: 1, - when: 1773810000000, - tag: "0001_sandbox_provider_columns", - breakpoints: true, - }, ], } as const; @@ -27,10 +21,9 @@ export default { \`branch_name\` text, \`title\` text, \`task\` text NOT NULL, - \`provider_id\` text NOT NULL, + \`sandbox_provider_id\` text NOT NULL, \`status\` text NOT NULL, - \`agent_type\` text DEFAULT 'claude', - \`pr_submitted\` integer DEFAULT 0, + 
\`pull_request_json\` text, \`created_at\` integer NOT NULL, \`updated_at\` integer NOT NULL, CONSTRAINT "task_singleton_id_check" CHECK("task"."id" = 1) @@ -39,43 +32,39 @@ export default { CREATE TABLE \`task_runtime\` ( \`id\` integer PRIMARY KEY NOT NULL, \`active_sandbox_id\` text, - \`active_session_id\` text, \`active_switch_target\` text, \`active_cwd\` text, - \`status_message\` text, + \`git_state_json\` text, + \`git_state_updated_at\` integer, \`updated_at\` integer NOT NULL, CONSTRAINT "task_runtime_singleton_id_check" CHECK("task_runtime"."id" = 1) ); --> statement-breakpoint CREATE TABLE \`task_sandboxes\` ( \`sandbox_id\` text PRIMARY KEY NOT NULL, - \`provider_id\` text NOT NULL, + \`sandbox_provider_id\` text NOT NULL, \`sandbox_actor_id\` text, \`switch_target\` text NOT NULL, \`cwd\` text, - \`status_message\` text, \`created_at\` integer NOT NULL, \`updated_at\` integer NOT NULL ); --> statement-breakpoint -CREATE TABLE \`task_workbench_sessions\` ( +CREATE TABLE \`task_workspace_sessions\` ( \`session_id\` text PRIMARY KEY NOT NULL, + \`sandbox_session_id\` text, \`session_name\` text NOT NULL, \`model\` text NOT NULL, - \`unread\` integer DEFAULT 0 NOT NULL, - \`draft_text\` text DEFAULT '' NOT NULL, - \`draft_attachments_json\` text DEFAULT '[]' NOT NULL, - \`draft_updated_at\` integer, + \`status\` text DEFAULT 'ready' NOT NULL, + \`error_message\` text, + \`transcript_json\` text DEFAULT '[]' NOT NULL, + \`transcript_updated_at\` integer, \`created\` integer DEFAULT 1 NOT NULL, \`closed\` integer DEFAULT 0 NOT NULL, \`thinking_since_ms\` integer, -\`created_at\` integer NOT NULL, + \`created_at\` integer NOT NULL, \`updated_at\` integer NOT NULL ); -`, - m0001: `ALTER TABLE \`task\` RENAME COLUMN \`provider_id\` TO \`sandbox_provider_id\`; ---> statement-breakpoint -ALTER TABLE \`task_sandboxes\` RENAME COLUMN \`provider_id\` TO \`sandbox_provider_id\`; `, } as const, }; diff --git a/foundry/packages/backend/src/actors/task/db/schema.ts 
b/foundry/packages/backend/src/actors/task/db/schema.ts index 889aa31..651ff76 100644 --- a/foundry/packages/backend/src/actors/task/db/schema.ts +++ b/foundry/packages/backend/src/actors/task/db/schema.ts @@ -11,8 +11,7 @@ export const task = sqliteTable( task: text("task").notNull(), sandboxProviderId: text("sandbox_provider_id").notNull(), status: text("status").notNull(), - agentType: text("agent_type").default("claude"), - prSubmitted: integer("pr_submitted").default(0), + pullRequestJson: text("pull_request_json"), createdAt: integer("created_at").notNull(), updatedAt: integer("updated_at").notNull(), }, @@ -24,14 +23,10 @@ export const taskRuntime = sqliteTable( { id: integer("id").primaryKey(), activeSandboxId: text("active_sandbox_id"), - activeSessionId: text("active_session_id"), activeSwitchTarget: text("active_switch_target"), activeCwd: text("active_cwd"), - statusMessage: text("status_message"), gitStateJson: text("git_state_json"), gitStateUpdatedAt: integer("git_state_updated_at"), - provisionStage: text("provision_stage"), - provisionStageUpdatedAt: integer("provision_stage_updated_at"), updatedAt: integer("updated_at").notNull(), }, (table) => [check("task_runtime_singleton_id_check", sql`${table.id} = 1`)], @@ -48,18 +43,17 @@ export const taskSandboxes = sqliteTable("task_sandboxes", { sandboxActorId: text("sandbox_actor_id"), switchTarget: text("switch_target").notNull(), cwd: text("cwd"), - statusMessage: text("status_message"), createdAt: integer("created_at").notNull(), updatedAt: integer("updated_at").notNull(), }); /** - * Coordinator index of workbench sessions within this task. + * Coordinator index of workspace sessions within this task. * The task actor is the coordinator for sessions. Each row holds session * metadata, model, status, transcript, and draft state. Sessions are * sub-entities of the task — no separate session actor in the DB. 
*/ -export const taskWorkbenchSessions = sqliteTable("task_workbench_sessions", { +export const taskWorkspaceSessions = sqliteTable("task_workspace_sessions", { sessionId: text("session_id").notNull().primaryKey(), sandboxSessionId: text("sandbox_session_id"), sessionName: text("session_name").notNull(), @@ -68,11 +62,6 @@ export const taskWorkbenchSessions = sqliteTable("task_workbench_sessions", { errorMessage: text("error_message"), transcriptJson: text("transcript_json").notNull().default("[]"), transcriptUpdatedAt: integer("transcript_updated_at"), - unread: integer("unread").notNull().default(0), - draftText: text("draft_text").notNull().default(""), - // Structured by the workbench composer attachment payload format. - draftAttachmentsJson: text("draft_attachments_json").notNull().default("[]"), - draftUpdatedAt: integer("draft_updated_at"), created: integer("created").notNull().default(1), closed: integer("closed").notNull().default(0), thinkingSinceMs: integer("thinking_since_ms"), diff --git a/foundry/packages/backend/src/actors/task/index.ts b/foundry/packages/backend/src/actors/task/index.ts index f2b9e51..7e1c5e2 100644 --- a/foundry/packages/backend/src/actors/task/index.ts +++ b/foundry/packages/backend/src/actors/task/index.ts @@ -1,393 +1,47 @@ -import { actor, queue } from "rivetkit"; -import { workflow } from "rivetkit/workflow"; -import type { - AgentType, - TaskRecord, - TaskWorkbenchChangeModelInput, - TaskWorkbenchRenameInput, - TaskWorkbenchRenameSessionInput, - TaskWorkbenchSetSessionUnreadInput, - TaskWorkbenchSendMessageInput, - TaskWorkbenchUpdateDraftInput, - SandboxProviderId, -} from "@sandbox-agent/foundry-shared"; -import { expectQueueResponse } from "../../services/queue.js"; -import { selfTask } from "../handles.js"; +import { actor } from "rivetkit"; +import type { TaskRecord } from "@sandbox-agent/foundry-shared"; import { taskDb } from "./db/db.js"; import { getCurrentRecord } from "./workflow/common.js"; -import { - 
changeWorkbenchModel, - closeWorkbenchSession, - createWorkbenchSession, - getSessionDetail, - getTaskDetail, - getTaskSummary, - markWorkbenchUnread, - publishWorkbenchPr, - renameWorkbenchBranch, - renameWorkbenchTask, - renameWorkbenchSession, - revertWorkbenchFile, - sendWorkbenchMessage, - syncWorkbenchSessionStatus, - setWorkbenchSessionUnread, - stopWorkbenchSession, - updateWorkbenchDraft, -} from "./workbench.js"; -import { TASK_QUEUE_NAMES, taskWorkflowQueueName, runTaskWorkflow } from "./workflow/index.js"; +import { getSessionDetail, getTaskDetail, getTaskSummary } from "./workspace.js"; +import { taskCommandActions } from "./workflow/index.js"; export interface TaskInput { organizationId: string; repoId: string; taskId: string; - repoRemote: string; - branchName: string | null; - title: string | null; - task: string; - sandboxProviderId: SandboxProviderId; - agentType: AgentType | null; - explicitTitle: string | null; - explicitBranchName: string | null; - initialPrompt: string | null; -} - -interface InitializeCommand { - sandboxProviderId?: SandboxProviderId; -} - -interface TaskActionCommand { - reason?: string; -} - -interface TaskSessionCommand { - sessionId: string; -} - -interface TaskStatusSyncCommand { - sessionId: string; - status: "running" | "idle" | "error"; - at: number; -} - -interface TaskWorkbenchValueCommand { - value: string; -} - -interface TaskWorkbenchSessionTitleCommand { - sessionId: string; - title: string; -} - -interface TaskWorkbenchSessionUnreadCommand { - sessionId: string; - unread: boolean; -} - -interface TaskWorkbenchUpdateDraftCommand { - sessionId: string; - text: string; - attachments: Array; -} - -interface TaskWorkbenchChangeModelCommand { - sessionId: string; - model: string; -} - -interface TaskWorkbenchSendMessageCommand { - sessionId: string; - text: string; - attachments: Array; -} - -interface TaskWorkbenchCreateSessionCommand { - model?: string; -} - -interface TaskWorkbenchCreateSessionAndSendCommand { - 
model?: string; - text: string; -} - -interface TaskWorkbenchSessionCommand { - sessionId: string; } export const task = actor({ db: taskDb, - queues: Object.fromEntries(TASK_QUEUE_NAMES.map((name) => [name, queue()])), options: { name: "Task", icon: "wrench", - actionTimeout: 5 * 60_000, + actionTimeout: 10 * 60_000, }, createState: (_c, input: TaskInput) => ({ organizationId: input.organizationId, repoId: input.repoId, taskId: input.taskId, - repoRemote: input.repoRemote, - branchName: input.branchName, - title: input.title, - task: input.task, - sandboxProviderId: input.sandboxProviderId, - agentType: input.agentType, - explicitTitle: input.explicitTitle, - explicitBranchName: input.explicitBranchName, - initialPrompt: input.initialPrompt, - initialized: false, - previousStatus: null as string | null, }), actions: { - async initialize(c, cmd: InitializeCommand): Promise { - const self = selfTask(c); - const result = await self.send(taskWorkflowQueueName("task.command.initialize"), cmd ?? {}, { - wait: true, - timeout: 10_000, - }); - return expectQueueResponse(result); - }, - - async provision(c, cmd: InitializeCommand): Promise<{ ok: true }> { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.provision"), cmd ?? {}, { - wait: false, - }); - return { ok: true }; - }, - - async attach(c, cmd?: TaskActionCommand): Promise<{ target: string; sessionId: string | null }> { - const self = selfTask(c); - const result = await self.send(taskWorkflowQueueName("task.command.attach"), cmd ?? 
{}, { - wait: true, - timeout: 10_000, - }); - return expectQueueResponse<{ target: string; sessionId: string | null }>(result); - }, - - async switch(c): Promise<{ switchTarget: string }> { - const self = selfTask(c); - const result = await self.send( - taskWorkflowQueueName("task.command.switch"), - {}, - { - wait: true, - timeout: 10_000, - }, - ); - return expectQueueResponse<{ switchTarget: string }>(result); - }, - - async push(c, cmd?: TaskActionCommand): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.push"), cmd ?? {}, { - wait: false, - }); - }, - - async sync(c, cmd?: TaskActionCommand): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.sync"), cmd ?? {}, { - wait: false, - }); - }, - - async merge(c, cmd?: TaskActionCommand): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.merge"), cmd ?? {}, { - wait: false, - }); - }, - - async archive(c, cmd?: TaskActionCommand): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.archive"), cmd ?? {}, { - wait: false, - }); - }, - - async kill(c, cmd?: TaskActionCommand): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.kill"), cmd ?? 
{}, { - wait: false, - }); - }, - async get(c): Promise { - return await getCurrentRecord({ db: c.db, state: c.state }); + return await getCurrentRecord(c); }, async getTaskSummary(c) { return await getTaskSummary(c); }, - async getTaskDetail(c) { - return await getTaskDetail(c); + async getTaskDetail(c, input?: { authSessionId?: string }) { + return await getTaskDetail(c, input?.authSessionId); }, - async getSessionDetail(c, input: { sessionId: string }) { - return await getSessionDetail(c, input.sessionId); + async getSessionDetail(c, input: { sessionId: string; authSessionId?: string }) { + return await getSessionDetail(c, input.sessionId, input.authSessionId); }, - async markWorkbenchUnread(c): Promise { - const self = selfTask(c); - await self.send( - taskWorkflowQueueName("task.command.workbench.mark_unread"), - {}, - { - wait: true, - timeout: 10_000, - }, - ); - }, - - async renameWorkbenchTask(c, input: TaskWorkbenchRenameInput): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.workbench.rename_task"), { value: input.value } satisfies TaskWorkbenchValueCommand, { - wait: true, - timeout: 20_000, - }); - }, - - async renameWorkbenchBranch(c, input: TaskWorkbenchRenameInput): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.workbench.rename_branch"), { value: input.value } satisfies TaskWorkbenchValueCommand, { - wait: false, - }); - }, - - async createWorkbenchSession(c, input?: { model?: string }): Promise<{ sessionId: string }> { - const self = selfTask(c); - const result = await self.send( - taskWorkflowQueueName("task.command.workbench.create_session"), - { ...(input?.model ? { model: input.model } : {}) } satisfies TaskWorkbenchCreateSessionCommand, - { - wait: true, - timeout: 10_000, - }, - ); - return expectQueueResponse<{ sessionId: string }>(result); - }, - - /** - * Fire-and-forget: creates a workbench session and sends the initial message. 
- * Used by createWorkbenchTask so the caller doesn't block on session creation. - */ - async createWorkbenchSessionAndSend(c, input: { model?: string; text: string }): Promise { - const self = selfTask(c); - await self.send( - taskWorkflowQueueName("task.command.workbench.create_session_and_send"), - { model: input.model, text: input.text } satisfies TaskWorkbenchCreateSessionAndSendCommand, - { wait: false }, - ); - }, - - async renameWorkbenchSession(c, input: TaskWorkbenchRenameSessionInput): Promise { - const self = selfTask(c); - await self.send( - taskWorkflowQueueName("task.command.workbench.rename_session"), - { sessionId: input.sessionId, title: input.title } satisfies TaskWorkbenchSessionTitleCommand, - { - wait: true, - timeout: 10_000, - }, - ); - }, - - async setWorkbenchSessionUnread(c, input: TaskWorkbenchSetSessionUnreadInput): Promise { - const self = selfTask(c); - await self.send( - taskWorkflowQueueName("task.command.workbench.set_session_unread"), - { sessionId: input.sessionId, unread: input.unread } satisfies TaskWorkbenchSessionUnreadCommand, - { - wait: true, - timeout: 10_000, - }, - ); - }, - - async updateWorkbenchDraft(c, input: TaskWorkbenchUpdateDraftInput): Promise { - const self = selfTask(c); - await self.send( - taskWorkflowQueueName("task.command.workbench.update_draft"), - { - sessionId: input.sessionId, - text: input.text, - attachments: input.attachments, - } satisfies TaskWorkbenchUpdateDraftCommand, - { - wait: false, - }, - ); - }, - - async changeWorkbenchModel(c, input: TaskWorkbenchChangeModelInput): Promise { - const self = selfTask(c); - await self.send( - taskWorkflowQueueName("task.command.workbench.change_model"), - { sessionId: input.sessionId, model: input.model } satisfies TaskWorkbenchChangeModelCommand, - { - wait: true, - timeout: 10_000, - }, - ); - }, - - async sendWorkbenchMessage(c, input: TaskWorkbenchSendMessageInput): Promise { - const self = selfTask(c); - await self.send( - 
taskWorkflowQueueName("task.command.workbench.send_message"), - { - sessionId: input.sessionId, - text: input.text, - attachments: input.attachments, - } satisfies TaskWorkbenchSendMessageCommand, - { - wait: false, - }, - ); - }, - - async stopWorkbenchSession(c, input: TaskSessionCommand): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.workbench.stop_session"), { sessionId: input.sessionId } satisfies TaskWorkbenchSessionCommand, { - wait: false, - }); - }, - - async syncWorkbenchSessionStatus(c, input: TaskStatusSyncCommand): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.workbench.sync_session_status"), input, { - wait: true, - timeout: 20_000, - }); - }, - - async closeWorkbenchSession(c, input: TaskSessionCommand): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.workbench.close_session"), { sessionId: input.sessionId } satisfies TaskWorkbenchSessionCommand, { - wait: false, - }); - }, - - async publishWorkbenchPr(c): Promise { - const self = selfTask(c); - await self.send( - taskWorkflowQueueName("task.command.workbench.publish_pr"), - {}, - { - wait: false, - }, - ); - }, - - async revertWorkbenchFile(c, input: { path: string }): Promise { - const self = selfTask(c); - await self.send(taskWorkflowQueueName("task.command.workbench.revert_file"), input, { - wait: false, - }); - }, + ...taskCommandActions, }, - run: workflow(runTaskWorkflow), }); -export { TASK_QUEUE_NAMES }; +export { taskWorkflowQueueName } from "./workflow/index.js"; diff --git a/foundry/packages/backend/src/actors/task/workflow/commands.ts b/foundry/packages/backend/src/actors/task/workflow/commands.ts index d03ade1..7ba2d2b 100644 --- a/foundry/packages/backend/src/actors/task/workflow/commands.ts +++ b/foundry/packages/backend/src/actors/task/workflow/commands.ts @@ -2,8 +2,8 @@ import { eq } from "drizzle-orm"; import { getTaskSandbox } from 
"../../handles.js"; import { logActorWarning, resolveErrorMessage } from "../../logging.js"; -import { task as taskTable, taskRuntime } from "../db/schema.js"; -import { TASK_ROW_ID, appendHistory, getCurrentRecord, setTaskState } from "./common.js"; +import { task as taskTable } from "../db/schema.js"; +import { TASK_ROW_ID, appendAuditLog, getCurrentRecord, setTaskState } from "./common.js"; import { pushActiveBranchActivity } from "./push.js"; async function withTimeout(promise: Promise, timeoutMs: number, label: string): Promise { @@ -25,6 +25,7 @@ async function withTimeout(promise: Promise, timeoutMs: number, label: str export async function handleAttachActivity(loopCtx: any, msg: any): Promise { const record = await getCurrentRecord(loopCtx); let target = record.sandboxes.find((sandbox: any) => sandbox.sandboxId === record.activeSandboxId)?.switchTarget ?? ""; + const sessionId = msg.body?.sessionId ?? null; if (record.activeSandboxId) { try { @@ -38,14 +39,14 @@ export async function handleAttachActivity(loopCtx: any, msg: any): Promise await msg.complete({ ok: true }); } -export async function handleSimpleCommandActivity(loopCtx: any, msg: any, statusMessage: string, historyKind: string): Promise { - const db = loopCtx.db; - await db.update(taskRuntime).set({ statusMessage, updatedAt: Date.now() }).where(eq(taskRuntime.id, TASK_ROW_ID)).run(); - - await appendHistory(loopCtx, historyKind, { reason: msg.body?.reason ?? null }); +export async function handleSimpleCommandActivity(loopCtx: any, msg: any, historyKind: string): Promise { + await appendAuditLog(loopCtx, historyKind, { reason: msg.body?.reason ?? 
null }); await msg.complete({ ok: true }); } export async function handleArchiveActivity(loopCtx: any, msg: any): Promise { - await setTaskState(loopCtx, "archive_stop_status_sync", "stopping status sync"); + await setTaskState(loopCtx, "archive_stop_status_sync"); const record = await getCurrentRecord(loopCtx); if (record.activeSandboxId) { - await setTaskState(loopCtx, "archive_release_sandbox", "releasing sandbox"); + await setTaskState(loopCtx, "archive_release_sandbox"); void withTimeout(getTaskSandbox(loopCtx, loopCtx.state.organizationId, record.activeSandboxId).destroy(), 45_000, "sandbox destroy").catch((error) => { logActorWarning("task.commands", "failed to release sandbox during archive", { organizationId: loopCtx.state.organizationId, @@ -90,17 +88,15 @@ export async function handleArchiveActivity(loopCtx: any, msg: any): Promise { - await setTaskState(loopCtx, "kill_destroy_sandbox", "destroying sandbox"); + await setTaskState(loopCtx, "kill_destroy_sandbox"); const record = await getCurrentRecord(loopCtx); if (!record.activeSandboxId) { return; @@ -110,13 +106,11 @@ export async function killDestroySandboxActivity(loopCtx: any): Promise { } export async function killWriteDbActivity(loopCtx: any, msg: any): Promise { - await setTaskState(loopCtx, "kill_finalize", "finalizing kill"); + await setTaskState(loopCtx, "kill_finalize"); const db = loopCtx.db; await db.update(taskTable).set({ status: "killed", updatedAt: Date.now() }).where(eq(taskTable.id, TASK_ROW_ID)).run(); - await db.update(taskRuntime).set({ statusMessage: "killed", updatedAt: Date.now() }).where(eq(taskRuntime.id, TASK_ROW_ID)).run(); - - await appendHistory(loopCtx, "task.kill", { reason: msg.body?.reason ?? null }); + await appendAuditLog(loopCtx, "task.kill", { reason: msg.body?.reason ?? 
null }); await msg.complete({ ok: true }); } diff --git a/foundry/packages/backend/src/actors/task/workflow/common.ts b/foundry/packages/backend/src/actors/task/workflow/common.ts index ae1e8dd..cbe63e6 100644 --- a/foundry/packages/backend/src/actors/task/workflow/common.ts +++ b/foundry/packages/backend/src/actors/task/workflow/common.ts @@ -2,8 +2,10 @@ import { eq } from "drizzle-orm"; import type { TaskRecord, TaskStatus } from "@sandbox-agent/foundry-shared"; import { task as taskTable, taskRuntime, taskSandboxes } from "../db/schema.js"; -import { historyKey } from "../../keys.js"; -import { broadcastTaskUpdate } from "../workbench.js"; +import { getOrCreateAuditLog, getOrCreateOrganization } from "../../handles.js"; +import { broadcastTaskUpdate } from "../workspace.js"; +import { getActorRuntimeContext } from "../../context.js"; +import { defaultSandboxProviderId } from "../../../sandbox-config.js"; export const TASK_ROW_ID = 1; @@ -56,50 +58,32 @@ export function buildAgentPrompt(task: string): string { return task.trim(); } -export async function setTaskState(ctx: any, status: TaskStatus, statusMessage?: string): Promise { +export async function setTaskState(ctx: any, status: TaskStatus): Promise { const now = Date.now(); const db = ctx.db; await db.update(taskTable).set({ status, updatedAt: now }).where(eq(taskTable.id, TASK_ROW_ID)).run(); - if (statusMessage != null) { - await db - .insert(taskRuntime) - .values({ - id: TASK_ROW_ID, - activeSandboxId: null, - activeSessionId: null, - activeSwitchTarget: null, - activeCwd: null, - statusMessage, - updatedAt: now, - }) - .onConflictDoUpdate({ - target: taskRuntime.id, - set: { - statusMessage, - updatedAt: now, - }, - }) - .run(); - } - await broadcastTaskUpdate(ctx); } +/** + * Read the task's current record from its local SQLite DB. 
+ * If the task actor was lazily created (virtual task from PR sync) and has no + * DB rows yet, auto-initializes by reading branch/title from the org actor's + * getTaskIndexEntry. This is the self-initialization path for lazy task actors. + */ export async function getCurrentRecord(ctx: any): Promise { const db = ctx.db; - const row = await db + const organization = await getOrCreateOrganization(ctx, ctx.state.organizationId); + let row = await db .select({ branchName: taskTable.branchName, title: taskTable.title, task: taskTable.task, sandboxProviderId: taskTable.sandboxProviderId, status: taskTable.status, - statusMessage: taskRuntime.statusMessage, + pullRequestJson: taskTable.pullRequestJson, activeSandboxId: taskRuntime.activeSandboxId, - activeSessionId: taskRuntime.activeSessionId, - agentType: taskTable.agentType, - prSubmitted: taskTable.prSubmitted, createdAt: taskTable.createdAt, updatedAt: taskTable.updatedAt, }) @@ -109,7 +93,58 @@ export async function getCurrentRecord(ctx: any): Promise { .get(); if (!row) { - throw new Error(`Task not found: ${ctx.state.taskId}`); + // Virtual task — auto-initialize from org actor's task index data + let branchName: string | null = null; + let title = "Untitled"; + try { + const entry = await organization.getTaskIndexEntry({ taskId: ctx.state.taskId }); + branchName = entry?.branchName ?? null; + title = entry?.title ?? 
title; + } catch {} + + const { config } = getActorRuntimeContext(); + const { initBootstrapDbActivity, initCompleteActivity } = await import("./init.js"); + await initBootstrapDbActivity(ctx, { + sandboxProviderId: defaultSandboxProviderId(config), + branchName, + title, + task: title, + }); + await initCompleteActivity(ctx, { sandboxProviderId: defaultSandboxProviderId(config) }); + + // Re-read the row after initialization + const initialized = await db + .select({ + branchName: taskTable.branchName, + title: taskTable.title, + task: taskTable.task, + sandboxProviderId: taskTable.sandboxProviderId, + status: taskTable.status, + pullRequestJson: taskTable.pullRequestJson, + activeSandboxId: taskRuntime.activeSandboxId, + createdAt: taskTable.createdAt, + updatedAt: taskTable.updatedAt, + }) + .from(taskTable) + .leftJoin(taskRuntime, eq(taskTable.id, taskRuntime.id)) + .where(eq(taskTable.id, TASK_ROW_ID)) + .get(); + + if (!initialized) { + throw new Error(`Task not found after initialization: ${ctx.state.taskId}`); + } + + row = initialized; + } + + const repositoryMetadata = await organization.getRepositoryMetadata({ repoId: ctx.state.repoId }); + let pullRequest = null; + if (row.pullRequestJson) { + try { + pullRequest = JSON.parse(row.pullRequestJson); + } catch { + pullRequest = null; + } } const sandboxes = await db @@ -128,16 +163,15 @@ export async function getCurrentRecord(ctx: any): Promise { return { organizationId: ctx.state.organizationId, repoId: ctx.state.repoId, - repoRemote: ctx.state.repoRemote, + repoRemote: repositoryMetadata.remoteUrl, taskId: ctx.state.taskId, branchName: row.branchName, title: row.title, task: row.task, sandboxProviderId: row.sandboxProviderId, status: row.status, - statusMessage: row.statusMessage ?? null, activeSandboxId: row.activeSandboxId ?? null, - activeSessionId: row.activeSessionId ?? 
null, + pullRequest, sandboxes: sandboxes.map((sb) => ({ sandboxId: sb.sandboxId, sandboxProviderId: sb.sandboxProviderId, @@ -147,31 +181,19 @@ export async function getCurrentRecord(ctx: any): Promise { createdAt: sb.createdAt, updatedAt: sb.updatedAt, })), - agentType: row.agentType ?? null, - prSubmitted: Boolean(row.prSubmitted), - diffStat: null, - hasUnpushed: null, - conflictsWithMain: null, - parentBranch: null, - prUrl: null, - prAuthor: null, - ciStatus: null, - reviewStatus: null, - reviewer: null, createdAt: row.createdAt, updatedAt: row.updatedAt, } as TaskRecord; } -export async function appendHistory(ctx: any, kind: string, payload: Record): Promise { - const client = ctx.client(); - const history = await client.history.getOrCreate(historyKey(ctx.state.organizationId, ctx.state.repoId), { - createWithInput: { organizationId: ctx.state.organizationId, repoId: ctx.state.repoId }, - }); - await history.append({ +export async function appendAuditLog(ctx: any, kind: string, payload: Record): Promise { + const row = await ctx.db.select({ branchName: taskTable.branchName }).from(taskTable).where(eq(taskTable.id, TASK_ROW_ID)).get(); + const auditLog = await getOrCreateAuditLog(ctx, ctx.state.organizationId); + void auditLog.append({ kind, + repoId: ctx.state.repoId, taskId: ctx.state.taskId, - branchName: ctx.state.branchName, + branchName: row?.branchName ?? 
null, payload, }); diff --git a/foundry/packages/backend/src/actors/task/workflow/index.ts b/foundry/packages/backend/src/actors/task/workflow/index.ts index f6ffd10..69004ee 100644 --- a/foundry/packages/backend/src/actors/task/workflow/index.ts +++ b/foundry/packages/backend/src/actors/task/workflow/index.ts @@ -1,4 +1,3 @@ -import { Loop } from "rivetkit/workflow"; import { logActorWarning, resolveErrorMessage } from "../../logging.js"; import { getCurrentRecord } from "./common.js"; import { initBootstrapDbActivity, initCompleteActivity, initEnqueueProvisionActivity, initFailedActivity } from "./init.js"; @@ -12,283 +11,254 @@ import { killDestroySandboxActivity, killWriteDbActivity, } from "./commands.js"; -import { TASK_QUEUE_NAMES } from "./queue.js"; import { - changeWorkbenchModel, - closeWorkbenchSession, - createWorkbenchSession, - ensureWorkbenchSession, - refreshWorkbenchDerivedState, - refreshWorkbenchSessionTranscript, - markWorkbenchUnread, - publishWorkbenchPr, - renameWorkbenchBranch, - renameWorkbenchTask, - renameWorkbenchSession, - revertWorkbenchFile, - sendWorkbenchMessage, - setWorkbenchSessionUnread, - stopWorkbenchSession, - syncWorkbenchSessionStatus, - updateWorkbenchDraft, -} from "../workbench.js"; + changeWorkspaceModel, + closeWorkspaceSession, + createWorkspaceSession, + ensureWorkspaceSession, + refreshWorkspaceDerivedState, + refreshWorkspaceSessionTranscript, + markWorkspaceUnread, + publishWorkspacePr, + renameWorkspaceTask, + renameWorkspaceSession, + selectWorkspaceSession, + revertWorkspaceFile, + sendWorkspaceMessage, + setWorkspaceSessionUnread, + stopWorkspaceSession, + syncTaskPullRequest, + syncWorkspaceSessionStatus, + updateWorkspaceDraft, +} from "../workspace.js"; -export { TASK_QUEUE_NAMES, taskWorkflowQueueName } from "./queue.js"; +export { taskWorkflowQueueName } from "./queue.js"; -type TaskQueueName = (typeof TASK_QUEUE_NAMES)[number]; +/** + * Task command actions — converted from queue/workflow handlers to 
direct actions. + * Each export becomes an action on the task actor. + */ +export const taskCommandActions = { + async initialize(c: any, body: any) { + await initBootstrapDbActivity(c, body); + await initEnqueueProvisionActivity(c, body); + return await getCurrentRecord(c); + }, -type WorkflowHandler = (loopCtx: any, msg: { name: TaskQueueName; body: any; complete: (response: unknown) => Promise }) => Promise; - -const commandHandlers: Record = { - "task.command.initialize": async (loopCtx, msg) => { - const body = msg.body; - - await loopCtx.step("init-bootstrap-db", async () => initBootstrapDbActivity(loopCtx, body)); - await loopCtx.step("init-enqueue-provision", async () => initEnqueueProvisionActivity(loopCtx, body)); - await loopCtx.removed("init-dispatch-provision-v2", "step"); - const currentRecord = await loopCtx.step("init-read-current-record", async () => getCurrentRecord(loopCtx)); + async provision(c: any, body: any) { try { - await msg.complete(currentRecord); + await initCompleteActivity(c, body); + return { ok: true }; } catch (error) { - logActorWarning("task.workflow", "initialize completion failed", { - error: resolveErrorMessage(error), - }); + await initFailedActivity(c, error, body); + return { ok: false, error: resolveErrorMessage(error) }; } }, - "task.command.provision": async (loopCtx, msg) => { - await loopCtx.removed("init-failed", "step"); - await loopCtx.removed("init-failed-v2", "step"); + async attach(c: any, body: any) { + // handleAttachActivity expects msg with complete — adapt + const result = { value: undefined as any }; + const msg = { + name: "task.command.attach", + body, + complete: async (v: any) => { + result.value = v; + }, + }; + await handleAttachActivity(c, msg); + return result.value; + }, + + async switchTask(c: any, body: any) { + const result = { value: undefined as any }; + const msg = { + name: "task.command.switch", + body, + complete: async (v: any) => { + result.value = v; + }, + }; + await 
handleSwitchActivity(c, msg); + return result.value; + }, + + async push(c: any, body: any) { + const result = { value: undefined as any }; + const msg = { + name: "task.command.push", + body, + complete: async (v: any) => { + result.value = v; + }, + }; + await handlePushActivity(c, msg); + return result.value; + }, + + async sync(c: any, body: any) { + const result = { value: undefined as any }; + const msg = { + name: "task.command.sync", + body, + complete: async (v: any) => { + result.value = v; + }, + }; + await handleSimpleCommandActivity(c, msg, "task.sync"); + return result.value; + }, + + async merge(c: any, body: any) { + const result = { value: undefined as any }; + const msg = { + name: "task.command.merge", + body, + complete: async (v: any) => { + result.value = v; + }, + }; + await handleSimpleCommandActivity(c, msg, "task.merge"); + return result.value; + }, + + async archive(c: any, body: any) { + const result = { value: undefined as any }; + const msg = { + name: "task.command.archive", + body, + complete: async (v: any) => { + result.value = v; + }, + }; + await handleArchiveActivity(c, msg); + return result.value; + }, + + async kill(c: any, body: any) { + const result = { value: undefined as any }; + const msg = { + name: "task.command.kill", + body, + complete: async (v: any) => { + result.value = v; + }, + }; + await killDestroySandboxActivity(c); + await killWriteDbActivity(c, msg); + return result.value; + }, + + async getRecord(c: any, body: any) { + const result = { value: undefined as any }; + const msg = { + name: "task.command.get", + body, + complete: async (v: any) => { + result.value = v; + }, + }; + await handleGetActivity(c, msg); + return result.value; + }, + + async pullRequestSync(c: any, body: any) { + await syncTaskPullRequest(c, body?.pullRequest ?? 
null); + return { ok: true }; + }, + + async markUnread(c: any, body: any) { + await markWorkspaceUnread(c, body?.authSessionId); + return { ok: true }; + }, + + async renameTask(c: any, body: any) { + await renameWorkspaceTask(c, body.value); + return { ok: true }; + }, + + async createSession(c: any, body: any) { + return await createWorkspaceSession(c, body?.model, body?.authSessionId); + }, + + async createSessionAndSend(c: any, body: any) { try { - await loopCtx.removed("init-ensure-name", "step"); - await loopCtx.removed("init-assert-name", "step"); - await loopCtx.removed("init-create-sandbox", "step"); - await loopCtx.removed("init-ensure-agent", "step"); - await loopCtx.removed("init-start-sandbox-instance", "step"); - await loopCtx.removed("init-expose-sandbox", "step"); - await loopCtx.removed("init-create-session", "step"); - await loopCtx.removed("init-write-db", "step"); - await loopCtx.removed("init-start-status-sync", "step"); - await loopCtx.step("init-complete", async () => initCompleteActivity(loopCtx, msg.body)); - await msg.complete({ ok: true }); - } catch (error) { - await loopCtx.step("init-failed-v3", async () => initFailedActivity(loopCtx, error)); - await msg.complete({ - ok: false, - error: resolveErrorMessage(error), - }); - } - }, - - "task.command.attach": async (loopCtx, msg) => { - await loopCtx.step("handle-attach", async () => handleAttachActivity(loopCtx, msg)); - }, - - "task.command.switch": async (loopCtx, msg) => { - await loopCtx.step("handle-switch", async () => handleSwitchActivity(loopCtx, msg)); - }, - - "task.command.push": async (loopCtx, msg) => { - await loopCtx.step("handle-push", async () => handlePushActivity(loopCtx, msg)); - }, - - "task.command.sync": async (loopCtx, msg) => { - await loopCtx.step("handle-sync", async () => handleSimpleCommandActivity(loopCtx, msg, "sync requested", "task.sync")); - }, - - "task.command.merge": async (loopCtx, msg) => { - await loopCtx.step("handle-merge", async () => 
handleSimpleCommandActivity(loopCtx, msg, "merge requested", "task.merge")); - }, - - "task.command.archive": async (loopCtx, msg) => { - await loopCtx.step("handle-archive", async () => handleArchiveActivity(loopCtx, msg)); - }, - - "task.command.kill": async (loopCtx, msg) => { - await loopCtx.step("kill-destroy-sandbox", async () => killDestroySandboxActivity(loopCtx)); - await loopCtx.step("kill-write-db", async () => killWriteDbActivity(loopCtx, msg)); - }, - - "task.command.get": async (loopCtx, msg) => { - await loopCtx.step("handle-get", async () => handleGetActivity(loopCtx, msg)); - }, - - "task.command.workbench.mark_unread": async (loopCtx, msg) => { - await loopCtx.step("workbench-mark-unread", async () => markWorkbenchUnread(loopCtx)); - await msg.complete({ ok: true }); - }, - - "task.command.workbench.rename_task": async (loopCtx, msg) => { - await loopCtx.step("workbench-rename-task", async () => renameWorkbenchTask(loopCtx, msg.body.value)); - await msg.complete({ ok: true }); - }, - - "task.command.workbench.rename_branch": async (loopCtx, msg) => { - await loopCtx.step({ - name: "workbench-rename-branch", - timeout: 5 * 60_000, - run: async () => renameWorkbenchBranch(loopCtx, msg.body.value), - }); - await msg.complete({ ok: true }); - }, - - "task.command.workbench.create_session": async (loopCtx, msg) => { - try { - const created = await loopCtx.step({ - name: "workbench-create-session", - timeout: 5 * 60_000, - run: async () => createWorkbenchSession(loopCtx, msg.body?.model), - }); - await msg.complete(created); - } catch (error) { - await msg.complete({ error: resolveErrorMessage(error) }); - } - }, - - "task.command.workbench.create_session_and_send": async (loopCtx, msg) => { - try { - const created = await loopCtx.step({ - name: "workbench-create-session-for-send", - timeout: 5 * 60_000, - run: async () => createWorkbenchSession(loopCtx, msg.body?.model), - }); - await loopCtx.step({ - name: "workbench-send-initial-message", - timeout: 
5 * 60_000, - run: async () => sendWorkbenchMessage(loopCtx, created.sessionId, msg.body.text, []), - }); + const created = await createWorkspaceSession(c, body?.model, body?.authSessionId); + await sendWorkspaceMessage(c, created.sessionId, body.text, [], body?.authSessionId); } catch (error) { logActorWarning("task.workflow", "create_session_and_send failed", { error: resolveErrorMessage(error), }); } - await msg.complete({ ok: true }); + return { ok: true }; }, - "task.command.workbench.ensure_session": async (loopCtx, msg) => { - await loopCtx.step({ - name: "workbench-ensure-session", - timeout: 5 * 60_000, - run: async () => ensureWorkbenchSession(loopCtx, msg.body.sessionId, msg.body?.model), - }); - await msg.complete({ ok: true }); + async ensureSession(c: any, body: any) { + await ensureWorkspaceSession(c, body.sessionId, body?.model, body?.authSessionId); + return { ok: true }; }, - "task.command.workbench.rename_session": async (loopCtx, msg) => { - await loopCtx.step("workbench-rename-session", async () => renameWorkbenchSession(loopCtx, msg.body.sessionId, msg.body.title)); - await msg.complete({ ok: true }); + async renameSession(c: any, body: any) { + await renameWorkspaceSession(c, body.sessionId, body.title); + return { ok: true }; }, - "task.command.workbench.set_session_unread": async (loopCtx, msg) => { - await loopCtx.step("workbench-set-session-unread", async () => setWorkbenchSessionUnread(loopCtx, msg.body.sessionId, msg.body.unread)); - await msg.complete({ ok: true }); + async selectSession(c: any, body: any) { + await selectWorkspaceSession(c, body.sessionId, body?.authSessionId); + return { ok: true }; }, - "task.command.workbench.update_draft": async (loopCtx, msg) => { - await loopCtx.step("workbench-update-draft", async () => updateWorkbenchDraft(loopCtx, msg.body.sessionId, msg.body.text, msg.body.attachments)); - await msg.complete({ ok: true }); + async setSessionUnread(c: any, body: any) { + await setWorkspaceSessionUnread(c, 
body.sessionId, body.unread, body?.authSessionId); + return { ok: true }; }, - "task.command.workbench.change_model": async (loopCtx, msg) => { - await loopCtx.step("workbench-change-model", async () => changeWorkbenchModel(loopCtx, msg.body.sessionId, msg.body.model)); - await msg.complete({ ok: true }); + async updateDraft(c: any, body: any) { + await updateWorkspaceDraft(c, body.sessionId, body.text, body.attachments, body?.authSessionId); + return { ok: true }; }, - "task.command.workbench.send_message": async (loopCtx, msg) => { - try { - await loopCtx.step({ - name: "workbench-send-message", - timeout: 10 * 60_000, - run: async () => sendWorkbenchMessage(loopCtx, msg.body.sessionId, msg.body.text, msg.body.attachments), - }); - await msg.complete({ ok: true }); - } catch (error) { - await msg.complete({ error: resolveErrorMessage(error) }); - } + async changeModel(c: any, body: any) { + await changeWorkspaceModel(c, body.sessionId, body.model, body?.authSessionId); + return { ok: true }; }, - "task.command.workbench.stop_session": async (loopCtx, msg) => { - await loopCtx.step({ - name: "workbench-stop-session", - timeout: 5 * 60_000, - run: async () => stopWorkbenchSession(loopCtx, msg.body.sessionId), - }); - await msg.complete({ ok: true }); + async sendMessage(c: any, body: any) { + await sendWorkspaceMessage(c, body.sessionId, body.text, body.attachments, body?.authSessionId); + return { ok: true }; }, - "task.command.workbench.sync_session_status": async (loopCtx, msg) => { - await loopCtx.step("workbench-sync-session-status", async () => syncWorkbenchSessionStatus(loopCtx, msg.body.sessionId, msg.body.status, msg.body.at)); - await msg.complete({ ok: true }); + async stopSession(c: any, body: any) { + await stopWorkspaceSession(c, body.sessionId); + return { ok: true }; }, - "task.command.workbench.refresh_derived": async (loopCtx, msg) => { - await loopCtx.step({ - name: "workbench-refresh-derived", - timeout: 5 * 60_000, - run: async () => 
refreshWorkbenchDerivedState(loopCtx), - }); - await msg.complete({ ok: true }); + async syncSessionStatus(c: any, body: any) { + await syncWorkspaceSessionStatus(c, body.sessionId, body.status, body.at); + return { ok: true }; }, - "task.command.workbench.refresh_session_transcript": async (loopCtx, msg) => { - await loopCtx.step({ - name: "workbench-refresh-session-transcript", - timeout: 60_000, - run: async () => refreshWorkbenchSessionTranscript(loopCtx, msg.body.sessionId), - }); - await msg.complete({ ok: true }); + async refreshDerived(c: any, _body: any) { + await refreshWorkspaceDerivedState(c); + return { ok: true }; }, - "task.command.workbench.close_session": async (loopCtx, msg) => { - await loopCtx.step({ - name: "workbench-close-session", - timeout: 5 * 60_000, - run: async () => closeWorkbenchSession(loopCtx, msg.body.sessionId), - }); - await msg.complete({ ok: true }); + async refreshSessionTranscript(c: any, body: any) { + await refreshWorkspaceSessionTranscript(c, body.sessionId); + return { ok: true }; }, - "task.command.workbench.publish_pr": async (loopCtx, msg) => { - await loopCtx.step({ - name: "workbench-publish-pr", - timeout: 10 * 60_000, - run: async () => publishWorkbenchPr(loopCtx), - }); - await msg.complete({ ok: true }); + async closeSession(c: any, body: any) { + await closeWorkspaceSession(c, body.sessionId, body?.authSessionId); + return { ok: true }; }, - "task.command.workbench.revert_file": async (loopCtx, msg) => { - await loopCtx.step({ - name: "workbench-revert-file", - timeout: 5 * 60_000, - run: async () => revertWorkbenchFile(loopCtx, msg.body.path), - }); - await msg.complete({ ok: true }); + async publishPr(c: any, _body: any) { + await publishWorkspacePr(c); + return { ok: true }; + }, + + async revertFile(c: any, body: any) { + await revertWorkspaceFile(c, body.path); + return { ok: true }; }, }; - -export async function runTaskWorkflow(ctx: any): Promise { - await ctx.loop("task-command-loop", async (loopCtx: 
any) => { - const msg = await loopCtx.queue.next("next-command", { - names: [...TASK_QUEUE_NAMES], - completable: true, - }); - if (!msg) { - return Loop.continue(undefined); - } - const handler = commandHandlers[msg.name as TaskQueueName]; - if (handler) { - try { - await handler(loopCtx, msg); - } catch (error) { - const message = resolveErrorMessage(error); - logActorWarning("task.workflow", "task workflow command failed", { - queueName: msg.name, - error: message, - }); - await msg.complete({ error: message }).catch(() => {}); - } - } - return Loop.continue(undefined); - }); -} diff --git a/foundry/packages/backend/src/actors/task/workflow/init.ts b/foundry/packages/backend/src/actors/task/workflow/init.ts index 8a9962d..08085e8 100644 --- a/foundry/packages/backend/src/actors/task/workflow/init.ts +++ b/foundry/packages/backend/src/actors/task/workflow/init.ts @@ -1,49 +1,44 @@ // @ts-nocheck import { eq } from "drizzle-orm"; import { getActorRuntimeContext } from "../../context.js"; -import { getOrCreateHistory, selfTask } from "../../handles.js"; +import { selfTask } from "../../handles.js"; import { resolveErrorMessage } from "../../logging.js"; import { defaultSandboxProviderId } from "../../../sandbox-config.js"; import { task as taskTable, taskRuntime } from "../db/schema.js"; -import { TASK_ROW_ID, appendHistory, collectErrorMessages, resolveErrorDetail, setTaskState } from "./common.js"; -import { taskWorkflowQueueName } from "./queue.js"; - -async function ensureTaskRuntimeCacheColumns(db: any): Promise { - await db.execute(`ALTER TABLE task_runtime ADD COLUMN git_state_json text`).catch(() => {}); - await db.execute(`ALTER TABLE task_runtime ADD COLUMN git_state_updated_at integer`).catch(() => {}); - await db.execute(`ALTER TABLE task_runtime ADD COLUMN provision_stage text`).catch(() => {}); - await db.execute(`ALTER TABLE task_runtime ADD COLUMN provision_stage_updated_at integer`).catch(() => {}); -} +import { TASK_ROW_ID, appendAuditLog, 
collectErrorMessages, resolveErrorDetail, setTaskState } from "./common.js"; +// task actions called directly (no queue) export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise { const { config } = getActorRuntimeContext(); - const sandboxProviderId = body?.sandboxProviderId ?? loopCtx.state.sandboxProviderId ?? defaultSandboxProviderId(config); + const sandboxProviderId = body?.sandboxProviderId ?? defaultSandboxProviderId(config); + const task = body?.task; + if (typeof task !== "string" || task.trim().length === 0) { + throw new Error("task initialize requires the task prompt"); + } const now = Date.now(); - await ensureTaskRuntimeCacheColumns(loopCtx.db); - await loopCtx.db .insert(taskTable) .values({ id: TASK_ROW_ID, - branchName: loopCtx.state.branchName, - title: loopCtx.state.title, - task: loopCtx.state.task, + branchName: body?.branchName ?? null, + title: body?.title ?? null, + task, sandboxProviderId, status: "init_bootstrap_db", - agentType: loopCtx.state.agentType ?? config.default_agent, + pullRequestJson: null, createdAt: now, updatedAt: now, }) .onConflictDoUpdate({ target: taskTable.id, set: { - branchName: loopCtx.state.branchName, - title: loopCtx.state.title, - task: loopCtx.state.task, + branchName: body?.branchName ?? null, + title: body?.title ?? null, + task, sandboxProviderId, status: "init_bootstrap_db", - agentType: loopCtx.state.agentType ?? 
config.default_agent, + pullRequestJson: null, updatedAt: now, }, }) @@ -54,26 +49,18 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise< .values({ id: TASK_ROW_ID, activeSandboxId: null, - activeSessionId: null, activeSwitchTarget: null, activeCwd: null, - statusMessage: "provisioning", gitStateJson: null, gitStateUpdatedAt: null, - provisionStage: "queued", - provisionStageUpdatedAt: now, updatedAt: now, }) .onConflictDoUpdate({ target: taskRuntime.id, set: { activeSandboxId: null, - activeSessionId: null, activeSwitchTarget: null, activeCwd: null, - statusMessage: "provisioning", - provisionStage: "queued", - provisionStageUpdatedAt: now, updatedAt: now, }, }) @@ -81,22 +68,11 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise< } export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Promise { - await setTaskState(loopCtx, "init_enqueue_provision", "provision queued"); - await loopCtx.db - .update(taskRuntime) - .set({ - provisionStage: "queued", - provisionStageUpdatedAt: Date.now(), - updatedAt: Date.now(), - }) - .where(eq(taskRuntime.id, TASK_ROW_ID)) - .run(); + await setTaskState(loopCtx, "init_enqueue_provision"); const self = selfTask(loopCtx); try { - await self.send(taskWorkflowQueueName("task.command.provision"), body, { - wait: false, - }); + void self.provision(body).catch(() => {}); } catch (error) { logActorWarning("task.init", "background provision command failed", { organizationId: loopCtx.state.organizationId, @@ -111,60 +87,52 @@ export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Pro export async function initCompleteActivity(loopCtx: any, body: any): Promise { const now = Date.now(); const { config } = getActorRuntimeContext(); - const sandboxProviderId = body?.sandboxProviderId ?? loopCtx.state.sandboxProviderId ?? defaultSandboxProviderId(config); + const sandboxProviderId = body?.sandboxProviderId ?? 
defaultSandboxProviderId(config); - await setTaskState(loopCtx, "init_complete", "task initialized"); + await setTaskState(loopCtx, "init_complete"); await loopCtx.db .update(taskRuntime) .set({ - statusMessage: "ready", - provisionStage: "ready", - provisionStageUpdatedAt: now, updatedAt: now, }) .where(eq(taskRuntime.id, TASK_ROW_ID)) .run(); - const history = await getOrCreateHistory(loopCtx, loopCtx.state.organizationId, loopCtx.state.repoId); - await history.append({ - kind: "task.initialized", - taskId: loopCtx.state.taskId, - branchName: loopCtx.state.branchName, + await appendAuditLog(loopCtx, "task.initialized", { payload: { sandboxProviderId }, }); - - loopCtx.state.initialized = true; } -export async function initFailedActivity(loopCtx: any, error: unknown): Promise { +export async function initFailedActivity(loopCtx: any, error: unknown, body?: any): Promise { const now = Date.now(); const detail = resolveErrorDetail(error); const messages = collectErrorMessages(error); const { config } = getActorRuntimeContext(); - const sandboxProviderId = loopCtx.state.sandboxProviderId ?? defaultSandboxProviderId(config); + const sandboxProviderId = defaultSandboxProviderId(config); + const task = typeof body?.task === "string" ? body.task : null; await loopCtx.db .insert(taskTable) .values({ id: TASK_ROW_ID, - branchName: loopCtx.state.branchName ?? null, - title: loopCtx.state.title ?? null, - task: loopCtx.state.task, + branchName: body?.branchName ?? null, + title: body?.title ?? null, + task: task ?? detail, sandboxProviderId, status: "error", - agentType: loopCtx.state.agentType ?? config.default_agent, + pullRequestJson: null, createdAt: now, updatedAt: now, }) .onConflictDoUpdate({ target: taskTable.id, set: { - branchName: loopCtx.state.branchName ?? null, - title: loopCtx.state.title ?? null, - task: loopCtx.state.task, + branchName: body?.branchName ?? null, + title: body?.title ?? null, + task: task ?? 
detail, sandboxProviderId, status: "error", - agentType: loopCtx.state.agentType ?? config.default_agent, + pullRequestJson: null, updatedAt: now, }, }) @@ -175,30 +143,22 @@ export async function initFailedActivity(loopCtx: any, error: unknown): Promise< .values({ id: TASK_ROW_ID, activeSandboxId: null, - activeSessionId: null, activeSwitchTarget: null, activeCwd: null, - statusMessage: detail, - provisionStage: "error", - provisionStageUpdatedAt: now, updatedAt: now, }) .onConflictDoUpdate({ target: taskRuntime.id, set: { activeSandboxId: null, - activeSessionId: null, activeSwitchTarget: null, activeCwd: null, - statusMessage: detail, - provisionStage: "error", - provisionStageUpdatedAt: now, updatedAt: now, }, }) .run(); - await appendHistory(loopCtx, "task.error", { + await appendAuditLog(loopCtx, "task.error", { detail, messages, }); diff --git a/foundry/packages/backend/src/actors/task/workflow/push.ts b/foundry/packages/backend/src/actors/task/workflow/push.ts index c525ebe..f15ab0b 100644 --- a/foundry/packages/backend/src/actors/task/workflow/push.ts +++ b/foundry/packages/backend/src/actors/task/workflow/push.ts @@ -1,9 +1,7 @@ // @ts-nocheck -import { eq } from "drizzle-orm"; import { getTaskSandbox } from "../../handles.js"; import { resolveOrganizationGithubAuth } from "../../../services/github-auth.js"; -import { taskRuntime, taskSandboxes } from "../db/schema.js"; -import { TASK_ROW_ID, appendHistory, getCurrentRecord } from "./common.js"; +import { appendAuditLog, getCurrentRecord } from "./common.js"; export interface PushActiveBranchOptions { reason?: string | null; @@ -13,7 +11,7 @@ export interface PushActiveBranchOptions { export async function pushActiveBranchActivity(loopCtx: any, options: PushActiveBranchOptions = {}): Promise { const record = await getCurrentRecord(loopCtx); const activeSandboxId = record.activeSandboxId; - const branchName = loopCtx.state.branchName ?? 
record.branchName; + const branchName = record.branchName; if (!activeSandboxId) { throw new Error("cannot push: no active sandbox"); @@ -28,19 +26,6 @@ export async function pushActiveBranchActivity(loopCtx: any, options: PushActive throw new Error("cannot push: active sandbox cwd is not set"); } - const now = Date.now(); - await loopCtx.db - .update(taskRuntime) - .set({ statusMessage: `pushing branch ${branchName}`, updatedAt: now }) - .where(eq(taskRuntime.id, TASK_ROW_ID)) - .run(); - - await loopCtx.db - .update(taskSandboxes) - .set({ statusMessage: `pushing branch ${branchName}`, updatedAt: now }) - .where(eq(taskSandboxes.sandboxId, activeSandboxId)) - .run(); - const script = [ "set -euo pipefail", `cd ${JSON.stringify(cwd)}`, @@ -68,20 +53,7 @@ export async function pushActiveBranchActivity(loopCtx: any, options: PushActive throw new Error(`git push failed (${result.exitCode ?? 1}): ${[result.stdout, result.stderr].filter(Boolean).join("")}`); } - const updatedAt = Date.now(); - await loopCtx.db - .update(taskRuntime) - .set({ statusMessage: `push complete for ${branchName}`, updatedAt }) - .where(eq(taskRuntime.id, TASK_ROW_ID)) - .run(); - - await loopCtx.db - .update(taskSandboxes) - .set({ statusMessage: `push complete for ${branchName}`, updatedAt }) - .where(eq(taskSandboxes.sandboxId, activeSandboxId)) - .run(); - - await appendHistory(loopCtx, options.historyKind ?? "task.push", { + await appendAuditLog(loopCtx, options.historyKind ?? "task.push", { reason: options.reason ?? 
null, branchName, sandboxId: activeSandboxId, diff --git a/foundry/packages/backend/src/actors/task/workflow/queue.ts b/foundry/packages/backend/src/actors/task/workflow/queue.ts index 3e613e2..133a657 100644 --- a/foundry/packages/backend/src/actors/task/workflow/queue.ts +++ b/foundry/packages/backend/src/actors/task/workflow/queue.ts @@ -9,24 +9,25 @@ export const TASK_QUEUE_NAMES = [ "task.command.archive", "task.command.kill", "task.command.get", - "task.command.workbench.mark_unread", - "task.command.workbench.rename_task", - "task.command.workbench.rename_branch", - "task.command.workbench.create_session", - "task.command.workbench.create_session_and_send", - "task.command.workbench.ensure_session", - "task.command.workbench.rename_session", - "task.command.workbench.set_session_unread", - "task.command.workbench.update_draft", - "task.command.workbench.change_model", - "task.command.workbench.send_message", - "task.command.workbench.stop_session", - "task.command.workbench.sync_session_status", - "task.command.workbench.refresh_derived", - "task.command.workbench.refresh_session_transcript", - "task.command.workbench.close_session", - "task.command.workbench.publish_pr", - "task.command.workbench.revert_file", + "task.command.pull_request.sync", + "task.command.workspace.mark_unread", + "task.command.workspace.rename_task", + "task.command.workspace.create_session", + "task.command.workspace.create_session_and_send", + "task.command.workspace.ensure_session", + "task.command.workspace.rename_session", + "task.command.workspace.select_session", + "task.command.workspace.set_session_unread", + "task.command.workspace.update_draft", + "task.command.workspace.change_model", + "task.command.workspace.send_message", + "task.command.workspace.stop_session", + "task.command.workspace.sync_session_status", + "task.command.workspace.refresh_derived", + "task.command.workspace.refresh_session_transcript", + "task.command.workspace.close_session", + 
"task.command.workspace.publish_pr", + "task.command.workspace.revert_file", ] as const; export function taskWorkflowQueueName(name: string): string { diff --git a/foundry/packages/backend/src/actors/task/workbench.ts b/foundry/packages/backend/src/actors/task/workspace.ts similarity index 66% rename from foundry/packages/backend/src/actors/task/workbench.ts rename to foundry/packages/backend/src/actors/task/workspace.ts index d6698ca..7505d01 100644 --- a/foundry/packages/backend/src/actors/task/workbench.ts +++ b/foundry/packages/backend/src/actors/task/workspace.ts @@ -2,13 +2,24 @@ import { randomUUID } from "node:crypto"; import { basename, dirname } from "node:path"; import { asc, eq } from "drizzle-orm"; +import { + DEFAULT_WORKSPACE_MODEL_GROUPS, + DEFAULT_WORKSPACE_MODEL_ID, + workspaceAgentForModel, + workspaceSandboxAgentIdForModel, +} from "@sandbox-agent/foundry-shared"; import { getActorRuntimeContext } from "../context.js"; -import { getOrCreateRepository, getOrCreateTaskSandbox, getOrCreateOrganization, getTaskSandbox, selfTask } from "../handles.js"; +import { getOrCreateOrganization, getOrCreateTaskSandbox, getOrCreateUser, getTaskSandbox, selfTask } from "../handles.js"; +import { logActorWarning, resolveErrorMessage } from "../logging.js"; import { SANDBOX_REPO_CWD } from "../sandbox/index.js"; import { resolveSandboxProviderId } from "../../sandbox-config.js"; +import { getBetterAuthService } from "../../services/better-auth.js"; +// expectQueueResponse removed — actions return values directly import { resolveOrganizationGithubAuth } from "../../services/github-auth.js"; import { githubRepoFullNameFromRemote } from "../../services/repo.js"; -import { task as taskTable, taskRuntime, taskSandboxes, taskWorkbenchSessions } from "./db/schema.js"; +// organization actions called directly (no queue) + +import { task as taskTable, taskRuntime, taskSandboxes, taskWorkspaceSessions } from "./db/schema.js"; import { getCurrentRecord } from 
"./workflow/common.js"; function emptyGitState() { @@ -20,62 +31,29 @@ function emptyGitState() { }; } -async function ensureWorkbenchSessionTable(c: any): Promise { - await c.db.execute(` - CREATE TABLE IF NOT EXISTS task_workbench_sessions ( - session_id text PRIMARY KEY NOT NULL, - sandbox_session_id text, - session_name text NOT NULL, - model text NOT NULL, - status text DEFAULT 'ready' NOT NULL, - error_message text, - transcript_json text DEFAULT '[]' NOT NULL, - transcript_updated_at integer, - unread integer DEFAULT 0 NOT NULL, - draft_text text DEFAULT '' NOT NULL, - draft_attachments_json text DEFAULT '[]' NOT NULL, - draft_updated_at integer, - created integer DEFAULT 1 NOT NULL, - closed integer DEFAULT 0 NOT NULL, - thinking_since_ms integer, - created_at integer NOT NULL, - updated_at integer NOT NULL - ) - `); - await c.db.execute(`ALTER TABLE task_workbench_sessions ADD COLUMN sandbox_session_id text`).catch(() => {}); - await c.db.execute(`ALTER TABLE task_workbench_sessions ADD COLUMN status text DEFAULT 'ready' NOT NULL`).catch(() => {}); - await c.db.execute(`ALTER TABLE task_workbench_sessions ADD COLUMN error_message text`).catch(() => {}); - await c.db.execute(`ALTER TABLE task_workbench_sessions ADD COLUMN transcript_json text DEFAULT '[]' NOT NULL`).catch(() => {}); - await c.db.execute(`ALTER TABLE task_workbench_sessions ADD COLUMN transcript_updated_at integer`).catch(() => {}); -} - -async function ensureTaskRuntimeCacheColumns(c: any): Promise { - await c.db.execute(`ALTER TABLE task_runtime ADD COLUMN git_state_json text`).catch(() => {}); - await c.db.execute(`ALTER TABLE task_runtime ADD COLUMN git_state_updated_at integer`).catch(() => {}); - await c.db.execute(`ALTER TABLE task_runtime ADD COLUMN provision_stage text`).catch(() => {}); - await c.db.execute(`ALTER TABLE task_runtime ADD COLUMN provision_stage_updated_at integer`).catch(() => {}); -} - -function defaultModelForAgent(agentType: string | null | undefined) { - return 
agentType === "codex" ? "gpt-5.3-codex" : "claude-sonnet-4"; -} - -function isCodexModel(model: string) { - return model.startsWith("gpt-") || model.startsWith("o"); -} +const FALLBACK_MODEL = DEFAULT_WORKSPACE_MODEL_ID; function agentKindForModel(model: string) { - if (isCodexModel(model)) { - return "Codex"; - } - return "Claude"; + return workspaceAgentForModel(model); } -export function agentTypeForModel(model: string) { - if (isCodexModel(model)) { - return "codex"; +export function sandboxAgentIdForModel(model: string) { + return workspaceSandboxAgentIdForModel(model); +} + +async function resolveWorkspaceModelGroups(c: any): Promise { + try { + const sandbox = await getOrCreateTaskSandbox(c, c.state.organizationId, stableSandboxId(c)); + const groups = await sandbox.listWorkspaceModelGroups(); + return Array.isArray(groups) && groups.length > 0 ? groups : DEFAULT_WORKSPACE_MODEL_GROUPS; + } catch { + return DEFAULT_WORKSPACE_MODEL_GROUPS; } - return "claude"; +} + +async function resolveSandboxAgentForModel(c: any, model: string): Promise { + const groups = await resolveWorkspaceModelGroups(c); + return workspaceSandboxAgentIdForModel(model, groups); } function repoLabelFromRemote(remoteUrl: string): string { @@ -93,6 +71,11 @@ function repoLabelFromRemote(remoteUrl: string): string { return basename(trimmed.replace(/\.git$/, "")); } +async function getRepositoryMetadata(c: any): Promise<{ defaultBranch: string | null; fullName: string | null; remoteUrl: string }> { + const organization = await getOrCreateOrganization(c, c.state.organizationId); + return await organization.getRepositoryMetadata({ repoId: c.state.repoId }); +} + function parseDraftAttachments(value: string | null | undefined): Array { if (!value) { return []; @@ -168,8 +151,7 @@ export function shouldRecreateSessionForModelChange(meta: { } async function listSessionMetaRows(c: any, options?: { includeClosed?: boolean }): Promise> { - await ensureWorkbenchSessionTable(c); - const rows = await 
c.db.select().from(taskWorkbenchSessions).orderBy(asc(taskWorkbenchSessions.createdAt)).all(); + const rows = await c.db.select().from(taskWorkspaceSessions).orderBy(asc(taskWorkspaceSessions.createdAt)).all(); const mapped = rows.map((row: any) => ({ ...row, id: row.sessionId, @@ -179,9 +161,6 @@ async function listSessionMetaRows(c: any, options?: { includeClosed?: boolean } errorMessage: row.errorMessage ?? null, transcript: parseTranscript(row.transcriptJson), transcriptUpdatedAt: row.transcriptUpdatedAt ?? null, - draftAttachments: parseDraftAttachments(row.draftAttachmentsJson), - draftUpdatedAtMs: row.draftUpdatedAt ?? null, - unread: row.unread === 1, created: row.created === 1, closed: row.closed === 1, })); @@ -199,8 +178,7 @@ async function nextSessionName(c: any): Promise { } async function readSessionMeta(c: any, sessionId: string): Promise { - await ensureWorkbenchSessionTable(c); - const row = await c.db.select().from(taskWorkbenchSessions).where(eq(taskWorkbenchSessions.sessionId, sessionId)).get(); + const row = await c.db.select().from(taskWorkspaceSessions).where(eq(taskWorkspaceSessions.sessionId, sessionId)).get(); if (!row) { return null; @@ -215,28 +193,107 @@ async function readSessionMeta(c: any, sessionId: string): Promise { errorMessage: row.errorMessage ?? null, transcript: parseTranscript(row.transcriptJson), transcriptUpdatedAt: row.transcriptUpdatedAt ?? null, - draftAttachments: parseDraftAttachments(row.draftAttachmentsJson), - draftUpdatedAtMs: row.draftUpdatedAt ?? 
null, - unread: row.unread === 1, created: row.created === 1, closed: row.closed === 1, }; } +async function getUserTaskState(c: any, authSessionId?: string | null): Promise<{ activeSessionId: string | null; bySessionId: Map }> { + if (!authSessionId) { + return { activeSessionId: null, bySessionId: new Map() }; + } + + const authState = await getBetterAuthService().getAuthState(authSessionId); + const userId = authState?.user?.id; + if (typeof userId !== "string" || userId.length === 0) { + return { activeSessionId: null, bySessionId: new Map() }; + } + + const user = await getOrCreateUser(c, userId); + const state = await user.getTaskState({ taskId: c.state.taskId }); + const bySessionId = new Map( + (state?.sessions ?? []).map((row: any) => [ + row.sessionId, + { + unread: Boolean(row.unread), + draftText: row.draftText ?? "", + draftAttachments: parseDraftAttachments(row.draftAttachmentsJson), + draftUpdatedAtMs: row.draftUpdatedAt ?? null, + }, + ]), + ); + return { + activeSessionId: state?.activeSessionId ?? 
null, + bySessionId, + }; +} + +async function upsertUserTaskState(c: any, authSessionId: string | null | undefined, sessionId: string, patch: Record): Promise { + if (!authSessionId) { + return; + } + + const authState = await getBetterAuthService().getAuthState(authSessionId); + const userId = authState?.user?.id; + if (typeof userId !== "string" || userId.length === 0) { + return; + } + + const user = await getOrCreateUser(c, userId); + await user.taskStateUpsert({ + taskId: c.state.taskId, + sessionId, + patch, + }); +} + +async function deleteUserTaskState(c: any, authSessionId: string | null | undefined, sessionId: string): Promise { + if (!authSessionId) { + return; + } + + const authState = await getBetterAuthService().getAuthState(authSessionId); + const userId = authState?.user?.id; + if (typeof userId !== "string" || userId.length === 0) { + return; + } + + const user = await getOrCreateUser(c, userId); + await user.taskStateDelete({ + taskId: c.state.taskId, + sessionId, + }); +} + +async function resolveDefaultModel(c: any, authSessionId?: string | null): Promise { + if (!authSessionId) { + return FALLBACK_MODEL; + } + + const authState = await getBetterAuthService().getAuthState(authSessionId); + const userId = authState?.user?.id; + if (typeof userId !== "string" || userId.length === 0) { + return FALLBACK_MODEL; + } + + const user = await getOrCreateUser(c, userId); + const userState = await user.getAppAuthState({ sessionId: authSessionId }); + return userState?.profile?.defaultModel ?? 
FALLBACK_MODEL; +} + async function ensureSessionMeta( c: any, params: { sessionId: string; sandboxSessionId?: string | null; model?: string; + authSessionId?: string | null; sessionName?: string; - unread?: boolean; created?: boolean; status?: "pending_provision" | "pending_session_create" | "ready" | "error"; errorMessage?: string | null; }, ): Promise { - await ensureWorkbenchSessionTable(c); const existing = await readSessionMeta(c, params.sessionId); if (existing) { return existing; @@ -244,11 +301,10 @@ async function ensureSessionMeta( const now = Date.now(); const sessionName = params.sessionName ?? (await nextSessionName(c)); - const model = params.model ?? defaultModelForAgent(c.state.agentType); - const unread = params.unread ?? false; + const model = params.model ?? (await resolveDefaultModel(c, params.authSessionId)); await c.db - .insert(taskWorkbenchSessions) + .insert(taskWorkspaceSessions) .values({ sessionId: params.sessionId, sandboxSessionId: params.sandboxSessionId ?? null, @@ -258,10 +314,6 @@ async function ensureSessionMeta( errorMessage: params.errorMessage ?? null, transcriptJson: "[]", transcriptUpdatedAt: null, - unread: unread ? 1 : 0, - draftText: "", - draftAttachmentsJson: "[]", - draftUpdatedAt: null, created: params.created === false ? 
0 : 1, closed: 0, thinkingSinceMs: null, @@ -276,19 +328,18 @@ async function ensureSessionMeta( async function updateSessionMeta(c: any, sessionId: string, values: Record): Promise { await ensureSessionMeta(c, { sessionId }); await c.db - .update(taskWorkbenchSessions) + .update(taskWorkspaceSessions) .set({ ...values, updatedAt: Date.now(), }) - .where(eq(taskWorkbenchSessions.sessionId, sessionId)) + .where(eq(taskWorkspaceSessions.sessionId, sessionId)) .run(); return await readSessionMeta(c, sessionId); } async function readSessionMetaBySandboxSessionId(c: any, sandboxSessionId: string): Promise { - await ensureWorkbenchSessionTable(c); - const row = await c.db.select().from(taskWorkbenchSessions).where(eq(taskWorkbenchSessions.sandboxSessionId, sandboxSessionId)).get(); + const row = await c.db.select().from(taskWorkspaceSessions).where(eq(taskWorkspaceSessions.sandboxSessionId, sandboxSessionId)).get(); if (!row) { return null; } @@ -298,17 +349,17 @@ async function readSessionMetaBySandboxSessionId(c: any, sandboxSessionId: strin async function requireReadySessionMeta(c: any, sessionId: string): Promise { const meta = await readSessionMeta(c, sessionId); if (!meta) { - throw new Error(`Unknown workbench session: ${sessionId}`); + throw new Error(`Unknown workspace session: ${sessionId}`); } if (meta.status !== "ready" || !meta.sandboxSessionId) { - throw new Error(meta.errorMessage ?? "This workbench session is still preparing"); + throw new Error(meta.errorMessage ?? "This workspace session is still preparing"); } return meta; } export function requireSendableSessionMeta(meta: any, sessionId: string): any { if (!meta) { - throw new Error(`Unknown workbench session: ${sessionId}`); + throw new Error(`Unknown workspace session: ${sessionId}`); } if (meta.status !== "ready" || !meta.sandboxSessionId) { throw new Error(`Session is not ready (status: ${meta.status}). 
Wait for session provisioning to complete.`); @@ -336,7 +387,7 @@ async function getTaskSandboxRuntime( }> { const { config } = getActorRuntimeContext(); const sandboxId = stableSandboxId(c); - const sandboxProviderId = resolveSandboxProviderId(config, record.sandboxProviderId ?? c.state.sandboxProviderId ?? null); + const sandboxProviderId = resolveSandboxProviderId(config, record.sandboxProviderId ?? null); const sandbox = await getOrCreateTaskSandbox(c, c.state.organizationId, sandboxId, {}); const actorId = typeof sandbox.resolve === "function" ? await sandbox.resolve().catch(() => null) : null; const switchTarget = sandboxProviderId === "local" ? `sandbox://local/${sandboxId}` : `sandbox://e2b/${sandboxId}`; @@ -350,7 +401,6 @@ async function getTaskSandboxRuntime( sandboxActorId: typeof actorId === "string" ? actorId : null, switchTarget, cwd: SANDBOX_REPO_CWD, - statusMessage: "sandbox ready", createdAt: now, updatedAt: now, }) @@ -389,7 +439,7 @@ async function getTaskSandboxRuntime( /** * Track whether the sandbox repo has been fully prepared (cloned + fetched + checked out) * for the current actor lifecycle. Subsequent calls can skip the expensive `git fetch` - * when `skipFetch` is true (used by sendWorkbenchMessage to avoid blocking on every prompt). + * when `skipFetch` is true (used by sendWorkspaceMessage to avoid blocking on every prompt). */ let sandboxRepoPrepared = false; @@ -405,8 +455,7 @@ async function ensureSandboxRepo(c: any, sandbox: any, record: any, opts?: { ski } const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId); - const repository = await getOrCreateRepository(c, c.state.organizationId, c.state.repoId, c.state.repoRemote); - const metadata = await repository.getRepositoryMetadata({}); + const metadata = await getRepositoryMetadata(c); const baseRef = metadata.defaultBranch ?? 
"main"; const sandboxRepoRoot = dirname(SANDBOX_REPO_CWD); const script = [ @@ -414,7 +463,7 @@ async function ensureSandboxRepo(c: any, sandbox: any, record: any, opts?: { ski `mkdir -p ${JSON.stringify(sandboxRepoRoot)}`, "git config --global credential.helper '!f() { echo username=x-access-token; echo password=${GH_TOKEN:-$GITHUB_TOKEN}; }; f'", `if [ ! -d ${JSON.stringify(`${SANDBOX_REPO_CWD}/.git`)} ]; then rm -rf ${JSON.stringify(SANDBOX_REPO_CWD)} && git clone ${JSON.stringify( - c.state.repoRemote, + metadata.remoteUrl, )} ${JSON.stringify(SANDBOX_REPO_CWD)}; fi`, `cd ${JSON.stringify(SANDBOX_REPO_CWD)}`, "git fetch origin --prune", @@ -452,7 +501,7 @@ async function executeInSandbox( label: string; }, ): Promise<{ exitCode: number; result: string }> { - const record = await ensureWorkbenchSeeded(c); + const record = await ensureWorkspaceSeeded(c); const runtime = await getTaskSandboxRuntime(c, record); await ensureSandboxRepo(c, runtime.sandbox, record); const response = await runtime.sandbox.runProcess({ @@ -555,7 +604,7 @@ function buildFileTree(paths: string[]): Array { return sortNodes(root.children.values()); } -async function collectWorkbenchGitState(c: any, record: any) { +async function collectWorkspaceGitState(c: any, record: any) { const activeSandboxId = record.activeSandboxId; const activeSandbox = activeSandboxId != null ? ((record.sandboxes ?? []).find((candidate: any) => candidate.sandboxId === activeSandboxId) ?? null) : null; const cwd = activeSandbox?.cwd ?? record.sandboxes?.[0]?.cwd ?? 
null; @@ -628,7 +677,6 @@ async function collectWorkbenchGitState(c: any, record: any) { } async function readCachedGitState(c: any): Promise<{ fileChanges: Array; diffs: Record; fileTree: Array; updatedAt: number | null }> { - await ensureTaskRuntimeCacheColumns(c); const row = await c.db .select({ gitStateJson: taskRuntime.gitStateJson, @@ -645,7 +693,6 @@ async function readCachedGitState(c: any): Promise<{ fileChanges: Array; di } async function writeCachedGitState(c: any, gitState: { fileChanges: Array; diffs: Record; fileTree: Array }): Promise { - await ensureTaskRuntimeCacheColumns(c); const now = Date.now(); await c.db .update(taskRuntime) @@ -687,102 +734,78 @@ async function writeSessionTranscript(c: any, sessionId: string, transcript: Arr }); } -async function enqueueWorkbenchRefresh( +async function enqueueWorkspaceRefresh( c: any, - command: "task.command.workbench.refresh_derived" | "task.command.workbench.refresh_session_transcript", + command: "task.command.workspace.refresh_derived" | "task.command.workspace.refresh_session_transcript", body: Record, ): Promise { - const self = selfTask(c); - await self.send(command, body, { wait: false }); + // Call directly since we're inside the task actor (no queue needed) + if (command === "task.command.workspace.refresh_derived") { + void refreshWorkspaceDerivedState(c).catch(() => {}); + } else { + void refreshWorkspaceSessionTranscript(c, body.sessionId as string).catch(() => {}); + } } -async function enqueueWorkbenchEnsureSession(c: any, sessionId: string): Promise { - const self = selfTask(c); - await self.send( - "task.command.workbench.ensure_session", - { - sessionId, - }, - { - wait: false, - }, - ); +async function enqueueWorkspaceEnsureSession(c: any, sessionId: string): Promise { + // Call directly since we're inside the task actor + void ensureWorkspaceSession(c, sessionId).catch(() => {}); } -function pendingWorkbenchSessionStatus(record: any): "pending_provision" | "pending_session_create" { 
+function pendingWorkspaceSessionStatus(record: any): "pending_provision" | "pending_session_create" { return record.activeSandboxId ? "pending_session_create" : "pending_provision"; } -async function maybeScheduleWorkbenchRefreshes(c: any, record: any, sessions: Array): Promise { +async function maybeScheduleWorkspaceRefreshes(c: any, record: any, sessions: Array): Promise { const gitState = await readCachedGitState(c); if (record.activeSandboxId && !gitState.updatedAt) { - await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_derived", {}); + await enqueueWorkspaceRefresh(c, "task.command.workspace.refresh_derived", {}); } for (const session of sessions) { if (session.closed || session.status !== "ready" || !session.sandboxSessionId || session.transcriptUpdatedAt) { continue; } - await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", { + await enqueueWorkspaceRefresh(c, "task.command.workspace.refresh_session_transcript", { sessionId: session.sandboxSessionId, }); } } -function activeSessionStatus(record: any, sessionId: string) { - if (record.activeSessionId !== sessionId) { - return "idle"; +function computeWorkspaceTaskStatus(record: any, sessions: Array) { + if (record.status && String(record.status).startsWith("init_")) { + return record.status; } - - if (record.status === "running") { + if (record.status === "archived" || record.status === "killed") { + return record.status; + } + if (sessions.some((session) => session.closed !== true && session.thinkingSinceMs)) { return "running"; } - if (record.status === "error") { + if (sessions.some((session) => session.closed !== true && session.status === "error")) { return "error"; } return "idle"; } -async function readPullRequestSummary(c: any, branchName: string | null) { - if (!branchName) { - return null; - } - - try { - const repository = await getOrCreateRepository(c, c.state.organizationId, c.state.repoId, c.state.repoRemote); - return await 
repository.getPullRequestForBranch({ branchName }); - } catch { - return null; - } +export async function ensureWorkspaceSeeded(c: any): Promise { + return await getCurrentRecord(c); } -export async function ensureWorkbenchSeeded(c: any): Promise { - await ensureTaskRuntimeCacheColumns(c); - const record = await getCurrentRecord({ db: c.db, state: c.state }); - if (record.activeSessionId) { - await ensureSessionMeta(c, { - sessionId: record.activeSessionId, - sandboxSessionId: record.activeSessionId, - model: defaultModelForAgent(record.agentType), - sessionName: "Session 1", - status: "ready", - }); - } - return record; -} - -function buildSessionSummary(record: any, meta: any): any { +function buildSessionSummary(meta: any, userState?: any): any { const derivedSandboxSessionId = meta.status === "ready" ? (meta.sandboxSessionId ?? null) : null; const sessionStatus = meta.status === "pending_provision" || meta.status === "pending_session_create" ? meta.status - : meta.status === "ready" && derivedSandboxSessionId - ? activeSessionStatus(record, derivedSandboxSessionId) + : meta.thinkingSinceMs + ? "running" : meta.status === "error" ? "error" - : "ready"; + : meta.status === "ready" && derivedSandboxSessionId + ? "idle" + : "ready"; let thinkingSinceMs = meta.thinkingSinceMs ?? null; - let unread = Boolean(meta.unread); + let unread = Boolean(userState?.unread); if (thinkingSinceMs && sessionStatus !== "running") { thinkingSinceMs = null; unread = true; @@ -803,8 +826,8 @@ function buildSessionSummary(record: any, meta: any): any { }; } -function buildSessionDetailFromMeta(record: any, meta: any): any { - const summary = buildSessionSummary(record, meta); +function buildSessionDetailFromMeta(meta: any, userState?: any): any { + const summary = buildSessionSummary(meta, userState); return { sessionId: meta.sessionId, sandboxSessionId: summary.sandboxSessionId ?? 
null, @@ -817,57 +840,56 @@ function buildSessionDetailFromMeta(record: any, meta: any): any { created: summary.created, errorMessage: summary.errorMessage, draft: { - text: meta.draftText ?? "", - attachments: Array.isArray(meta.draftAttachments) ? meta.draftAttachments : [], - updatedAtMs: meta.draftUpdatedAtMs ?? null, + text: userState?.draftText ?? "", + attachments: Array.isArray(userState?.draftAttachments) ? userState.draftAttachments : [], + updatedAtMs: userState?.draftUpdatedAtMs ?? null, }, transcript: meta.transcript ?? [], }; } /** - * Builds a WorkbenchTaskSummary from local task actor state. Task actors push + * Builds a WorkspaceTaskSummary from local task actor state. Task actors push * this to the parent organization actor so organization sidebar reads stay local. */ -export async function buildTaskSummary(c: any): Promise { - const record = await ensureWorkbenchSeeded(c); +export async function buildTaskSummary(c: any, authSessionId?: string | null): Promise { + const record = await ensureWorkspaceSeeded(c); + const repositoryMetadata = await getRepositoryMetadata(c); const sessions = await listSessionMetaRows(c); - await maybeScheduleWorkbenchRefreshes(c, record, sessions); + await maybeScheduleWorkspaceRefreshes(c, record, sessions); + const userTaskState = await getUserTaskState(c, authSessionId); + const taskStatus = computeWorkspaceTaskStatus(record, sessions); + const activeSessionId = + userTaskState.activeSessionId && sessions.some((meta) => meta.sessionId === userTaskState.activeSessionId) ? userTaskState.activeSessionId : null; return { id: c.state.taskId, repoId: c.state.repoId, title: record.title ?? "New Task", - status: record.status ?? 
"new", - repoName: repoLabelFromRemote(c.state.repoRemote), + status: taskStatus, + repoName: repoLabelFromRemote(repositoryMetadata.remoteUrl), updatedAtMs: record.updatedAt, branch: record.branchName, - pullRequest: await readPullRequestSummary(c, record.branchName), - sessionsSummary: sessions.map((meta) => buildSessionSummary(record, meta)), + pullRequest: record.pullRequest ?? null, + activeSessionId, + sessionsSummary: sessions.map((meta) => buildSessionSummary(meta, userTaskState.bySessionId.get(meta.sessionId))), }; } /** - * Builds a WorkbenchTaskDetail from local task actor state for direct task + * Builds a WorkspaceTaskDetail from local task actor state for direct task * subscribers. This is a full replacement payload, not a patch. */ -export async function buildTaskDetail(c: any): Promise { - const record = await ensureWorkbenchSeeded(c); +export async function buildTaskDetail(c: any, authSessionId?: string | null): Promise { + const record = await ensureWorkspaceSeeded(c); const gitState = await readCachedGitState(c); const sessions = await listSessionMetaRows(c); - await maybeScheduleWorkbenchRefreshes(c, record, sessions); - const summary = await buildTaskSummary(c); + await maybeScheduleWorkspaceRefreshes(c, record, sessions); + const summary = await buildTaskSummary(c, authSessionId); return { ...summary, task: record.task, - agentType: record.agentType === "claude" || record.agentType === "codex" ? record.agentType : null, - runtimeStatus: record.status, - statusMessage: record.statusMessage ?? null, - activeSessionId: record.activeSessionId ?? null, - diffStat: record.diffStat ?? null, - prUrl: record.prUrl ?? null, - reviewStatus: record.reviewStatus ?? null, fileChanges: gitState.fileChanges, diffs: gitState.diffs, fileTree: gitState.fileTree, @@ -882,50 +904,63 @@ export async function buildTaskDetail(c: any): Promise { } /** - * Builds a WorkbenchSessionDetail for a specific session. 
+ * Builds a WorkspaceSessionDetail for a specific session. */ -export async function buildSessionDetail(c: any, sessionId: string): Promise { - const record = await ensureWorkbenchSeeded(c); +export async function buildSessionDetail(c: any, sessionId: string, authSessionId?: string | null): Promise { + const record = await ensureWorkspaceSeeded(c); const meta = await readSessionMeta(c, sessionId); if (!meta || meta.closed) { - throw new Error(`Unknown workbench session: ${sessionId}`); + throw new Error(`Unknown workspace session: ${sessionId}`); } + const userTaskState = await getUserTaskState(c, authSessionId); + const userSessionState = userTaskState.bySessionId.get(sessionId); - if (!meta.sandboxSessionId) { - return buildSessionDetailFromMeta(record, meta); + // Skip live transcript fetch if the sandbox session doesn't exist yet or + // the session is still provisioning — the sandbox API will block/timeout. + const isPending = meta.status === "pending_provision" || meta.status === "pending_session_create"; + if (!meta.sandboxSessionId || isPending) { + return buildSessionDetailFromMeta(meta, userSessionState); } try { const transcript = await readSessionTranscript(c, record, meta.sandboxSessionId); if (JSON.stringify(meta.transcript ?? []) !== JSON.stringify(transcript)) { await writeSessionTranscript(c, meta.sessionId, transcript); - return buildSessionDetailFromMeta(record, { - ...meta, - transcript, - transcriptUpdatedAt: Date.now(), - }); + return buildSessionDetailFromMeta( + { + ...meta, + transcript, + transcriptUpdatedAt: Date.now(), + }, + userSessionState, + ); } - } catch { - // Session detail reads should degrade to cached transcript data if the live sandbox is unavailable. + } catch (error) { + // Session detail reads degrade to cached transcript when sandbox is unavailable. 
+ logActorWarning("task", "readSessionTranscript failed, using cached transcript", { + taskId: c.state.taskId, + sessionId, + error: resolveErrorMessage(error), + }); } - return buildSessionDetailFromMeta(record, meta); + return buildSessionDetailFromMeta(meta, userSessionState); } export async function getTaskSummary(c: any): Promise { return await buildTaskSummary(c); } -export async function getTaskDetail(c: any): Promise { - return await buildTaskDetail(c); +export async function getTaskDetail(c: any, authSessionId?: string): Promise { + return await buildTaskDetail(c, authSessionId); } -export async function getSessionDetail(c: any, sessionId: string): Promise { - return await buildSessionDetail(c, sessionId); +export async function getSessionDetail(c: any, sessionId: string, authSessionId?: string): Promise { + return await buildSessionDetail(c, sessionId, authSessionId); } /** - * Replaces the old notifyWorkbenchUpdated pattern. + * Replaces the old notifyWorkspaceUpdated pattern. 
* * The task actor emits two kinds of updates: * - Push summary state up to the parent organization actor so the sidebar @@ -934,9 +969,9 @@ export async function getSessionDetail(c: any, sessionId: string): Promise */ export async function broadcastTaskUpdate(c: any, options?: { sessionId?: string }): Promise { const organization = await getOrCreateOrganization(c, c.state.organizationId); - await organization.applyTaskSummaryUpdate({ taskSummary: await buildTaskSummary(c) }); + await organization.commandApplyTaskSummaryUpdate({ taskSummary: await buildTaskSummary(c) }); c.broadcast("taskUpdated", { - type: "taskDetailUpdated", + type: "taskUpdated", detail: await buildTaskDetail(c), }); @@ -948,15 +983,15 @@ export async function broadcastTaskUpdate(c: any, options?: { sessionId?: string } } -export async function refreshWorkbenchDerivedState(c: any): Promise { - const record = await ensureWorkbenchSeeded(c); - const gitState = await collectWorkbenchGitState(c, record); +export async function refreshWorkspaceDerivedState(c: any): Promise { + const record = await ensureWorkspaceSeeded(c); + const gitState = await collectWorkspaceGitState(c, record); await writeCachedGitState(c, gitState); await broadcastTaskUpdate(c); } -export async function refreshWorkbenchSessionTranscript(c: any, sessionId: string): Promise { - const record = await ensureWorkbenchSeeded(c); +export async function refreshWorkspaceSessionTranscript(c: any, sessionId: string): Promise { + const record = await ensureWorkspaceSeeded(c); const meta = (await readSessionMetaBySandboxSessionId(c, sessionId)) ?? 
(await readSessionMeta(c, sessionId)); if (!meta?.sandboxSessionId) { return; @@ -967,7 +1002,7 @@ export async function refreshWorkbenchSessionTranscript(c: any, sessionId: strin await broadcastTaskUpdate(c, { sessionId: meta.sessionId }); } -export async function renameWorkbenchTask(c: any, value: string): Promise { +export async function renameWorkspaceTask(c: any, value: string): Promise { const nextTitle = value.trim(); if (!nextTitle) { throw new Error("task title is required"); @@ -981,85 +1016,51 @@ export async function renameWorkbenchTask(c: any, value: string): Promise }) .where(eq(taskTable.id, 1)) .run(); - c.state.title = nextTitle; await broadcastTaskUpdate(c); } -export async function renameWorkbenchBranch(c: any, value: string): Promise { - const nextBranch = value.trim(); - if (!nextBranch) { - throw new Error("branch name is required"); - } - - const record = await ensureWorkbenchSeeded(c); - if (!record.branchName) { - throw new Error("cannot rename branch before task branch exists"); - } - if (!record.activeSandboxId) { - throw new Error("cannot rename branch without an active sandbox"); - } - const activeSandbox = (record.sandboxes ?? []).find((candidate: any) => candidate.sandboxId === record.activeSandboxId) ?? 
null; - if (!activeSandbox?.cwd) { - throw new Error("cannot rename branch without a sandbox cwd"); - } - - const renameResult = await executeInSandbox(c, { - sandboxId: record.activeSandboxId, - cwd: activeSandbox.cwd, - command: [ - `git branch -m ${JSON.stringify(record.branchName)} ${JSON.stringify(nextBranch)}`, - `if git ls-remote --exit-code --heads origin ${JSON.stringify(record.branchName)} >/dev/null 2>&1; then git push origin :${JSON.stringify(record.branchName)}; fi`, - `git push origin ${JSON.stringify(nextBranch)}`, - `git branch --set-upstream-to=${JSON.stringify(`origin/${nextBranch}`)} ${JSON.stringify(nextBranch)} || git push --set-upstream origin ${JSON.stringify(nextBranch)}`, - ].join(" && "), - label: `git branch -m ${record.branchName} ${nextBranch}`, - }); - if (renameResult.exitCode !== 0) { - throw new Error(`branch rename failed (${renameResult.exitCode}): ${renameResult.result}`); - } - +export async function syncTaskPullRequest(c: any, pullRequest: any): Promise { + const now = pullRequest?.updatedAtMs ?? Date.now(); await c.db .update(taskTable) .set({ - branchName: nextBranch, - updatedAt: Date.now(), + pullRequestJson: pullRequest ? JSON.stringify(pullRequest) : null, + updatedAt: now, }) .where(eq(taskTable.id, 1)) .run(); - c.state.branchName = nextBranch; - - const repository = await getOrCreateRepository(c, c.state.organizationId, c.state.repoId, c.state.repoRemote); - await repository.registerTaskBranch({ - taskId: c.state.taskId, - branchName: nextBranch, - }); await broadcastTaskUpdate(c); } -export async function createWorkbenchSession(c: any, model?: string): Promise<{ sessionId: string }> { +export async function createWorkspaceSession(c: any, model?: string, authSessionId?: string): Promise<{ sessionId: string }> { const sessionId = `session-${randomUUID()}`; - const record = await ensureWorkbenchSeeded(c); + const record = await ensureWorkspaceSeeded(c); await ensureSessionMeta(c, { sessionId, - model: model ?? 
defaultModelForAgent(record.agentType), + model: model ?? (await resolveDefaultModel(c, authSessionId)), + authSessionId, sandboxSessionId: null, - status: pendingWorkbenchSessionStatus(record), + status: pendingWorkspaceSessionStatus(record), created: false, }); + await upsertUserTaskState(c, authSessionId, sessionId, { + activeSessionId: sessionId, + unread: false, + }); await broadcastTaskUpdate(c, { sessionId: sessionId }); - await enqueueWorkbenchEnsureSession(c, sessionId); + await enqueueWorkspaceEnsureSession(c, sessionId); return { sessionId }; } -export async function ensureWorkbenchSession(c: any, sessionId: string, model?: string): Promise { +export async function ensureWorkspaceSession(c: any, sessionId: string, model?: string, authSessionId?: string): Promise { const meta = await readSessionMeta(c, sessionId); if (!meta || meta.closed) { return; } - const record = await ensureWorkbenchSeeded(c); + const record = await ensureWorkspaceSeeded(c); if (meta.sandboxSessionId && meta.status === "ready") { - await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", { + await enqueueWorkspaceRefresh(c, "task.command.workspace.refresh_session_transcript", { sessionId: meta.sandboxSessionId, }); await broadcastTaskUpdate(c, { sessionId: sessionId }); @@ -1075,10 +1076,12 @@ export async function ensureWorkbenchSession(c: any, sessionId: string, model?: try { const runtime = await getTaskSandboxRuntime(c, record); await ensureSandboxRepo(c, runtime.sandbox, record); + const resolvedModel = model ?? meta.model ?? (await resolveDefaultModel(c, authSessionId)); + const resolvedAgent = await resolveSandboxAgentForModel(c, resolvedModel); await runtime.sandbox.createSession({ id: meta.sandboxSessionId ?? sessionId, - agent: agentTypeForModel(model ?? meta.model ?? defaultModelForAgent(record.agentType)), - model: model ?? meta.model ?? 
defaultModelForAgent(record.agentType), + agent: resolvedAgent, + model: resolvedModel, sessionInit: { cwd: runtime.cwd, }, @@ -1089,7 +1092,7 @@ export async function ensureWorkbenchSession(c: any, sessionId: string, model?: status: "ready", errorMessage: null, }); - await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", { + await enqueueWorkspaceRefresh(c, "task.command.workspace.refresh_session_transcript", { sessionId: meta.sandboxSessionId ?? sessionId, }); } catch (error) { @@ -1102,27 +1105,17 @@ export async function ensureWorkbenchSession(c: any, sessionId: string, model?: await broadcastTaskUpdate(c, { sessionId: sessionId }); } -export async function enqueuePendingWorkbenchSessions(c: any): Promise { - const self = selfTask(c); +export async function enqueuePendingWorkspaceSessions(c: any): Promise { const pending = (await listSessionMetaRows(c, { includeClosed: true })).filter( (row) => row.closed !== true && row.status !== "ready" && row.status !== "error", ); for (const row of pending) { - await self.send( - "task.command.workbench.ensure_session", - { - sessionId: row.sessionId, - model: row.model, - }, - { - wait: false, - }, - ); + void ensureWorkspaceSession(c, row.sessionId, row.model).catch(() => {}); } } -export async function renameWorkbenchSession(c: any, sessionId: string, title: string): Promise { +export async function renameWorkspaceSession(c: any, sessionId: string, title: string): Promise { const trimmed = title.trim(); if (!trimmed) { throw new Error("session title is required"); @@ -1133,15 +1126,26 @@ export async function renameWorkbenchSession(c: any, sessionId: string, title: s await broadcastTaskUpdate(c, { sessionId }); } -export async function setWorkbenchSessionUnread(c: any, sessionId: string, unread: boolean): Promise { - await updateSessionMeta(c, sessionId, { - unread: unread ? 
1 : 0, +export async function selectWorkspaceSession(c: any, sessionId: string, authSessionId?: string): Promise { + const meta = await readSessionMeta(c, sessionId); + if (!meta || meta.closed) { + return; + } + await upsertUserTaskState(c, authSessionId, sessionId, { + activeSessionId: sessionId, }); await broadcastTaskUpdate(c, { sessionId }); } -export async function updateWorkbenchDraft(c: any, sessionId: string, text: string, attachments: Array): Promise { - await updateSessionMeta(c, sessionId, { +export async function setWorkspaceSessionUnread(c: any, sessionId: string, unread: boolean, authSessionId?: string): Promise { + await upsertUserTaskState(c, authSessionId, sessionId, { + unread, + }); + await broadcastTaskUpdate(c, { sessionId }); +} + +export async function updateWorkspaceDraft(c: any, sessionId: string, text: string, attachments: Array, authSessionId?: string): Promise { + await upsertUserTaskState(c, authSessionId, sessionId, { draftText: text, draftAttachmentsJson: JSON.stringify(attachments), draftUpdatedAt: Date.now(), @@ -1149,7 +1153,7 @@ export async function updateWorkbenchDraft(c: any, sessionId: string, text: stri await broadcastTaskUpdate(c, { sessionId }); } -export async function changeWorkbenchModel(c: any, sessionId: string, model: string): Promise { +export async function changeWorkspaceModel(c: any, sessionId: string, model: string, _authSessionId?: string): Promise { const meta = await readSessionMeta(c, sessionId); if (!meta || meta.closed) { return; @@ -1159,7 +1163,7 @@ export async function changeWorkbenchModel(c: any, sessionId: string, model: str return; } - const record = await ensureWorkbenchSeeded(c); + const record = await ensureWorkspaceSeeded(c); let nextMeta = await updateSessionMeta(c, sessionId, { model, }); @@ -1170,7 +1174,7 @@ export async function changeWorkbenchModel(c: any, sessionId: string, model: str await sandbox.destroySession(nextMeta.sandboxSessionId); nextMeta = await updateSessionMeta(c, sessionId, 
{ sandboxSessionId: null, - status: pendingWorkbenchSessionStatus(record), + status: pendingWorkspaceSessionStatus(record), errorMessage: null, transcriptJson: "[]", transcriptUpdatedAt: null, @@ -1191,20 +1195,20 @@ export async function changeWorkbenchModel(c: any, sessionId: string, model: str } } else if (nextMeta.status !== "ready") { nextMeta = await updateSessionMeta(c, sessionId, { - status: pendingWorkbenchSessionStatus(record), + status: pendingWorkspaceSessionStatus(record), errorMessage: null, }); } if (shouldEnsure) { - await enqueueWorkbenchEnsureSession(c, sessionId); + await enqueueWorkspaceEnsureSession(c, sessionId); } await broadcastTaskUpdate(c, { sessionId }); } -export async function sendWorkbenchMessage(c: any, sessionId: string, text: string, attachments: Array): Promise { +export async function sendWorkspaceMessage(c: any, sessionId: string, text: string, attachments: Array, authSessionId?: string): Promise { const meta = requireSendableSessionMeta(await readSessionMeta(c, sessionId), sessionId); - const record = await ensureWorkbenchSeeded(c); + const record = await ensureWorkspaceSeeded(c); const runtime = await getTaskSandboxRuntime(c, record); // Skip git fetch on subsequent messages — the repo was already prepared during session // creation. This avoids a 5-30s network round-trip to GitHub on every prompt. 
@@ -1217,42 +1221,36 @@ export async function sendWorkbenchMessage(c: any, sessionId: string, text: stri } await updateSessionMeta(c, sessionId, { - unread: 0, created: 1, + thinkingSinceMs: Date.now(), + }); + await upsertUserTaskState(c, authSessionId, sessionId, { + unread: false, draftText: "", draftAttachmentsJson: "[]", draftUpdatedAt: Date.now(), - thinkingSinceMs: Date.now(), + activeSessionId: sessionId, }); - await c.db - .update(taskRuntime) - .set({ - activeSessionId: meta.sandboxSessionId, - updatedAt: Date.now(), - }) - .where(eq(taskRuntime.id, 1)) - .run(); - - await syncWorkbenchSessionStatus(c, meta.sandboxSessionId, "running", Date.now()); + await syncWorkspaceSessionStatus(c, meta.sandboxSessionId, "running", Date.now()); try { await runtime.sandbox.sendPrompt({ sessionId: meta.sandboxSessionId, prompt: prompt.join("\n\n"), }); - await syncWorkbenchSessionStatus(c, meta.sandboxSessionId, "idle", Date.now()); + await syncWorkspaceSessionStatus(c, meta.sandboxSessionId, "idle", Date.now()); } catch (error) { await updateSessionMeta(c, sessionId, { status: "error", errorMessage: error instanceof Error ? 
error.message : String(error), }); - await syncWorkbenchSessionStatus(c, meta.sandboxSessionId, "error", Date.now()); + await syncWorkspaceSessionStatus(c, meta.sandboxSessionId, "error", Date.now()); throw error; } } -export async function stopWorkbenchSession(c: any, sessionId: string): Promise { +export async function stopWorkspaceSession(c: any, sessionId: string): Promise { const meta = await requireReadySessionMeta(c, sessionId); const sandbox = getTaskSandbox(c, c.state.organizationId, stableSandboxId(c)); await sandbox.destroySession(meta.sandboxSessionId); @@ -1262,39 +1260,10 @@ export async function stopWorkbenchSession(c: any, sessionId: string): Promise { - const record = await ensureWorkbenchSeeded(c); +export async function syncWorkspaceSessionStatus(c: any, sessionId: string, status: "running" | "idle" | "error", at: number): Promise { const meta = (await readSessionMetaBySandboxSessionId(c, sessionId)) ?? (await ensureSessionMeta(c, { sessionId: sessionId, sandboxSessionId: sessionId })); let changed = false; - if (record.activeSessionId === sessionId || record.activeSessionId === meta.sandboxSessionId) { - const mappedStatus = status === "running" ? "running" : status === "error" ? 
"error" : "idle"; - if (record.status !== mappedStatus) { - await c.db - .update(taskTable) - .set({ - status: mappedStatus, - updatedAt: at, - }) - .where(eq(taskTable.id, 1)) - .run(); - changed = true; - } - - const statusMessage = `session:${status}`; - if (record.statusMessage !== statusMessage) { - await c.db - .update(taskRuntime) - .set({ - statusMessage, - updatedAt: at, - }) - .where(eq(taskRuntime.id, 1)) - .run(); - changed = true; - } - } - if (status === "running") { if (!meta.thinkingSinceMs) { await updateSessionMeta(c, sessionId, { @@ -1309,27 +1278,30 @@ export async function syncWorkbenchSessionStatus(c: any, sessionId: string, stat }); changed = true; } - if (!meta.unread && shouldMarkSessionUnreadForStatus(meta, status)) { - await updateSessionMeta(c, sessionId, { - unread: 1, - }); - changed = true; - } } if (changed) { - await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", { + const sessions = await listSessionMetaRows(c, { includeClosed: true }); + const nextStatus = computeWorkspaceTaskStatus(await ensureWorkspaceSeeded(c), sessions); + await c.db + .update(taskTable) + .set({ + status: nextStatus, + updatedAt: at, + }) + .where(eq(taskTable.id, 1)) + .run(); + await enqueueWorkspaceRefresh(c, "task.command.workspace.refresh_session_transcript", { sessionId, }); if (status !== "running") { - await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_derived", {}); + await enqueueWorkspaceRefresh(c, "task.command.workspace.refresh_derived", {}); } await broadcastTaskUpdate(c, { sessionId: meta.sessionId }); } } -export async function closeWorkbenchSession(c: any, sessionId: string): Promise { - const record = await ensureWorkbenchSeeded(c); +export async function closeWorkspaceSession(c: any, sessionId: string, authSessionId?: string): Promise { const sessions = await listSessionMetaRows(c); if (sessions.filter((candidate) => candidate.closed !== true).length <= 1) { return; @@ -1347,61 +1319,63 @@ 
export async function closeWorkbenchSession(c: any, sessionId: string): Promise< closed: 1, thinkingSinceMs: null, }); - if (record.activeSessionId === sessionId || record.activeSessionId === meta.sandboxSessionId) { - await c.db - .update(taskRuntime) - .set({ - activeSessionId: null, - updatedAt: Date.now(), - }) - .where(eq(taskRuntime.id, 1)) - .run(); + const remainingSessions = sessions.filter((candidate) => candidate.sessionId !== sessionId && candidate.closed !== true); + const userTaskState = await getUserTaskState(c, authSessionId); + if (userTaskState.activeSessionId === sessionId && remainingSessions[0]) { + await upsertUserTaskState(c, authSessionId, remainingSessions[0].sessionId, { + activeSessionId: remainingSessions[0].sessionId, + }); } + await deleteUserTaskState(c, authSessionId, sessionId); await broadcastTaskUpdate(c); } -export async function markWorkbenchUnread(c: any): Promise { +export async function markWorkspaceUnread(c: any, authSessionId?: string): Promise { const sessions = await listSessionMetaRows(c); const latest = sessions[sessions.length - 1]; if (!latest) { return; } - await updateSessionMeta(c, latest.sessionId, { - unread: 1, + await upsertUserTaskState(c, authSessionId, latest.sessionId, { + unread: true, }); await broadcastTaskUpdate(c, { sessionId: latest.sessionId }); } -export async function publishWorkbenchPr(c: any): Promise { - const record = await ensureWorkbenchSeeded(c); +export async function publishWorkspacePr(c: any): Promise { + const record = await ensureWorkspaceSeeded(c); if (!record.branchName) { throw new Error("cannot publish PR without a branch"); } - const repository = await getOrCreateRepository(c, c.state.organizationId, c.state.repoId, c.state.repoRemote); - const metadata = await repository.getRepositoryMetadata({}); - const repoFullName = metadata.fullName ?? 
githubRepoFullNameFromRemote(c.state.repoRemote); + const metadata = await getRepositoryMetadata(c); + const repoFullName = metadata.fullName ?? githubRepoFullNameFromRemote(metadata.remoteUrl); if (!repoFullName) { - throw new Error(`Unable to resolve GitHub repository for ${c.state.repoRemote}`); + throw new Error(`Unable to resolve GitHub repository for ${metadata.remoteUrl}`); } const { driver } = getActorRuntimeContext(); const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId); - await driver.github.createPr(repoFullName, record.branchName, record.title ?? c.state.task, undefined, { + const created = await driver.github.createPr(repoFullName, record.branchName, record.title ?? record.task, undefined, { githubToken: auth?.githubToken ?? null, baseBranch: metadata.defaultBranch ?? undefined, }); - await c.db - .update(taskTable) - .set({ - prSubmitted: 1, - updatedAt: Date.now(), - }) - .where(eq(taskTable.id, 1)) - .run(); - await broadcastTaskUpdate(c); + await syncTaskPullRequest(c, { + number: created.number, + status: "ready", + title: record.title ?? record.task, + body: null, + state: "open", + url: created.url, + headRefName: record.branchName, + baseRefName: metadata.defaultBranch ?? 
"main", + authorLogin: null, + isDraft: false, + merged: false, + updatedAtMs: Date.now(), + }); } -export async function revertWorkbenchFile(c: any, path: string): Promise { - const record = await ensureWorkbenchSeeded(c); +export async function revertWorkspaceFile(c: any, path: string): Promise { + const record = await ensureWorkspaceSeeded(c); if (!record.activeSandboxId) { throw new Error("cannot revert file without an active sandbox"); } @@ -1419,6 +1393,6 @@ export async function revertWorkbenchFile(c: any, path: string): Promise { if (result.exitCode !== 0) { throw new Error(`file revert failed (${result.exitCode}): ${result.result}`); } - await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_derived", {}); + await enqueueWorkspaceRefresh(c, "task.command.workspace.refresh_derived", {}); await broadcastTaskUpdate(c); } diff --git a/foundry/packages/backend/src/actors/user/actions/better-auth.ts b/foundry/packages/backend/src/actors/user/actions/better-auth.ts new file mode 100644 index 0000000..0fd950e --- /dev/null +++ b/foundry/packages/backend/src/actors/user/actions/better-auth.ts @@ -0,0 +1,47 @@ +import { asc, count as sqlCount, desc } from "drizzle-orm"; +import { applyJoinToRow, applyJoinToRows, buildWhere, columnFor, tableFor } from "../query-helpers.js"; + +export const betterAuthActions = { + // Better Auth adapter action — called by the Better Auth adapter in better-auth.ts. + // Schema and behavior are constrained by Better Auth. + async betterAuthFindOneRecord(c, input: { model: string; where: any[]; join?: any }) { + const table = tableFor(input.model); + const predicate = buildWhere(table, input.where); + const row = predicate ? await c.db.select().from(table).where(predicate).get() : await c.db.select().from(table).get(); + return await applyJoinToRow(c, input.model, row ?? null, input.join); + }, + + // Better Auth adapter action — called by the Better Auth adapter in better-auth.ts. 
+ // Schema and behavior are constrained by Better Auth. + async betterAuthFindManyRecords(c, input: { model: string; where?: any[]; limit?: number; offset?: number; sortBy?: any; join?: any }) { + const table = tableFor(input.model); + const predicate = buildWhere(table, input.where); + let query: any = c.db.select().from(table); + if (predicate) { + query = query.where(predicate); + } + if (input.sortBy?.field) { + const column = columnFor(input.model, table, input.sortBy.field); + query = query.orderBy(input.sortBy.direction === "asc" ? asc(column) : desc(column)); + } + if (typeof input.limit === "number") { + query = query.limit(input.limit); + } + if (typeof input.offset === "number") { + query = query.offset(input.offset); + } + const rows = await query.all(); + return await applyJoinToRows(c, input.model, rows, input.join); + }, + + // Better Auth adapter action — called by the Better Auth adapter in better-auth.ts. + // Schema and behavior are constrained by Better Auth. + async betterAuthCountRecords(c, input: { model: string; where?: any[] }) { + const table = tableFor(input.model); + const predicate = buildWhere(table, input.where); + const row = predicate + ? await c.db.select({ value: sqlCount() }).from(table).where(predicate).get() + : await c.db.select({ value: sqlCount() }).from(table).get(); + return row?.value ?? 0; + }, +}; diff --git a/foundry/packages/backend/src/actors/user/actions/user.ts b/foundry/packages/backend/src/actors/user/actions/user.ts new file mode 100644 index 0000000..714b2b6 --- /dev/null +++ b/foundry/packages/backend/src/actors/user/actions/user.ts @@ -0,0 +1,44 @@ +import { eq } from "drizzle-orm"; +import { authAccounts, authSessions, authUsers, sessionState, userProfiles, userTaskState } from "../db/schema.js"; +import { materializeRow } from "../query-helpers.js"; + +export const userActions = { + // Custom Foundry action — not part of Better Auth. 
+ async getAppAuthState(c, input: { sessionId: string }) { + const session = await c.db.select().from(authSessions).where(eq(authSessions.id, input.sessionId)).get(); + if (!session) { + return null; + } + const [user, profile, currentSessionState, accounts] = await Promise.all([ + c.db.select().from(authUsers).where(eq(authUsers.authUserId, session.userId)).get(), + c.db.select().from(userProfiles).where(eq(userProfiles.userId, session.userId)).get(), + c.db.select().from(sessionState).where(eq(sessionState.sessionId, input.sessionId)).get(), + c.db.select().from(authAccounts).where(eq(authAccounts.userId, session.userId)).all(), + ]); + return { + session, + user: materializeRow("user", user), + profile: profile ?? null, + sessionState: currentSessionState ?? null, + accounts, + }; + }, + + // Custom Foundry action — not part of Better Auth. + async getTaskState(c, input: { taskId: string }) { + const rows = await c.db.select().from(userTaskState).where(eq(userTaskState.taskId, input.taskId)).all(); + const activeSessionId = rows.find((row) => typeof row.activeSessionId === "string" && row.activeSessionId.length > 0)?.activeSessionId ?? null; + return { + taskId: input.taskId, + activeSessionId, + sessions: rows.map((row) => ({ + sessionId: row.sessionId, + unread: row.unread === 1, + draftText: row.draftText, + draftAttachmentsJson: row.draftAttachmentsJson, + draftUpdatedAt: row.draftUpdatedAt ?? 
null, + updatedAt: row.updatedAt, + })), + }; + }, +}; diff --git a/foundry/packages/backend/src/actors/history/db/db.ts b/foundry/packages/backend/src/actors/user/db/db.ts similarity index 70% rename from foundry/packages/backend/src/actors/history/db/db.ts rename to foundry/packages/backend/src/actors/user/db/db.ts index ef76e36..a864893 100644 --- a/foundry/packages/backend/src/actors/history/db/db.ts +++ b/foundry/packages/backend/src/actors/user/db/db.ts @@ -2,4 +2,4 @@ import { db } from "rivetkit/db/drizzle"; import * as schema from "./schema.js"; import migrations from "./migrations.js"; -export const historyDb = db({ schema, migrations }); +export const userDb = db({ schema, migrations }); diff --git a/foundry/packages/backend/src/actors/auth-user/db/migrations.ts b/foundry/packages/backend/src/actors/user/db/migrations.ts similarity index 65% rename from foundry/packages/backend/src/actors/auth-user/db/migrations.ts rename to foundry/packages/backend/src/actors/user/db/migrations.ts index be7cb17..da92bdc 100644 --- a/foundry/packages/backend/src/actors/auth-user/db/migrations.ts +++ b/foundry/packages/backend/src/actors/user/db/migrations.ts @@ -10,6 +10,12 @@ const journal = { tag: "0000_auth_user", breakpoints: true, }, + { + idx: 1, + when: 1773532800000, + tag: "0001_user_task_state", + breakpoints: true, + }, ], } as const; @@ -17,15 +23,19 @@ export default { journal, migrations: { m0000: `CREATE TABLE \`user\` ( - \`id\` text PRIMARY KEY NOT NULL, + \`id\` integer PRIMARY KEY NOT NULL, + \`auth_user_id\` text NOT NULL, \`name\` text NOT NULL, \`email\` text NOT NULL, \`email_verified\` integer NOT NULL, \`image\` text, \`created_at\` integer NOT NULL, - \`updated_at\` integer NOT NULL + \`updated_at\` integer NOT NULL, + CONSTRAINT \`user_singleton_id_check\` CHECK(\`id\` = 1) ); --> statement-breakpoint +CREATE UNIQUE INDEX \`user_auth_user_id_idx\` ON \`user\` (\`auth_user_id\`); +--> statement-breakpoint CREATE TABLE \`session\` ( \`id\` text 
PRIMARY KEY NOT NULL, \`token\` text NOT NULL, @@ -58,23 +68,39 @@ CREATE TABLE \`account\` ( CREATE UNIQUE INDEX \`account_provider_account_idx\` ON \`account\` (\`provider_id\`, \`account_id\`); --> statement-breakpoint CREATE TABLE \`user_profiles\` ( - \`user_id\` text PRIMARY KEY NOT NULL, + \`id\` integer PRIMARY KEY NOT NULL, + \`user_id\` text NOT NULL, \`github_account_id\` text, \`github_login\` text, \`role_label\` text NOT NULL, + \`default_model\` text DEFAULT 'gpt-5.3-codex' NOT NULL, \`eligible_organization_ids_json\` text NOT NULL, \`starter_repo_status\` text NOT NULL, \`starter_repo_starred_at\` integer, \`starter_repo_skipped_at\` integer, \`created_at\` integer NOT NULL, - \`updated_at\` integer NOT NULL + \`updated_at\` integer NOT NULL, + CONSTRAINT \`user_profiles_singleton_id_check\` CHECK(\`id\` = 1) ); --> statement-breakpoint +CREATE UNIQUE INDEX \`user_profiles_user_id_idx\` ON \`user_profiles\` (\`user_id\`); +--> statement-breakpoint CREATE TABLE \`session_state\` ( \`session_id\` text PRIMARY KEY NOT NULL, \`active_organization_id\` text, \`created_at\` integer NOT NULL, \`updated_at\` integer NOT NULL +);`, + m0001: `CREATE TABLE \`user_task_state\` ( + \`task_id\` text NOT NULL, + \`session_id\` text NOT NULL, + \`active_session_id\` text, + \`unread\` integer DEFAULT 0 NOT NULL, + \`draft_text\` text DEFAULT '' NOT NULL, + \`draft_attachments_json\` text DEFAULT '[]' NOT NULL, + \`draft_updated_at\` integer, + \`updated_at\` integer NOT NULL, + PRIMARY KEY(\`task_id\`, \`session_id\`) );`, } as const, }; diff --git a/foundry/packages/backend/src/actors/user/db/schema.ts b/foundry/packages/backend/src/actors/user/db/schema.ts new file mode 100644 index 0000000..6a87a11 --- /dev/null +++ b/foundry/packages/backend/src/actors/user/db/schema.ts @@ -0,0 +1,112 @@ +import { check, integer, primaryKey, sqliteTable, text, uniqueIndex } from "drizzle-orm/sqlite-core"; +import { sql } from "drizzle-orm"; +import { DEFAULT_WORKSPACE_MODEL_ID 
} from "@sandbox-agent/foundry-shared"; + +/** Better Auth core model — schema defined at https://better-auth.com/docs/concepts/database */ +export const authUsers = sqliteTable( + "user", + { + id: integer("id").primaryKey(), + authUserId: text("auth_user_id").notNull(), + name: text("name").notNull(), + email: text("email").notNull(), + emailVerified: integer("email_verified").notNull(), + image: text("image"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), + }, + (table) => ({ + authUserIdIdx: uniqueIndex("user_auth_user_id_idx").on(table.authUserId), + singletonCheck: check("user_singleton_id_check", sql`${table.id} = 1`), + }), +); + +/** Better Auth core model — schema defined at https://better-auth.com/docs/concepts/database */ +export const authSessions = sqliteTable( + "session", + { + id: text("id").notNull().primaryKey(), + token: text("token").notNull(), + userId: text("user_id").notNull(), + expiresAt: integer("expires_at").notNull(), + ipAddress: text("ip_address"), + userAgent: text("user_agent"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), + }, + (table) => ({ + tokenIdx: uniqueIndex("session_token_idx").on(table.token), + }), +); + +/** Better Auth core model — schema defined at https://better-auth.com/docs/concepts/database */ +export const authAccounts = sqliteTable( + "account", + { + id: text("id").notNull().primaryKey(), + accountId: text("account_id").notNull(), + providerId: text("provider_id").notNull(), + userId: text("user_id").notNull(), + accessToken: text("access_token"), + refreshToken: text("refresh_token"), + idToken: text("id_token"), + accessTokenExpiresAt: integer("access_token_expires_at"), + refreshTokenExpiresAt: integer("refresh_token_expires_at"), + scope: text("scope"), + password: text("password"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), + }, + (table) => ({ + providerAccountIdx: 
uniqueIndex("account_provider_account_idx").on(table.providerId, table.accountId), + }), +); + +/** Custom Foundry table — not part of Better Auth. */ +export const userProfiles = sqliteTable( + "user_profiles", + { + id: integer("id").primaryKey(), + userId: text("user_id").notNull(), + githubAccountId: text("github_account_id"), + githubLogin: text("github_login"), + roleLabel: text("role_label").notNull(), + defaultModel: text("default_model").notNull().default(DEFAULT_WORKSPACE_MODEL_ID), + eligibleOrganizationIdsJson: text("eligible_organization_ids_json").notNull(), + starterRepoStatus: text("starter_repo_status").notNull(), + starterRepoStarredAt: integer("starter_repo_starred_at"), + starterRepoSkippedAt: integer("starter_repo_skipped_at"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), + }, + (table) => ({ + userIdIdx: uniqueIndex("user_profiles_user_id_idx").on(table.userId), + singletonCheck: check("user_profiles_singleton_id_check", sql`${table.id} = 1`), + }), +); + +/** Custom Foundry table — not part of Better Auth. */ +export const sessionState = sqliteTable("session_state", { + sessionId: text("session_id").notNull().primaryKey(), + activeOrganizationId: text("active_organization_id"), + createdAt: integer("created_at").notNull(), + updatedAt: integer("updated_at").notNull(), +}); + +/** Custom Foundry table — not part of Better Auth. Stores per-user task/session UI state. 
*/ +export const userTaskState = sqliteTable( + "user_task_state", + { + taskId: text("task_id").notNull(), + sessionId: text("session_id").notNull(), + activeSessionId: text("active_session_id"), + unread: integer("unread").notNull().default(0), + draftText: text("draft_text").notNull().default(""), + draftAttachmentsJson: text("draft_attachments_json").notNull().default("[]"), + draftUpdatedAt: integer("draft_updated_at"), + updatedAt: integer("updated_at").notNull(), + }, + (table) => ({ + pk: primaryKey({ columns: [table.taskId, table.sessionId] }), + }), +); diff --git a/foundry/packages/backend/src/actors/user/index.ts b/foundry/packages/backend/src/actors/user/index.ts new file mode 100644 index 0000000..8a15b58 --- /dev/null +++ b/foundry/packages/backend/src/actors/user/index.ts @@ -0,0 +1,60 @@ +import { actor } from "rivetkit"; +import { userDb } from "./db/db.js"; +import { betterAuthActions } from "./actions/better-auth.js"; +import { userActions } from "./actions/user.js"; +import { + createAuthRecordMutation, + updateAuthRecordMutation, + updateManyAuthRecordsMutation, + deleteAuthRecordMutation, + deleteManyAuthRecordsMutation, + upsertUserProfileMutation, + upsertSessionStateMutation, + upsertTaskStateMutation, + deleteTaskStateMutation, +} from "./workflow.js"; + +export const user = actor({ + db: userDb, + options: { + name: "User", + icon: "shield", + actionTimeout: 60_000, + }, + createState: (_c, input: { userId: string }) => ({ + userId: input.userId, + }), + actions: { + ...betterAuthActions, + ...userActions, + async authCreate(c, body) { + return await createAuthRecordMutation(c, body); + }, + async authUpdate(c, body) { + return await updateAuthRecordMutation(c, body); + }, + async authUpdateMany(c, body) { + return await updateManyAuthRecordsMutation(c, body); + }, + async authDelete(c, body) { + await deleteAuthRecordMutation(c, body); + return { ok: true }; + }, + async authDeleteMany(c, body) { + return await 
deleteManyAuthRecordsMutation(c, body); + }, + async profileUpsert(c, body) { + return await upsertUserProfileMutation(c, body); + }, + async sessionStateUpsert(c, body) { + return await upsertSessionStateMutation(c, body); + }, + async taskStateUpsert(c, body) { + return await upsertTaskStateMutation(c, body); + }, + async taskStateDelete(c, body) { + await deleteTaskStateMutation(c, body); + return { ok: true }; + }, + }, +}); diff --git a/foundry/packages/backend/src/actors/user/query-helpers.ts b/foundry/packages/backend/src/actors/user/query-helpers.ts new file mode 100644 index 0000000..5bdee10 --- /dev/null +++ b/foundry/packages/backend/src/actors/user/query-helpers.ts @@ -0,0 +1,197 @@ +import { and, eq, inArray, isNotNull, isNull, like, lt, lte, gt, gte, ne, notInArray, or } from "drizzle-orm"; +import { authAccounts, authSessions, authUsers, sessionState, userProfiles, userTaskState } from "./db/schema.js"; + +export const userTables = { + user: authUsers, + session: authSessions, + account: authAccounts, + userProfiles, + sessionState, + userTaskState, +} as const; + +export function tableFor(model: string) { + const table = userTables[model as keyof typeof userTables]; + if (!table) { + throw new Error(`Unsupported user model: ${model}`); + } + return table as any; +} + +function dbFieldFor(model: string, field: string): string { + if (model === "user" && field === "id") { + return "authUserId"; + } + return field; +} + +export function materializeRow(model: string, row: any) { + if (!row || model !== "user") { + return row; + } + + const { id: _singletonId, authUserId, ...rest } = row; + return { + id: authUserId, + ...rest, + }; +} + +export function persistInput(model: string, data: Record) { + if (model !== "user") { + return data; + } + + const { id, ...rest } = data; + return { + id: 1, + authUserId: id, + ...rest, + }; +} + +export function persistPatch(model: string, data: Record) { + if (model !== "user") { + return data; + } + + const { id, 
...rest } = data; + return { + ...(id !== undefined ? { authUserId: id } : {}), + ...rest, + }; +} + +export function columnFor(model: string, table: any, field: string) { + const column = table[dbFieldFor(model, field)]; + if (!column) { + throw new Error(`Unsupported user field: ${model}.${field}`); + } + return column; +} + +export function normalizeValue(value: unknown): unknown { + if (value instanceof Date) { + return value.getTime(); + } + if (Array.isArray(value)) { + return value.map((entry) => normalizeValue(entry)); + } + return value; +} + +export function clauseToExpr(table: any, clause: any) { + const model = table === authUsers ? "user" : table === authSessions ? "session" : table === authAccounts ? "account" : ""; + const column = columnFor(model, table, clause.field); + const value = normalizeValue(clause.value); + + switch (clause.operator) { + case "ne": + return value === null ? isNotNull(column) : ne(column, value as any); + case "lt": + return lt(column, value as any); + case "lte": + return lte(column, value as any); + case "gt": + return gt(column, value as any); + case "gte": + return gte(column, value as any); + case "in": + return inArray(column, Array.isArray(value) ? (value as any[]) : [value as any]); + case "not_in": + return notInArray(column, Array.isArray(value) ? (value as any[]) : [value as any]); + case "contains": + return like(column, `%${String(value ?? "")}%`); + case "starts_with": + return like(column, `${String(value ?? "")}%`); + case "ends_with": + return like(column, `%${String(value ?? "")}`); + case "eq": + default: + return value === null ? isNull(column) : eq(column, value as any); + } +} + +export function buildWhere(table: any, where: any[] | undefined) { + if (!where || where.length === 0) { + return undefined; + } + + let expr = clauseToExpr(table, where[0]); + for (const clause of where.slice(1)) { + const next = clauseToExpr(table, clause); + expr = clause.connector === "OR" ? 
or(expr, next) : and(expr, next); + } + return expr; +} + +export function applyJoinToRow(c: any, model: string, row: any, join: any) { + const materialized = materializeRow(model, row); + if (!materialized || !join) { + return materialized; + } + + if (model === "session" && join.user) { + return c.db + .select() + .from(authUsers) + .where(eq(authUsers.authUserId, materialized.userId)) + .get() + .then((user: any) => ({ ...materialized, user: materializeRow("user", user) ?? null })); + } + + if (model === "account" && join.user) { + return c.db + .select() + .from(authUsers) + .where(eq(authUsers.authUserId, materialized.userId)) + .get() + .then((user: any) => ({ ...materialized, user: materializeRow("user", user) ?? null })); + } + + if (model === "user" && join.account) { + return c.db + .select() + .from(authAccounts) + .where(eq(authAccounts.userId, materialized.id)) + .all() + .then((accounts: any[]) => ({ ...materialized, account: accounts })); + } + + return Promise.resolve(materialized); +} + +export async function applyJoinToRows(c: any, model: string, rows: any[], join: any) { + if (!join || rows.length === 0) { + return rows.map((row) => materializeRow(model, row)); + } + + if (model === "session" && join.user) { + const userIds = [...new Set(rows.map((row) => row.userId).filter(Boolean))]; + const users = userIds.length > 0 ? await c.db.select().from(authUsers).where(inArray(authUsers.authUserId, userIds)).all() : []; + const userMap = new Map(users.map((user: any) => [user.authUserId, materializeRow("user", user)])); + return rows.map((row) => ({ ...row, user: userMap.get(row.userId) ?? null })); + } + + if (model === "account" && join.user) { + const userIds = [...new Set(rows.map((row) => row.userId).filter(Boolean))]; + const users = userIds.length > 0 ? 
await c.db.select().from(authUsers).where(inArray(authUsers.authUserId, userIds)).all() : []; + const userMap = new Map(users.map((user: any) => [user.authUserId, materializeRow("user", user)])); + return rows.map((row) => ({ ...row, user: userMap.get(row.userId) ?? null })); + } + + if (model === "user" && join.account) { + const materializedRows = rows.map((row) => materializeRow("user", row)); + const userIds = materializedRows.map((row) => row.id); + const accounts = userIds.length > 0 ? await c.db.select().from(authAccounts).where(inArray(authAccounts.userId, userIds)).all() : []; + const accountsByUserId = new Map(); + for (const account of accounts) { + const entries = accountsByUserId.get(account.userId) ?? []; + entries.push(account); + accountsByUserId.set(account.userId, entries); + } + return materializedRows.map((row) => ({ ...row, account: accountsByUserId.get(row.id) ?? [] })); + } + + return rows.map((row) => materializeRow(model, row)); +} diff --git a/foundry/packages/backend/src/actors/user/workflow.ts b/foundry/packages/backend/src/actors/user/workflow.ts new file mode 100644 index 0000000..9bf2675 --- /dev/null +++ b/foundry/packages/backend/src/actors/user/workflow.ts @@ -0,0 +1,197 @@ +import { eq, count as sqlCount, and } from "drizzle-orm"; +import { DEFAULT_WORKSPACE_MODEL_ID } from "@sandbox-agent/foundry-shared"; +import { authUsers, sessionState, userProfiles, userTaskState } from "./db/schema.js"; +import { buildWhere, columnFor, materializeRow, persistInput, persistPatch, tableFor } from "./query-helpers.js"; + +export async function createAuthRecordMutation(c: any, input: { model: string; data: Record }) { + const table = tableFor(input.model); + const persisted = persistInput(input.model, input.data); + await c.db + .insert(table) + .values(persisted as any) + .run(); + const row = await c.db + .select() + .from(table) + .where(eq(columnFor(input.model, table, "id"), input.data.id as any)) + .get(); + return 
materializeRow(input.model, row); +} + +export async function updateAuthRecordMutation(c: any, input: { model: string; where: any[]; update: Record }) { + const table = tableFor(input.model); + const predicate = buildWhere(table, input.where); + if (!predicate) throw new Error("updateAuthRecord requires a where clause"); + await c.db + .update(table) + .set(persistPatch(input.model, input.update) as any) + .where(predicate) + .run(); + return materializeRow(input.model, await c.db.select().from(table).where(predicate).get()); +} + +export async function updateManyAuthRecordsMutation(c: any, input: { model: string; where: any[]; update: Record }) { + const table = tableFor(input.model); + const predicate = buildWhere(table, input.where); + if (!predicate) throw new Error("updateManyAuthRecords requires a where clause"); + await c.db + .update(table) + .set(persistPatch(input.model, input.update) as any) + .where(predicate) + .run(); + const row = await c.db.select({ value: sqlCount() }).from(table).where(predicate).get(); + return row?.value ?? 
0;
}

/**
 * Delete the single auth record matching `where`.
 * @throws when no predicate can be built from `where`.
 */
export async function deleteAuthRecordMutation(c: any, input: { model: string; where: any[] }) {
	const table = tableFor(input.model);
	const predicate = buildWhere(table, input.where);
	if (!predicate) throw new Error("deleteAuthRecord requires a where clause");
	await c.db.delete(table).where(predicate).run();
}

/**
 * Delete every auth record matching `where` and report how many were removed.
 * The matching rows are read first purely to produce the count.
 */
export async function deleteManyAuthRecordsMutation(c: any, input: { model: string; where: any[] }) {
	const table = tableFor(input.model);
	const predicate = buildWhere(table, input.where);
	if (!predicate) throw new Error("deleteManyAuthRecords requires a where clause");
	const matched = await c.db.select().from(table).where(predicate).all();
	await c.db.delete(table).where(predicate).run();
	return matched.length;
}

/**
 * Insert-or-update the user's profile row. Fields absent from the patch fall
 * back to defaults on insert and are left untouched on conflict update.
 * NOTE(review): the insert hardcodes primary key `id: 1` while the upsert
 * conflict target is `userId` — this assumes at most one profile row ever
 * exists in this per-user database. Confirm a second distinct userId can
 * never reach this actor, or the id constraint would reject the insert.
 */
export async function upsertUserProfileMutation(
	c: any,
	input: {
		userId: string;
		patch: {
			githubAccountId?: string | null;
			githubLogin?: string | null;
			roleLabel?: string;
			defaultModel?: string;
			eligibleOrganizationIdsJson?: string;
			starterRepoStatus?: string;
			starterRepoStarredAt?: number | null;
			starterRepoSkippedAt?: number | null;
		};
	},
) {
	const now = Date.now();
	const { patch } = input;

	// Only fields actually present on the patch participate in the on-conflict
	// update; everything else keeps its stored value.
	const changes: Record<string, unknown> = { updatedAt: now };
	if (patch.githubAccountId !== undefined) changes.githubAccountId = patch.githubAccountId;
	if (patch.githubLogin !== undefined) changes.githubLogin = patch.githubLogin;
	if (patch.roleLabel !== undefined) changes.roleLabel = patch.roleLabel;
	if (patch.defaultModel !== undefined) changes.defaultModel = patch.defaultModel;
	if (patch.eligibleOrganizationIdsJson !== undefined) changes.eligibleOrganizationIdsJson = patch.eligibleOrganizationIdsJson;
	if (patch.starterRepoStatus !== undefined) changes.starterRepoStatus = patch.starterRepoStatus;
	if (patch.starterRepoStarredAt !== undefined) changes.starterRepoStarredAt = patch.starterRepoStarredAt;
	if (patch.starterRepoSkippedAt !== undefined) changes.starterRepoSkippedAt = patch.starterRepoSkippedAt;

	await c.db
		.insert(userProfiles)
		.values({
			id: 1,
			userId: input.userId,
			githubAccountId: patch.githubAccountId ?? null,
			githubLogin: patch.githubLogin ?? null,
			roleLabel: patch.roleLabel ?? "GitHub user",
			defaultModel: patch.defaultModel ?? DEFAULT_WORKSPACE_MODEL_ID,
			eligibleOrganizationIdsJson: patch.eligibleOrganizationIdsJson ?? "[]",
			starterRepoStatus: patch.starterRepoStatus ?? "pending",
			starterRepoStarredAt: patch.starterRepoStarredAt ?? null,
			starterRepoSkippedAt: patch.starterRepoSkippedAt ?? null,
			createdAt: now,
			updatedAt: now,
		})
		.onConflictDoUpdate({ target: userProfiles.userId, set: changes })
		.run();
	return await c.db.select().from(userProfiles).where(eq(userProfiles.userId, input.userId)).get();
}

/** Insert-or-update per-session UI state (the active organization selection). */
export async function upsertSessionStateMutation(c: any, input: { sessionId: string; activeOrganizationId: string | null }) {
	const now = Date.now();
	const record = {
		sessionId: input.sessionId,
		activeOrganizationId: input.activeOrganizationId,
		createdAt: now,
		updatedAt: now,
	};
	await c.db
		.insert(sessionState)
		.values(record)
		.onConflictDoUpdate({
			target: sessionState.sessionId,
			set: { activeOrganizationId: input.activeOrganizationId, updatedAt: now },
		})
		.run();
	return await c.db.select().from(sessionState).where(eq(sessionState.sessionId, input.sessionId)).get();
}

/**
 * Insert-or-update the per-(task, session) UI state row.
 * NOTE(review): when `activeSessionId` is patched it is first fanned out to
 * every row of the task (the update is keyed on taskId alone) — presumably
 * the active session is shared task-wide; confirm against callers.
 */
export async function upsertTaskStateMutation(
	c: any,
	input: {
		taskId: string;
		sessionId: string;
		patch: {
			activeSessionId?: string | null;
			unread?: boolean;
			draftText?: string;
			draftAttachmentsJson?: string;
			draftUpdatedAt?: number | null;
		};
	},
) {
	const now = Date.now();
	// Read the current row so unspecified patch fields keep their stored values.
	const current = await c.db
		.select()
		.from(userTaskState)
		.where(and(eq(userTaskState.taskId, input.taskId), eq(userTaskState.sessionId, input.sessionId)))
		.get();

	if (input.patch.activeSessionId !== undefined) {
		await c.db.update(userTaskState).set({ activeSessionId: input.patch.activeSessionId, updatedAt: now }).where(eq(userTaskState.taskId, input.taskId)).run();
	}

	await c.db
		.insert(userTaskState)
		.values({
			taskId: input.taskId,
			sessionId: input.sessionId,
			activeSessionId: input.patch.activeSessionId ?? current?.activeSessionId ?? null,
			unread: input.patch.unread !== undefined ? (input.patch.unread ? 1 : 0) : (current?.unread ?? 0),
			draftText: input.patch.draftText ?? current?.draftText ?? "",
			draftAttachmentsJson: input.patch.draftAttachmentsJson ?? current?.draftAttachmentsJson ?? "[]",
			draftUpdatedAt: input.patch.draftUpdatedAt === undefined ? (current?.draftUpdatedAt ?? null) : input.patch.draftUpdatedAt,
			updatedAt: now,
		})
		.onConflictDoUpdate({
			target: [userTaskState.taskId, userTaskState.sessionId],
			set: {
				...(input.patch.activeSessionId !== undefined ? { activeSessionId: input.patch.activeSessionId } : {}),
				...(input.patch.unread !== undefined ? { unread: input.patch.unread ? 1 : 0 } : {}),
				...(input.patch.draftText !== undefined ? { draftText: input.patch.draftText } : {}),
				...(input.patch.draftAttachmentsJson !== undefined ? { draftAttachmentsJson: input.patch.draftAttachmentsJson } : {}),
				...(input.patch.draftUpdatedAt !== undefined ?
{ draftUpdatedAt: input.patch.draftUpdatedAt } : {}), + updatedAt: now, + }, + }) + .run(); + + return await c.db + .select() + .from(userTaskState) + .where(and(eq(userTaskState.taskId, input.taskId), eq(userTaskState.sessionId, input.sessionId))) + .get(); +} + +export async function deleteTaskStateMutation(c: any, input: { taskId: string; sessionId?: string }) { + if (input.sessionId) { + await c.db + .delete(userTaskState) + .where(and(eq(userTaskState.taskId, input.taskId), eq(userTaskState.sessionId, input.sessionId))) + .run(); + return; + } + await c.db.delete(userTaskState).where(eq(userTaskState.taskId, input.taskId)).run(); +} diff --git a/foundry/packages/backend/src/index.ts b/foundry/packages/backend/src/index.ts index 3af36c3..8f82d8b 100644 --- a/foundry/packages/backend/src/index.ts +++ b/foundry/packages/backend/src/index.ts @@ -10,7 +10,7 @@ import { createDefaultDriver } from "./driver.js"; import { createClient } from "rivetkit/client"; import { initBetterAuthService } from "./services/better-auth.js"; import { createDefaultAppShellServices } from "./services/app-shell-runtime.js"; -import { APP_SHELL_ORGANIZATION_ID } from "./actors/organization/app-shell.js"; +import { APP_SHELL_ORGANIZATION_ID } from "./actors/organization/constants.js"; import { logger } from "./logging.js"; export interface BackendStartOptions { @@ -48,6 +48,19 @@ function isRivetRequest(request: Request): boolean { } export async function startBackend(options: BackendStartOptions = {}): Promise { + // Prevent the sandbox-agent SDK's unhandled SQLite constraint errors from + // crashing the entire process. The SDK has a bug where duplicate event + // inserts (sandbox_agent_events UNIQUE constraint) throw from an internal + // async path with no catch. Log and continue. + process.on("uncaughtException", (error) => { + logger.error({ error: error?.message ?? 
String(error), stack: error?.stack }, "uncaughtException (kept alive)"); + }); + process.on("unhandledRejection", (reason) => { + const msg = reason instanceof Error ? reason.message : String(reason); + const stack = reason instanceof Error ? reason.stack : undefined; + logger.error({ error: msg, stack }, "unhandledRejection (kept alive)"); + }); + // sandbox-agent agent plugins vary on which env var they read for OpenAI/Codex auth. // Normalize to keep local dev + docker-compose simple. if (!process.env.CODEX_API_KEY && process.env.OPENAI_API_KEY) { diff --git a/foundry/packages/backend/src/services/better-auth.ts b/foundry/packages/backend/src/services/better-auth.ts index 4509402..c36b900 100644 --- a/foundry/packages/backend/src/services/better-auth.ts +++ b/foundry/packages/backend/src/services/better-auth.ts @@ -1,8 +1,11 @@ import { betterAuth } from "better-auth"; import { createAdapterFactory } from "better-auth/adapters"; -import { APP_SHELL_ORGANIZATION_ID } from "../actors/organization/app-shell.js"; -import { authUserKey, organizationKey } from "../actors/keys.js"; +import { APP_SHELL_ORGANIZATION_ID } from "../actors/organization/constants.js"; +// organization actions are called directly (no queue) +// user actor actions are called directly (no queue) +import { organizationKey, userKey } from "../actors/keys.js"; import { logger } from "../logging.js"; +// expectQueueResponse removed — actions return values directly const AUTH_BASE_PATH = "/v1/auth"; const SESSION_COOKIE = "better-auth.session_token"; @@ -59,6 +62,8 @@ function resolveRouteUserId(organization: any, resolved: any): string | null { return null; } +// sendOrganizationCommand removed — org actions are called directly + export interface BetterAuthService { auth: any; resolveSession(headers: Headers): Promise<{ session: any; user: any } | null>; @@ -75,7 +80,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin } // getOrCreate is intentional here: the 
adapter runs during Better Auth callbacks - // which can fire before any explicit create path. The app organization and auth user + // which can fire before any explicit create path. The app organization and user // actors must exist by the time the adapter needs them. const appOrganization = () => actorClient.organization.getOrCreate(organizationKey(APP_SHELL_ORGANIZATION_ID), { @@ -83,9 +88,9 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin }); // getOrCreate is intentional: Better Auth creates user records during OAuth - // callbacks, so the auth-user actor must be lazily provisioned on first access. - const getAuthUser = async (userId: string) => - await actorClient.authUser.getOrCreate(authUserKey(userId), { + // callbacks, so the user actor must be lazily provisioned on first access. + const getUser = async (userId: string) => + await actorClient.user.getOrCreate(userKey(userId), { createWithInput: { userId }, }); @@ -110,7 +115,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const email = direct("email"); if (typeof email === "string" && email.length > 0) { const organization = await appOrganization(); - const resolved = await organization.authFindEmailIndex({ email: email.toLowerCase() }); + const resolved = await organization.betterAuthFindEmailIndex({ email: email.toLowerCase() }); return resolveRouteUserId(organization, resolved); } return null; @@ -125,7 +130,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const sessionToken = direct("token") ?? data?.token; if (typeof sessionId === "string" || typeof sessionToken === "string") { const organization = await appOrganization(); - const resolved = await organization.authFindSessionIndex({ + const resolved = await organization.betterAuthFindSessionIndex({ ...(typeof sessionId === "string" ? { sessionId } : {}), ...(typeof sessionToken === "string" ? 
{ sessionToken } : {}), }); @@ -144,11 +149,11 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const accountId = direct("accountId") ?? data?.accountId; const organization = await appOrganization(); if (typeof accountRecordId === "string" && accountRecordId.length > 0) { - const resolved = await organization.authFindAccountIndex({ id: accountRecordId }); + const resolved = await organization.betterAuthFindAccountIndex({ id: accountRecordId }); return resolveRouteUserId(organization, resolved); } if (typeof providerId === "string" && providerId.length > 0 && typeof accountId === "string" && accountId.length > 0) { - const resolved = await organization.authFindAccountIndex({ providerId, accountId }); + const resolved = await organization.betterAuthFindAccountIndex({ providerId, accountId }); return resolveRouteUserId(organization, resolved); } return null; @@ -157,9 +162,9 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin return null; }; - const ensureOrganizationVerification = async (method: string, payload: Record) => { + const ensureOrganizationVerification = async (actionName: string, payload: Record) => { const organization = await appOrganization(); - return await organization[method](payload); + return await (organization as any)[actionName](payload); }; return { @@ -170,7 +175,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin create: async ({ model, data }) => { const transformed = await transformInput(data, model, "create", true); if (model === "verification") { - return await ensureOrganizationVerification("authCreateVerification", { data: transformed }); + return await ensureOrganizationVerification("commandBetterAuthVerificationCreate", { data: transformed }); } const userId = await resolveUserIdForQuery(model, undefined, transformed); @@ -178,19 +183,19 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin throw new 
Error(`Unable to resolve auth actor for create(${model})`); } - const userActor = await getAuthUser(userId); - const created = await userActor.createAuthRecord({ model, data: transformed }); + const userActor = await getUser(userId); + const created = await userActor.authCreate({ model, data: transformed }); const organization = await appOrganization(); if (model === "user" && typeof transformed.email === "string" && transformed.email.length > 0) { - await organization.authUpsertEmailIndex({ + await organization.commandBetterAuthEmailIndexUpsert({ email: transformed.email.toLowerCase(), userId, }); } if (model === "session") { - await organization.authUpsertSessionIndex({ + await organization.commandBetterAuthSessionIndexUpsert({ sessionId: String(created.id), sessionToken: String(created.token), userId, @@ -198,7 +203,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin } if (model === "account") { - await organization.authUpsertAccountIndex({ + await organization.commandBetterAuthAccountIndexUpsert({ id: String(created.id), providerId: String(created.providerId), accountId: String(created.accountId), @@ -212,7 +217,8 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin findOne: async ({ model, where, join }) => { const transformedWhere = transformWhereClause({ model, where, action: "findOne" }); if (model === "verification") { - return await ensureOrganizationVerification("authFindOneVerification", { where: transformedWhere, join }); + const organization = await appOrganization(); + return await organization.betterAuthFindOneVerification({ where: transformedWhere, join }); } const userId = await resolveUserIdForQuery(model, transformedWhere); @@ -220,15 +226,16 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin return null; } - const userActor = await getAuthUser(userId); - const found = await userActor.findOneAuthRecord({ model, where: transformedWhere, join }); + 
const userActor = await getUser(userId); + const found = await userActor.betterAuthFindOneRecord({ model, where: transformedWhere, join }); return found ? ((await transformOutput(found, model, undefined, join)) as any) : null; }, findMany: async ({ model, where, limit, sortBy, offset, join }) => { const transformedWhere = transformWhereClause({ model, where, action: "findMany" }); if (model === "verification") { - return await ensureOrganizationVerification("authFindManyVerification", { + const organization = await appOrganization(); + return await organization.betterAuthFindManyVerification({ where: transformedWhere, limit, sortBy, @@ -244,7 +251,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const resolved = await Promise.all( (tokenClause.value as string[]).map(async (sessionToken: string) => ({ sessionToken, - route: await organization.authFindSessionIndex({ sessionToken }), + route: await organization.betterAuthFindSessionIndex({ sessionToken }), })), ); const byUser = new Map(); @@ -259,11 +266,11 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const rows = []; for (const [userId, tokens] of byUser) { - const userActor = await getAuthUser(userId); + const userActor = await getUser(userId); const scopedWhere = transformedWhere.map((entry: any) => entry.field === "token" && entry.operator === "in" ? 
{ ...entry, value: tokens } : entry, ); - const found = await userActor.findManyAuthRecords({ model, where: scopedWhere, limit, sortBy, offset, join }); + const found = await userActor.betterAuthFindManyRecords({ model, where: scopedWhere, limit, sortBy, offset, join }); rows.push(...found); } return await Promise.all(rows.map(async (row: any) => await transformOutput(row, model, undefined, join))); @@ -275,8 +282,8 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin return []; } - const userActor = await getAuthUser(userId); - const found = await userActor.findManyAuthRecords({ model, where: transformedWhere, limit, sortBy, offset, join }); + const userActor = await getUser(userId); + const found = await userActor.betterAuthFindManyRecords({ model, where: transformedWhere, limit, sortBy, offset, join }); return await Promise.all(found.map(async (row: any) => await transformOutput(row, model, undefined, join))); }, @@ -284,7 +291,10 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const transformedWhere = transformWhereClause({ model, where, action: "update" }); const transformedUpdate = (await transformInput(update as Record, model, "update", true)) as Record; if (model === "verification") { - return await ensureOrganizationVerification("authUpdateVerification", { where: transformedWhere, update: transformedUpdate }); + return await ensureOrganizationVerification("commandBetterAuthVerificationUpdate", { + where: transformedWhere, + update: transformedUpdate, + }); } const userId = await resolveUserIdForQuery(model, transformedWhere, transformedUpdate); @@ -292,29 +302,34 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin return null; } - const userActor = await getAuthUser(userId); + const userActor = await getUser(userId); const before = model === "user" - ? await userActor.findOneAuthRecord({ model, where: transformedWhere }) + ? 
await userActor.betterAuthFindOneRecord({ model, where: transformedWhere }) : model === "account" - ? await userActor.findOneAuthRecord({ model, where: transformedWhere }) + ? await userActor.betterAuthFindOneRecord({ model, where: transformedWhere }) : model === "session" - ? await userActor.findOneAuthRecord({ model, where: transformedWhere }) + ? await userActor.betterAuthFindOneRecord({ model, where: transformedWhere }) : null; - const updated = await userActor.updateAuthRecord({ model, where: transformedWhere, update: transformedUpdate }); + const updated = await userActor.authUpdate({ model, where: transformedWhere, update: transformedUpdate }); const organization = await appOrganization(); if (model === "user" && updated) { if (before?.email && before.email !== updated.email) { - await organization.authDeleteEmailIndex({ email: before.email.toLowerCase() }); + await organization.commandBetterAuthEmailIndexDelete({ + email: before.email.toLowerCase(), + }); } if (updated.email) { - await organization.authUpsertEmailIndex({ email: updated.email.toLowerCase(), userId }); + await organization.commandBetterAuthEmailIndexUpsert({ + email: updated.email.toLowerCase(), + userId, + }); } } if (model === "session" && updated) { - await organization.authUpsertSessionIndex({ + await organization.commandBetterAuthSessionIndexUpsert({ sessionId: String(updated.id), sessionToken: String(updated.token), userId, @@ -322,7 +337,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin } if (model === "account" && updated) { - await organization.authUpsertAccountIndex({ + await organization.commandBetterAuthAccountIndexUpsert({ id: String(updated.id), providerId: String(updated.providerId), accountId: String(updated.accountId), @@ -337,7 +352,10 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin const transformedWhere = transformWhereClause({ model, where, action: "updateMany" }); const transformedUpdate = (await 
transformInput(update as Record, model, "update", true)) as Record; if (model === "verification") { - return await ensureOrganizationVerification("authUpdateManyVerification", { where: transformedWhere, update: transformedUpdate }); + return await ensureOrganizationVerification("commandBetterAuthVerificationUpdateMany", { + where: transformedWhere, + update: transformedUpdate, + }); } const userId = await resolveUserIdForQuery(model, transformedWhere, transformedUpdate); @@ -345,14 +363,15 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin return 0; } - const userActor = await getAuthUser(userId); - return await userActor.updateManyAuthRecords({ model, where: transformedWhere, update: transformedUpdate }); + const userActor = await getUser(userId); + return await userActor.authUpdateMany({ model, where: transformedWhere, update: transformedUpdate }); }, delete: async ({ model, where }) => { const transformedWhere = transformWhereClause({ model, where, action: "delete" }); if (model === "verification") { - await ensureOrganizationVerification("authDeleteVerification", { where: transformedWhere }); + const organization = await appOrganization(); + await organization.commandBetterAuthVerificationDelete({ where: transformedWhere }); return; } @@ -361,20 +380,20 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin return; } - const userActor = await getAuthUser(userId); + const userActor = await getUser(userId); const organization = await appOrganization(); - const before = await userActor.findOneAuthRecord({ model, where: transformedWhere }); - await userActor.deleteAuthRecord({ model, where: transformedWhere }); + const before = await userActor.betterAuthFindOneRecord({ model, where: transformedWhere }); + await userActor.authDelete({ model, where: transformedWhere }); if (model === "session" && before) { - await organization.authDeleteSessionIndex({ + await 
organization.commandBetterAuthSessionIndexDelete({ sessionId: before.id, sessionToken: before.token, }); } if (model === "account" && before) { - await organization.authDeleteAccountIndex({ + await organization.commandBetterAuthAccountIndexDelete({ id: before.id, providerId: before.providerId, accountId: before.accountId, @@ -382,14 +401,16 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin } if (model === "user" && before?.email) { - await organization.authDeleteEmailIndex({ email: before.email.toLowerCase() }); + await organization.commandBetterAuthEmailIndexDelete({ + email: before.email.toLowerCase(), + }); } }, deleteMany: async ({ model, where }) => { const transformedWhere = transformWhereClause({ model, where, action: "deleteMany" }); if (model === "verification") { - return await ensureOrganizationVerification("authDeleteManyVerification", { where: transformedWhere }); + return await ensureOrganizationVerification("commandBetterAuthVerificationDeleteMany", { where: transformedWhere }); } if (model === "session") { @@ -397,12 +418,12 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin if (!userId) { return 0; } - const userActor = await getAuthUser(userId); + const userActor = await getUser(userId); const organization = await appOrganization(); - const sessions = await userActor.findManyAuthRecords({ model, where: transformedWhere, limit: 5000 }); - const deleted = await userActor.deleteManyAuthRecords({ model, where: transformedWhere }); + const sessions = await userActor.betterAuthFindManyRecords({ model, where: transformedWhere, limit: 5000 }); + const deleted = await userActor.authDeleteMany({ model, where: transformedWhere }); for (const session of sessions) { - await organization.authDeleteSessionIndex({ + await organization.commandBetterAuthSessionIndexDelete({ sessionId: session.id, sessionToken: session.token, }); @@ -415,15 +436,16 @@ export function 
initBetterAuthService(actorClient: any, options: { apiUrl: strin return 0; } - const userActor = await getAuthUser(userId); - const deleted = await userActor.deleteManyAuthRecords({ model, where: transformedWhere }); + const userActor = await getUser(userId); + const deleted = await userActor.authDeleteMany({ model, where: transformedWhere }); return deleted; }, count: async ({ model, where }) => { const transformedWhere = transformWhereClause({ model, where, action: "count" }); if (model === "verification") { - return await ensureOrganizationVerification("authCountVerification", { where: transformedWhere }); + const organization = await appOrganization(); + return await organization.betterAuthCountVerification({ where: transformedWhere }); } const userId = await resolveUserIdForQuery(model, transformedWhere); @@ -431,8 +453,8 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin return 0; } - const userActor = await getAuthUser(userId); - return await userActor.countAuthRecords({ model, where: transformedWhere }); + const userActor = await getUser(userId); + return await userActor.betterAuthCountRecords({ model, where: transformedWhere }); }, }; }, @@ -477,17 +499,17 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin async getAuthState(sessionId: string) { const organization = await appOrganization(); - const route = await organization.authFindSessionIndex({ sessionId }); + const route = await organization.betterAuthFindSessionIndex({ sessionId }); if (!route?.userId) { return null; } - const userActor = await getAuthUser(route.userId); + const userActor = await getUser(route.userId); return await userActor.getAppAuthState({ sessionId }); }, async upsertUserProfile(userId: string, patch: Record) { - const userActor = await getAuthUser(userId); - return await userActor.upsertUserProfile({ userId, patch }); + const userActor = await getUser(userId); + return await userActor.profileUpsert({ userId, patch 
}); }, async setActiveOrganization(sessionId: string, activeOrganizationId: string | null) { @@ -495,8 +517,8 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin if (!authState?.user?.id) { throw new Error(`Unknown auth session ${sessionId}`); } - const userActor = await getAuthUser(authState.user.id); - return await userActor.upsertSessionState({ sessionId, activeOrganizationId }); + const userActor = await getUser(authState.user.id); + return await userActor.sessionStateUpsert({ sessionId, activeOrganizationId }); }, async getAccessTokenForSession(sessionId: string) { diff --git a/foundry/packages/backend/src/services/branch-name-prefixes.ts b/foundry/packages/backend/src/services/branch-name-prefixes.ts new file mode 100644 index 0000000..aaccaee --- /dev/null +++ b/foundry/packages/backend/src/services/branch-name-prefixes.ts @@ -0,0 +1,584 @@ +// Auto-generated list of branch name prefixes. +// Source: McMaster-Carr product catalog. +export const BRANCH_NAME_PREFIXES: readonly string[] = [ + "abrasive-blasters", + "ac-motors", + "access-doors", + "adjustable-handles", + "aerosol-paint", + "air-cleaners", + "air-cylinders", + "air-filters", + "air-hose", + "air-knives", + "air-nozzles", + "air-regulators", + "air-ride-wheels", + "air-slides", + "alligator-clips", + "alloy-steel", + "aluminum-honeycomb", + "angle-indicators", + "antiseize-lubricants", + "antislip-fluid", + "backlight-panel-kits", + "ball-bearings", + "ball-end-mills", + "ball-joint-linkages", + "ball-transfers", + "band-clamps", + "band-saw-blades", + "bar-clamps", + "bar-grating", + "barbed-hose-fittings", + "barbed-tube-fittings", + "basket-strainers", + "batch-cans", + "battery-chargers", + "battery-holders", + "bead-chain", + "beam-clamps", + "belt-conveyors", + "bench-scales", + "bench-vises", + "bin-boxes", + "bin-storage", + "binding-posts", + "blank-tags", + "blasting-cabinets", + "blind-rivets", + "bluetooth-padlocks", + "boring-lathe-tools", + 
"box-reducers", + "box-wrenches", + "braided-hose", + "brass-pipe-fittings", + "breather-vents", + "butt-splices", + "c-clamps", + "cable-cutters", + "cable-holders", + "cable-tie-mounts", + "cable-ties", + "cam-handles", + "cam-latches", + "cam-locks", + "cap-nuts", + "captive-panel-screws", + "carbide-burs", + "carbide-inserts", + "carbon-fiber", + "carbon-steel", + "cardstock-tags", + "carriage-bolts", + "cast-acrylic", + "cast-iron", + "cast-nylon", + "casting-compounds", + "ceiling-lights", + "ceramic-adhesives", + "chain-slings", + "check-valves", + "chemical-hose", + "chemistry-meters", + "chemistry-testing", + "chip-clearing-tools", + "chucking-reamers", + "cinching-straps", + "circuit-breakers", + "circular-saw-blades", + "circular-saws", + "clamping-hangers", + "clevis-pins", + "clevis-rod-ends", + "clip-on-nuts", + "coaxial-connectors", + "coaxial-cords", + "coiled-spring-pins", + "compact-connectors", + "computer-adapters", + "concrete-adhesives", + "concrete-repair", + "contour-transfers", + "conveyor-belt-lacing", + "conveyor-belting", + "conveyor-brushes", + "conveyor-rollers", + "coolant-hose", + "copper-tube-fittings", + "copper-tubing", + "cord-grips", + "cord-reels", + "cotter-pins", + "coupling-nuts", + "cpvc-pipe-fittings", + "cup-brushes", + "cutoff-wheels", + "cylinder-hones", + "cylinder-racks", + "cylinder-trucks", + "data-cable", + "data-connectors", + "dc-motors", + "dead-blow-hammers", + "delrin-acetal-resin", + "desiccant-air-dryers", + "desktop-cranes", + "dial-calipers", + "dial-indicators", + "die-springs", + "direct-heaters", + "disconnect-switches", + "dispensing-needles", + "dispensing-pumps", + "disposable-clothing", + "disposable-gloves", + "document-protectors", + "door-closers", + "door-handles", + "door-holders", + "dowel-pins", + "drafting-equipment", + "drain-cleaners", + "drainage-mats", + "draw-latches", + "drawer-cabinets", + "drawer-slides", + "drill-bit-sets", + "drill-bits", + "drill-bushings", + "drill-chucks", + 
"drill-presses", + "drilling-screws", + "drinking-fountains", + "drive-anchors", + "drive-rollers", + "drive-shafts", + "drum-faucets", + "drum-pumps", + "drum-top-vacuums", + "drum-trucks", + "dry-box-gloves", + "dry-erase-boards", + "dry-film-lubricants", + "duct-fans", + "duct-hose", + "duct-tape", + "dust-collectors", + "dustless-chalk", + "edge-trim", + "electric-actuators", + "electric-drills", + "electric-drum-pumps", + "electric-mixers", + "electrical-switches", + "electrical-tape", + "electronic-calipers", + "enclosure-heaters", + "enclosure-panels", + "ethernet-cords", + "exhaust-fans", + "exit-lights", + "expansion-joints", + "expansion-plugs", + "extension-cords", + "extension-springs", + "fabric-snaps", + "fan-blades", + "fep-tubing", + "fiberglass-grating", + "file-holders", + "filter-bag-housings", + "filter-bags", + "filter-cartridges", + "fire-fighting-hose", + "first-aid-supplies", + "fixture-clamps", + "flange-locknuts", + "flange-mount-seals", + "flap-sanding-discs", + "flap-sanding-wheels", + "flared-tube-fittings", + "flashing-lights", + "flat-washers", + "flexible-shafts", + "flexible-shank-burs", + "flexible-trays", + "float-valves", + "floor-locks", + "floor-marking-tape", + "floor-scales", + "floor-squeegees", + "flow-sights", + "flow-switches", + "flowmeter-totalizers", + "foot-switches", + "force-gauges", + "fume-exhausters", + "garbage-bags", + "garden-hose", + "gas-hose", + "gas-regulators", + "gas-springs", + "gauge-blocks", + "glass-sights", + "gold-wire", + "grab-latches", + "grease-fittings", + "grinding-bits", + "grinding-wheels", + "hand-brushes", + "hand-chain-hoists", + "hand-reamers", + "hand-trucks", + "hand-wheels", + "hand-winches", + "hanging-scales", + "hard-hats", + "hardened-shafts", + "hardness-testers", + "heat-exchangers", + "heat-guns", + "heat-lamps", + "heat-sealable-bags", + "heat-set-inserts", + "heat-shrink-tubing", + "heat-sinks", + "heated-scrapers", + "helical-inserts", + "hex-bit-sockets", + 
"hex-head-screws", + "hex-nuts", + "high-accuracy-rulers", + "high-amp-relays", + "high-vacuum-filters", + "high-vacuum-sights", + "hinge-adjusters", + "hoist-rings", + "hole-saws", + "hose-couplings", + "hose-reels", + "hot-melt-glue", + "hydraulic-cylinders", + "hydraulic-hose", + "hydraulic-jacks", + "iec-connectors", + "immersion-heaters", + "impression-foam", + "indicating-lights", + "inflatable-wedges", + "ink-markers", + "insertion-heaters", + "inspection-mirrors", + "instrument-carts", + "insulation-jacketing", + "jam-removers", + "jigsaw-blades", + "key-cabinets", + "key-locking-inserts", + "key-stock", + "keyed-drive-shafts", + "keyseat-end-mills", + "l-key-sets", + "l-keys", + "label-holders", + "latching-connectors", + "lathe-tools", + "lavatory-partitions", + "lead-screws", + "leveling-lasers", + "leveling-mounts", + "lid-supports", + "lift-off-hinges", + "lift-trucks", + "light-bulbs", + "limit-switches", + "linear-ball-bearings", + "liquid-level-gauges", + "lock-washers", + "lockout-devices", + "loop-clamps", + "loop-hangers", + "machine-brackets", + "machine-handles", + "machine-keys", + "magnetic-base-drills", + "magnetic-bumpers", + "masking-tape", + "masonry-drill-bits", + "medium-amp-relays", + "metal-cable-ties", + "metal-panels", + "metal-plates", + "metal-tags", + "metering-pumps", + "metric-o-rings", + "mil-spec-connectors", + "mobile-lift-tables", + "motor-controls", + "motor-starters", + "mountable-cable-ties", + "mounting-tape", + "neoprene-foam", + "nickel-titanium", + "nonmarring-hammers", + "nonslip-bumpers", + "nylon-rivets", + "nylon-tubing", + "o-rings", + "oil-level-indicators", + "oil-reservoirs", + "oil-skimmers", + "on-off-valves", + "open-end-wrenches", + "outlet-boxes", + "outlet-strips", + "packaging-tape", + "paint-brushes", + "paint-markers", + "paint-sprayers", + "pallet-racks", + "pallet-trucks", + "panel-air-filters", + "parts-baskets", + "pendant-switches", + "perforated-sheets", + "pest-control", + "petroleum-hose", + 
"piano-hinges", + "pipe-couplings", + "pipe-gaskets", + "pipe-markers", + "pipe-wrenches", + "plank-grating", + "plastic-clamps", + "plastic-mesh", + "plate-lifting-clamps", + "platinum-wire", + "plier-clamps", + "plug-gauges", + "portable-lights", + "power-cords", + "power-supplied", + "power-supplies", + "precision-knives", + "press-fit-nuts", + "press-in-nuts", + "protecting-tape", + "protective-coatings", + "protective-curtains", + "protective-panels", + "protective-wrap", + "proximity-switches", + "pull-handles", + "push-brooms", + "push-nuts", + "push-on-seals", + "pvc-pipe-fittings", + "pvc-tubing", + "quick-release-pins", + "ratchet-pullers", + "recycled-plastics", + "repair-adhesives", + "repair-clamps", + "reusable-cable-ties", + "ring-terminals", + "rivet-nuts", + "robot-base-mounts", + "robot-bases", + "rocker-switches", + "rod-wipers", + "roller-bearings", + "roller-chain", + "roller-conveyors", + "roof-exhaust-fans", + "roof-repair", + "rotary-broaches", + "rotary-hammers", + "rotary-shaft-seals", + "rotating-cranes", + "rotating-joints", + "router-bits", + "rtd-probes", + "rubber-edge-seals", + "rubber-tread-wheels", + "rubber-tubing", + "safety-cabinets", + "safety-glasses", + "safety-mirrors", + "sanding-belts", + "sanding-discs", + "sanding-guides", + "sanding-rolls", + "sanding-sheets", + "screw-extractors", + "screw-jacks", + "scrub-brushes", + "sealing-washers", + "security-lights", + "sensor-connectors", + "set-screws", + "setup-clamps", + "shaft-collars", + "shaft-couplings", + "shaft-repair-sleeves", + "shaft-supports", + "sharpening-stones", + "sheet-metal-cutters", + "shelf-cabinets", + "shim-stock", + "shim-tape", + "shipping-pails", + "shock-absorbers", + "shoulder-screws", + "shower-stations", + "silicone-foam", + "sleeve-bearings", + "slide-bolts", + "slitting-saws", + "slotted-spring-pins", + "sludge-samplers", + "small-parts-storage", + "snap-acting-switches", + "soap-dispensers", + "socket-head-screws", + "socket-organizers", + 
"socket-wrenches", + "soldering-irons", + "solid-rivets", + "solid-rod-ends", + "sound-insulation", + "space-heaters", + "spacing-beads", + "spanner-wrenches", + "specialty-pliers", + "specialty-vises", + "specialty-washers", + "speed-reducers", + "splicing-connectors", + "spray-bottles", + "spray-nozzles", + "spring-clamps", + "spring-plungers", + "spring-steel", + "square-drive-sockets", + "square-end-mills", + "square-nuts", + "squeeze-bottles", + "stack-lights", + "stainless-steel", + "stair-treads", + "static-control-mats", + "steel-carts", + "steel-pipe-fittings", + "steel-pipe-flanges", + "steel-stamps", + "steel-tubing", + "step-ladders", + "stepper-motors", + "storage-bags", + "storage-boxes", + "storage-chests", + "straight-ladders", + "strap-hinges", + "stretch-wrap", + "strip-doors", + "strip-springs", + "strobe-lights", + "structural-adhesives", + "strut-channel", + "strut-channel-nuts", + "strut-mount-clamps", + "suction-cup-lifters", + "suction-strainers", + "super-absorbent-foam", + "super-flexible-glass", + "surface-fillers", + "surface-mount-hinges", + "t-handle-keys", + "t-slotted-framing", + "tamper-seals", + "tank-level-measurers", + "tape-dispensers", + "tape-measures", + "taper-pins", + "tapping-screws", + "teflon-ptfe", + "terminal-blocks", + "test-indicators", + "test-leads", + "test-weights", + "tethered-knobs", + "thermal-insulation", + "thread-adapters", + "thread-sealant-tape", + "thread-sealants", + "threaded-inserts", + "threaded-standoffs", + "threaded-studs", + "thrust-ball-bearings", + "thrust-bearings", + "thumb-nuts", + "thumb-screws", + "tie-down-rings", + "time-clocks", + "timer-relays", + "timer-switches", + "toggle-clamps", + "toggle-switches", + "tool-holders", + "tool-sets", + "tool-steel", + "torque-wrenches", + "torsion-springs", + "tote-boxes", + "touch-bars", + "track-casters", + "track-rollers", + "track-wheels", + "traction-mats", + "trolley-systems", + "tube-brushes", + "tube-fittings", + "tubular-light-bulbs", + 
"turn-lock-connectors", + "twist-ties", + "u-bolts", + "u-joints", + "ul-class-fuses", + "unthreaded-spacers", + "usb-adapters", + "usb-cords", + "utility-knives", + "v-belts", + "vacuum-cups", + "vacuum-pumps", + "wall-louvers", + "wash-fountains", + "wash-guns", + "waste-containers", + "water-deionizers", + "water-filters", + "water-hose", + "water-removal-pumps", + "weather-stations", + "web-slings", + "weld-nuts", + "welding-clothing", + "welding-helmets", + "wet-dry-vacuums", + "wet-mops", + "wheel-brushes", + "wing-nuts", + "wire-cloth", + "wire-connectors", + "wire-cutting-pliers", + "wire-partitions", + "wire-rope", + "wire-rope-clamps", + "wire-wrap", + "wool-felt", + "work-platforms", + "workbench-legs", + "woven-wire-cloth", +] as const; diff --git a/foundry/packages/backend/src/services/create-flow.ts b/foundry/packages/backend/src/services/create-flow.ts index 8341399..eb9e53f 100644 --- a/foundry/packages/backend/src/services/create-flow.ts +++ b/foundry/packages/backend/src/services/create-flow.ts @@ -1,3 +1,5 @@ +import { BRANCH_NAME_PREFIXES } from "./branch-name-prefixes.js"; + export interface ResolveCreateFlowDecisionInput { task: string; explicitTitle?: string; @@ -89,30 +91,42 @@ export function sanitizeBranchName(input: string): string { return trimmed.slice(0, 50).replace(/-+$/g, ""); } +function generateRandomSuffix(length: number): string { + const chars = "abcdefghijklmnopqrstuvwxyz0123456789"; + let result = ""; + for (let i = 0; i < length; i++) { + result += chars[Math.floor(Math.random() * chars.length)]; + } + return result; +} + +function generateBranchName(): string { + const prefix = BRANCH_NAME_PREFIXES[Math.floor(Math.random() * BRANCH_NAME_PREFIXES.length)]!; + const suffix = generateRandomSuffix(4); + return `${prefix}-${suffix}`; +} + export function resolveCreateFlowDecision(input: ResolveCreateFlowDecisionInput): ResolveCreateFlowDecisionResult { const explicitBranch = input.explicitBranchName?.trim(); const title = 
deriveFallbackTitle(input.task, input.explicitTitle); - const generatedBase = sanitizeBranchName(title) || "task"; - - const branchBase = explicitBranch && explicitBranch.length > 0 ? explicitBranch : generatedBase; const existingBranches = new Set(input.localBranches.map((value) => value.trim()).filter((value) => value.length > 0)); const existingTaskBranches = new Set(input.taskBranches.map((value) => value.trim()).filter((value) => value.length > 0)); const conflicts = (name: string): boolean => existingBranches.has(name) || existingTaskBranches.has(name); - if (explicitBranch && conflicts(branchBase)) { - throw new Error(`Branch '${branchBase}' already exists. Choose a different --name/--branch value.`); + if (explicitBranch && explicitBranch.length > 0) { + if (conflicts(explicitBranch)) { + throw new Error(`Branch '${explicitBranch}' already exists. Choose a different --name/--branch value.`); + } + return { title, branchName: explicitBranch }; } - if (explicitBranch) { - return { title, branchName: branchBase }; - } - - let candidate = branchBase; - let index = 2; - while (conflicts(candidate)) { - candidate = `${branchBase}-${index}`; - index += 1; + // Generate a random McMaster-Carr-style branch name, retrying on conflicts + let candidate = generateBranchName(); + let attempts = 0; + while (conflicts(candidate) && attempts < 100) { + candidate = generateBranchName(); + attempts += 1; } return { diff --git a/foundry/packages/backend/src/services/github-auth.ts b/foundry/packages/backend/src/services/github-auth.ts index ebbbce9..aa475b0 100644 --- a/foundry/packages/backend/src/services/github-auth.ts +++ b/foundry/packages/backend/src/services/github-auth.ts @@ -1,5 +1,5 @@ import { getOrCreateOrganization } from "../actors/handles.js"; -import { APP_SHELL_ORGANIZATION_ID } from "../actors/organization/app-shell.js"; +import { APP_SHELL_ORGANIZATION_ID } from "../actors/organization/constants.js"; export interface ResolvedGithubAuth { githubToken: string; 
diff --git a/foundry/packages/backend/test/create-flow.test.ts b/foundry/packages/backend/test/create-flow.test.ts index 498c4dc..8c66cb4 100644 --- a/foundry/packages/backend/test/create-flow.test.ts +++ b/foundry/packages/backend/test/create-flow.test.ts @@ -1,5 +1,6 @@ import { describe, expect, it } from "vitest"; import { deriveFallbackTitle, resolveCreateFlowDecision, sanitizeBranchName } from "../src/services/create-flow.js"; +import { BRANCH_NAME_PREFIXES } from "../src/services/branch-name-prefixes.js"; describe("create flow decision", () => { it("derives a conventional-style fallback title from task text", () => { @@ -17,15 +18,49 @@ describe("create flow decision", () => { expect(sanitizeBranchName(" spaces everywhere ")).toBe("spaces-everywhere"); }); - it("auto-increments generated branch names for conflicts", () => { + it("generates a McMaster-Carr-style branch name with random suffix", () => { const resolved = resolveCreateFlowDecision({ task: "Add auth", - localBranches: ["feat-add-auth"], - taskBranches: ["feat-add-auth-2"], + localBranches: [], + taskBranches: [], }); expect(resolved.title).toBe("feat: Add auth"); - expect(resolved.branchName).toBe("feat-add-auth-3"); + // Branch name should be "-<4-char-suffix>" where prefix is from BRANCH_NAME_PREFIXES + const lastDash = resolved.branchName.lastIndexOf("-"); + const prefix = resolved.branchName.slice(0, lastDash); + const suffix = resolved.branchName.slice(lastDash + 1); + expect(BRANCH_NAME_PREFIXES).toContain(prefix); + expect(suffix).toMatch(/^[a-z0-9]{4}$/); + }); + + it("avoids conflicts by generating a different random name", () => { + // Even with a conflicting branch, it should produce something different + const resolved = resolveCreateFlowDecision({ + task: "Add auth", + localBranches: [], + taskBranches: [], + }); + + // Running again with the first result as a conflict should produce a different name + const resolved2 = resolveCreateFlowDecision({ + task: "Add auth", + localBranches: 
[resolved.branchName], + taskBranches: [], + }); + + expect(resolved2.branchName).not.toBe(resolved.branchName); + }); + + it("uses explicit branch name when provided", () => { + const resolved = resolveCreateFlowDecision({ + task: "new task", + explicitBranchName: "my-branch", + localBranches: [], + taskBranches: [], + }); + + expect(resolved.branchName).toBe("my-branch"); }); it("fails when explicit branch already exists", () => { diff --git a/foundry/packages/backend/test/keys.test.ts b/foundry/packages/backend/test/keys.test.ts index ac5f3c8..c3b2a10 100644 --- a/foundry/packages/backend/test/keys.test.ts +++ b/foundry/packages/backend/test/keys.test.ts @@ -1,14 +1,13 @@ import { describe, expect, it } from "vitest"; -import { githubDataKey, historyKey, organizationKey, repositoryKey, taskKey, taskSandboxKey } from "../src/actors/keys.js"; +import { auditLogKey, githubDataKey, organizationKey, taskKey, taskSandboxKey } from "../src/actors/keys.js"; describe("actor keys", () => { it("prefixes every key with organization namespace", () => { const keys = [ organizationKey("default"), - repositoryKey("default", "repo"), taskKey("default", "repo", "task"), taskSandboxKey("default", "sbx"), - historyKey("default", "repo"), + auditLogKey("default"), githubDataKey("default"), ]; diff --git a/foundry/packages/backend/test/organization-isolation.test.ts b/foundry/packages/backend/test/organization-isolation.test.ts index fcd1950..f5d58f2 100644 --- a/foundry/packages/backend/test/organization-isolation.test.ts +++ b/foundry/packages/backend/test/organization-isolation.test.ts @@ -8,6 +8,7 @@ import { describe, expect, it } from "vitest"; import { setupTest } from "rivetkit/test"; import { organizationKey } from "../src/actors/keys.js"; import { registry } from "../src/actors/index.js"; +import { organizationWorkflowQueueName } from "../src/actors/organization/queues.js"; import { repoIdFromRemote } from "../src/services/repo.js"; import { createTestDriver } from 
"./helpers/test-driver.js"; import { createTestRuntimeContext } from "./helpers/test-context.js"; @@ -51,8 +52,8 @@ describe("organization isolation", () => { const { repoPath } = createRepo(); const repoId = repoIdFromRemote(repoPath); - await wsA.applyGithubRepositoryProjection({ repoId, remoteUrl: repoPath }); - await wsB.applyGithubRepositoryProjection({ repoId, remoteUrl: repoPath }); + await wsA.send(organizationWorkflowQueueName("organization.command.github.repository_projection.apply"), { repoId, remoteUrl: repoPath }, { wait: true }); + await wsB.send(organizationWorkflowQueueName("organization.command.github.repository_projection.apply"), { repoId, remoteUrl: repoPath }, { wait: true }); await wsA.createTask({ organizationId: "alpha", diff --git a/foundry/packages/backend/test/workbench-unread.test.ts b/foundry/packages/backend/test/workspace-unread.test.ts similarity index 92% rename from foundry/packages/backend/test/workbench-unread.test.ts rename to foundry/packages/backend/test/workspace-unread.test.ts index fc94e97..5f7221a 100644 --- a/foundry/packages/backend/test/workbench-unread.test.ts +++ b/foundry/packages/backend/test/workspace-unread.test.ts @@ -1,7 +1,7 @@ import { describe, expect, it } from "vitest"; -import { requireSendableSessionMeta, shouldMarkSessionUnreadForStatus, shouldRecreateSessionForModelChange } from "../src/actors/task/workbench.js"; +import { requireSendableSessionMeta, shouldMarkSessionUnreadForStatus, shouldRecreateSessionForModelChange } from "../src/actors/task/workspace.js"; -describe("workbench unread status transitions", () => { +describe("workspace unread status transitions", () => { it("marks unread when a running session first becomes idle", () => { expect(shouldMarkSessionUnreadForStatus({ thinkingSinceMs: Date.now() - 1_000 }, "idle")).toBe(true); }); @@ -15,7 +15,7 @@ describe("workbench unread status transitions", () => { }); }); -describe("workbench model changes", () => { +describe("workspace model 
changes", () => { it("recreates an unused ready session so the selected model takes effect", () => { expect( shouldRecreateSessionForModelChange({ @@ -58,9 +58,9 @@ describe("workbench model changes", () => { }); }); -describe("workbench send readiness", () => { +describe("workspace send readiness", () => { it("rejects unknown sessions", () => { - expect(() => requireSendableSessionMeta(null, "session-1")).toThrow("Unknown workbench session: session-1"); + expect(() => requireSendableSessionMeta(null, "session-1")).toThrow("Unknown workspace session: session-1"); }); it("rejects pending sessions", () => { diff --git a/foundry/packages/cli/src/tui.ts b/foundry/packages/cli/src/tui.ts index c3aba9e..062bb95 100644 --- a/foundry/packages/cli/src/tui.ts +++ b/foundry/packages/cli/src/tui.ts @@ -1,4 +1,4 @@ -import type { AppConfig, TaskRecord } from "@sandbox-agent/foundry-shared"; +import type { AppConfig, TaskRecord, WorkspaceTaskDetail } from "@sandbox-agent/foundry-shared"; import { spawnSync } from "node:child_process"; import { createBackendClientFromConfig, filterTasks, formatRelativeAge, groupTaskStatus } from "@sandbox-agent/foundry-client"; import { CLI_BUILD_ID } from "./build-id.js"; @@ -51,14 +51,28 @@ interface DisplayRow { age: string; } +type TuiTaskRow = TaskRecord & Pick & { activeSessionId?: string | null }; + interface RenderOptions { width?: number; height?: number; } -async function listDetailedTasks(client: ReturnType, organizationId: string): Promise { +async function listDetailedTasks(client: ReturnType, organizationId: string): Promise { const rows = await client.listTasks(organizationId); - return await Promise.all(rows.map(async (row) => await client.getTask(organizationId, row.taskId))); + return await Promise.all( + rows.map(async (row) => { + const [task, detail] = await Promise.all([ + client.getTask(organizationId, row.repoId, row.taskId), + client.getTaskDetail(organizationId, row.repoId, row.taskId).catch(() => null), + ]); + return { 
+ ...task, + pullRequest: detail?.pullRequest ?? null, + activeSessionId: detail?.activeSessionId ?? null, + }; + }), + ); } function pad(input: string, width: number): string { @@ -143,29 +157,17 @@ function agentSymbol(status: TaskRecord["status"]): string { return "-"; } -function toDisplayRow(row: TaskRecord): DisplayRow { - const conflictPrefix = row.conflictsWithMain === "true" ? "\u26A0 " : ""; - - const prLabel = row.prUrl ? `#${row.prUrl.match(/\/pull\/(\d+)/)?.[1] ?? "?"}` : row.prSubmitted ? "sub" : "-"; - - const ciLabel = row.ciStatus ?? "-"; - const reviewLabel = row.reviewStatus - ? row.reviewStatus === "approved" - ? "ok" - : row.reviewStatus === "changes_requested" - ? "chg" - : row.reviewStatus === "pending" - ? "..." - : row.reviewStatus - : "-"; +function toDisplayRow(row: TuiTaskRow): DisplayRow { + const prLabel = row.pullRequest ? `#${row.pullRequest.number}` : "-"; + const reviewLabel = row.pullRequest ? (row.pullRequest.isDraft ? "draft" : row.pullRequest.state.toLowerCase()) : "-"; return { - name: `${conflictPrefix}${row.title || row.branchName}`, - diff: row.diffStat ?? "-", + name: row.title || row.branchName || row.taskId, + diff: "-", agent: agentSymbol(row.status), pr: prLabel, - author: row.prAuthor ?? "-", - ci: ciLabel, + author: row.pullRequest?.authorLogin ?? 
"-", + ci: "-", review: reviewLabel, age: formatRelativeAge(row.updatedAt), }; @@ -186,7 +188,7 @@ function helpLines(width: number): string[] { } export function formatRows( - rows: TaskRecord[], + rows: TuiTaskRow[], selected: number, organizationId: string, status: string, @@ -336,8 +338,8 @@ export async function runTui(config: AppConfig, organizationId: string): Promise renderer.root.add(text); renderer.start(); - let allRows: TaskRecord[] = []; - let filteredRows: TaskRecord[] = []; + let allRows: TuiTaskRow[] = []; + let filteredRows: TuiTaskRow[] = []; let selected = 0; let searchQuery = ""; let showHelp = false; @@ -393,7 +395,7 @@ export async function runTui(config: AppConfig, organizationId: string): Promise render(); }; - const selectedRow = (): TaskRecord | null => { + const selectedRow = (): TuiTaskRow | null => { if (filteredRows.length === 0) { return null; } @@ -522,7 +524,7 @@ export async function runTui(config: AppConfig, organizationId: string): Promise render(); void (async () => { try { - const result = await client.switchTask(organizationId, row.taskId); + const result = await client.switchTask(organizationId, row.repoId, row.taskId); close(`cd ${result.switchTarget}`); } catch (err) { busy = false; @@ -543,7 +545,7 @@ export async function runTui(config: AppConfig, organizationId: string): Promise render(); void (async () => { try { - const result = await client.attachTask(organizationId, row.taskId); + const result = await client.attachTask(organizationId, row.repoId, row.taskId); close(`target=${result.target} session=${result.sessionId ?? 
"none"}`); } catch (err) { busy = false; @@ -559,7 +561,11 @@ export async function runTui(config: AppConfig, organizationId: string): Promise if (!row) { return; } - void runActionWithRefresh(`archiving ${row.taskId}`, async () => client.runAction(organizationId, row.taskId, "archive"), `archived ${row.taskId}`); + void runActionWithRefresh( + `archiving ${row.taskId}`, + async () => client.runAction(organizationId, row.repoId, row.taskId, "archive"), + `archived ${row.taskId}`, + ); return; } @@ -568,7 +574,11 @@ export async function runTui(config: AppConfig, organizationId: string): Promise if (!row) { return; } - void runActionWithRefresh(`syncing ${row.taskId}`, async () => client.runAction(organizationId, row.taskId, "sync"), `synced ${row.taskId}`); + void runActionWithRefresh( + `syncing ${row.taskId}`, + async () => client.runAction(organizationId, row.repoId, row.taskId, "sync"), + `synced ${row.taskId}`, + ); return; } @@ -580,8 +590,8 @@ export async function runTui(config: AppConfig, organizationId: string): Promise void runActionWithRefresh( `merging ${row.taskId}`, async () => { - await client.runAction(organizationId, row.taskId, "merge"); - await client.runAction(organizationId, row.taskId, "archive"); + await client.runAction(organizationId, row.repoId, row.taskId, "merge"); + await client.runAction(organizationId, row.repoId, row.taskId, "archive"); }, `merged+archived ${row.taskId}`, ); @@ -590,14 +600,15 @@ export async function runTui(config: AppConfig, organizationId: string): Promise if (ctrl && name === "o") { const row = selectedRow(); - if (!row?.prUrl) { + const prUrl = row?.pullRequest?.url ?? null; + if (!prUrl) { status = "no PR URL available for this task"; render(); return; } const openCmd = process.platform === "darwin" ? 
"open" : "xdg-open"; - spawnSync(openCmd, [row.prUrl], { stdio: "ignore" }); - status = `opened ${row.prUrl}`; + spawnSync(openCmd, [prUrl], { stdio: "ignore" }); + status = `opened ${prUrl}`; render(); return; } diff --git a/foundry/packages/cli/test/tui-format.test.ts b/foundry/packages/cli/test/tui-format.test.ts index 9ba0feb..15d3fe8 100644 --- a/foundry/packages/cli/test/tui-format.test.ts +++ b/foundry/packages/cli/test/tui-format.test.ts @@ -3,7 +3,7 @@ import type { TaskRecord } from "@sandbox-agent/foundry-shared"; import { filterTasks, fuzzyMatch } from "@sandbox-agent/foundry-client"; import { formatRows } from "../src/tui.js"; -const sample: TaskRecord = { +const sample = { organizationId: "default", repoId: "repo-a", repoRemote: "https://example.com/repo-a.git", @@ -13,33 +13,22 @@ const sample: TaskRecord = { task: "Do test", sandboxProviderId: "local", status: "running", - statusMessage: null, activeSandboxId: "sandbox-1", - activeSessionId: "session-1", + pullRequest: null, sandboxes: [ { sandboxId: "sandbox-1", sandboxProviderId: "local", + sandboxActorId: null, switchTarget: "sandbox://local/sandbox-1", cwd: null, createdAt: 1, updatedAt: 1, }, ], - agentType: null, - prSubmitted: false, - diffStat: null, - prUrl: null, - prAuthor: null, - ciStatus: null, - reviewStatus: null, - reviewer: null, - conflictsWithMain: null, - hasUnpushed: null, - parentBranch: null, createdAt: 1, updatedAt: 1, -}; +} satisfies TaskRecord & { pullRequest: null; activeSessionId?: null }; describe("formatRows", () => { it("renders rust-style table header and empty state", () => { diff --git a/foundry/packages/client/package.json b/foundry/packages/client/package.json index 98079d5..9790474 100644 --- a/foundry/packages/client/package.json +++ b/foundry/packages/client/package.json @@ -10,8 +10,8 @@ "typecheck": "tsc --noEmit", "test": "vitest run", "test:e2e:full": "HF_ENABLE_DAEMON_FULL_E2E=1 vitest run test/e2e/full-integration-e2e.test.ts", - "test:e2e:workbench": 
"HF_ENABLE_DAEMON_WORKBENCH_E2E=1 vitest run test/e2e/workbench-e2e.test.ts", - "test:e2e:workbench-load": "HF_ENABLE_DAEMON_WORKBENCH_LOAD_E2E=1 vitest run test/e2e/workbench-load-e2e.test.ts" + "test:e2e:workspace": "HF_ENABLE_DAEMON_WORKBENCH_E2E=1 vitest run test/e2e/workspace-e2e.test.ts", + "test:e2e:workspace-load": "HF_ENABLE_DAEMON_WORKBENCH_LOAD_E2E=1 vitest run test/e2e/workspace-load-e2e.test.ts" }, "dependencies": { "@sandbox-agent/foundry-shared": "workspace:*", diff --git a/foundry/packages/client/src/app-client.ts b/foundry/packages/client/src/app-client.ts index 16968cf..0bf5526 100644 --- a/foundry/packages/client/src/app-client.ts +++ b/foundry/packages/client/src/app-client.ts @@ -4,6 +4,7 @@ import type { FoundryOrganization, FoundryUser, UpdateFoundryOrganizationProfileInput, + WorkspaceModelId, } from "@sandbox-agent/foundry-shared"; import type { BackendClient } from "./backend-client.js"; import { getMockFoundryAppClient } from "./mock-app.js"; @@ -17,6 +18,7 @@ export interface FoundryAppClient { skipStarterRepo(): Promise; starStarterRepo(organizationId: string): Promise; selectOrganization(organizationId: string): Promise; + setDefaultModel(model: WorkspaceModelId): Promise; updateOrganizationProfile(input: UpdateFoundryOrganizationProfileInput): Promise; triggerGithubSync(organizationId: string): Promise; completeHostedCheckout(organizationId: string, planId: FoundryBillingPlanId): Promise; diff --git a/foundry/packages/client/src/backend-client.ts b/foundry/packages/client/src/backend-client.ts index 14e5661..0903aa8 100644 --- a/foundry/packages/client/src/backend-client.ts +++ b/foundry/packages/client/src/backend-client.ts @@ -7,28 +7,29 @@ import type { CreateTaskInput, AppEvent, SessionEvent, + SandboxProcessSnapshot, SandboxProcessesEvent, TaskRecord, TaskSummary, - TaskWorkbenchChangeModelInput, - TaskWorkbenchCreateTaskInput, - TaskWorkbenchCreateTaskResponse, - TaskWorkbenchDiffInput, - TaskWorkbenchRenameInput, - 
TaskWorkbenchRenameSessionInput, - TaskWorkbenchSelectInput, - TaskWorkbenchSetSessionUnreadInput, - TaskWorkbenchSendMessageInput, - TaskWorkbenchSnapshot, - TaskWorkbenchSessionInput, - TaskWorkbenchUpdateDraftInput, + TaskWorkspaceChangeModelInput, + TaskWorkspaceCreateTaskInput, + TaskWorkspaceCreateTaskResponse, + TaskWorkspaceDiffInput, + TaskWorkspaceRenameInput, + TaskWorkspaceRenameSessionInput, + TaskWorkspaceSelectInput, + TaskWorkspaceSetSessionUnreadInput, + TaskWorkspaceSendMessageInput, + TaskWorkspaceSnapshot, + TaskWorkspaceSessionInput, + TaskWorkspaceUpdateDraftInput, TaskEvent, - WorkbenchTaskDetail, - WorkbenchTaskSummary, - WorkbenchSessionDetail, + WorkspaceTaskDetail, + WorkspaceTaskSummary, + WorkspaceSessionDetail, OrganizationEvent, OrganizationSummarySnapshot, - HistoryEvent, + AuditLogEvent as HistoryEvent, HistoryQueryInput, SandboxProviderId, RepoOverview, @@ -37,8 +38,10 @@ import type { StarSandboxAgentRepoResult, SwitchResult, UpdateFoundryOrganizationProfileInput, + WorkspaceModelGroup, + WorkspaceModelId, } from "@sandbox-agent/foundry-shared"; -import type { ProcessCreateRequest, ProcessInfo, ProcessLogFollowQuery, ProcessLogsResponse, ProcessSignalQuery } from "sandbox-agent"; +import type { ProcessCreateRequest, ProcessLogFollowQuery, ProcessLogsResponse, ProcessSignalQuery } from "sandbox-agent"; import { createMockBackendClient } from "./mock/backend-client.js"; import { taskKey, taskSandboxKey, organizationKey } from "./keys.js"; @@ -64,7 +67,7 @@ export interface SandboxSessionEventRecord { payload: unknown; } -export type SandboxProcessRecord = ProcessInfo; +export type SandboxProcessRecord = SandboxProcessSnapshot; export interface ActorConn { on(event: string, listener: (payload: any) => void): () => void; @@ -72,45 +75,44 @@ export interface ActorConn { dispose(): Promise; } +interface AuthSessionScopedInput { + authSessionId?: string; +} + interface OrganizationHandle { connect(): ActorConn; listRepos(input: { 
organizationId: string }): Promise; createTask(input: CreateTaskInput): Promise; listTasks(input: { organizationId: string; repoId?: string }): Promise; getRepoOverview(input: { organizationId: string; repoId: string }): Promise; - history(input: HistoryQueryInput): Promise; - switchTask(taskId: string): Promise; - getTask(input: { organizationId: string; taskId: string }): Promise; - attachTask(input: { organizationId: string; taskId: string; reason?: string }): Promise<{ target: string; sessionId: string | null }>; - pushTask(input: { organizationId: string; taskId: string; reason?: string }): Promise; - syncTask(input: { organizationId: string; taskId: string; reason?: string }): Promise; - mergeTask(input: { organizationId: string; taskId: string; reason?: string }): Promise; - archiveTask(input: { organizationId: string; taskId: string; reason?: string }): Promise; - killTask(input: { organizationId: string; taskId: string; reason?: string }): Promise; + auditLog(input: HistoryQueryInput): Promise; + switchTask(input: { repoId: string; taskId: string }): Promise; + getTask(input: { organizationId: string; repoId: string; taskId: string }): Promise; + attachTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise<{ target: string; sessionId: string | null }>; + pushTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise; + syncTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise; + mergeTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise; + archiveTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise; + killTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise; useOrganization(input: { organizationId: string }): Promise<{ organizationId: string }>; starSandboxAgentRepo(input: StarSandboxAgentRepoInput): 
Promise; getOrganizationSummary(input: { organizationId: string }): Promise; - applyTaskSummaryUpdate(input: { taskSummary: WorkbenchTaskSummary }): Promise; - removeTaskSummary(input: { taskId: string }): Promise; - reconcileWorkbenchState(input: { organizationId: string }): Promise; - createWorkbenchTask(input: TaskWorkbenchCreateTaskInput): Promise; - markWorkbenchUnread(input: TaskWorkbenchSelectInput): Promise; - renameWorkbenchTask(input: TaskWorkbenchRenameInput): Promise; - renameWorkbenchBranch(input: TaskWorkbenchRenameInput): Promise; - createWorkbenchSession(input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }>; - renameWorkbenchSession(input: TaskWorkbenchRenameSessionInput): Promise; - setWorkbenchSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise; - updateWorkbenchDraft(input: TaskWorkbenchUpdateDraftInput): Promise; - changeWorkbenchModel(input: TaskWorkbenchChangeModelInput): Promise; - sendWorkbenchMessage(input: TaskWorkbenchSendMessageInput): Promise; - stopWorkbenchSession(input: TaskWorkbenchSessionInput): Promise; - closeWorkbenchSession(input: TaskWorkbenchSessionInput): Promise; - publishWorkbenchPr(input: TaskWorkbenchSelectInput): Promise; - revertWorkbenchFile(input: TaskWorkbenchDiffInput): Promise; - reloadGithubOrganization(): Promise; - reloadGithubPullRequests(): Promise; - reloadGithubRepository(input: { repoId: string }): Promise; - reloadGithubPullRequest(input: { repoId: string; prNumber: number }): Promise; + createWorkspaceTask(input: TaskWorkspaceCreateTaskInput & AuthSessionScopedInput): Promise; + markWorkspaceUnread(input: TaskWorkspaceSelectInput & AuthSessionScopedInput): Promise; + renameWorkspaceTask(input: TaskWorkspaceRenameInput & AuthSessionScopedInput): Promise; + createWorkspaceSession(input: TaskWorkspaceSelectInput & { model?: string } & AuthSessionScopedInput): Promise<{ sessionId: string }>; + renameWorkspaceSession(input: TaskWorkspaceRenameSessionInput & 
AuthSessionScopedInput): Promise; + selectWorkspaceSession(input: TaskWorkspaceSessionInput & AuthSessionScopedInput): Promise; + setWorkspaceSessionUnread(input: TaskWorkspaceSetSessionUnreadInput & AuthSessionScopedInput): Promise; + updateWorkspaceDraft(input: TaskWorkspaceUpdateDraftInput & AuthSessionScopedInput): Promise; + changeWorkspaceModel(input: TaskWorkspaceChangeModelInput & AuthSessionScopedInput): Promise; + sendWorkspaceMessage(input: TaskWorkspaceSendMessageInput & AuthSessionScopedInput): Promise; + stopWorkspaceSession(input: TaskWorkspaceSessionInput & AuthSessionScopedInput): Promise; + closeWorkspaceSession(input: TaskWorkspaceSessionInput & AuthSessionScopedInput): Promise; + publishWorkspacePr(input: TaskWorkspaceSelectInput & AuthSessionScopedInput): Promise; + revertWorkspaceFile(input: TaskWorkspaceDiffInput & AuthSessionScopedInput): Promise; + adminReloadGithubOrganization(): Promise; + adminReloadGithubRepository(input: { repoId: string }): Promise; } interface AppOrganizationHandle { @@ -119,6 +121,7 @@ interface AppOrganizationHandle { skipAppStarterRepo(input: { sessionId: string }): Promise; starAppStarterRepo(input: { sessionId: string; organizationId: string }): Promise; selectAppOrganization(input: { sessionId: string; organizationId: string }): Promise; + setAppDefaultModel(input: { sessionId: string; defaultModel: WorkspaceModelId }): Promise; updateAppOrganizationProfile(input: UpdateFoundryOrganizationProfileInput & { sessionId: string }): Promise; triggerAppRepoImport(input: { sessionId: string; organizationId: string }): Promise; beginAppGithubInstall(input: { sessionId: string; organizationId: string }): Promise<{ url: string }>; @@ -130,9 +133,9 @@ interface AppOrganizationHandle { } interface TaskHandle { - getTaskSummary(): Promise; - getTaskDetail(): Promise; - getSessionDetail(input: { sessionId: string }): Promise; + getTaskSummary(): Promise; + getTaskDetail(input?: AuthSessionScopedInput): Promise; + 
getSessionDetail(input: { sessionId: string } & AuthSessionScopedInput): Promise; connect(): ActorConn; } @@ -157,6 +160,7 @@ interface TaskSandboxHandle { rawSendSessionMethod(sessionId: string, method: string, params: Record): Promise; destroySession(sessionId: string): Promise; sandboxAgentConnection(): Promise<{ endpoint: string; token?: string }>; + listWorkspaceModelGroups(): Promise; providerState(): Promise<{ sandboxProviderId: SandboxProviderId; sandboxId: string; state: string; at: number }>; } @@ -179,6 +183,7 @@ export interface BackendClientOptions { endpoint: string; defaultOrganizationId?: string; mode?: "remote" | "mock"; + encoding?: "json" | "cbor" | "bare"; } export interface BackendClient { @@ -192,6 +197,7 @@ export interface BackendClient { skipAppStarterRepo(): Promise; starAppStarterRepo(organizationId: string): Promise; selectAppOrganization(organizationId: string): Promise; + setAppDefaultModel(defaultModel: WorkspaceModelId): Promise; updateAppOrganizationProfile(input: UpdateFoundryOrganizationProfileInput): Promise; triggerAppRepoImport(organizationId: string): Promise; reconnectAppGithub(organizationId: string): Promise; @@ -204,11 +210,11 @@ export interface BackendClient { createTask(input: CreateTaskInput): Promise; listTasks(organizationId: string, repoId?: string): Promise; getRepoOverview(organizationId: string, repoId: string): Promise; - getTask(organizationId: string, taskId: string): Promise; + getTask(organizationId: string, repoId: string, taskId: string): Promise; listHistory(input: HistoryQueryInput): Promise; - switchTask(organizationId: string, taskId: string): Promise; - attachTask(organizationId: string, taskId: string): Promise<{ target: string; sessionId: string | null }>; - runAction(organizationId: string, taskId: string, action: TaskAction): Promise; + switchTask(organizationId: string, repoId: string, taskId: string): Promise; + attachTask(organizationId: string, repoId: string, taskId: string): Promise<{ 
target: string; sessionId: string | null }>; + runAction(organizationId: string, repoId: string, taskId: string, action: TaskAction): Promise; createSandboxSession(input: { organizationId: string; sandboxProviderId: SandboxProviderId; @@ -279,29 +285,28 @@ export interface BackendClient { sandboxId: string, ): Promise<{ sandboxProviderId: SandboxProviderId; sandboxId: string; state: string; at: number }>; getSandboxAgentConnection(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise<{ endpoint: string; token?: string }>; + getSandboxWorkspaceModelGroups(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise; getOrganizationSummary(organizationId: string): Promise; - getTaskDetail(organizationId: string, repoId: string, taskId: string): Promise; - getSessionDetail(organizationId: string, repoId: string, taskId: string, sessionId: string): Promise; - getWorkbench(organizationId: string): Promise; - subscribeWorkbench(organizationId: string, listener: () => void): () => void; - createWorkbenchTask(organizationId: string, input: TaskWorkbenchCreateTaskInput): Promise; - markWorkbenchUnread(organizationId: string, input: TaskWorkbenchSelectInput): Promise; - renameWorkbenchTask(organizationId: string, input: TaskWorkbenchRenameInput): Promise; - renameWorkbenchBranch(organizationId: string, input: TaskWorkbenchRenameInput): Promise; - createWorkbenchSession(organizationId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }>; - renameWorkbenchSession(organizationId: string, input: TaskWorkbenchRenameSessionInput): Promise; - setWorkbenchSessionUnread(organizationId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise; - updateWorkbenchDraft(organizationId: string, input: TaskWorkbenchUpdateDraftInput): Promise; - changeWorkbenchModel(organizationId: string, input: TaskWorkbenchChangeModelInput): Promise; - sendWorkbenchMessage(organizationId: string, 
input: TaskWorkbenchSendMessageInput): Promise; - stopWorkbenchSession(organizationId: string, input: TaskWorkbenchSessionInput): Promise; - closeWorkbenchSession(organizationId: string, input: TaskWorkbenchSessionInput): Promise; - publishWorkbenchPr(organizationId: string, input: TaskWorkbenchSelectInput): Promise; - revertWorkbenchFile(organizationId: string, input: TaskWorkbenchDiffInput): Promise; - reloadGithubOrganization(organizationId: string): Promise; - reloadGithubPullRequests(organizationId: string): Promise; - reloadGithubRepository(organizationId: string, repoId: string): Promise; - reloadGithubPullRequest(organizationId: string, repoId: string, prNumber: number): Promise; + getTaskDetail(organizationId: string, repoId: string, taskId: string): Promise; + getSessionDetail(organizationId: string, repoId: string, taskId: string, sessionId: string): Promise; + getWorkspace(organizationId: string): Promise; + subscribeWorkspace(organizationId: string, listener: () => void): () => void; + createWorkspaceTask(organizationId: string, input: TaskWorkspaceCreateTaskInput): Promise; + markWorkspaceUnread(organizationId: string, input: TaskWorkspaceSelectInput): Promise; + renameWorkspaceTask(organizationId: string, input: TaskWorkspaceRenameInput): Promise; + createWorkspaceSession(organizationId: string, input: TaskWorkspaceSelectInput & { model?: string }): Promise<{ sessionId: string }>; + renameWorkspaceSession(organizationId: string, input: TaskWorkspaceRenameSessionInput): Promise; + selectWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise; + setWorkspaceSessionUnread(organizationId: string, input: TaskWorkspaceSetSessionUnreadInput): Promise; + updateWorkspaceDraft(organizationId: string, input: TaskWorkspaceUpdateDraftInput): Promise; + changeWorkspaceModel(organizationId: string, input: TaskWorkspaceChangeModelInput): Promise; + sendWorkspaceMessage(organizationId: string, input: TaskWorkspaceSendMessageInput): 
Promise; + stopWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise; + closeWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise; + publishWorkspacePr(organizationId: string, input: TaskWorkspaceSelectInput): Promise; + revertWorkspaceFile(organizationId: string, input: TaskWorkspaceDiffInput): Promise; + adminReloadGithubOrganization(organizationId: string): Promise; + adminReloadGithubRepository(organizationId: string, repoId: string): Promise; health(): Promise<{ ok: true }>; useOrganization(organizationId: string): Promise<{ organizationId: string }>; starSandboxAgentRepo(organizationId: string): Promise; @@ -409,8 +414,8 @@ export function createBackendClient(options: BackendClientOptions): BackendClien const endpoints = deriveBackendEndpoints(options.endpoint); const rivetApiEndpoint = endpoints.rivetEndpoint; const appApiEndpoint = endpoints.appEndpoint; - const client = createClient({ endpoint: rivetApiEndpoint }) as unknown as RivetClient; - const workbenchSubscriptions = new Map< + const client = createClient({ endpoint: rivetApiEndpoint, encoding: options.encoding }) as unknown as RivetClient; + const workspaceSubscriptions = new Map< string, { listeners: Set<() => void>; @@ -461,6 +466,16 @@ export function createBackendClient(options: BackendClientOptions): BackendClien return typeof sessionId === "string" && sessionId.length > 0 ? sessionId : null; }; + const getAuthSessionInput = async (): Promise => { + const authSessionId = await getSessionId(); + return authSessionId ? { authSessionId } : undefined; + }; + + const withAuthSessionInput = async (input: TInput): Promise => { + const authSessionInput = await getAuthSessionInput(); + return authSessionInput ? 
{ ...input, ...authSessionInput } : input; + }; + const organization = async (organizationId: string): Promise => client.organization.getOrCreate(organizationKey(organizationId), { createWithInput: organizationId, @@ -471,7 +486,15 @@ export function createBackendClient(options: BackendClientOptions): BackendClien createWithInput: "app", }) as unknown as AppOrganizationHandle; - const task = async (organizationId: string, repoId: string, taskId: string): Promise => client.task.get(taskKey(organizationId, repoId, taskId)); + // getOrCreate is intentional here — this is the ONLY lazy creation point for + // virtual tasks (PR-driven entries that exist in the org's local tables but + // have no task actor yet). The task actor self-initializes from org data in + // getCurrentRecord(). Backend code must NEVER use getOrCreateTask except in + // createTaskMutation. See backend/CLAUDE.md "Lazy Task Actor Creation". + const task = async (organizationId: string, repoId: string, taskId: string): Promise => + client.task.getOrCreate(taskKey(organizationId, repoId, taskId), { + createWithInput: { organizationId, repoId, taskId }, + }); const sandboxByKey = async (organizationId: string, _providerId: SandboxProviderId, sandboxId: string): Promise => { return (client as any).taskSandbox.get(taskSandboxKey(organizationId, sandboxId)); @@ -493,17 +516,15 @@ export function createBackendClient(options: BackendClientOptions): BackendClien for (const row of candidates) { try { - const detail = await ws.getTask({ organizationId, taskId: row.taskId }); + const detail = await ws.getTask({ organizationId, repoId: row.repoId, taskId: row.taskId }); if (detail.sandboxProviderId !== sandboxProviderId) { continue; } - const sandbox = detail.sandboxes.find( + const sandboxes = detail.sandboxes as Array<(typeof detail.sandboxes)[number] & { sandboxActorId?: string }>; + const sandbox = sandboxes.find( (sb) => - sb.sandboxId === sandboxId && - sb.sandboxProviderId === sandboxProviderId && - 
typeof (sb as any).sandboxActorId === "string" && - (sb as any).sandboxActorId.length > 0, - ) as { sandboxActorId?: string } | undefined; + sb.sandboxId === sandboxId && sb.sandboxProviderId === sandboxProviderId && typeof sb.sandboxActorId === "string" && sb.sandboxActorId.length > 0, + ); if (sandbox?.sandboxActorId) { return (client as any).taskSandbox.getForId(sandbox.sandboxActorId); } @@ -563,67 +584,81 @@ export function createBackendClient(options: BackendClientOptions): BackendClien } }; - const getWorkbenchCompat = async (organizationId: string): Promise => { + const getTaskDetailWithAuth = async (organizationId: string, repoId: string, taskIdValue: string): Promise => { + return (await task(organizationId, repoId, taskIdValue)).getTaskDetail(await getAuthSessionInput()); + }; + + const getSessionDetailWithAuth = async (organizationId: string, repoId: string, taskIdValue: string, sessionId: string): Promise => { + return (await task(organizationId, repoId, taskIdValue)).getSessionDetail(await withAuthSessionInput({ sessionId })); + }; + + const getWorkspaceCompat = async (organizationId: string): Promise => { + const authSessionInput = await getAuthSessionInput(); const summary = await (await organization(organizationId)).getOrganizationSummary({ organizationId }); - const tasks = ( - await Promise.all( - summary.taskSummaries.map(async (taskSummary) => { - let detail; - try { - detail = await (await task(organizationId, taskSummary.repoId, taskSummary.id)).getTaskDetail(); - } catch (error) { - if (isActorNotFoundError(error)) { - return null; - } - throw error; + const resolvedTasks = await Promise.all( + summary.taskSummaries.map(async (taskSummary) => { + let detail; + try { + const taskHandle = await task(organizationId, taskSummary.repoId, taskSummary.id); + detail = await taskHandle.getTaskDetail(authSessionInput); + } catch (error) { + if (isActorNotFoundError(error)) { + return null; } - const sessionDetails = await Promise.all( - 
detail.sessionsSummary.map(async (session) => { - try { - const full = await (await task(organizationId, detail.repoId, detail.id)).getSessionDetail({ sessionId: session.id }); - return [session.id, full] as const; - } catch (error) { - if (isActorNotFoundError(error)) { - return null; - } - throw error; + throw error; + } + const sessionDetails = await Promise.all( + detail.sessionsSummary.map(async (session) => { + try { + const full = await (await task(organizationId, detail.repoId, detail.id)).getSessionDetail({ + sessionId: session.id, + ...(authSessionInput ?? {}), + }); + return [session.id, full] as const; + } catch (error) { + if (isActorNotFoundError(error)) { + return null; } - }), - ); - const sessionDetailsById = new Map(sessionDetails.filter((entry): entry is readonly [string, WorkbenchSessionDetail] => entry !== null)); - return { - id: detail.id, - repoId: detail.repoId, - title: detail.title, - status: detail.status, - repoName: detail.repoName, - updatedAtMs: detail.updatedAtMs, - branch: detail.branch, - pullRequest: detail.pullRequest, - sessions: detail.sessionsSummary.map((session) => { - const full = sessionDetailsById.get(session.id); - return { - id: session.id, - sessionId: session.sessionId, - sessionName: session.sessionName, - agent: session.agent, - model: session.model, - status: session.status, - thinkingSinceMs: session.thinkingSinceMs, - unread: session.unread, - created: session.created, - draft: full?.draft ?? { text: "", attachments: [], updatedAtMs: null }, - transcript: full?.transcript ?? 
[], - }; - }), - fileChanges: detail.fileChanges, - diffs: detail.diffs, - fileTree: detail.fileTree, - minutesUsed: detail.minutesUsed, - }; - }), - ) - ).filter((task): task is TaskWorkbenchSnapshot["tasks"][number] => task !== null); + throw error; + } + }), + ); + const sessionDetailsById = new Map(sessionDetails.filter((entry): entry is readonly [string, WorkspaceSessionDetail] => entry !== null)); + return { + id: detail.id, + repoId: detail.repoId, + title: detail.title, + status: detail.status, + repoName: detail.repoName, + updatedAtMs: detail.updatedAtMs, + branch: detail.branch, + pullRequest: detail.pullRequest, + activeSessionId: detail.activeSessionId ?? null, + sessions: detail.sessionsSummary.map((session) => { + const full = sessionDetailsById.get(session.id); + return { + id: session.id, + sessionId: session.sessionId, + sessionName: session.sessionName, + agent: session.agent, + model: session.model, + status: session.status, + thinkingSinceMs: session.thinkingSinceMs, + unread: session.unread, + created: session.created, + draft: full?.draft ?? { text: "", attachments: [], updatedAtMs: null }, + transcript: full?.transcript ?? [], + }; + }), + fileChanges: detail.fileChanges, + diffs: detail.diffs, + fileTree: detail.fileTree, + minutesUsed: detail.minutesUsed, + activeSandboxId: detail.activeSandboxId ?? 
null, + }; + }), + ); + const tasks = resolvedTasks.filter((task): task is Exclude<(typeof resolvedTasks)[number], null> => task !== null); const repositories = summary.repos .map((repo) => ({ @@ -642,14 +677,14 @@ export function createBackendClient(options: BackendClientOptions): BackendClien }; }; - const subscribeWorkbench = (organizationId: string, listener: () => void): (() => void) => { - let entry = workbenchSubscriptions.get(organizationId); + const subscribeWorkspace = (organizationId: string, listener: () => void): (() => void) => { + let entry = workspaceSubscriptions.get(organizationId); if (!entry) { entry = { listeners: new Set(), disposeConnPromise: null, }; - workbenchSubscriptions.set(organizationId, entry); + workspaceSubscriptions.set(organizationId, entry); } entry.listeners.add(listener); @@ -658,8 +693,8 @@ export function createBackendClient(options: BackendClientOptions): BackendClien entry.disposeConnPromise = (async () => { const handle = await organization(organizationId); const conn = (handle as any).connect(); - const unsubscribeEvent = conn.on("workbenchUpdated", () => { - const current = workbenchSubscriptions.get(organizationId); + const unsubscribeEvent = conn.on("organizationUpdated", () => { + const current = workspaceSubscriptions.get(organizationId); if (!current) { return; } @@ -677,7 +712,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien } return () => { - const current = workbenchSubscriptions.get(organizationId); + const current = workspaceSubscriptions.get(organizationId); if (!current) { return; } @@ -686,7 +721,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien return; } - workbenchSubscriptions.delete(organizationId); + workspaceSubscriptions.delete(organizationId); void current.disposeConnPromise?.then(async (disposeConn) => { await disposeConn?.(); }); @@ -849,6 +884,14 @@ export function createBackendClient(options: BackendClientOptions): 
BackendClien return await (await appOrganization()).selectAppOrganization({ sessionId, organizationId }); }, + async setAppDefaultModel(defaultModel: WorkspaceModelId): Promise { + const sessionId = await getSessionId(); + if (!sessionId) { + throw new Error("No active auth session"); + } + return await (await appOrganization()).setAppDefaultModel({ sessionId, defaultModel }); + }, + async updateAppOrganizationProfile(input: UpdateFoundryOrganizationProfileInput): Promise { const sessionId = await getSessionId(); if (!sessionId) { @@ -948,33 +991,36 @@ export function createBackendClient(options: BackendClientOptions): BackendClien return (await organization(organizationId)).getRepoOverview({ organizationId, repoId }); }, - async getTask(organizationId: string, taskId: string): Promise { + async getTask(organizationId: string, repoId: string, taskId: string): Promise { return (await organization(organizationId)).getTask({ organizationId, + repoId, taskId, }); }, async listHistory(input: HistoryQueryInput): Promise { - return (await organization(input.organizationId)).history(input); + return (await organization(input.organizationId)).auditLog(input); }, - async switchTask(organizationId: string, taskId: string): Promise { - return (await organization(organizationId)).switchTask(taskId); + async switchTask(organizationId: string, repoId: string, taskId: string): Promise { + return (await organization(organizationId)).switchTask({ repoId, taskId }); }, - async attachTask(organizationId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> { + async attachTask(organizationId: string, repoId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> { return (await organization(organizationId)).attachTask({ organizationId, + repoId, taskId, reason: "cli.attach", }); }, - async runAction(organizationId: string, taskId: string, action: TaskAction): Promise { + async runAction(organizationId: string, repoId: string, taskId: 
string, action: TaskAction): Promise { if (action === "push") { await (await organization(organizationId)).pushTask({ organizationId, + repoId, taskId, reason: "cli.push", }); @@ -983,6 +1029,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (action === "sync") { await (await organization(organizationId)).syncTask({ organizationId, + repoId, taskId, reason: "cli.sync", }); @@ -991,6 +1038,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (action === "merge") { await (await organization(organizationId)).mergeTask({ organizationId, + repoId, taskId, reason: "cli.merge", }); @@ -999,6 +1047,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien if (action === "archive") { await (await organization(organizationId)).archiveTask({ organizationId, + repoId, taskId, reason: "cli.archive", }); @@ -1006,6 +1055,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien } await (await organization(organizationId)).killTask({ organizationId, + repoId, taskId, reason: "cli.kill", }); @@ -1156,96 +1206,92 @@ export function createBackendClient(options: BackendClientOptions): BackendClien return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.sandboxAgentConnection()); }, + async getSandboxWorkspaceModelGroups(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise { + return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.listWorkspaceModelGroups()); + }, + async getOrganizationSummary(organizationId: string): Promise { return (await organization(organizationId)).getOrganizationSummary({ organizationId }); }, - async getTaskDetail(organizationId: string, repoId: string, taskIdValue: string): Promise { - return (await task(organizationId, repoId, taskIdValue)).getTaskDetail(); + async getTaskDetail(organizationId: 
string, repoId: string, taskIdValue: string): Promise { + return await getTaskDetailWithAuth(organizationId, repoId, taskIdValue); }, - async getSessionDetail(organizationId: string, repoId: string, taskIdValue: string, sessionId: string): Promise { - return (await task(organizationId, repoId, taskIdValue)).getSessionDetail({ sessionId }); + async getSessionDetail(organizationId: string, repoId: string, taskIdValue: string, sessionId: string): Promise { + return await getSessionDetailWithAuth(organizationId, repoId, taskIdValue, sessionId); }, - async getWorkbench(organizationId: string): Promise { - return await getWorkbenchCompat(organizationId); + async getWorkspace(organizationId: string): Promise { + return await getWorkspaceCompat(organizationId); }, - subscribeWorkbench(organizationId: string, listener: () => void): () => void { - return subscribeWorkbench(organizationId, listener); + subscribeWorkspace(organizationId: string, listener: () => void): () => void { + return subscribeWorkspace(organizationId, listener); }, - async createWorkbenchTask(organizationId: string, input: TaskWorkbenchCreateTaskInput): Promise { - return (await organization(organizationId)).createWorkbenchTask(input); + async createWorkspaceTask(organizationId: string, input: TaskWorkspaceCreateTaskInput): Promise { + return (await organization(organizationId)).createWorkspaceTask(await withAuthSessionInput(input)); }, - async markWorkbenchUnread(organizationId: string, input: TaskWorkbenchSelectInput): Promise { - await (await organization(organizationId)).markWorkbenchUnread(input); + async markWorkspaceUnread(organizationId: string, input: TaskWorkspaceSelectInput): Promise { + await (await organization(organizationId)).markWorkspaceUnread(await withAuthSessionInput(input)); }, - async renameWorkbenchTask(organizationId: string, input: TaskWorkbenchRenameInput): Promise { - await (await organization(organizationId)).renameWorkbenchTask(input); + async 
renameWorkspaceTask(organizationId: string, input: TaskWorkspaceRenameInput): Promise { + await (await organization(organizationId)).renameWorkspaceTask(await withAuthSessionInput(input)); }, - async renameWorkbenchBranch(organizationId: string, input: TaskWorkbenchRenameInput): Promise { - await (await organization(organizationId)).renameWorkbenchBranch(input); + async createWorkspaceSession(organizationId: string, input: TaskWorkspaceSelectInput & { model?: string }): Promise<{ sessionId: string }> { + return await (await organization(organizationId)).createWorkspaceSession(await withAuthSessionInput(input)); }, - async createWorkbenchSession(organizationId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }> { - return await (await organization(organizationId)).createWorkbenchSession(input); + async renameWorkspaceSession(organizationId: string, input: TaskWorkspaceRenameSessionInput): Promise { + await (await organization(organizationId)).renameWorkspaceSession(await withAuthSessionInput(input)); }, - async renameWorkbenchSession(organizationId: string, input: TaskWorkbenchRenameSessionInput): Promise { - await (await organization(organizationId)).renameWorkbenchSession(input); + async selectWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise { + await (await organization(organizationId)).selectWorkspaceSession(await withAuthSessionInput(input)); }, - async setWorkbenchSessionUnread(organizationId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise { - await (await organization(organizationId)).setWorkbenchSessionUnread(input); + async setWorkspaceSessionUnread(organizationId: string, input: TaskWorkspaceSetSessionUnreadInput): Promise { + await (await organization(organizationId)).setWorkspaceSessionUnread(await withAuthSessionInput(input)); }, - async updateWorkbenchDraft(organizationId: string, input: TaskWorkbenchUpdateDraftInput): Promise { - await (await 
organization(organizationId)).updateWorkbenchDraft(input); + async updateWorkspaceDraft(organizationId: string, input: TaskWorkspaceUpdateDraftInput): Promise { + await (await organization(organizationId)).updateWorkspaceDraft(await withAuthSessionInput(input)); }, - async changeWorkbenchModel(organizationId: string, input: TaskWorkbenchChangeModelInput): Promise { - await (await organization(organizationId)).changeWorkbenchModel(input); + async changeWorkspaceModel(organizationId: string, input: TaskWorkspaceChangeModelInput): Promise { + await (await organization(organizationId)).changeWorkspaceModel(await withAuthSessionInput(input)); }, - async sendWorkbenchMessage(organizationId: string, input: TaskWorkbenchSendMessageInput): Promise { - await (await organization(organizationId)).sendWorkbenchMessage(input); + async sendWorkspaceMessage(organizationId: string, input: TaskWorkspaceSendMessageInput): Promise { + await (await organization(organizationId)).sendWorkspaceMessage(await withAuthSessionInput(input)); }, - async stopWorkbenchSession(organizationId: string, input: TaskWorkbenchSessionInput): Promise { - await (await organization(organizationId)).stopWorkbenchSession(input); + async stopWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise { + await (await organization(organizationId)).stopWorkspaceSession(await withAuthSessionInput(input)); }, - async closeWorkbenchSession(organizationId: string, input: TaskWorkbenchSessionInput): Promise { - await (await organization(organizationId)).closeWorkbenchSession(input); + async closeWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise { + await (await organization(organizationId)).closeWorkspaceSession(await withAuthSessionInput(input)); }, - async publishWorkbenchPr(organizationId: string, input: TaskWorkbenchSelectInput): Promise { - await (await organization(organizationId)).publishWorkbenchPr(input); + async publishWorkspacePr(organizationId: 
string, input: TaskWorkspaceSelectInput): Promise { + await (await organization(organizationId)).publishWorkspacePr(await withAuthSessionInput(input)); }, - async revertWorkbenchFile(organizationId: string, input: TaskWorkbenchDiffInput): Promise { - await (await organization(organizationId)).revertWorkbenchFile(input); + async revertWorkspaceFile(organizationId: string, input: TaskWorkspaceDiffInput): Promise { + await (await organization(organizationId)).revertWorkspaceFile(await withAuthSessionInput(input)); }, - async reloadGithubOrganization(organizationId: string): Promise { - await (await organization(organizationId)).reloadGithubOrganization(); + async adminReloadGithubOrganization(organizationId: string): Promise { + await (await organization(organizationId)).adminReloadGithubOrganization(); }, - async reloadGithubPullRequests(organizationId: string): Promise { - await (await organization(organizationId)).reloadGithubPullRequests(); - }, - - async reloadGithubRepository(organizationId: string, repoId: string): Promise { - await (await organization(organizationId)).reloadGithubRepository({ repoId }); - }, - - async reloadGithubPullRequest(organizationId: string, repoId: string, prNumber: number): Promise { - await (await organization(organizationId)).reloadGithubPullRequest({ repoId, prNumber }); + async adminReloadGithubRepository(organizationId: string, repoId: string): Promise { + await (await organization(organizationId)).adminReloadGithubRepository({ repoId }); }, async health(): Promise<{ ok: true }> { diff --git a/foundry/packages/client/src/index.ts b/foundry/packages/client/src/index.ts index 87909a9..e28745f 100644 --- a/foundry/packages/client/src/index.ts +++ b/foundry/packages/client/src/index.ts @@ -8,4 +8,4 @@ export * from "./subscription/use-subscription.js"; export * from "./keys.js"; export * from "./mock-app.js"; export * from "./view-model.js"; -export * from "./workbench-client.js"; +export * from "./workspace-client.js"; diff --git 
a/foundry/packages/client/src/keys.ts b/foundry/packages/client/src/keys.ts index 314f16a..7242aae 100644 --- a/foundry/packages/client/src/keys.ts +++ b/foundry/packages/client/src/keys.ts @@ -4,18 +4,14 @@ export function organizationKey(organizationId: string): ActorKey { return ["org", organizationId]; } -export function repositoryKey(organizationId: string, repoId: string): ActorKey { - return ["org", organizationId, "repository", repoId]; -} - export function taskKey(organizationId: string, repoId: string, taskId: string): ActorKey { - return ["org", organizationId, "repository", repoId, "task", taskId]; + return ["org", organizationId, "task", repoId, taskId]; } export function taskSandboxKey(organizationId: string, sandboxId: string): ActorKey { return ["org", organizationId, "sandbox", sandboxId]; } -export function historyKey(organizationId: string, repoId: string): ActorKey { - return ["org", organizationId, "repository", repoId, "history"]; +export function auditLogKey(organizationId: string): ActorKey { + return ["org", organizationId, "audit-log"]; } diff --git a/foundry/packages/client/src/mock-app.ts b/foundry/packages/client/src/mock-app.ts index 0fa6fc7..00fd9ca 100644 --- a/foundry/packages/client/src/mock-app.ts +++ b/foundry/packages/client/src/mock-app.ts @@ -1,4 +1,8 @@ -import type { WorkbenchModelId } from "@sandbox-agent/foundry-shared"; +import { DEFAULT_WORKSPACE_MODEL_GROUPS, DEFAULT_WORKSPACE_MODEL_ID, type WorkspaceModelId } from "@sandbox-agent/foundry-shared"; + +const claudeModels = DEFAULT_WORKSPACE_MODEL_GROUPS.find((group) => group.agentKind === "Claude")?.models ?? []; +const CLAUDE_SECONDARY_MODEL_ID = claudeModels[1]?.id ?? claudeModels[0]?.id ?? DEFAULT_WORKSPACE_MODEL_ID; +const CLAUDE_TERTIARY_MODEL_ID = claudeModels[2]?.id ?? 
CLAUDE_SECONDARY_MODEL_ID; import { injectMockLatency } from "./mock/latency.js"; import rivetDevFixture from "../../../scripts/data/rivet-dev.json" with { type: "json" }; @@ -16,6 +20,7 @@ export interface MockFoundryUser { githubLogin: string; roleLabel: string; eligibleOrganizationIds: string[]; + defaultModel: WorkspaceModelId; } export interface MockFoundryOrganizationMember { @@ -61,7 +66,6 @@ export interface MockFoundryOrganizationSettings { slug: string; primaryDomain: string; seatAccrualMode: "first_prompt"; - defaultModel: WorkbenchModelId; autoImportRepos: boolean; } @@ -111,6 +115,7 @@ export interface MockFoundryAppClient { skipStarterRepo(): Promise; starStarterRepo(organizationId: string): Promise; selectOrganization(organizationId: string): Promise; + setDefaultModel(model: WorkspaceModelId): Promise; updateOrganizationProfile(input: UpdateMockOrganizationProfileInput): Promise; triggerGithubSync(organizationId: string): Promise; completeHostedCheckout(organizationId: string, planId: MockBillingPlanId): Promise; @@ -180,7 +185,6 @@ function buildRivetOrganization(): MockFoundryOrganization { slug: "rivet", primaryDomain: "rivet.dev", seatAccrualMode: "first_prompt", - defaultModel: "gpt-5.3-codex", autoImportRepos: true, }, github: { @@ -233,6 +237,7 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { githubLogin: "nathan", roleLabel: "Founder", eligibleOrganizationIds: ["personal-nathan", "acme", "rivet"], + defaultModel: DEFAULT_WORKSPACE_MODEL_ID, }, { id: "user-maya", @@ -241,6 +246,7 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { githubLogin: "maya", roleLabel: "Staff Engineer", eligibleOrganizationIds: ["acme"], + defaultModel: CLAUDE_SECONDARY_MODEL_ID, }, { id: "user-jamie", @@ -249,6 +255,7 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { githubLogin: "jamie", roleLabel: "Platform Lead", eligibleOrganizationIds: ["personal-jamie", "rivet"], + defaultModel: CLAUDE_TERTIARY_MODEL_ID, }, ], organizations: [ 
@@ -261,7 +268,6 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { slug: "nathan", primaryDomain: "personal", seatAccrualMode: "first_prompt", - defaultModel: "claude-sonnet-4", autoImportRepos: true, }, github: { @@ -297,7 +303,6 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { slug: "acme", primaryDomain: "acme.dev", seatAccrualMode: "first_prompt", - defaultModel: "claude-sonnet-4", autoImportRepos: true, }, github: { @@ -342,7 +347,6 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot { slug: "jamie", primaryDomain: "personal", seatAccrualMode: "first_prompt", - defaultModel: "claude-opus-4", autoImportRepos: true, }, github: { @@ -538,6 +542,18 @@ class MockFoundryAppStore implements MockFoundryAppClient { } } + async setDefaultModel(model: WorkspaceModelId): Promise { + await this.injectAsyncLatency(); + const currentUserId = this.snapshot.auth.currentUserId; + if (!currentUserId) { + throw new Error("No signed-in mock user"); + } + this.updateSnapshot((current) => ({ + ...current, + users: current.users.map((user) => (user.id === currentUserId ? 
{ ...user, defaultModel: model } : user)), + })); + } + async updateOrganizationProfile(input: UpdateMockOrganizationProfileInput): Promise { await this.injectAsyncLatency(); this.requireOrganization(input.organizationId); diff --git a/foundry/packages/client/src/mock/backend-client.ts b/foundry/packages/client/src/mock/backend-client.ts index 011192d..fc6470c 100644 --- a/foundry/packages/client/src/mock/backend-client.ts +++ b/foundry/packages/client/src/mock/backend-client.ts @@ -6,25 +6,26 @@ import type { SessionEvent, TaskRecord, TaskSummary, - TaskWorkbenchChangeModelInput, - TaskWorkbenchCreateTaskInput, - TaskWorkbenchCreateTaskResponse, - TaskWorkbenchDiffInput, - TaskWorkbenchRenameInput, - TaskWorkbenchRenameSessionInput, - TaskWorkbenchSelectInput, - TaskWorkbenchSetSessionUnreadInput, - TaskWorkbenchSendMessageInput, - TaskWorkbenchSnapshot, - TaskWorkbenchSessionInput, - TaskWorkbenchUpdateDraftInput, + TaskWorkspaceChangeModelInput, + TaskWorkspaceCreateTaskInput, + TaskWorkspaceCreateTaskResponse, + TaskWorkspaceDiffInput, + TaskWorkspaceRenameInput, + TaskWorkspaceRenameSessionInput, + TaskWorkspaceSelectInput, + TaskWorkspaceSetSessionUnreadInput, + TaskWorkspaceSendMessageInput, + TaskWorkspaceSnapshot, + TaskWorkspaceSessionInput, + TaskWorkspaceUpdateDraftInput, TaskEvent, - WorkbenchSessionDetail, - WorkbenchTaskDetail, - WorkbenchTaskSummary, + WorkspaceSessionDetail, + WorkspaceModelGroup, + WorkspaceTaskDetail, + WorkspaceTaskSummary, OrganizationEvent, OrganizationSummarySnapshot, - HistoryEvent, + AuditLogEvent as HistoryEvent, HistoryQueryInput, SandboxProviderId, RepoOverview, @@ -32,9 +33,10 @@ import type { StarSandboxAgentRepoResult, SwitchResult, } from "@sandbox-agent/foundry-shared"; +import { DEFAULT_WORKSPACE_MODEL_GROUPS } from "@sandbox-agent/foundry-shared"; import type { ProcessCreateRequest, ProcessLogFollowQuery, ProcessLogsResponse, ProcessSignalQuery } from "sandbox-agent"; import type { ActorConn, BackendClient, 
SandboxProcessRecord, SandboxSessionEventRecord, SandboxSessionRecord } from "../backend-client.js"; -import { getSharedMockWorkbenchClient } from "./workbench-client.js"; +import { getSharedMockWorkspaceClient } from "./workspace-client.js"; interface MockProcessRecord extends SandboxProcessRecord { logText: string; @@ -89,7 +91,7 @@ function toTaskStatus(status: TaskRecord["status"], archived: boolean): TaskReco } export function createMockBackendClient(defaultOrganizationId = "default"): BackendClient { - const workbench = getSharedMockWorkbenchClient(); + const workspace = getSharedMockWorkspaceClient(); const listenersBySandboxId = new Map void>>(); const processesBySandboxId = new Map(); const connectionListeners = new Map void>>(); @@ -97,7 +99,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back let nextProcessId = 1; const requireTask = (taskId: string) => { - const task = workbench.getSnapshot().tasks.find((candidate) => candidate.id === taskId); + const task = workspace.getSnapshot().tasks.find((candidate) => candidate.id === taskId); if (!task) { throw new Error(`Unknown mock task ${taskId}`); } @@ -164,7 +166,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back async dispose(): Promise {}, }); - const buildTaskSummary = (task: TaskWorkbenchSnapshot["tasks"][number]): WorkbenchTaskSummary => ({ + const buildTaskSummary = (task: TaskWorkspaceSnapshot["tasks"][number]): WorkspaceTaskSummary => ({ id: task.id, repoId: task.repoId, title: task.title, @@ -173,6 +175,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back updatedAtMs: task.updatedAtMs, branch: task.branch, pullRequest: task.pullRequest, + activeSessionId: task.activeSessionId ?? task.sessions[0]?.id ?? 
null, sessionsSummary: task.sessions.map((tab) => ({ id: tab.id, sessionId: tab.sessionId, @@ -187,16 +190,9 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back })), }); - const buildTaskDetail = (task: TaskWorkbenchSnapshot["tasks"][number]): WorkbenchTaskDetail => ({ + const buildTaskDetail = (task: TaskWorkspaceSnapshot["tasks"][number]): WorkspaceTaskDetail => ({ ...buildTaskSummary(task), task: task.title, - agentType: task.sessions[0]?.agent === "Codex" ? "codex" : "claude", - runtimeStatus: toTaskStatus(task.status === "archived" ? "archived" : "running", task.status === "archived"), - statusMessage: task.status === "archived" ? "archived" : "mock sandbox ready", - activeSessionId: task.sessions[0]?.sessionId ?? null, - diffStat: task.fileChanges.length > 0 ? `+${task.fileChanges.length}/-${task.fileChanges.length}` : "+0/-0", - prUrl: task.pullRequest ? `https://example.test/pr/${task.pullRequest.number}` : null, - reviewStatus: null, fileChanges: task.fileChanges, diffs: task.diffs, fileTree: task.fileTree, @@ -211,7 +207,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back activeSandboxId: task.id, }); - const buildSessionDetail = (task: TaskWorkbenchSnapshot["tasks"][number], sessionId: string): WorkbenchSessionDetail => { + const buildSessionDetail = (task: TaskWorkspaceSnapshot["tasks"][number], sessionId: string): WorkspaceSessionDetail => { const tab = task.sessions.find((candidate) => candidate.id === sessionId); if (!tab) { throw new Error(`Unknown mock session ${sessionId} for task ${task.id}`); @@ -232,10 +228,24 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back }; const buildOrganizationSummary = (): OrganizationSummarySnapshot => { - const snapshot = workbench.getSnapshot(); + const snapshot = workspace.getSnapshot(); const taskSummaries = snapshot.tasks.map(buildTaskSummary); return { organizationId: defaultOrganizationId, + github: { + 
connectedAccount: "mock", + installationStatus: "connected", + syncStatus: "synced", + importedRepoCount: snapshot.repos.length, + lastSyncLabel: "Synced just now", + lastSyncAt: nowMs(), + lastWebhookAt: null, + lastWebhookEvent: "", + syncGeneration: 1, + syncPhase: null, + processedRepositoryCount: snapshot.repos.length, + totalRepositoryCount: snapshot.repos.length, + }, repos: snapshot.repos.map((repo) => { const repoTasks = taskSummaries.filter((task) => task.repoId === repo.id); return { @@ -246,7 +256,6 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back }; }), taskSummaries, - openPullRequests: [], }; }; @@ -256,20 +265,16 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back `sandbox:${organizationId}:${sandboxProviderId}:${sandboxId}`; const emitOrganizationSnapshot = (): void => { - const summary = buildOrganizationSummary(); - const latestTask = [...summary.taskSummaries].sort((left, right) => right.updatedAtMs - left.updatedAtMs)[0] ?? null; - if (latestTask) { - emitConnectionEvent(organizationScope(defaultOrganizationId), "organizationUpdated", { - type: "taskSummaryUpdated", - taskSummary: latestTask, - } satisfies OrganizationEvent); - } + emitConnectionEvent(organizationScope(defaultOrganizationId), "organizationUpdated", { + type: "organizationUpdated", + snapshot: buildOrganizationSummary(), + } satisfies OrganizationEvent); }; const emitTaskUpdate = (taskId: string): void => { const task = requireTask(taskId); emitConnectionEvent(taskScope(defaultOrganizationId, task.repoId, task.id), "taskUpdated", { - type: "taskDetailUpdated", + type: "taskUpdated", detail: buildTaskDetail(task), } satisfies TaskEvent); }; @@ -303,9 +308,8 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back task: task.title, sandboxProviderId: "local", status: toTaskStatus(archived ? "archived" : "running", archived), - statusMessage: archived ? 
"archived" : "mock sandbox ready", + pullRequest: null, activeSandboxId: task.id, - activeSessionId: task.sessions[0]?.sessionId ?? null, sandboxes: [ { sandboxId: task.id, @@ -317,17 +321,6 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back updatedAt: task.updatedAtMs, }, ], - agentType: task.sessions[0]?.agent === "Codex" ? "codex" : "claude", - prSubmitted: Boolean(task.pullRequest), - diffStat: task.fileChanges.length > 0 ? `+${task.fileChanges.length}/-${task.fileChanges.length}` : "+0/-0", - prUrl: task.pullRequest ? `https://example.test/pr/${task.pullRequest.number}` : null, - prAuthor: task.pullRequest ? "mock" : null, - ciStatus: null, - reviewStatus: null, - reviewer: null, - conflictsWithMain: "0", - hasUnpushed: task.fileChanges.length > 0 ? "1" : "0", - parentBranch: null, createdAt: task.updatedAtMs, updatedAt: task.updatedAtMs, }; @@ -400,6 +393,10 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back return unsupportedAppSnapshot(); }, + async setAppDefaultModel(): Promise { + return unsupportedAppSnapshot(); + }, + async updateAppOrganizationProfile(): Promise { return unsupportedAppSnapshot(); }, @@ -433,7 +430,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back }, async listRepos(_organizationId: string): Promise { - return workbench.getSnapshot().repos.map((repo) => ({ + return workspace.getSnapshot().repos.map((repo) => ({ organizationId: defaultOrganizationId, repoId: repo.id, remoteUrl: mockRepoRemote(repo.label), @@ -447,7 +444,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back }, async listTasks(_organizationId: string, repoId?: string): Promise { - return workbench + return workspace .getSnapshot() .tasks.filter((task) => !repoId || task.repoId === repoId) .map((task) => ({ @@ -457,6 +454,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back branchName: task.branch, title: 
task.title, status: task.status === "archived" ? "archived" : "running", + pullRequest: null, updatedAt: task.updatedAtMs, })); }, @@ -464,7 +462,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back async getRepoOverview(_organizationId: string, _repoId: string): Promise { notSupported("getRepoOverview"); }, - async getTask(_organizationId: string, taskId: string): Promise { + async getTask(_organizationId: string, _repoId: string, taskId: string): Promise { return buildTaskRecord(taskId); }, @@ -472,7 +470,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back return []; }, - async switchTask(_organizationId: string, taskId: string): Promise { + async switchTask(_organizationId: string, _repoId: string, taskId: string): Promise { return { organizationId: defaultOrganizationId, taskId, @@ -481,14 +479,14 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back }; }, - async attachTask(_organizationId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> { + async attachTask(_organizationId: string, _repoId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> { return { target: `mock://${taskId}`, sessionId: requireTask(taskId).sessions[0]?.sessionId ?? 
null, }; }, - async runAction(_organizationId: string, _taskId: string): Promise { + async runAction(_organizationId: string, _repoId: string, _taskId: string): Promise { notSupported("runAction"); }, @@ -637,28 +635,32 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back return { endpoint: "mock://terminal-unavailable" }; }, + async getSandboxWorkspaceModelGroups(_organizationId: string, _sandboxProviderId: SandboxProviderId, _sandboxId: string): Promise { + return DEFAULT_WORKSPACE_MODEL_GROUPS; + }, + async getOrganizationSummary(): Promise { return buildOrganizationSummary(); }, - async getTaskDetail(_organizationId: string, _repoId: string, taskId: string): Promise { + async getTaskDetail(_organizationId: string, _repoId: string, taskId: string): Promise { return buildTaskDetail(requireTask(taskId)); }, - async getSessionDetail(_organizationId: string, _repoId: string, taskId: string, sessionId: string): Promise { + async getSessionDetail(_organizationId: string, _repoId: string, taskId: string, sessionId: string): Promise { return buildSessionDetail(requireTask(taskId), sessionId); }, - async getWorkbench(): Promise { - return workbench.getSnapshot(); + async getWorkspace(): Promise { + return workspace.getSnapshot(); }, - subscribeWorkbench(_organizationId: string, listener: () => void): () => void { - return workbench.subscribe(listener); + subscribeWorkspace(_organizationId: string, listener: () => void): () => void { + return workspace.subscribe(listener); }, - async createWorkbenchTask(_organizationId: string, input: TaskWorkbenchCreateTaskInput): Promise { - const created = await workbench.createTask(input); + async createWorkspaceTask(_organizationId: string, input: TaskWorkspaceCreateTaskInput): Promise { + const created = await workspace.createTask(input); emitOrganizationSnapshot(); emitTaskUpdate(created.taskId); if (created.sessionId) { @@ -667,99 +669,95 @@ export function 
createMockBackendClient(defaultOrganizationId = "default"): Back return created; }, - async markWorkbenchUnread(_organizationId: string, input: TaskWorkbenchSelectInput): Promise { - await workbench.markTaskUnread(input); + async markWorkspaceUnread(_organizationId: string, input: TaskWorkspaceSelectInput): Promise { + await workspace.markTaskUnread(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, - async renameWorkbenchTask(_organizationId: string, input: TaskWorkbenchRenameInput): Promise { - await workbench.renameTask(input); + async renameWorkspaceTask(_organizationId: string, input: TaskWorkspaceRenameInput): Promise { + await workspace.renameTask(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, - async renameWorkbenchBranch(_organizationId: string, input: TaskWorkbenchRenameInput): Promise { - await workbench.renameBranch(input); - emitOrganizationSnapshot(); - emitTaskUpdate(input.taskId); - }, - - async createWorkbenchSession(_organizationId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }> { - const created = await workbench.addSession(input); + async createWorkspaceSession(_organizationId: string, input: TaskWorkspaceSelectInput & { model?: string }): Promise<{ sessionId: string }> { + const created = await workspace.addSession(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); emitSessionUpdate(input.taskId, created.sessionId); return created; }, - async renameWorkbenchSession(_organizationId: string, input: TaskWorkbenchRenameSessionInput): Promise { - await workbench.renameSession(input); + async renameWorkspaceSession(_organizationId: string, input: TaskWorkspaceRenameSessionInput): Promise { + await workspace.renameSession(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); emitSessionUpdate(input.taskId, input.sessionId); }, - async setWorkbenchSessionUnread(_organizationId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise { - 
await workbench.setSessionUnread(input); + async selectWorkspaceSession(_organizationId: string, input: TaskWorkspaceSessionInput): Promise { + await workspace.selectSession(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); emitSessionUpdate(input.taskId, input.sessionId); }, - async updateWorkbenchDraft(_organizationId: string, input: TaskWorkbenchUpdateDraftInput): Promise { - await workbench.updateDraft(input); + async setWorkspaceSessionUnread(_organizationId: string, input: TaskWorkspaceSetSessionUnreadInput): Promise { + await workspace.setSessionUnread(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); emitSessionUpdate(input.taskId, input.sessionId); }, - async changeWorkbenchModel(_organizationId: string, input: TaskWorkbenchChangeModelInput): Promise { - await workbench.changeModel(input); + async updateWorkspaceDraft(_organizationId: string, input: TaskWorkspaceUpdateDraftInput): Promise { + await workspace.updateDraft(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); emitSessionUpdate(input.taskId, input.sessionId); }, - async sendWorkbenchMessage(_organizationId: string, input: TaskWorkbenchSendMessageInput): Promise { - await workbench.sendMessage(input); + async changeWorkspaceModel(_organizationId: string, input: TaskWorkspaceChangeModelInput): Promise { + await workspace.changeModel(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); emitSessionUpdate(input.taskId, input.sessionId); }, - async stopWorkbenchSession(_organizationId: string, input: TaskWorkbenchSessionInput): Promise { - await workbench.stopAgent(input); + async sendWorkspaceMessage(_organizationId: string, input: TaskWorkspaceSendMessageInput): Promise { + await workspace.sendMessage(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); emitSessionUpdate(input.taskId, input.sessionId); }, - async closeWorkbenchSession(_organizationId: string, input: TaskWorkbenchSessionInput): Promise { - await 
workbench.closeSession(input); + async stopWorkspaceSession(_organizationId: string, input: TaskWorkspaceSessionInput): Promise { + await workspace.stopAgent(input); + emitOrganizationSnapshot(); + emitTaskUpdate(input.taskId); + emitSessionUpdate(input.taskId, input.sessionId); + }, + + async closeWorkspaceSession(_organizationId: string, input: TaskWorkspaceSessionInput): Promise { + await workspace.closeSession(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, - async publishWorkbenchPr(_organizationId: string, input: TaskWorkbenchSelectInput): Promise { - await workbench.publishPr(input); + async publishWorkspacePr(_organizationId: string, input: TaskWorkspaceSelectInput): Promise { + await workspace.publishPr(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, - async revertWorkbenchFile(_organizationId: string, input: TaskWorkbenchDiffInput): Promise { - await workbench.revertFile(input); + async revertWorkspaceFile(_organizationId: string, input: TaskWorkspaceDiffInput): Promise { + await workspace.revertFile(input); emitOrganizationSnapshot(); emitTaskUpdate(input.taskId); }, - async reloadGithubOrganization(): Promise {}, - - async reloadGithubPullRequests(): Promise {}, - - async reloadGithubRepository(): Promise {}, - - async reloadGithubPullRequest(): Promise {}, + async adminReloadGithubOrganization(): Promise {}, + async adminReloadGithubRepository(): Promise {}, async health(): Promise<{ ok: true }> { return { ok: true }; diff --git a/foundry/packages/client/src/mock/workbench-client.ts b/foundry/packages/client/src/mock/workspace-client.ts similarity index 76% rename from foundry/packages/client/src/mock/workbench-client.ts rename to foundry/packages/client/src/mock/workspace-client.ts index fbed2d0..c51b2e8 100644 --- a/foundry/packages/client/src/mock/workbench-client.ts +++ b/foundry/packages/client/src/mock/workspace-client.ts @@ -1,33 +1,34 @@ import { MODEL_GROUPS, buildInitialMockLayoutViewModel, - 
groupWorkbenchRepositories, + groupWorkspaceRepositories, nowMs, providerAgent, randomReply, removeFileTreePath, slugify, uid, -} from "../workbench-model.js"; +} from "../workspace-model.js"; +import { DEFAULT_WORKSPACE_MODEL_ID, workspaceAgentForModel } from "@sandbox-agent/foundry-shared"; import type { - TaskWorkbenchAddSessionResponse, - TaskWorkbenchChangeModelInput, - TaskWorkbenchCreateTaskInput, - TaskWorkbenchCreateTaskResponse, - TaskWorkbenchDiffInput, - TaskWorkbenchRenameInput, - TaskWorkbenchRenameSessionInput, - TaskWorkbenchSelectInput, - TaskWorkbenchSetSessionUnreadInput, - TaskWorkbenchSendMessageInput, - TaskWorkbenchSnapshot, - TaskWorkbenchSessionInput, - TaskWorkbenchUpdateDraftInput, - WorkbenchSession as AgentSession, - WorkbenchTask as Task, - WorkbenchTranscriptEvent as TranscriptEvent, + TaskWorkspaceAddSessionResponse, + TaskWorkspaceChangeModelInput, + TaskWorkspaceCreateTaskInput, + TaskWorkspaceCreateTaskResponse, + TaskWorkspaceDiffInput, + TaskWorkspaceRenameInput, + TaskWorkspaceRenameSessionInput, + TaskWorkspaceSelectInput, + TaskWorkspaceSetSessionUnreadInput, + TaskWorkspaceSendMessageInput, + TaskWorkspaceSnapshot, + TaskWorkspaceSessionInput, + TaskWorkspaceUpdateDraftInput, + WorkspaceSession as AgentSession, + WorkspaceTask as Task, + WorkspaceTranscriptEvent as TranscriptEvent, } from "@sandbox-agent/foundry-shared"; -import type { TaskWorkbenchClient } from "../workbench-client.js"; +import type { TaskWorkspaceClient } from "../workspace-client.js"; function buildTranscriptEvent(params: { sessionId: string; @@ -47,12 +48,12 @@ function buildTranscriptEvent(params: { }; } -class MockWorkbenchStore implements TaskWorkbenchClient { +class MockWorkspaceStore implements TaskWorkspaceClient { private snapshot = buildInitialMockLayoutViewModel(); private listeners = new Set<() => void>(); private pendingTimers = new Map>(); - getSnapshot(): TaskWorkbenchSnapshot { + getSnapshot(): TaskWorkspaceSnapshot { return this.snapshot; 
} @@ -63,7 +64,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { }; } - async createTask(input: TaskWorkbenchCreateTaskInput): Promise { + async createTask(input: TaskWorkspaceCreateTaskInput): Promise { const id = uid(); const sessionId = `session-${id}`; const repo = this.snapshot.repos.find((candidate) => candidate.id === input.repoId); @@ -74,20 +75,19 @@ class MockWorkbenchStore implements TaskWorkbenchClient { id, repoId: repo.id, title: input.title?.trim() || "New Task", - status: "new", + status: "init_enqueue_provision", repoName: repo.label, updatedAtMs: nowMs(), branch: input.branch?.trim() || null, pullRequest: null, + activeSessionId: sessionId, sessions: [ { id: sessionId, sessionId: sessionId, sessionName: "Session 1", - agent: providerAgent( - MODEL_GROUPS.find((group) => group.models.some((model) => model.id === (input.model ?? "claude-sonnet-4")))?.provider ?? "Claude", - ), - model: input.model ?? "claude-sonnet-4", + agent: workspaceAgentForModel(input.model ?? DEFAULT_WORKSPACE_MODEL_ID, MODEL_GROUPS), + model: input.model ?? DEFAULT_WORKSPACE_MODEL_ID, status: "idle", thinkingSinceMs: null, unread: false, @@ -109,7 +109,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { return { taskId: id, sessionId }; } - async markTaskUnread(input: TaskWorkbenchSelectInput): Promise { + async markTaskUnread(input: TaskWorkspaceSelectInput): Promise { this.updateTask(input.taskId, (task) => { const targetSession = task.sessions[task.sessions.length - 1] ?? 
null; if (!targetSession) { @@ -123,7 +123,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { }); } - async renameTask(input: TaskWorkbenchRenameInput): Promise { + async renameTask(input: TaskWorkspaceRenameInput): Promise { const value = input.value.trim(); if (!value) { throw new Error(`Cannot rename task ${input.taskId} to an empty title`); @@ -131,28 +131,32 @@ class MockWorkbenchStore implements TaskWorkbenchClient { this.updateTask(input.taskId, (task) => ({ ...task, title: value, updatedAtMs: nowMs() })); } - async renameBranch(input: TaskWorkbenchRenameInput): Promise { - const value = input.value.trim(); - if (!value) { - throw new Error(`Cannot rename branch for task ${input.taskId} to an empty value`); - } - this.updateTask(input.taskId, (task) => ({ ...task, branch: value, updatedAtMs: nowMs() })); - } - - async archiveTask(input: TaskWorkbenchSelectInput): Promise { + async archiveTask(input: TaskWorkspaceSelectInput): Promise { this.updateTask(input.taskId, (task) => ({ ...task, status: "archived", updatedAtMs: nowMs() })); } - async publishPr(input: TaskWorkbenchSelectInput): Promise { + async publishPr(input: TaskWorkspaceSelectInput): Promise { const nextPrNumber = Math.max(0, ...this.snapshot.tasks.map((task) => task.pullRequest?.number ?? 0)) + 1; this.updateTask(input.taskId, (task) => ({ ...task, updatedAtMs: nowMs(), - pullRequest: { number: nextPrNumber, status: "ready" }, + pullRequest: { + number: nextPrNumber, + status: "ready", + title: task.title, + state: "open", + url: `https://example.test/pr/${nextPrNumber}`, + headRefName: task.branch ?? 
`task/${task.id}`, + baseRefName: "main", + repoFullName: task.repoName, + authorLogin: "mock", + isDraft: false, + updatedAtMs: nowMs(), + }, })); } - async revertFile(input: TaskWorkbenchDiffInput): Promise { + async revertFile(input: TaskWorkspaceDiffInput): Promise { this.updateTask(input.taskId, (task) => { const file = task.fileChanges.find((entry) => entry.path === input.path); const nextDiffs = { ...task.diffs }; @@ -167,7 +171,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { }); } - async updateDraft(input: TaskWorkbenchUpdateDraftInput): Promise { + async updateDraft(input: TaskWorkspaceUpdateDraftInput): Promise { this.assertSession(input.taskId, input.sessionId); this.updateTask(input.taskId, (task) => ({ ...task, @@ -187,7 +191,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { })); } - async sendMessage(input: TaskWorkbenchSendMessageInput): Promise { + async sendMessage(input: TaskWorkspaceSendMessageInput): Promise { const text = input.text.trim(); if (!text) { throw new Error(`Cannot send an empty mock prompt for task ${input.taskId}`); @@ -197,7 +201,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { const startedAtMs = nowMs(); this.updateTask(input.taskId, (currentTask) => { - const isFirstOnTask = currentTask.status === "new"; + const isFirstOnTask = String(currentTask.status).startsWith("init_"); const newTitle = isFirstOnTask ? (text.length > 50 ? `${text.slice(0, 47)}...` : text) : currentTask.title; const newBranch = isFirstOnTask ? 
`feat/${slugify(newTitle)}` : currentTask.branch; const userMessageLines = [text, ...input.attachments.map((attachment) => `@ ${attachment.filePath}:${attachment.lineNumber}`)]; @@ -288,7 +292,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { this.pendingTimers.set(input.sessionId, timer); } - async stopAgent(input: TaskWorkbenchSessionInput): Promise { + async stopAgent(input: TaskWorkspaceSessionInput): Promise { this.assertSession(input.taskId, input.sessionId); const existing = this.pendingTimers.get(input.sessionId); if (existing) { @@ -311,14 +315,22 @@ class MockWorkbenchStore implements TaskWorkbenchClient { }); } - async setSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise { + async selectSession(input: TaskWorkspaceSessionInput): Promise { + this.assertSession(input.taskId, input.sessionId); + this.updateTask(input.taskId, (currentTask) => ({ + ...currentTask, + activeSessionId: input.sessionId, + })); + } + + async setSessionUnread(input: TaskWorkspaceSetSessionUnreadInput): Promise { this.updateTask(input.taskId, (currentTask) => ({ ...currentTask, sessions: currentTask.sessions.map((candidate) => (candidate.id === input.sessionId ? 
{ ...candidate, unread: input.unread } : candidate)), })); } - async renameSession(input: TaskWorkbenchRenameSessionInput): Promise { + async renameSession(input: TaskWorkspaceRenameSessionInput): Promise { const title = input.title.trim(); if (!title) { throw new Error(`Cannot rename session ${input.sessionId} to an empty title`); @@ -329,7 +341,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient { })); } - async closeSession(input: TaskWorkbenchSessionInput): Promise { + async closeSession(input: TaskWorkspaceSessionInput): Promise { this.updateTask(input.taskId, (currentTask) => { if (currentTask.sessions.length <= 1) { return currentTask; @@ -337,12 +349,13 @@ class MockWorkbenchStore implements TaskWorkbenchClient { return { ...currentTask, + activeSessionId: currentTask.activeSessionId === input.sessionId ? (currentTask.sessions.find((candidate) => candidate.id !== input.sessionId)?.id ?? null) : currentTask.activeSessionId, sessions: currentTask.sessions.filter((candidate) => candidate.id !== input.sessionId), }; }); } - async addSession(input: TaskWorkbenchSelectInput): Promise { + async addSession(input: TaskWorkspaceSelectInput): Promise { this.assertTask(input.taskId); const nextSessionId = uid(); const nextSession: AgentSession = { @@ -350,8 +363,8 @@ class MockWorkbenchStore implements TaskWorkbenchClient { sessionId: nextSessionId, sandboxSessionId: null, sessionName: `Session ${this.requireTask(input.taskId).sessions.length + 1}`, - agent: "Claude", - model: "claude-sonnet-4", + agent: workspaceAgentForModel(DEFAULT_WORKSPACE_MODEL_ID, MODEL_GROUPS), + model: DEFAULT_WORKSPACE_MODEL_ID, status: "idle", thinkingSinceMs: null, unread: false, @@ -363,12 +376,13 @@ class MockWorkbenchStore implements TaskWorkbenchClient { this.updateTask(input.taskId, (currentTask) => ({ ...currentTask, updatedAtMs: nowMs(), + activeSessionId: nextSession.id, sessions: [...currentTask.sessions, nextSession], })); return { sessionId: nextSession.id }; } - async 
changeModel(input: TaskWorkbenchChangeModelInput): Promise { + async changeModel(input: TaskWorkspaceChangeModelInput): Promise { const group = MODEL_GROUPS.find((candidate) => candidate.models.some((entry) => entry.id === input.model)); if (!group) { throw new Error(`Unable to resolve model provider for ${input.model}`); @@ -377,16 +391,16 @@ class MockWorkbenchStore implements TaskWorkbenchClient { this.updateTask(input.taskId, (currentTask) => ({ ...currentTask, sessions: currentTask.sessions.map((candidate) => - candidate.id === input.sessionId ? { ...candidate, model: input.model, agent: providerAgent(group.provider) } : candidate, + candidate.id === input.sessionId ? { ...candidate, model: input.model, agent: workspaceAgentForModel(input.model, MODEL_GROUPS) } : candidate, ), })); } - private updateState(updater: (current: TaskWorkbenchSnapshot) => TaskWorkbenchSnapshot): void { + private updateState(updater: (current: TaskWorkspaceSnapshot) => TaskWorkspaceSnapshot): void { const nextSnapshot = updater(this.snapshot); this.snapshot = { ...nextSnapshot, - repositories: groupWorkbenchRepositories(nextSnapshot.repos, nextSnapshot.tasks), + repositories: groupWorkspaceRepositories(nextSnapshot.repos, nextSnapshot.tasks), }; this.notify(); } @@ -436,11 +450,11 @@ function candidateEventIndex(task: Task, sessionId: string): number { return (session?.transcript.length ?? 
0) + 1; } -let sharedMockWorkbenchClient: TaskWorkbenchClient | null = null; +let sharedMockWorkspaceClient: TaskWorkspaceClient | null = null; -export function getSharedMockWorkbenchClient(): TaskWorkbenchClient { - if (!sharedMockWorkbenchClient) { - sharedMockWorkbenchClient = new MockWorkbenchStore(); +export function getSharedMockWorkspaceClient(): TaskWorkspaceClient { + if (!sharedMockWorkspaceClient) { + sharedMockWorkspaceClient = new MockWorkspaceStore(); } - return sharedMockWorkbenchClient; + return sharedMockWorkspaceClient; } diff --git a/foundry/packages/client/src/remote/app-client.ts b/foundry/packages/client/src/remote/app-client.ts index 6daa2c5..f1cb908 100644 --- a/foundry/packages/client/src/remote/app-client.ts +++ b/foundry/packages/client/src/remote/app-client.ts @@ -1,4 +1,4 @@ -import type { FoundryAppSnapshot, FoundryBillingPlanId, UpdateFoundryOrganizationProfileInput } from "@sandbox-agent/foundry-shared"; +import type { FoundryAppSnapshot, FoundryBillingPlanId, UpdateFoundryOrganizationProfileInput, WorkspaceModelId } from "@sandbox-agent/foundry-shared"; import type { BackendClient } from "../backend-client.js"; import type { FoundryAppClient } from "../app-client.js"; @@ -72,6 +72,11 @@ class RemoteFoundryAppStore implements FoundryAppClient { this.notify(); } + async setDefaultModel(model: WorkspaceModelId): Promise { + this.snapshot = await this.backend.setAppDefaultModel(model); + this.notify(); + } + async updateOrganizationProfile(input: UpdateFoundryOrganizationProfileInput): Promise { this.snapshot = await this.backend.updateAppOrganizationProfile(input); this.notify(); diff --git a/foundry/packages/client/src/remote/workbench-client.ts b/foundry/packages/client/src/remote/workbench-client.ts deleted file mode 100644 index 0dcbecb..0000000 --- a/foundry/packages/client/src/remote/workbench-client.ts +++ /dev/null @@ -1,198 +0,0 @@ -import type { - TaskWorkbenchAddSessionResponse, - TaskWorkbenchChangeModelInput, - 
TaskWorkbenchCreateTaskInput, - TaskWorkbenchCreateTaskResponse, - TaskWorkbenchDiffInput, - TaskWorkbenchRenameInput, - TaskWorkbenchRenameSessionInput, - TaskWorkbenchSelectInput, - TaskWorkbenchSetSessionUnreadInput, - TaskWorkbenchSendMessageInput, - TaskWorkbenchSnapshot, - TaskWorkbenchSessionInput, - TaskWorkbenchUpdateDraftInput, -} from "@sandbox-agent/foundry-shared"; -import type { BackendClient } from "../backend-client.js"; -import { groupWorkbenchRepositories } from "../workbench-model.js"; -import type { TaskWorkbenchClient } from "../workbench-client.js"; - -export interface RemoteWorkbenchClientOptions { - backend: BackendClient; - organizationId: string; -} - -class RemoteWorkbenchStore implements TaskWorkbenchClient { - private readonly backend: BackendClient; - private readonly organizationId: string; - private snapshot: TaskWorkbenchSnapshot; - private readonly listeners = new Set<() => void>(); - private unsubscribeWorkbench: (() => void) | null = null; - private refreshPromise: Promise | null = null; - private refreshRetryTimeout: ReturnType | null = null; - - constructor(options: RemoteWorkbenchClientOptions) { - this.backend = options.backend; - this.organizationId = options.organizationId; - this.snapshot = { - organizationId: options.organizationId, - repos: [], - repositories: [], - tasks: [], - }; - } - - getSnapshot(): TaskWorkbenchSnapshot { - return this.snapshot; - } - - subscribe(listener: () => void): () => void { - this.listeners.add(listener); - this.ensureStarted(); - return () => { - this.listeners.delete(listener); - if (this.listeners.size === 0 && this.refreshRetryTimeout) { - clearTimeout(this.refreshRetryTimeout); - this.refreshRetryTimeout = null; - } - if (this.listeners.size === 0 && this.unsubscribeWorkbench) { - this.unsubscribeWorkbench(); - this.unsubscribeWorkbench = null; - } - }; - } - - async createTask(input: TaskWorkbenchCreateTaskInput): Promise { - const created = await 
this.backend.createWorkbenchTask(this.organizationId, input); - await this.refresh(); - return created; - } - - async markTaskUnread(input: TaskWorkbenchSelectInput): Promise { - await this.backend.markWorkbenchUnread(this.organizationId, input); - await this.refresh(); - } - - async renameTask(input: TaskWorkbenchRenameInput): Promise { - await this.backend.renameWorkbenchTask(this.organizationId, input); - await this.refresh(); - } - - async renameBranch(input: TaskWorkbenchRenameInput): Promise { - await this.backend.renameWorkbenchBranch(this.organizationId, input); - await this.refresh(); - } - - async archiveTask(input: TaskWorkbenchSelectInput): Promise { - await this.backend.runAction(this.organizationId, input.taskId, "archive"); - await this.refresh(); - } - - async publishPr(input: TaskWorkbenchSelectInput): Promise { - await this.backend.publishWorkbenchPr(this.organizationId, input); - await this.refresh(); - } - - async revertFile(input: TaskWorkbenchDiffInput): Promise { - await this.backend.revertWorkbenchFile(this.organizationId, input); - await this.refresh(); - } - - async updateDraft(input: TaskWorkbenchUpdateDraftInput): Promise { - await this.backend.updateWorkbenchDraft(this.organizationId, input); - // Skip refresh — the server broadcast will trigger it, and the frontend - // holds local draft state to avoid the round-trip overwriting user input. 
- } - - async sendMessage(input: TaskWorkbenchSendMessageInput): Promise { - await this.backend.sendWorkbenchMessage(this.organizationId, input); - await this.refresh(); - } - - async stopAgent(input: TaskWorkbenchSessionInput): Promise { - await this.backend.stopWorkbenchSession(this.organizationId, input); - await this.refresh(); - } - - async setSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise { - await this.backend.setWorkbenchSessionUnread(this.organizationId, input); - await this.refresh(); - } - - async renameSession(input: TaskWorkbenchRenameSessionInput): Promise { - await this.backend.renameWorkbenchSession(this.organizationId, input); - await this.refresh(); - } - - async closeSession(input: TaskWorkbenchSessionInput): Promise { - await this.backend.closeWorkbenchSession(this.organizationId, input); - await this.refresh(); - } - - async addSession(input: TaskWorkbenchSelectInput): Promise { - const created = await this.backend.createWorkbenchSession(this.organizationId, input); - await this.refresh(); - return created; - } - - async changeModel(input: TaskWorkbenchChangeModelInput): Promise { - await this.backend.changeWorkbenchModel(this.organizationId, input); - await this.refresh(); - } - - private ensureStarted(): void { - if (!this.unsubscribeWorkbench) { - this.unsubscribeWorkbench = this.backend.subscribeWorkbench(this.organizationId, () => { - void this.refresh().catch(() => { - this.scheduleRefreshRetry(); - }); - }); - } - void this.refresh().catch(() => { - this.scheduleRefreshRetry(); - }); - } - - private scheduleRefreshRetry(): void { - if (this.refreshRetryTimeout || this.listeners.size === 0) { - return; - } - - this.refreshRetryTimeout = setTimeout(() => { - this.refreshRetryTimeout = null; - void this.refresh().catch(() => { - this.scheduleRefreshRetry(); - }); - }, 1_000); - } - - private async refresh(): Promise { - if (this.refreshPromise) { - await this.refreshPromise; - return; - } - - this.refreshPromise = (async 
() => { - const nextSnapshot = await this.backend.getWorkbench(this.organizationId); - if (this.refreshRetryTimeout) { - clearTimeout(this.refreshRetryTimeout); - this.refreshRetryTimeout = null; - } - this.snapshot = { - ...nextSnapshot, - repositories: nextSnapshot.repositories ?? groupWorkbenchRepositories(nextSnapshot.repos, nextSnapshot.tasks), - }; - for (const listener of [...this.listeners]) { - listener(); - } - })().finally(() => { - this.refreshPromise = null; - }); - - await this.refreshPromise; - } -} - -export function createRemoteWorkbenchClient(options: RemoteWorkbenchClientOptions): TaskWorkbenchClient { - return new RemoteWorkbenchStore(options); -} diff --git a/foundry/packages/client/src/remote/workspace-client.ts b/foundry/packages/client/src/remote/workspace-client.ts new file mode 100644 index 0000000..1b6bc8e --- /dev/null +++ b/foundry/packages/client/src/remote/workspace-client.ts @@ -0,0 +1,198 @@ +import type { + TaskWorkspaceAddSessionResponse, + TaskWorkspaceChangeModelInput, + TaskWorkspaceCreateTaskInput, + TaskWorkspaceCreateTaskResponse, + TaskWorkspaceDiffInput, + TaskWorkspaceRenameInput, + TaskWorkspaceRenameSessionInput, + TaskWorkspaceSelectInput, + TaskWorkspaceSetSessionUnreadInput, + TaskWorkspaceSendMessageInput, + TaskWorkspaceSnapshot, + TaskWorkspaceSessionInput, + TaskWorkspaceUpdateDraftInput, +} from "@sandbox-agent/foundry-shared"; +import type { BackendClient } from "../backend-client.js"; +import { groupWorkspaceRepositories } from "../workspace-model.js"; +import type { TaskWorkspaceClient } from "../workspace-client.js"; + +export interface RemoteWorkspaceClientOptions { + backend: BackendClient; + organizationId: string; +} + +class RemoteWorkspaceStore implements TaskWorkspaceClient { + private readonly backend: BackendClient; + private readonly organizationId: string; + private snapshot: TaskWorkspaceSnapshot; + private readonly listeners = new Set<() => void>(); + private unsubscribeWorkspace: (() => void) | 
null = null; + private refreshPromise: Promise | null = null; + private refreshRetryTimeout: ReturnType | null = null; + + constructor(options: RemoteWorkspaceClientOptions) { + this.backend = options.backend; + this.organizationId = options.organizationId; + this.snapshot = { + organizationId: options.organizationId, + repos: [], + repositories: [], + tasks: [], + }; + } + + getSnapshot(): TaskWorkspaceSnapshot { + return this.snapshot; + } + + subscribe(listener: () => void): () => void { + this.listeners.add(listener); + this.ensureStarted(); + return () => { + this.listeners.delete(listener); + if (this.listeners.size === 0 && this.refreshRetryTimeout) { + clearTimeout(this.refreshRetryTimeout); + this.refreshRetryTimeout = null; + } + if (this.listeners.size === 0 && this.unsubscribeWorkspace) { + this.unsubscribeWorkspace(); + this.unsubscribeWorkspace = null; + } + }; + } + + async createTask(input: TaskWorkspaceCreateTaskInput): Promise { + const created = await this.backend.createWorkspaceTask(this.organizationId, input); + await this.refresh(); + return created; + } + + async markTaskUnread(input: TaskWorkspaceSelectInput): Promise { + await this.backend.markWorkspaceUnread(this.organizationId, input); + await this.refresh(); + } + + async renameTask(input: TaskWorkspaceRenameInput): Promise { + await this.backend.renameWorkspaceTask(this.organizationId, input); + await this.refresh(); + } + + async archiveTask(input: TaskWorkspaceSelectInput): Promise { + await this.backend.runAction(this.organizationId, input.repoId, input.taskId, "archive"); + await this.refresh(); + } + + async publishPr(input: TaskWorkspaceSelectInput): Promise { + await this.backend.publishWorkspacePr(this.organizationId, input); + await this.refresh(); + } + + async revertFile(input: TaskWorkspaceDiffInput): Promise { + await this.backend.revertWorkspaceFile(this.organizationId, input); + await this.refresh(); + } + + async updateDraft(input: TaskWorkspaceUpdateDraftInput): Promise 
{ + await this.backend.updateWorkspaceDraft(this.organizationId, input); + // Skip refresh — the server broadcast will trigger it, and the frontend + // holds local draft state to avoid the round-trip overwriting user input. + } + + async sendMessage(input: TaskWorkspaceSendMessageInput): Promise { + await this.backend.sendWorkspaceMessage(this.organizationId, input); + await this.refresh(); + } + + async stopAgent(input: TaskWorkspaceSessionInput): Promise { + await this.backend.stopWorkspaceSession(this.organizationId, input); + await this.refresh(); + } + + async selectSession(input: TaskWorkspaceSessionInput): Promise { + await this.backend.selectWorkspaceSession(this.organizationId, input); + await this.refresh(); + } + + async setSessionUnread(input: TaskWorkspaceSetSessionUnreadInput): Promise { + await this.backend.setWorkspaceSessionUnread(this.organizationId, input); + await this.refresh(); + } + + async renameSession(input: TaskWorkspaceRenameSessionInput): Promise { + await this.backend.renameWorkspaceSession(this.organizationId, input); + await this.refresh(); + } + + async closeSession(input: TaskWorkspaceSessionInput): Promise { + await this.backend.closeWorkspaceSession(this.organizationId, input); + await this.refresh(); + } + + async addSession(input: TaskWorkspaceSelectInput): Promise { + const created = await this.backend.createWorkspaceSession(this.organizationId, input); + await this.refresh(); + return created; + } + + async changeModel(input: TaskWorkspaceChangeModelInput): Promise { + await this.backend.changeWorkspaceModel(this.organizationId, input); + await this.refresh(); + } + + private ensureStarted(): void { + if (!this.unsubscribeWorkspace) { + this.unsubscribeWorkspace = this.backend.subscribeWorkspace(this.organizationId, () => { + void this.refresh().catch(() => { + this.scheduleRefreshRetry(); + }); + }); + } + void this.refresh().catch(() => { + this.scheduleRefreshRetry(); + }); + } + + private scheduleRefreshRetry(): void { + 
if (this.refreshRetryTimeout || this.listeners.size === 0) { + return; + } + + this.refreshRetryTimeout = setTimeout(() => { + this.refreshRetryTimeout = null; + void this.refresh().catch(() => { + this.scheduleRefreshRetry(); + }); + }, 1_000); + } + + private async refresh(): Promise { + if (this.refreshPromise) { + await this.refreshPromise; + return; + } + + this.refreshPromise = (async () => { + const nextSnapshot = await this.backend.getWorkspace(this.organizationId); + if (this.refreshRetryTimeout) { + clearTimeout(this.refreshRetryTimeout); + this.refreshRetryTimeout = null; + } + this.snapshot = { + ...nextSnapshot, + repositories: nextSnapshot.repositories ?? groupWorkspaceRepositories(nextSnapshot.repos, nextSnapshot.tasks), + }; + for (const listener of [...this.listeners]) { + listener(); + } + })().finally(() => { + this.refreshPromise = null; + }); + + await this.refreshPromise; + } +} + +export function createRemoteWorkspaceClient(options: RemoteWorkspaceClientOptions): TaskWorkspaceClient { + return new RemoteWorkspaceStore(options); +} diff --git a/foundry/packages/client/src/subscription/remote-manager.ts b/foundry/packages/client/src/subscription/remote-manager.ts index 8cb2864..778241f 100644 --- a/foundry/packages/client/src/subscription/remote-manager.ts +++ b/foundry/packages/client/src/subscription/remote-manager.ts @@ -81,6 +81,7 @@ class TopicEntry { private unsubscribeError: (() => void) | null = null; private teardownTimer: ReturnType | null = null; private startPromise: Promise | null = null; + private eventPromise: Promise = Promise.resolve(); private started = false; constructor( @@ -157,12 +158,7 @@ class TopicEntry { try { this.conn = await this.definition.connect(this.backend, this.params); this.unsubscribeEvent = this.conn.on(this.definition.event, (event: TEvent) => { - if (this.data === undefined) { - return; - } - this.data = this.definition.applyEvent(this.data, event); - this.lastRefreshAt = Date.now(); - this.notify(); + 
void this.applyEvent(event); }); this.unsubscribeError = this.conn.onError((error: unknown) => { this.status = "error"; @@ -182,6 +178,33 @@ class TopicEntry { } } + private applyEvent(event: TEvent): Promise { + this.eventPromise = this.eventPromise + .then(async () => { + if (!this.started || this.data === undefined) { + return; + } + + const nextData = await this.definition.applyEvent(this.backend, this.params, this.data, event); + if (!this.started) { + return; + } + + this.data = nextData; + this.status = "connected"; + this.error = null; + this.lastRefreshAt = Date.now(); + this.notify(); + }) + .catch((error) => { + this.status = "error"; + this.error = error instanceof Error ? error : new Error(String(error)); + this.notify(); + }); + + return this.eventPromise; + } + private notify(): void { for (const listener of [...this.listeners]) { listener(); diff --git a/foundry/packages/client/src/subscription/topics.ts b/foundry/packages/client/src/subscription/topics.ts index f6a0acc..bbda118 100644 --- a/foundry/packages/client/src/subscription/topics.ts +++ b/foundry/packages/client/src/subscription/topics.ts @@ -5,8 +5,8 @@ import type { SandboxProcessesEvent, SessionEvent, TaskEvent, - WorkbenchSessionDetail, - WorkbenchTaskDetail, + WorkspaceSessionDetail, + WorkspaceTaskDetail, OrganizationEvent, OrganizationSummarySnapshot, } from "@sandbox-agent/foundry-shared"; @@ -16,15 +16,15 @@ import type { ActorConn, BackendClient, SandboxProcessRecord } from "../backend- * Topic definitions for the subscription manager. * * Each topic describes one actor connection plus one materialized read model. - * Events always carry full replacement payloads for the changed entity so the - * client can replace cached state directly instead of reconstructing patches. + * Some topics can apply broadcast payloads directly, while others refetch + * through BackendClient so auth-scoped state stays user-specific. 
*/ export interface TopicDefinition { key: (params: TParams) => string; event: string; connect: (backend: BackendClient, params: TParams) => Promise; fetchInitial: (backend: BackendClient, params: TParams) => Promise; - applyEvent: (current: TData, event: TEvent) => TData; + applyEvent: (backend: BackendClient, params: TParams, current: TData, event: TEvent) => Promise | TData; } export interface AppTopicParams {} @@ -48,23 +48,13 @@ export interface SandboxProcessesTopicParams { sandboxId: string; } -function upsertById(items: T[], nextItem: T, sort: (left: T, right: T) => number): T[] { - const filtered = items.filter((item) => item.id !== nextItem.id); - return [...filtered, nextItem].sort(sort); -} - -function upsertByPrId(items: T[], nextItem: T, sort: (left: T, right: T) => number): T[] { - const filtered = items.filter((item) => item.prId !== nextItem.prId); - return [...filtered, nextItem].sort(sort); -} - export const topicDefinitions = { app: { key: () => "app", event: "appUpdated", connect: (backend: BackendClient, _params: AppTopicParams) => backend.connectOrganization("app"), fetchInitial: (backend: BackendClient, _params: AppTopicParams) => backend.getAppSnapshot(), - applyEvent: (_current: FoundryAppSnapshot, event: AppEvent) => event.snapshot, + applyEvent: (_backend: BackendClient, _params: AppTopicParams, _current: FoundryAppSnapshot, event: AppEvent) => event.snapshot, } satisfies TopicDefinition, organization: { @@ -72,41 +62,8 @@ export const topicDefinitions = { event: "organizationUpdated", connect: (backend: BackendClient, params: OrganizationTopicParams) => backend.connectOrganization(params.organizationId), fetchInitial: (backend: BackendClient, params: OrganizationTopicParams) => backend.getOrganizationSummary(params.organizationId), - applyEvent: (current: OrganizationSummarySnapshot, event: OrganizationEvent) => { - switch (event.type) { - case "taskSummaryUpdated": - return { - ...current, - taskSummaries: 
upsertById(current.taskSummaries, event.taskSummary, (left, right) => right.updatedAtMs - left.updatedAtMs), - }; - case "taskRemoved": - return { - ...current, - taskSummaries: current.taskSummaries.filter((task) => task.id !== event.taskId), - }; - case "repoAdded": - case "repoUpdated": - return { - ...current, - repos: upsertById(current.repos, event.repo, (left, right) => right.latestActivityMs - left.latestActivityMs), - }; - case "repoRemoved": - return { - ...current, - repos: current.repos.filter((repo) => repo.id !== event.repoId), - }; - case "pullRequestUpdated": - return { - ...current, - openPullRequests: upsertByPrId(current.openPullRequests, event.pullRequest, (left, right) => right.updatedAtMs - left.updatedAtMs), - }; - case "pullRequestRemoved": - return { - ...current, - openPullRequests: current.openPullRequests.filter((pullRequest) => pullRequest.prId !== event.prId), - }; - } - }, + applyEvent: (_backend: BackendClient, _params: OrganizationTopicParams, _current: OrganizationSummarySnapshot, event: OrganizationEvent) => + event.snapshot, } satisfies TopicDefinition, task: { @@ -114,8 +71,9 @@ export const topicDefinitions = { event: "taskUpdated", connect: (backend: BackendClient, params: TaskTopicParams) => backend.connectTask(params.organizationId, params.repoId, params.taskId), fetchInitial: (backend: BackendClient, params: TaskTopicParams) => backend.getTaskDetail(params.organizationId, params.repoId, params.taskId), - applyEvent: (_current: WorkbenchTaskDetail, event: TaskEvent) => event.detail, - } satisfies TopicDefinition, + applyEvent: (backend: BackendClient, params: TaskTopicParams, _current: WorkspaceTaskDetail, _event: TaskEvent) => + backend.getTaskDetail(params.organizationId, params.repoId, params.taskId), + } satisfies TopicDefinition, session: { key: (params: SessionTopicParams) => `session:${params.organizationId}:${params.taskId}:${params.sessionId}`, @@ -123,13 +81,13 @@ export const topicDefinitions = { connect: 
(backend: BackendClient, params: SessionTopicParams) => backend.connectTask(params.organizationId, params.repoId, params.taskId), fetchInitial: (backend: BackendClient, params: SessionTopicParams) => backend.getSessionDetail(params.organizationId, params.repoId, params.taskId, params.sessionId), - applyEvent: (current: WorkbenchSessionDetail, event: SessionEvent) => { - if (event.session.sessionId !== current.sessionId) { + applyEvent: async (backend: BackendClient, params: SessionTopicParams, current: WorkspaceSessionDetail, event: SessionEvent) => { + if (event.session.sessionId !== params.sessionId) { return current; } - return event.session; + return await backend.getSessionDetail(params.organizationId, params.repoId, params.taskId, params.sessionId); }, - } satisfies TopicDefinition, + } satisfies TopicDefinition, sandboxProcesses: { key: (params: SandboxProcessesTopicParams) => `sandbox:${params.organizationId}:${params.sandboxProviderId}:${params.sandboxId}`, @@ -138,7 +96,8 @@ export const topicDefinitions = { backend.connectSandbox(params.organizationId, params.sandboxProviderId, params.sandboxId), fetchInitial: async (backend: BackendClient, params: SandboxProcessesTopicParams) => (await backend.listSandboxProcesses(params.organizationId, params.sandboxProviderId, params.sandboxId)).processes, - applyEvent: (_current: SandboxProcessRecord[], event: SandboxProcessesEvent) => event.processes, + applyEvent: (_backend: BackendClient, _params: SandboxProcessesTopicParams, _current: SandboxProcessRecord[], event: SandboxProcessesEvent) => + event.processes, } satisfies TopicDefinition, } as const; diff --git a/foundry/packages/client/src/view-model.ts b/foundry/packages/client/src/view-model.ts index c30ff2a..bd7a98c 100644 --- a/foundry/packages/client/src/view-model.ts +++ b/foundry/packages/client/src/view-model.ts @@ -65,7 +65,7 @@ export function filterTasks(rows: TaskRecord[], query: string): TaskRecord[] { } return rows.filter((row) => { - const fields = 
[row.branchName ?? "", row.title ?? "", row.taskId, row.task, row.prAuthor ?? "", row.reviewer ?? ""]; + const fields = [row.branchName ?? "", row.title ?? "", row.taskId, row.task]; return fields.some((field) => fuzzyMatch(field, q)); }); } diff --git a/foundry/packages/client/src/workbench-client.ts b/foundry/packages/client/src/workbench-client.ts deleted file mode 100644 index c317649..0000000 --- a/foundry/packages/client/src/workbench-client.ts +++ /dev/null @@ -1,64 +0,0 @@ -import type { - TaskWorkbenchAddSessionResponse, - TaskWorkbenchChangeModelInput, - TaskWorkbenchCreateTaskInput, - TaskWorkbenchCreateTaskResponse, - TaskWorkbenchDiffInput, - TaskWorkbenchRenameInput, - TaskWorkbenchRenameSessionInput, - TaskWorkbenchSelectInput, - TaskWorkbenchSetSessionUnreadInput, - TaskWorkbenchSendMessageInput, - TaskWorkbenchSnapshot, - TaskWorkbenchSessionInput, - TaskWorkbenchUpdateDraftInput, -} from "@sandbox-agent/foundry-shared"; -import type { BackendClient } from "./backend-client.js"; -import { getSharedMockWorkbenchClient } from "./mock/workbench-client.js"; -import { createRemoteWorkbenchClient } from "./remote/workbench-client.js"; - -export type TaskWorkbenchClientMode = "mock" | "remote"; - -export interface CreateTaskWorkbenchClientOptions { - mode: TaskWorkbenchClientMode; - backend?: BackendClient; - organizationId?: string; -} - -export interface TaskWorkbenchClient { - getSnapshot(): TaskWorkbenchSnapshot; - subscribe(listener: () => void): () => void; - createTask(input: TaskWorkbenchCreateTaskInput): Promise; - markTaskUnread(input: TaskWorkbenchSelectInput): Promise; - renameTask(input: TaskWorkbenchRenameInput): Promise; - renameBranch(input: TaskWorkbenchRenameInput): Promise; - archiveTask(input: TaskWorkbenchSelectInput): Promise; - publishPr(input: TaskWorkbenchSelectInput): Promise; - revertFile(input: TaskWorkbenchDiffInput): Promise; - updateDraft(input: TaskWorkbenchUpdateDraftInput): Promise; - sendMessage(input: 
TaskWorkbenchSendMessageInput): Promise; - stopAgent(input: TaskWorkbenchSessionInput): Promise; - setSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise; - renameSession(input: TaskWorkbenchRenameSessionInput): Promise; - closeSession(input: TaskWorkbenchSessionInput): Promise; - addSession(input: TaskWorkbenchSelectInput): Promise; - changeModel(input: TaskWorkbenchChangeModelInput): Promise; -} - -export function createTaskWorkbenchClient(options: CreateTaskWorkbenchClientOptions): TaskWorkbenchClient { - if (options.mode === "mock") { - return getSharedMockWorkbenchClient(); - } - - if (!options.backend) { - throw new Error("Remote task workbench client requires a backend client"); - } - if (!options.organizationId) { - throw new Error("Remote task workbench client requires a organization id"); - } - - return createRemoteWorkbenchClient({ - backend: options.backend, - organizationId: options.organizationId, - }); -} diff --git a/foundry/packages/client/src/workspace-client.ts b/foundry/packages/client/src/workspace-client.ts new file mode 100644 index 0000000..c3293a0 --- /dev/null +++ b/foundry/packages/client/src/workspace-client.ts @@ -0,0 +1,64 @@ +import type { + TaskWorkspaceAddSessionResponse, + TaskWorkspaceChangeModelInput, + TaskWorkspaceCreateTaskInput, + TaskWorkspaceCreateTaskResponse, + TaskWorkspaceDiffInput, + TaskWorkspaceRenameInput, + TaskWorkspaceRenameSessionInput, + TaskWorkspaceSelectInput, + TaskWorkspaceSetSessionUnreadInput, + TaskWorkspaceSendMessageInput, + TaskWorkspaceSnapshot, + TaskWorkspaceSessionInput, + TaskWorkspaceUpdateDraftInput, +} from "@sandbox-agent/foundry-shared"; +import type { BackendClient } from "./backend-client.js"; +import { getSharedMockWorkspaceClient } from "./mock/workspace-client.js"; +import { createRemoteWorkspaceClient } from "./remote/workspace-client.js"; + +export type TaskWorkspaceClientMode = "mock" | "remote"; + +export interface CreateTaskWorkspaceClientOptions { + mode: 
TaskWorkspaceClientMode; + backend?: BackendClient; + organizationId?: string; +} + +export interface TaskWorkspaceClient { + getSnapshot(): TaskWorkspaceSnapshot; + subscribe(listener: () => void): () => void; + createTask(input: TaskWorkspaceCreateTaskInput): Promise; + markTaskUnread(input: TaskWorkspaceSelectInput): Promise; + renameTask(input: TaskWorkspaceRenameInput): Promise; + archiveTask(input: TaskWorkspaceSelectInput): Promise; + publishPr(input: TaskWorkspaceSelectInput): Promise; + revertFile(input: TaskWorkspaceDiffInput): Promise; + updateDraft(input: TaskWorkspaceUpdateDraftInput): Promise; + sendMessage(input: TaskWorkspaceSendMessageInput): Promise; + stopAgent(input: TaskWorkspaceSessionInput): Promise; + selectSession(input: TaskWorkspaceSessionInput): Promise; + setSessionUnread(input: TaskWorkspaceSetSessionUnreadInput): Promise; + renameSession(input: TaskWorkspaceRenameSessionInput): Promise; + closeSession(input: TaskWorkspaceSessionInput): Promise; + addSession(input: TaskWorkspaceSelectInput): Promise; + changeModel(input: TaskWorkspaceChangeModelInput): Promise; +} + +export function createTaskWorkspaceClient(options: CreateTaskWorkspaceClientOptions): TaskWorkspaceClient { + if (options.mode === "mock") { + return getSharedMockWorkspaceClient(); + } + + if (!options.backend) { + throw new Error("Remote task workspace client requires a backend client"); + } + if (!options.organizationId) { + throw new Error("Remote task workspace client requires a organization id"); + } + + return createRemoteWorkspaceClient({ + backend: options.backend, + organizationId: options.organizationId, + }); +} diff --git a/foundry/packages/client/src/workbench-model.ts b/foundry/packages/client/src/workspace-model.ts similarity index 90% rename from foundry/packages/client/src/workbench-model.ts rename to foundry/packages/client/src/workspace-model.ts index afe9e8b..290794b 100644 --- a/foundry/packages/client/src/workbench-model.ts +++ 
b/foundry/packages/client/src/workspace-model.ts @@ -1,40 +1,28 @@ +import { + DEFAULT_WORKSPACE_MODEL_ID, + DEFAULT_WORKSPACE_MODEL_GROUPS as SharedModelGroups, + workspaceModelLabel as sharedWorkspaceModelLabel, + workspaceProviderAgent as sharedWorkspaceProviderAgent, +} from "@sandbox-agent/foundry-shared"; import type { - WorkbenchAgentKind as AgentKind, - WorkbenchSession as AgentSession, - WorkbenchDiffLineKind as DiffLineKind, - WorkbenchFileTreeNode as FileTreeNode, - WorkbenchTask as Task, - TaskWorkbenchSnapshot, - WorkbenchHistoryEvent as HistoryEvent, - WorkbenchModelGroup as ModelGroup, - WorkbenchModelId as ModelId, - WorkbenchParsedDiffLine as ParsedDiffLine, - WorkbenchRepositorySection, - WorkbenchRepo, - WorkbenchTranscriptEvent as TranscriptEvent, + WorkspaceAgentKind as AgentKind, + WorkspaceSession as AgentSession, + WorkspaceDiffLineKind as DiffLineKind, + WorkspaceFileTreeNode as FileTreeNode, + WorkspaceTask as Task, + TaskWorkspaceSnapshot, + WorkspaceHistoryEvent as HistoryEvent, + WorkspaceModelGroup as ModelGroup, + WorkspaceModelId as ModelId, + WorkspaceParsedDiffLine as ParsedDiffLine, + WorkspaceRepositorySection, + WorkspaceRepo, + WorkspaceTranscriptEvent as TranscriptEvent, } from "@sandbox-agent/foundry-shared"; import rivetDevFixture from "../../../scripts/data/rivet-dev.json" with { type: "json" }; -export const MODEL_GROUPS: ModelGroup[] = [ - { - provider: "Claude", - models: [ - { id: "claude-sonnet-4", label: "Sonnet 4" }, - { id: "claude-opus-4", label: "Opus 4" }, - ], - }, - { - provider: "OpenAI", - models: [ - { id: "gpt-5.3-codex", label: "GPT-5.3 Codex" }, - { id: "gpt-5.4", label: "GPT-5.4" }, - { id: "gpt-5.2-codex", label: "GPT-5.2 Codex" }, - { id: "gpt-5.1-codex-max", label: "GPT-5.1 Codex Max" }, - { id: "gpt-5.2", label: "GPT-5.2" }, - { id: "gpt-5.1-codex-mini", label: "GPT-5.1 Codex Mini" }, - ], - }, -]; +export const MODEL_GROUPS: ModelGroup[] = SharedModelGroups; +export const DEFAULT_MODEL_ID: ModelId = 
DEFAULT_WORKSPACE_MODEL_ID; const MOCK_REPLIES = [ "Got it. I'll work on that now. Let me start by examining the relevant files...", @@ -73,15 +61,11 @@ export function formatMessageDuration(durationMs: number): string { } export function modelLabel(id: ModelId): string { - const group = MODEL_GROUPS.find((candidate) => candidate.models.some((model) => model.id === id)); - const model = group?.models.find((candidate) => candidate.id === id); - return model && group ? `${group.provider} ${model.label}` : id; + return sharedWorkspaceModelLabel(id, MODEL_GROUPS); } export function providerAgent(provider: string): AgentKind { - if (provider === "Claude") return "Claude"; - if (provider === "OpenAI") return "Codex"; - return "Cursor"; + return sharedWorkspaceProviderAgent(provider); } export function slugify(text: string): string { @@ -204,6 +188,29 @@ export function buildHistoryEvents(sessions: AgentSession[]): HistoryEvent[] { .sort((left, right) => messageOrder(left.messageId) - messageOrder(right.messageId)); } +function buildPullRequestSummary(params: { + number: number; + title: string; + branch: string; + repoName: string; + updatedAtMs: number; + status: "ready" | "draft"; +}) { + return { + number: params.number, + status: params.status, + title: params.title, + state: "open", + url: `https://github.com/${params.repoName}/pull/${params.number}`, + headRefName: params.branch, + baseRefName: "main", + repoFullName: params.repoName, + authorLogin: "mock", + isDraft: params.status === "draft", + updatedAtMs: params.updatedAtMs, + }; +} + function transcriptFromLegacyMessages(sessionId: string, messages: LegacyMessage[]): TranscriptEvent[] { return messages.map((message, index) => ({ id: message.id, @@ -315,14 +322,21 @@ export function buildInitialTasks(): Task[] { repoName: "rivet-dev/sandbox-agent", updatedAtMs: minutesAgo(8), branch: "NathanFlurry/pi-bootstrap-fix", - pullRequest: { number: 227, status: "ready" }, + pullRequest: buildPullRequestSummary({ + 
number: 227, + title: "Normalize Pi ACP bootstrap payloads", + branch: "NathanFlurry/pi-bootstrap-fix", + repoName: "rivet-dev/sandbox-agent", + updatedAtMs: minutesAgo(8), + status: "ready", + }), sessions: [ { id: "t1", sessionId: "t1", sessionName: "Pi payload fix", agent: "Claude", - model: "claude-sonnet-4", + model: "sonnet", status: "idle", thinkingSinceMs: null, unread: false, @@ -484,14 +498,21 @@ export function buildInitialTasks(): Task[] { repoName: "rivet-dev/sandbox-agent", updatedAtMs: minutesAgo(3), branch: "feat/builtin-agent-skills", - pullRequest: { number: 223, status: "draft" }, + pullRequest: buildPullRequestSummary({ + number: 223, + title: "Auto-inject builtin agent skills at startup", + branch: "feat/builtin-agent-skills", + repoName: "rivet-dev/sandbox-agent", + updatedAtMs: minutesAgo(3), + status: "draft", + }), sessions: [ { id: "t3", sessionId: "t3", sessionName: "Skills injection", agent: "Claude", - model: "claude-opus-4", + model: "opus", status: "running", thinkingSinceMs: NOW_MS - 45_000, unread: false, @@ -584,14 +605,21 @@ export function buildInitialTasks(): Task[] { repoName: "rivet-dev/sandbox-agent", updatedAtMs: minutesAgo(45), branch: "hooks-example", - pullRequest: { number: 225, status: "ready" }, + pullRequest: buildPullRequestSummary({ + number: 225, + title: "Add hooks example for Claude, Codex, and OpenCode", + branch: "hooks-example", + repoName: "rivet-dev/sandbox-agent", + updatedAtMs: minutesAgo(45), + status: "ready", + }), sessions: [ { id: "t4", sessionId: "t4", sessionName: "Example docs", agent: "Claude", - model: "claude-sonnet-4", + model: "sonnet", status: "idle", thinkingSinceMs: null, unread: false, @@ -659,14 +687,21 @@ export function buildInitialTasks(): Task[] { repoName: "rivet-dev/rivet", updatedAtMs: minutesAgo(15), branch: "actor-reschedule-endpoint", - pullRequest: { number: 4400, status: "ready" }, + pullRequest: buildPullRequestSummary({ + number: 4400, + title: "Add actor reschedule 
endpoint", + branch: "actor-reschedule-endpoint", + repoName: "rivet-dev/rivet", + updatedAtMs: minutesAgo(15), + status: "ready", + }), sessions: [ { id: "t5", sessionId: "t5", sessionName: "Reschedule API", agent: "Claude", - model: "claude-sonnet-4", + model: "sonnet", status: "idle", thinkingSinceMs: null, unread: false, @@ -793,14 +828,21 @@ export function buildInitialTasks(): Task[] { repoName: "rivet-dev/rivet", updatedAtMs: minutesAgo(35), branch: "feat/dynamic-actors", - pullRequest: { number: 4395, status: "draft" }, + pullRequest: buildPullRequestSummary({ + number: 4395, + title: "Dynamic actors", + branch: "feat/dynamic-actors", + repoName: "rivet-dev/rivet", + updatedAtMs: minutesAgo(35), + status: "draft", + }), sessions: [ { id: "t6", sessionId: "t6", sessionName: "Dynamic actors impl", agent: "Claude", - model: "claude-opus-4", + model: "opus", status: "idle", thinkingSinceMs: null, unread: true, @@ -850,14 +892,21 @@ export function buildInitialTasks(): Task[] { repoName: "rivet-dev/vbare", updatedAtMs: minutesAgo(25), branch: "fix-use-full-cloud-run-pool-name", - pullRequest: { number: 235, status: "ready" }, + pullRequest: buildPullRequestSummary({ + number: 235, + title: "Use full cloud run pool name for routing", + branch: "fix-use-full-cloud-run-pool-name", + repoName: "rivet-dev/vbare", + updatedAtMs: minutesAgo(25), + status: "ready", + }), sessions: [ { id: "t7", sessionId: "t7", sessionName: "Pool routing fix", agent: "Claude", - model: "claude-sonnet-4", + model: "sonnet", status: "idle", thinkingSinceMs: null, unread: false, @@ -959,14 +1008,21 @@ export function buildInitialTasks(): Task[] { repoName: "rivet-dev/skills", updatedAtMs: minutesAgo(50), branch: "fix-guard-support-https-targets", - pullRequest: { number: 125, status: "ready" }, + pullRequest: buildPullRequestSummary({ + number: 125, + title: "Route compute gateway path correctly", + branch: "fix-guard-support-https-targets", + repoName: "rivet-dev/skills", + updatedAtMs: 
minutesAgo(50), + status: "ready", + }), sessions: [ { id: "t8", sessionId: "t8", sessionName: "Guard routing", agent: "Claude", - model: "claude-sonnet-4", + model: "sonnet", status: "idle", thinkingSinceMs: null, unread: false, @@ -1073,14 +1129,21 @@ export function buildInitialTasks(): Task[] { repoName: "rivet-dev/skills", updatedAtMs: minutesAgo(2 * 24 * 60), branch: "chore-move-compute-gateway-to", - pullRequest: { number: 123, status: "ready" }, + pullRequest: buildPullRequestSummary({ + number: 123, + title: "Move compute gateway to guard", + branch: "chore-move-compute-gateway-to", + repoName: "rivet-dev/skills", + updatedAtMs: minutesAgo(2 * 24 * 60), + status: "ready", + }), sessions: [ { id: "t9", sessionId: "t9", sessionName: "Gateway migration", agent: "Claude", - model: "claude-sonnet-4", + model: "sonnet", status: "idle", thinkingSinceMs: null, unread: false, @@ -1166,8 +1229,6 @@ export function buildInitialTasks(): Task[] { repoId: "sandbox-agent", title: "Fix broken auth middleware (error demo)", status: "error", - runtimeStatus: "error", - statusMessage: "session:error", repoName: "rivet-dev/sandbox-agent", updatedAtMs: minutesAgo(2), branch: "fix/auth-middleware", @@ -1178,7 +1239,7 @@ export function buildInitialTasks(): Task[] { sessionId: "status-error-session", sessionName: "Auth fix", agent: "Claude", - model: "claude-sonnet-4", + model: "sonnet", status: "error", thinkingSinceMs: null, unread: false, @@ -1197,9 +1258,7 @@ export function buildInitialTasks(): Task[] { id: "status-provisioning", repoId: "sandbox-agent", title: "Add rate limiting to API gateway (provisioning demo)", - status: "new", - runtimeStatus: "init_enqueue_provision", - statusMessage: "Queueing sandbox provisioning.", + status: "init_enqueue_provision", repoName: "rivet-dev/sandbox-agent", updatedAtMs: minutesAgo(0), branch: null, @@ -1211,7 +1270,7 @@ export function buildInitialTasks(): Task[] { sandboxSessionId: null, sessionName: "Session 1", agent: "Claude", - 
model: "claude-sonnet-4", + model: "sonnet", status: "pending_provision", thinkingSinceMs: null, unread: false, @@ -1259,7 +1318,6 @@ export function buildInitialTasks(): Task[] { repoId: "sandbox-agent", title: "Refactor WebSocket handler (running demo)", status: "running", - runtimeStatus: "running", repoName: "rivet-dev/sandbox-agent", updatedAtMs: minutesAgo(1), branch: "refactor/ws-handler", @@ -1300,7 +1358,7 @@ export function buildInitialTasks(): Task[] { * Uses real public repos so the mock sidebar matches what an actual rivet-dev * organization would show after a GitHub sync. */ -function buildMockRepos(): WorkbenchRepo[] { +function buildMockRepos(): WorkspaceRepo[] { return rivetDevFixture.repos.map((r) => ({ id: repoIdFromFullName(r.fullName), label: r.fullName, @@ -1313,55 +1371,19 @@ function repoIdFromFullName(fullName: string): string { return parts[parts.length - 1] ?? fullName; } -/** - * Build task entries from open PR fixture data. - * Maps to the backend's PR sync behavior (RepositoryPrSyncActor) where PRs - * appear as first-class sidebar items even without an associated task. - * Each open PR gets a lightweight task entry so it shows in the sidebar. - */ -function buildPrTasks(): Task[] { - // Collect branch names already claimed by hand-written tasks so we don't duplicate - const existingBranches = new Set( - buildInitialTasks() - .map((t) => t.branch) - .filter(Boolean), - ); - - return rivetDevFixture.openPullRequests - .filter((pr) => !existingBranches.has(pr.headRefName)) - .map((pr) => { - const repoId = repoIdFromFullName(pr.repoFullName); - return { - id: `pr-${repoId}-${pr.number}`, - repoId, - title: pr.title, - status: "idle" as const, - repoName: pr.repoFullName, - updatedAtMs: new Date(pr.updatedAt).getTime(), - branch: pr.headRefName, - pullRequest: { number: pr.number, status: pr.draft ? 
("draft" as const) : ("ready" as const) }, - sessions: [], - fileChanges: [], - diffs: {}, - fileTree: [], - minutesUsed: 0, - }; - }); -} - -export function buildInitialMockLayoutViewModel(): TaskWorkbenchSnapshot { +export function buildInitialMockLayoutViewModel(): TaskWorkspaceSnapshot { const repos = buildMockRepos(); - const tasks = [...buildInitialTasks(), ...buildPrTasks()]; + const tasks = buildInitialTasks(); return { organizationId: "default", repos, - repositories: groupWorkbenchRepositories(repos, tasks), + repositories: groupWorkspaceRepositories(repos, tasks), tasks, }; } -export function groupWorkbenchRepositories(repos: WorkbenchRepo[], tasks: Task[]): WorkbenchRepositorySection[] { - const grouped = new Map(); +export function groupWorkspaceRepositories(repos: WorkspaceRepo[], tasks: Task[]): WorkspaceRepositorySection[] { + const grouped = new Map(); for (const repo of repos) { grouped.set(repo.id, { diff --git a/foundry/packages/client/test/e2e/full-integration-e2e.test.ts b/foundry/packages/client/test/e2e/full-integration-e2e.test.ts index 8446892..21eaf6b 100644 --- a/foundry/packages/client/test/e2e/full-integration-e2e.test.ts +++ b/foundry/packages/client/test/e2e/full-integration-e2e.test.ts @@ -1,6 +1,6 @@ import { randomUUID } from "node:crypto"; import { describe, expect, it } from "vitest"; -import type { HistoryEvent, RepoOverview } from "@sandbox-agent/foundry-shared"; +import type { AuditLogEvent as HistoryEvent, RepoOverview } from "@sandbox-agent/foundry-shared"; import { createBackendClient } from "../../src/backend-client.js"; import { requireImportedRepo } from "./helpers.js"; @@ -132,11 +132,11 @@ describe("e2e(client): full integration stack workflow", () => { 90_000, 1_000, async () => client.getRepoOverview(organizationId, repo.repoId), - (value) => value.branches.some((row) => row.branchName === seededBranch), + (value) => value.branches.some((row: RepoOverview["branches"][number]) => row.branchName === seededBranch), ); 
const postActionOverview = await client.getRepoOverview(organizationId, repo.repoId); - const seededRow = postActionOverview.branches.find((row) => row.branchName === seededBranch); + const seededRow = postActionOverview.branches.find((row: RepoOverview["branches"][number]) => row.branchName === seededBranch); expect(Boolean(seededRow)).toBe(true); expect(postActionOverview.fetchedAt).toBeGreaterThanOrEqual(overview.fetchedAt); } finally { diff --git a/foundry/packages/client/test/e2e/github-pr-e2e.test.ts b/foundry/packages/client/test/e2e/github-pr-e2e.test.ts index 83101fb..89dd638 100644 --- a/foundry/packages/client/test/e2e/github-pr-e2e.test.ts +++ b/foundry/packages/client/test/e2e/github-pr-e2e.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from "vitest"; -import type { TaskRecord, HistoryEvent } from "@sandbox-agent/foundry-shared"; +import type { AuditLogEvent as HistoryEvent, TaskRecord } from "@sandbox-agent/foundry-shared"; import { createBackendClient } from "../../src/backend-client.js"; import { requireImportedRepo } from "./helpers.js"; @@ -80,9 +80,10 @@ function parseHistoryPayload(event: HistoryEvent): Record { } } -async function debugDump(client: ReturnType, organizationId: string, taskId: string): Promise { +async function debugDump(client: ReturnType, organizationId: string, repoId: string, taskId: string): Promise { try { - const task = await client.getTask(organizationId, taskId); + const task = await client.getTask(organizationId, repoId, taskId); + const detail = await client.getTaskDetail(organizationId, repoId, taskId).catch(() => null); const history = await client.listHistory({ organizationId, taskId, limit: 80 }).catch(() => []); const historySummary = history .slice(0, 20) @@ -90,10 +91,11 @@ async function debugDump(client: ReturnType, organiz .join("\n"); let sessionEventsSummary = ""; - if (task.activeSandboxId && task.activeSessionId) { + const activeSessionId = detail?.activeSessionId ?? 
null; + if (task.activeSandboxId && activeSessionId) { const events = await client .listSandboxSessionEvents(organizationId, task.sandboxProviderId, task.activeSandboxId, { - sessionId: task.activeSessionId, + sessionId: activeSessionId, limit: 50, }) .then((r) => r.items) @@ -109,13 +111,11 @@ async function debugDump(client: ReturnType, organiz JSON.stringify( { status: task.status, - statusMessage: task.statusMessage, title: task.title, branchName: task.branchName, activeSandboxId: task.activeSandboxId, - activeSessionId: task.activeSessionId, - prUrl: task.prUrl, - prSubmitted: task.prSubmitted, + activeSessionId, + pullRequestUrl: detail?.pullRequest?.url ?? null, }, null, 2, @@ -189,7 +189,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { // Cold local sandbox startup can exceed a few minutes on first run. 8 * 60_000, 1_000, - async () => client.getTask(organizationId, created.taskId), + async () => client.getTask(organizationId, repo.repoId, created.taskId), (h) => Boolean(h.title && h.branchName && h.activeSandboxId), (h) => { if (h.status !== lastStatus) { @@ -200,18 +200,18 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { } }, ).catch(async (err) => { - const dump = await debugDump(client, organizationId, created.taskId); + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); throw new Error(`${err instanceof Error ? 
err.message : String(err)}\n${dump}`); }); branchName = namedAndProvisioned.branchName!; sandboxId = namedAndProvisioned.activeSandboxId!; - const withSession = await poll( + const withSession = await poll>>( "task to create active session", 3 * 60_000, 1_500, - async () => client.getTask(organizationId, created.taskId), + async () => client.getTaskDetail(organizationId, repo.repoId, created.taskId), (h) => Boolean(h.activeSessionId), (h) => { if (h.status === "error") { @@ -219,7 +219,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { } }, ).catch(async (err) => { - const dump = await debugDump(client, organizationId, created.taskId); + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); throw new Error(`${err instanceof Error ? err.message : String(err)}\n${dump}`); }); @@ -231,14 +231,14 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { 2_000, async () => ( - await client.listSandboxSessionEvents(organizationId, withSession.sandboxProviderId, sandboxId!, { + await client.listSandboxSessionEvents(organizationId, namedAndProvisioned.sandboxProviderId, sandboxId!, { sessionId: sessionId!, limit: 40, }) ).items, (events) => events.length > 0, ).catch(async (err) => { - const dump = await debugDump(client, organizationId, created.taskId); + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); throw new Error(`${err instanceof Error ? 
err.message : String(err)}\n${dump}`); }); @@ -246,7 +246,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { "task to reach idle state", 8 * 60_000, 2_000, - async () => client.getTask(organizationId, created.taskId), + async () => client.getTask(organizationId, repo.repoId, created.taskId), (h) => h.status === "idle", (h) => { if (h.status === "error") { @@ -254,7 +254,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { } }, ).catch(async (err) => { - const dump = await debugDump(client, organizationId, created.taskId); + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); throw new Error(`${err instanceof Error ? err.message : String(err)}\n${dump}`); }); @@ -266,7 +266,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { (events) => events.some((e) => e.kind === "task.pr_created"), ) .catch(async (err) => { - const dump = await debugDump(client, organizationId, created.taskId); + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); throw new Error(`${err instanceof Error ? err.message : String(err)}\n${dump}`); }) .then((events) => events.find((e) => e.kind === "task.pr_created")!); @@ -287,16 +287,16 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { expect(prFiles.some((f) => f.filename === expectedFile)).toBe(true); // Close the task and assert the sandbox is released (stopped). 
- await client.runAction(organizationId, created.taskId, "archive"); + await client.runAction(organizationId, repo.repoId, created.taskId, "archive"); - await poll( + await poll>>( "task to become archived (session released)", 60_000, 1_000, - async () => client.getTask(organizationId, created.taskId), + async () => client.getTaskDetail(organizationId, repo.repoId, created.taskId), (h) => h.status === "archived" && h.activeSessionId === null, ).catch(async (err) => { - const dump = await debugDump(client, organizationId, created.taskId); + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); throw new Error(`${err instanceof Error ? err.message : String(err)}\n${dump}`); }); @@ -311,7 +311,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => { return st.includes("destroyed") || st.includes("stopped") || st.includes("suspended") || st.includes("paused"); }, ).catch(async (err) => { - const dump = await debugDump(client, organizationId, created.taskId); + const dump = await debugDump(client, organizationId, repo.repoId, created.taskId); const state = await client.sandboxProviderState(organizationId, "local", sandboxId!).catch(() => null); throw new Error(`${err instanceof Error ? err.message : String(err)}\n` + `sandbox state: ${state ? 
state.state : "unknown"}\n` + `${dump}`); }); diff --git a/foundry/packages/client/test/e2e/workbench-e2e.test.ts b/foundry/packages/client/test/e2e/workspace-e2e.test.ts similarity index 78% rename from foundry/packages/client/test/e2e/workbench-e2e.test.ts rename to foundry/packages/client/test/e2e/workspace-e2e.test.ts index 5442795..1de2065 100644 --- a/foundry/packages/client/test/e2e/workbench-e2e.test.ts +++ b/foundry/packages/client/test/e2e/workspace-e2e.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from "vitest"; -import type { TaskWorkbenchSnapshot, WorkbenchSession, WorkbenchTask, WorkbenchModelId, WorkbenchTranscriptEvent } from "@sandbox-agent/foundry-shared"; +import type { TaskWorkspaceSnapshot, WorkspaceSession, WorkspaceTask, WorkspaceModelId, WorkspaceTranscriptEvent } from "@sandbox-agent/foundry-shared"; import { createBackendClient } from "../../src/backend-client.js"; import { requireImportedRepo } from "./helpers.js"; @@ -13,21 +13,9 @@ function requiredEnv(name: string): string { return value; } -function workbenchModelEnv(name: string, fallback: WorkbenchModelId): WorkbenchModelId { +function workspaceModelEnv(name: string, fallback: WorkspaceModelId): WorkspaceModelId { const value = process.env[name]?.trim(); - switch (value) { - case "claude-sonnet-4": - case "claude-opus-4": - case "gpt-5.3-codex": - case "gpt-5.4": - case "gpt-5.2-codex": - case "gpt-5.1-codex-max": - case "gpt-5.2": - case "gpt-5.1-codex-mini": - return value; - default: - return fallback; - } + return value && value.length > 0 ? 
value : fallback; } async function sleep(ms: number): Promise { @@ -50,7 +38,7 @@ async function poll(label: string, timeoutMs: number, intervalMs: number, fn: } } -function findTask(snapshot: TaskWorkbenchSnapshot, taskId: string): WorkbenchTask { +function findTask(snapshot: TaskWorkspaceSnapshot, taskId: string): WorkspaceTask { const task = snapshot.tasks.find((candidate) => candidate.id === taskId); if (!task) { throw new Error(`task ${taskId} missing from snapshot`); @@ -58,7 +46,7 @@ function findTask(snapshot: TaskWorkbenchSnapshot, taskId: string): WorkbenchTas return task; } -function findTab(task: WorkbenchTask, sessionId: string): WorkbenchSession { +function findTab(task: WorkspaceTask, sessionId: string): WorkspaceSession { const tab = task.sessions.find((candidate) => candidate.id === sessionId); if (!tab) { throw new Error(`tab ${sessionId} missing from task ${task.id}`); @@ -66,7 +54,7 @@ function findTab(task: WorkbenchTask, sessionId: string): WorkbenchSession { return tab; } -function extractEventText(event: WorkbenchTranscriptEvent): string { +function extractEventText(event: WorkspaceTranscriptEvent): string { const payload = event.payload; if (!payload || typeof payload !== "object") { return String(payload ?? 
""); @@ -127,7 +115,7 @@ function extractEventText(event: WorkbenchTranscriptEvent): string { return JSON.stringify(payload); } -function transcriptIncludesAgentText(transcript: WorkbenchTranscriptEvent[], expectedText: string): boolean { +function transcriptIncludesAgentText(transcript: WorkspaceTranscriptEvent[], expectedText: string): boolean { return transcript .filter((event) => event.sender === "agent") .map((event) => extractEventText(event)) @@ -135,15 +123,15 @@ function transcriptIncludesAgentText(transcript: WorkbenchTranscriptEvent[], exp .includes(expectedText); } -describe("e2e(client): workbench flows", () => { +describe("e2e(client): workspace flows", () => { it.skipIf(!RUN_WORKBENCH_E2E)( - "creates a task from an imported repo, adds sessions, exchanges messages, and manages workbench state", + "creates a task from an imported repo, adds sessions, exchanges messages, and manages workspace state", { timeout: 20 * 60_000 }, async () => { const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet"; const organizationId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO"); - const model = workbenchModelEnv("HF_E2E_MODEL", "gpt-5.3-codex"); + const model = workspaceModelEnv("HF_E2E_MODEL", "gpt-5.3-codex"); const runId = `wb-${Date.now().toString(36)}`; const expectedFile = `${runId}.txt`; const expectedInitialReply = `WORKBENCH_READY_${runId}`; @@ -155,9 +143,9 @@ describe("e2e(client): workbench flows", () => { }); const repo = await requireImportedRepo(client, organizationId, repoRemote); - const created = await client.createWorkbenchTask(organizationId, { + const created = await client.createWorkspaceTask(organizationId, { repoId: repo.repoId, - title: `Workbench E2E ${runId}`, + title: `Workspace E2E ${runId}`, branch: `e2e/${runId}`, model, task: `Reply with exactly: ${expectedInitialReply}`, @@ -167,7 +155,7 @@ describe("e2e(client): workbench flows", () => 
{ "task provisioning", 12 * 60_000, 2_000, - async () => findTask(await client.getWorkbench(organizationId), created.taskId), + async () => findTask(await client.getWorkspace(organizationId), created.taskId), (task) => task.branch === `e2e/${runId}` && task.sessions.length > 0, ); @@ -177,7 +165,7 @@ describe("e2e(client): workbench flows", () => { "initial agent response", 12 * 60_000, 2_000, - async () => findTask(await client.getWorkbench(organizationId), created.taskId), + async () => findTask(await client.getWorkspace(organizationId), created.taskId), (task) => { const tab = findTab(task, primaryTab.id); return task.status === "idle" && tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, expectedInitialReply); @@ -187,28 +175,33 @@ describe("e2e(client): workbench flows", () => { expect(findTab(initialCompleted, primaryTab.id).sessionId).toBeTruthy(); expect(transcriptIncludesAgentText(findTab(initialCompleted, primaryTab.id).transcript, expectedInitialReply)).toBe(true); - await client.renameWorkbenchTask(organizationId, { + await client.renameWorkspaceTask(organizationId, { + repoId: repo.repoId, taskId: created.taskId, - value: `Workbench E2E ${runId} Renamed`, + value: `Workspace E2E ${runId} Renamed`, }); - await client.renameWorkbenchSession(organizationId, { + await client.renameWorkspaceSession(organizationId, { + repoId: repo.repoId, taskId: created.taskId, sessionId: primaryTab.id, title: "Primary Session", }); - const secondTab = await client.createWorkbenchSession(organizationId, { + const secondTab = await client.createWorkspaceSession(organizationId, { + repoId: repo.repoId, taskId: created.taskId, model, }); - await client.renameWorkbenchSession(organizationId, { + await client.renameWorkspaceSession(organizationId, { + repoId: repo.repoId, taskId: created.taskId, sessionId: secondTab.sessionId, title: "Follow-up Session", }); - await client.updateWorkbenchDraft(organizationId, { + await 
client.updateWorkspaceDraft(organizationId, { + repoId: repo.repoId, taskId: created.taskId, sessionId: secondTab.sessionId, text: [ @@ -226,11 +219,12 @@ describe("e2e(client): workbench flows", () => { ], }); - const drafted = findTask(await client.getWorkbench(organizationId), created.taskId); + const drafted = findTask(await client.getWorkspace(organizationId), created.taskId); expect(findTab(drafted, secondTab.sessionId).draft.text).toContain(expectedReply); expect(findTab(drafted, secondTab.sessionId).draft.attachments).toHaveLength(1); - await client.sendWorkbenchMessage(organizationId, { + await client.sendWorkspaceMessage(organizationId, { + repoId: repo.repoId, taskId: created.taskId, sessionId: secondTab.sessionId, text: [ @@ -252,7 +246,7 @@ describe("e2e(client): workbench flows", () => { "follow-up session response", 10 * 60_000, 2_000, - async () => findTask(await client.getWorkbench(organizationId), created.taskId), + async () => findTask(await client.getWorkspace(organizationId), created.taskId), (task) => { const tab = findTab(task, secondTab.sessionId); return ( @@ -265,17 +259,19 @@ describe("e2e(client): workbench flows", () => { expect(transcriptIncludesAgentText(secondTranscript, expectedReply)).toBe(true); expect(withSecondReply.fileChanges.some((file) => file.path === expectedFile)).toBe(true); - await client.setWorkbenchSessionUnread(organizationId, { + await client.setWorkspaceSessionUnread(organizationId, { + repoId: repo.repoId, taskId: created.taskId, sessionId: secondTab.sessionId, unread: false, }); - await client.markWorkbenchUnread(organizationId, { taskId: created.taskId }); + await client.markWorkspaceUnread(organizationId, { repoId: repo.repoId, taskId: created.taskId }); - const unreadSnapshot = findTask(await client.getWorkbench(organizationId), created.taskId); + const unreadSnapshot = findTask(await client.getWorkspace(organizationId), created.taskId); expect(unreadSnapshot.sessions.some((tab) => tab.unread)).toBe(true); - 
await client.closeWorkbenchSession(organizationId, { + await client.closeWorkspaceSession(organizationId, { + repoId: repo.repoId, taskId: created.taskId, sessionId: secondTab.sessionId, }); @@ -284,26 +280,27 @@ describe("e2e(client): workbench flows", () => { "secondary session closed", 30_000, 1_000, - async () => findTask(await client.getWorkbench(organizationId), created.taskId), + async () => findTask(await client.getWorkspace(organizationId), created.taskId), (task) => !task.sessions.some((tab) => tab.id === secondTab.sessionId), ); expect(closedSnapshot.sessions).toHaveLength(1); - await client.revertWorkbenchFile(organizationId, { + await client.revertWorkspaceFile(organizationId, { + repoId: repo.repoId, taskId: created.taskId, path: expectedFile, }); const revertedSnapshot = await poll( - "file revert reflected in workbench", + "file revert reflected in workspace", 30_000, 1_000, - async () => findTask(await client.getWorkbench(organizationId), created.taskId), + async () => findTask(await client.getWorkspace(organizationId), created.taskId), (task) => !task.fileChanges.some((file) => file.path === expectedFile), ); expect(revertedSnapshot.fileChanges.some((file) => file.path === expectedFile)).toBe(false); - expect(revertedSnapshot.title).toBe(`Workbench E2E ${runId} Renamed`); + expect(revertedSnapshot.title).toBe(`Workspace E2E ${runId} Renamed`); expect(findTab(revertedSnapshot, primaryTab.id).sessionName).toBe("Primary Session"); }, ); diff --git a/foundry/packages/client/test/e2e/workbench-load-e2e.test.ts b/foundry/packages/client/test/e2e/workspace-load-e2e.test.ts similarity index 85% rename from foundry/packages/client/test/e2e/workbench-load-e2e.test.ts rename to foundry/packages/client/test/e2e/workspace-load-e2e.test.ts index b358b80..f9fc244 100644 --- a/foundry/packages/client/test/e2e/workbench-load-e2e.test.ts +++ b/foundry/packages/client/test/e2e/workspace-load-e2e.test.ts @@ -1,11 +1,11 @@ import { describe, expect, it } from 
"vitest"; import { createFoundryLogger, - type TaskWorkbenchSnapshot, - type WorkbenchSession, - type WorkbenchTask, - type WorkbenchModelId, - type WorkbenchTranscriptEvent, + type TaskWorkspaceSnapshot, + type WorkspaceSession, + type WorkspaceTask, + type WorkspaceModelId, + type WorkspaceTranscriptEvent, } from "@sandbox-agent/foundry-shared"; import { createBackendClient } from "../../src/backend-client.js"; import { requireImportedRepo } from "./helpers.js"; @@ -14,7 +14,7 @@ const RUN_WORKBENCH_LOAD_E2E = process.env.HF_ENABLE_DAEMON_WORKBENCH_LOAD_E2E = const logger = createFoundryLogger({ service: "foundry-client-e2e", bindings: { - suite: "workbench-load", + suite: "workspace-load", }, }); @@ -26,21 +26,9 @@ function requiredEnv(name: string): string { return value; } -function workbenchModelEnv(name: string, fallback: WorkbenchModelId): WorkbenchModelId { +function workspaceModelEnv(name: string, fallback: WorkspaceModelId): WorkspaceModelId { const value = process.env[name]?.trim(); - switch (value) { - case "claude-sonnet-4": - case "claude-opus-4": - case "gpt-5.3-codex": - case "gpt-5.4": - case "gpt-5.2-codex": - case "gpt-5.1-codex-max": - case "gpt-5.2": - case "gpt-5.1-codex-mini": - return value; - default: - return fallback; - } + return value && value.length > 0 ? 
value : fallback; } function intEnv(name: string, fallback: number): number { @@ -72,7 +60,7 @@ async function poll(label: string, timeoutMs: number, intervalMs: number, fn: } } -function findTask(snapshot: TaskWorkbenchSnapshot, taskId: string): WorkbenchTask { +function findTask(snapshot: TaskWorkspaceSnapshot, taskId: string): WorkspaceTask { const task = snapshot.tasks.find((candidate) => candidate.id === taskId); if (!task) { throw new Error(`task ${taskId} missing from snapshot`); @@ -80,7 +68,7 @@ function findTask(snapshot: TaskWorkbenchSnapshot, taskId: string): WorkbenchTas return task; } -function findTab(task: WorkbenchTask, sessionId: string): WorkbenchSession { +function findTab(task: WorkspaceTask, sessionId: string): WorkspaceSession { const tab = task.sessions.find((candidate) => candidate.id === sessionId); if (!tab) { throw new Error(`tab ${sessionId} missing from task ${task.id}`); @@ -88,7 +76,7 @@ function findTab(task: WorkbenchTask, sessionId: string): WorkbenchSession { return tab; } -function extractEventText(event: WorkbenchTranscriptEvent): string { +function extractEventText(event: WorkspaceTranscriptEvent): string { const payload = event.payload; if (!payload || typeof payload !== "object") { return String(payload ?? ""); @@ -138,7 +126,7 @@ function extractEventText(event: WorkbenchTranscriptEvent): string { return typeof envelope.method === "string" ? 
envelope.method : JSON.stringify(payload); } -function transcriptIncludesAgentText(transcript: WorkbenchTranscriptEvent[], expectedText: string): boolean { +function transcriptIncludesAgentText(transcript: WorkspaceTranscriptEvent[], expectedText: string): boolean { return transcript .filter((event) => event.sender === "agent") .map((event) => extractEventText(event)) @@ -150,7 +138,7 @@ function average(values: number[]): number { return values.reduce((sum, value) => sum + value, 0) / Math.max(values.length, 1); } -async function measureWorkbenchSnapshot( +async function measureWorkspaceSnapshot( client: ReturnType, organizationId: string, iterations: number, @@ -163,11 +151,11 @@ async function measureWorkbenchSnapshot( transcriptEventCount: number; }> { const durations: number[] = []; - let snapshot: TaskWorkbenchSnapshot | null = null; + let snapshot: TaskWorkspaceSnapshot | null = null; for (let index = 0; index < iterations; index += 1) { const startedAt = performance.now(); - snapshot = await client.getWorkbench(organizationId); + snapshot = await client.getWorkspace(organizationId); durations.push(performance.now() - startedAt); } @@ -191,12 +179,12 @@ async function measureWorkbenchSnapshot( }; } -describe("e2e(client): workbench load", () => { +describe("e2e(client): workspace load", () => { it.skipIf(!RUN_WORKBENCH_LOAD_E2E)("runs a simple sequential load profile against the real backend", { timeout: 30 * 60_000 }, async () => { const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet"; const organizationId = process.env.HF_E2E_WORKSPACE?.trim() || "default"; const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO"); - const model = workbenchModelEnv("HF_E2E_MODEL", "gpt-5.3-codex"); + const model = workspaceModelEnv("HF_E2E_MODEL", "gpt-5.3-codex"); const taskCount = intEnv("HF_LOAD_TASK_COUNT", 3); const extraSessionCount = intEnv("HF_LOAD_EXTRA_SESSION_COUNT", 2); const pollIntervalMs = 
intEnv("HF_LOAD_POLL_INTERVAL_MS", 2_000); @@ -220,16 +208,16 @@ describe("e2e(client): workbench load", () => { transcriptEventCount: number; }> = []; - snapshotSeries.push(await measureWorkbenchSnapshot(client, organizationId, 2)); + snapshotSeries.push(await measureWorkspaceSnapshot(client, organizationId, 2)); for (let taskIndex = 0; taskIndex < taskCount; taskIndex += 1) { const runId = `load-${taskIndex}-${Date.now().toString(36)}`; const initialReply = `LOAD_INIT_${runId}`; const createStartedAt = performance.now(); - const created = await client.createWorkbenchTask(organizationId, { + const created = await client.createWorkspaceTask(organizationId, { repoId: repo.repoId, - title: `Workbench Load ${runId}`, + title: `Workspace Load ${runId}`, branch: `load/${runId}`, model, task: `Reply with exactly: ${initialReply}`, @@ -241,7 +229,7 @@ describe("e2e(client): workbench load", () => { `task ${runId} provisioning`, 12 * 60_000, pollIntervalMs, - async () => findTask(await client.getWorkbench(organizationId), created.taskId), + async () => findTask(await client.getWorkspace(organizationId), created.taskId), (task) => { const tab = task.sessions[0]; return Boolean(tab && task.status === "idle" && tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, initialReply)); @@ -256,13 +244,15 @@ describe("e2e(client): workbench load", () => { for (let sessionIndex = 0; sessionIndex < extraSessionCount; sessionIndex += 1) { const expectedReply = `LOAD_REPLY_${runId}_${sessionIndex}`; const createSessionStartedAt = performance.now(); - const createdSession = await client.createWorkbenchSession(organizationId, { + const createdSession = await client.createWorkspaceSession(organizationId, { + repoId: repo.repoId, taskId: created.taskId, model, }); createSessionLatencies.push(performance.now() - createSessionStartedAt); - await client.sendWorkbenchMessage(organizationId, { + await client.sendWorkspaceMessage(organizationId, { + repoId: repo.repoId, taskId: 
created.taskId, sessionId: createdSession.sessionId, text: `Run pwd in the repo, then reply with exactly: ${expectedReply}`, @@ -274,7 +264,7 @@ describe("e2e(client): workbench load", () => { `task ${runId} session ${sessionIndex} reply`, 10 * 60_000, pollIntervalMs, - async () => findTask(await client.getWorkbench(organizationId), created.taskId), + async () => findTask(await client.getWorkspace(organizationId), created.taskId), (task) => { const tab = findTab(task, createdSession.sessionId); return tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, expectedReply); @@ -285,14 +275,14 @@ describe("e2e(client): workbench load", () => { expect(transcriptIncludesAgentText(findTab(withReply, createdSession.sessionId).transcript, expectedReply)).toBe(true); } - const snapshotMetrics = await measureWorkbenchSnapshot(client, organizationId, 3); + const snapshotMetrics = await measureWorkspaceSnapshot(client, organizationId, 3); snapshotSeries.push(snapshotMetrics); logger.info( { taskIndex: taskIndex + 1, ...snapshotMetrics, }, - "workbench_load_snapshot", + "workspace_load_snapshot", ); } @@ -314,7 +304,7 @@ describe("e2e(client): workbench load", () => { snapshotTranscriptFinalCount: lastSnapshot.transcriptEventCount, }; - logger.info(summary, "workbench_load_summary"); + logger.info(summary, "workspace_load_summary"); expect(createTaskLatencies.length).toBe(taskCount); expect(provisionLatencies.length).toBe(taskCount); diff --git a/foundry/packages/client/test/keys.test.ts b/foundry/packages/client/test/keys.test.ts index 9bd6477..6b93ec1 100644 --- a/foundry/packages/client/test/keys.test.ts +++ b/foundry/packages/client/test/keys.test.ts @@ -1,15 +1,9 @@ import { describe, expect, it } from "vitest"; -import { historyKey, organizationKey, repositoryKey, taskKey, taskSandboxKey } from "../src/keys.js"; +import { auditLogKey, organizationKey, taskKey, taskSandboxKey } from "../src/keys.js"; describe("actor keys", () => { it("prefixes every key with 
organization namespace", () => { - const keys = [ - organizationKey("default"), - repositoryKey("default", "repo"), - taskKey("default", "repo", "task"), - taskSandboxKey("default", "sbx"), - historyKey("default", "repo"), - ]; + const keys = [organizationKey("default"), taskKey("default", "repo", "task"), taskSandboxKey("default", "sbx"), auditLogKey("default")]; for (const key of keys) { expect(key[0]).toBe("org"); diff --git a/foundry/packages/client/test/subscription-manager.test.ts b/foundry/packages/client/test/subscription-manager.test.ts index 9908113..c064606 100644 --- a/foundry/packages/client/test/subscription-manager.test.ts +++ b/foundry/packages/client/test/subscription-manager.test.ts @@ -50,6 +50,20 @@ class FakeActorConn implements ActorConn { function organizationSnapshot(): OrganizationSummarySnapshot { return { organizationId: "org-1", + github: { + connectedAccount: "octocat", + installationStatus: "connected", + syncStatus: "synced", + importedRepoCount: 1, + lastSyncLabel: "Synced just now", + lastSyncAt: 10, + lastWebhookAt: null, + lastWebhookEvent: "", + syncGeneration: 1, + syncPhase: null, + processedRepositoryCount: 1, + totalRepositoryCount: 1, + }, repos: [{ id: "repo-1", label: "repo-1", taskCount: 1, latestActivityMs: 10 }], taskSummaries: [ { @@ -61,10 +75,10 @@ function organizationSnapshot(): OrganizationSummarySnapshot { updatedAtMs: 10, branch: "main", pullRequest: null, + activeSessionId: null, sessionsSummary: [], }, ], - openPullRequests: [], }; } @@ -115,20 +129,44 @@ describe("RemoteSubscriptionManager", () => { ]); conn.emit("organizationUpdated", { - type: "taskSummaryUpdated", - taskSummary: { - id: "task-1", - repoId: "repo-1", - title: "Updated task", - status: "running", - repoName: "repo-1", - updatedAtMs: 20, - branch: "feature/live", - pullRequest: null, - sessionsSummary: [], + type: "organizationUpdated", + snapshot: { + organizationId: "org-1", + github: { + connectedAccount: "octocat", + installationStatus: 
"connected", + syncStatus: "syncing", + importedRepoCount: 1, + lastSyncLabel: "Syncing repositories...", + lastSyncAt: 10, + lastWebhookAt: null, + lastWebhookEvent: "", + syncGeneration: 2, + syncPhase: "syncing_branches", + processedRepositoryCount: 1, + totalRepositoryCount: 3, + }, + repos: [], + taskSummaries: [ + { + id: "task-1", + repoId: "repo-1", + title: "Updated task", + status: "running", + repoName: "repo-1", + updatedAtMs: 20, + branch: "feature/live", + pullRequest: null, + activeSessionId: null, + sessionsSummary: [], + }, + ], }, } satisfies OrganizationEvent); + // applyEvent chains onto an internal promise — flush the microtask queue + await flushAsyncWork(); + expect(manager.getSnapshot("organization", params)?.taskSummaries[0]?.title).toBe("Updated task"); expect(listenerA).toHaveBeenCalled(); expect(listenerB).toHaveBeenCalled(); diff --git a/foundry/packages/client/test/view-model.test.ts b/foundry/packages/client/test/view-model.test.ts index b494135..d418c2f 100644 --- a/foundry/packages/client/test/view-model.test.ts +++ b/foundry/packages/client/test/view-model.test.ts @@ -12,9 +12,8 @@ const sample: TaskRecord = { task: "Do test", sandboxProviderId: "local", status: "running", - statusMessage: null, activeSandboxId: "sandbox-1", - activeSessionId: "session-1", + pullRequest: null, sandboxes: [ { sandboxId: "sandbox-1", @@ -26,17 +25,6 @@ const sample: TaskRecord = { updatedAt: 1, }, ], - agentType: null, - prSubmitted: false, - diffStat: null, - prUrl: null, - prAuthor: null, - ciStatus: null, - reviewStatus: null, - reviewer: null, - conflictsWithMain: null, - hasUnpushed: null, - parentBranch: null, createdAt: 1, updatedAt: 1, }; diff --git a/foundry/packages/frontend/src/components/dev-panel.tsx b/foundry/packages/frontend/src/components/dev-panel.tsx index 56907ff..947331e 100644 --- a/foundry/packages/frontend/src/components/dev-panel.tsx +++ b/foundry/packages/frontend/src/components/dev-panel.tsx @@ -6,11 +6,10 @@ import { 
subscriptionManager } from "../lib/subscription"; import type { FoundryAppSnapshot, FoundryOrganization, - TaskStatus, - TaskWorkbenchSnapshot, - WorkbenchSandboxSummary, - WorkbenchSessionSummary, - WorkbenchTaskStatus, + TaskWorkspaceSnapshot, + WorkspaceSandboxSummary, + WorkspaceSessionSummary, + WorkspaceTaskStatus, } from "@sandbox-agent/foundry-shared"; import { useSubscription } from "@sandbox-agent/foundry-client"; import type { DebugSubscriptionTopic } from "@sandbox-agent/foundry-client"; @@ -18,7 +17,7 @@ import { describeTaskState } from "../features/tasks/status"; interface DevPanelProps { organizationId: string; - snapshot: TaskWorkbenchSnapshot; + snapshot: TaskWorkspaceSnapshot; organization?: FoundryOrganization | null; focusedTask?: DevPanelFocusedTask | null; } @@ -27,14 +26,12 @@ export interface DevPanelFocusedTask { id: string; repoId: string; title: string | null; - status: WorkbenchTaskStatus; - runtimeStatus?: TaskStatus | null; - statusMessage?: string | null; + status: WorkspaceTaskStatus; branch?: string | null; activeSandboxId?: string | null; activeSessionId?: string | null; - sandboxes?: WorkbenchSandboxSummary[]; - sessions?: WorkbenchSessionSummary[]; + sandboxes?: WorkspaceSandboxSummary[]; + sessions?: WorkspaceSessionSummary[]; } interface TopicInfo { @@ -80,7 +77,7 @@ function timeAgo(ts: number | null): string { } function statusColor(status: string, t: ReturnType): string { - if (status === "new" || status.startsWith("init_") || status.startsWith("archive_") || status.startsWith("kill_") || status.startsWith("pending_")) { + if (status.startsWith("init_") || status.startsWith("archive_") || status.startsWith("kill_") || status.startsWith("pending_")) { return t.statusWarning; } switch (status) { @@ -159,14 +156,16 @@ export const DevPanel = memo(function DevPanel({ organizationId, snapshot, organ }, [now]); const appState = useSubscription(subscriptionManager, "app", {}); + const organizationState = 
useSubscription(subscriptionManager, "organization", { organizationId }); const appSnapshot: FoundryAppSnapshot | null = appState.data ?? null; + const liveGithub = organizationState.data?.github ?? organization?.github ?? null; const repos = snapshot.repos ?? []; const tasks = snapshot.tasks ?? []; const prCount = tasks.filter((task) => task.pullRequest != null).length; - const focusedTaskStatus = focusedTask?.runtimeStatus ?? focusedTask?.status ?? null; - const focusedTaskState = describeTaskState(focusedTaskStatus, focusedTask?.statusMessage ?? null); - const lastWebhookAt = organization?.github.lastWebhookAt ?? null; + const focusedTaskStatus = focusedTask?.status ?? null; + const focusedTaskState = describeTaskState(focusedTaskStatus); + const lastWebhookAt = liveGithub?.lastWebhookAt ?? null; const hasRecentWebhook = lastWebhookAt != null && now - lastWebhookAt < 5 * 60_000; const totalOrgs = appSnapshot?.organizations.length ?? 0; const authStatus = appSnapshot?.auth.status ?? "unknown"; @@ -442,7 +441,7 @@ export const DevPanel = memo(function DevPanel({ organizationId, snapshot, organ {/* GitHub */}
- {organization ? ( + {liveGithub ? (
App Install - - {organization.github.installationStatus.replace(/_/g, " ")} + + {liveGithub.installationStatus.replace(/_/g, " ")}
@@ -465,15 +464,13 @@ export const DevPanel = memo(function DevPanel({ organizationId, snapshot, organ width: "5px", height: "5px", borderRadius: "50%", - backgroundColor: syncStatusColor(organization.github.syncStatus, t), + backgroundColor: syncStatusColor(liveGithub.syncStatus, t), flexShrink: 0, })} /> Sync - {organization.github.syncStatus} - {organization.github.lastSyncAt != null && ( - {timeAgo(organization.github.lastSyncAt)} - )} + {liveGithub.syncStatus} + {liveGithub.lastSyncAt != null && {timeAgo(liveGithub.lastSyncAt)}}
Webhook {lastWebhookAt != null ? ( - {organization.github.lastWebhookEvent} · {timeAgo(lastWebhookAt)} + {liveGithub.lastWebhookEvent} · {timeAgo(lastWebhookAt)} ) : ( never received )}
- - + + +
- {organization.github.connectedAccount && ( -
@{organization.github.connectedAccount}
- )} - {organization.github.lastSyncLabel && ( -
last sync: {organization.github.lastSyncLabel}
+ {liveGithub.connectedAccount &&
@{liveGithub.connectedAccount}
} + {liveGithub.lastSyncLabel &&
last sync: {liveGithub.lastSyncLabel}
} + {liveGithub.syncPhase && ( +
+ phase: {liveGithub.syncPhase.replace(/^syncing_/, "").replace(/_/g, " ")} ({liveGithub.processedRepositoryCount}/ + {liveGithub.totalRepositoryCount}) +
)}
) : ( diff --git a/foundry/packages/frontend/src/components/mock-layout.tsx b/foundry/packages/frontend/src/components/mock-layout.tsx index 1ff4d35..042b5a4 100644 --- a/foundry/packages/frontend/src/components/mock-layout.tsx +++ b/foundry/packages/frontend/src/components/mock-layout.tsx @@ -1,14 +1,17 @@ import { memo, useCallback, useEffect, useLayoutEffect, useMemo, useRef, useState, type PointerEvent as ReactPointerEvent } from "react"; +import { useQuery } from "@tanstack/react-query"; import { useNavigate } from "@tanstack/react-router"; import { useStyletron } from "baseui"; import { + DEFAULT_WORKSPACE_MODEL_GROUPS, + DEFAULT_WORKSPACE_MODEL_ID, createErrorContext, type FoundryOrganization, - type TaskWorkbenchSnapshot, - type WorkbenchOpenPrSummary, - type WorkbenchSessionSummary, - type WorkbenchTaskDetail, - type WorkbenchTaskSummary, + type TaskWorkspaceSnapshot, + type WorkspaceModelGroup, + type WorkspaceSessionSummary, + type WorkspaceTaskDetail, + type WorkspaceTaskSummary, } from "@sandbox-agent/foundry-shared"; import { useSubscription } from "@sandbox-agent/foundry-client"; @@ -39,7 +42,7 @@ import { type Message, type ModelId, } from "./mock-layout/view-model"; -import { activeMockOrganization, useMockAppSnapshot } from "../lib/mock-app"; +import { activeMockOrganization, activeMockUser, useMockAppClient, useMockAppSnapshot } from "../lib/mock-app"; import { backendClient } from "../lib/backend"; import { subscriptionManager } from "../lib/subscription"; import { describeTaskState, isProvisioningTaskStatus } from "../features/tasks/status"; @@ -77,29 +80,38 @@ function sanitizeActiveSessionId(task: Task, sessionId: string | null | undefine return openDiffs.length > 0 ? diffTabId(openDiffs[openDiffs.length - 1]!) : lastAgentSessionId; } -function githubInstallationWarningTitle(organization: FoundryOrganization): string { - return organization.github.installationStatus === "install_required" ? 
"GitHub App not installed" : "GitHub App needs reconnection"; +type GithubStatusView = Pick< + FoundryOrganization["github"], + "connectedAccount" | "installationStatus" | "syncStatus" | "importedRepoCount" | "lastSyncLabel" +> & { + syncPhase?: string | null; + processedRepositoryCount?: number; + totalRepositoryCount?: number; +}; + +function githubInstallationWarningTitle(github: GithubStatusView): string { + return github.installationStatus === "install_required" ? "GitHub App not installed" : "GitHub App needs reconnection"; } -function githubInstallationWarningDetail(organization: FoundryOrganization): string { - const statusDetail = organization.github.lastSyncLabel.trim(); +function githubInstallationWarningDetail(github: GithubStatusView): string { + const statusDetail = github.lastSyncLabel.trim(); const requirementDetail = - organization.github.installationStatus === "install_required" + github.installationStatus === "install_required" ? "Webhooks are required for Foundry to function. Repo sync and PR updates will not work until the GitHub App is installed for this organization." : "Webhook delivery is unavailable. Repo sync and PR updates will not work until the GitHub App is reconnected."; return statusDetail ? `${requirementDetail} ${statusDetail}.` : requirementDetail; } function GithubInstallationWarning({ - organization, + github, css, t, }: { - organization: FoundryOrganization; + github: GithubStatusView; css: ReturnType[0]; t: ReturnType; }) { - if (organization.github.installationStatus === "connected") { + if (github.installationStatus === "connected") { return null; } @@ -123,15 +135,15 @@ function GithubInstallationWarning({ >
-
{githubInstallationWarningTitle(organization)}
-
{githubInstallationWarningDetail(organization)}
+
{githubInstallationWarningTitle(github)}
+
{githubInstallationWarningDetail(github)}
); } function toSessionModel( - summary: WorkbenchSessionSummary, + summary: WorkspaceSessionSummary, sessionDetail?: { draft: Task["sessions"][number]["draft"]; transcript: Task["sessions"][number]["transcript"] }, ): Task["sessions"][number] { return { @@ -155,8 +167,8 @@ function toSessionModel( } function toTaskModel( - summary: WorkbenchTaskSummary, - detail?: WorkbenchTaskDetail, + summary: WorkspaceTaskSummary, + detail?: WorkspaceTaskDetail, sessionCache?: Map, ): Task { const sessions = detail?.sessionsSummary ?? summary.sessionsSummary; @@ -164,13 +176,12 @@ function toTaskModel( id: summary.id, repoId: summary.repoId, title: detail?.title ?? summary.title, - status: detail?.runtimeStatus ?? detail?.status ?? summary.status, - runtimeStatus: detail?.runtimeStatus, - statusMessage: detail?.statusMessage ?? null, + status: detail?.status ?? summary.status, repoName: detail?.repoName ?? summary.repoName, updatedAtMs: detail?.updatedAtMs ?? summary.updatedAtMs, branch: detail?.branch ?? summary.branch, pullRequest: detail?.pullRequest ?? summary.pullRequest, + activeSessionId: detail?.activeSessionId ?? summary.activeSessionId ?? null, sessions: sessions.map((session) => toSessionModel(session, sessionCache?.get(session.id))), fileChanges: detail?.fileChanges ?? [], diffs: detail?.diffs ?? {}, @@ -180,40 +191,6 @@ function toTaskModel( }; } -const OPEN_PR_TASK_PREFIX = "pr:"; - -function openPrTaskId(prId: string): string { - return `${OPEN_PR_TASK_PREFIX}${prId}`; -} - -function isOpenPrTaskId(taskId: string): boolean { - return taskId.startsWith(OPEN_PR_TASK_PREFIX); -} - -function toOpenPrTaskModel(pullRequest: WorkbenchOpenPrSummary): Task { - return { - id: openPrTaskId(pullRequest.prId), - repoId: pullRequest.repoId, - title: pullRequest.title, - status: "new", - runtimeStatus: undefined, - statusMessage: pullRequest.authorLogin ? 
`@${pullRequest.authorLogin}` : null, - repoName: pullRequest.repoFullName, - updatedAtMs: pullRequest.updatedAtMs, - branch: pullRequest.headRefName, - pullRequest: { - number: pullRequest.number, - status: pullRequest.isDraft ? "draft" : "ready", - }, - sessions: [], - fileChanges: [], - diffs: {}, - fileTree: [], - minutesUsed: 0, - activeSandboxId: null, - }; -} - function sessionStateMessage(tab: Task["sessions"][number] | null | undefined): string | null { if (!tab) { return null; @@ -230,18 +207,41 @@ function sessionStateMessage(tab: Task["sessions"][number] | null | undefined): return null; } -function groupRepositories(repos: Array<{ id: string; label: string }>, tasks: Task[]) { +function groupRepositories( + repos: Array<{ id: string; label: string }>, + tasks: Task[], + openPullRequests?: Array<{ + repoId: string; + repoFullName: string; + number: number; + title: string; + state: string; + url: string; + headRefName: string; + authorLogin: string | null; + isDraft: boolean; + }>, +) { return repos .map((repo) => ({ id: repo.id, label: repo.label, updatedAtMs: tasks.filter((task) => task.repoId === repo.id).reduce((latest, task) => Math.max(latest, task.updatedAtMs), 0), tasks: tasks.filter((task) => task.repoId === repo.id).sort((left, right) => right.updatedAtMs - left.updatedAtMs), + pullRequests: (openPullRequests ?? 
[]).filter((pr) => pr.repoId === repo.id), })) - .filter((repo) => repo.tasks.length > 0); + .sort((a, b) => { + // Repos with tasks first, then repos with PRs, then alphabetical + const aHasActivity = a.tasks.length > 0 || a.pullRequests.length > 0; + const bHasActivity = b.tasks.length > 0 || b.pullRequests.length > 0; + if (aHasActivity && !bHasActivity) return -1; + if (!aHasActivity && bHasActivity) return 1; + if (a.updatedAtMs !== b.updatedAtMs) return b.updatedAtMs - a.updatedAtMs; + return a.label.localeCompare(b.label); + }); } -interface WorkbenchActions { +interface WorkspaceActions { createTask(input: { repoId: string; task: string; @@ -250,28 +250,26 @@ interface WorkbenchActions { onBranch?: string; model?: ModelId; }): Promise<{ taskId: string; sessionId?: string }>; - markTaskUnread(input: { taskId: string }): Promise; - renameTask(input: { taskId: string; value: string }): Promise; - renameBranch(input: { taskId: string; value: string }): Promise; - archiveTask(input: { taskId: string }): Promise; - publishPr(input: { taskId: string }): Promise; - revertFile(input: { taskId: string; path: string }): Promise; - updateDraft(input: { taskId: string; sessionId: string; text: string; attachments: LineAttachment[] }): Promise; - sendMessage(input: { taskId: string; sessionId: string; text: string; attachments: LineAttachment[] }): Promise; - stopAgent(input: { taskId: string; sessionId: string }): Promise; - setSessionUnread(input: { taskId: string; sessionId: string; unread: boolean }): Promise; - renameSession(input: { taskId: string; sessionId: string; title: string }): Promise; - closeSession(input: { taskId: string; sessionId: string }): Promise; - addSession(input: { taskId: string; model?: string }): Promise<{ sessionId: string }>; - changeModel(input: { taskId: string; sessionId: string; model: ModelId }): Promise; - reloadGithubOrganization(): Promise; - reloadGithubPullRequests(): Promise; - reloadGithubRepository(repoId: string): Promise; - 
reloadGithubPullRequest(repoId: string, prNumber: number): Promise; + markTaskUnread(input: { repoId: string; taskId: string }): Promise; + renameTask(input: { repoId: string; taskId: string; value: string }): Promise; + archiveTask(input: { repoId: string; taskId: string }): Promise; + publishPr(input: { repoId: string; taskId: string }): Promise; + revertFile(input: { repoId: string; taskId: string; path: string }): Promise; + updateDraft(input: { repoId: string; taskId: string; sessionId: string; text: string; attachments: LineAttachment[] }): Promise; + sendMessage(input: { repoId: string; taskId: string; sessionId: string; text: string; attachments: LineAttachment[] }): Promise; + stopAgent(input: { repoId: string; taskId: string; sessionId: string }): Promise; + selectSession(input: { repoId: string; taskId: string; sessionId: string }): Promise; + setSessionUnread(input: { repoId: string; taskId: string; sessionId: string; unread: boolean }): Promise; + renameSession(input: { repoId: string; taskId: string; sessionId: string; title: string }): Promise; + closeSession(input: { repoId: string; taskId: string; sessionId: string }): Promise; + addSession(input: { repoId: string; taskId: string; model?: string }): Promise<{ sessionId: string }>; + changeModel(input: { repoId: string; taskId: string; sessionId: string; model: ModelId }): Promise; + adminReloadGithubOrganization(): Promise; + adminReloadGithubRepository(repoId: string): Promise; } const TranscriptPanel = memo(function TranscriptPanel({ - taskWorkbenchClient, + taskWorkspaceClient, task, hasSandbox, activeSessionId, @@ -288,9 +286,10 @@ const TranscriptPanel = memo(function TranscriptPanel({ rightSidebarCollapsed, onToggleRightSidebar, selectedSessionHydrating = false, + modelGroups, onNavigateToUsage, }: { - taskWorkbenchClient: WorkbenchActions; + taskWorkspaceClient: WorkspaceActions; task: Task; hasSandbox: boolean; activeSessionId: string | null; @@ -307,11 +306,15 @@ const TranscriptPanel = 
memo(function TranscriptPanel({ rightSidebarCollapsed?: boolean; onToggleRightSidebar?: () => void; selectedSessionHydrating?: boolean; + modelGroups: WorkspaceModelGroup[]; onNavigateToUsage?: () => void; }) { const t = useFoundryTokens(); - const [defaultModel, setDefaultModel] = useState("claude-sonnet-4"); - const [editingField, setEditingField] = useState<"title" | "branch" | null>(null); + const appSnapshot = useMockAppSnapshot(); + const appClient = useMockAppClient(); + const currentUser = activeMockUser(appSnapshot); + const defaultModel = currentUser?.defaultModel ?? DEFAULT_WORKSPACE_MODEL_ID; + const [editingField, setEditingField] = useState<"title" | null>(null); const [editValue, setEditValue] = useState(""); const [editingSessionId, setEditingSessionId] = useState(null); const [editingSessionName, setEditingSessionName] = useState(""); @@ -333,9 +336,8 @@ const TranscriptPanel = memo(function TranscriptPanel({ const isTerminal = task.status === "archived"; const historyEvents = useMemo(() => buildHistoryEvents(task.sessions), [task.sessions]); const activeMessages = useMemo(() => buildDisplayMessages(activeAgentSession), [activeAgentSession]); - const taskRuntimeStatus = task.runtimeStatus ?? task.status; - const taskState = describeTaskState(taskRuntimeStatus, task.statusMessage ?? null); - const taskProvisioning = isProvisioningTaskStatus(taskRuntimeStatus); + const taskState = describeTaskState(task.status); + const taskProvisioning = isProvisioningTaskStatus(task.status); const taskProvisioningMessage = taskState.detail; const activeSessionMessage = sessionStateMessage(activeAgentSession); const showPendingSessionState = @@ -344,16 +346,17 @@ const TranscriptPanel = memo(function TranscriptPanel({ (activeAgentSession.status === "pending_provision" || activeAgentSession.status === "pending_session_create" || activeAgentSession.status === "error") && activeMessages.length === 0; const serverDraft = promptSession?.draft.text ?? 
""; - const serverAttachments = promptSession?.draft.attachments ?? []; + const serverAttachments = promptSession?.draft.attachments; + const serverAttachmentsJson = JSON.stringify(serverAttachments ?? []); // Sync server → local only when user hasn't typed recently (3s cooldown) const DRAFT_SYNC_COOLDOWN_MS = 3_000; useEffect(() => { if (Date.now() - lastEditTimeRef.current > DRAFT_SYNC_COOLDOWN_MS) { setLocalDraft(serverDraft); - setLocalAttachments(serverAttachments); + setLocalAttachments(serverAttachments ?? []); } - }, [serverDraft, serverAttachments]); + }, [serverDraft, serverAttachmentsJson]); // Reset local draft immediately on session/task switch useEffect(() => { @@ -436,14 +439,15 @@ const TranscriptPanel = memo(function TranscriptPanel({ return; } - void taskWorkbenchClient.setSessionUnread({ + void taskWorkspaceClient.setSessionUnread({ + repoId: task.repoId, taskId: task.id, sessionId: activeAgentSession.id, unread: false, }); }, [activeAgentSession?.id, activeAgentSession?.unread, task.id]); - const startEditingField = useCallback((field: "title" | "branch", value: string) => { + const startEditingField = useCallback((field: "title", value: string) => { setEditingField(field); setEditValue(value); }, []); @@ -453,18 +457,14 @@ const TranscriptPanel = memo(function TranscriptPanel({ }, []); const commitEditingField = useCallback( - (field: "title" | "branch") => { + (field: "title") => { const value = editValue.trim(); if (!value) { setEditingField(null); return; } - if (field === "title") { - void taskWorkbenchClient.renameTask({ taskId: task.id, value }); - } else { - void taskWorkbenchClient.renameBranch({ taskId: task.id, value }); - } + void taskWorkspaceClient.renameTask({ repoId: task.repoId, taskId: task.id, value }); setEditingField(null); }, [editValue, task.id], @@ -474,7 +474,8 @@ const TranscriptPanel = memo(function TranscriptPanel({ const flushDraft = useCallback( (text: string, nextAttachments: LineAttachment[], sessionId: string) => 
{ - void taskWorkbenchClient.updateDraft({ + void taskWorkspaceClient.updateDraft({ + repoId: task.repoId, taskId: task.id, sessionId, text, @@ -535,7 +536,8 @@ const TranscriptPanel = memo(function TranscriptPanel({ onSetActiveSessionId(promptSession.id); onSetLastAgentSessionId(promptSession.id); - void taskWorkbenchClient.sendMessage({ + void taskWorkspaceClient.sendMessage({ + repoId: task.repoId, taskId: task.id, sessionId: promptSession.id, text, @@ -548,7 +550,8 @@ const TranscriptPanel = memo(function TranscriptPanel({ return; } - void taskWorkbenchClient.stopAgent({ + void taskWorkspaceClient.stopAgent({ + repoId: task.repoId, taskId: task.id, sessionId: promptSession.id, }); @@ -560,9 +563,15 @@ const TranscriptPanel = memo(function TranscriptPanel({ if (!isDiffTab(sessionId)) { onSetLastAgentSessionId(sessionId); + void taskWorkspaceClient.selectSession({ + repoId: task.repoId, + taskId: task.id, + sessionId, + }); const session = task.sessions.find((candidate) => candidate.id === sessionId); if (session?.unread) { - void taskWorkbenchClient.setSessionUnread({ + void taskWorkspaceClient.setSessionUnread({ + repoId: task.repoId, taskId: task.id, sessionId, unread: false, @@ -571,14 +580,14 @@ const TranscriptPanel = memo(function TranscriptPanel({ onSyncRouteSession(task.id, sessionId); } }, - [task.id, task.sessions, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession], + [task.id, task.repoId, task.sessions, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession], ); const setSessionUnread = useCallback( (sessionId: string, unread: boolean) => { - void taskWorkbenchClient.setSessionUnread({ taskId: task.id, sessionId, unread }); + void taskWorkspaceClient.setSessionUnread({ repoId: task.repoId, taskId: task.id, sessionId, unread }); }, - [task.id], + [task.id, task.repoId], ); const startRenamingSession = useCallback( @@ -610,7 +619,8 @@ const TranscriptPanel = memo(function TranscriptPanel({ return; } - void 
taskWorkbenchClient.renameSession({ + void taskWorkspaceClient.renameSession({ + repoId: task.repoId, taskId: task.id, sessionId: editingSessionId, title: trimmedName, @@ -631,9 +641,9 @@ const TranscriptPanel = memo(function TranscriptPanel({ } onSyncRouteSession(task.id, nextSessionId); - void taskWorkbenchClient.closeSession({ taskId: task.id, sessionId }); + void taskWorkspaceClient.closeSession({ repoId: task.repoId, taskId: task.id, sessionId }); }, - [activeSessionId, task.id, task.sessions, lastAgentSessionId, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession], + [activeSessionId, task.id, task.repoId, task.sessions, lastAgentSessionId, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession], ); const closeDiffTab = useCallback( @@ -651,12 +661,12 @@ const TranscriptPanel = memo(function TranscriptPanel({ const addSession = useCallback(() => { void (async () => { - const { sessionId } = await taskWorkbenchClient.addSession({ taskId: task.id }); + const { sessionId } = await taskWorkspaceClient.addSession({ repoId: task.repoId, taskId: task.id }); onSetLastAgentSessionId(sessionId); onSetActiveSessionId(sessionId); onSyncRouteSession(task.id, sessionId); })(); - }, [task.id, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession]); + }, [task.id, task.repoId, onSetActiveSessionId, onSetLastAgentSessionId, onSyncRouteSession]); const changeModel = useCallback( (model: ModelId) => { @@ -664,7 +674,8 @@ const TranscriptPanel = memo(function TranscriptPanel({ throw new Error(`Unable to change model for task ${task.id} without an active prompt session`); } - void taskWorkbenchClient.changeModel({ + void taskWorkspaceClient.changeModel({ + repoId: task.repoId, taskId: task.id, sessionId: promptSession.id, model, @@ -939,7 +950,7 @@ const TranscriptPanel = memo(function TranscriptPanel({ messageRefs={messageRefs} historyEvents={historyEvents} onSelectHistoryEvent={jumpToHistoryEvent} - targetMessageId={pendingHistoryTarget && 
activeSessionId === pendingHistoryTarget.sessionId ? pendingHistoryTarget.messageId : null} + targetMessageId={pendingHistoryTarget && activeAgentSession?.id === pendingHistoryTarget.sessionId ? pendingHistoryTarget.messageId : null} onTargetMessageResolved={() => setPendingHistoryTarget(null)} copiedMessageId={copiedMessageId} onCopyMessage={(message) => { @@ -958,6 +969,7 @@ const TranscriptPanel = memo(function TranscriptPanel({ textareaRef={textareaRef} placeholder={!promptSession.created ? "Describe your task..." : "Send a message..."} attachments={attachments} + modelGroups={modelGroups} defaultModel={defaultModel} model={promptSession.model} isRunning={promptSession.status === "running"} @@ -966,7 +978,9 @@ const TranscriptPanel = memo(function TranscriptPanel({ onStop={stopAgent} onRemoveAttachment={removeAttachment} onChangeModel={changeModel} - onSetDefaultModel={setDefaultModel} + onSetDefaultModel={(model) => { + void appClient.setDefaultModel(model); + }} /> ) : null}
@@ -1280,45 +1294,37 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } const [css] = useStyletron(); const t = useFoundryTokens(); const navigate = useNavigate(); - const taskWorkbenchClient = useMemo( + const taskWorkspaceClient = useMemo( () => ({ - createTask: (input) => backendClient.createWorkbenchTask(organizationId, input), - markTaskUnread: (input) => backendClient.markWorkbenchUnread(organizationId, input), - renameTask: (input) => backendClient.renameWorkbenchTask(organizationId, input), - renameBranch: (input) => backendClient.renameWorkbenchBranch(organizationId, input), - archiveTask: async (input) => backendClient.runAction(organizationId, input.taskId, "archive"), - publishPr: (input) => backendClient.publishWorkbenchPr(organizationId, input), - revertFile: (input) => backendClient.revertWorkbenchFile(organizationId, input), - updateDraft: (input) => backendClient.updateWorkbenchDraft(organizationId, input), - sendMessage: (input) => backendClient.sendWorkbenchMessage(organizationId, input), - stopAgent: (input) => backendClient.stopWorkbenchSession(organizationId, input), - setSessionUnread: (input) => backendClient.setWorkbenchSessionUnread(organizationId, input), - renameSession: (input) => backendClient.renameWorkbenchSession(organizationId, input), - closeSession: (input) => backendClient.closeWorkbenchSession(organizationId, input), - addSession: (input) => backendClient.createWorkbenchSession(organizationId, input), - changeModel: (input) => backendClient.changeWorkbenchModel(organizationId, input), - reloadGithubOrganization: () => backendClient.reloadGithubOrganization(organizationId), - reloadGithubPullRequests: () => backendClient.reloadGithubPullRequests(organizationId), - reloadGithubRepository: (repoId) => backendClient.reloadGithubRepository(organizationId, repoId), - reloadGithubPullRequest: (repoId, prNumber) => backendClient.reloadGithubPullRequest(organizationId, repoId, prNumber), + createTask: 
(input) => backendClient.createWorkspaceTask(organizationId, input), + markTaskUnread: (input) => backendClient.markWorkspaceUnread(organizationId, input), + renameTask: (input) => backendClient.renameWorkspaceTask(organizationId, input), + archiveTask: async (input) => backendClient.runAction(organizationId, input.repoId, input.taskId, "archive"), + publishPr: (input) => backendClient.publishWorkspacePr(organizationId, input), + revertFile: (input) => backendClient.revertWorkspaceFile(organizationId, input), + updateDraft: (input) => backendClient.updateWorkspaceDraft(organizationId, input), + sendMessage: (input) => backendClient.sendWorkspaceMessage(organizationId, input), + stopAgent: (input) => backendClient.stopWorkspaceSession(organizationId, input), + selectSession: (input) => backendClient.selectWorkspaceSession(organizationId, input), + setSessionUnread: (input) => backendClient.setWorkspaceSessionUnread(organizationId, input), + renameSession: (input) => backendClient.renameWorkspaceSession(organizationId, input), + closeSession: (input) => backendClient.closeWorkspaceSession(organizationId, input), + addSession: (input) => backendClient.createWorkspaceSession(organizationId, input), + changeModel: (input) => backendClient.changeWorkspaceModel(organizationId, input), + adminReloadGithubOrganization: () => backendClient.adminReloadGithubOrganization(organizationId), + adminReloadGithubRepository: (repoId) => backendClient.adminReloadGithubRepository(organizationId, repoId), }), [organizationId], ); const organizationState = useSubscription(subscriptionManager, "organization", { organizationId }); - const organizationRepos = organizationState.data?.repos ?? []; - const taskSummaries = organizationState.data?.taskSummaries ?? []; - const openPullRequests = organizationState.data?.openPullRequests ?? 
[]; - const openPullRequestsByTaskId = useMemo( - () => new Map(openPullRequests.map((pullRequest) => [openPrTaskId(pullRequest.prId), pullRequest])), - [openPullRequests], - ); - const selectedOpenPullRequest = useMemo( - () => (selectedTaskId ? (openPullRequestsByTaskId.get(selectedTaskId) ?? null) : null), - [openPullRequestsByTaskId, selectedTaskId], - ); + const organizationReposData = organizationState.data?.repos; + const taskSummariesData = organizationState.data?.taskSummaries; + const openPullRequestsData = organizationState.data?.openPullRequests; + const organizationRepos = organizationReposData ?? []; + const taskSummaries = taskSummariesData ?? []; const selectedTaskSummary = useMemo( () => taskSummaries.find((task) => task.id === selectedTaskId) ?? taskSummaries[0] ?? null, - [selectedTaskId, taskSummaries], + [selectedTaskId, taskSummariesData], ); const taskState = useSubscription( subscriptionManager, @@ -1359,6 +1365,20 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } : null, ); const hasSandbox = Boolean(activeSandbox) && sandboxState.status !== "error"; + const modelGroupsQuery = useQuery({ + queryKey: ["mock-layout", "workspace-model-groups", organizationId, activeSandbox?.sandboxProviderId ?? "", activeSandbox?.sandboxId ?? ""], + enabled: Boolean(activeSandbox?.sandboxId), + staleTime: 30_000, + refetchOnWindowFocus: false, + queryFn: async () => { + if (!activeSandbox) { + throw new Error("Cannot load workspace model groups without an active sandbox."); + } + + return await backendClient.getSandboxWorkspaceModelGroups(organizationId, activeSandbox.sandboxProviderId, activeSandbox.sandboxId); + }, + }); + const modelGroups = modelGroupsQuery.data && modelGroupsQuery.data.length > 0 ? 
modelGroupsQuery.data : DEFAULT_WORKSPACE_MODEL_GROUPS; const tasks = useMemo(() => { const sessionCache = new Map(); if (selectedTaskSummary && taskState.data) { @@ -1383,12 +1403,14 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } const hydratedTasks = taskSummaries.map((summary) => summary.id === selectedTaskSummary?.id ? toTaskModel(summary, taskState.data, sessionCache) : toTaskModel(summary), ); - const openPrTasks = openPullRequests.map((pullRequest) => toOpenPrTaskModel(pullRequest)); - return [...hydratedTasks, ...openPrTasks].sort((left, right) => right.updatedAtMs - left.updatedAtMs); - }, [openPullRequests, selectedTaskSummary, selectedSessionId, sessionState.data, taskState.data, taskSummaries, organizationId]); - const rawRepositories = useMemo(() => groupRepositories(organizationRepos, tasks), [tasks, organizationRepos]); + return hydratedTasks.sort((left, right) => right.updatedAtMs - left.updatedAtMs); + }, [selectedTaskSummary, selectedSessionId, sessionState.data, taskState.data, taskSummariesData, organizationId]); + const openPullRequests = openPullRequestsData ?? []; + const rawRepositories = useMemo(() => groupRepositories(organizationRepos, tasks, openPullRequests), [tasks, organizationReposData, openPullRequestsData]); const appSnapshot = useMockAppSnapshot(); + const currentUser = activeMockUser(appSnapshot); const activeOrg = activeMockOrganization(appSnapshot); + const liveGithub = organizationState.data?.github ?? activeOrg?.github ?? 
null; const navigateToUsage = useCallback(() => { if (activeOrg) { void navigate({ to: "/organizations/$organizationId/billing" as never, params: { organizationId: activeOrg.id } as never }); @@ -1413,11 +1435,9 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } const leftWidthRef = useRef(leftWidth); const rightWidthRef = useRef(rightWidth); const autoCreatingSessionForTaskRef = useRef>(new Set()); - const resolvingOpenPullRequestsRef = useRef>(new Set()); const [leftSidebarOpen, setLeftSidebarOpen] = useState(true); const [rightSidebarOpen, setRightSidebarOpen] = useState(true); const [leftSidebarPeeking, setLeftSidebarPeeking] = useState(false); - const [materializingOpenPrId, setMaterializingOpenPrId] = useState(null); const showDevPanel = useDevPanel(); const peekTimeoutRef = useRef | null>(null); @@ -1484,80 +1504,17 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } }, []); const activeTask = useMemo(() => { - const realTasks = tasks.filter((task) => !isOpenPrTaskId(task.id)); - if (selectedOpenPullRequest) { - return null; - } if (selectedTaskId) { - return realTasks.find((task) => task.id === selectedTaskId) ?? realTasks[0] ?? null; + return tasks.find((task) => task.id === selectedTaskId) ?? tasks[0] ?? null; } - return realTasks[0] ?? 
null; - }, [selectedOpenPullRequest, selectedTaskId, tasks]); - - const materializeOpenPullRequest = useCallback( - async (pullRequest: WorkbenchOpenPrSummary) => { - if (resolvingOpenPullRequestsRef.current.has(pullRequest.prId)) { - return; - } - - resolvingOpenPullRequestsRef.current.add(pullRequest.prId); - setMaterializingOpenPrId(pullRequest.prId); - - try { - const { taskId, sessionId } = await taskWorkbenchClient.createTask({ - repoId: pullRequest.repoId, - task: `Continue work on GitHub PR #${pullRequest.number}: ${pullRequest.title}`, - model: "gpt-5.3-codex", - title: pullRequest.title, - onBranch: pullRequest.headRefName, - }); - await navigate({ - to: "/organizations/$organizationId/tasks/$taskId", - params: { - organizationId, - taskId, - }, - search: { sessionId: sessionId ?? undefined }, - replace: true, - }); - } catch (error) { - setMaterializingOpenPrId((current) => (current === pullRequest.prId ? null : current)); - resolvingOpenPullRequestsRef.current.delete(pullRequest.prId); - logger.error( - { - prId: pullRequest.prId, - repoId: pullRequest.repoId, - branchName: pullRequest.headRefName, - ...createErrorContext(error), - }, - "failed_to_materialize_open_pull_request_task", - ); - } - }, - [navigate, taskWorkbenchClient, organizationId], - ); - - useEffect(() => { - if (!selectedOpenPullRequest) { - if (materializingOpenPrId) { - resolvingOpenPullRequestsRef.current.delete(materializingOpenPrId); - } - setMaterializingOpenPrId(null); - return; - } - - void materializeOpenPullRequest(selectedOpenPullRequest); - }, [materializeOpenPullRequest, materializingOpenPrId, selectedOpenPullRequest]); + return tasks[0] ?? 
null; + }, [selectedTaskId, tasks]); useEffect(() => { if (activeTask) { return; } - if (selectedOpenPullRequest || materializingOpenPrId) { - return; - } - const fallbackTaskId = tasks[0]?.id; if (!fallbackTaskId) { return; @@ -1574,11 +1531,13 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } search: { sessionId: fallbackTask?.sessions[0]?.id ?? undefined }, replace: true, }); - }, [activeTask, materializingOpenPrId, navigate, selectedOpenPullRequest, tasks, organizationId]); + }, [activeTask, navigate, tasks, organizationId]); const openDiffs = activeTask ? sanitizeOpenDiffs(activeTask, openDiffsByTask[activeTask.id]) : []; const lastAgentSessionId = activeTask ? sanitizeLastAgentSessionId(activeTask, lastAgentSessionIdByTask[activeTask.id]) : null; - const activeSessionId = activeTask ? sanitizeActiveSessionId(activeTask, activeSessionIdByTask[activeTask.id], openDiffs, lastAgentSessionId) : null; + const activeSessionId = activeTask + ? sanitizeActiveSessionId(activeTask, activeSessionIdByTask[activeTask.id] ?? activeTask.activeSessionId ?? null, openDiffs, lastAgentSessionId) + : null; const selectedSessionHydrating = Boolean( selectedSessionId && activeSessionId === selectedSessionId && sessionState.status === "loading" && !sessionState.data, ); @@ -1635,6 +1594,7 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } }, [activeTask, lastAgentSessionIdByTask, selectedSessionId, syncRouteSession]); useEffect(() => { + const organizationRepos = organizationReposData ?? 
[]; if (selectedNewTaskRepoId && organizationRepos.some((repo) => repo.id === selectedNewTaskRepoId)) { return; } @@ -1644,7 +1604,7 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } if (fallbackRepoId !== selectedNewTaskRepoId) { setSelectedNewTaskRepoId(fallbackRepoId); } - }, [activeTask?.repoId, selectedNewTaskRepoId, organizationRepos]); + }, [activeTask?.repoId, selectedNewTaskRepoId, organizationReposData]); useEffect(() => { if (!activeTask) { @@ -1664,7 +1624,7 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } autoCreatingSessionForTaskRef.current.add(activeTask.id); void (async () => { try { - const { sessionId } = await taskWorkbenchClient.addSession({ taskId: activeTask.id }); + const { sessionId } = await taskWorkspaceClient.addSession({ repoId: activeTask.repoId, taskId: activeTask.id }); syncRouteSession(activeTask.id, sessionId, true); } catch (error) { logger.error( @@ -1672,13 +1632,13 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } taskId: activeTask.id, ...createErrorContext(error), }, - "failed_to_auto_create_workbench_session", + "failed_to_auto_create_workspace_session", ); // Keep the guard in the set on error to prevent retry storms. // The guard is cleared when sessions appear (line above) or the task changes. 
} })(); - }, [activeTask, selectedSessionId, syncRouteSession, taskWorkbenchClient]); + }, [activeTask, selectedSessionId, syncRouteSession, taskWorkspaceClient]); const createTask = useCallback( (overrideRepoId?: string, options?: { title?: string; task?: string; branch?: string; onBranch?: string }) => { @@ -1688,10 +1648,10 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } throw new Error("Cannot create a task without an available repo"); } - const { taskId, sessionId } = await taskWorkbenchClient.createTask({ + const { taskId, sessionId } = await taskWorkspaceClient.createTask({ repoId, task: options?.task ?? "New task", - model: "gpt-5.3-codex", + model: currentUser?.defaultModel ?? DEFAULT_WORKSPACE_MODEL_ID, title: options?.title ?? "New task", ...(options?.branch ? { branch: options.branch } : {}), ...(options?.onBranch ? { onBranch: options.onBranch } : {}), @@ -1706,7 +1666,7 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } }); })(); }, - [navigate, selectedNewTaskRepoId, taskWorkbenchClient, organizationId], + [currentUser?.defaultModel, navigate, selectedNewTaskRepoId, taskWorkspaceClient, organizationId], ); const openDiffTab = useCallback( @@ -1735,14 +1695,6 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } const selectTask = useCallback( (id: string) => { - if (isOpenPrTaskId(id)) { - const pullRequest = openPullRequestsByTaskId.get(id); - if (!pullRequest) { - return; - } - void materializeOpenPullRequest(pullRequest); - return; - } const task = tasks.find((candidate) => candidate.id === id) ?? null; void navigate({ to: "/organizations/$organizationId/tasks/$taskId", @@ -1753,12 +1705,19 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } search: { sessionId: task?.sessions[0]?.id ?? 
undefined }, }); }, - [materializeOpenPullRequest, navigate, openPullRequestsByTaskId, tasks, organizationId], + [navigate, tasks, organizationId], ); - const markTaskUnread = useCallback((id: string) => { - void taskWorkbenchClient.markTaskUnread({ taskId: id }); - }, []); + const markTaskUnread = useCallback( + (id: string) => { + const task = tasks.find((candidate) => candidate.id === id); + if (!task) { + return; + } + void taskWorkspaceClient.markTaskUnread({ repoId: task.repoId, taskId: id }); + }, + [tasks], + ); const renameTask = useCallback( (id: string) => { @@ -1777,29 +1736,7 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } return; } - void taskWorkbenchClient.renameTask({ taskId: id, value: trimmedTitle }); - }, - [tasks], - ); - - const renameBranch = useCallback( - (id: string) => { - const currentTask = tasks.find((task) => task.id === id); - if (!currentTask) { - throw new Error(`Unable to rename missing task ${id}`); - } - - const nextBranch = window.prompt("Rename branch", currentTask.branch ?? 
""); - if (nextBranch === null) { - return; - } - - const trimmedBranch = nextBranch.trim(); - if (!trimmedBranch) { - return; - } - - void taskWorkbenchClient.renameBranch({ taskId: id, value: trimmedBranch }); + void taskWorkspaceClient.renameTask({ repoId: currentTask.repoId, taskId: id, value: trimmedTitle }); }, [tasks], ); @@ -1808,14 +1745,14 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } if (!activeTask) { throw new Error("Cannot archive without an active task"); } - void taskWorkbenchClient.archiveTask({ taskId: activeTask.id }); + void taskWorkspaceClient.archiveTask({ repoId: activeTask.repoId, taskId: activeTask.id }); }, [activeTask]); const publishPr = useCallback(() => { if (!activeTask) { throw new Error("Cannot publish PR without an active task"); } - void taskWorkbenchClient.publishPr({ taskId: activeTask.id }); + void taskWorkspaceClient.publishPr({ repoId: activeTask.repoId, taskId: activeTask.id }); }, [activeTask]); const revertFile = useCallback( @@ -1835,7 +1772,8 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } : (current[activeTask.id] ?? 
null), })); - void taskWorkbenchClient.revertFile({ + void taskWorkspaceClient.revertFile({ + repoId: activeTask.repoId, taskId: activeTask.id, path, }); @@ -1912,7 +1850,6 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } }; if (!activeTask) { - const isMaterializingSelectedOpenPr = Boolean(selectedOpenPullRequest) || materializingOpenPrId != null; return ( <> {dragRegion} @@ -1939,14 +1876,11 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } onSelectNewTaskRepo={setSelectedNewTaskRepoId} onMarkUnread={markTaskUnread} onRenameTask={renameTask} - onRenameBranch={renameBranch} onReorderRepositories={reorderRepositories} taskOrderByRepository={taskOrderByRepository} onReorderTasks={reorderTasks} - onReloadOrganization={() => void taskWorkbenchClient.reloadGithubOrganization()} - onReloadPullRequests={() => void taskWorkbenchClient.reloadGithubPullRequests()} - onReloadRepository={(repoId) => void taskWorkbenchClient.reloadGithubRepository(repoId)} - onReloadPullRequest={(repoId, prNumber) => void taskWorkbenchClient.reloadGithubPullRequest(repoId, prNumber)} + onReloadOrganization={() => void taskWorkspaceClient.adminReloadGithubOrganization()} + onReloadRepository={(repoId) => void taskWorkspaceClient.adminReloadGithubRepository(repoId)} onToggleSidebar={() => setLeftSidebarOpen(false)} /> @@ -1988,7 +1922,7 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } gap: "12px", }} > - {activeOrg?.github.syncStatus === "syncing" || activeOrg?.github.syncStatus === "pending" ? ( + {liveGithub?.syncStatus === "syncing" || liveGithub?.syncStatus === "pending" ? ( <>

Syncing with GitHub

- Importing repos from @{activeOrg.github.connectedAccount || "GitHub"}... - {activeOrg.github.importedRepoCount > 0 && <> {activeOrg.github.importedRepoCount} repos imported so far.} + {liveGithub.lastSyncLabel || `Importing repos from @${liveGithub.connectedAccount || "GitHub"}...`} + {(liveGithub.totalRepositoryCount ?? 0) > 0 && ( + <> + {" "} + {liveGithub.syncPhase === "syncing_repositories" + ? `${liveGithub.importedRepoCount} of ${liveGithub.totalRepositoryCount} repos imported so far.` + : `${liveGithub.processedRepositoryCount} of ${liveGithub.totalRepositoryCount} repos processed in ${liveGithub.syncPhase?.replace(/^syncing_/, "").replace(/_/g, " ") ?? "sync"}.`} + + )}

- ) : isMaterializingSelectedOpenPr && selectedOpenPullRequest ? ( - <> - -

Creating task from pull request

-

- Preparing a task for {selectedOpenPullRequest.title} on {selectedOpenPullRequest.headRefName}. -

- - ) : activeOrg?.github.syncStatus === "error" ? ( + ) : liveGithub?.syncStatus === "error" ? ( <>

GitHub sync failed

There was a problem syncing repos from GitHub. Check the dev panel for details.

@@ -2075,11 +2008,11 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId }
- {activeOrg && } + {liveGithub && } {showDevPanel && ( @@ -2114,14 +2047,11 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } onSelectNewTaskRepo={setSelectedNewTaskRepoId} onMarkUnread={markTaskUnread} onRenameTask={renameTask} - onRenameBranch={renameBranch} onReorderRepositories={reorderRepositories} taskOrderByRepository={taskOrderByRepository} onReorderTasks={reorderTasks} - onReloadOrganization={() => void taskWorkbenchClient.reloadGithubOrganization()} - onReloadPullRequests={() => void taskWorkbenchClient.reloadGithubPullRequests()} - onReloadRepository={(repoId) => void taskWorkbenchClient.reloadGithubRepository(repoId)} - onReloadPullRequest={(repoId, prNumber) => void taskWorkbenchClient.reloadGithubPullRequest(repoId, prNumber)} + onReloadOrganization={() => void taskWorkspaceClient.adminReloadGithubOrganization()} + onReloadRepository={(repoId) => void taskWorkspaceClient.adminReloadGithubRepository(repoId)} onToggleSidebar={() => setLeftSidebarOpen(false)} /> @@ -2169,14 +2099,11 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } onSelectNewTaskRepo={setSelectedNewTaskRepoId} onMarkUnread={markTaskUnread} onRenameTask={renameTask} - onRenameBranch={renameBranch} onReorderRepositories={reorderRepositories} taskOrderByRepository={taskOrderByRepository} onReorderTasks={reorderTasks} - onReloadOrganization={() => void taskWorkbenchClient.reloadGithubOrganization()} - onReloadPullRequests={() => void taskWorkbenchClient.reloadGithubPullRequests()} - onReloadRepository={(repoId) => void taskWorkbenchClient.reloadGithubRepository(repoId)} - onReloadPullRequest={(repoId, prNumber) => void taskWorkbenchClient.reloadGithubPullRequest(repoId, prNumber)} + onReloadOrganization={() => void taskWorkspaceClient.adminReloadGithubOrganization()} + onReloadRepository={(repoId) => void taskWorkspaceClient.adminReloadGithubRepository(repoId)} onToggleSidebar={() => { setLeftSidebarPeeking(false); 
setLeftSidebarOpen(true); @@ -2189,9 +2116,10 @@ export function MockLayout({ organizationId, selectedTaskId, selectedSessionId } {leftSidebarOpen ? : null}
- {activeOrg && } + {liveGithub && } {showDevPanel && ( ({ diff --git a/foundry/packages/frontend/src/components/mock-layout/model-picker.tsx b/foundry/packages/frontend/src/components/mock-layout/model-picker.tsx index ba3f0f3..6ec6ea6 100644 --- a/foundry/packages/frontend/src/components/mock-layout/model-picker.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/model-picker.tsx @@ -2,18 +2,21 @@ import { memo, useState } from "react"; import { useStyletron } from "baseui"; import { StatefulPopover, PLACEMENT } from "baseui/popover"; import { ChevronUp, Star } from "lucide-react"; +import { workspaceModelLabel, type WorkspaceModelGroup } from "@sandbox-agent/foundry-shared"; import { useFoundryTokens } from "../../app/theme"; import { AgentIcon } from "./ui"; -import { MODEL_GROUPS, modelLabel, providerAgent, type ModelId } from "./view-model"; +import { type ModelId } from "./view-model"; const ModelPickerContent = memo(function ModelPickerContent({ + groups, value, defaultModel, onChange, onSetDefault, close, }: { + groups: WorkspaceModelGroup[]; value: ModelId; defaultModel: ModelId; onChange: (id: ModelId) => void; @@ -26,7 +29,7 @@ const ModelPickerContent = memo(function ModelPickerContent({ return (
- {MODEL_GROUPS.map((group) => ( + {groups.map((group) => (
void; @@ -137,7 +142,9 @@ export const ModelPicker = memo(function ModelPicker({ }, }, }} - content={({ close }) => } + content={({ close }) => ( + + )} >
diff --git a/foundry/packages/frontend/src/components/mock-layout/prompt-composer.tsx b/foundry/packages/frontend/src/components/mock-layout/prompt-composer.tsx index 08d72ae..b7e27be 100644 --- a/foundry/packages/frontend/src/components/mock-layout/prompt-composer.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/prompt-composer.tsx @@ -2,6 +2,7 @@ import { memo, type Ref } from "react"; import { useStyletron } from "baseui"; import { ChatComposer, type ChatComposerClassNames } from "@sandbox-agent/react"; import { FileCode, SendHorizonal, Square, X } from "lucide-react"; +import { type WorkspaceModelGroup } from "@sandbox-agent/foundry-shared"; import { useFoundryTokens } from "../../app/theme"; import { ModelPicker } from "./model-picker"; @@ -13,6 +14,7 @@ export const PromptComposer = memo(function PromptComposer({ textareaRef, placeholder, attachments, + modelGroups, defaultModel, model, isRunning, @@ -27,6 +29,7 @@ export const PromptComposer = memo(function PromptComposer({ textareaRef: Ref; placeholder: string; attachments: LineAttachment[]; + modelGroups: WorkspaceModelGroup[]; defaultModel: ModelId; model: ModelId; isRunning: boolean; @@ -172,7 +175,7 @@ export const PromptComposer = memo(function PromptComposer({ renderSubmitContent={() => (isRunning ? : )} renderFooter={() => (
- +
)} /> diff --git a/foundry/packages/frontend/src/components/mock-layout/right-sidebar.tsx b/foundry/packages/frontend/src/components/mock-layout/right-sidebar.tsx index 529da47..cd4c33a 100644 --- a/foundry/packages/frontend/src/components/mock-layout/right-sidebar.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/right-sidebar.tsx @@ -125,7 +125,7 @@ export const RightSidebar = memo(function RightSidebar({ }); observer.observe(node); }, []); - const pullRequestUrl = task.pullRequest != null ? `https://github.com/${task.repoName}/pull/${task.pullRequest.number}` : null; + const pullRequestUrl = task.pullRequest?.url ?? null; const copyFilePath = useCallback(async (path: string) => { try { diff --git a/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx b/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx index 7ccb18c..4e8b7ce 100644 --- a/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/sidebar.tsx @@ -54,10 +54,6 @@ function repositoryIconColor(label: string): string { return REPOSITORY_COLORS[Math.abs(hash) % REPOSITORY_COLORS.length]!; } -function isPullRequestSidebarItem(task: Task): boolean { - return task.id.startsWith("pr:"); -} - export const Sidebar = memo(function Sidebar({ repositories, newTaskRepos, @@ -68,14 +64,11 @@ export const Sidebar = memo(function Sidebar({ onSelectNewTaskRepo, onMarkUnread, onRenameTask, - onRenameBranch, onReorderRepositories, taskOrderByRepository, onReorderTasks, onReloadOrganization, - onReloadPullRequests, onReloadRepository, - onReloadPullRequest, onToggleSidebar, }: { repositories: RepositorySection[]; @@ -87,14 +80,11 @@ export const Sidebar = memo(function Sidebar({ onSelectNewTaskRepo: (repoId: string) => void; onMarkUnread: (id: string) => void; onRenameTask: (id: string) => void; - onRenameBranch: (id: string) => void; onReorderRepositories: (fromIndex: number, toIndex: number) => void; 
taskOrderByRepository: Record; onReorderTasks: (repositoryId: string, fromIndex: number, toIndex: number) => void; onReloadOrganization: () => void; - onReloadPullRequests: () => void; onReloadRepository: (repoId: string) => void; - onReloadPullRequest: (repoId: string, prNumber: number) => void; onToggleSidebar?: () => void; }) { const [css] = useStyletron(); @@ -446,16 +436,6 @@ export const Sidebar = memo(function Sidebar({ > Reload organization -
) : null}
{ if (node) { @@ -667,15 +648,12 @@ export const Sidebar = memo(function Sidebar({ if (item.type === "task") { const { repository, task, taskIndex } = item; const isActive = task.id === activeId; - const isPullRequestItem = isPullRequestSidebarItem(task); const isRunning = task.sessions.some((s) => s.status === "running"); const isProvisioning = - !isPullRequestItem && - ((String(task.status).startsWith("init_") && task.status !== "init_complete") || - task.status === "new" || - task.sessions.some((s) => s.status === "pending_provision" || s.status === "pending_session_create")); + (String(task.status).startsWith("init_") && task.status !== "init_complete") || + task.sessions.some((s) => s.status === "pending_provision" || s.status === "pending_session_create"); const hasUnread = task.sessions.some((s) => s.unread); - const isDraft = task.pullRequest == null || task.pullRequest.status === "draft"; + const isDraft = task.pullRequest?.isDraft ?? true; const totalAdded = task.fileChanges.reduce((sum, file) => sum + file.added, 0); const totalRemoved = task.fileChanges.reduce((sum, file) => sum + file.removed, 0); const hasDiffs = totalAdded > 0 || totalRemoved > 0; @@ -686,6 +664,7 @@ export const Sidebar = memo(function Sidebar({ return (
{ @@ -720,18 +699,11 @@ export const Sidebar = memo(function Sidebar({
onSelect(task.id)} onContextMenu={(event) => { - if (isPullRequestItem && task.pullRequest) { - contextMenu.open(event, [ - { label: "Reload pull request", onClick: () => onReloadPullRequest(task.repoId, task.pullRequest!.number) }, - { label: "Create task", onClick: () => onSelect(task.id) }, - ]); - return; - } - contextMenu.open(event, [ + const items = [ { label: "Rename task", onClick: () => onRenameTask(task.id) }, - { label: "Rename branch", onClick: () => onRenameBranch(task.id) }, { label: "Mark as unread", onClick: () => onMarkUnread(task.id) }, - ]); + ]; + contextMenu.open(event, items); }} className={css({ padding: "8px 12px", @@ -756,11 +728,7 @@ export const Sidebar = memo(function Sidebar({ flexShrink: 0, })} > - {isPullRequestItem ? ( - - ) : ( - - )} +
{task.title} - {isPullRequestItem && task.statusMessage ? ( - - {task.statusMessage} - - ) : null}
{task.pullRequest != null ? ( #{task.pullRequest.number} - {task.pullRequest.status === "draft" ? : null} + {task.pullRequest.isDraft ? : null} ) : ( @@ -814,6 +777,7 @@ export const Sidebar = memo(function Sidebar({ return (
{ @@ -851,6 +815,7 @@ export const Sidebar = memo(function Sidebar({ return (
{ if (node) { diff --git a/foundry/packages/frontend/src/components/mock-layout/terminal-pane.tsx b/foundry/packages/frontend/src/components/mock-layout/terminal-pane.tsx index 95e6876..10d74d7 100644 --- a/foundry/packages/frontend/src/components/mock-layout/terminal-pane.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/terminal-pane.tsx @@ -305,7 +305,8 @@ export function TerminalPane({ organizationId, taskId, isExpanded, onExpand, onC setProcessTabs([]); }, [taskId]); - const processes = processesState.data ?? []; + const processesData = processesState.data; + const processes = processesData ?? []; const openTerminalTab = useCallback((process: SandboxProcessRecord) => { setProcessTabs((current) => { @@ -361,7 +362,7 @@ export function TerminalPane({ organizationId, taskId, isExpanded, onExpand, onC const activeProcessTab = activeSessionId ? (processTabsById.get(activeSessionId) ?? null) : null; const activeTerminalProcess = useMemo( () => (activeProcessTab ? (processes.find((process) => process.id === activeProcessTab.processId) ?? 
null) : null), - [activeProcessTab, processes], + [activeProcessTab, processesData], ); const emptyBodyClassName = css({ diff --git a/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx b/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx index a024871..16f87e6 100644 --- a/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/transcript-header.tsx @@ -30,11 +30,11 @@ export const TranscriptHeader = memo(function TranscriptHeader({ task: Task; hasSandbox: boolean; activeSession: AgentSession | null | undefined; - editingField: "title" | "branch" | null; + editingField: "title" | null; editValue: string; onEditValueChange: (value: string) => void; - onStartEditingField: (field: "title" | "branch", value: string) => void; - onCommitEditingField: (field: "title" | "branch") => void; + onStartEditingField: (field: "title", value: string) => void; + onCommitEditingField: (field: "title") => void; onCancelEditingField: () => void; onSetActiveSessionUnread: (unread: boolean) => void; sidebarCollapsed?: boolean; @@ -49,10 +49,9 @@ export const TranscriptHeader = memo(function TranscriptHeader({ const t = useFoundryTokens(); const isDesktop = !!import.meta.env.VITE_DESKTOP; const needsTrafficLightInset = isDesktop && sidebarCollapsed; - const taskStatus = task.runtimeStatus ?? task.status; const headerStatus = useMemo( - () => deriveHeaderStatus(taskStatus, task.statusMessage ?? null, activeSession?.status ?? null, activeSession?.errorMessage ?? null, hasSandbox), - [taskStatus, task.statusMessage, activeSession?.status, activeSession?.errorMessage, hasSandbox], + () => deriveHeaderStatus(task.status, activeSession?.status ?? null, activeSession?.errorMessage ?? 
null, hasSandbox), + [task.status, activeSession?.status, activeSession?.errorMessage, hasSandbox], ); return ( @@ -118,55 +117,20 @@ export const TranscriptHeader = memo(function TranscriptHeader({ )} {task.branch ? ( - editingField === "branch" ? ( - onEditValueChange(event.target.value)} - onBlur={() => onCommitEditingField("branch")} - onKeyDown={(event) => { - if (event.key === "Enter") { - onCommitEditingField("branch"); - } else if (event.key === "Escape") { - onCancelEditingField(); - } - }} - className={css({ - appearance: "none", - WebkitAppearance: "none", - margin: "0", - outline: "none", - padding: "2px 8px", - borderRadius: "999px", - border: `1px solid ${t.borderFocus}`, - backgroundColor: t.interactiveSubtle, - color: t.textPrimary, - fontSize: "11px", - whiteSpace: "nowrap", - fontFamily: '"IBM Plex Mono", monospace', - minWidth: "60px", - })} - /> - ) : ( - onStartEditingField("branch", task.branch ?? "")} - className={css({ - padding: "2px 8px", - borderRadius: "999px", - border: `1px solid ${t.borderMedium}`, - backgroundColor: t.interactiveSubtle, - color: t.textPrimary, - fontSize: "11px", - whiteSpace: "nowrap", - fontFamily: '"IBM Plex Mono", monospace', - cursor: "pointer", - ":hover": { borderColor: t.borderFocus }, - })} - > - {task.branch} - - ) + + {task.branch} + ) : null}
diff --git a/foundry/packages/frontend/src/components/mock-layout/ui.tsx b/foundry/packages/frontend/src/components/mock-layout/ui.tsx index d39a408..b86ca18 100644 --- a/foundry/packages/frontend/src/components/mock-layout/ui.tsx +++ b/foundry/packages/frontend/src/components/mock-layout/ui.tsx @@ -181,6 +181,8 @@ export const AgentIcon = memo(function AgentIcon({ agent, size = 14 }: { agent: return ; case "Cursor": return ; + default: + return ; } }); diff --git a/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts b/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts index 21228fc..bc6ab87 100644 --- a/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts +++ b/foundry/packages/frontend/src/components/mock-layout/view-model.test.ts @@ -1,8 +1,8 @@ import { describe, expect, it } from "vitest"; -import type { WorkbenchSession } from "@sandbox-agent/foundry-shared"; +import type { WorkspaceSession } from "@sandbox-agent/foundry-shared"; import { buildDisplayMessages } from "./view-model"; -function makeSession(transcript: WorkbenchSession["transcript"]): WorkbenchSession { +function makeSession(transcript: WorkspaceSession["transcript"]): WorkspaceSession { return { id: "session-1", sessionId: "session-1", diff --git a/foundry/packages/frontend/src/components/mock-layout/view-model.ts b/foundry/packages/frontend/src/components/mock-layout/view-model.ts index 83f5c7a..9232293 100644 --- a/foundry/packages/frontend/src/components/mock-layout/view-model.ts +++ b/foundry/packages/frontend/src/components/mock-layout/view-model.ts @@ -1,42 +1,28 @@ +import { + DEFAULT_WORKSPACE_MODEL_GROUPS as SharedModelGroups, + workspaceModelLabel as sharedWorkspaceModelLabel, + workspaceProviderAgent as sharedWorkspaceProviderAgent, +} from "@sandbox-agent/foundry-shared"; import type { - WorkbenchAgentKind as AgentKind, - WorkbenchSession as AgentSession, - WorkbenchDiffLineKind as DiffLineKind, - WorkbenchFileChange as 
FileChange, - WorkbenchFileTreeNode as FileTreeNode, - WorkbenchTask as Task, - WorkbenchHistoryEvent as HistoryEvent, - WorkbenchLineAttachment as LineAttachment, - WorkbenchModelGroup as ModelGroup, - WorkbenchModelId as ModelId, - WorkbenchParsedDiffLine as ParsedDiffLine, - WorkbenchRepositorySection as RepositorySection, - WorkbenchTranscriptEvent as TranscriptEvent, + WorkspaceAgentKind as AgentKind, + WorkspaceSession as AgentSession, + WorkspaceDiffLineKind as DiffLineKind, + WorkspaceFileChange as FileChange, + WorkspaceFileTreeNode as FileTreeNode, + WorkspaceTask as Task, + WorkspaceHistoryEvent as HistoryEvent, + WorkspaceLineAttachment as LineAttachment, + WorkspaceModelGroup as ModelGroup, + WorkspaceModelId as ModelId, + WorkspaceParsedDiffLine as ParsedDiffLine, + WorkspaceRepositorySection as RepositorySection, + WorkspaceTranscriptEvent as TranscriptEvent, } from "@sandbox-agent/foundry-shared"; import { extractEventText } from "../../features/sessions/model"; export type { RepositorySection }; -export const MODEL_GROUPS: ModelGroup[] = [ - { - provider: "Claude", - models: [ - { id: "claude-sonnet-4", label: "Sonnet 4" }, - { id: "claude-opus-4", label: "Opus 4" }, - ], - }, - { - provider: "OpenAI", - models: [ - { id: "gpt-5.3-codex", label: "GPT-5.3 Codex" }, - { id: "gpt-5.4", label: "GPT-5.4" }, - { id: "gpt-5.2-codex", label: "GPT-5.2 Codex" }, - { id: "gpt-5.1-codex-max", label: "GPT-5.1 Codex Max" }, - { id: "gpt-5.2", label: "GPT-5.2" }, - { id: "gpt-5.1-codex-mini", label: "GPT-5.1 Codex Mini" }, - ], - }, -]; +export const MODEL_GROUPS: ModelGroup[] = SharedModelGroups; export function formatRelativeAge(updatedAtMs: number, nowMs = Date.now()): string { const deltaSeconds = Math.max(0, Math.floor((nowMs - updatedAtMs) / 1000)); @@ -94,15 +80,11 @@ export function formatMessageDuration(durationMs: number): string { } export function modelLabel(id: ModelId): string { - const group = MODEL_GROUPS.find((candidate) => 
candidate.models.some((model) => model.id === id)); - const model = group?.models.find((candidate) => candidate.id === id); - return model && group ? `${group.provider} ${model.label}` : id; + return sharedWorkspaceModelLabel(id, MODEL_GROUPS); } export function providerAgent(provider: string): AgentKind { - if (provider === "Claude") return "Claude"; - if (provider === "OpenAI") return "Codex"; - return "Cursor"; + return sharedWorkspaceProviderAgent(provider); } const DIFF_PREFIX = "diff:"; diff --git a/foundry/packages/frontend/src/components/organization-dashboard.tsx b/foundry/packages/frontend/src/components/organization-dashboard.tsx index 461ee90..4f54ac3 100644 --- a/foundry/packages/frontend/src/components/organization-dashboard.tsx +++ b/foundry/packages/frontend/src/components/organization-dashboard.tsx @@ -1,5 +1,5 @@ import { useEffect, useMemo, useState, type ReactNode } from "react"; -import type { AgentType, RepoBranchRecord, RepoOverview, TaskWorkbenchSnapshot, WorkbenchTaskStatus } from "@sandbox-agent/foundry-shared"; +import type { RepoBranchRecord, RepoOverview, TaskWorkspaceSnapshot, WorkspaceTaskStatus } from "@sandbox-agent/foundry-shared"; import { currentFoundryOrganization, useSubscription } from "@sandbox-agent/foundry-client"; import { useMutation, useQuery } from "@tanstack/react-query"; import { Link, useNavigate } from "@tanstack/react-router"; @@ -14,7 +14,6 @@ import { StyledDivider } from "baseui/divider"; import { styled, useStyletron } from "baseui"; import { HeadingSmall, HeadingXSmall, LabelSmall, LabelXSmall, MonoLabelSmall, ParagraphSmall } from "baseui/typography"; import { Bot, CircleAlert, FolderGit2, GitBranch, MessageSquareText, SendHorizontal } from "lucide-react"; -import { formatDiffStat } from "../features/tasks/model"; import { deriveHeaderStatus, describeTaskState } from "../features/tasks/status"; import { HeaderStatusPill } from "./mock-layout/ui"; import { buildTranscript, resolveSessionSelection } from 
"../features/sessions/model"; @@ -95,25 +94,13 @@ const FILTER_OPTIONS: SelectItem[] = [ { id: "all", label: "All Branches" }, ]; -const AGENT_OPTIONS: SelectItem[] = [ - { id: "codex", label: "codex" }, - { id: "claude", label: "claude" }, -]; - -function statusKind(status: WorkbenchTaskStatus): StatusTagKind { +function statusKind(status: WorkspaceTaskStatus): StatusTagKind { if (status === "running") return "positive"; if (status === "error") return "negative"; - if (status === "new" || String(status).startsWith("init_")) return "warning"; + if (String(status).startsWith("init_")) return "warning"; return "neutral"; } -function normalizeAgent(agent: string | null): AgentType | undefined { - if (agent === "claude" || agent === "codex") { - return agent; - } - return undefined; -} - function formatTime(value: number): string { return new Date(value).toLocaleTimeString([], { hour: "2-digit", minute: "2-digit" }); } @@ -160,7 +147,7 @@ function repoSummary(overview: RepoOverview | undefined): { if (row.taskId) { mapped += 1; } - if (row.prNumber && row.prState !== "MERGED" && row.prState !== "CLOSED") { + if (row.pullRequest && row.pullRequest.state !== "MERGED" && row.pullRequest.state !== "CLOSED") { openPrs += 1; } } @@ -174,15 +161,25 @@ function repoSummary(overview: RepoOverview | undefined): { } function branchKind(row: RepoBranchRecord): StatusTagKind { - if (row.prState === "OPEN" || row.prState === "DRAFT") { + if (row.pullRequest?.isDraft || row.pullRequest?.state === "OPEN") { return "warning"; } - if (row.prState === "MERGED") { + if (row.pullRequest?.state === "MERGED") { return "positive"; } return "neutral"; } +function branchPullRequestLabel(branch: RepoBranchRecord): string { + if (!branch.pullRequest) { + return "no pr"; + } + if (branch.pullRequest.isDraft) { + return "draft"; + } + return branch.pullRequest.state.toLowerCase(); +} + function matchesOverviewFilter(branch: RepoBranchRecord, filter: RepoOverviewFilter): boolean { if (filter === 
"archived") { return branch.taskStatus === "archived"; @@ -332,23 +329,17 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected const [createTaskOpen, setCreateTaskOpen] = useState(false); const [selectedOverviewBranch, setSelectedOverviewBranch] = useState(null); const [overviewFilter, setOverviewFilter] = useState("active"); - const [newAgentType, setNewAgentType] = useState(() => { - try { - const raw = globalThis.localStorage?.getItem("hf.settings.agentType"); - return raw === "claude" || raw === "codex" ? raw : "codex"; - } catch { - return "codex"; - } - }); const [createError, setCreateError] = useState(null); const appState = useSubscription(subscriptionManager, "app", {}); const activeOrg = appState.data ? currentFoundryOrganization(appState.data) : null; const organizationState = useSubscription(subscriptionManager, "organization", { organizationId }); - const repos = organizationState.data?.repos ?? []; - const rows = organizationState.data?.taskSummaries ?? []; - const selectedSummary = useMemo(() => rows.find((row) => row.id === selectedTaskId) ?? rows[0] ?? null, [rows, selectedTaskId]); + const reposData = organizationState.data?.repos; + const rowsData = organizationState.data?.taskSummaries; + const repos = reposData ?? []; + const rows = rowsData ?? []; + const selectedSummary = useMemo(() => rows.find((row) => row.id === selectedTaskId) ?? rows[0] ?? null, [rowsData, selectedTaskId]); const taskState = useSubscription( subscriptionManager, "task", @@ -374,6 +365,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected }); useEffect(() => { + const repos = reposData ?? 
[]; if (repoOverviewMode && selectedRepoId) { setCreateRepoId(selectedRepoId); return; @@ -381,17 +373,11 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected if (!createRepoId && repos.length > 0) { setCreateRepoId(repos[0]!.id); } - }, [createRepoId, repoOverviewMode, repos, selectedRepoId]); - - useEffect(() => { - try { - globalThis.localStorage?.setItem("hf.settings.agentType", newAgentType); - } catch { - // ignore storage failures - } - }, [newAgentType]); + }, [createRepoId, repoOverviewMode, reposData, selectedRepoId]); const repoGroups = useMemo(() => { + const repos = reposData ?? []; + const rows = rowsData ?? []; const byRepo = new Map(); for (const row of rows) { const bucket = byRepo.get(row.repoId); @@ -419,7 +405,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected } return a.repoLabel.localeCompare(b.repoLabel); }); - }, [repos, rows]); + }, [reposData, rowsData]); const selectedForSession = repoOverviewMode ? null : (taskState.data ?? null); @@ -432,6 +418,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected }, [selectedForSession]); useEffect(() => { + const rows = rowsData ?? []; if (!repoOverviewMode && !selectedTaskId && rows.length > 0) { void navigate({ to: "/organizations/$organizationId/tasks/$taskId", @@ -443,18 +430,19 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected replace: true, }); } - }, [navigate, repoOverviewMode, rows, selectedTaskId, organizationId]); + }, [navigate, repoOverviewMode, rowsData, selectedTaskId, organizationId]); useEffect(() => { setActiveSessionId(null); setDraft(""); }, [selectedForSession?.id]); - const sessionRows = selectedForSession?.sessionsSummary ?? []; - const taskRuntimeStatus = selectedForSession?.runtimeStatus ?? selectedForSession?.status ?? null; - const taskStatusState = describeTaskState(taskRuntimeStatus, selectedForSession?.statusMessage ?? 
null); + const sessionRowsData = selectedForSession?.sessionsSummary; + const sessionRows = sessionRowsData ?? []; + const taskStatus = selectedForSession?.status ?? null; + const taskStatusState = describeTaskState(taskStatus); const taskStateSummary = `${taskStatusState.title}. ${taskStatusState.detail}`; - const shouldUseTaskStateEmptyState = Boolean(selectedForSession && taskRuntimeStatus && taskRuntimeStatus !== "running" && taskRuntimeStatus !== "idle"); + const shouldUseTaskStateEmptyState = Boolean(selectedForSession && taskStatus && taskStatus !== "running" && taskStatus !== "idle"); const sessionSelection = useMemo( () => resolveSessionSelection({ @@ -469,7 +457,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected status: session.status, })), }), - [activeSessionId, selectedForSession?.activeSessionId, sessionRows], + [activeSessionId, selectedForSession?.activeSessionId, sessionRowsData], ); const resolvedSessionId = sessionSelection.sessionId; const staleSessionId = sessionSelection.staleSessionId; @@ -485,7 +473,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected } : null, ); - const selectedSessionSummary = useMemo(() => sessionRows.find((session) => session.id === resolvedSessionId) ?? null, [resolvedSessionId, sessionRows]); + const selectedSessionSummary = useMemo(() => sessionRows.find((session) => session.id === resolvedSessionId) ?? null, [resolvedSessionId, sessionRowsData]); const isPendingProvision = selectedSessionSummary?.status === "pending_provision"; const isPendingSessionCreate = selectedSessionSummary?.status === "pending_session_create"; const isSessionError = selectedSessionSummary?.status === "error"; @@ -505,8 +493,6 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected repoId: task.repoId, title: task.title, status: task.status, - runtimeStatus: selectedForSession?.runtimeStatus ?? 
null, - statusMessage: selectedForSession?.statusMessage ?? null, branch: task.branch ?? null, activeSandboxId: selectedForSession?.activeSandboxId ?? null, activeSessionId: selectedForSession?.activeSessionId ?? null, @@ -515,7 +501,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected }; }, [repoOverviewMode, selectedForSession, selectedSummary]); const devPanelSnapshot = useMemo( - (): TaskWorkbenchSnapshot => ({ + (): TaskWorkspaceSnapshot => ({ organizationId, repos: repos.map((repo) => ({ id: repo.id, label: repo.label })), repositories: [], @@ -524,8 +510,6 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected repoId: task.repoId, title: task.title, status: task.status, - runtimeStatus: selectedForSession?.id === task.id ? selectedForSession.runtimeStatus : undefined, - statusMessage: selectedForSession?.id === task.id ? selectedForSession.statusMessage : null, repoName: task.repoName, updatedAtMs: task.updatedAtMs, branch: task.branch ?? null, @@ -546,20 +530,21 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected activeSandboxId: selectedForSession?.id === task.id ? selectedForSession.activeSandboxId : null, })), }), - [repos, rows, selectedForSession, organizationId], + [reposData, rowsData, selectedForSession, organizationId], ); const startSessionFromTask = async (): Promise<{ id: string; status: "running" | "idle" | "error" }> => { if (!selectedForSession || !activeSandbox?.sandboxId) { throw new Error("No sandbox is available for this task"); } + const preferredAgent = selectedSessionSummary?.agent === "Claude" ? "claude" : selectedSessionSummary?.agent === "Codex" ? "codex" : undefined; return backendClient.createSandboxSession({ organizationId, sandboxProviderId: activeSandbox.sandboxProviderId, sandboxId: activeSandbox.sandboxId, prompt: selectedForSession.task, cwd: activeSandbox.cwd ?? 
undefined, - agent: normalizeAgent(selectedForSession.agentType), + agent: preferredAgent, }); }; @@ -616,7 +601,6 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected organizationId, repoId, task, - agentType: newAgentType, explicitTitle: draftTitle || undefined, explicitBranchName: createOnBranch ? undefined : draftBranchName || undefined, onBranch: createOnBranch ?? undefined, @@ -654,16 +638,15 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected setCreateTaskOpen(true); }; - const repoOptions = useMemo(() => repos.map((repo) => createOption({ id: repo.id, label: repo.label })), [repos]); + const repoOptions = useMemo(() => repos.map((repo) => createOption({ id: repo.id, label: repo.label })), [reposData]); const selectedRepoOption = repoOptions.find((option) => option.id === createRepoId) ?? null; - const selectedAgentOption = useMemo(() => createOption(AGENT_OPTIONS.find((option) => option.id === newAgentType) ?? AGENT_OPTIONS[0]!), [newAgentType]); const selectedFilterOption = useMemo( () => createOption(FILTER_OPTIONS.find((option) => option.id === overviewFilter) ?? FILTER_OPTIONS[0]!), [overviewFilter], ); const sessionOptions = useMemo( () => sessionRows.map((session) => createOption({ id: session.id, label: `${session.sessionName} (${session.status})` })), - [sessionRows], + [sessionRowsData], ); const selectedSessionOption = sessionOptions.find((option) => option.id === resolvedSessionId) ?? null; @@ -1057,23 +1040,23 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected
{branch.taskTitle ?? branch.taskId ?? "-"}
- {branch.ciStatus ?? "-"} / {branch.reviewStatus ?? "-"} + {branch.ciStatus ?? "-"} / {branch.pullRequest ? (branch.pullRequest.isDraft ? "draft" : "ready") : "-"}
{formatRelativeAge(branch.updatedAt)}
@@ -1098,7 +1081,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected ) : null} - {branch.prState?.toLowerCase() ?? "no pr"} + {branchPullRequestLabel(branch)}
@@ -1137,8 +1120,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected {selectedForSession ? ( {shouldUseTaskStateEmptyState ? taskStateSummary - : (selectedForSession?.statusMessage ?? - (isPendingProvision ? "The task is still provisioning." : "The session is being created."))} + : isPendingProvision + ? "The task is still provisioning." + : "The session is being created."}
) : null} @@ -1277,15 +1260,13 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected {shouldUseTaskStateEmptyState ? taskStateSummary : isPendingProvision - ? (selectedForSession.statusMessage ?? "Provisioning sandbox...") + ? "Provisioning sandbox..." : isPendingSessionCreate ? "Creating session..." : isSessionError ? (selectedSessionSummary?.errorMessage ?? "Session failed to start.") : !activeSandbox?.sandboxId - ? selectedForSession.statusMessage - ? `Sandbox unavailable: ${selectedForSession.statusMessage}` - : "This task is still provisioning its sandbox." + ? "This task is still provisioning its sandbox." : staleSessionId ? `Session ${staleSessionId} is unavailable. Start a new session to continue.` : resolvedSessionId @@ -1458,7 +1439,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected - +
)} @@ -1483,7 +1464,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected gap: theme.sizing.scale300, })} > - + @@ -1504,9 +1485,8 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected })} > - - - + +
@@ -1529,7 +1509,7 @@ export function OrganizationDashboard({ organizationId, selectedTaskId, selected
- {taskRuntimeStatus === "error" ? ( + {taskStatus === "error" ? (
-
- - Agent - - setWidth(event.target.value)} inputMode="numeric" /> +
+
+ + setHeight(event.target.value)} inputMode="numeric" /> +
+
+ + setDpi(event.target.value)} inputMode="numeric" /> +
+
+ +
+ + +
+ + + {status?.missingDependencies && status.missingDependencies.length > 0 && ( +
+
+ Missing Dependencies +
+
+ {status.missingDependencies.map((dependency) => ( + + {dependency} + + ))} +
+ {status.installCommand && ( + <> +
+ Install command +
+
{status.installCommand}
+ + )} +
+ )} + + {(status?.lastError || status?.runtimeLogPath || (status?.processes?.length ?? 0) > 0) && ( +
+
+ Diagnostics +
+ {status?.lastError && ( +
+
Last error
+
{status.lastError.code}
+
{status.lastError.message}
+
+ )} + {status?.runtimeLogPath && ( +
+
Runtime log
+
{status.runtimeLogPath}
+
+ )} + {status?.processes && status.processes.length > 0 && ( +
+
Processes
+
+ {status.processes.map((process) => ( +
+
+ {process.name} + + {process.running ? "running" : "stopped"} + +
+
{process.pid ? `pid ${process.pid}` : "no pid"}
+ {process.logPath &&
{process.logPath}
} +
+ ))} +
+
+ )} +
+ )} + +
+
+ Latest Screenshot + {status?.state === "active" ? Manual refresh only : null} +
+ + {loading ?
Loading...
: null} + {!loading && !screenshotUrl && ( +
+ {status?.state === "active" ? "No screenshot loaded yet." : "Start the desktop runtime to capture a screenshot."} +
+ )} + {screenshotUrl && ( +
+ Desktop screenshot +
+ )} +
+ + ); +}; + +export default DesktopTab; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8824736..fe82ce0 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -5,8 +5,8 @@ settings: excludeLinksFromLockfile: false overrides: - '@types/react': ^19.1.12 - '@types/react-dom': ^19.1.6 + '@types/react': ^18.3.3 + '@types/react-dom': ^18.3.0 importers: @@ -29,7 +29,7 @@ importers: version: 5.9.3 vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@24.10.9)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 3.2.4(@types/debug@4.1.12)(@types/node@24.10.9)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2) examples/boxlite: dependencies: @@ -73,16 +73,16 @@ importers: devDependencies: '@cloudflare/workers-types': specifier: latest - version: 4.20260316.1 + version: 4.20260313.1 '@types/node': specifier: latest version: 25.5.0 '@types/react': - specifier: ^19.1.12 - version: 19.1.12 + specifier: ^18.3.3 + version: 18.3.27 '@types/react-dom': - specifier: ^19.1.6 - version: 19.2.3(@types/react@19.1.12) + specifier: ^18.3.0 + version: 18.3.7(@types/react@18.3.27) '@vitejs/plugin-react': specifier: ^4.5.0 version: 4.7.0(vite@6.4.1(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) @@ -94,10 +94,10 @@ importers: version: 6.4.1(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2) wrangler: specifier: latest - version: 4.73.0(@cloudflare/workers-types@4.20260316.1) + version: 4.73.0(@cloudflare/workers-types@4.20260313.1) examples/computesdk: dependencies: @@ -122,7 +122,7 @@ importers: version: 5.9.3 vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 
3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2) examples/daytona: dependencies: @@ -154,13 +154,13 @@ importers: dockerode: specifier: latest version: 4.0.9 - get-port: - specifier: latest - version: 7.1.0 sandbox-agent: specifier: workspace:* version: link:../../sdks/typescript devDependencies: + '@types/dockerode': + specifier: latest + version: 4.0.1 '@types/node': specifier: latest version: 25.5.0 @@ -172,7 +172,7 @@ importers: version: 5.9.3 vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2) examples/e2b: dependencies: @@ -197,7 +197,7 @@ importers: version: 5.9.3 vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2) examples/file-system: dependencies: @@ -300,7 +300,7 @@ importers: version: 5.9.3 vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2) examples/permissions: dependencies: @@ -345,6 +345,9 @@ importers: '@sandbox-agent/example-shared': specifier: workspace:* version: link:../shared + '@sandbox-agent/persist-postgres': + specifier: workspace:* + version: link:../../sdks/persist-postgres pg: specifier: latest version: 8.20.0 @@ -370,16 +373,13 @@ importers: '@sandbox-agent/example-shared': specifier: workspace:* version: link:../shared - better-sqlite3: - specifier: ^11.0.0 - version: 11.10.0 + '@sandbox-agent/persist-sqlite': + specifier: workspace:* + version: link:../../sdks/persist-sqlite sandbox-agent: specifier: workspace:* version: link:../../sdks/typescript 
devDependencies: - '@types/better-sqlite3': - specifier: ^7.0.0 - version: 7.6.13 '@types/node': specifier: latest version: 25.5.0 @@ -473,7 +473,7 @@ importers: version: 5.9.3 vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2) foundry/packages/backend: dependencies: @@ -492,9 +492,12 @@ importers: '@sandbox-agent/foundry-shared': specifier: workspace:* version: link:../shared + '@sandbox-agent/persist-rivet': + specifier: workspace:* + version: link:../../../sdks/persist-rivet better-auth: specifier: ^1.5.5 - version: 1.5.5(@cloudflare/workers-types@4.20260316.1)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(pg@8.20.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(solid-js@1.9.11)(vitest@3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) + version: 1.5.5(@cloudflare/workers-types@4.20260313.1)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(pg@8.20.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(solid-js@1.9.11)(vitest@3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2)) dockerode: specifier: ^4.0.9 version: 4.0.9 @@ -503,7 +506,7 @@ importers: version: 0.31.9 drizzle-orm: specifier: ^0.44.5 - version: 0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) + version: 
0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) hono: specifier: ^4.11.9 version: 4.12.2 @@ -512,7 +515,7 @@ importers: version: 10.3.1 rivetkit: specifier: https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a - version: https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a(@e2b/code-interpreter@2.3.3)(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(dockerode@4.0.9)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0) + version: https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a(@e2b/code-interpreter@2.3.3)(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(dockerode@4.0.9)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0) sandbox-agent: specifier: workspace:* version: link:../../../sdks/typescript @@ -543,14 +546,14 @@ importers: version: 19.2.4 rivetkit: specifier: 2.1.6 - version: 2.1.6(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0) + version: 
2.1.6(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0) sandbox-agent: specifier: workspace:* version: link:../../../sdks/typescript devDependencies: '@types/react': - specifier: ^19.1.12 - version: 19.1.12 + specifier: ^18.3.3 + version: 18.3.27 tsup: specifier: ^8.5.0 version: 8.5.1(jiti@1.21.7)(postcss@8.5.6)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.2) @@ -577,7 +580,7 @@ importers: version: 3.13.22(react-dom@19.2.4(react@19.2.4))(react@19.2.4) baseui: specifier: ^16.1.1 - version: 16.1.1(@types/react@19.1.12)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(styletron-react@6.1.1(react@19.2.4)) + version: 16.1.1(@types/react@18.3.27)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(styletron-react@6.1.1(react@19.2.4)) lucide-react: specifier: ^0.542.0 version: 0.542.0(react@19.2.4) @@ -599,19 +602,19 @@ importers: devDependencies: '@react-grab/mcp': specifier: ^0.1.13 - version: 0.1.27(@types/react@19.1.12)(react@19.2.4) + version: 0.1.27(@types/react@18.3.27)(react@19.2.4) '@types/react': - specifier: ^19.1.12 - version: 19.1.12 + specifier: ^18.3.3 + version: 18.3.27 '@types/react-dom': - specifier: ^19.1.6 - version: 19.2.3(@types/react@19.1.12) + specifier: ^18.3.0 + version: 18.3.7(@types/react@18.3.27) '@vitejs/plugin-react': specifier: ^5.0.3 version: 5.1.4(vite@7.3.1(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) react-grab: specifier: ^0.1.13 - version: 0.1.27(@types/react@19.1.12)(react@19.2.4) + version: 0.1.27(@types/react@18.3.27)(react@19.2.4) tsup: specifier: ^8.5.0 version: 8.5.1(jiti@1.21.7)(postcss@8.5.6)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.2) @@ -637,6 +640,9 @@ importers: frontend/packages/inspector: dependencies: + 
'@sandbox-agent/persist-indexeddb': + specifier: workspace:* + version: link:../../../sdks/persist-indexeddb lucide-react: specifier: ^0.469.0 version: 0.469.0(react@18.3.1) @@ -651,17 +657,20 @@ importers: specifier: workspace:* version: link:../../../sdks/react '@types/react': - specifier: ^19.1.12 - version: 19.1.12 + specifier: ^18.3.3 + version: 18.3.27 '@types/react-dom': - specifier: ^19.1.6 - version: 19.2.3(@types/react@19.1.12) + specifier: ^18.3.0 + version: 18.3.7(@types/react@18.3.27) '@vitejs/plugin-react': specifier: ^4.3.1 version: 4.7.0(vite@5.4.21(@types/node@25.5.0)) fake-indexeddb: specifier: ^6.2.4 version: 6.2.5 + jsdom: + specifier: ^26.1.0 + version: 26.1.0 sandbox-agent: specifier: workspace:* version: link:../../../sdks/typescript @@ -673,13 +682,13 @@ importers: version: 5.4.21(@types/node@25.5.0) vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2) frontend/packages/website: dependencies: '@astrojs/react': specifier: ^4.2.0 - version: 4.4.2(@types/node@25.5.0)(@types/react-dom@19.2.3(@types/react@19.1.12))(@types/react@19.1.12)(jiti@1.21.7)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(tsx@4.21.0)(yaml@2.8.2) + version: 4.4.2(@types/node@25.5.0)(@types/react-dom@18.3.7(@types/react@18.3.27))(@types/react@18.3.27)(jiti@1.21.7)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(tsx@4.21.0)(yaml@2.8.2) '@astrojs/sitemap': specifier: ^3.2.0 version: 3.7.0 @@ -706,11 +715,11 @@ importers: version: 3.4.19(tsx@4.21.0)(yaml@2.8.2) devDependencies: '@types/react': - specifier: ^19.1.12 - version: 19.1.12 + specifier: ^18.3.3 + version: 18.3.27 '@types/react-dom': - specifier: ^19.1.6 - version: 19.2.3(@types/react@19.1.12) + specifier: ^18.3.0 + version: 18.3.7(@types/react@18.3.27) typescript: specifier: ^5.7.0 version: 5.9.3 @@ -790,8 +799,8 @@ importers: 
sdks/acp-http-client: dependencies: '@agentclientprotocol/sdk': - specifier: ^0.16.1 - version: 0.16.1(zod@4.3.6) + specifier: ^0.14.1 + version: 0.14.1(zod@4.3.6) devDependencies: '@types/node': specifier: ^22.0.0 @@ -804,7 +813,7 @@ importers: version: 5.9.3 vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 3.2.4(@types/debug@4.1.12)(@types/node@22.19.7)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2) sdks/cli: dependencies: @@ -830,7 +839,7 @@ importers: devDependencies: vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2) sdks/cli-shared: devDependencies: @@ -878,7 +887,7 @@ importers: devDependencies: vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2) sdks/gigacode/platforms/darwin-arm64: {} @@ -891,30 +900,57 @@ importers: sdks/gigacode/platforms/win32-x64: {} sdks/persist-indexeddb: + dependencies: + sandbox-agent: + specifier: workspace:* + version: link:../typescript devDependencies: '@types/node': specifier: ^22.0.0 version: 22.19.7 + fake-indexeddb: + specifier: ^6.2.4 + version: 6.2.5 tsup: specifier: ^8.0.0 version: 8.5.1(jiti@1.21.7)(postcss@8.5.6)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.2) typescript: specifier: ^5.7.0 version: 5.9.3 + vitest: + specifier: ^3.0.0 + version: 3.2.4(@types/debug@4.1.12)(@types/node@22.19.7)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2) sdks/persist-postgres: + dependencies: + pg: + specifier: ^8.16.3 + version: 8.18.0 + sandbox-agent: + specifier: workspace:* + version: link:../typescript devDependencies: '@types/node': specifier: ^22.0.0 version: 22.19.7 + '@types/pg': + specifier: 
^8.15.6 + version: 8.16.0 tsup: specifier: ^8.0.0 version: 8.5.1(jiti@1.21.7)(postcss@8.5.6)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.2) typescript: specifier: ^5.7.0 version: 5.9.3 + vitest: + specifier: ^3.0.0 + version: 3.2.4(@types/debug@4.1.12)(@types/node@22.19.7)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2) sdks/persist-rivet: + dependencies: + sandbox-agent: + specifier: workspace:* + version: link:../typescript devDependencies: '@types/node': specifier: ^22.0.0 @@ -925,9 +961,22 @@ importers: typescript: specifier: ^5.7.0 version: 5.9.3 + vitest: + specifier: ^3.0.0 + version: 3.2.4(@types/debug@4.1.12)(@types/node@22.19.7)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2) sdks/persist-sqlite: + dependencies: + better-sqlite3: + specifier: ^11.0.0 + version: 11.10.0 + sandbox-agent: + specifier: workspace:* + version: link:../typescript devDependencies: + '@types/better-sqlite3': + specifier: ^7.0.0 + version: 7.6.13 '@types/node': specifier: ^22.0.0 version: 22.19.7 @@ -937,22 +986,25 @@ importers: typescript: specifier: ^5.7.0 version: 5.9.3 + vitest: + specifier: ^3.0.0 + version: 3.2.4(@types/debug@4.1.12)(@types/node@22.19.7)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2) sdks/react: dependencies: '@tanstack/react-virtual': specifier: ^3.13.22 - version: 3.13.22(react-dom@19.2.4(react@19.1.1))(react@19.1.1) + version: 3.13.22(react-dom@19.2.4(react@18.3.1))(react@18.3.1) ghostty-web: specifier: ^0.4.0 version: 0.4.0 devDependencies: '@types/react': - specifier: ^19.1.12 - version: 19.1.12 + specifier: ^18.3.3 + version: 18.3.27 react: - specifier: ^19.1.1 - version: 19.1.1 + specifier: ^18.3.1 + version: 18.3.1 sandbox-agent: specifier: workspace:* version: link:../typescript @@ -976,39 +1028,12 @@ importers: specifier: workspace:* version: link:../cli devDependencies: - '@cloudflare/sandbox': - specifier: '>=0.1.0' - version: 0.7.17(@opencode-ai/sdk@1.2.24) - '@daytonaio/sdk': - specifier: '>=0.12.0' - version: 0.151.0(ws@8.19.0) - 
'@e2b/code-interpreter': - specifier: '>=1.0.0' - version: 2.3.3 - '@types/dockerode': - specifier: ^4.0.0 - version: 4.0.1 '@types/node': specifier: ^22.0.0 version: 22.19.7 '@types/ws': specifier: ^8.18.1 version: 8.18.1 - '@vercel/sandbox': - specifier: '>=0.1.0' - version: 1.8.1 - computesdk: - specifier: '>=0.1.0' - version: 2.5.0 - dockerode: - specifier: '>=4.0.0' - version: 4.0.9 - get-port: - specifier: '>=7.0.0' - version: 7.1.0 - modal: - specifier: '>=0.1.0' - version: 0.7.3 openapi-typescript: specifier: ^6.7.0 version: 6.7.6 @@ -1020,7 +1045,7 @@ importers: version: 5.9.3 vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 3.2.4(@types/debug@4.1.12)(@types/node@22.19.7)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2) ws: specifier: ^8.19.0 version: 8.19.0 @@ -1039,7 +1064,7 @@ importers: version: 5.9.3 vitest: specifier: ^3.0.0 - version: 3.2.4(@types/debug@4.1.12)(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + version: 3.2.4(@types/debug@4.1.12)(@types/node@22.19.7)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2) packages: @@ -1048,11 +1073,6 @@ packages: peerDependencies: zod: ^3.25.0 || ^4.0.0 - '@agentclientprotocol/sdk@0.16.1': - resolution: {integrity: sha512-1ad+Sc/0sCtZGHthxxvgEUo5Wsbw16I+aF+YwdiLnPwkZG8KAGUEAPK6LM6Pf69lCyJPt1Aomk1d+8oE3C4ZEw==} - peerDependencies: - zod: ^3.25.0 || ^4.0.0 - '@alloc/quick-lru@5.2.0': resolution: {integrity: sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==} engines: {node: '>=10'} @@ -1061,6 +1081,9 @@ packages: resolution: {integrity: sha512-FSEVWXvwroExDXUu8qV6Wqp2X3D1nJ0Li4LFymCyvCVrm7I3lNfG0zZWSWvGU1RE7891eTnFTyh31L3igOwNKQ==} hasBin: true + '@asamuzakjp/css-color@3.2.0': + resolution: {integrity: sha512-K1A6z8tS3XsmCMM86xoWdn7Fkdn9m6RSVtocUrJYIwZnFVkng/PvkEoWtOWmP+Scc6saYWHWZYbndEEXxl24jw==} + '@asteasolutions/zod-to-openapi@8.4.3': resolution: {integrity: 
sha512-lwfMTN7kDbFDwMniYZUebiGGHxVGBw9ZSI4IBYjm6Ey22Kd5z/fsQb2k+Okr8WMbCCC553vi/ZM9utl5/XcvuQ==} peerDependencies: @@ -1083,8 +1106,8 @@ packages: resolution: {integrity: sha512-1tl95bpGfuaDMDn8O3x/5Dxii1HPvzjvpL2YTuqOOrQehs60I2DKiDgh1jrKc7G8lv+LQT5H15V6QONQ+9waeQ==} engines: {node: 18.20.8 || ^20.3.0 || >=22.0.0} peerDependencies: - '@types/react': ^19.1.12 - '@types/react-dom': ^19.1.6 + '@types/react': ^18.3.3 + '@types/react-dom': ^18.3.0 react: ^17.0.2 || ^18.0.0 || ^19.0.0 react-dom: ^17.0.2 || ^18.0.0 || ^19.0.0 @@ -1644,8 +1667,8 @@ packages: cpu: [x64] os: [win32] - '@cloudflare/workers-types@4.20260316.1': - resolution: {integrity: sha512-HUZ+vQD8/1A4Fz/8WAlzYWcS5W5u3Nu7Dv9adkIkmLfeKqMIRn01vc4nSUBar60KkmohyQHkPi8jtWV/zazvAg==} + '@cloudflare/workers-types@4.20260313.1': + resolution: {integrity: sha512-jMEeX3RKfOSVqqXRKr/ulgglcTloeMzSH3FdzIfqJHtvc12/ELKd5Ldsg8ZHahKX/4eRxYdw3kbzb8jLXbq/jQ==} '@computesdk/cmd@0.4.1': resolution: {integrity: sha512-hhcYrwMnOpRSwWma3gkUeAVsDFG56nURwSaQx8vCepv0IuUv39bK4mMkgszolnUQrVjBDdW7b3lV+l5B2S8fRA==} @@ -1665,6 +1688,34 @@ packages: resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} engines: {node: '>=12'} + '@csstools/color-helpers@5.1.0': + resolution: {integrity: sha512-S11EXWJyy0Mz5SYvRmY8nJYTFFd1LCNV+7cXyAgQtOOuzb4EsgfqDufL+9esx72/eLhsRdGZwaldu/h+E4t4BA==} + engines: {node: '>=18'} + + '@csstools/css-calc@2.1.4': + resolution: {integrity: sha512-3N8oaj+0juUw/1H3YwmDDJXCgTB1gKU6Hc/bB502u9zR0q2vd786XJH9QfrKIEgFlZmhZiq6epXl4rHqhzsIgQ==} + engines: {node: '>=18'} + peerDependencies: + '@csstools/css-parser-algorithms': ^3.0.5 + '@csstools/css-tokenizer': ^3.0.4 + + '@csstools/css-color-parser@3.1.0': + resolution: {integrity: sha512-nbtKwh3a6xNVIp/VRuXV64yTKnb1IjTAEEh3irzS+HkKjAOYLTGNb9pmVNntZ8iVBHcWDA2Dof0QtPgFI1BaTA==} + engines: {node: '>=18'} + peerDependencies: + '@csstools/css-parser-algorithms': ^3.0.5 + '@csstools/css-tokenizer': ^3.0.4 + + 
'@csstools/css-parser-algorithms@3.0.5': + resolution: {integrity: sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ==} + engines: {node: '>=18'} + peerDependencies: + '@csstools/css-tokenizer': ^3.0.4 + + '@csstools/css-tokenizer@3.0.4': + resolution: {integrity: sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==} + engines: {node: '>=18'} + '@date-io/core@2.17.0': resolution: {integrity: sha512-+EQE8xZhRM/hsY0CDTVyayMDDY5ihc4MqXCrPxooKw19yAzUIC6uUqsZeaOFNL9YKTNxYKrJP5DFgE8o5xRCOw==} @@ -3585,21 +3636,27 @@ packages: '@types/node@25.5.0': resolution: {integrity: sha512-jp2P3tQMSxWugkCUKLRPVUpGaL5MVFwF8RDuSRztfwgN1wmqJeMSbKlnEtQqU8UrhTmzEmZdu2I6v2dpp7XIxw==} + '@types/pg@8.16.0': + resolution: {integrity: sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ==} + '@types/pg@8.18.0': resolution: {integrity: sha512-gT+oueVQkqnj6ajGJXblFR4iavIXWsGAFCk3dP4Kki5+a9R4NMt0JARdk6s8cUKcfUoqP5dAtDSLU8xYUTFV+Q==} - '@types/react-dom@19.2.3': - resolution: {integrity: sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==} + '@types/prop-types@15.7.15': + resolution: {integrity: sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==} + + '@types/react-dom@18.3.7': + resolution: {integrity: sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==} peerDependencies: - '@types/react': ^19.1.12 + '@types/react': ^18.3.3 '@types/react-reconciler@0.28.9': resolution: {integrity: sha512-HHM3nxyUZ3zAylX8ZEyrDNd2XZOnQ0D5XfunJF5FLQnZbHHYq4UWvW1QfelQNXv1ICNkwYhfxjwfnqivYB6bFg==} peerDependencies: - '@types/react': ^19.1.12 + '@types/react': ^18.3.3 - '@types/react@19.1.12': - resolution: {integrity: sha512-cMoR+FoAf/Jyq6+Df2/Z41jISvGZZ2eTlnsaJRptmZ76Caldwy1odD4xTr/gNV9VLj0AWgg/nmkevIyUfIIq5w==} + '@types/react@18.3.27': + resolution: 
{integrity: sha512-cisd7gxkzjBKU2GgdYrTdtQx1SORymWyaAFhaxQPK9bYO9ot3Y5OikQRvY0VYQtvwjeQnizCINJAenh/V7MK2w==} '@types/retry@0.12.2': resolution: {integrity: sha512-XISRgDJ2Tc5q4TRqvgJtzsRkFYNJzZrhTdtMoGVBttwzzQJkPnS3WWTFc7kuDRoPtPakl+T+OfdEUjYJj7Jbow==} @@ -3690,6 +3747,10 @@ packages: acp-http-client@0.3.2: resolution: {integrity: sha512-btRUDXAA9BlcTQURsJogdWthoXsKOnMeFhtYlEYQxgt0vq7H6xMfMrewlIgFjRXgRTbru4Fre2T6wS/amTTyjQ==} + agent-base@7.1.4: + resolution: {integrity: sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==} + engines: {node: '>= 14'} + aggregate-error@5.0.0: resolution: {integrity: sha512-gOsf2YwSlleG6IjRYG2A7k0HmBMEo6qVNk9Bp/EaLgAJT5ngH6PXbqa4ItvnEwCm/velL5jAnQgsHsWnjhGmvw==} engines: {node: '>=18'} @@ -4222,6 +4283,10 @@ packages: resolution: {integrity: sha512-0LrrStPOdJj+SPCCrGhzryycLjwcgUSHBtxNA8aIDxf0GLsRh1cKYhB00Gd1lDOS4yGH69+SNn13+TWbVHETFQ==} engines: {node: ^10 || ^12.20.0 || ^14.13.0 || >=15.0.0, npm: '>=7.0.0'} + cssstyle@4.6.0: + resolution: {integrity: sha512-2z+rWdzbbSZv6/rhtvzvqeZQHrBaqgogqt85sqFNbabZOuFbCVFb8kPeEtZjiKkbrm395irpNKiYeFeLiQnFPg==} + engines: {node: '>=18'} + csstype@2.6.11: resolution: {integrity: sha512-l8YyEC9NBkSm783PFTvh0FmJy7s5pFKrDp49ZL7zBGX3fWkO+N4EEyan1qqp8cwPLDcD0OSdyY6hAMoxp34JFw==} @@ -4355,6 +4420,10 @@ packages: resolution: {integrity: sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==} engines: {node: '>=12'} + data-urls@5.0.0: + resolution: {integrity: sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg==} + engines: {node: '>=18'} + date-fns-tz@1.3.8: resolution: {integrity: sha512-qwNXUFtMHTTU6CFSFjoJ80W8Fzzp24LntbjFFBgL/faqds4e5mo9mftoRLgr3Vi1trISsg4awSpYVsOQCRnapQ==} peerDependencies: @@ -4373,6 +4442,9 @@ packages: supports-color: optional: true + decimal.js@10.6.0: + resolution: {integrity: 
sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg==} + decode-named-character-reference@1.3.0: resolution: {integrity: sha512-GtpQYB283KrPp6nRw50q3U9/VfOutZOe103qlN7BPP6Ad27xYnOIWv4lPzo8HCAL+mMZofJ9KEy30fq6MfaK6Q==} @@ -5001,6 +5073,10 @@ packages: resolution: {integrity: sha512-gJnaDHXKDayjt8ue0n8Gs0A007yKXj4Xzb8+cNjZeYsSzzwKc0Lr+OZgYwVfB0pHfUs17EPoLvrOsEaJ9mj+Tg==} engines: {node: '>=16.9.0'} + html-encoding-sniffer@4.0.0: + resolution: {integrity: sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ==} + engines: {node: '>=18'} + html-escaper@3.0.3: resolution: {integrity: sha512-RuMffC89BOWQoY0WKGpIhn5gX3iI54O6nRA0yC124NYVtzjmFWBIiFd8M0x+ZdX0P9R4lADg1mgP8C7PxGOWuQ==} @@ -5014,6 +5090,14 @@ packages: resolution: {integrity: sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==} engines: {node: '>= 0.8'} + http-proxy-agent@7.0.2: + resolution: {integrity: sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==} + engines: {node: '>= 14'} + + https-proxy-agent@7.0.6: + resolution: {integrity: sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==} + engines: {node: '>= 14'} + human-signals@3.0.1: resolution: {integrity: sha512-rQLskxnM/5OCldHo+wNXbpVgDn5A17CUoKX+7Sokwaknlq7CdSnphy0W39GU8dw59XiCXmFXDg4fRuckQRKewQ==} engines: {node: '>=12.20.0'} @@ -5123,6 +5207,9 @@ packages: resolution: {integrity: sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==} engines: {node: '>=12'} + is-potential-custom-element-name@1.0.1: + resolution: {integrity: sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==} + is-promise@4.0.0: resolution: {integrity: sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==} @@ -5186,6 +5273,15 @@ packages: 
resolution: {integrity: sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==} hasBin: true + jsdom@26.1.0: + resolution: {integrity: sha512-Cvc9WUhxSMEo4McES3P7oK3QaXldCfNWp7pl2NNeiIFlCoLr3kfq9kb1fxftiwk1FLV7CvpvDfonxtzUDeSOPg==} + engines: {node: '>=18'} + peerDependencies: + canvas: ^3.0.0 + peerDependenciesMeta: + canvas: + optional: true + jsesc@3.1.0: resolution: {integrity: sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==} engines: {node: '>=6'} @@ -5656,6 +5752,9 @@ packages: nth-check@2.1.1: resolution: {integrity: sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==} + nwsapi@2.2.23: + resolution: {integrity: sha512-7wfH4sLbt4M0gCDzGE6vzQBo0bfTKjU7Sfpqy/7gs1qBfYz2vEJH6vXcBKpO3+6Yu1telwd0t9HpyOoLEQQbIQ==} + object-assign@4.1.1: resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} engines: {node: '>=0.10.0'} @@ -5795,6 +5894,9 @@ packages: pg-cloudflare@1.3.0: resolution: {integrity: sha512-6lswVVSztmHiRtD6I8hw4qP/nDm1EJbKMRhf3HCYaqud7frGysPv7FYJ5noZQdhQtN2xJnimfMtvQq21pdbzyQ==} + pg-connection-string@2.11.0: + resolution: {integrity: sha512-kecgoJwhOpxYU21rZjULrmrBJ698U2RxXofKVzOn5UDj61BPj/qMb7diYUR1nLScCDbrztQFl1TaQZT0t1EtzQ==} + pg-connection-string@2.12.0: resolution: {integrity: sha512-U7qg+bpswf3Cs5xLzRqbXbQl85ng0mfSV/J0nnA31MCLgvEaAo7CIhmeyrmJpOr7o+zm0rXK+hNnT5l9RHkCkQ==} @@ -5802,6 +5904,11 @@ packages: resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} engines: {node: '>=4.0.0'} + pg-pool@3.11.0: + resolution: {integrity: sha512-MJYfvHwtGp870aeusDh+hg9apvOe2zmpZJpyt+BMtzUWlVqbhFmMK6bOBXLBUPd7iRtIF9fZplDc7KrPN3PN7w==} + peerDependencies: + pg: '>=8.0' + pg-pool@3.13.0: resolution: {integrity: 
sha512-gB+R+Xud1gLFuRD/QgOIgGOBE2KCQPaPwkzBBGC9oG69pHTkhQeIuejVIk3/cnDyX39av2AxomQiyPT13WKHQA==} peerDependencies: @@ -5817,6 +5924,15 @@ packages: resolution: {integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==} engines: {node: '>=4'} + pg@8.18.0: + resolution: {integrity: sha512-xqrUDL1b9MbkydY/s+VZ6v+xiMUmOUk7SS9d/1kpyQxoJ6U9AO1oIJyUWVZojbfe5Cc/oluutcgFG4L9RDP1iQ==} + engines: {node: '>= 16.0.0'} + peerDependencies: + pg-native: '>=3.0.1' + peerDependenciesMeta: + pg-native: + optional: true + pg@8.20.0: resolution: {integrity: sha512-ldhMxz2r8fl/6QkXnBD3CR9/xg694oT6DZQ2s6c/RI28OjtSOpxnPrUCGOBJ46RCUxcWdx3p6kw/xnDHjKvaRA==} engines: {node: '>= 16.0.0'} @@ -6021,6 +6137,10 @@ packages: pump@3.0.3: resolution: {integrity: sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==} + punycode@2.3.1: + resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} + engines: {node: '>=6'} + qs@6.14.1: resolution: {integrity: sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==} engines: {node: '>=0.6'} @@ -6073,7 +6193,7 @@ packages: react-focus-lock@2.13.7: resolution: {integrity: sha512-20lpZHEQrXPb+pp1tzd4ULL6DyO5D2KnR0G69tTDdydrmNhU7pdFmbQUYVyHUgp+xN29IuFR0PVuhOmvaZL9Og==} peerDependencies: - '@types/react': ^19.1.12 + '@types/react': ^18.3.3 react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc peerDependenciesMeta: '@types/react': @@ -6142,7 +6262,7 @@ packages: resolution: {integrity: sha512-tsPZ77GR0pISGYmpCLHAbZTabKXZ7zBniKPVqVMMfnXFyo39zq5g/psIlD5vLTKkjQEhWOO8JhqcHnxkwNu6eA==} engines: {node: '>=8.5.0'} peerDependencies: - '@types/react': ^19.1.12 + '@types/react': ^18.3.3 react: ^16.8.0 peerDependenciesMeta: '@types/react': @@ -6172,10 +6292,6 @@ packages: resolution: {integrity: 
sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==} engines: {node: '>=0.10.0'} - react@19.1.1: - resolution: {integrity: sha512-w8nqGImo45dmMIfljjMwOGtbmC/mk4CMYhWIicdSflH91J9TyCyczcPFXJzrZ/ZXcgGRFeP6BU0BEJTw6tZdfQ==} - engines: {node: '>=0.10.0'} - react@19.2.4: resolution: {integrity: sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ==} engines: {node: '>=0.10.0'} @@ -6367,6 +6483,9 @@ packages: resolution: {integrity: sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==} engines: {node: '>= 18'} + rrweb-cssom@0.8.0: + resolution: {integrity: sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw==} + run-parallel@1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} @@ -6390,6 +6509,10 @@ packages: resolution: {integrity: sha512-1n3r/tGXO6b6VXMdFT54SHzT9ytu9yr7TaELowdYpMqY/Ao7EnlQGmAQ1+RatX7Tkkdm6hONI2owqNx2aZj5Sw==} engines: {node: '>=11.0.0'} + saxes@6.0.0: + resolution: {integrity: sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==} + engines: {node: '>=v12.22.7'} + scheduler@0.23.2: resolution: {integrity: sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==} @@ -6630,6 +6753,9 @@ packages: engines: {node: '>=16'} hasBin: true + symbol-tree@3.2.4: + resolution: {integrity: sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==} + tailwindcss@3.4.19: resolution: {integrity: sha512-3ofp+LL8E+pK/JuPLPggVAIaEuhvIz4qNcf3nA1Xn2o/7fb7s/TYpHhwGDv1ZU3PkBluUVaF8PyCHcm48cKLWQ==} engines: {node: '>=14.0.0'} @@ -6714,6 +6840,13 @@ packages: resolution: {integrity: sha512-azl+t0z7pw/z958Gy9svOTuzqIk6xq+NSheJzn5MMWtWTFywIacg2wUlzKFGtt3cthx0r2SxMK0yzJOR0IES7Q==} engines: {node: '>=14.0.0'} + 
tldts-core@6.1.86: + resolution: {integrity: sha512-Je6p7pkk+KMzMv2XXKmAE3McmolOQFdxkKw0R8EYNr7sELW46JqnNeTX8ybPiQgvg1ymCoF8LXs5fzFaZvJPTA==} + + tldts@6.1.86: + resolution: {integrity: sha512-WMi/OQ2axVTf/ykqCQgXiIct+mSQDFdH2fkwhPwgEwvJ1kSzZRiinb0zF2Xb8u4+OqPChmyI6MEu4EezNJz+FQ==} + hasBin: true + to-regex-range@5.0.1: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} @@ -6722,6 +6855,14 @@ packages: resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==} engines: {node: '>=0.6'} + tough-cookie@5.1.2: + resolution: {integrity: sha512-FVDYdxtnj0G6Qm/DhNPSb8Ju59ULcup3tuJxkFb5K8Bv2pUXILbf0xZWU8PX8Ov19OXljbUyveOFwRMwkXzO+A==} + engines: {node: '>=16'} + + tr46@5.1.1: + resolution: {integrity: sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==} + engines: {node: '>=18'} + tree-kill@1.2.2: resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==} hasBin: true @@ -6981,7 +7122,7 @@ packages: resolution: {integrity: sha512-jQL3lRnocaFtu3V00JToYz/4QkNWswxijDaCVNZRiRTO3HQDLsdu1ZtmIUvV4yPp+rvWm5j0y0TG/S61cuijTg==} engines: {node: '>=10'} peerDependencies: - '@types/react': ^19.1.12 + '@types/react': ^18.3.3 react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc peerDependenciesMeta: '@types/react': @@ -6991,7 +7132,7 @@ packages: resolution: {integrity: sha512-Fedw0aZvkhynoPYlA5WXrMCAMm+nSWdZt6lzJQ7Ok8S6Q+VsHmHpRWndVRJ8Be0ZbkfPc5LRYH+5XrzXcEeLRQ==} engines: {node: '>=10'} peerDependencies: - '@types/react': ^19.1.12 + '@types/react': ^18.3.3 react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc peerDependenciesMeta: '@types/react': @@ -7203,12 +7344,33 @@ packages: vt-pbf@3.1.3: resolution: {integrity: 
sha512-2LzDFzt0mZKZ9IpVF2r69G9bXaP2Q2sArJCmcCgvfTdCCZzSyz4aCLoQyUilu37Ll56tCblIZrXFIjNUpGIlmA==} + w3c-xmlserializer@5.0.0: + resolution: {integrity: sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==} + engines: {node: '>=18'} + warning@4.0.3: resolution: {integrity: sha512-rpJyN222KWIvHJ/F53XSZv0Zl/accqHR8et1kpaMTD/fLCRxtV8iX8czMzY7sVZupTI3zcUTg8eycS2kNF9l6w==} web-namespaces@2.0.1: resolution: {integrity: sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==} + webidl-conversions@7.0.0: + resolution: {integrity: sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==} + engines: {node: '>=12'} + + whatwg-encoding@3.1.1: + resolution: {integrity: sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==} + engines: {node: '>=18'} + deprecated: Use @exodus/bytes instead for a more spec-conformant and faster implementation + + whatwg-mimetype@4.0.0: + resolution: {integrity: sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==} + engines: {node: '>=18'} + + whatwg-url@14.2.0: + resolution: {integrity: sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==} + engines: {node: '>=18'} + which-pm-runs@1.1.0: resolution: {integrity: sha512-n1brCuqClxfFfq/Rb0ICg9giSZqCS+pLtccdag6C2HyufBrh3fBOiy9nb6ggRMvWOVH5GrdJskj5iGTZNxd7SA==} engines: {node: '>=4'} @@ -7289,6 +7451,13 @@ packages: resolution: {integrity: sha512-sqMMuL1rc0FmMBOzCpd0yuy9trqF2yTTVe+E9ogwCSWQCdDEtQUwrZPT6AxqtsFGRNxycgncbP/xmOOSPw5ZUw==} engines: {node: '>= 6.0'} + xml-name-validator@5.0.0: + resolution: {integrity: sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg==} + engines: {node: '>=18'} + + xmlchars@2.2.0: + resolution: {integrity: 
sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==} + xtend@4.0.2: resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} engines: {node: '>=0.4'} @@ -7367,14 +7536,18 @@ snapshots: dependencies: zod: 4.3.6 - '@agentclientprotocol/sdk@0.16.1(zod@4.3.6)': - dependencies: - zod: 4.3.6 - '@alloc/quick-lru@5.2.0': {} '@antfu/ni@0.23.2': {} + '@asamuzakjp/css-color@3.2.0': + dependencies: + '@csstools/css-calc': 2.1.4(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-color-parser': 3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + lru-cache: 10.4.3 + '@asteasolutions/zod-to-openapi@8.4.3(zod@4.3.6)': dependencies: openapi3-ts: 4.5.0 @@ -7414,10 +7587,10 @@ snapshots: dependencies: prismjs: 1.30.0 - '@astrojs/react@4.4.2(@types/node@25.5.0)(@types/react-dom@19.2.3(@types/react@19.1.12))(@types/react@19.1.12)(jiti@1.21.7)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(tsx@4.21.0)(yaml@2.8.2)': + '@astrojs/react@4.4.2(@types/node@25.5.0)(@types/react-dom@18.3.7(@types/react@18.3.27))(@types/react@18.3.27)(jiti@1.21.7)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(tsx@4.21.0)(yaml@2.8.2)': dependencies: - '@types/react': 19.1.12 - '@types/react-dom': 19.2.3(@types/react@19.1.12) + '@types/react': 18.3.27 + '@types/react-dom': 18.3.7(@types/react@18.3.27) '@vitejs/plugin-react': 4.7.0(vite@6.4.1(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)) react: 19.2.4 react-dom: 19.2.4(react@19.2.4) @@ -8199,7 +8372,7 @@ snapshots: '@balena/dockerignore@1.0.2': {} - 
'@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1)': + '@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1)': dependencies: '@better-auth/utils': 0.3.1 '@better-fetch/fetch': 1.1.21 @@ -8210,39 +8383,39 @@ snapshots: nanostores: 1.1.1 zod: 4.3.6 optionalDependencies: - '@cloudflare/workers-types': 4.20260316.1 + '@cloudflare/workers-types': 4.20260313.1 - '@better-auth/drizzle-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))': + '@better-auth/drizzle-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))': dependencies: - '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) + '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) '@better-auth/utils': 0.3.1 optionalDependencies: - drizzle-orm: 
0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) + drizzle-orm: 0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) - '@better-auth/kysely-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(kysely@0.28.11)': + '@better-auth/kysely-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(kysely@0.28.11)': dependencies: - '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) + '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) '@better-auth/utils': 0.3.1 kysely: 0.28.11 - '@better-auth/memory-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)': + '@better-auth/memory-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)': dependencies: - '@better-auth/core': 
1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) + '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) '@better-auth/utils': 0.3.1 - '@better-auth/mongo-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)': + '@better-auth/mongo-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)': dependencies: - '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) + '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) '@better-auth/utils': 0.3.1 - '@better-auth/prisma-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)': + '@better-auth/prisma-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)': dependencies: - '@better-auth/core': 
1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) + '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) '@better-auth/utils': 0.3.1 - '@better-auth/telemetry@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))': + '@better-auth/telemetry@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))': dependencies: - '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) + '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) '@better-auth/utils': 0.3.1 '@better-fetch/fetch': 1.1.21 @@ -8352,7 +8525,7 @@ snapshots: '@cloudflare/workerd-windows-64@1.20260312.1': optional: true - '@cloudflare/workers-types@4.20260316.1': {} + '@cloudflare/workers-types@4.20260313.1': {} '@computesdk/cmd@0.4.1': {} @@ -8369,6 +8542,26 @@ snapshots: dependencies: '@jridgewell/trace-mapping': 0.3.9 + '@csstools/color-helpers@5.1.0': {} + + '@csstools/css-calc@2.1.4(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4)': + dependencies: + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + + 
'@csstools/css-color-parser@3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4)': + dependencies: + '@csstools/color-helpers': 5.1.0 + '@csstools/css-calc': 2.1.4(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + + '@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4)': + dependencies: + '@csstools/css-tokenizer': 3.0.4 + + '@csstools/css-tokenizer@3.0.4': {} + '@date-io/core@2.17.0': {} '@date-io/date-fns@2.17.0(date-fns@2.30.0)': @@ -9417,11 +9610,11 @@ snapshots: prompts: 2.4.2 smol-toml: 1.6.0 - '@react-grab/mcp@0.1.27(@types/react@19.1.12)(react@19.2.4)': + '@react-grab/mcp@0.1.27(@types/react@18.3.27)(react@19.2.4)': dependencies: '@modelcontextprotocol/sdk': 1.27.1(zod@3.25.76) fkill: 9.0.0 - react-grab: 0.1.27(@types/react@19.1.12)(react@19.2.4) + react-grab: 0.1.27(@types/react@18.3.27)(react@19.2.4) zod: 3.25.76 transitivePeerDependencies: - '@cfworker/json-schema' @@ -10034,11 +10227,11 @@ snapshots: react-dom: 19.2.4(react@19.2.4) use-sync-external-store: 1.6.0(react@19.2.4) - '@tanstack/react-virtual@3.13.22(react-dom@19.2.4(react@19.1.1))(react@19.1.1)': + '@tanstack/react-virtual@3.13.22(react-dom@19.2.4(react@18.3.1))(react@18.3.1)': dependencies: '@tanstack/virtual-core': 3.13.22 - react: 19.1.1 - react-dom: 19.2.4(react@19.1.1) + react: 18.3.1 + react-dom: 19.2.4(react@18.3.1) '@tanstack/react-virtual@3.13.22(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: @@ -10149,22 +10342,31 @@ snapshots: dependencies: undici-types: 7.18.2 + '@types/pg@8.16.0': + dependencies: + '@types/node': 24.10.9 + pg-protocol: 1.11.0 + pg-types: 2.2.0 + '@types/pg@8.18.0': dependencies: '@types/node': 24.10.9 pg-protocol: 1.11.0 pg-types: 2.2.0 - '@types/react-dom@19.2.3(@types/react@19.1.12)': - dependencies: - '@types/react': 
19.1.12 + '@types/prop-types@15.7.15': {} - '@types/react-reconciler@0.28.9(@types/react@19.1.12)': + '@types/react-dom@18.3.7(@types/react@18.3.27)': dependencies: - '@types/react': 19.1.12 + '@types/react': 18.3.27 - '@types/react@19.1.12': + '@types/react-reconciler@0.28.9(@types/react@18.3.27)': dependencies: + '@types/react': 18.3.27 + + '@types/react@18.3.27': + dependencies: + '@types/prop-types': 15.7.15 csstype: 3.2.3 '@types/retry@0.12.2': {} @@ -10317,6 +10519,8 @@ snapshots: transitivePeerDependencies: - zod + agent-base@7.1.4: {} + aggregate-error@5.0.0: dependencies: clean-stack: 5.3.0 @@ -10521,7 +10725,7 @@ snapshots: baseline-browser-mapping@2.9.18: {} - baseui@16.1.1(@types/react@19.1.12)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(styletron-react@6.1.1(react@19.2.4)): + baseui@16.1.1(@types/react@18.3.27)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(styletron-react@6.1.1(react@19.2.4)): dependencies: '@date-io/date-fns': 2.17.0(date-fns@2.30.0) '@date-io/moment': 2.17.0(moment@2.30.1) @@ -10541,7 +10745,7 @@ snapshots: react: 19.2.4 react-dom: 19.2.4(react@19.2.4) react-dropzone: 9.0.0(react@19.2.4) - react-focus-lock: 2.13.7(@types/react@19.1.12)(react@19.2.4) + react-focus-lock: 2.13.7(@types/react@18.3.27)(react@19.2.4) react-hook-form: 7.71.2(react@19.2.4) react-input-mask: 2.0.4(react-dom@19.2.4(react@19.2.4))(react@19.2.4) react-is: 17.0.2 @@ -10549,7 +10753,7 @@ snapshots: react-movable: 3.4.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4) react-multi-ref: 1.0.2 react-range: 1.10.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4) - react-uid: 2.3.0(@types/react@19.1.12)(react@19.2.4) + react-uid: 2.3.0(@types/react@18.3.27)(react@19.2.4) react-virtualized: 9.22.6(react-dom@19.2.4(react@19.2.4))(react@19.2.4) react-virtualized-auto-sizer: 1.0.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4) react-window: 1.8.5(react-dom@19.2.4(react@19.2.4))(react@19.2.4) @@ -10563,15 +10767,15 @@ snapshots: dependencies: tweetnacl: 0.14.5 - 
better-auth@1.5.5(@cloudflare/workers-types@4.20260316.1)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(pg@8.20.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(solid-js@1.9.11)(vitest@3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2)): + better-auth@1.5.5(@cloudflare/workers-types@4.20260313.1)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(pg@8.20.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(solid-js@1.9.11)(vitest@3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2)): dependencies: - '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) - '@better-auth/drizzle-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0)) - '@better-auth/kysely-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(kysely@0.28.11) - '@better-auth/memory-adapter': 
1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1) - '@better-auth/mongo-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1) - '@better-auth/prisma-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1) - '@better-auth/telemetry': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260316.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1)) + '@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1) + '@better-auth/drizzle-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0)) + '@better-auth/kysely-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(kysely@0.28.11) + '@better-auth/memory-adapter': 
1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1) + '@better-auth/mongo-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1) + '@better-auth/prisma-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1) + '@better-auth/telemetry': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(@cloudflare/workers-types@4.20260313.1)(better-call@1.3.2(zod@4.3.6))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.1)) '@better-auth/utils': 0.3.1 '@better-fetch/fetch': 1.1.21 '@noble/ciphers': 2.1.1 @@ -10584,12 +10788,12 @@ snapshots: zod: 4.3.6 optionalDependencies: drizzle-kit: 0.31.9 - drizzle-orm: 0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) + drizzle-orm: 0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) pg: 8.20.0 react: 19.2.4 react-dom: 19.2.4(react@19.2.4) solid-js: 1.9.11 - vitest: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) + vitest: 3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2) transitivePeerDependencies: - '@cloudflare/workers-types' @@ -10613,9 +10817,9 @@ snapshots: dependencies: file-uri-to-path: 1.0.0 - 
bippy@0.5.32(@types/react@19.1.12)(react@19.2.4): + bippy@0.5.32(@types/react@18.3.27)(react@19.2.4): dependencies: - '@types/react-reconciler': 0.28.9(@types/react@19.1.12) + '@types/react-reconciler': 0.28.9(@types/react@18.3.27) react: 19.2.4 transitivePeerDependencies: - '@types/react' @@ -10919,6 +11123,11 @@ snapshots: dependencies: css-tree: 2.2.1 + cssstyle@4.6.0: + dependencies: + '@asamuzakjp/css-color': 3.2.0 + rrweb-cssom: 0.8.0 + csstype@2.6.11: {} csstype@3.2.3: {} @@ -11075,6 +11284,11 @@ snapshots: d3-transition: 3.0.1(d3-selection@3.0.0) d3-zoom: 3.0.0 + data-urls@5.0.0: + dependencies: + whatwg-mimetype: 4.0.0 + whatwg-url: 14.2.0 + date-fns-tz@1.3.8(date-fns@2.30.0): dependencies: date-fns: 2.30.0 @@ -11087,6 +11301,8 @@ snapshots: dependencies: ms: 2.1.3 + decimal.js@10.6.0: {} + decode-named-character-reference@1.3.0: dependencies: character-entities: 2.0.2 @@ -11193,9 +11409,9 @@ snapshots: transitivePeerDependencies: - supports-color - drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0): + drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0): optionalDependencies: - '@cloudflare/workers-types': 4.20260316.1 + '@cloudflare/workers-types': 4.20260313.1 '@opentelemetry/api': 1.9.0 '@types/better-sqlite3': 7.6.13 '@types/pg': 8.18.0 @@ -11223,7 +11439,7 @@ snapshots: glob: 11.1.0 openapi-fetch: 0.14.1 platform: 1.3.6 - tar: 7.5.7 + tar: 7.5.6 earcut@2.2.4: {} @@ -11827,6 +12043,10 @@ snapshots: hono@4.12.2: {} + html-encoding-sniffer@4.0.0: + dependencies: + whatwg-encoding: 3.1.1 + html-escaper@3.0.3: {} html-void-elements@3.0.0: {} @@ -11841,6 +12061,20 @@ snapshots: statuses: 2.0.2 toidentifier: 1.0.1 + http-proxy-agent@7.0.2: + dependencies: + 
agent-base: 7.1.4 + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + + https-proxy-agent@7.0.6: + dependencies: + agent-base: 7.1.4 + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + human-signals@3.0.1: {} human-signals@5.0.0: {} @@ -11920,6 +12154,8 @@ snapshots: is-plain-obj@4.1.0: {} + is-potential-custom-element-name@1.0.1: {} + is-promise@4.0.0: {} is-stream@3.0.0: {} @@ -11966,6 +12202,33 @@ snapshots: dependencies: argparse: 2.0.1 + jsdom@26.1.0: + dependencies: + cssstyle: 4.6.0 + data-urls: 5.0.0 + decimal.js: 10.6.0 + html-encoding-sniffer: 4.0.0 + http-proxy-agent: 7.0.2 + https-proxy-agent: 7.0.6 + is-potential-custom-element-name: 1.0.1 + nwsapi: 2.2.23 + parse5: 7.3.0 + rrweb-cssom: 0.8.0 + saxes: 6.0.0 + symbol-tree: 3.2.4 + tough-cookie: 5.1.2 + w3c-xmlserializer: 5.0.0 + webidl-conversions: 7.0.0 + whatwg-encoding: 3.1.1 + whatwg-mimetype: 4.0.0 + whatwg-url: 14.2.0 + ws: 8.19.0 + xml-name-validator: 5.0.0 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + jsesc@3.1.0: {} json-schema-traverse@1.0.0: {} @@ -12590,6 +12853,8 @@ snapshots: dependencies: boolbase: 1.0.0 + nwsapi@2.2.23: {} + object-assign@4.1.1: {} object-hash@3.0.0: {} @@ -12733,10 +12998,16 @@ snapshots: pg-cloudflare@1.3.0: optional: true + pg-connection-string@2.11.0: {} + pg-connection-string@2.12.0: {} pg-int8@1.0.1: {} + pg-pool@3.11.0(pg@8.18.0): + dependencies: + pg: 8.18.0 + pg-pool@3.13.0(pg@8.20.0): dependencies: pg: 8.20.0 @@ -12753,6 +13024,16 @@ snapshots: postgres-date: 1.0.7 postgres-interval: 1.2.0 + pg@8.18.0: + dependencies: + pg-connection-string: 2.11.0 + pg-pool: 3.11.0(pg@8.18.0) + pg-protocol: 1.11.0 + pg-types: 2.2.0 + pgpass: 1.0.5 + optionalDependencies: + pg-cloudflare: 1.3.0 + pg@8.20.0: dependencies: pg-connection-string: 2.12.0 @@ -12968,6 +13249,8 @@ snapshots: end-of-stream: 1.4.5 once: 1.4.0 + punycode@2.3.1: {} + qs@6.14.1: dependencies: side-channel: 1.1.0 @@ -13007,9 +13290,9 @@ 
snapshots: react: 18.3.1 scheduler: 0.23.2 - react-dom@19.2.4(react@19.1.1): + react-dom@19.2.4(react@18.3.1): dependencies: - react: 19.1.1 + react: 18.3.1 scheduler: 0.27.0 react-dom@19.2.4(react@19.2.4): @@ -13025,23 +13308,23 @@ snapshots: prop-types-extra: 1.1.1(react@19.2.4) react: 19.2.4 - react-focus-lock@2.13.7(@types/react@19.1.12)(react@19.2.4): + react-focus-lock@2.13.7(@types/react@18.3.27)(react@19.2.4): dependencies: '@babel/runtime': 7.28.6 focus-lock: 1.3.6 prop-types: 15.8.1 react: 19.2.4 react-clientside-effect: 1.2.8(react@19.2.4) - use-callback-ref: 1.3.3(@types/react@19.1.12)(react@19.2.4) - use-sidecar: 1.1.3(@types/react@19.1.12)(react@19.2.4) + use-callback-ref: 1.3.3(@types/react@18.3.27)(react@19.2.4) + use-sidecar: 1.1.3(@types/react@18.3.27)(react@19.2.4) optionalDependencies: - '@types/react': 19.1.12 + '@types/react': 18.3.27 - react-grab@0.1.27(@types/react@19.1.12)(react@19.2.4): + react-grab@0.1.27(@types/react@18.3.27)(react@19.2.4): dependencies: '@medv/finder': 4.0.2 '@react-grab/cli': 0.1.27 - bippy: 0.5.32(@types/react@19.1.12)(react@19.2.4) + bippy: 0.5.32(@types/react@18.3.27)(react@19.2.4) solid-js: 1.9.11 optionalDependencies: react: 19.2.4 @@ -13095,12 +13378,12 @@ snapshots: react-refresh@0.18.0: {} - react-uid@2.3.0(@types/react@19.1.12)(react@19.2.4): + react-uid@2.3.0(@types/react@18.3.27)(react@19.2.4): dependencies: react: 19.2.4 tslib: 1.14.1 optionalDependencies: - '@types/react': 19.1.12 + '@types/react': 18.3.27 react-virtualized-auto-sizer@1.0.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4): dependencies: @@ -13129,8 +13412,6 @@ snapshots: dependencies: loose-envify: 1.4.0 - react@19.1.1: {} - react@19.2.4: {} read-cache@1.0.0: @@ -13290,7 +13571,7 @@ snapshots: reusify@1.1.0: {} - 
rivetkit@2.1.6(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0): + rivetkit@2.1.6(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0): dependencies: '@hono/standard-validator': 0.1.5(@standard-schema/spec@1.1.0)(hono@4.12.2) '@hono/zod-openapi': 1.2.2(hono@4.12.2)(zod@4.3.6) @@ -13318,14 +13599,14 @@ snapshots: '@hono/node-server': 1.19.9(hono@4.12.2) '@hono/node-ws': 1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2) drizzle-kit: 0.31.9 - drizzle-orm: 0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) + drizzle-orm: 0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) ws: 8.19.0 transitivePeerDependencies: - '@standard-schema/spec' - bufferutil - utf-8-validate - 
rivetkit@https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a(@e2b/code-interpreter@2.3.3)(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(dockerode@4.0.9)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0): + rivetkit@https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a(@e2b/code-interpreter@2.3.3)(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(dockerode@4.0.9)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0): dependencies: '@hono/standard-validator': 0.1.5(@standard-schema/spec@1.1.0)(hono@4.12.2) '@hono/zod-openapi': 1.2.2(hono@4.12.2)(zod@4.3.6) @@ -13356,7 +13637,7 @@ snapshots: '@hono/node-ws': 1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2) dockerode: 4.0.9 drizzle-kit: 0.31.9 - drizzle-orm: 0.44.7(@cloudflare/workers-types@4.20260316.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) + drizzle-orm: 0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0) ws: 8.19.0 transitivePeerDependencies: - '@standard-schema/spec' @@ -13408,6 +13689,8 @@ snapshots: transitivePeerDependencies: - supports-color + rrweb-cssom@0.8.0: {} + run-parallel@1.2.0: dependencies: queue-microtask: 1.2.3 @@ -13431,6 +13714,10 @@ snapshots: sax@1.4.4: {} + saxes@6.0.0: + dependencies: + xmlchars: 2.2.0 + scheduler@0.23.2: dependencies: loose-envify: 1.4.0 
@@ -13732,6 +14019,8 @@ snapshots: picocolors: 1.1.1 sax: 1.4.4 + symbol-tree@3.2.4: {} + tailwindcss@3.4.19(tsx@4.21.0)(yaml@2.8.2): dependencies: '@alloc/quick-lru': 5.2.0 @@ -13851,12 +14140,26 @@ snapshots: tinyspy@4.0.4: {} + tldts-core@6.1.86: {} + + tldts@6.1.86: + dependencies: + tldts-core: 6.1.86 + to-regex-range@5.0.1: dependencies: is-number: 7.0.0 toidentifier@1.0.1: {} + tough-cookie@5.1.2: + dependencies: + tldts: 6.1.86 + + tr46@5.1.1: + dependencies: + punycode: 2.3.1 + tree-kill@1.2.2: {} trim-lines@3.0.1: {} @@ -14068,20 +14371,20 @@ snapshots: escalade: 3.2.0 picocolors: 1.1.1 - use-callback-ref@1.3.3(@types/react@19.1.12)(react@19.2.4): + use-callback-ref@1.3.3(@types/react@18.3.27)(react@19.2.4): dependencies: react: 19.2.4 tslib: 2.8.1 optionalDependencies: - '@types/react': 19.1.12 + '@types/react': 18.3.27 - use-sidecar@1.1.3(@types/react@19.1.12)(react@19.2.4): + use-sidecar@1.1.3(@types/react@18.3.27)(react@19.2.4): dependencies: detect-node-es: 1.1.0 react: 19.2.4 tslib: 2.8.1 optionalDependencies: - '@types/react': 19.1.12 + '@types/react': 18.3.27 use-sync-external-store@1.6.0(react@19.2.4): dependencies: @@ -14256,7 +14559,7 @@ snapshots: optionalDependencies: vite: 6.4.1(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2) - vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.19.7)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2): + vitest@3.2.4(@types/debug@4.1.12)(@types/node@22.19.7)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2): dependencies: '@types/chai': 5.2.3 '@vitest/expect': 3.2.4 @@ -14284,6 +14587,7 @@ snapshots: optionalDependencies: '@types/debug': 4.1.12 '@types/node': 22.19.7 + jsdom: 26.1.0 transitivePeerDependencies: - jiti - less @@ -14298,7 +14602,7 @@ snapshots: - tsx - yaml - vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.10.9)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2): + vitest@3.2.4(@types/debug@4.1.12)(@types/node@24.10.9)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2): dependencies: '@types/chai': 5.2.3 
'@vitest/expect': 3.2.4 @@ -14326,6 +14630,7 @@ snapshots: optionalDependencies: '@types/debug': 4.1.12 '@types/node': 24.10.9 + jsdom: 26.1.0 transitivePeerDependencies: - jiti - less @@ -14340,7 +14645,7 @@ snapshots: - tsx - yaml - vitest@3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2): + vitest@3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(jsdom@26.1.0)(tsx@4.21.0)(yaml@2.8.2): dependencies: '@types/chai': 5.2.3 '@vitest/expect': 3.2.4 @@ -14368,6 +14673,7 @@ snapshots: optionalDependencies: '@types/debug': 4.1.12 '@types/node': 25.5.0 + jsdom: 26.1.0 transitivePeerDependencies: - jiti - less @@ -14392,12 +14698,29 @@ snapshots: '@mapbox/vector-tile': 1.3.1 pbf: 3.3.0 + w3c-xmlserializer@5.0.0: + dependencies: + xml-name-validator: 5.0.0 + warning@4.0.3: dependencies: loose-envify: 1.4.0 web-namespaces@2.0.1: {} + webidl-conversions@7.0.0: {} + + whatwg-encoding@3.1.1: + dependencies: + iconv-lite: 0.6.3 + + whatwg-mimetype@4.0.0: {} + + whatwg-url@14.2.0: + dependencies: + tr46: 5.1.1 + webidl-conversions: 7.0.0 + which-pm-runs@1.1.0: {} which@2.0.2: @@ -14421,7 +14744,7 @@ snapshots: '@cloudflare/workerd-linux-arm64': 1.20260312.1 '@cloudflare/workerd-windows-64': 1.20260312.1 - wrangler@4.73.0(@cloudflare/workers-types@4.20260316.1): + wrangler@4.73.0(@cloudflare/workers-types@4.20260313.1): dependencies: '@cloudflare/kv-asset-handler': 0.4.2 '@cloudflare/unenv-preset': 2.15.0(unenv@2.0.0-rc.24)(workerd@1.20260312.1) @@ -14432,7 +14755,7 @@ snapshots: unenv: 2.0.0-rc.24 workerd: 1.20260312.1 optionalDependencies: - '@cloudflare/workers-types': 4.20260316.1 + '@cloudflare/workers-types': 4.20260313.1 fsevents: 2.3.3 transitivePeerDependencies: - bufferutil @@ -14470,6 +14793,10 @@ snapshots: dependencies: os-paths: 4.4.0 + xml-name-validator@5.0.0: {} + + xmlchars@2.2.0: {} + xtend@4.0.2: {} xxhash-wasm@1.1.0: {} diff --git a/research/acp/friction.md b/research/acp/friction.md index 983b966..e5273b8 100644 --- 
a/research/acp/friction.md +++ b/research/acp/friction.md @@ -277,3 +277,13 @@ Update this file continuously during the migration. - Owner: Unassigned. - Status: resolved - Links: `sdks/acp-http-client/src/index.ts`, `sdks/acp-http-client/tests/smoke.test.ts`, `sdks/typescript/tests/integration.test.ts` + +- Date: 2026-03-07 +- Area: Desktop host/runtime API boundary +- Issue: Desktop automation needed screenshot/input/file-transfer-like host capabilities, but routing it through ACP would have mixed agent protocol semantics with host-owned runtime control and binary payloads. +- Impact: A desktop feature built as ACP methods would blur the division between agent/session behavior and Sandbox Agent host/runtime APIs, and would complicate binary screenshot transport. +- Proposed direction: Ship desktop as first-party HTTP endpoints under `/v1/desktop/*`, keep health/install/remediation in the server runtime, and expose the feature through the SDK and inspector without ACP extension methods. +- Decision: Accepted and implemented for phase one. +- Owner: Unassigned. 
+- Status: resolved +- Links: `server/packages/sandbox-agent/src/router.rs`, `server/packages/sandbox-agent/src/desktop_runtime.rs`, `sdks/typescript/src/client.ts`, `frontend/packages/inspector/src/components/debug/DesktopTab.tsx` diff --git a/sdks/react/src/DesktopViewer.tsx b/sdks/react/src/DesktopViewer.tsx new file mode 100644 index 0000000..f1ce711 --- /dev/null +++ b/sdks/react/src/DesktopViewer.tsx @@ -0,0 +1,257 @@ +"use client"; + +import type { CSSProperties, MouseEvent, WheelEvent } from "react"; +import { useEffect, useRef, useState } from "react"; +import type { DesktopMouseButton, DesktopStreamErrorStatus, DesktopStreamReadyStatus, SandboxAgent } from "sandbox-agent"; + +type ConnectionState = "connecting" | "ready" | "closed" | "error"; + +export type DesktopViewerClient = Pick; + +export interface DesktopViewerProps { + client: DesktopViewerClient; + className?: string; + style?: CSSProperties; + imageStyle?: CSSProperties; + height?: number | string; + onConnect?: (status: DesktopStreamReadyStatus) => void; + onDisconnect?: () => void; + onError?: (error: DesktopStreamErrorStatus | Error) => void; +} + +const shellStyle: CSSProperties = { + display: "flex", + flexDirection: "column", + overflow: "hidden", + border: "1px solid rgba(15, 23, 42, 0.14)", + borderRadius: 14, + background: "linear-gradient(180deg, rgba(248, 250, 252, 0.96) 0%, rgba(226, 232, 240, 0.92) 100%)", + boxShadow: "0 20px 40px rgba(15, 23, 42, 0.08)", +}; + +const statusBarStyle: CSSProperties = { + display: "flex", + alignItems: "center", + justifyContent: "space-between", + gap: 12, + padding: "10px 14px", + borderBottom: "1px solid rgba(15, 23, 42, 0.08)", + background: "rgba(255, 255, 255, 0.78)", + color: "#0f172a", + fontSize: 12, + lineHeight: 1.4, +}; + +const viewportStyle: CSSProperties = { + position: "relative", + display: "flex", + alignItems: "center", + justifyContent: "center", + overflow: "hidden", + background: "radial-gradient(circle at top, rgba(14, 165, 
233, 0.18), transparent 45%), linear-gradient(180deg, #0f172a 0%, #111827 100%)", +}; + +const imageBaseStyle: CSSProperties = { + display: "block", + width: "100%", + height: "100%", + objectFit: "contain", + userSelect: "none", +}; + +const hintStyle: CSSProperties = { + opacity: 0.66, +}; + +const getStatusColor = (state: ConnectionState): string => { + switch (state) { + case "ready": + return "#15803d"; + case "error": + return "#b91c1c"; + case "closed": + return "#b45309"; + default: + return "#475569"; + } +}; + +export const DesktopViewer = ({ client, className, style, imageStyle, height = 480, onConnect, onDisconnect, onError }: DesktopViewerProps) => { + const wrapperRef = useRef(null); + const sessionRef = useRef | null>(null); + const [connectionState, setConnectionState] = useState("connecting"); + const [statusMessage, setStatusMessage] = useState("Starting desktop stream..."); + const [frameUrl, setFrameUrl] = useState(null); + const [resolution, setResolution] = useState<{ width: number; height: number } | null>(null); + + useEffect(() => { + let cancelled = false; + let lastObjectUrl: string | null = null; + let session: ReturnType | null = null; + + setConnectionState("connecting"); + setStatusMessage("Starting desktop stream..."); + setResolution(null); + + const connect = async () => { + try { + await client.startDesktopStream(); + if (cancelled) { + return; + } + + session = client.connectDesktopStream(); + sessionRef.current = session; + session.onReady((status) => { + if (cancelled) { + return; + } + setConnectionState("ready"); + setStatusMessage("Desktop stream connected."); + setResolution({ width: status.width, height: status.height }); + onConnect?.(status); + }); + session.onFrame((frame) => { + if (cancelled) { + return; + } + const nextUrl = URL.createObjectURL(new Blob([frame.slice().buffer], { type: "image/jpeg" })); + setFrameUrl((current) => { + if (current) { + URL.revokeObjectURL(current); + } + return nextUrl; + }); + if 
(lastObjectUrl) { + URL.revokeObjectURL(lastObjectUrl); + } + lastObjectUrl = nextUrl; + }); + session.onError((error) => { + if (cancelled) { + return; + } + setConnectionState("error"); + setStatusMessage(error instanceof Error ? error.message : error.message); + onError?.(error); + }); + session.onClose(() => { + if (cancelled) { + return; + } + setConnectionState((current) => (current === "error" ? current : "closed")); + setStatusMessage((current) => (current === "Desktop stream connected." ? "Desktop stream disconnected." : current)); + onDisconnect?.(); + }); + } catch (error) { + if (cancelled) { + return; + } + const nextError = error instanceof Error ? error : new Error("Failed to initialize desktop stream."); + setConnectionState("error"); + setStatusMessage(nextError.message); + onError?.(nextError); + } + }; + + void connect(); + + return () => { + cancelled = true; + session?.close(); + sessionRef.current = null; + void client.stopDesktopStream().catch(() => undefined); + setFrameUrl((current) => { + if (current) { + URL.revokeObjectURL(current); + } + return null; + }); + if (lastObjectUrl) { + URL.revokeObjectURL(lastObjectUrl); + } + }; + }, [client, onConnect, onDisconnect, onError]); + + const scalePoint = (clientX: number, clientY: number) => { + const wrapper = wrapperRef.current; + if (!wrapper || !resolution) { + return null; + } + const rect = wrapper.getBoundingClientRect(); + if (rect.width === 0 || rect.height === 0) { + return null; + } + const x = Math.max(0, Math.min(resolution.width, ((clientX - rect.left) / rect.width) * resolution.width)); + const y = Math.max(0, Math.min(resolution.height, ((clientY - rect.top) / rect.height) * resolution.height)); + return { + x: Math.round(x), + y: Math.round(y), + }; + }; + + const buttonFromMouseEvent = (event: MouseEvent): DesktopMouseButton => { + switch (event.button) { + case 1: + return "middle"; + case 2: + return "right"; + default: + return "left"; + } + }; + + const withSession = 
(callback: (session: NonNullable>) => void) => { + const session = sessionRef.current; + if (session) { + callback(session); + } + }; + + return ( +
+
+ {statusMessage} + {resolution ? `${resolution.width}×${resolution.height}` : "Awaiting frames"} +
+
{ + const point = scalePoint(event.clientX, event.clientY); + if (!point) { + return; + } + withSession((session) => session.moveMouse(point.x, point.y)); + }} + onMouseDown={(event) => { + event.preventDefault(); + const point = scalePoint(event.clientX, event.clientY); + withSession((session) => session.mouseDown(buttonFromMouseEvent(event), point?.x, point?.y)); + }} + onMouseUp={(event) => { + const point = scalePoint(event.clientX, event.clientY); + withSession((session) => session.mouseUp(buttonFromMouseEvent(event), point?.x, point?.y)); + }} + onWheel={(event: WheelEvent) => { + event.preventDefault(); + const point = scalePoint(event.clientX, event.clientY); + if (!point) { + return; + } + withSession((session) => session.scroll(point.x, point.y, Math.round(event.deltaX), Math.round(event.deltaY))); + }} + onKeyDown={(event) => { + withSession((session) => session.keyDown(event.key)); + }} + onKeyUp={(event) => { + withSession((session) => session.keyUp(event.key)); + }} + > + {frameUrl ? Desktop stream : null} +
+
+ ); +}; diff --git a/sdks/react/src/index.ts b/sdks/react/src/index.ts index 55d4a91..1d8d1e1 100644 --- a/sdks/react/src/index.ts +++ b/sdks/react/src/index.ts @@ -1,6 +1,7 @@ export { AgentConversation } from "./AgentConversation.tsx"; export { AgentTranscript } from "./AgentTranscript.tsx"; export { ChatComposer } from "./ChatComposer.tsx"; +export { DesktopViewer } from "./DesktopViewer.tsx"; export { ProcessTerminal } from "./ProcessTerminal.tsx"; export { useTranscriptVirtualizer } from "./useTranscriptVirtualizer.ts"; @@ -23,6 +24,11 @@ export type { ChatComposerProps, } from "./ChatComposer.tsx"; +export type { + DesktopViewerClient, + DesktopViewerProps, +} from "./DesktopViewer.tsx"; + export type { ProcessTerminalClient, ProcessTerminalProps, diff --git a/sdks/typescript/src/client.ts b/sdks/typescript/src/client.ts index 10200bc..94a3375 100644 --- a/sdks/typescript/src/client.ts +++ b/sdks/typescript/src/client.ts @@ -23,12 +23,35 @@ import { type SetSessionModeRequest, } from "acp-http-client"; import type { SandboxProvider } from "./providers/types.ts"; +import { DesktopStreamSession, type DesktopStreamConnectOptions } from "./desktop-stream.ts"; import { type AcpServerListResponse, type AgentInfo, type AgentInstallRequest, type AgentInstallResponse, type AgentListResponse, + type DesktopActionResponse, + type DesktopDisplayInfoResponse, + type DesktopKeyboardDownRequest, + type DesktopKeyboardPressRequest, + type DesktopKeyboardTypeRequest, + type DesktopMouseClickRequest, + type DesktopMouseDownRequest, + type DesktopMouseDragRequest, + type DesktopMouseMoveRequest, + type DesktopMousePositionResponse, + type DesktopMouseScrollRequest, + type DesktopMouseUpRequest, + type DesktopKeyboardUpRequest, + type DesktopRecordingInfo, + type DesktopRecordingListResponse, + type DesktopRecordingStartRequest, + type DesktopRegionScreenshotQuery, + type DesktopScreenshotQuery, + type DesktopStartRequest, + type DesktopStatusResponse, + type 
DesktopStreamStatusResponse, + type DesktopWindowListResponse, type FsActionResponse, type FsDeleteQuery, type FsEntriesQuery, @@ -53,7 +76,9 @@ import { type ProcessInfo, type ProcessInputRequest, type ProcessInputResponse, + type ProcessListQuery, type ProcessListResponse, + type ProcessOwner, type ProcessLogEntry, type ProcessLogsQuery, type ProcessLogsResponse, @@ -201,6 +226,7 @@ export interface ProcessTerminalConnectOptions extends ProcessTerminalWebSocketU } export type ProcessTerminalSessionOptions = ProcessTerminalConnectOptions; +export type DesktopStreamSessionOptions = DesktopStreamConnectOptions; export class SandboxAgentError extends Error { readonly status: number; @@ -1533,6 +1559,148 @@ export class SandboxAgent { return this.requestHealth(); } + async startDesktop(request: DesktopStartRequest = {}): Promise { + return this.requestJson("POST", `${API_PREFIX}/desktop/start`, { + body: request, + }); + } + + async stopDesktop(): Promise { + return this.requestJson("POST", `${API_PREFIX}/desktop/stop`); + } + + async getDesktopStatus(): Promise { + return this.requestJson("GET", `${API_PREFIX}/desktop/status`); + } + + async getDesktopDisplayInfo(): Promise { + return this.requestJson("GET", `${API_PREFIX}/desktop/display/info`); + } + + async takeDesktopScreenshot(query: DesktopScreenshotQuery = {}): Promise { + const response = await this.requestRaw("GET", `${API_PREFIX}/desktop/screenshot`, { + query, + accept: "image/*", + }); + const buffer = await response.arrayBuffer(); + return new Uint8Array(buffer); + } + + async takeDesktopRegionScreenshot(query: DesktopRegionScreenshotQuery): Promise { + const response = await this.requestRaw("GET", `${API_PREFIX}/desktop/screenshot/region`, { + query, + accept: "image/*", + }); + const buffer = await response.arrayBuffer(); + return new Uint8Array(buffer); + } + + async getDesktopMousePosition(): Promise { + return this.requestJson("GET", `${API_PREFIX}/desktop/mouse/position`); + } + + async 
moveDesktopMouse(request: DesktopMouseMoveRequest): Promise { + return this.requestJson("POST", `${API_PREFIX}/desktop/mouse/move`, { + body: request, + }); + } + + async clickDesktop(request: DesktopMouseClickRequest): Promise { + return this.requestJson("POST", `${API_PREFIX}/desktop/mouse/click`, { + body: request, + }); + } + + async mouseDownDesktop(request: DesktopMouseDownRequest): Promise { + return this.requestJson("POST", `${API_PREFIX}/desktop/mouse/down`, { + body: request, + }); + } + + async mouseUpDesktop(request: DesktopMouseUpRequest): Promise { + return this.requestJson("POST", `${API_PREFIX}/desktop/mouse/up`, { + body: request, + }); + } + + async dragDesktopMouse(request: DesktopMouseDragRequest): Promise { + return this.requestJson("POST", `${API_PREFIX}/desktop/mouse/drag`, { + body: request, + }); + } + + async scrollDesktop(request: DesktopMouseScrollRequest): Promise { + return this.requestJson("POST", `${API_PREFIX}/desktop/mouse/scroll`, { + body: request, + }); + } + + async typeDesktopText(request: DesktopKeyboardTypeRequest): Promise { + return this.requestJson("POST", `${API_PREFIX}/desktop/keyboard/type`, { + body: request, + }); + } + + async pressDesktopKey(request: DesktopKeyboardPressRequest): Promise { + return this.requestJson("POST", `${API_PREFIX}/desktop/keyboard/press`, { + body: request, + }); + } + + async keyDownDesktop(request: DesktopKeyboardDownRequest): Promise { + return this.requestJson("POST", `${API_PREFIX}/desktop/keyboard/down`, { + body: request, + }); + } + + async keyUpDesktop(request: DesktopKeyboardUpRequest): Promise { + return this.requestJson("POST", `${API_PREFIX}/desktop/keyboard/up`, { + body: request, + }); + } + + async listDesktopWindows(): Promise { + return this.requestJson("GET", `${API_PREFIX}/desktop/windows`); + } + + async startDesktopRecording(request: DesktopRecordingStartRequest = {}): Promise { + return this.requestJson("POST", `${API_PREFIX}/desktop/recording/start`, { + body: 
request, + }); + } + + async stopDesktopRecording(): Promise { + return this.requestJson("POST", `${API_PREFIX}/desktop/recording/stop`); + } + + async listDesktopRecordings(): Promise { + return this.requestJson("GET", `${API_PREFIX}/desktop/recordings`); + } + + async getDesktopRecording(id: string): Promise { + return this.requestJson("GET", `${API_PREFIX}/desktop/recordings/${encodeURIComponent(id)}`); + } + + async downloadDesktopRecording(id: string): Promise { + const response = await this.requestRaw("GET", `${API_PREFIX}/desktop/recordings/${encodeURIComponent(id)}/download`, { + accept: "video/mp4", + }); + const buffer = await response.arrayBuffer(); + return new Uint8Array(buffer); + } + + async deleteDesktopRecording(id: string): Promise { + await this.requestRaw("DELETE", `${API_PREFIX}/desktop/recordings/${encodeURIComponent(id)}`); + } + + async startDesktopStream(): Promise { + return this.requestJson("POST", `${API_PREFIX}/desktop/stream/start`); + } + + async stopDesktopStream(): Promise { + return this.requestJson("POST", `${API_PREFIX}/desktop/stream/stop`); + } + async listAgents(options?: AgentQueryOptions): Promise { return this.requestJson("GET", `${API_PREFIX}/agents`, { query: toAgentQuery(options), @@ -1665,8 +1833,10 @@ export class SandboxAgent { }); } - async listProcesses(): Promise { - return this.requestJson("GET", `${API_PREFIX}/processes`); + async listProcesses(query?: ProcessListQuery): Promise { + return this.requestJson("GET", `${API_PREFIX}/processes`, { + query, + }); } async getProcess(id: string): Promise { @@ -1754,6 +1924,32 @@ export class SandboxAgent { return new ProcessTerminalSession(this.connectProcessTerminalWebSocket(id, options)); } + buildDesktopStreamWebSocketUrl(options: ProcessTerminalWebSocketUrlOptions = {}): string { + return toWebSocketUrl( + this.buildUrl(`${API_PREFIX}/desktop/stream/ws`, { + access_token: options.accessToken ?? 
this.token, + }), + ); + } + + connectDesktopStreamWebSocket(options: DesktopStreamConnectOptions = {}): WebSocket { + const WebSocketCtor = options.WebSocket ?? globalThis.WebSocket; + if (!WebSocketCtor) { + throw new Error("WebSocket API is not available; provide a WebSocket implementation."); + } + + return new WebSocketCtor( + this.buildDesktopStreamWebSocketUrl({ + accessToken: options.accessToken, + }), + options.protocols, + ); + } + + connectDesktopStream(options: DesktopStreamSessionOptions = {}): DesktopStreamSession { + return new DesktopStreamSession(this.connectDesktopStreamWebSocket(options)); + } + private async getLiveConnection(agent: string): Promise { await this.awaitHealthy(); diff --git a/sdks/typescript/src/desktop-stream.ts b/sdks/typescript/src/desktop-stream.ts new file mode 100644 index 0000000..1bbf76f --- /dev/null +++ b/sdks/typescript/src/desktop-stream.ts @@ -0,0 +1,236 @@ +import type { DesktopMouseButton } from "./types.ts"; + +const WS_READY_STATE_CONNECTING = 0; +const WS_READY_STATE_OPEN = 1; +const WS_READY_STATE_CLOSED = 3; + +export interface DesktopStreamReadyStatus { + type: "ready"; + width: number; + height: number; +} + +export interface DesktopStreamErrorStatus { + type: "error"; + message: string; +} + +export type DesktopStreamStatusMessage = DesktopStreamReadyStatus | DesktopStreamErrorStatus; + +export interface DesktopStreamConnectOptions { + accessToken?: string; + WebSocket?: typeof WebSocket; + protocols?: string | string[]; +} + +type DesktopStreamClientFrame = + | { + type: "moveMouse"; + x: number; + y: number; + } + | { + type: "mouseDown" | "mouseUp"; + x?: number; + y?: number; + button?: DesktopMouseButton; + } + | { + type: "scroll"; + x: number; + y: number; + deltaX?: number; + deltaY?: number; + } + | { + type: "keyDown" | "keyUp"; + key: string; + } + | { + type: "close"; + }; + +export class DesktopStreamSession { + readonly socket: WebSocket; + readonly closed: Promise; + + private readonly 
readyListeners = new Set<(status: DesktopStreamReadyStatus) => void>(); + private readonly frameListeners = new Set<(frame: Uint8Array) => void>(); + private readonly errorListeners = new Set<(error: DesktopStreamErrorStatus | Error) => void>(); + private readonly closeListeners = new Set<() => void>(); + + private closeSignalSent = false; + private closedResolve!: () => void; + + constructor(socket: WebSocket) { + this.socket = socket; + this.socket.binaryType = "arraybuffer"; + this.closed = new Promise((resolve) => { + this.closedResolve = resolve; + }); + + this.socket.addEventListener("message", (event) => { + void this.handleMessage(event.data); + }); + this.socket.addEventListener("error", () => { + this.emitError(new Error("Desktop stream websocket connection failed.")); + }); + this.socket.addEventListener("close", () => { + this.closedResolve(); + for (const listener of this.closeListeners) { + listener(); + } + }); + } + + onReady(listener: (status: DesktopStreamReadyStatus) => void): () => void { + this.readyListeners.add(listener); + return () => { + this.readyListeners.delete(listener); + }; + } + + onFrame(listener: (frame: Uint8Array) => void): () => void { + this.frameListeners.add(listener); + return () => { + this.frameListeners.delete(listener); + }; + } + + onError(listener: (error: DesktopStreamErrorStatus | Error) => void): () => void { + this.errorListeners.add(listener); + return () => { + this.errorListeners.delete(listener); + }; + } + + onClose(listener: () => void): () => void { + this.closeListeners.add(listener); + return () => { + this.closeListeners.delete(listener); + }; + } + + moveMouse(x: number, y: number): void { + this.sendFrame({ type: "moveMouse", x, y }); + } + + mouseDown(button?: DesktopMouseButton, x?: number, y?: number): void { + this.sendFrame({ type: "mouseDown", button, x, y }); + } + + mouseUp(button?: DesktopMouseButton, x?: number, y?: number): void { + this.sendFrame({ type: "mouseUp", button, x, y }); + } + + 
scroll(x: number, y: number, deltaX?: number, deltaY?: number): void { + this.sendFrame({ type: "scroll", x, y, deltaX, deltaY }); + } + + keyDown(key: string): void { + this.sendFrame({ type: "keyDown", key }); + } + + keyUp(key: string): void { + this.sendFrame({ type: "keyUp", key }); + } + + close(): void { + if (this.socket.readyState === WS_READY_STATE_CONNECTING) { + this.socket.addEventListener( + "open", + () => { + this.close(); + }, + { once: true }, + ); + return; + } + + if (this.socket.readyState === WS_READY_STATE_OPEN) { + if (!this.closeSignalSent) { + this.closeSignalSent = true; + this.sendFrame({ type: "close" }); + } + this.socket.close(); + return; + } + + if (this.socket.readyState !== WS_READY_STATE_CLOSED) { + this.socket.close(); + } + } + + private async handleMessage(data: unknown): Promise { + try { + if (typeof data === "string") { + const frame = parseStatusFrame(data); + if (!frame) { + this.emitError(new Error("Received invalid desktop stream control frame.")); + return; + } + + if (frame.type === "ready") { + for (const listener of this.readyListeners) { + listener(frame); + } + return; + } + + this.emitError(frame); + return; + } + + const bytes = await decodeBinaryFrame(data); + for (const listener of this.frameListeners) { + listener(bytes); + } + } catch (error) { + this.emitError(error instanceof Error ? 
error : new Error(String(error))); + } + } + + private sendFrame(frame: DesktopStreamClientFrame): void { + if (this.socket.readyState !== WS_READY_STATE_OPEN) { + return; + } + this.socket.send(JSON.stringify(frame)); + } + + private emitError(error: DesktopStreamErrorStatus | Error): void { + for (const listener of this.errorListeners) { + listener(error); + } + } +} + +function parseStatusFrame(payload: string): DesktopStreamStatusMessage | null { + const value = JSON.parse(payload) as Record; + if (value.type === "ready" && typeof value.width === "number" && typeof value.height === "number") { + return { + type: "ready", + width: value.width, + height: value.height, + }; + } + if (value.type === "error" && typeof value.message === "string") { + return { + type: "error", + message: value.message, + }; + } + return null; +} + +async function decodeBinaryFrame(data: unknown): Promise { + if (data instanceof ArrayBuffer) { + return new Uint8Array(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength); + } + if (typeof Blob !== "undefined" && data instanceof Blob) { + return new Uint8Array(await data.arrayBuffer()); + } + throw new Error("Unsupported desktop stream binary frame type."); +} diff --git a/sdks/typescript/src/generated/openapi.ts b/sdks/typescript/src/generated/openapi.ts index 18374fb..195c481 100644 --- a/sdks/typescript/src/generated/openapi.ts +++ b/sdks/typescript/src/generated/openapi.ts @@ -3,7 +3,6 @@ * Do not make direct changes to the file. */ - export interface paths { "/v1/acp": { get: operations["get_v1_acp_servers"]; @@ -32,6 +31,213 @@ export interface paths { put: operations["put_v1_config_skills"]; delete: operations["delete_v1_config_skills"]; }; + "/v1/desktop/display/info": { + /** + * Get desktop display information. + * @description Performs a health-gated display query against the managed desktop and + * returns the current display identifier and resolution. 
+ */ + get: operations["get_v1_desktop_display_info"]; + }; + "/v1/desktop/keyboard/down": { + /** + * Press and hold a desktop keyboard key. + * @description Performs a health-gated `xdotool keydown` operation against the managed + * desktop. + */ + post: operations["post_v1_desktop_keyboard_down"]; + }; + "/v1/desktop/keyboard/press": { + /** + * Press a desktop keyboard shortcut. + * @description Performs a health-gated `xdotool key` operation against the managed + * desktop. + */ + post: operations["post_v1_desktop_keyboard_press"]; + }; + "/v1/desktop/keyboard/type": { + /** + * Type desktop keyboard text. + * @description Performs a health-gated `xdotool type` operation against the managed + * desktop. + */ + post: operations["post_v1_desktop_keyboard_type"]; + }; + "/v1/desktop/keyboard/up": { + /** + * Release a desktop keyboard key. + * @description Performs a health-gated `xdotool keyup` operation against the managed + * desktop. + */ + post: operations["post_v1_desktop_keyboard_up"]; + }; + "/v1/desktop/mouse/click": { + /** + * Click on the desktop. + * @description Performs a health-gated pointer move and click against the managed desktop + * and returns the resulting mouse position. + */ + post: operations["post_v1_desktop_mouse_click"]; + }; + "/v1/desktop/mouse/down": { + /** + * Press and hold a desktop mouse button. + * @description Performs a health-gated optional pointer move followed by `xdotool mousedown` + * and returns the resulting mouse position. + */ + post: operations["post_v1_desktop_mouse_down"]; + }; + "/v1/desktop/mouse/drag": { + /** + * Drag the desktop mouse. + * @description Performs a health-gated drag gesture against the managed desktop and + * returns the resulting mouse position. + */ + post: operations["post_v1_desktop_mouse_drag"]; + }; + "/v1/desktop/mouse/move": { + /** + * Move the desktop mouse. 
+ * @description Performs a health-gated absolute pointer move on the managed desktop and + * returns the resulting mouse position. + */ + post: operations["post_v1_desktop_mouse_move"]; + }; + "/v1/desktop/mouse/position": { + /** + * Get the current desktop mouse position. + * @description Performs a health-gated mouse position query against the managed desktop. + */ + get: operations["get_v1_desktop_mouse_position"]; + }; + "/v1/desktop/mouse/scroll": { + /** + * Scroll the desktop mouse wheel. + * @description Performs a health-gated scroll gesture at the requested coordinates and + * returns the resulting mouse position. + */ + post: operations["post_v1_desktop_mouse_scroll"]; + }; + "/v1/desktop/mouse/up": { + /** + * Release a desktop mouse button. + * @description Performs a health-gated optional pointer move followed by `xdotool mouseup` + * and returns the resulting mouse position. + */ + post: operations["post_v1_desktop_mouse_up"]; + }; + "/v1/desktop/recording/start": { + /** + * Start desktop recording. + * @description Starts an ffmpeg x11grab recording against the managed desktop and returns + * the created recording metadata. + */ + post: operations["post_v1_desktop_recording_start"]; + }; + "/v1/desktop/recording/stop": { + /** + * Stop desktop recording. + * @description Stops the active desktop recording and returns the finalized recording + * metadata. + */ + post: operations["post_v1_desktop_recording_stop"]; + }; + "/v1/desktop/recordings": { + /** + * List desktop recordings. + * @description Returns the current desktop recording catalog. + */ + get: operations["get_v1_desktop_recordings"]; + }; + "/v1/desktop/recordings/{id}": { + /** + * Get desktop recording metadata. + * @description Returns metadata for a single desktop recording. + */ + get: operations["get_v1_desktop_recording"]; + /** + * Delete a desktop recording. + * @description Removes a completed desktop recording and its file from disk. 
+ */ + delete: operations["delete_v1_desktop_recording"]; + }; + "/v1/desktop/recordings/{id}/download": { + /** + * Download a desktop recording. + * @description Serves the recorded MP4 bytes for a completed desktop recording. + */ + get: operations["get_v1_desktop_recording_download"]; + }; + "/v1/desktop/screenshot": { + /** + * Capture a full desktop screenshot. + * @description Performs a health-gated full-frame screenshot of the managed desktop and + * returns the requested image bytes. + */ + get: operations["get_v1_desktop_screenshot"]; + }; + "/v1/desktop/screenshot/region": { + /** + * Capture a desktop screenshot region. + * @description Performs a health-gated screenshot crop against the managed desktop and + * returns the requested region image bytes. + */ + get: operations["get_v1_desktop_screenshot_region"]; + }; + "/v1/desktop/start": { + /** + * Start the private desktop runtime. + * @description Lazily launches the managed Xvfb/openbox stack, validates display health, + * and returns the resulting desktop status snapshot. + */ + post: operations["post_v1_desktop_start"]; + }; + "/v1/desktop/status": { + /** + * Get desktop runtime status. + * @description Returns the current desktop runtime state, dependency status, active + * display metadata, and supervised process information. + */ + get: operations["get_v1_desktop_status"]; + }; + "/v1/desktop/stop": { + /** + * Stop the private desktop runtime. + * @description Terminates the managed openbox/Xvfb/dbus processes owned by the desktop + * runtime and returns the resulting status snapshot. + */ + post: operations["post_v1_desktop_stop"]; + }; + "/v1/desktop/stream/start": { + /** + * Start desktop streaming. + * @description Enables desktop websocket streaming for the managed desktop. + */ + post: operations["post_v1_desktop_stream_start"]; + }; + "/v1/desktop/stream/stop": { + /** + * Stop desktop streaming. + * @description Disables desktop websocket streaming for the managed desktop. 
+ */ + post: operations["post_v1_desktop_stream_stop"]; + }; + "/v1/desktop/stream/ws": { + /** + * Open a desktop websocket streaming session. + * @description Upgrades the connection to a websocket that streams JPEG desktop frames and + * accepts mouse and keyboard control frames. + */ + get: operations["get_v1_desktop_stream_ws"]; + }; + "/v1/desktop/windows": { + /** + * List visible desktop windows. + * @description Performs a health-gated visible-window enumeration against the managed + * desktop and returns the current window metadata. + */ + get: operations["get_v1_desktop_windows"]; + }; "/v1/fs/entries": { get: operations["get_v1_fs_entries"]; }; @@ -234,8 +440,215 @@ export interface components { AgentListResponse: { agents: components["schemas"]["AgentInfo"][]; }; + DesktopActionResponse: { + ok: boolean; + }; + DesktopDisplayInfoResponse: { + display: string; + resolution: components["schemas"]["DesktopResolution"]; + }; + DesktopErrorInfo: { + code: string; + message: string; + }; + DesktopKeyModifiers: { + alt?: boolean | null; + cmd?: boolean | null; + ctrl?: boolean | null; + shift?: boolean | null; + }; + DesktopKeyboardDownRequest: { + key: string; + }; + DesktopKeyboardPressRequest: { + key: string; + modifiers?: components["schemas"]["DesktopKeyModifiers"] | null; + }; + DesktopKeyboardTypeRequest: { + /** Format: int32 */ + delayMs?: number | null; + text: string; + }; + DesktopKeyboardUpRequest: { + key: string; + }; /** @enum {string} */ - ErrorType: "invalid_request" | "conflict" | "unsupported_agent" | "agent_not_installed" | "install_failed" | "agent_process_exited" | "token_invalid" | "permission_denied" | "not_acceptable" | "unsupported_media_type" | "not_found" | "session_not_found" | "session_already_exists" | "mode_not_supported" | "stream_error" | "timeout"; + DesktopMouseButton: "left" | "middle" | "right"; + DesktopMouseClickRequest: { + button?: components["schemas"]["DesktopMouseButton"] | null; + /** Format: int32 */ + 
clickCount?: number | null; + /** Format: int32 */ + x: number; + /** Format: int32 */ + y: number; + }; + DesktopMouseDownRequest: { + button?: components["schemas"]["DesktopMouseButton"] | null; + /** Format: int32 */ + x?: number | null; + /** Format: int32 */ + y?: number | null; + }; + DesktopMouseDragRequest: { + button?: components["schemas"]["DesktopMouseButton"] | null; + /** Format: int32 */ + endX: number; + /** Format: int32 */ + endY: number; + /** Format: int32 */ + startX: number; + /** Format: int32 */ + startY: number; + }; + DesktopMouseMoveRequest: { + /** Format: int32 */ + x: number; + /** Format: int32 */ + y: number; + }; + DesktopMousePositionResponse: { + /** Format: int32 */ + screen?: number | null; + window?: string | null; + /** Format: int32 */ + x: number; + /** Format: int32 */ + y: number; + }; + DesktopMouseScrollRequest: { + /** Format: int32 */ + deltaX?: number | null; + /** Format: int32 */ + deltaY?: number | null; + /** Format: int32 */ + x: number; + /** Format: int32 */ + y: number; + }; + DesktopMouseUpRequest: { + button?: components["schemas"]["DesktopMouseButton"] | null; + /** Format: int32 */ + x?: number | null; + /** Format: int32 */ + y?: number | null; + }; + DesktopProcessInfo: { + logPath?: string | null; + name: string; + /** Format: int32 */ + pid?: number | null; + running: boolean; + }; + DesktopRecordingInfo: { + /** Format: int64 */ + bytes: number; + endedAt?: string | null; + fileName: string; + id: string; + processId?: string | null; + startedAt: string; + status: components["schemas"]["DesktopRecordingStatus"]; + }; + DesktopRecordingListResponse: { + recordings: components["schemas"]["DesktopRecordingInfo"][]; + }; + DesktopRecordingStartRequest: { + /** Format: int32 */ + fps?: number | null; + }; + /** @enum {string} */ + DesktopRecordingStatus: "recording" | "completed" | "failed"; + DesktopRegionScreenshotQuery: { + format?: components["schemas"]["DesktopScreenshotFormat"] | null; + /** Format: 
int32 */ + height: number; + /** Format: int32 */ + quality?: number | null; + /** Format: float */ + scale?: number | null; + /** Format: int32 */ + width: number; + /** Format: int32 */ + x: number; + /** Format: int32 */ + y: number; + }; + DesktopResolution: { + /** Format: int32 */ + dpi?: number | null; + /** Format: int32 */ + height: number; + /** Format: int32 */ + width: number; + }; + /** @enum {string} */ + DesktopScreenshotFormat: "png" | "jpeg" | "webp"; + DesktopScreenshotQuery: { + format?: components["schemas"]["DesktopScreenshotFormat"] | null; + /** Format: int32 */ + quality?: number | null; + /** Format: float */ + scale?: number | null; + }; + DesktopStartRequest: { + /** Format: int32 */ + dpi?: number | null; + /** Format: int32 */ + height?: number | null; + /** Format: int32 */ + width?: number | null; + }; + /** @enum {string} */ + DesktopState: "inactive" | "install_required" | "starting" | "active" | "stopping" | "failed"; + DesktopStatusResponse: { + display?: string | null; + installCommand?: string | null; + lastError?: components["schemas"]["DesktopErrorInfo"] | null; + missingDependencies?: string[]; + processes?: components["schemas"]["DesktopProcessInfo"][]; + resolution?: components["schemas"]["DesktopResolution"] | null; + runtimeLogPath?: string | null; + startedAt?: string | null; + state: components["schemas"]["DesktopState"]; + }; + DesktopStreamStatusResponse: { + active: boolean; + }; + DesktopWindowInfo: { + /** Format: int32 */ + height: number; + id: string; + isActive: boolean; + title: string; + /** Format: int32 */ + width: number; + /** Format: int32 */ + x: number; + /** Format: int32 */ + y: number; + }; + DesktopWindowListResponse: { + windows: components["schemas"]["DesktopWindowInfo"][]; + }; + /** @enum {string} */ + ErrorType: + | "invalid_request" + | "conflict" + | "unsupported_agent" + | "agent_not_installed" + | "install_failed" + | "agent_process_exited" + | "token_invalid" + | "permission_denied" + | 
"not_acceptable" + | "unsupported_media_type" + | "not_found" + | "session_not_found" + | "session_already_exists" + | "mode_not_supported" + | "stream_error" + | "timeout"; FsActionResponse: { path: string; }; @@ -294,35 +707,37 @@ export interface components { directory: string; mcpName: string; }; - McpServerConfig: ({ - args?: string[]; - command: string; - cwd?: string | null; - enabled?: boolean | null; - env?: { - [key: string]: string; - } | null; - /** Format: int64 */ - timeoutMs?: number | null; - /** @enum {string} */ - type: "local"; - }) | ({ - bearerTokenEnvVar?: string | null; - enabled?: boolean | null; - envHeaders?: { - [key: string]: string; - } | null; - headers?: { - [key: string]: string; - } | null; - oauth?: Record | null | null; - /** Format: int64 */ - timeoutMs?: number | null; - transport?: string | null; - /** @enum {string} */ - type: "remote"; - url: string; - }); + McpServerConfig: + | { + args?: string[]; + command: string; + cwd?: string | null; + enabled?: boolean | null; + env?: { + [key: string]: string; + } | null; + /** Format: int64 */ + timeoutMs?: number | null; + /** @enum {string} */ + type: "local"; + } + | { + bearerTokenEnvVar?: string | null; + enabled?: boolean | null; + envHeaders?: { + [key: string]: string; + } | null; + headers?: { + [key: string]: string; + } | null; + oauth?: Record | null | null; + /** Format: int64 */ + timeoutMs?: number | null; + transport?: string | null; + /** @enum {string} */ + type: "remote"; + url: string; + }; ProblemDetails: { detail?: string | null; instance?: string | null; @@ -364,6 +779,7 @@ export interface components { exitedAtMs?: number | null; id: string; interactive: boolean; + owner: components["schemas"]["ProcessOwner"]; /** Format: int32 */ pid?: number | null; status: components["schemas"]["ProcessState"]; @@ -376,6 +792,9 @@ export interface components { ProcessInputResponse: { bytesWritten: number; }; + ProcessListQuery: { + owner?: 
components["schemas"]["ProcessOwner"] | null; + }; ProcessListResponse: { processes: components["schemas"]["ProcessInfo"][]; }; @@ -402,6 +821,8 @@ export interface components { }; /** @enum {string} */ ProcessLogsStream: "stdout" | "stderr" | "combined" | "pty"; + /** @enum {string} */ + ProcessOwner: "user" | "desktop" | "system"; ProcessRunRequest: { args?: string[]; command: string; @@ -476,7 +897,6 @@ export type $defs = Record; export type external = Record; export interface operations { - get_v1_acp_servers: { responses: { /** @description Active ACP server instances */ @@ -811,6 +1231,850 @@ export interface operations { }; }; }; + /** + * Get desktop display information. + * @description Performs a health-gated display query against the managed desktop and + * returns the current display identifier and resolution. + */ + get_v1_desktop_display_info: { + responses: { + /** @description Desktop display information */ + 200: { + content: { + "application/json": components["schemas"]["DesktopDisplayInfoResponse"]; + }; + }; + /** @description Desktop runtime is not ready */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime health or display query failed */ + 503: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Press and hold a desktop keyboard key. + * @description Performs a health-gated `xdotool keydown` operation against the managed + * desktop. 
+ */ + post_v1_desktop_keyboard_down: { + requestBody: { + content: { + "application/json": components["schemas"]["DesktopKeyboardDownRequest"]; + }; + }; + responses: { + /** @description Desktop keyboard action result */ + 200: { + content: { + "application/json": components["schemas"]["DesktopActionResponse"]; + }; + }; + /** @description Invalid keyboard down request */ + 400: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime is not ready */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime health or input failed */ + 502: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Press a desktop keyboard shortcut. + * @description Performs a health-gated `xdotool key` operation against the managed + * desktop. + */ + post_v1_desktop_keyboard_press: { + requestBody: { + content: { + "application/json": components["schemas"]["DesktopKeyboardPressRequest"]; + }; + }; + responses: { + /** @description Desktop keyboard action result */ + 200: { + content: { + "application/json": components["schemas"]["DesktopActionResponse"]; + }; + }; + /** @description Invalid keyboard press request */ + 400: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime is not ready */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime health or input failed */ + 502: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Type desktop keyboard text. + * @description Performs a health-gated `xdotool type` operation against the managed + * desktop. 
+ */ + post_v1_desktop_keyboard_type: { + requestBody: { + content: { + "application/json": components["schemas"]["DesktopKeyboardTypeRequest"]; + }; + }; + responses: { + /** @description Desktop keyboard action result */ + 200: { + content: { + "application/json": components["schemas"]["DesktopActionResponse"]; + }; + }; + /** @description Invalid keyboard type request */ + 400: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime is not ready */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime health or input failed */ + 502: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Release a desktop keyboard key. + * @description Performs a health-gated `xdotool keyup` operation against the managed + * desktop. + */ + post_v1_desktop_keyboard_up: { + requestBody: { + content: { + "application/json": components["schemas"]["DesktopKeyboardUpRequest"]; + }; + }; + responses: { + /** @description Desktop keyboard action result */ + 200: { + content: { + "application/json": components["schemas"]["DesktopActionResponse"]; + }; + }; + /** @description Invalid keyboard up request */ + 400: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime is not ready */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime health or input failed */ + 502: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Click on the desktop. + * @description Performs a health-gated pointer move and click against the managed desktop + * and returns the resulting mouse position. 
+ */ + post_v1_desktop_mouse_click: { + requestBody: { + content: { + "application/json": components["schemas"]["DesktopMouseClickRequest"]; + }; + }; + responses: { + /** @description Desktop mouse position after click */ + 200: { + content: { + "application/json": components["schemas"]["DesktopMousePositionResponse"]; + }; + }; + /** @description Invalid mouse click request */ + 400: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime is not ready */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime health or input failed */ + 502: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Press and hold a desktop mouse button. + * @description Performs a health-gated optional pointer move followed by `xdotool mousedown` + * and returns the resulting mouse position. + */ + post_v1_desktop_mouse_down: { + requestBody: { + content: { + "application/json": components["schemas"]["DesktopMouseDownRequest"]; + }; + }; + responses: { + /** @description Desktop mouse position after button press */ + 200: { + content: { + "application/json": components["schemas"]["DesktopMousePositionResponse"]; + }; + }; + /** @description Invalid mouse down request */ + 400: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime is not ready */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime health or input failed */ + 502: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Drag the desktop mouse. + * @description Performs a health-gated drag gesture against the managed desktop and + * returns the resulting mouse position. 
+ */ + post_v1_desktop_mouse_drag: { + requestBody: { + content: { + "application/json": components["schemas"]["DesktopMouseDragRequest"]; + }; + }; + responses: { + /** @description Desktop mouse position after drag */ + 200: { + content: { + "application/json": components["schemas"]["DesktopMousePositionResponse"]; + }; + }; + /** @description Invalid mouse drag request */ + 400: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime is not ready */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime health or input failed */ + 502: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Move the desktop mouse. + * @description Performs a health-gated absolute pointer move on the managed desktop and + * returns the resulting mouse position. + */ + post_v1_desktop_mouse_move: { + requestBody: { + content: { + "application/json": components["schemas"]["DesktopMouseMoveRequest"]; + }; + }; + responses: { + /** @description Desktop mouse position after move */ + 200: { + content: { + "application/json": components["schemas"]["DesktopMousePositionResponse"]; + }; + }; + /** @description Invalid mouse move request */ + 400: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime is not ready */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime health or input failed */ + 502: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Get the current desktop mouse position. + * @description Performs a health-gated mouse position query against the managed desktop. 
+ */ + get_v1_desktop_mouse_position: { + responses: { + /** @description Desktop mouse position */ + 200: { + content: { + "application/json": components["schemas"]["DesktopMousePositionResponse"]; + }; + }; + /** @description Desktop runtime is not ready */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime health or input check failed */ + 502: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Scroll the desktop mouse wheel. + * @description Performs a health-gated scroll gesture at the requested coordinates and + * returns the resulting mouse position. + */ + post_v1_desktop_mouse_scroll: { + requestBody: { + content: { + "application/json": components["schemas"]["DesktopMouseScrollRequest"]; + }; + }; + responses: { + /** @description Desktop mouse position after scroll */ + 200: { + content: { + "application/json": components["schemas"]["DesktopMousePositionResponse"]; + }; + }; + /** @description Invalid mouse scroll request */ + 400: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime is not ready */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime health or input failed */ + 502: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Release a desktop mouse button. + * @description Performs a health-gated optional pointer move followed by `xdotool mouseup` + * and returns the resulting mouse position. 
+ */ + post_v1_desktop_mouse_up: { + requestBody: { + content: { + "application/json": components["schemas"]["DesktopMouseUpRequest"]; + }; + }; + responses: { + /** @description Desktop mouse position after button release */ + 200: { + content: { + "application/json": components["schemas"]["DesktopMousePositionResponse"]; + }; + }; + /** @description Invalid mouse up request */ + 400: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime is not ready */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime health or input failed */ + 502: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Start desktop recording. + * @description Starts an ffmpeg x11grab recording against the managed desktop and returns + * the created recording metadata. + */ + post_v1_desktop_recording_start: { + requestBody: { + content: { + "application/json": components["schemas"]["DesktopRecordingStartRequest"]; + }; + }; + responses: { + /** @description Desktop recording started */ + 200: { + content: { + "application/json": components["schemas"]["DesktopRecordingInfo"]; + }; + }; + /** @description Desktop runtime is not ready or a recording is already active */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop recording failed */ + 502: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Stop desktop recording. + * @description Stops the active desktop recording and returns the finalized recording + * metadata. 
+ */ + post_v1_desktop_recording_stop: { + responses: { + /** @description Desktop recording stopped */ + 200: { + content: { + "application/json": components["schemas"]["DesktopRecordingInfo"]; + }; + }; + /** @description No active desktop recording */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop recording stop failed */ + 502: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * List desktop recordings. + * @description Returns the current desktop recording catalog. + */ + get_v1_desktop_recordings: { + responses: { + /** @description Desktop recordings */ + 200: { + content: { + "application/json": components["schemas"]["DesktopRecordingListResponse"]; + }; + }; + /** @description Desktop recordings query failed */ + 502: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Get desktop recording metadata. + * @description Returns metadata for a single desktop recording. + */ + get_v1_desktop_recording: { + parameters: { + path: { + /** @description Desktop recording ID */ + id: string; + }; + }; + responses: { + /** @description Desktop recording metadata */ + 200: { + content: { + "application/json": components["schemas"]["DesktopRecordingInfo"]; + }; + }; + /** @description Unknown desktop recording */ + 404: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Delete a desktop recording. + * @description Removes a completed desktop recording and its file from disk. 
+ */ + delete_v1_desktop_recording: { + parameters: { + path: { + /** @description Desktop recording ID */ + id: string; + }; + }; + responses: { + /** @description Desktop recording deleted */ + 204: { + content: never; + }; + /** @description Unknown desktop recording */ + 404: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop recording is still active */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Download a desktop recording. + * @description Serves the recorded MP4 bytes for a completed desktop recording. + */ + get_v1_desktop_recording_download: { + parameters: { + path: { + /** @description Desktop recording ID */ + id: string; + }; + }; + responses: { + /** @description Desktop recording as MP4 bytes */ + 200: { + content: never; + }; + /** @description Unknown desktop recording */ + 404: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Capture a full desktop screenshot. + * @description Performs a health-gated full-frame screenshot of the managed desktop and + * returns the requested image bytes. 
+ */ + get_v1_desktop_screenshot: { + parameters: { + query?: { + format?: components["schemas"]["DesktopScreenshotFormat"] | null; + quality?: number | null; + scale?: number | null; + }; + }; + responses: { + /** @description Desktop screenshot as image bytes */ + 200: { + content: never; + }; + /** @description Invalid screenshot query */ + 400: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime is not ready */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime health or screenshot capture failed */ + 502: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Capture a desktop screenshot region. + * @description Performs a health-gated screenshot crop against the managed desktop and + * returns the requested region image bytes. + */ + get_v1_desktop_screenshot_region: { + parameters: { + query: { + x: number; + y: number; + width: number; + height: number; + format?: components["schemas"]["DesktopScreenshotFormat"] | null; + quality?: number | null; + scale?: number | null; + }; + }; + responses: { + /** @description Desktop screenshot region as image bytes */ + 200: { + content: never; + }; + /** @description Invalid screenshot region */ + 400: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime is not ready */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime health or screenshot capture failed */ + 502: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Start the private desktop runtime. + * @description Lazily launches the managed Xvfb/openbox stack, validates display health, + * and returns the resulting desktop status snapshot. 
+ */ + post_v1_desktop_start: { + requestBody: { + content: { + "application/json": components["schemas"]["DesktopStartRequest"]; + }; + }; + responses: { + /** @description Desktop runtime status after start */ + 200: { + content: { + "application/json": components["schemas"]["DesktopStatusResponse"]; + }; + }; + /** @description Invalid desktop start request */ + 400: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime is already transitioning */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop API unsupported on this platform */ + 501: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime could not be started */ + 503: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Get desktop runtime status. + * @description Returns the current desktop runtime state, dependency status, active + * display metadata, and supervised process information. + */ + get_v1_desktop_status: { + responses: { + /** @description Desktop runtime status */ + 200: { + content: { + "application/json": components["schemas"]["DesktopStatusResponse"]; + }; + }; + /** @description Authentication required */ + 401: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Stop the private desktop runtime. + * @description Terminates the managed openbox/Xvfb/dbus processes owned by the desktop + * runtime and returns the resulting status snapshot. 
+ */ + post_v1_desktop_stop: { + responses: { + /** @description Desktop runtime status after stop */ + 200: { + content: { + "application/json": components["schemas"]["DesktopStatusResponse"]; + }; + }; + /** @description Desktop runtime is already transitioning */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * Start desktop streaming. + * @description Enables desktop websocket streaming for the managed desktop. + */ + post_v1_desktop_stream_start: { + responses: { + /** @description Desktop streaming started */ + 200: { + content: { + "application/json": components["schemas"]["DesktopStreamStatusResponse"]; + }; + }; + }; + }; + /** + * Stop desktop streaming. + * @description Disables desktop websocket streaming for the managed desktop. + */ + post_v1_desktop_stream_stop: { + responses: { + /** @description Desktop streaming stopped */ + 200: { + content: { + "application/json": components["schemas"]["DesktopStreamStatusResponse"]; + }; + }; + }; + }; + /** + * Open a desktop websocket streaming session. + * @description Upgrades the connection to a websocket that streams JPEG desktop frames and + * accepts mouse and keyboard control frames. + */ + get_v1_desktop_stream_ws: { + parameters: { + query?: { + /** @description Bearer token alternative for WS auth */ + access_token?: string | null; + }; + }; + responses: { + /** @description WebSocket upgraded */ + 101: { + content: never; + }; + /** @description Desktop runtime or streaming session is not ready */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop stream failed */ + 502: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; + /** + * List visible desktop windows. + * @description Performs a health-gated visible-window enumeration against the managed + * desktop and returns the current window metadata. 
+ */ + get_v1_desktop_windows: { + responses: { + /** @description Visible desktop windows */ + 200: { + content: { + "application/json": components["schemas"]["DesktopWindowListResponse"]; + }; + }; + /** @description Desktop runtime is not ready */ + 409: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + /** @description Desktop runtime health or window query failed */ + 503: { + content: { + "application/json": components["schemas"]["ProblemDetails"]; + }; + }; + }; + }; get_v1_fs_entries: { parameters: { query?: { @@ -966,6 +2230,11 @@ export interface operations { * by the runtime, sorted by process ID. */ get_v1_processes: { + parameters: { + query?: { + owner?: components["schemas"]["ProcessOwner"] | null; + }; + }; responses: { /** @description List processes */ 200: { diff --git a/sdks/typescript/src/index.ts b/sdks/typescript/src/index.ts index 15537dd..8c05760 100644 --- a/sdks/typescript/src/index.ts +++ b/sdks/typescript/src/index.ts @@ -14,10 +14,18 @@ export { export { AcpRpcError } from "acp-http-client"; export { buildInspectorUrl } from "./inspector.ts"; +export { DesktopStreamSession } from "./desktop-stream.ts"; +export type { + DesktopStreamConnectOptions, + DesktopStreamErrorStatus, + DesktopStreamReadyStatus, + DesktopStreamStatusMessage, +} from "./desktop-stream.ts"; export type { SandboxAgentHealthWaitOptions, AgentQueryOptions, + DesktopStreamSessionOptions, ProcessLogFollowQuery, ProcessLogListener, ProcessLogSubscription, @@ -50,6 +58,37 @@ export type { AgentInstallRequest, AgentInstallResponse, AgentListResponse, + DesktopActionResponse, + DesktopDisplayInfoResponse, + DesktopErrorInfo, + DesktopKeyboardDownRequest, + DesktopKeyboardUpRequest, + DesktopKeyModifiers, + DesktopKeyboardPressRequest, + DesktopKeyboardTypeRequest, + DesktopMouseButton, + DesktopMouseClickRequest, + DesktopMouseDownRequest, + DesktopMouseDragRequest, + DesktopMouseMoveRequest, + DesktopMousePositionResponse, + 
DesktopMouseScrollRequest, + DesktopMouseUpRequest, + DesktopProcessInfo, + DesktopRecordingInfo, + DesktopRecordingListResponse, + DesktopRecordingStartRequest, + DesktopRecordingStatus, + DesktopRegionScreenshotQuery, + DesktopResolution, + DesktopScreenshotFormat, + DesktopScreenshotQuery, + DesktopStartRequest, + DesktopState, + DesktopStatusResponse, + DesktopStreamStatusResponse, + DesktopWindowInfo, + DesktopWindowListResponse, FsActionResponse, FsDeleteQuery, FsEntriesQuery, @@ -74,10 +113,12 @@ export type { ProcessInfo, ProcessInputRequest, ProcessInputResponse, + ProcessListQuery, ProcessListResponse, ProcessLogEntry, ProcessLogsQuery, ProcessLogsResponse, + ProcessOwner, ProcessLogsStream, ProcessRunRequest, ProcessRunResponse, diff --git a/sdks/typescript/src/types.ts b/sdks/typescript/src/types.ts index f2a7af3..080e62c 100644 --- a/sdks/typescript/src/types.ts +++ b/sdks/typescript/src/types.ts @@ -4,6 +4,38 @@ import type { components, operations } from "./generated/openapi.ts"; export type ProblemDetails = components["schemas"]["ProblemDetails"]; export type HealthResponse = JsonResponse; +export type DesktopState = components["schemas"]["DesktopState"]; +export type DesktopResolution = components["schemas"]["DesktopResolution"]; +export type DesktopErrorInfo = components["schemas"]["DesktopErrorInfo"]; +export type DesktopProcessInfo = components["schemas"]["DesktopProcessInfo"]; +export type DesktopStatusResponse = JsonResponse; +export type DesktopStartRequest = JsonRequestBody; +export type DesktopScreenshotFormat = components["schemas"]["DesktopScreenshotFormat"]; +export type DesktopScreenshotQuery = + QueryParams extends never ? 
Record : QueryParams; +export type DesktopRegionScreenshotQuery = QueryParams; +export type DesktopMousePositionResponse = JsonResponse; +export type DesktopMouseButton = components["schemas"]["DesktopMouseButton"]; +export type DesktopMouseMoveRequest = JsonRequestBody; +export type DesktopMouseClickRequest = JsonRequestBody; +export type DesktopMouseDownRequest = JsonRequestBody; +export type DesktopMouseUpRequest = JsonRequestBody; +export type DesktopMouseDragRequest = JsonRequestBody; +export type DesktopMouseScrollRequest = JsonRequestBody; +export type DesktopKeyboardTypeRequest = JsonRequestBody; +export type DesktopKeyModifiers = components["schemas"]["DesktopKeyModifiers"]; +export type DesktopKeyboardPressRequest = JsonRequestBody; +export type DesktopKeyboardDownRequest = JsonRequestBody; +export type DesktopKeyboardUpRequest = JsonRequestBody; +export type DesktopActionResponse = JsonResponse; +export type DesktopDisplayInfoResponse = JsonResponse; +export type DesktopWindowInfo = components["schemas"]["DesktopWindowInfo"]; +export type DesktopWindowListResponse = JsonResponse; +export type DesktopRecordingStartRequest = JsonRequestBody; +export type DesktopRecordingStatus = components["schemas"]["DesktopRecordingStatus"]; +export type DesktopRecordingInfo = JsonResponse; +export type DesktopRecordingListResponse = JsonResponse; +export type DesktopStreamStatusResponse = JsonResponse; export type AgentListResponse = JsonResponse; export type AgentInfo = components["schemas"]["AgentInfo"]; export type AgentQuery = QueryParams; @@ -37,11 +69,13 @@ export type ProcessCreateRequest = JsonRequestBody; export type ProcessInputResponse = JsonResponse; +export type ProcessListQuery = QueryParams; export type ProcessListResponse = JsonResponse; export type ProcessLogEntry = components["schemas"]["ProcessLogEntry"]; export type ProcessLogsQuery = QueryParams; export type ProcessLogsResponse = JsonResponse; export type ProcessLogsStream = 
components["schemas"]["ProcessLogsStream"];
+export type ProcessOwner = components["schemas"]["ProcessOwner"];
 export type ProcessRunRequest = JsonRequestBody;
 export type ProcessRunResponse = JsonResponse;
 export type ProcessSignalQuery = QueryParams;
diff --git a/sdks/typescript/tests/helpers/docker.ts b/sdks/typescript/tests/helpers/docker.ts
new file mode 100644
index 0000000..c15c03c
--- /dev/null
+++ b/sdks/typescript/tests/helpers/docker.ts
@@ -0,0 +1,244 @@
+import { execFileSync } from "node:child_process";
+import { mkdtempSync, mkdirSync, rmSync } from "node:fs";
+import { dirname, join, resolve } from "node:path";
+import { fileURLToPath } from "node:url";
+
+const __dirname = dirname(fileURLToPath(import.meta.url));
+const REPO_ROOT = resolve(__dirname, "../../../..");
+const CONTAINER_PORT = 3000;
+const DEFAULT_PATH = "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin";
+const DEFAULT_IMAGE_TAG = "sandbox-agent-test:dev";
+const STANDARD_PATHS = new Set(["/usr/local/sbin", "/usr/local/bin", "/usr/sbin", "/usr/bin", "/sbin", "/bin"]);
+
+let cachedImage: string | undefined;
+let containerCounter = 0;
+
+export type DockerSandboxAgentHandle = {
+  baseUrl: string;
+  token: string;
+  dispose: () => Promise<void>;
+};
+
+export type DockerSandboxAgentOptions = {
+  env?: Record<string, string>;
+  pathMode?: "merge" | "replace";
+  timeoutMs?: number;
+};
+
+type TestLayout = {
+  rootDir: string;
+  homeDir: string;
+  xdgDataHome: string;
+  xdgStateHome: string;
+  appDataDir: string;
+  localAppDataDir: string;
+  installDir: string;
+};
+
+export function createDockerTestLayout(): TestLayout {
+  const tempRoot = join(REPO_ROOT, ".context", "docker-test-");
+  mkdirSync(resolve(REPO_ROOT, ".context"), { recursive: true });
+  const rootDir = mkdtempSync(tempRoot);
+  const homeDir = join(rootDir, "home");
+  const xdgDataHome = join(rootDir, "xdg-data");
+  const xdgStateHome = join(rootDir, "xdg-state");
+  const appDataDir = join(rootDir, "appdata", "Roaming");
+  const localAppDataDir = join(rootDir, "appdata", "Local");
+  const installDir = join(xdgDataHome, "sandbox-agent", "bin");
+
+  for (const dir of [homeDir, xdgDataHome, xdgStateHome, appDataDir, localAppDataDir, installDir]) {
+    mkdirSync(dir, { recursive: true });
+  }
+
+  return {
+    rootDir,
+    homeDir,
+    xdgDataHome,
+    xdgStateHome,
+    appDataDir,
+    localAppDataDir,
+    installDir,
+  };
+}
+
+export function disposeDockerTestLayout(layout: TestLayout): void {
+  try {
+    rmSync(layout.rootDir, { recursive: true, force: true });
+  } catch (error) {
+    if (typeof process.getuid === "function" && typeof process.getgid === "function") {
+      try {
+        execFileSync(
+          "docker",
+          [
+            "run",
+            "--rm",
+            "--user",
+            "0:0",
+            "--entrypoint",
+            "sh",
+            "-v",
+            `${layout.rootDir}:${layout.rootDir}`,
+            ensureImage(),
+            "-c",
+            `chown -R ${process.getuid()}:${process.getgid()} '${layout.rootDir}'`,
+          ],
+          { stdio: "pipe" },
+        );
+        rmSync(layout.rootDir, { recursive: true, force: true });
+        return;
+      } catch {}
+    }
+    throw error;
+  }
+}
+
+export async function startDockerSandboxAgent(layout: TestLayout, options: DockerSandboxAgentOptions = {}): Promise<DockerSandboxAgentHandle> {
+  const image = ensureImage();
+  const containerId = uniqueContainerId();
+  const env = buildEnv(layout, options.env ?? {}, options.pathMode ?? 
"merge"); + const mounts = buildMounts(layout.rootDir, env); + + const args = ["run", "-d", "--rm", "--name", containerId, "-p", `127.0.0.1::${CONTAINER_PORT}`]; + + if (typeof process.getuid === "function" && typeof process.getgid === "function") { + args.push("--user", `${process.getuid()}:${process.getgid()}`); + } + + if (process.platform === "linux") { + args.push("--add-host", "host.docker.internal:host-gateway"); + } + + for (const mount of mounts) { + args.push("-v", `${mount}:${mount}`); + } + + for (const [key, value] of Object.entries(env)) { + args.push("-e", `${key}=${value}`); + } + + args.push(image, "server", "--host", "0.0.0.0", "--port", String(CONTAINER_PORT), "--no-token"); + + execFileSync("docker", args, { stdio: "pipe" }); + + try { + const mapping = execFileSync("docker", ["port", containerId, `${CONTAINER_PORT}/tcp`], { + encoding: "utf8", + stdio: ["ignore", "pipe", "pipe"], + }).trim(); + const mappingParts = mapping.split(":"); + const hostPort = mappingParts[mappingParts.length - 1]?.trim(); + if (!hostPort) { + throw new Error(`missing mapped host port in ${mapping}`); + } + const baseUrl = `http://127.0.0.1:${hostPort}`; + await waitForHealth(baseUrl, options.timeoutMs ?? 30_000); + + return { + baseUrl, + token: "", + dispose: async () => { + try { + execFileSync("docker", ["rm", "-f", containerId], { stdio: "pipe" }); + } catch {} + }, + }; + } catch (error) { + try { + execFileSync("docker", ["rm", "-f", containerId], { stdio: "pipe" }); + } catch {} + throw error; + } +} + +function ensureImage(): string { + if (cachedImage) { + return cachedImage; + } + + cachedImage = process.env.SANDBOX_AGENT_TEST_IMAGE ?? 
DEFAULT_IMAGE_TAG;
+  execFileSync("docker", ["build", "--tag", cachedImage, "--file", resolve(REPO_ROOT, "docker/test-agent/Dockerfile"), REPO_ROOT], {
+    cwd: REPO_ROOT,
+    stdio: ["ignore", "ignore", "pipe"],
+  });
+  return cachedImage;
+}
+
+function buildEnv(layout: TestLayout, extraEnv: Record<string, string>, pathMode: "merge" | "replace"): Record<string, string> {
+  const env: Record<string, string> = {
+    HOME: layout.homeDir,
+    USERPROFILE: layout.homeDir,
+    XDG_DATA_HOME: layout.xdgDataHome,
+    XDG_STATE_HOME: layout.xdgStateHome,
+    APPDATA: layout.appDataDir,
+    LOCALAPPDATA: layout.localAppDataDir,
+    PATH: DEFAULT_PATH,
+  };
+
+  const customPathEntries = new Set();
+  for (const entry of (extraEnv.PATH ?? "").split(":")) {
+    if (!entry || entry === DEFAULT_PATH || !entry.startsWith("/")) continue;
+    if (entry.startsWith(layout.rootDir)) {
+      customPathEntries.add(entry);
+    }
+  }
+  if (pathMode === "replace") {
+    env.PATH = extraEnv.PATH ?? "";
+  } else if (customPathEntries.size > 0) {
+    env.PATH = `${Array.from(customPathEntries).join(":")}:${DEFAULT_PATH}`;
+  }
+
+  for (const [key, value] of Object.entries(extraEnv)) {
+    if (key === "PATH") {
+      continue;
+    }
+    env[key] = rewriteLocalhostUrl(key, value);
+  }
+
+  return env;
+}
+
+function buildMounts(rootDir: string, env: Record<string, string>): string[] {
+  const mounts = new Set([rootDir]);
+
+  for (const key of ["HOME", "USERPROFILE", "XDG_DATA_HOME", "XDG_STATE_HOME", "APPDATA", "LOCALAPPDATA", "SANDBOX_AGENT_DESKTOP_FAKE_STATE_DIR"]) {
+    const value = env[key];
+    if (value?.startsWith("/")) {
+      mounts.add(value);
+    }
+  }
+
+  for (const entry of (env.PATH ?? 
"").split(":")) {
+    if (entry.startsWith("/") && !STANDARD_PATHS.has(entry)) {
+      mounts.add(entry);
+    }
+  }
+
+  return Array.from(mounts);
+}
+
+async function waitForHealth(baseUrl: string, timeoutMs: number): Promise<void> {
+  const started = Date.now();
+  while (Date.now() - started < timeoutMs) {
+    try {
+      const response = await fetch(`${baseUrl}/v1/health`);
+      if (response.ok) {
+        return;
+      }
+    } catch {}
+    await new Promise((resolve) => setTimeout(resolve, 200));
+  }
+
+  throw new Error(`timed out waiting for sandbox-agent health at ${baseUrl}`);
+}
+
+function uniqueContainerId(): string {
+  containerCounter += 1;
+  return `sandbox-agent-ts-${process.pid}-${Date.now().toString(36)}-${containerCounter.toString(36)}`;
+}
+
+function rewriteLocalhostUrl(key: string, value: string): string {
+  if (key.endsWith("_URL") || key.endsWith("_URI")) {
+    return value.replace("http://127.0.0.1", "http://host.docker.internal").replace("http://localhost", "http://host.docker.internal");
+  }
+  return value;
+}
diff --git a/sdks/typescript/tests/integration.test.ts b/sdks/typescript/tests/integration.test.ts
index 295e688..d5ae278 100644
--- a/sdks/typescript/tests/integration.test.ts
+++ b/sdks/typescript/tests/integration.test.ts
@@ -1,9 +1,6 @@
-import { describe, it, expect, beforeAll, afterAll } from "vitest";
-import { existsSync } from "node:fs";
-import { mkdtempSync, rmSync } from "node:fs";
-import { dirname, resolve } from "node:path";
+import { describe, it, expect, beforeEach, afterEach } from "vitest";
+import { mkdirSync, mkdtempSync, rmSync } from "node:fs";
 import { join } from "node:path";
-import { fileURLToPath } from "node:url";
 import { tmpdir } from "node:os";
 import {
   InMemorySessionPersistDriver,
@@ -14,36 +11,11 @@ import {
   type SessionPersistDriver,
   type SessionRecord,
 } from "../src/index.ts";
-import { spawnSandboxAgent, isNodeRuntime, type SandboxAgentSpawnHandle } from "../src/spawn.ts";
+import { isNodeRuntime } from "../src/spawn.ts";
+import { 
createDockerTestLayout, disposeDockerTestLayout, startDockerSandboxAgent, type DockerSandboxAgentHandle } from "./helpers/docker.ts"; import { prepareMockAgentDataHome } from "./helpers/mock-agent.ts"; import WebSocket from "ws"; -const __dirname = dirname(fileURLToPath(import.meta.url)); - -function findBinary(): string | null { - if (process.env.SANDBOX_AGENT_BIN) { - return process.env.SANDBOX_AGENT_BIN; - } - - const cargoPaths = [resolve(__dirname, "../../../target/debug/sandbox-agent"), resolve(__dirname, "../../../target/release/sandbox-agent")]; - - for (const p of cargoPaths) { - if (existsSync(p)) { - return p; - } - } - - return null; -} - -const BINARY_PATH = findBinary(); -if (!BINARY_PATH) { - throw new Error("sandbox-agent binary not found. Build it (cargo build -p sandbox-agent) or set SANDBOX_AGENT_BIN."); -} -if (!process.env.SANDBOX_AGENT_BIN) { - process.env.SANDBOX_AGENT_BIN = BINARY_PATH; -} - function sleep(ms: number): Promise { return new Promise((resolve) => setTimeout(resolve, ms)); } @@ -110,6 +82,15 @@ async function waitForAsync(fn: () => Promise, timeoutM throw new Error("timed out waiting for condition"); } +async function withTimeout(promise: Promise, label: string, timeoutMs = 15_000): Promise { + return await Promise.race([ + promise, + sleep(timeoutMs).then(() => { + throw new Error(`${label} timed out after ${timeoutMs}ms`); + }), + ]); +} + function buildTarArchive(entries: Array<{ name: string; content: string }>): Uint8Array { const blocks: Buffer[] = []; @@ -174,34 +155,77 @@ function decodeProcessLogData(data: string, encoding: string): string { function nodeCommand(source: string): { command: string; args: string[] } { return { - command: process.execPath, + command: "node", args: ["-e", source], }; } +function forwardRequest(defaultFetch: typeof fetch, baseUrl: string, outgoing: Request, parsed: URL): Promise { + const forwardedInit: RequestInit & { duplex?: "half" } = { + method: outgoing.method, + headers: new 
Headers(outgoing.headers), + signal: outgoing.signal, + }; + + if (outgoing.method !== "GET" && outgoing.method !== "HEAD") { + forwardedInit.body = outgoing.body; + forwardedInit.duplex = "half"; + } + + const forwardedUrl = new URL(`${parsed.pathname}${parsed.search}`, baseUrl); + return defaultFetch(forwardedUrl, forwardedInit); +} + +async function launchDesktopFocusWindow(sdk: SandboxAgent, display: string): Promise { + const windowProcess = await sdk.createProcess({ + command: "xterm", + args: ["-geometry", "80x24+40+40", "-title", "Sandbox Desktop Test", "-e", "sh", "-lc", "sleep 60"], + env: { DISPLAY: display }, + }); + + await waitForAsync( + async () => { + const result = await sdk.runProcess({ + command: "sh", + args: [ + "-lc", + 'wid="$(xdotool search --onlyvisible --name \'Sandbox Desktop Test\' 2>/dev/null | head -n 1 || true)"; if [ -z "$wid" ]; then exit 3; fi; xdotool windowactivate "$wid"', + ], + env: { DISPLAY: display }, + timeoutMs: 5_000, + }); + + return result.exitCode === 0 ? 
true : undefined; + }, + 10_000, + 200, + ); + + return windowProcess.id; +} + describe("Integration: TypeScript SDK flat session API", () => { - let handle: SandboxAgentSpawnHandle; + let handle: DockerSandboxAgentHandle; let baseUrl: string; let token: string; - let dataHome: string; + let layout: ReturnType; - beforeAll(async () => { - dataHome = mkdtempSync(join(tmpdir(), "sdk-integration-")); - const agentEnv = prepareMockAgentDataHome(dataHome); + beforeEach(async () => { + layout = createDockerTestLayout(); + prepareMockAgentDataHome(layout.xdgDataHome); - handle = await spawnSandboxAgent({ - enabled: true, - log: "silent", + handle = await startDockerSandboxAgent(layout, { timeoutMs: 30000, - env: agentEnv, }); baseUrl = handle.baseUrl; token = handle.token; }); - afterAll(async () => { - await handle.dispose(); - rmSync(dataHome, { recursive: true, force: true }); + afterEach(async () => { + await handle?.dispose?.(); + if (layout) { + disposeDockerTestLayout(layout); + } }); it("detects Node.js runtime", () => { @@ -280,11 +304,12 @@ describe("Integration: TypeScript SDK flat session API", () => { token, }); - const directory = mkdtempSync(join(tmpdir(), "sdk-fs-")); + const directory = join(layout.rootDir, "fs-test"); const nestedDir = join(directory, "nested"); const filePath = join(directory, "notes.txt"); const movedPath = join(directory, "notes-moved.txt"); const uploadDir = join(directory, "uploaded"); + mkdirSync(directory, { recursive: true }); try { const listedAgents = await sdk.listAgents({ config: true, noCache: true }); @@ -341,25 +366,30 @@ describe("Integration: TypeScript SDK flat session API", () => { const parsed = new URL(outgoing.url); seenPaths.push(parsed.pathname); - const forwardedUrl = new URL(`${parsed.pathname}${parsed.search}`, baseUrl); - const forwarded = new Request(forwardedUrl.toString(), outgoing); - return defaultFetch(forwarded); + return forwardRequest(defaultFetch, baseUrl, outgoing, parsed); }; const sdk = await 
SandboxAgent.connect({ token, fetch: customFetch, }); + let sessionId: string | undefined; - await sdk.getHealth(); - const session = await sdk.createSession({ agent: "mock" }); - const prompt = await session.prompt([{ type: "text", text: "custom fetch integration test" }]); - expect(prompt.stopReason).toBe("end_turn"); + try { + await withTimeout(sdk.getHealth(), "custom fetch getHealth"); + const session = await withTimeout(sdk.createSession({ agent: "mock" }), "custom fetch createSession"); + sessionId = session.id; + expect(session.agent).toBe("mock"); + await withTimeout(sdk.destroySession(session.id), "custom fetch destroySession"); - expect(seenPaths).toContain("/v1/health"); - expect(seenPaths.some((path) => path.startsWith("/v1/acp/"))).toBe(true); - - await sdk.dispose(); + expect(seenPaths).toContain("/v1/health"); + expect(seenPaths.some((path) => path.startsWith("/v1/acp/"))).toBe(true); + } finally { + if (sessionId) { + await sdk.destroySession(sessionId).catch(() => {}); + } + await withTimeout(sdk.dispose(), "custom fetch dispose"); + } }, 60_000); it("requires baseUrl when fetch is not provided", async () => { @@ -386,9 +416,7 @@ describe("Integration: TypeScript SDK flat session API", () => { } } - const forwardedUrl = new URL(`${parsed.pathname}${parsed.search}`, baseUrl); - const forwarded = new Request(forwardedUrl.toString(), outgoing); - return defaultFetch(forwarded); + return forwardRequest(defaultFetch, baseUrl, outgoing, parsed); }; const sdk = await SandboxAgent.connect({ @@ -710,7 +738,9 @@ describe("Integration: TypeScript SDK flat session API", () => { token, }); - const directory = mkdtempSync(join(tmpdir(), "sdk-config-")); + const directory = join(layout.rootDir, "config-test"); + + mkdirSync(directory, { recursive: true }); const mcpConfig = { type: "local" as const, @@ -957,4 +987,98 @@ describe("Integration: TypeScript SDK flat session API", () => { await sdk.dispose(); } }); + + it("covers desktop status, screenshot, display, 
mouse, and keyboard helpers", async () => { + const sdk = await SandboxAgent.connect({ + baseUrl, + token, + }); + let focusWindowProcessId: string | undefined; + + try { + const initialStatus = await sdk.getDesktopStatus(); + expect(initialStatus.state).toBe("inactive"); + + const started = await sdk.startDesktop({ + width: 1440, + height: 900, + dpi: 96, + }); + expect(started.state).toBe("active"); + expect(started.display?.startsWith(":")).toBe(true); + expect(started.missingDependencies).toEqual([]); + + const displayInfo = await sdk.getDesktopDisplayInfo(); + expect(displayInfo.display).toBe(started.display); + expect(displayInfo.resolution.width).toBe(1440); + expect(displayInfo.resolution.height).toBe(900); + + const screenshot = await sdk.takeDesktopScreenshot(); + expect(Buffer.from(screenshot.subarray(0, 8)).equals(Buffer.from("\x89PNG\r\n\x1a\n", "binary"))).toBe(true); + + const region = await sdk.takeDesktopRegionScreenshot({ + x: 10, + y: 20, + width: 40, + height: 50, + }); + expect(Buffer.from(region.subarray(0, 8)).equals(Buffer.from("\x89PNG\r\n\x1a\n", "binary"))).toBe(true); + + const moved = await sdk.moveDesktopMouse({ x: 40, y: 50 }); + expect(moved.x).toBe(40); + expect(moved.y).toBe(50); + + const dragged = await sdk.dragDesktopMouse({ + startX: 40, + startY: 50, + endX: 80, + endY: 90, + button: "left", + }); + expect(dragged.x).toBe(80); + expect(dragged.y).toBe(90); + + const clicked = await sdk.clickDesktop({ + x: 80, + y: 90, + button: "left", + clickCount: 1, + }); + expect(clicked.x).toBe(80); + expect(clicked.y).toBe(90); + + const scrolled = await sdk.scrollDesktop({ + x: 80, + y: 90, + deltaY: -2, + }); + expect(scrolled.x).toBe(80); + expect(scrolled.y).toBe(90); + + const position = await sdk.getDesktopMousePosition(); + expect(position.x).toBe(80); + expect(position.y).toBe(90); + + focusWindowProcessId = await launchDesktopFocusWindow(sdk, started.display!); + + const typed = await sdk.typeDesktopText({ + text: "hello 
desktop", + delayMs: 5, + }); + expect(typed.ok).toBe(true); + + const pressed = await sdk.pressDesktopKey({ key: "ctrl+l" }); + expect(pressed.ok).toBe(true); + + const stopped = await sdk.stopDesktop(); + expect(stopped.state).toBe("inactive"); + } finally { + if (focusWindowProcessId) { + await sdk.killProcess(focusWindowProcessId, { waitMs: 5_000 }).catch(() => {}); + await sdk.deleteProcess(focusWindowProcessId).catch(() => {}); + } + await sdk.stopDesktop().catch(() => {}); + await sdk.dispose(); + } + }); }); diff --git a/sdks/typescript/vitest.config.ts b/sdks/typescript/vitest.config.ts index e83d10a..a3ba3f3 100644 --- a/sdks/typescript/vitest.config.ts +++ b/sdks/typescript/vitest.config.ts @@ -4,7 +4,6 @@ export default defineConfig({ test: { include: ["tests/**/*.test.ts"], testTimeout: 30000, - teardownTimeout: 10000, - pool: "forks", + hookTimeout: 120000, }, }); diff --git a/server/packages/sandbox-agent/src/cli.rs b/server/packages/sandbox-agent/src/cli.rs index 51757b6..000ea41 100644 --- a/server/packages/sandbox-agent/src/cli.rs +++ b/server/packages/sandbox-agent/src/cli.rs @@ -11,6 +11,7 @@ mod build_version { include!(concat!(env!("OUT_DIR"), "/version.rs")); } +use crate::desktop_install::{install_desktop, DesktopInstallRequest, DesktopPackageManager}; use crate::router::{ build_router_with_state, shutdown_servers, AppState, AuthConfig, BrandingMode, }; @@ -75,6 +76,8 @@ pub enum Command { Server(ServerArgs), /// Call the HTTP API without writing client code. Api(ApiArgs), + /// Install first-party runtime dependencies. + Install(InstallArgs), /// EXPERIMENTAL: OpenCode compatibility layer (disabled until ACP Phase 7). Opencode(OpencodeArgs), /// Manage the sandbox-agent background daemon. 
@@ -118,6 +121,12 @@ pub struct ApiArgs { command: ApiCommand, } +#[derive(Args, Debug)] +pub struct InstallArgs { + #[command(subcommand)] + command: InstallCommand, +} + #[derive(Args, Debug)] pub struct OpencodeArgs { #[arg(long, short = 'H', default_value = DEFAULT_HOST)] @@ -156,6 +165,12 @@ pub struct DaemonArgs { command: DaemonCommand, } +#[derive(Subcommand, Debug)] +pub enum InstallCommand { + /// Install desktop runtime dependencies. + Desktop(InstallDesktopArgs), +} + #[derive(Subcommand, Debug)] pub enum DaemonCommand { /// Start the daemon in the background. @@ -310,6 +325,18 @@ pub struct InstallAgentArgs { agent_process_version: Option, } +#[derive(Args, Debug)] +pub struct InstallDesktopArgs { + #[arg(long, default_value_t = false)] + yes: bool, + #[arg(long, default_value_t = false)] + print_only: bool, + #[arg(long, value_enum)] + package_manager: Option, + #[arg(long, default_value_t = false)] + no_fonts: bool, +} + #[derive(Args, Debug)] pub struct CredentialsExtractArgs { #[arg(long, short = 'a', value_enum)] @@ -405,6 +432,7 @@ pub fn run_command(command: &Command, cli: &CliConfig) -> Result<(), CliError> { match command { Command::Server(args) => run_server(cli, args), Command::Api(subcommand) => run_api(&subcommand.command, cli), + Command::Install(subcommand) => run_install(&subcommand.command), Command::Opencode(args) => run_opencode(cli, args), Command::Daemon(subcommand) => run_daemon(&subcommand.command, cli), Command::InstallAgent(args) => install_agent_local(args), @@ -413,6 +441,12 @@ pub fn run_command(command: &Command, cli: &CliConfig) -> Result<(), CliError> { } } +fn run_install(command: &InstallCommand) -> Result<(), CliError> { + match command { + InstallCommand::Desktop(args) => install_desktop_local(args), + } +} + fn run_server(cli: &CliConfig, server: &ServerArgs) -> Result<(), CliError> { let auth = if let Some(token) = cli.token.clone() { AuthConfig::with_token(token) @@ -477,6 +511,17 @@ fn run_api(command: 
&ApiCommand, cli: &CliConfig) -> Result<(), CliError> { } } +fn install_desktop_local(args: &InstallDesktopArgs) -> Result<(), CliError> { + install_desktop(DesktopInstallRequest { + yes: args.yes, + print_only: args.print_only, + package_manager: args.package_manager, + no_fonts: args.no_fonts, + }) + .map(|_| ()) + .map_err(CliError::Server) +} + fn run_agents(command: &AgentsCommand, cli: &CliConfig) -> Result<(), CliError> { match command { AgentsCommand::List(args) => { diff --git a/server/packages/sandbox-agent/src/desktop_errors.rs b/server/packages/sandbox-agent/src/desktop_errors.rs new file mode 100644 index 0000000..67f99b9 --- /dev/null +++ b/server/packages/sandbox-agent/src/desktop_errors.rs @@ -0,0 +1,217 @@ +use sandbox_agent_error::ProblemDetails; +use serde_json::{json, Map, Value}; + +use crate::desktop_types::{DesktopErrorInfo, DesktopProcessInfo}; + +#[derive(Debug, Clone)] +pub struct DesktopProblem { + status: u16, + title: &'static str, + code: &'static str, + message: String, + missing_dependencies: Vec, + install_command: Option, + processes: Vec, +} + +impl DesktopProblem { + pub fn unsupported_platform(message: impl Into) -> Self { + Self::new( + 501, + "Desktop Unsupported", + "desktop_unsupported_platform", + message, + ) + } + + pub fn dependencies_missing( + missing_dependencies: Vec, + install_command: Option, + processes: Vec, + ) -> Self { + let mut message = if missing_dependencies.is_empty() { + "Desktop dependencies are not installed".to_string() + } else { + format!( + "Desktop dependencies are not installed: {}", + missing_dependencies.join(", ") + ) + }; + if let Some(command) = install_command.as_ref() { + message.push_str(&format!( + ". Run `{command}` to install them, or install the required tools manually." 
+ )); + } + Self::new( + 503, + "Desktop Dependencies Missing", + "desktop_dependencies_missing", + message, + ) + .with_missing_dependencies(missing_dependencies) + .with_install_command(install_command) + .with_processes(processes) + } + + pub fn runtime_inactive(message: impl Into) -> Self { + Self::new( + 409, + "Desktop Runtime Inactive", + "desktop_runtime_inactive", + message, + ) + } + + pub fn runtime_starting(message: impl Into) -> Self { + Self::new( + 409, + "Desktop Runtime Starting", + "desktop_runtime_starting", + message, + ) + } + + pub fn runtime_failed( + message: impl Into, + install_command: Option, + processes: Vec, + ) -> Self { + Self::new( + 503, + "Desktop Runtime Failed", + "desktop_runtime_failed", + message, + ) + .with_install_command(install_command) + .with_processes(processes) + } + + pub fn invalid_action(message: impl Into) -> Self { + Self::new( + 400, + "Desktop Invalid Action", + "desktop_invalid_action", + message, + ) + } + + pub fn screenshot_failed( + message: impl Into, + processes: Vec, + ) -> Self { + Self::new( + 502, + "Desktop Screenshot Failed", + "desktop_screenshot_failed", + message, + ) + .with_processes(processes) + } + + pub fn input_failed(message: impl Into, processes: Vec) -> Self { + Self::new(502, "Desktop Input Failed", "desktop_input_failed", message) + .with_processes(processes) + } + + pub fn to_problem_details(&self) -> ProblemDetails { + let mut extensions = Map::new(); + extensions.insert("code".to_string(), Value::String(self.code.to_string())); + if !self.missing_dependencies.is_empty() { + extensions.insert( + "missingDependencies".to_string(), + Value::Array( + self.missing_dependencies + .iter() + .cloned() + .map(Value::String) + .collect(), + ), + ); + } + if let Some(install_command) = self.install_command.as_ref() { + extensions.insert( + "installCommand".to_string(), + Value::String(install_command.clone()), + ); + } + if !self.processes.is_empty() { + 
extensions.insert("processes".to_string(), json!(self.processes)); + } + + ProblemDetails { + type_: format!("urn:sandbox-agent:error:{}", self.code), + title: self.title.to_string(), + status: self.status, + detail: Some(self.message.clone()), + instance: None, + extensions, + } + } + + pub fn to_error_info(&self) -> DesktopErrorInfo { + DesktopErrorInfo { + code: self.code.to_string(), + message: self.message.clone(), + } + } + + pub fn code(&self) -> &'static str { + self.code + } + + fn new( + status: u16, + title: &'static str, + code: &'static str, + message: impl Into, + ) -> Self { + Self { + status, + title, + code, + message: message.into(), + missing_dependencies: Vec::new(), + install_command: None, + processes: Vec::new(), + } + } + + fn with_missing_dependencies(mut self, missing_dependencies: Vec) -> Self { + self.missing_dependencies = missing_dependencies; + self + } + + fn with_install_command(mut self, install_command: Option) -> Self { + self.install_command = install_command; + self + } + + fn with_processes(mut self, processes: Vec) -> Self { + self.processes = processes; + self + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn dependencies_missing_detail_includes_install_command() { + let problem = DesktopProblem::dependencies_missing( + vec!["Xvfb".to_string(), "openbox".to_string()], + Some("sandbox-agent install desktop --yes".to_string()), + Vec::new(), + ); + let details = problem.to_problem_details(); + let detail = details.detail.expect("detail"); + assert!(detail.contains("Desktop dependencies are not installed: Xvfb, openbox")); + assert!(detail.contains("sandbox-agent install desktop --yes")); + assert_eq!( + details.extensions.get("installCommand"), + Some(&Value::String( + "sandbox-agent install desktop --yes".to_string() + )) + ); + } +} diff --git a/server/packages/sandbox-agent/src/desktop_install.rs b/server/packages/sandbox-agent/src/desktop_install.rs new file mode 100644 index 0000000..480da7d --- 
/dev/null +++ b/server/packages/sandbox-agent/src/desktop_install.rs @@ -0,0 +1,324 @@ +use std::fmt; +use std::io::{self, Write}; +use std::path::PathBuf; +use std::process::Command as ProcessCommand; + +use clap::ValueEnum; + +const AUTOMATIC_INSTALL_SUPPORTED_DISTROS: &str = + "Automatic desktop dependency installation is supported on Debian/Ubuntu (apt), Fedora/RHEL (dnf), and Alpine (apk)."; +const AUTOMATIC_INSTALL_UNSUPPORTED_ENVS: &str = + "Automatic installation is not supported on macOS, Windows, or Linux distributions without apt, dnf, or apk."; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, ValueEnum)] +pub enum DesktopPackageManager { + Apt, + Dnf, + Apk, +} + +#[derive(Debug, Clone)] +pub struct DesktopInstallRequest { + pub yes: bool, + pub print_only: bool, + pub package_manager: Option, + pub no_fonts: bool, +} + +pub(crate) fn desktop_platform_support_message() -> String { + format!("Desktop APIs are only supported on Linux. {AUTOMATIC_INSTALL_SUPPORTED_DISTROS}") +} + +fn linux_install_support_message() -> String { + format!("{AUTOMATIC_INSTALL_SUPPORTED_DISTROS} {AUTOMATIC_INSTALL_UNSUPPORTED_ENVS}") +} + +pub fn install_desktop(request: DesktopInstallRequest) -> Result<(), String> { + if std::env::consts::OS != "linux" { + return Err(format!( + "desktop installation is only supported on Linux. {}", + linux_install_support_message() + )); + } + + let package_manager = match request.package_manager { + Some(value) => value, + None => detect_package_manager().ok_or_else(|| { + format!( + "could not detect a supported package manager. 
{} Install the desktop dependencies manually on this distribution.", + linux_install_support_message() + ) + })?, + }; + + let packages = desktop_packages(package_manager, request.no_fonts); + let used_sudo = !running_as_root() && find_binary("sudo").is_some(); + if !running_as_root() && !used_sudo { + return Err( + "desktop installation requires root or sudo access; rerun as root or install dependencies manually" + .to_string(), + ); + } + + println!("Desktop package manager: {}", package_manager); + println!("Desktop packages:"); + for package in &packages { + println!(" - {package}"); + } + println!("Install command:"); + println!( + " {}", + render_install_command(package_manager, used_sudo, &packages) + ); + + if request.print_only { + return Ok(()); + } + + if !request.yes && !prompt_yes_no("Proceed with desktop dependency installation? [y/N] ")? { + return Err("installation cancelled".to_string()); + } + + run_install_commands(package_manager, used_sudo, &packages)?; + + println!("Desktop dependencies installed."); + Ok(()) +} + +fn detect_package_manager() -> Option { + if find_binary("apt-get").is_some() { + return Some(DesktopPackageManager::Apt); + } + if find_binary("dnf").is_some() { + return Some(DesktopPackageManager::Dnf); + } + if find_binary("apk").is_some() { + return Some(DesktopPackageManager::Apk); + } + None +} + +fn desktop_packages(package_manager: DesktopPackageManager, no_fonts: bool) -> Vec { + let mut packages = match package_manager { + DesktopPackageManager::Apt => vec![ + "xvfb", + "openbox", + "xdotool", + "imagemagick", + "ffmpeg", + "x11-xserver-utils", + "dbus-x11", + "xauth", + "fonts-dejavu-core", + ], + DesktopPackageManager::Dnf => vec![ + "xorg-x11-server-Xvfb", + "openbox", + "xdotool", + "ImageMagick", + "ffmpeg", + "xrandr", + "dbus-x11", + "xauth", + "dejavu-sans-fonts", + ], + DesktopPackageManager::Apk => vec![ + "xvfb", + "openbox", + "xdotool", + "imagemagick", + "ffmpeg", + "xrandr", + "dbus", + "xauth", + 
"ttf-dejavu", + ], + } + .into_iter() + .map(str::to_string) + .collect::>(); + + if no_fonts { + packages.retain(|package| { + package != "fonts-dejavu-core" + && package != "dejavu-sans-fonts" + && package != "ttf-dejavu" + }); + } + + packages +} + +fn render_install_command( + package_manager: DesktopPackageManager, + used_sudo: bool, + packages: &[String], +) -> String { + let sudo = if used_sudo { "sudo " } else { "" }; + match package_manager { + DesktopPackageManager::Apt => format!( + "{sudo}apt-get update && {sudo}env DEBIAN_FRONTEND=noninteractive apt-get install -y {}", + packages.join(" ") + ), + DesktopPackageManager::Dnf => { + format!("{sudo}dnf install -y {}", packages.join(" ")) + } + DesktopPackageManager::Apk => { + format!("{sudo}apk add --no-cache {}", packages.join(" ")) + } + } +} + +fn run_install_commands( + package_manager: DesktopPackageManager, + used_sudo: bool, + packages: &[String], +) -> Result<(), String> { + match package_manager { + DesktopPackageManager::Apt => { + run_command(command_with_privilege( + used_sudo, + "apt-get", + vec!["update".to_string()], + ))?; + let mut args = vec![ + "DEBIAN_FRONTEND=noninteractive".to_string(), + "apt-get".to_string(), + "install".to_string(), + "-y".to_string(), + ]; + args.extend(packages.iter().cloned()); + run_command(command_with_privilege(used_sudo, "env", args))?; + } + DesktopPackageManager::Dnf => { + let mut args = vec!["install".to_string(), "-y".to_string()]; + args.extend(packages.iter().cloned()); + run_command(command_with_privilege(used_sudo, "dnf", args))?; + } + DesktopPackageManager::Apk => { + let mut args = vec!["add".to_string(), "--no-cache".to_string()]; + args.extend(packages.iter().cloned()); + run_command(command_with_privilege(used_sudo, "apk", args))?; + } + } + Ok(()) +} + +fn command_with_privilege( + used_sudo: bool, + program: &str, + args: Vec, +) -> (String, Vec) { + if used_sudo { + let mut sudo_args = vec![program.to_string()]; + sudo_args.extend(args); + 
("sudo".to_string(), sudo_args) + } else { + (program.to_string(), args) + } +} + +fn run_command((program, args): (String, Vec)) -> Result<(), String> { + let status = ProcessCommand::new(&program) + .args(&args) + .status() + .map_err(|err| format!("failed to run `{program}`: {err}"))?; + if !status.success() { + return Err(format!( + "command `{}` exited with status {}", + format_command(&program, &args), + status + )); + } + Ok(()) +} + +fn prompt_yes_no(prompt: &str) -> Result { + print!("{prompt}"); + io::stdout() + .flush() + .map_err(|err| format!("failed to flush prompt: {err}"))?; + let mut input = String::new(); + io::stdin() + .read_line(&mut input) + .map_err(|err| format!("failed to read confirmation: {err}"))?; + let normalized = input.trim().to_ascii_lowercase(); + Ok(matches!(normalized.as_str(), "y" | "yes")) +} + +fn running_as_root() -> bool { + #[cfg(unix)] + unsafe { + return libc::geteuid() == 0; + } + #[cfg(not(unix))] + { + false + } +} + +fn find_binary(name: &str) -> Option { + let path_env = std::env::var_os("PATH")?; + for path in std::env::split_paths(&path_env) { + let candidate = path.join(name); + if candidate.is_file() { + return Some(candidate); + } + } + None +} + +fn format_command(program: &str, args: &[String]) -> String { + let mut parts = vec![program.to_string()]; + parts.extend(args.iter().cloned()); + parts.join(" ") +} + +impl fmt::Display for DesktopPackageManager { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + DesktopPackageManager::Apt => write!(f, "apt"), + DesktopPackageManager::Dnf => write!(f, "dnf"), + DesktopPackageManager::Apk => write!(f, "apk"), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn desktop_platform_support_message_mentions_linux_and_supported_distros() { + let message = desktop_platform_support_message(); + assert!(message.contains("only supported on Linux")); + assert!(message.contains("Debian/Ubuntu (apt)")); + 
assert!(message.contains("Fedora/RHEL (dnf)")); + assert!(message.contains("Alpine (apk)")); + } + + #[test] + fn linux_install_support_message_mentions_unsupported_environments() { + let message = linux_install_support_message(); + assert!(message.contains("Debian/Ubuntu (apt)")); + assert!(message.contains("Fedora/RHEL (dnf)")); + assert!(message.contains("Alpine (apk)")); + assert!(message.contains("macOS")); + assert!(message.contains("Windows")); + assert!(message.contains("without apt, dnf, or apk")); + } + + #[test] + fn desktop_packages_support_no_fonts() { + let packages = desktop_packages(DesktopPackageManager::Apt, true); + assert!(!packages.iter().any(|value| value == "fonts-dejavu-core")); + assert!(packages.iter().any(|value| value == "xvfb")); + } + + #[test] + fn render_install_command_matches_package_manager() { + let packages = vec!["xvfb".to_string(), "openbox".to_string()]; + let command = render_install_command(DesktopPackageManager::Apk, false, &packages); + assert_eq!(command, "apk add --no-cache xvfb openbox"); + } +} diff --git a/server/packages/sandbox-agent/src/desktop_recording.rs b/server/packages/sandbox-agent/src/desktop_recording.rs new file mode 100644 index 0000000..39b174a --- /dev/null +++ b/server/packages/sandbox-agent/src/desktop_recording.rs @@ -0,0 +1,329 @@ +use std::collections::BTreeMap; +use std::fs; +use std::path::{Path, PathBuf}; +use std::sync::Arc; + +use tokio::sync::Mutex; + +use sandbox_agent_error::SandboxError; + +use crate::desktop_types::{ + DesktopRecordingInfo, DesktopRecordingListResponse, DesktopRecordingStartRequest, + DesktopRecordingStatus, DesktopResolution, +}; +use crate::process_runtime::{ + ProcessOwner, ProcessRuntime, ProcessStartSpec, ProcessStatus, RestartPolicy, +}; + +#[derive(Debug, Clone)] +pub struct DesktopRecordingContext { + pub display: String, + pub environment: std::collections::HashMap, + pub resolution: DesktopResolution, +} + +#[derive(Debug, Clone)] +pub struct 
DesktopRecordingManager { + process_runtime: Arc, + recordings_dir: PathBuf, + inner: Arc>, +} + +#[derive(Debug, Default)] +struct DesktopRecordingState { + next_id: u64, + current_id: Option, + recordings: BTreeMap, +} + +#[derive(Debug, Clone)] +struct RecordingEntry { + info: DesktopRecordingInfo, + path: PathBuf, +} + +impl DesktopRecordingManager { + pub fn new(process_runtime: Arc, state_dir: PathBuf) -> Self { + Self { + process_runtime, + recordings_dir: state_dir.join("recordings"), + inner: Arc::new(Mutex::new(DesktopRecordingState::default())), + } + } + + pub async fn start( + &self, + context: DesktopRecordingContext, + request: DesktopRecordingStartRequest, + ) -> Result { + if find_binary("ffmpeg").is_none() { + return Err(SandboxError::Conflict { + message: "ffmpeg is required for desktop recording".to_string(), + }); + } + + self.ensure_recordings_dir()?; + + { + let mut state = self.inner.lock().await; + self.refresh_locked(&mut state).await?; + if state.current_id.is_some() { + return Err(SandboxError::Conflict { + message: "a desktop recording is already active".to_string(), + }); + } + } + + let mut state = self.inner.lock().await; + let id_num = state.next_id + 1; + state.next_id = id_num; + let id = format!("rec_{id_num}"); + let file_name = format!("{id}.mp4"); + let path = self.recordings_dir.join(&file_name); + let fps = request.fps.unwrap_or(30).clamp(1, 60); + let args = vec![ + "-y".to_string(), + "-video_size".to_string(), + format!("{}x{}", context.resolution.width, context.resolution.height), + "-framerate".to_string(), + fps.to_string(), + "-f".to_string(), + "x11grab".to_string(), + "-i".to_string(), + context.display, + "-c:v".to_string(), + "libx264".to_string(), + "-preset".to_string(), + "ultrafast".to_string(), + "-pix_fmt".to_string(), + "yuv420p".to_string(), + path.to_string_lossy().to_string(), + ]; + let snapshot = self + .process_runtime + .start_process(ProcessStartSpec { + command: "ffmpeg".to_string(), + args, + cwd: 
None, + env: context.environment, + tty: false, + interactive: false, + owner: ProcessOwner::Desktop, + restart_policy: Some(RestartPolicy::Never), + }) + .await?; + + let info = DesktopRecordingInfo { + id: id.clone(), + status: DesktopRecordingStatus::Recording, + process_id: Some(snapshot.id), + file_name, + bytes: 0, + started_at: chrono::Utc::now().to_rfc3339(), + ended_at: None, + }; + state.current_id = Some(id.clone()); + state.recordings.insert( + id, + RecordingEntry { + info: info.clone(), + path, + }, + ); + Ok(info) + } + + pub async fn stop(&self) -> Result { + let (recording_id, process_id) = { + let mut state = self.inner.lock().await; + self.refresh_locked(&mut state).await?; + let recording_id = state + .current_id + .clone() + .ok_or_else(|| SandboxError::Conflict { + message: "no desktop recording is active".to_string(), + })?; + let process_id = state + .recordings + .get(&recording_id) + .and_then(|entry| entry.info.process_id.clone()); + (recording_id, process_id) + }; + + if let Some(process_id) = process_id { + let snapshot = self + .process_runtime + .stop_process(&process_id, Some(5_000)) + .await?; + if snapshot.status == ProcessStatus::Running { + let _ = self + .process_runtime + .kill_process(&process_id, Some(1_000)) + .await; + } + } + + let mut state = self.inner.lock().await; + self.refresh_locked(&mut state).await?; + let entry = state + .recordings + .get(&recording_id) + .ok_or_else(|| SandboxError::NotFound { + resource: "desktop_recording".to_string(), + id: recording_id.clone(), + })?; + Ok(entry.info.clone()) + } + + pub async fn list(&self) -> Result { + let mut state = self.inner.lock().await; + self.refresh_locked(&mut state).await?; + Ok(DesktopRecordingListResponse { + recordings: state + .recordings + .values() + .map(|entry| entry.info.clone()) + .collect(), + }) + } + + pub async fn get(&self, id: &str) -> Result { + let mut state = self.inner.lock().await; + self.refresh_locked(&mut state).await?; + state + 
.recordings + .get(id) + .map(|entry| entry.info.clone()) + .ok_or_else(|| SandboxError::NotFound { + resource: "desktop_recording".to_string(), + id: id.to_string(), + }) + } + + pub async fn download_path(&self, id: &str) -> Result { + let mut state = self.inner.lock().await; + self.refresh_locked(&mut state).await?; + let entry = state + .recordings + .get(id) + .ok_or_else(|| SandboxError::NotFound { + resource: "desktop_recording".to_string(), + id: id.to_string(), + })?; + if !entry.path.is_file() { + return Err(SandboxError::NotFound { + resource: "desktop_recording_file".to_string(), + id: id.to_string(), + }); + } + Ok(entry.path.clone()) + } + + pub async fn delete(&self, id: &str) -> Result<(), SandboxError> { + let mut state = self.inner.lock().await; + self.refresh_locked(&mut state).await?; + if state.current_id.as_deref() == Some(id) { + return Err(SandboxError::Conflict { + message: "stop the active desktop recording before deleting it".to_string(), + }); + } + let entry = state + .recordings + .remove(id) + .ok_or_else(|| SandboxError::NotFound { + resource: "desktop_recording".to_string(), + id: id.to_string(), + })?; + if entry.path.exists() { + fs::remove_file(&entry.path).map_err(|err| SandboxError::StreamError { + message: format!( + "failed to delete desktop recording {}: {err}", + entry.path.display() + ), + })?; + } + Ok(()) + } + + fn ensure_recordings_dir(&self) -> Result<(), SandboxError> { + fs::create_dir_all(&self.recordings_dir).map_err(|err| SandboxError::StreamError { + message: format!( + "failed to create desktop recordings dir {}: {err}", + self.recordings_dir.display() + ), + }) + } + + async fn refresh_locked(&self, state: &mut DesktopRecordingState) -> Result<(), SandboxError> { + let ids: Vec = state.recordings.keys().cloned().collect(); + for id in ids { + let should_clear_current = { + let Some(entry) = state.recordings.get_mut(&id) else { + continue; + }; + let Some(process_id) = entry.info.process_id.clone() else { + 
Self::refresh_bytes(entry); + continue; + }; + + let snapshot = match self.process_runtime.snapshot(&process_id).await { + Ok(snapshot) => snapshot, + Err(SandboxError::NotFound { .. }) => { + Self::finalize_entry(entry, false); + continue; + } + Err(err) => return Err(err), + }; + + if snapshot.status == ProcessStatus::Running { + Self::refresh_bytes(entry); + false + } else { + Self::finalize_entry(entry, snapshot.exit_code == Some(0)); + true + } + }; + + if should_clear_current && state.current_id.as_deref() == Some(id.as_str()) { + state.current_id = None; + } + } + + Ok(()) + } + + fn refresh_bytes(entry: &mut RecordingEntry) { + entry.info.bytes = file_size(&entry.path); + } + + fn finalize_entry(entry: &mut RecordingEntry, success: bool) { + let bytes = file_size(&entry.path); + entry.info.status = if success || (entry.path.is_file() && bytes > 0) { + DesktopRecordingStatus::Completed + } else { + DesktopRecordingStatus::Failed + }; + entry + .info + .ended_at + .get_or_insert_with(|| chrono::Utc::now().to_rfc3339()); + entry.info.bytes = bytes; + } +} + +fn find_binary(name: &str) -> Option { + let path_env = std::env::var_os("PATH")?; + for path in std::env::split_paths(&path_env) { + let candidate = path.join(name); + if candidate.is_file() { + return Some(candidate); + } + } + None +} + +fn file_size(path: &Path) -> u64 { + fs::metadata(path) + .map(|metadata| metadata.len()) + .unwrap_or(0) +} diff --git a/server/packages/sandbox-agent/src/desktop_runtime.rs b/server/packages/sandbox-agent/src/desktop_runtime.rs new file mode 100644 index 0000000..2363dda --- /dev/null +++ b/server/packages/sandbox-agent/src/desktop_runtime.rs @@ -0,0 +1,2215 @@ +use std::collections::HashMap; +use std::fs::{self, OpenOptions}; +use std::path::{Path, PathBuf}; +use std::process::{Output, Stdio}; +use std::sync::Arc; +use std::time::Duration; + +use tokio::process::{Child, Command}; +use tokio::sync::Mutex; + +use sandbox_agent_error::SandboxError; + +use 
crate::desktop_errors::DesktopProblem; +use crate::desktop_install::desktop_platform_support_message; +use crate::desktop_recording::{DesktopRecordingContext, DesktopRecordingManager}; +use crate::desktop_streaming::DesktopStreamingManager; +use crate::desktop_types::{ + DesktopActionResponse, DesktopDisplayInfoResponse, DesktopErrorInfo, DesktopKeyModifiers, + DesktopKeyboardDownRequest, DesktopKeyboardPressRequest, DesktopKeyboardTypeRequest, + DesktopKeyboardUpRequest, DesktopMouseButton, DesktopMouseClickRequest, + DesktopMouseDownRequest, DesktopMouseDragRequest, DesktopMouseMoveRequest, + DesktopMousePositionResponse, DesktopMouseScrollRequest, DesktopMouseUpRequest, + DesktopProcessInfo, DesktopRecordingInfo, DesktopRecordingListResponse, + DesktopRecordingStartRequest, DesktopRegionScreenshotQuery, DesktopResolution, + DesktopScreenshotFormat, DesktopScreenshotQuery, DesktopStartRequest, DesktopState, + DesktopStatusResponse, DesktopStreamStatusResponse, DesktopWindowInfo, + DesktopWindowListResponse, +}; +use crate::process_runtime::{ + ProcessOwner, ProcessRuntime, ProcessStartSpec, ProcessStatus, RestartPolicy, +}; + +const DEFAULT_WIDTH: u32 = 1440; +const DEFAULT_HEIGHT: u32 = 900; +const DEFAULT_DPI: u32 = 96; +const DEFAULT_DISPLAY_NUM: i32 = 99; +const MAX_DISPLAY_PROBE: i32 = 10; +const SCREENSHOT_TIMEOUT: Duration = Duration::from_secs(10); +const INPUT_TIMEOUT: Duration = Duration::from_secs(5); +const STARTUP_TIMEOUT: Duration = Duration::from_secs(15); +const PNG_SIGNATURE: &[u8] = b"\x89PNG\r\n\x1a\n"; +const JPEG_SIGNATURE: &[u8] = b"\xff\xd8\xff"; +const WEBP_RIFF_SIGNATURE: &[u8] = b"RIFF"; +const WEBP_WEBP_SIGNATURE: &[u8] = b"WEBP"; + +#[derive(Debug, Clone)] +pub struct DesktopRuntime { + config: DesktopRuntimeConfig, + process_runtime: Arc, + recording_manager: DesktopRecordingManager, + streaming_manager: DesktopStreamingManager, + inner: Arc>, +} + +#[derive(Debug, Clone)] +pub struct DesktopRuntimeConfig { + state_dir: PathBuf, + 
display_num: i32,
    assume_linux_for_tests: bool,
}

/// Mutable desktop-session state. Every field is protected by the
/// `DesktopRuntime` mutex; the `_locked` helpers expect the guard to be held.
#[derive(Debug)]
struct DesktopRuntimeStateData {
    // Lifecycle phase of the virtual desktop session.
    state: DesktopState,
    // X display number in use (rendered as ":<display_num>").
    display_num: i32,
    // Display string, populated while a session is configured/active.
    display: Option<String>,
    // Last known screen resolution reported by the X server.
    resolution: Option<DesktopResolution>,
    // RFC 3339 timestamp recorded when the session became active.
    started_at: Option<String>,
    // Most recent failure, surfaced through status responses.
    last_error: Option<DesktopErrorInfo>,
    // Required binaries not found on PATH. NOTE(review): element type
    // restored as String from usage — confirm against detect_missing_dependencies.
    missing_dependencies: Vec<String>,
    // Suggested install command for the missing dependencies, if any.
    install_command: Option<String>,
    // Log file receiving runtime lifecycle messages.
    runtime_log_path: PathBuf,
    // Environment (DISPLAY, DBUS_SESSION_BUS_ADDRESS, ...) passed to child tools.
    environment: HashMap<String, String>,
    // Managed Xvfb process, when started.
    xvfb: Option<ManagedDesktopProcess>,
    // Managed openbox window-manager process, when started.
    openbox: Option<ManagedDesktopProcess>,
    // PID of the dbus-launch session bus; SIGTERMed on stop.
    // NOTE(review): width restored as u32 (killed via `pid as i32`) — confirm.
    dbus_pid: Option<u32>,
}

/// Bookkeeping for a child process owned by the desktop runtime.
#[derive(Debug)]
struct ManagedDesktopProcess {
    name: &'static str,
    process_id: String,
    pid: Option<u32>,
    running: bool,
}

/// Snapshot of everything needed to talk to a live X session.
#[derive(Debug, Clone)]
struct DesktopReadyContext {
    display: String,
    environment: HashMap<String, String>,
    resolution: DesktopResolution,
}

/// An encoded screenshot together with its MIME type.
#[derive(Debug, Clone, PartialEq)]
pub struct DesktopScreenshotData {
    pub bytes: Vec<u8>,
    pub content_type: &'static str,
}

/// Normalized screenshot parameters (format, lossy quality, scale factor).
#[derive(Debug, Clone, Copy, PartialEq)]
struct DesktopScreenshotOptions {
    format: DesktopScreenshotFormat,
    quality: u8,
    scale: f32,
}

impl Default for DesktopScreenshotOptions {
    fn default() -> Self {
        // PNG at full scale; quality 85 only matters for lossy formats.
        Self {
            format: DesktopScreenshotFormat::Png,
            quality: 85,
            scale: 1.0,
        }
    }
}

impl DesktopScreenshotOptions {
    /// MIME type matching `format`.
    fn content_type(self) -> &'static str {
        match self.format {
            DesktopScreenshotFormat::Png => "image/png",
            DesktopScreenshotFormat::Jpeg => "image/jpeg",
            DesktopScreenshotFormat::Webp => "image/webp",
        }
    }

    /// ImageMagick output specifier ("<fmt>:-" writes the image to stdout).
    fn output_arg(self) -> &'static str {
        match self.format {
            DesktopScreenshotFormat::Png => "png:-",
            DesktopScreenshotFormat::Jpeg => "jpeg:-",
            DesktopScreenshotFormat::Webp => "webp:-",
        }
    }

    /// True when the raw PNG capture must be re-encoded or resized.
    fn needs_convert(self) -> bool {
        self.format != DesktopScreenshotFormat::Png || (self.scale - 1.0).abs() > f32::EPSILON
    }
}

impl Default for DesktopRuntimeConfig {
    fn default() -> Self {
        // Display-number override; must parse as a positive integer.
        let display_num = std::env::var("SANDBOX_AGENT_DESKTOP_DISPLAY_NUM")
            .ok()
            .and_then(|value| value.parse::<i32>().ok())
            .filter(|value| *value > 0)
            .unwrap_or(DEFAULT_DISPLAY_NUM);

        let state_dir = 
std::env::var("SANDBOX_AGENT_DESKTOP_STATE_DIR") + .ok() + .map(PathBuf::from) + .unwrap_or_else(default_state_dir); + + let assume_linux_for_tests = std::env::var("SANDBOX_AGENT_DESKTOP_TEST_ASSUME_LINUX") + .ok() + .map(|value| value == "1" || value.eq_ignore_ascii_case("true")) + .unwrap_or(false); + + Self { + state_dir, + display_num, + assume_linux_for_tests, + } + } +} + +impl DesktopRuntime { + pub fn new(process_runtime: Arc) -> Self { + Self::with_config(process_runtime, DesktopRuntimeConfig::default()) + } + + pub fn with_config(process_runtime: Arc, config: DesktopRuntimeConfig) -> Self { + let runtime_log_path = config.state_dir.join("desktop-runtime.log"); + let recording_manager = + DesktopRecordingManager::new(process_runtime.clone(), config.state_dir.clone()); + Self { + process_runtime, + recording_manager, + streaming_manager: DesktopStreamingManager::new(), + inner: Arc::new(Mutex::new(DesktopRuntimeStateData { + state: DesktopState::Inactive, + display_num: config.display_num, + display: None, + resolution: None, + started_at: None, + last_error: None, + missing_dependencies: Vec::new(), + install_command: None, + runtime_log_path, + environment: HashMap::new(), + xvfb: None, + openbox: None, + dbus_pid: None, + })), + config, + } + } + + pub async fn status(&self) -> DesktopStatusResponse { + let mut state = self.inner.lock().await; + self.refresh_status_locked(&mut state).await; + self.snapshot_locked(&state) + } + + pub async fn start( + &self, + request: DesktopStartRequest, + ) -> Result { + let mut state = self.inner.lock().await; + + if !self.platform_supported() { + let problem = DesktopProblem::unsupported_platform(desktop_platform_support_message()); + self.record_problem_locked(&mut state, &problem); + state.state = DesktopState::Failed; + return Err(problem); + } + + if matches!(state.state, DesktopState::Starting | DesktopState::Stopping) { + return Err(DesktopProblem::runtime_starting( + "Desktop runtime is busy transitioning 
state", + )); + } + + self.refresh_status_locked(&mut state).await; + if state.state == DesktopState::Active { + return Ok(self.snapshot_locked(&state)); + } + + if !state.missing_dependencies.is_empty() { + return Err(DesktopProblem::dependencies_missing( + state.missing_dependencies.clone(), + state.install_command.clone(), + self.processes_locked(&state), + )); + } + + self.ensure_state_dir_locked(&state).map_err(|err| { + DesktopProblem::runtime_failed(err, None, self.processes_locked(&state)) + })?; + self.write_runtime_log_locked(&state, "starting desktop runtime"); + + let width = request.width.unwrap_or(DEFAULT_WIDTH); + let height = request.height.unwrap_or(DEFAULT_HEIGHT); + let dpi = request.dpi.unwrap_or(DEFAULT_DPI); + validate_start_request(width, height, dpi)?; + + let display_num = self.choose_display_num()?; + let display = format!(":{display_num}"); + let resolution = DesktopResolution { + width, + height, + dpi: Some(dpi), + }; + let environment = self.base_environment(&display)?; + + state.state = DesktopState::Starting; + state.display_num = display_num; + state.display = Some(display.clone()); + state.resolution = Some(resolution.clone()); + state.started_at = None; + state.last_error = None; + state.environment = environment; + state.install_command = None; + + if let Err(problem) = self.start_dbus_locked(&mut state).await { + return Err(self.fail_start_locked(&mut state, problem).await); + } + if let Err(problem) = self.start_xvfb_locked(&mut state, &resolution).await { + return Err(self.fail_start_locked(&mut state, problem).await); + } + if let Err(problem) = self.wait_for_socket(display_num).await { + return Err(self.fail_start_locked(&mut state, problem).await); + } + if let Err(problem) = self.start_openbox_locked(&mut state).await { + return Err(self.fail_start_locked(&mut state, problem).await); + } + + let ready = DesktopReadyContext { + display, + environment: state.environment.clone(), + resolution, + }; + + let display_info = 
match self.query_display_info_locked(&state, &ready).await { + Ok(display_info) => display_info, + Err(problem) => return Err(self.fail_start_locked(&mut state, problem).await), + }; + state.resolution = Some(display_info.resolution.clone()); + + let screenshot_options = DesktopScreenshotOptions::default(); + if let Err(problem) = self + .capture_screenshot_locked(&state, None, &screenshot_options) + .await + { + return Err(self.fail_start_locked(&mut state, problem).await); + } + + state.state = DesktopState::Active; + state.started_at = Some(chrono::Utc::now().to_rfc3339()); + state.last_error = None; + self.write_runtime_log_locked( + &state, + &format!( + "desktop runtime active on {} ({}x{}, dpi {})", + display_info.display, + display_info.resolution.width, + display_info.resolution.height, + display_info.resolution.dpi.unwrap_or(DEFAULT_DPI) + ), + ); + + Ok(self.snapshot_locked(&state)) + } + + pub async fn stop(&self) -> Result { + let mut state = self.inner.lock().await; + if matches!(state.state, DesktopState::Starting | DesktopState::Stopping) { + return Err(DesktopProblem::runtime_starting( + "Desktop runtime is busy transitioning state", + )); + } + + state.state = DesktopState::Stopping; + self.write_runtime_log_locked(&state, "stopping desktop runtime"); + let _ = self.recording_manager.stop().await; + let _ = self.streaming_manager.stop().await; + + self.stop_openbox_locked(&mut state).await; + self.stop_xvfb_locked(&mut state).await; + self.stop_dbus_locked(&mut state); + + state.state = DesktopState::Inactive; + state.display = None; + state.resolution = None; + state.started_at = None; + state.last_error = None; + state.missing_dependencies = self.detect_missing_dependencies(); + state.install_command = self.install_command_for(&state.missing_dependencies); + state.environment.clear(); + + Ok(self.snapshot_locked(&state)) + } + + pub async fn shutdown(&self) { + let _ = self.stop().await; + } + + pub async fn screenshot( + &self, + query: 
DesktopScreenshotQuery, + ) -> Result { + let options = screenshot_options(query.format, query.quality, query.scale)?; + let mut state = self.inner.lock().await; + let ready = self.ensure_ready_locked(&mut state).await?; + let bytes = self + .capture_screenshot_locked(&state, Some(&ready), &options) + .await?; + Ok(DesktopScreenshotData { + bytes, + content_type: options.content_type(), + }) + } + + pub async fn screenshot_region( + &self, + query: DesktopRegionScreenshotQuery, + ) -> Result { + validate_region(&query)?; + let options = screenshot_options(query.format, query.quality, query.scale)?; + let mut state = self.inner.lock().await; + let ready = self.ensure_ready_locked(&mut state).await?; + let crop = format!("{}x{}+{}+{}", query.width, query.height, query.x, query.y); + let bytes = self + .capture_screenshot_with_crop_locked(&state, &ready, &crop, &options) + .await?; + Ok(DesktopScreenshotData { + bytes, + content_type: options.content_type(), + }) + } + + pub async fn mouse_position(&self) -> Result { + let mut state = self.inner.lock().await; + let ready = self.ensure_ready_locked(&mut state).await?; + self.mouse_position_locked(&state, &ready).await + } + + pub async fn move_mouse( + &self, + request: DesktopMouseMoveRequest, + ) -> Result { + validate_coordinates(request.x, request.y)?; + let mut state = self.inner.lock().await; + let ready = self.ensure_ready_locked(&mut state).await?; + let args = vec![ + "mousemove".to_string(), + request.x.to_string(), + request.y.to_string(), + ]; + self.run_input_command_locked(&state, &ready, args).await?; + self.mouse_position_locked(&state, &ready).await + } + + pub async fn click_mouse( + &self, + request: DesktopMouseClickRequest, + ) -> Result { + validate_coordinates(request.x, request.y)?; + let click_count = request.click_count.unwrap_or(1); + if click_count == 0 { + return Err(DesktopProblem::invalid_action( + "clickCount must be greater than 0", + )); + } + + let mut state = self.inner.lock().await; 
+ let ready = self.ensure_ready_locked(&mut state).await?; + let button = mouse_button_code(request.button.unwrap_or(DesktopMouseButton::Left)); + let mut args = vec![ + "mousemove".to_string(), + request.x.to_string(), + request.y.to_string(), + "click".to_string(), + ]; + if click_count > 1 { + args.push("--repeat".to_string()); + args.push(click_count.to_string()); + } + args.push(button.to_string()); + self.run_input_command_locked(&state, &ready, args).await?; + self.mouse_position_locked(&state, &ready).await + } + + pub async fn mouse_down( + &self, + request: DesktopMouseDownRequest, + ) -> Result { + let coordinates = validate_optional_coordinates(request.x, request.y)?; + let mut state = self.inner.lock().await; + let ready = self.ensure_ready_locked(&mut state).await?; + let button = mouse_button_code(request.button.unwrap_or(DesktopMouseButton::Left)); + let args = mouse_button_transition_args("mousedown", coordinates, button); + self.run_input_command_locked(&state, &ready, args).await?; + self.mouse_position_locked(&state, &ready).await + } + + pub async fn mouse_up( + &self, + request: DesktopMouseUpRequest, + ) -> Result { + let coordinates = validate_optional_coordinates(request.x, request.y)?; + let mut state = self.inner.lock().await; + let ready = self.ensure_ready_locked(&mut state).await?; + let button = mouse_button_code(request.button.unwrap_or(DesktopMouseButton::Left)); + let args = mouse_button_transition_args("mouseup", coordinates, button); + self.run_input_command_locked(&state, &ready, args).await?; + self.mouse_position_locked(&state, &ready).await + } + + pub async fn drag_mouse( + &self, + request: DesktopMouseDragRequest, + ) -> Result { + validate_coordinates(request.start_x, request.start_y)?; + validate_coordinates(request.end_x, request.end_y)?; + + let mut state = self.inner.lock().await; + let ready = self.ensure_ready_locked(&mut state).await?; + let button = 
mouse_button_code(request.button.unwrap_or(DesktopMouseButton::Left)); + let args = vec![ + "mousemove".to_string(), + request.start_x.to_string(), + request.start_y.to_string(), + "mousedown".to_string(), + button.to_string(), + "mousemove".to_string(), + request.end_x.to_string(), + request.end_y.to_string(), + "mouseup".to_string(), + button.to_string(), + ]; + self.run_input_command_locked(&state, &ready, args).await?; + self.mouse_position_locked(&state, &ready).await + } + + pub async fn scroll_mouse( + &self, + request: DesktopMouseScrollRequest, + ) -> Result { + validate_coordinates(request.x, request.y)?; + let delta_x = request.delta_x.unwrap_or(0); + let delta_y = request.delta_y.unwrap_or(0); + if delta_x == 0 && delta_y == 0 { + return Err(DesktopProblem::invalid_action( + "deltaX or deltaY must be non-zero", + )); + } + + let mut state = self.inner.lock().await; + let ready = self.ensure_ready_locked(&mut state).await?; + let mut args = vec![ + "mousemove".to_string(), + request.x.to_string(), + request.y.to_string(), + ]; + + append_scroll_clicks(&mut args, delta_y, 5, 4); + append_scroll_clicks(&mut args, delta_x, 7, 6); + + self.run_input_command_locked(&state, &ready, args).await?; + self.mouse_position_locked(&state, &ready).await + } + + pub async fn type_text( + &self, + request: DesktopKeyboardTypeRequest, + ) -> Result { + if request.text.is_empty() { + return Err(DesktopProblem::invalid_action("text must not be empty")); + } + + let mut state = self.inner.lock().await; + let ready = self.ensure_ready_locked(&mut state).await?; + let args = type_text_args(request.text, request.delay_ms.unwrap_or(10)); + self.run_input_command_locked(&state, &ready, args).await?; + Ok(DesktopActionResponse { ok: true }) + } + + pub async fn press_key( + &self, + request: DesktopKeyboardPressRequest, + ) -> Result { + if request.key.trim().is_empty() { + return Err(DesktopProblem::invalid_action("key must not be empty")); + } + + let mut state = 
self.inner.lock().await; + let ready = self.ensure_ready_locked(&mut state).await?; + let args = press_key_args(request.key, request.modifiers); + self.run_input_command_locked(&state, &ready, args).await?; + Ok(DesktopActionResponse { ok: true }) + } + + pub async fn key_down( + &self, + request: DesktopKeyboardDownRequest, + ) -> Result { + if request.key.trim().is_empty() { + return Err(DesktopProblem::invalid_action("key must not be empty")); + } + + let mut state = self.inner.lock().await; + let ready = self.ensure_ready_locked(&mut state).await?; + let args = key_transition_args("keydown", request.key); + self.run_input_command_locked(&state, &ready, args).await?; + Ok(DesktopActionResponse { ok: true }) + } + + pub async fn key_up( + &self, + request: DesktopKeyboardUpRequest, + ) -> Result { + if request.key.trim().is_empty() { + return Err(DesktopProblem::invalid_action("key must not be empty")); + } + + let mut state = self.inner.lock().await; + let ready = self.ensure_ready_locked(&mut state).await?; + let args = key_transition_args("keyup", request.key); + self.run_input_command_locked(&state, &ready, args).await?; + Ok(DesktopActionResponse { ok: true }) + } + + pub async fn display_info(&self) -> Result { + let mut state = self.inner.lock().await; + let ready = self.ensure_ready_locked(&mut state).await?; + self.query_display_info_locked(&state, &ready).await + } + + pub async fn list_windows(&self) -> Result { + let mut state = self.inner.lock().await; + let ready = self.ensure_ready_locked(&mut state).await?; + let active_window_id = self.active_window_id_locked(&state, &ready).await?; + let window_ids = self.window_ids_locked(&state, &ready).await?; + let mut windows = Vec::with_capacity(window_ids.len()); + for window_id in window_ids { + let title = self.window_title_locked(&state, &ready, &window_id).await?; + let (x, y, width, height) = self + .window_geometry_locked(&state, &ready, &window_id) + .await?; + windows.push(DesktopWindowInfo { + 
id: window_id.clone(), + title, + x, + y, + width, + height, + is_active: active_window_id + .as_deref() + .map(|active| active == window_id) + .unwrap_or(false), + }); + } + Ok(DesktopWindowListResponse { windows }) + } + + pub async fn start_recording( + &self, + request: DesktopRecordingStartRequest, + ) -> Result { + let context = self.recording_context().await?; + self.recording_manager.start(context, request).await + } + + pub async fn stop_recording(&self) -> Result { + self.recording_manager.stop().await + } + + pub async fn list_recordings(&self) -> Result { + self.recording_manager.list().await + } + + pub async fn get_recording(&self, id: &str) -> Result { + self.recording_manager.get(id).await + } + + pub async fn recording_download_path(&self, id: &str) -> Result { + self.recording_manager.download_path(id).await + } + + pub async fn delete_recording(&self, id: &str) -> Result<(), SandboxError> { + self.recording_manager.delete(id).await + } + + pub async fn start_streaming(&self) -> DesktopStreamStatusResponse { + self.streaming_manager.start().await + } + + pub async fn stop_streaming(&self) -> DesktopStreamStatusResponse { + self.streaming_manager.stop().await + } + + pub async fn ensure_streaming_active(&self) -> Result<(), SandboxError> { + self.streaming_manager.ensure_active().await + } + + async fn recording_context(&self) -> Result { + let mut state = self.inner.lock().await; + let ready = self + .ensure_ready_locked(&mut state) + .await + .map_err(desktop_problem_to_sandbox_error)?; + Ok(DesktopRecordingContext { + display: ready.display, + environment: ready.environment, + resolution: ready.resolution, + }) + } + + async fn ensure_ready_locked( + &self, + state: &mut DesktopRuntimeStateData, + ) -> Result { + self.refresh_status_locked(state).await; + match state.state { + DesktopState::Active => { + let display = state.display.clone().ok_or_else(|| { + DesktopProblem::runtime_failed( + "Desktop runtime has no active display", + 
state.install_command.clone(), + self.processes_locked(state), + ) + })?; + let resolution = state.resolution.clone().ok_or_else(|| { + DesktopProblem::runtime_failed( + "Desktop runtime has no active resolution", + state.install_command.clone(), + self.processes_locked(state), + ) + })?; + Ok(DesktopReadyContext { + display, + environment: state.environment.clone(), + resolution, + }) + } + DesktopState::InstallRequired => Err(DesktopProblem::dependencies_missing( + state.missing_dependencies.clone(), + state.install_command.clone(), + self.processes_locked(state), + )), + DesktopState::Inactive => Err(DesktopProblem::runtime_inactive( + "Desktop runtime has not been started", + )), + DesktopState::Starting | DesktopState::Stopping => Err( + DesktopProblem::runtime_starting("Desktop runtime is still transitioning"), + ), + DesktopState::Failed => Err(DesktopProblem::runtime_failed( + state + .last_error + .as_ref() + .map(|error| error.message.clone()) + .unwrap_or_else(|| "Desktop runtime is unhealthy".to_string()), + state.install_command.clone(), + self.processes_locked(state), + )), + } + } + + async fn refresh_status_locked(&self, state: &mut DesktopRuntimeStateData) { + let missing_dependencies = if self.platform_supported() { + self.detect_missing_dependencies() + } else { + Vec::new() + }; + state.missing_dependencies = missing_dependencies.clone(); + state.install_command = self.install_command_for(&missing_dependencies); + + if !self.platform_supported() { + state.state = DesktopState::Failed; + state.last_error = Some( + DesktopProblem::unsupported_platform(desktop_platform_support_message()) + .to_error_info(), + ); + return; + } + + if !missing_dependencies.is_empty() { + state.state = DesktopState::InstallRequired; + state.last_error = Some( + DesktopProblem::dependencies_missing( + missing_dependencies, + state.install_command.clone(), + self.processes_locked(state), + ) + .to_error_info(), + ); + return; + } + + if matches!( + state.state, + 
DesktopState::Inactive | DesktopState::Starting | DesktopState::Stopping + ) { + if state.state == DesktopState::Inactive { + state.last_error = None; + } + return; + } + + if state.state == DesktopState::Failed + && state.display.is_none() + && state.xvfb.is_none() + && state.openbox.is_none() + && state.dbus_pid.is_none() + { + return; + } + + let Some(display) = state.display.clone() else { + state.state = DesktopState::Failed; + state.last_error = Some( + DesktopProblem::runtime_failed( + "Desktop runtime has no display", + None, + self.processes_locked(state), + ) + .to_error_info(), + ); + return; + }; + + if let Err(problem) = self.ensure_process_running_locked(state, "Xvfb").await { + self.record_problem_locked(state, &problem); + state.state = DesktopState::Failed; + return; + } + if let Err(problem) = self.ensure_process_running_locked(state, "openbox").await { + self.record_problem_locked(state, &problem); + state.state = DesktopState::Failed; + return; + } + + if !socket_path(state.display_num).exists() { + let problem = DesktopProblem::runtime_failed( + format!("X socket for display {display} is missing"), + state.install_command.clone(), + self.processes_locked(state), + ); + self.record_problem_locked(state, &problem); + state.state = DesktopState::Failed; + return; + } + + let ready = DesktopReadyContext { + display, + environment: state.environment.clone(), + resolution: state.resolution.clone().unwrap_or(DesktopResolution { + width: DEFAULT_WIDTH, + height: DEFAULT_HEIGHT, + dpi: Some(DEFAULT_DPI), + }), + }; + + match self.query_display_info_locked(state, &ready).await { + Ok(display_info) => { + state.resolution = Some(display_info.resolution); + } + Err(problem) => { + self.record_problem_locked(state, &problem); + state.state = DesktopState::Failed; + return; + } + } + + let screenshot_options = DesktopScreenshotOptions::default(); + if let Err(problem) = self + .capture_screenshot_locked(state, Some(&ready), &screenshot_options) + .await + { 
+ self.record_problem_locked(state, &problem); + state.state = DesktopState::Failed; + return; + } + + state.state = DesktopState::Active; + state.last_error = None; + } + + async fn ensure_process_running_locked( + &self, + state: &mut DesktopRuntimeStateData, + name: &str, + ) -> Result<(), DesktopProblem> { + let process_id = match name { + "Xvfb" => state + .xvfb + .as_ref() + .map(|process| process.process_id.clone()), + "openbox" => state + .openbox + .as_ref() + .map(|process| process.process_id.clone()), + _ => None, + }; + + let Some(process_id) = process_id else { + return Err(DesktopProblem::runtime_failed( + format!("{name} is not running"), + state.install_command.clone(), + self.processes_locked(state), + )); + }; + + let snapshot = self + .process_runtime + .snapshot(&process_id) + .await + .map_err(|err| { + DesktopProblem::runtime_failed( + format!("failed to inspect {name}: {err}"), + state.install_command.clone(), + self.processes_locked(state), + ) + })?; + + if let Some(process) = match name { + "Xvfb" => state.xvfb.as_mut(), + "openbox" => state.openbox.as_mut(), + _ => None, + } { + process.pid = snapshot.pid; + process.running = snapshot.status == ProcessStatus::Running; + } + + if snapshot.status == ProcessStatus::Running { + return Ok(()); + } + + self.write_runtime_log_locked(state, &format!("{name} exited; attempting restart")); + match name { + "Xvfb" => { + let resolution = state.resolution.clone().ok_or_else(|| { + DesktopProblem::runtime_failed( + "desktop resolution missing during Xvfb restart", + state.install_command.clone(), + self.processes_locked(state), + ) + })?; + state.xvfb = None; + self.start_xvfb_locked(state, &resolution).await?; + } + "openbox" => { + state.openbox = None; + self.start_openbox_locked(state).await?; + } + _ => {} + } + + let restarted_snapshot = self + .process_runtime + .snapshot(match name { + "Xvfb" => state + .xvfb + .as_ref() + .map(|process| process.process_id.as_str()) + .unwrap_or_default(), + 
"openbox" => state + .openbox + .as_ref() + .map(|process| process.process_id.as_str()) + .unwrap_or_default(), + _ => "", + }) + .await + .map_err(|err| { + DesktopProblem::runtime_failed( + format!("failed to inspect restarted {name}: {err}"), + state.install_command.clone(), + self.processes_locked(state), + ) + })?; + if restarted_snapshot.status == ProcessStatus::Running { + Ok(()) + } else { + Err(DesktopProblem::runtime_failed( + format!("{name} exited with status {:?}", snapshot.exit_code), + state.install_command.clone(), + self.processes_locked(state), + )) + } + } + + async fn start_dbus_locked( + &self, + state: &mut DesktopRuntimeStateData, + ) -> Result<(), DesktopProblem> { + if find_binary("dbus-launch").is_none() { + self.write_runtime_log_locked( + state, + "dbus-launch not found; continuing without D-Bus session", + ); + return Ok(()); + } + + let output = run_command_output("dbus-launch", &[], &state.environment, INPUT_TIMEOUT) + .await + .map_err(|err| { + DesktopProblem::runtime_failed( + format!("failed to launch dbus-launch: {err}"), + None, + self.processes_locked(state), + ) + })?; + + if !output.status.success() { + self.write_runtime_log_locked( + state, + &format!( + "dbus-launch failed: {}", + String::from_utf8_lossy(&output.stderr).trim() + ), + ); + return Ok(()); + } + + for line in String::from_utf8_lossy(&output.stdout).lines() { + if let Some((key, value)) = line.split_once('=') { + let cleaned = value.trim().trim_end_matches(';').to_string(); + if key == "DBUS_SESSION_BUS_ADDRESS" { + state.environment.insert(key.to_string(), cleaned); + } else if key == "DBUS_SESSION_BUS_PID" { + state.dbus_pid = cleaned.parse::().ok(); + } + } + } + + Ok(()) + } + + async fn start_xvfb_locked( + &self, + state: &mut DesktopRuntimeStateData, + resolution: &DesktopResolution, + ) -> Result<(), DesktopProblem> { + let Some(display) = state.display.clone() else { + return Err(DesktopProblem::runtime_failed( + "Desktop display was not configured 
before starting Xvfb", + None, + self.processes_locked(state), + )); + }; + let args = vec![ + display, + "-screen".to_string(), + "0".to_string(), + format!("{}x{}x24", resolution.width, resolution.height), + "-dpi".to_string(), + resolution.dpi.unwrap_or(DEFAULT_DPI).to_string(), + "-nolisten".to_string(), + "tcp".to_string(), + ]; + let snapshot = self + .process_runtime + .start_process(ProcessStartSpec { + command: "Xvfb".to_string(), + args, + cwd: None, + env: state.environment.clone(), + tty: false, + interactive: false, + owner: ProcessOwner::Desktop, + restart_policy: Some(RestartPolicy::Always), + }) + .await + .map_err(|err| { + DesktopProblem::runtime_failed( + format!("failed to start Xvfb: {err}"), + None, + self.processes_locked(state), + ) + })?; + state.xvfb = Some(ManagedDesktopProcess { + name: "Xvfb", + process_id: snapshot.id, + pid: snapshot.pid, + running: snapshot.status == ProcessStatus::Running, + }); + Ok(()) + } + + async fn start_openbox_locked( + &self, + state: &mut DesktopRuntimeStateData, + ) -> Result<(), DesktopProblem> { + let snapshot = self + .process_runtime + .start_process(ProcessStartSpec { + command: "openbox".to_string(), + args: Vec::new(), + cwd: None, + env: state.environment.clone(), + tty: false, + interactive: false, + owner: ProcessOwner::Desktop, + restart_policy: Some(RestartPolicy::Always), + }) + .await + .map_err(|err| { + DesktopProblem::runtime_failed( + format!("failed to start openbox: {err}"), + None, + self.processes_locked(state), + ) + })?; + state.openbox = Some(ManagedDesktopProcess { + name: "openbox", + process_id: snapshot.id, + pid: snapshot.pid, + running: snapshot.status == ProcessStatus::Running, + }); + Ok(()) + } + + async fn stop_xvfb_locked(&self, state: &mut DesktopRuntimeStateData) { + if let Some(process) = state.xvfb.take() { + self.write_runtime_log_locked(state, "stopping Xvfb"); + let _ = self + .process_runtime + .stop_process(&process.process_id, Some(2_000)) + .await; + if self 
+ .process_runtime + .snapshot(&process.process_id) + .await + .ok() + .is_some_and(|snapshot| snapshot.status == ProcessStatus::Running) + { + let _ = self + .process_runtime + .kill_process(&process.process_id, Some(1_000)) + .await; + } + } + } + + async fn stop_openbox_locked(&self, state: &mut DesktopRuntimeStateData) { + if let Some(process) = state.openbox.take() { + self.write_runtime_log_locked(state, "stopping openbox"); + let _ = self + .process_runtime + .stop_process(&process.process_id, Some(2_000)) + .await; + if self + .process_runtime + .snapshot(&process.process_id) + .await + .ok() + .is_some_and(|snapshot| snapshot.status == ProcessStatus::Running) + { + let _ = self + .process_runtime + .kill_process(&process.process_id, Some(1_000)) + .await; + } + } + } + + fn stop_dbus_locked(&self, state: &mut DesktopRuntimeStateData) { + if let Some(pid) = state.dbus_pid.take() { + #[cfg(unix)] + unsafe { + libc::kill(pid as i32, libc::SIGTERM); + } + } + } + + async fn fail_start_locked( + &self, + state: &mut DesktopRuntimeStateData, + problem: DesktopProblem, + ) -> DesktopProblem { + self.record_problem_locked(state, &problem); + self.write_runtime_log_locked(state, "desktop runtime startup failed; cleaning up"); + self.stop_openbox_locked(state).await; + self.stop_xvfb_locked(state).await; + self.stop_dbus_locked(state); + state.state = DesktopState::Failed; + state.display = None; + state.resolution = None; + state.started_at = None; + state.environment.clear(); + problem + } + + async fn capture_screenshot_locked( + &self, + state: &DesktopRuntimeStateData, + ready: Option<&DesktopReadyContext>, + options: &DesktopScreenshotOptions, + ) -> Result, DesktopProblem> { + match ready { + Some(ready) => { + self.capture_screenshot_with_crop_locked(state, ready, "", options) + .await + } + None => { + let ready = DesktopReadyContext { + display: state + .display + .clone() + .unwrap_or_else(|| format!(":{}", state.display_num)), + environment: 
state.environment.clone(), + resolution: state.resolution.clone().unwrap_or(DesktopResolution { + width: DEFAULT_WIDTH, + height: DEFAULT_HEIGHT, + dpi: Some(DEFAULT_DPI), + }), + }; + self.capture_screenshot_with_crop_locked(state, &ready, "", options) + .await + } + } + } + + async fn capture_screenshot_with_crop_locked( + &self, + state: &DesktopRuntimeStateData, + ready: &DesktopReadyContext, + crop: &str, + options: &DesktopScreenshotOptions, + ) -> Result, DesktopProblem> { + let mut args = vec!["-window".to_string(), "root".to_string()]; + if !crop.is_empty() { + args.push("-crop".to_string()); + args.push(crop.to_string()); + } + args.push("png:-".to_string()); + + let output = run_command_output("import", &args, &ready.environment, SCREENSHOT_TIMEOUT) + .await + .map_err(|err| { + DesktopProblem::screenshot_failed( + format!("failed to capture desktop screenshot: {err}"), + self.processes_locked(state), + ) + })?; + if !output.status.success() { + return Err(DesktopProblem::screenshot_failed( + format!( + "desktop screenshot command failed: {}", + String::from_utf8_lossy(&output.stderr).trim() + ), + self.processes_locked(state), + )); + } + let bytes = maybe_convert_screenshot(output.stdout, options, &ready.environment) + .await + .map_err(|message| { + DesktopProblem::screenshot_failed(message, self.processes_locked(state)) + })?; + validate_image_bytes(&bytes, options.format).map_err(|message| { + DesktopProblem::screenshot_failed(message, self.processes_locked(state)) + })?; + Ok(bytes) + } + + async fn active_window_id_locked( + &self, + state: &DesktopRuntimeStateData, + ready: &DesktopReadyContext, + ) -> Result, DesktopProblem> { + let args = vec!["getactivewindow".to_string()]; + let output = run_command_output("xdotool", &args, &ready.environment, INPUT_TIMEOUT) + .await + .map_err(|err| { + DesktopProblem::runtime_failed( + format!("failed to query active window: {err}"), + state.install_command.clone(), + self.processes_locked(state), + ) + 
})?; + if !output.status.success() { + if output.status.code() == Some(1) && output.stdout.is_empty() { + return Ok(None); + } + return Err(DesktopProblem::runtime_failed( + format!( + "active window query failed: {}", + String::from_utf8_lossy(&output.stderr).trim() + ), + state.install_command.clone(), + self.processes_locked(state), + )); + } + let window_id = String::from_utf8_lossy(&output.stdout).trim().to_string(); + if window_id.is_empty() { + Ok(None) + } else { + Ok(Some(window_id)) + } + } + + async fn window_ids_locked( + &self, + state: &DesktopRuntimeStateData, + ready: &DesktopReadyContext, + ) -> Result, DesktopProblem> { + let args = vec![ + "search".to_string(), + "--onlyvisible".to_string(), + "--name".to_string(), + "".to_string(), + ]; + let output = run_command_output("xdotool", &args, &ready.environment, INPUT_TIMEOUT) + .await + .map_err(|err| { + DesktopProblem::runtime_failed( + format!("failed to list desktop windows: {err}"), + state.install_command.clone(), + self.processes_locked(state), + ) + })?; + if !output.status.success() { + if output.status.code() == Some(1) && output.stdout.is_empty() { + return Ok(Vec::new()); + } + return Err(DesktopProblem::runtime_failed( + format!( + "desktop window listing failed: {}", + String::from_utf8_lossy(&output.stderr).trim() + ), + state.install_command.clone(), + self.processes_locked(state), + )); + } + Ok(String::from_utf8_lossy(&output.stdout) + .lines() + .map(str::trim) + .filter(|line| !line.is_empty()) + .map(ToString::to_string) + .collect()) + } + + async fn window_title_locked( + &self, + state: &DesktopRuntimeStateData, + ready: &DesktopReadyContext, + window_id: &str, + ) -> Result { + let args = vec!["getwindowname".to_string(), window_id.to_string()]; + let output = run_command_output("xdotool", &args, &ready.environment, INPUT_TIMEOUT) + .await + .map_err(|err| { + DesktopProblem::runtime_failed( + format!("failed to query window title: {err}"), + state.install_command.clone(), + 
self.processes_locked(state), + ) + })?; + if !output.status.success() { + return Err(DesktopProblem::runtime_failed( + format!( + "window title query failed: {}", + String::from_utf8_lossy(&output.stderr).trim() + ), + state.install_command.clone(), + self.processes_locked(state), + )); + } + Ok(String::from_utf8_lossy(&output.stdout) + .trim_end() + .to_string()) + } + + async fn window_geometry_locked( + &self, + state: &DesktopRuntimeStateData, + ready: &DesktopReadyContext, + window_id: &str, + ) -> Result<(i32, i32, u32, u32), DesktopProblem> { + let args = vec!["getwindowgeometry".to_string(), window_id.to_string()]; + let output = run_command_output("xdotool", &args, &ready.environment, INPUT_TIMEOUT) + .await + .map_err(|err| { + DesktopProblem::runtime_failed( + format!("failed to query window geometry: {err}"), + state.install_command.clone(), + self.processes_locked(state), + ) + })?; + if !output.status.success() { + return Err(DesktopProblem::runtime_failed( + format!( + "window geometry query failed: {}", + String::from_utf8_lossy(&output.stderr).trim() + ), + state.install_command.clone(), + self.processes_locked(state), + )); + } + parse_window_geometry(&output.stdout).map_err(|message| { + DesktopProblem::runtime_failed( + message, + state.install_command.clone(), + self.processes_locked(state), + ) + }) + } + + async fn mouse_position_locked( + &self, + state: &DesktopRuntimeStateData, + ready: &DesktopReadyContext, + ) -> Result { + let args = vec!["getmouselocation".to_string(), "--shell".to_string()]; + let output = run_command_output("xdotool", &args, &ready.environment, INPUT_TIMEOUT) + .await + .map_err(|err| { + DesktopProblem::input_failed( + format!("failed to query mouse position: {err}"), + self.processes_locked(state), + ) + })?; + if !output.status.success() { + return Err(DesktopProblem::input_failed( + format!( + "mouse position command failed: {}", + String::from_utf8_lossy(&output.stderr).trim() + ), + 
self.processes_locked(state), + )); + } + parse_mouse_position(&output.stdout) + .map_err(|message| DesktopProblem::input_failed(message, self.processes_locked(state))) + } + + async fn run_input_command_locked( + &self, + state: &DesktopRuntimeStateData, + ready: &DesktopReadyContext, + args: Vec, + ) -> Result<(), DesktopProblem> { + let output = run_command_output("xdotool", &args, &ready.environment, INPUT_TIMEOUT) + .await + .map_err(|err| { + DesktopProblem::input_failed( + format!("failed to execute desktop input command: {err}"), + self.processes_locked(state), + ) + })?; + if !output.status.success() { + return Err(DesktopProblem::input_failed( + format!( + "desktop input command failed: {}", + String::from_utf8_lossy(&output.stderr).trim() + ), + self.processes_locked(state), + )); + } + Ok(()) + } + + async fn query_display_info_locked( + &self, + state: &DesktopRuntimeStateData, + ready: &DesktopReadyContext, + ) -> Result { + let args = vec!["--current".to_string()]; + let output = run_command_output("xrandr", &args, &ready.environment, INPUT_TIMEOUT) + .await + .map_err(|err| { + DesktopProblem::runtime_failed( + format!("failed to query display info: {err}"), + state.install_command.clone(), + self.processes_locked(state), + ) + })?; + if !output.status.success() { + return Err(DesktopProblem::runtime_failed( + format!( + "display query failed: {}", + String::from_utf8_lossy(&output.stderr).trim() + ), + state.install_command.clone(), + self.processes_locked(state), + )); + } + let resolution = parse_xrandr_resolution(&output.stdout).map_err(|message| { + DesktopProblem::runtime_failed( + message, + state.install_command.clone(), + self.processes_locked(state), + ) + })?; + Ok(DesktopDisplayInfoResponse { + display: ready.display.clone(), + resolution: DesktopResolution { + dpi: ready.resolution.dpi, + ..resolution + }, + }) + } + + fn detect_missing_dependencies(&self) -> Vec { + let mut missing = Vec::new(); + for (name, binary) in [ + ("Xvfb", 
"Xvfb"), + ("openbox", "openbox"), + ("xdotool", "xdotool"), + ("import", "import"), + ("xrandr", "xrandr"), + ] { + if find_binary(binary).is_none() { + missing.push(name.to_string()); + } + } + missing + } + + fn install_command_for(&self, missing_dependencies: &[String]) -> Option { + if !self.platform_supported() || missing_dependencies.is_empty() { + None + } else { + Some("sandbox-agent install desktop --yes".to_string()) + } + } + + fn platform_supported(&self) -> bool { + cfg!(target_os = "linux") || self.config.assume_linux_for_tests + } + + fn choose_display_num(&self) -> Result { + for offset in 0..MAX_DISPLAY_PROBE { + let candidate = self.config.display_num + offset; + if !socket_path(candidate).exists() { + return Ok(candidate); + } + } + Err(DesktopProblem::runtime_failed( + "unable to find an available X display starting at :99", + None, + Vec::new(), + )) + } + + fn base_environment(&self, display: &str) -> Result, DesktopProblem> { + let mut environment = HashMap::new(); + environment.insert("DISPLAY".to_string(), display.to_string()); + environment.insert( + "HOME".to_string(), + self.config + .state_dir + .join("home") + .to_string_lossy() + .to_string(), + ); + environment.insert( + "USER".to_string(), + std::env::var("USER").unwrap_or_else(|_| "sandbox-agent".to_string()), + ); + environment.insert( + "PATH".to_string(), + std::env::var("PATH").unwrap_or_default(), + ); + fs::create_dir_all(self.config.state_dir.join("home")).map_err(|err| { + DesktopProblem::runtime_failed( + format!("failed to create desktop home: {err}"), + None, + Vec::new(), + ) + })?; + Ok(environment) + } + + async fn wait_for_socket(&self, display_num: i32) -> Result<(), DesktopProblem> { + let socket = socket_path(display_num); + let parent = socket + .parent() + .map(Path::to_path_buf) + .unwrap_or_else(|| PathBuf::from("/tmp/.X11-unix")); + let _ = fs::create_dir_all(parent); + + let start = tokio::time::Instant::now(); + while start.elapsed() < STARTUP_TIMEOUT { + 
if socket.exists() { + return Ok(()); + } + tokio::time::sleep(Duration::from_millis(100)).await; + } + + Err(DesktopProblem::runtime_failed( + format!("timed out waiting for X socket {}", socket.display()), + None, + Vec::new(), + )) + } + + fn snapshot_locked(&self, state: &DesktopRuntimeStateData) -> DesktopStatusResponse { + DesktopStatusResponse { + state: state.state, + display: state.display.clone(), + resolution: state.resolution.clone(), + started_at: state.started_at.clone(), + last_error: state.last_error.clone(), + missing_dependencies: state.missing_dependencies.clone(), + install_command: state.install_command.clone(), + processes: self.processes_locked(state), + runtime_log_path: Some(state.runtime_log_path.to_string_lossy().to_string()), + } + } + + fn processes_locked(&self, state: &DesktopRuntimeStateData) -> Vec { + let mut processes = Vec::new(); + if let Some(process) = state.xvfb.as_ref() { + processes.push(DesktopProcessInfo { + name: process.name.to_string(), + pid: process.pid, + running: process.running, + log_path: None, + }); + } + if let Some(process) = state.openbox.as_ref() { + processes.push(DesktopProcessInfo { + name: process.name.to_string(), + pid: process.pid, + running: process.running, + log_path: None, + }); + } + if let Some(pid) = state.dbus_pid { + processes.push(DesktopProcessInfo { + name: "dbus".to_string(), + pid: Some(pid), + running: process_exists(pid), + log_path: None, + }); + } + processes + } + + fn record_problem_locked(&self, state: &mut DesktopRuntimeStateData, problem: &DesktopProblem) { + state.last_error = Some(problem.to_error_info()); + self.write_runtime_log_locked( + state, + &format!("{}: {}", problem.code(), problem.to_error_info().message), + ); + } + + fn ensure_state_dir_locked(&self, state: &DesktopRuntimeStateData) -> Result<(), String> { + fs::create_dir_all(&self.config.state_dir).map_err(|err| { + format!( + "failed to create desktop state dir {}: {err}", + self.config.state_dir.display() + ) 
+ })?; + if let Some(parent) = state.runtime_log_path.parent() { + fs::create_dir_all(parent).map_err(|err| { + format!( + "failed to create runtime log dir {}: {err}", + parent.display() + ) + })?; + } + Ok(()) + } + + fn write_runtime_log_locked(&self, state: &DesktopRuntimeStateData, message: &str) { + if let Some(parent) = state.runtime_log_path.parent() { + let _ = fs::create_dir_all(parent); + } + let line = format!("{} {}\n", chrono::Utc::now().to_rfc3339(), message); + let _ = OpenOptions::new() + .create(true) + .append(true) + .open(&state.runtime_log_path) + .and_then(|mut file| std::io::Write::write_all(&mut file, line.as_bytes())); + } +} + +fn desktop_problem_to_sandbox_error(problem: DesktopProblem) -> SandboxError { + SandboxError::Conflict { + message: problem.to_error_info().message, + } +} + +fn default_state_dir() -> PathBuf { + if let Ok(value) = std::env::var("XDG_STATE_HOME") { + return PathBuf::from(value).join("sandbox-agent").join("desktop"); + } + if let Some(home) = dirs::home_dir() { + return home + .join(".local") + .join("state") + .join("sandbox-agent") + .join("desktop"); + } + std::env::temp_dir().join("sandbox-agent-desktop") +} + +fn socket_path(display_num: i32) -> PathBuf { + PathBuf::from(format!("/tmp/.X11-unix/X{display_num}")) +} + +fn find_binary(name: &str) -> Option { + let path_env = std::env::var_os("PATH")?; + for path in std::env::split_paths(&path_env) { + let candidate = path.join(name); + if candidate.is_file() { + return Some(candidate); + } + } + None +} + +async fn run_command_output( + command: &str, + args: &[String], + environment: &HashMap, + timeout: Duration, +) -> Result { + run_command_output_with_optional_stdin(command, args, environment, timeout, None).await +} + +async fn run_command_output_with_stdin( + command: &str, + args: &[String], + environment: &HashMap, + timeout: Duration, + stdin_bytes: Vec, +) -> Result { + run_command_output_with_optional_stdin(command, args, environment, timeout, 
Some(stdin_bytes)) + .await +} + +async fn run_command_output_with_optional_stdin( + command: &str, + args: &[String], + environment: &HashMap, + timeout: Duration, + stdin_bytes: Option>, +) -> Result { + use tokio::io::{AsyncReadExt, AsyncWriteExt}; + + let mut child = Command::new(command); + child.args(args); + child.envs(environment); + child.stdin(if stdin_bytes.is_some() { + Stdio::piped() + } else { + Stdio::null() + }); + child.stdout(Stdio::piped()); + child.stderr(Stdio::piped()); + + let mut child = child.spawn().map_err(|err| err.to_string())?; + let stdout = child + .stdout + .take() + .ok_or_else(|| "failed to capture child stdout".to_string())?; + let stderr = child + .stderr + .take() + .ok_or_else(|| "failed to capture child stderr".to_string())?; + + let stdin_task = if let Some(bytes) = stdin_bytes { + let mut stdin = child + .stdin + .take() + .ok_or_else(|| "failed to capture child stdin".to_string())?; + Some(tokio::spawn(async move { + stdin.write_all(&bytes).await?; + stdin.shutdown().await + })) + } else { + None + }; + + let stdout_task = tokio::spawn(async move { + let mut stdout = stdout; + let mut bytes = Vec::new(); + stdout.read_to_end(&mut bytes).await.map(|_| bytes) + }); + let stderr_task = tokio::spawn(async move { + let mut stderr = stderr; + let mut bytes = Vec::new(); + stderr.read_to_end(&mut bytes).await.map(|_| bytes) + }); + + let status = match tokio::time::timeout(timeout, child.wait()).await { + Ok(result) => result.map_err(|err| err.to_string())?, + Err(_) => { + terminate_child(&mut child).await?; + if let Some(stdin_task) = stdin_task { + let _ = stdin_task.await; + } + let _ = stdout_task.await; + let _ = stderr_task.await; + return Err(format!("command timed out after {}s", timeout.as_secs())); + } + }; + + if let Some(stdin_task) = stdin_task { + stdin_task + .await + .map_err(|err| err.to_string())? + .map_err(|err| err.to_string())?; + } + + let stdout = stdout_task + .await + .map_err(|err| err.to_string())? 
+ .map_err(|err| err.to_string())?; + let stderr = stderr_task + .await + .map_err(|err| err.to_string())? + .map_err(|err| err.to_string())?; + + Ok(Output { + status, + stdout, + stderr, + }) +} + +async fn terminate_child(child: &mut Child) -> Result<(), String> { + if let Ok(Some(_)) = child.try_wait() { + return Ok(()); + } + child.start_kill().map_err(|err| err.to_string())?; + let _ = tokio::time::timeout(Duration::from_secs(5), child.wait()).await; + Ok(()) +} + +fn process_exists(pid: u32) -> bool { + #[cfg(unix)] + unsafe { + return libc::kill(pid as i32, 0) == 0 + || std::io::Error::last_os_error().raw_os_error() != Some(libc::ESRCH); + } + #[cfg(not(unix))] + { + let _ = pid; + false + } +} + +fn parse_xrandr_resolution(bytes: &[u8]) -> Result { + let text = String::from_utf8_lossy(bytes); + for line in text.lines() { + if let Some(index) = line.find(" current ") { + let tail = &line[index + " current ".len()..]; + let mut parts = tail.split(','); + if let Some(current) = parts.next() { + let dims: Vec<&str> = current.split_whitespace().collect(); + if dims.len() >= 3 { + let width = dims[0] + .parse::() + .map_err(|_| "failed to parse xrandr width".to_string())?; + let height = dims[2] + .parse::() + .map_err(|_| "failed to parse xrandr height".to_string())?; + return Ok(DesktopResolution { + width, + height, + dpi: None, + }); + } + } + } + } + Err("unable to parse xrandr current resolution".to_string()) +} + +fn parse_mouse_position(bytes: &[u8]) -> Result { + let text = String::from_utf8_lossy(bytes); + let mut x = None; + let mut y = None; + let mut screen = None; + let mut window = None; + for line in text.lines() { + if let Some((key, value)) = line.split_once('=') { + match key { + "X" => x = value.parse::().ok(), + "Y" => y = value.parse::().ok(), + "SCREEN" => screen = value.parse::().ok(), + "WINDOW" => window = Some(value.to_string()), + _ => {} + } + } + } + match (x, y) { + (Some(x), Some(y)) => Ok(DesktopMousePositionResponse { + x, + y, 
+ screen, + window, + }), + _ => Err("unable to parse xdotool mouse position".to_string()), + } +} + +fn type_text_args(text: String, delay_ms: u32) -> Vec { + vec![ + "type".to_string(), + "--delay".to_string(), + delay_ms.to_string(), + "--".to_string(), + text, + ] +} + +fn press_key_args(key: String, modifiers: Option) -> Vec { + vec![ + "key".to_string(), + "--".to_string(), + key_with_modifiers(key, modifiers), + ] +} + +fn key_transition_args(command: &str, key: String) -> Vec { + vec![command.to_string(), "--".to_string(), key] +} + +fn key_with_modifiers(key: String, modifiers: Option) -> String { + let Some(modifiers) = modifiers else { + return key; + }; + + let mut parts = Vec::new(); + if modifiers.ctrl == Some(true) { + parts.push("ctrl"); + } + if modifiers.shift == Some(true) { + parts.push("shift"); + } + if modifiers.alt == Some(true) { + parts.push("alt"); + } + if modifiers.cmd == Some(true) { + parts.push("super"); + } + parts.push(key.as_str()); + parts.join("+") +} + +fn mouse_button_transition_args( + command: &str, + coordinates: Option<(i32, i32)>, + button: u8, +) -> Vec { + let mut args = Vec::new(); + if let Some((x, y)) = coordinates { + args.push("mousemove".to_string()); + args.push(x.to_string()); + args.push(y.to_string()); + } + args.push(command.to_string()); + args.push(button.to_string()); + args +} + +fn screenshot_options( + format: Option, + quality: Option, + scale: Option, +) -> Result { + let quality = quality.unwrap_or(85); + if !(1..=100).contains(&quality) { + return Err(DesktopProblem::invalid_action( + "quality must be between 1 and 100", + )); + } + + let scale = scale.unwrap_or(1.0); + if !(0.1..=1.0).contains(&scale) { + return Err(DesktopProblem::invalid_action( + "scale must be between 0.1 and 1.0", + )); + } + + Ok(DesktopScreenshotOptions { + format: format.unwrap_or(DesktopScreenshotFormat::Png), + quality, + scale, + }) +} + +async fn maybe_convert_screenshot( + bytes: Vec, + options: 
&DesktopScreenshotOptions, + environment: &HashMap, +) -> Result, String> { + if !options.needs_convert() { + return Ok(bytes); + } + + let mut args = vec!["png:-".to_string()]; + if (options.scale - 1.0).abs() > f32::EPSILON { + args.push("-resize".to_string()); + args.push(format!("{:.2}%", options.scale * 100.0)); + } + if options.format != DesktopScreenshotFormat::Png { + args.push("-quality".to_string()); + args.push(options.quality.to_string()); + } + args.push(options.output_arg().to_string()); + + let output = + run_command_output_with_stdin("convert", &args, environment, SCREENSHOT_TIMEOUT, bytes) + .await?; + if !output.status.success() { + return Err(format!( + "desktop screenshot conversion failed: {}", + String::from_utf8_lossy(&output.stderr).trim() + )); + } + Ok(output.stdout) +} + +fn validate_image_bytes(bytes: &[u8], format: DesktopScreenshotFormat) -> Result<(), String> { + match format { + DesktopScreenshotFormat::Png => { + if bytes.len() < PNG_SIGNATURE.len() || &bytes[..PNG_SIGNATURE.len()] != PNG_SIGNATURE { + return Err("desktop screenshot did not return PNG bytes".to_string()); + } + } + DesktopScreenshotFormat::Jpeg => { + if bytes.len() < JPEG_SIGNATURE.len() + || &bytes[..JPEG_SIGNATURE.len()] != JPEG_SIGNATURE + { + return Err("desktop screenshot did not return JPEG bytes".to_string()); + } + } + DesktopScreenshotFormat::Webp => { + if bytes.len() < 12 + || &bytes[..WEBP_RIFF_SIGNATURE.len()] != WEBP_RIFF_SIGNATURE + || &bytes[8..12] != WEBP_WEBP_SIGNATURE + { + return Err("desktop screenshot did not return WebP bytes".to_string()); + } + } + } + Ok(()) +} + +fn validate_start_request(width: u32, height: u32, dpi: u32) -> Result<(), DesktopProblem> { + if width == 0 || height == 0 { + return Err(DesktopProblem::invalid_action( + "Desktop width and height must be greater than 0", + )); + } + if dpi == 0 { + return Err(DesktopProblem::invalid_action( + "Desktop dpi must be greater than 0", + )); + } + Ok(()) +} + +fn 
validate_region(query: &DesktopRegionScreenshotQuery) -> Result<(), DesktopProblem> { + validate_coordinates(query.x, query.y)?; + if query.width == 0 || query.height == 0 { + return Err(DesktopProblem::invalid_action( + "Screenshot region width and height must be greater than 0", + )); + } + Ok(()) +} + +fn validate_optional_coordinates( + x: Option, + y: Option, +) -> Result, DesktopProblem> { + match (x, y) { + (Some(x), Some(y)) => { + validate_coordinates(x, y)?; + Ok(Some((x, y))) + } + (None, None) => Ok(None), + _ => Err(DesktopProblem::invalid_action( + "x and y must both be provided when setting coordinates", + )), + } +} + +fn validate_coordinates(x: i32, y: i32) -> Result<(), DesktopProblem> { + if x < 0 || y < 0 { + return Err(DesktopProblem::invalid_action( + "Desktop coordinates must be non-negative", + )); + } + Ok(()) +} + +fn mouse_button_code(button: DesktopMouseButton) -> u8 { + match button { + DesktopMouseButton::Left => 1, + DesktopMouseButton::Middle => 2, + DesktopMouseButton::Right => 3, + } +} + +fn append_scroll_clicks( + args: &mut Vec, + delta: i32, + positive_button: u8, + negative_button: u8, +) { + if delta == 0 { + return; + } + let button = if delta > 0 { + positive_button + } else { + negative_button + }; + let repeat = delta.unsigned_abs(); + args.push("click".to_string()); + if repeat > 1 { + args.push("--repeat".to_string()); + args.push(repeat.to_string()); + } + args.push(button.to_string()); +} + +fn parse_window_geometry(bytes: &[u8]) -> Result<(i32, i32, u32, u32), String> { + let text = String::from_utf8_lossy(bytes); + let mut position = None; + let mut geometry = None; + for line in text.lines() { + let trimmed = line.trim(); + if let Some(value) = trimmed.strip_prefix("Position:") { + let coordinate_text = value + .trim() + .split_whitespace() + .next() + .ok_or_else(|| "unable to parse window position".to_string())?; + let (x, y) = coordinate_text + .split_once(',') + .ok_or_else(|| "unable to parse window 
position".to_string())?; + let x = x + .parse::() + .map_err(|_| "failed to parse window x coordinate".to_string())?; + let y = y + .parse::() + .map_err(|_| "failed to parse window y coordinate".to_string())?; + position = Some((x, y)); + } + if let Some(value) = trimmed.strip_prefix("Geometry:") { + let (width, height) = value + .trim() + .split_once('x') + .ok_or_else(|| "unable to parse window geometry".to_string())?; + let width = width + .parse::() + .map_err(|_| "failed to parse window width".to_string())?; + let height = height + .parse::() + .map_err(|_| "failed to parse window height".to_string())?; + geometry = Some((width, height)); + } + } + + match (position, geometry) { + (Some((x, y)), Some((width, height))) => Ok((x, y, width, height)), + _ => Err("unable to parse xdotool window geometry".to_string()), + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn parse_xrandr_resolution_reads_current_geometry() { + let bytes = b"Screen 0: minimum 1 x 1, current 1440 x 900, maximum 32767 x 32767\n"; + let parsed = parse_xrandr_resolution(bytes).expect("parse resolution"); + assert_eq!(parsed.width, 1440); + assert_eq!(parsed.height, 900); + } + + #[test] + fn parse_mouse_position_reads_shell_output() { + let bytes = b"X=123\nY=456\nSCREEN=0\nWINDOW=0\n"; + let parsed = parse_mouse_position(bytes).expect("parse mouse position"); + assert_eq!(parsed.x, 123); + assert_eq!(parsed.y, 456); + assert_eq!(parsed.screen, Some(0)); + assert_eq!(parsed.window.as_deref(), Some("0")); + } + + #[test] + fn png_validation_rejects_non_png_bytes() { + let error = validate_image_bytes(b"not png", DesktopScreenshotFormat::Png) + .expect_err("validation should fail"); + assert!(error.contains("PNG")); + } + + #[test] + fn type_text_args_insert_double_dash_before_user_text() { + let args = type_text_args("--help".to_string(), 5); + assert_eq!(args, vec!["type", "--delay", "5", "--", "--help"]); + } + + #[test] + fn 
press_key_args_insert_double_dash_before_user_key() { + let args = press_key_args("--help".to_string(), None); + assert_eq!(args, vec!["key", "--", "--help"]); + } + + #[test] + fn press_key_args_builds_key_sequence_from_modifiers() { + let args = press_key_args( + "a".to_string(), + Some(DesktopKeyModifiers { + ctrl: Some(true), + shift: Some(true), + alt: Some(false), + cmd: None, + }), + ); + assert_eq!(args, vec!["key", "--", "ctrl+shift+a"]); + } + + #[test] + fn append_scroll_clicks_uses_positive_direction_buttons() { + let mut args = Vec::new(); + append_scroll_clicks(&mut args, 2, 5, 4); + append_scroll_clicks(&mut args, -3, 7, 6); + assert_eq!( + args, + vec!["click", "--repeat", "2", "5", "click", "--repeat", "3", "6"] + ); + } + + #[test] + fn parse_window_geometry_reads_xdotool_output() { + let bytes = b"Window 123\n Position: 400,300 (screen: 0)\n Geometry: 1440x900\n"; + let parsed = parse_window_geometry(bytes).expect("parse geometry"); + assert_eq!(parsed, (400, 300, 1440, 900)); + } + + #[cfg(unix)] + #[tokio::test] + async fn run_command_output_kills_child_on_timeout() { + let pid_file = std::env::temp_dir().join(format!( + "sandbox-agent-desktop-runtime-timeout-{}.pid", + std::process::id() + )); + let _ = std::fs::remove_file(&pid_file); + let command = format!("echo $$ > {}; exec sleep 30", pid_file.display()); + let args = vec!["-c".to_string(), command]; + + let error = run_command_output("sh", &args, &HashMap::new(), Duration::from_millis(200)) + .await + .expect_err("command should time out"); + assert!(error.contains("timed out")); + + let pid = std::fs::read_to_string(&pid_file) + .expect("pid file should exist") + .trim() + .parse::() + .expect("pid should parse"); + + for _ in 0..20 { + if !process_exists(pid) { + let _ = std::fs::remove_file(&pid_file); + return; + } + tokio::time::sleep(Duration::from_millis(50)).await; + } + + let _ = std::fs::remove_file(&pid_file); + panic!("timed out child process {pid} still exists after timeout 
cleanup"); + } +} diff --git a/server/packages/sandbox-agent/src/desktop_streaming.rs b/server/packages/sandbox-agent/src/desktop_streaming.rs new file mode 100644 index 0000000..86fb611 --- /dev/null +++ b/server/packages/sandbox-agent/src/desktop_streaming.rs @@ -0,0 +1,47 @@ +use std::sync::Arc; + +use tokio::sync::Mutex; + +use sandbox_agent_error::SandboxError; + +use crate::desktop_types::DesktopStreamStatusResponse; + +#[derive(Debug, Clone)] +pub struct DesktopStreamingManager { + inner: Arc>, +} + +#[derive(Debug, Default)] +struct DesktopStreamingState { + active: bool, +} + +impl DesktopStreamingManager { + pub fn new() -> Self { + Self { + inner: Arc::new(Mutex::new(DesktopStreamingState::default())), + } + } + + pub async fn start(&self) -> DesktopStreamStatusResponse { + let mut state = self.inner.lock().await; + state.active = true; + DesktopStreamStatusResponse { active: true } + } + + pub async fn stop(&self) -> DesktopStreamStatusResponse { + let mut state = self.inner.lock().await; + state.active = false; + DesktopStreamStatusResponse { active: false } + } + + pub async fn ensure_active(&self) -> Result<(), SandboxError> { + if self.inner.lock().await.active { + Ok(()) + } else { + Err(SandboxError::Conflict { + message: "desktop streaming is not active".to_string(), + }) + } + } +} diff --git a/server/packages/sandbox-agent/src/desktop_types.rs b/server/packages/sandbox-agent/src/desktop_types.rs new file mode 100644 index 0000000..7f813da --- /dev/null +++ b/server/packages/sandbox-agent/src/desktop_types.rs @@ -0,0 +1,302 @@ +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use utoipa::{IntoParams, ToSchema}; + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum DesktopState { + Inactive, + InstallRequired, + Starting, + Active, + Stopping, + Failed, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq)] 
+#[serde(rename_all = "camelCase")] +pub struct DesktopResolution { + pub width: u32, + pub height: u32, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub dpi: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct DesktopErrorInfo { + pub code: String, + pub message: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct DesktopProcessInfo { + pub name: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub pid: Option, + pub running: bool, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub log_path: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct DesktopStatusResponse { + pub state: DesktopState, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub display: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub resolution: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub started_at: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub last_error: Option, + #[serde(default)] + pub missing_dependencies: Vec, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub install_command: Option, + #[serde(default)] + pub processes: Vec, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub runtime_log_path: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema, IntoParams, Default)] +#[serde(rename_all = "camelCase")] +pub struct DesktopStartRequest { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub width: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub height: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub dpi: Option, +} + 
+#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema, IntoParams, Default)] +#[serde(rename_all = "camelCase")] +pub struct DesktopScreenshotQuery { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub format: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub quality: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub scale: Option, +} + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq)] +#[serde(rename_all = "lowercase")] +pub enum DesktopScreenshotFormat { + Png, + Jpeg, + Webp, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema, IntoParams)] +#[serde(rename_all = "camelCase")] +pub struct DesktopRegionScreenshotQuery { + pub x: i32, + pub y: i32, + pub width: u32, + pub height: u32, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub format: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub quality: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub scale: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct DesktopMousePositionResponse { + pub x: i32, + pub y: i32, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub screen: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub window: Option, +} + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq)] +#[serde(rename_all = "lowercase")] +pub enum DesktopMouseButton { + Left, + Middle, + Right, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct DesktopMouseMoveRequest { + pub x: i32, + pub y: i32, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct DesktopMouseClickRequest { + pub x: i32, + pub y: i32, 
+ #[serde(default, skip_serializing_if = "Option::is_none")] + pub button: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub click_count: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct DesktopMouseDownRequest { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub x: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub y: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub button: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct DesktopMouseUpRequest { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub x: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub y: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub button: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct DesktopMouseDragRequest { + pub start_x: i32, + pub start_y: i32, + pub end_x: i32, + pub end_y: i32, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub button: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct DesktopMouseScrollRequest { + pub x: i32, + pub y: i32, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub delta_x: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub delta_y: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct DesktopKeyboardTypeRequest { + pub text: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub delay_ms: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct 
DesktopKeyboardPressRequest { + pub key: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub modifiers: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq, Default)] +#[serde(rename_all = "camelCase")] +pub struct DesktopKeyModifiers { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub ctrl: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub shift: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub alt: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub cmd: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct DesktopKeyboardDownRequest { + pub key: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct DesktopKeyboardUpRequest { + pub key: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct DesktopActionResponse { + pub ok: bool, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct DesktopDisplayInfoResponse { + pub display: String, + pub resolution: DesktopResolution, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct DesktopWindowInfo { + pub id: String, + pub title: String, + pub x: i32, + pub y: i32, + pub width: u32, + pub height: u32, + pub is_active: bool, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct DesktopWindowListResponse { + pub windows: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema, Default)] +#[serde(rename_all = "camelCase")] +pub struct 
DesktopRecordingStartRequest { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub fps: Option, +} + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq)] +#[serde(rename_all = "lowercase")] +pub enum DesktopRecordingStatus { + Recording, + Completed, + Failed, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct DesktopRecordingInfo { + pub id: String, + pub status: DesktopRecordingStatus, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub process_id: Option, + pub file_name: String, + pub bytes: u64, + pub started_at: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub ended_at: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct DesktopRecordingListResponse { + pub recordings: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct DesktopStreamStatusResponse { + pub active: bool, +} diff --git a/server/packages/sandbox-agent/src/lib.rs b/server/packages/sandbox-agent/src/lib.rs index e84b10b..d7b92d6 100644 --- a/server/packages/sandbox-agent/src/lib.rs +++ b/server/packages/sandbox-agent/src/lib.rs @@ -3,6 +3,12 @@ mod acp_proxy_runtime; pub mod cli; pub mod daemon; +mod desktop_errors; +mod desktop_install; +mod desktop_recording; +mod desktop_runtime; +mod desktop_streaming; +pub mod desktop_types; mod process_runtime; pub mod router; pub mod server_logs; diff --git a/server/packages/sandbox-agent/src/process_runtime.rs b/server/packages/sandbox-agent/src/process_runtime.rs index cd1bedd..3f2ce8d 100644 --- a/server/packages/sandbox-agent/src/process_runtime.rs +++ b/server/packages/sandbox-agent/src/process_runtime.rs @@ -1,5 +1,5 @@ use std::collections::{HashMap, VecDeque}; -use 
std::sync::atomic::{AtomicU64, Ordering}; +use std::sync::atomic::{AtomicBool, AtomicU64, Ordering}; use std::sync::Arc; use std::time::Instant; @@ -27,6 +27,22 @@ pub enum ProcessStream { Pty, } +#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "lowercase")] +pub enum ProcessOwner { + User, + Desktop, + System, +} + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum RestartPolicy { + Never, + Always, + OnFailure, +} + #[derive(Debug, Clone)] pub struct ProcessStartSpec { pub command: String, @@ -35,6 +51,8 @@ pub struct ProcessStartSpec { pub env: HashMap, pub tty: bool, pub interactive: bool, + pub owner: ProcessOwner, + pub restart_policy: Option, } #[derive(Debug, Clone)] @@ -78,6 +96,7 @@ pub struct ProcessSnapshot { pub cwd: Option, pub tty: bool, pub interactive: bool, + pub owner: ProcessOwner, pub status: ProcessStatus, pub pid: Option, pub exit_code: Option, @@ -129,17 +148,27 @@ struct ManagedProcess { cwd: Option, tty: bool, interactive: bool, + owner: ProcessOwner, + #[allow(dead_code)] + restart_policy: RestartPolicy, + spec: ProcessStartSpec, created_at_ms: i64, - pid: Option, max_log_bytes: usize, - stdin: Mutex>, - #[cfg(unix)] - pty_resize_fd: Mutex>, + runtime: Mutex, status: RwLock, sequence: AtomicU64, logs: Mutex>, total_log_bytes: Mutex, log_tx: broadcast::Sender, + stop_requested: AtomicBool, +} + +#[derive(Debug)] +struct ManagedRuntime { + pid: Option, + stdin: Option, + #[cfg(unix)] + pty_resize_fd: Option, } #[derive(Debug)] @@ -162,17 +191,17 @@ struct ManagedStatus { } struct SpawnedPipeProcess { - process: Arc, child: Child, stdout: tokio::process::ChildStdout, stderr: tokio::process::ChildStderr, + runtime: ManagedRuntime, } #[cfg(unix)] struct SpawnedTtyProcess { - process: Arc, child: Child, reader: tokio::fs::File, + runtime: ManagedRuntime, } impl ProcessRuntime { @@ -224,21 +253,14 @@ impl ProcessRuntime { &self, spec: 
ProcessStartSpec, ) -> Result { - let config = self.get_config().await; - - let process_refs = { - let processes = self.inner.processes.read().await; - processes.values().cloned().collect::>() - }; - - let mut running_count = 0usize; - for process in process_refs { - if process.status.read().await.status == ProcessStatus::Running { - running_count += 1; - } + if spec.command.trim().is_empty() { + return Err(SandboxError::InvalidRequest { + message: "command must not be empty".to_string(), + }); } - if running_count >= config.max_concurrent_processes { + let config = self.get_config().await; + if self.running_process_count().await >= config.max_concurrent_processes { return Err(SandboxError::Conflict { message: format!( "max concurrent process limit reached ({})", @@ -247,73 +269,44 @@ impl ProcessRuntime { }); } - if spec.command.trim().is_empty() { - return Err(SandboxError::InvalidRequest { - message: "command must not be empty".to_string(), - }); - } - let id_num = self.inner.next_id.fetch_add(1, Ordering::Relaxed); let id = format!("proc_{id_num}"); + let process = Arc::new(ManagedProcess { + id: id.clone(), + command: spec.command.clone(), + args: spec.args.clone(), + cwd: spec.cwd.clone(), + tty: spec.tty, + interactive: spec.interactive, + owner: spec.owner, + restart_policy: spec.restart_policy.unwrap_or(RestartPolicy::Never), + spec, + created_at_ms: now_ms(), + max_log_bytes: config.max_log_bytes_per_process, + runtime: Mutex::new(ManagedRuntime { + pid: None, + stdin: None, + #[cfg(unix)] + pty_resize_fd: None, + }), + status: RwLock::new(ManagedStatus { + status: ProcessStatus::Running, + exit_code: None, + exited_at_ms: None, + }), + sequence: AtomicU64::new(1), + logs: Mutex::new(VecDeque::new()), + total_log_bytes: Mutex::new(0), + log_tx: broadcast::channel(512).0, + stop_requested: AtomicBool::new(false), + }); - if spec.tty { - #[cfg(unix)] - { - let spawned = self - .spawn_tty_process(id.clone(), spec, config.max_log_bytes_per_process) - .await?; 
- let process = spawned.process.clone(); - self.inner - .processes - .write() - .await - .insert(id, process.clone()); - - let p = process.clone(); - tokio::spawn(async move { - pump_output(p, spawned.reader, ProcessStream::Pty).await; - }); - - let p = process.clone(); - tokio::spawn(async move { - watch_exit(p, spawned.child).await; - }); - - return Ok(process.snapshot().await); - } - #[cfg(not(unix))] - { - return Err(SandboxError::StreamError { - message: "tty process mode is not supported on this platform".to_string(), - }); - } - } - - let spawned = self - .spawn_pipe_process(id.clone(), spec, config.max_log_bytes_per_process) - .await?; - let process = spawned.process.clone(); + self.spawn_existing_process(process.clone()).await?; self.inner .processes .write() .await .insert(id, process.clone()); - - let p = process.clone(); - tokio::spawn(async move { - pump_output(p, spawned.stdout, ProcessStream::Stdout).await; - }); - - let p = process.clone(); - tokio::spawn(async move { - pump_output(p, spawned.stderr, ProcessStream::Stderr).await; - }); - - let p = process.clone(); - tokio::spawn(async move { - watch_exit(p, spawned.child).await; - }); - Ok(process.snapshot().await) } @@ -412,11 +405,13 @@ impl ProcessRuntime { }) } - pub async fn list_processes(&self) -> Vec { + pub async fn list_processes(&self, owner: Option) -> Vec { let processes = self.inner.processes.read().await; let mut items = Vec::with_capacity(processes.len()); for process in processes.values() { - items.push(process.snapshot().await); + if owner.is_none_or(|expected| process.owner == expected) { + items.push(process.snapshot().await); + } } items.sort_by(|a, b| a.id.cmp(&b.id)); items @@ -453,6 +448,7 @@ impl ProcessRuntime { wait_ms: Option, ) -> Result { let process = self.lookup_process(id).await?; + process.stop_requested.store(true, Ordering::SeqCst); process.send_signal(SIGTERM).await?; maybe_wait_for_exit(process.clone(), wait_ms.unwrap_or(2_000)).await; 
Ok(process.snapshot().await) @@ -464,6 +460,7 @@ impl ProcessRuntime { wait_ms: Option, ) -> Result { let process = self.lookup_process(id).await?; + process.stop_requested.store(true, Ordering::SeqCst); process.send_signal(SIGKILL).await?; maybe_wait_for_exit(process.clone(), wait_ms.unwrap_or(1_000)).await; Ok(process.snapshot().await) @@ -506,6 +503,17 @@ impl ProcessRuntime { Ok(process.log_tx.subscribe()) } + async fn running_process_count(&self) -> usize { + let processes = self.inner.processes.read().await; + let mut running = 0usize; + for process in processes.values() { + if process.status.read().await.status == ProcessStatus::Running { + running += 1; + } + } + running + } + async fn lookup_process(&self, id: &str) -> Result, SandboxError> { let process = self.inner.processes.read().await.get(id).cloned(); process.ok_or_else(|| SandboxError::NotFound { @@ -514,11 +522,83 @@ impl ProcessRuntime { }) } - async fn spawn_pipe_process( + async fn spawn_existing_process( &self, - id: String, - spec: ProcessStartSpec, - max_log_bytes: usize, + process: Arc, + ) -> Result<(), SandboxError> { + process.stop_requested.store(false, Ordering::SeqCst); + let mut runtime_guard = process.runtime.lock().await; + let mut status_guard = process.status.write().await; + + if process.tty { + #[cfg(unix)] + { + let SpawnedTtyProcess { + child, + reader, + runtime, + } = self.spawn_tty_process(&process.spec)?; + *runtime_guard = runtime; + status_guard.status = ProcessStatus::Running; + status_guard.exit_code = None; + status_guard.exited_at_ms = None; + drop(status_guard); + drop(runtime_guard); + + let process_for_output = process.clone(); + tokio::spawn(async move { + pump_output(process_for_output, reader, ProcessStream::Pty).await; + }); + + let runtime = self.clone(); + tokio::spawn(async move { + watch_exit(runtime, process, child).await; + }); + + return Ok(()); + } + #[cfg(not(unix))] + { + return Err(SandboxError::StreamError { + message: "tty process mode is not 
supported on this platform".to_string(), + }); + } + } + + let SpawnedPipeProcess { + child, + stdout, + stderr, + runtime, + } = self.spawn_pipe_process(&process.spec)?; + *runtime_guard = runtime; + status_guard.status = ProcessStatus::Running; + status_guard.exit_code = None; + status_guard.exited_at_ms = None; + drop(status_guard); + drop(runtime_guard); + + let process_for_stdout = process.clone(); + tokio::spawn(async move { + pump_output(process_for_stdout, stdout, ProcessStream::Stdout).await; + }); + + let process_for_stderr = process.clone(); + tokio::spawn(async move { + pump_output(process_for_stderr, stderr, ProcessStream::Stderr).await; + }); + + let runtime = self.clone(); + tokio::spawn(async move { + watch_exit(runtime, process, child).await; + }); + + Ok(()) + } + + fn spawn_pipe_process( + &self, + spec: &ProcessStartSpec, ) -> Result { let mut cmd = Command::new(&spec.command); cmd.args(&spec.args) @@ -551,35 +631,14 @@ impl ProcessRuntime { .ok_or_else(|| SandboxError::StreamError { message: "failed to capture stderr".to_string(), })?; - let pid = child.id(); - - let (tx, _rx) = broadcast::channel(512); - let process = Arc::new(ManagedProcess { - id, - command: spec.command, - args: spec.args, - cwd: spec.cwd, - tty: false, - interactive: spec.interactive, - created_at_ms: now_ms(), - pid, - max_log_bytes, - stdin: Mutex::new(stdin.map(ProcessStdin::Pipe)), - #[cfg(unix)] - pty_resize_fd: Mutex::new(None), - status: RwLock::new(ManagedStatus { - status: ProcessStatus::Running, - exit_code: None, - exited_at_ms: None, - }), - sequence: AtomicU64::new(1), - logs: Mutex::new(VecDeque::new()), - total_log_bytes: Mutex::new(0), - log_tx: tx, - }); Ok(SpawnedPipeProcess { - process, + runtime: ManagedRuntime { + pid: child.id(), + stdin: stdin.map(ProcessStdin::Pipe), + #[cfg(unix)] + pty_resize_fd: None, + }, child, stdout, stderr, @@ -587,11 +646,9 @@ impl ProcessRuntime { } #[cfg(unix)] - async fn spawn_tty_process( + fn spawn_tty_process( &self, 
- id: String, - spec: ProcessStartSpec, - max_log_bytes: usize, + spec: &ProcessStartSpec, ) -> Result { use std::os::fd::AsRawFd; use std::process::Stdio; @@ -632,8 +689,8 @@ impl ProcessRuntime { let child = cmd.spawn().map_err(|err| SandboxError::StreamError { message: format!("failed to spawn tty process: {err}"), })?; - let pid = child.id(); + drop(slave_fd); let master_raw = master_fd.as_raw_fd(); @@ -644,32 +701,12 @@ impl ProcessRuntime { let writer_file = tokio::fs::File::from_std(std::fs::File::from(writer_fd)); let resize_file = std::fs::File::from(resize_fd); - let (tx, _rx) = broadcast::channel(512); - let process = Arc::new(ManagedProcess { - id, - command: spec.command, - args: spec.args, - cwd: spec.cwd, - tty: true, - interactive: spec.interactive, - created_at_ms: now_ms(), - pid, - max_log_bytes, - stdin: Mutex::new(Some(ProcessStdin::Pty(writer_file))), - pty_resize_fd: Mutex::new(Some(resize_file)), - status: RwLock::new(ManagedStatus { - status: ProcessStatus::Running, - exit_code: None, - exited_at_ms: None, - }), - sequence: AtomicU64::new(1), - logs: Mutex::new(VecDeque::new()), - total_log_bytes: Mutex::new(0), - log_tx: tx, - }); - Ok(SpawnedTtyProcess { - process, + runtime: ManagedRuntime { + pid, + stdin: Some(ProcessStdin::Pty(writer_file)), + pty_resize_fd: Some(resize_file), + }, child, reader: reader_file, }) @@ -694,6 +731,7 @@ pub struct ProcessLogFilter { impl ManagedProcess { async fn snapshot(&self) -> ProcessSnapshot { let status = self.status.read().await.clone(); + let pid = self.runtime.lock().await.pid; ProcessSnapshot { id: self.id.clone(), command: self.command.clone(), @@ -701,8 +739,9 @@ impl ManagedProcess { cwd: self.cwd.clone(), tty: self.tty, interactive: self.interactive, + owner: self.owner, status: status.status, - pid: self.pid, + pid, exit_code: status.exit_code, created_at_ms: self.created_at_ms, exited_at_ms: status.exited_at_ms, @@ -752,10 +791,13 @@ impl ManagedProcess { }); } - let mut guard = 
self.stdin.lock().await; - let stdin = guard.as_mut().ok_or_else(|| SandboxError::Conflict { - message: "process does not accept stdin".to_string(), - })?; + let mut runtime = self.runtime.lock().await; + let stdin = runtime + .stdin + .as_mut() + .ok_or_else(|| SandboxError::Conflict { + message: "process does not accept stdin".to_string(), + })?; match stdin { ProcessStdin::Pipe(pipe) => { @@ -825,7 +867,7 @@ impl ManagedProcess { if self.status.read().await.status != ProcessStatus::Running { return Ok(()); } - let Some(pid) = self.pid else { + let Some(pid) = self.runtime.lock().await.pid else { return Ok(()); }; @@ -840,8 +882,9 @@ impl ManagedProcess { #[cfg(unix)] { use std::os::fd::AsRawFd; - let guard = self.pty_resize_fd.lock().await; - let Some(fd) = guard.as_ref() else { + + let runtime = self.runtime.lock().await; + let Some(fd) = runtime.pty_resize_fd.as_ref() else { return Err(SandboxError::Conflict { message: "PTY resize handle unavailable".to_string(), }); @@ -857,6 +900,32 @@ impl ManagedProcess { Ok(()) } + + #[allow(dead_code)] + fn should_restart(&self, exit_code: Option) -> bool { + match self.restart_policy { + RestartPolicy::Never => false, + RestartPolicy::Always => true, + RestartPolicy::OnFailure => exit_code.unwrap_or(1) != 0, + } + } + + async fn mark_exited(&self, exit_code: Option, exited_at_ms: Option) { + { + let mut status = self.status.write().await; + status.status = ProcessStatus::Exited; + status.exit_code = exit_code; + status.exited_at_ms = exited_at_ms; + } + + let mut runtime = self.runtime.lock().await; + runtime.pid = None; + let _ = runtime.stdin.take(); + #[cfg(unix)] + { + let _ = runtime.pty_resize_fd.take(); + } + } } fn stream_matches(stream: ProcessStream, filter: ProcessLogFilterStream) -> bool { @@ -909,21 +978,16 @@ where } } -async fn watch_exit(process: Arc, mut child: Child) { +async fn watch_exit(runtime: ProcessRuntime, process: Arc, mut child: Child) { + let _ = runtime; let wait = child.wait().await; let 
(exit_code, exited_at_ms) = match wait { Ok(status) => (status.code(), Some(now_ms())), Err(_) => (None, Some(now_ms())), }; - { - let mut state = process.status.write().await; - state.status = ProcessStatus::Exited; - state.exit_code = exit_code; - state.exited_at_ms = exited_at_ms; - } - - let _ = process.stdin.lock().await.take(); + let _ = process.stop_requested.swap(false, Ordering::SeqCst); + process.mark_exited(exit_code, exited_at_ms).await; } async fn capture_output(mut reader: R, max_bytes: usize) -> std::io::Result<(Vec, bool)> diff --git a/server/packages/sandbox-agent/src/router.rs b/server/packages/sandbox-agent/src/router.rs index 110c325..70d3f45 100644 --- a/server/packages/sandbox-agent/src/router.rs +++ b/server/packages/sandbox-agent/src/router.rs @@ -34,12 +34,16 @@ use tar::Archive; use tokio_stream::wrappers::BroadcastStream; use tower_http::trace::TraceLayer; use tracing::Span; -use utoipa::{Modify, OpenApi, ToSchema}; +use utoipa::{IntoParams, Modify, OpenApi, ToSchema}; use crate::acp_proxy_runtime::{AcpProxyRuntime, ProxyPostOutcome}; +use crate::desktop_errors::DesktopProblem; +use crate::desktop_runtime::DesktopRuntime; +use crate::desktop_types::*; use crate::process_runtime::{ - decode_input_bytes, ProcessLogFilter, ProcessLogFilterStream, ProcessRuntime, - ProcessRuntimeConfig, ProcessSnapshot, ProcessStartSpec, ProcessStatus, ProcessStream, RunSpec, + decode_input_bytes, ProcessLogFilter, ProcessLogFilterStream, + ProcessOwner as RuntimeProcessOwner, ProcessRuntime, ProcessRuntimeConfig, ProcessSnapshot, + ProcessStartSpec, ProcessStatus, ProcessStream, RunSpec, }; use crate::ui; @@ -87,6 +91,7 @@ pub struct AppState { acp_proxy: Arc, opencode_server_manager: Arc, process_runtime: Arc, + desktop_runtime: Arc, pub(crate) branding: BrandingMode, version_cache: Mutex>, } @@ -111,12 +116,14 @@ impl AppState { }, )); let process_runtime = Arc::new(ProcessRuntime::new()); + let desktop_runtime = 
Arc::new(DesktopRuntime::new(process_runtime.clone())); Self { auth, agent_manager, acp_proxy, opencode_server_manager, process_runtime, + desktop_runtime, branding, version_cache: Mutex::new(HashMap::new()), } @@ -138,6 +145,10 @@ impl AppState { self.process_runtime.clone() } + pub(crate) fn desktop_runtime(&self) -> Arc { + self.desktop_runtime.clone() + } + pub(crate) fn purge_version_cache(&self, agent: AgentId) { self.version_cache.lock().unwrap().remove(&agent); } @@ -172,6 +183,59 @@ pub fn build_router(state: AppState) -> Router { pub fn build_router_with_state(shared: Arc) -> (Router, Arc) { let mut v1_router = Router::new() .route("/health", get(get_v1_health)) + .route("/desktop/status", get(get_v1_desktop_status)) + .route("/desktop/start", post(post_v1_desktop_start)) + .route("/desktop/stop", post(post_v1_desktop_stop)) + .route("/desktop/screenshot", get(get_v1_desktop_screenshot)) + .route( + "/desktop/screenshot/region", + get(get_v1_desktop_screenshot_region), + ) + .route( + "/desktop/mouse/position", + get(get_v1_desktop_mouse_position), + ) + .route("/desktop/mouse/move", post(post_v1_desktop_mouse_move)) + .route("/desktop/mouse/click", post(post_v1_desktop_mouse_click)) + .route("/desktop/mouse/down", post(post_v1_desktop_mouse_down)) + .route("/desktop/mouse/up", post(post_v1_desktop_mouse_up)) + .route("/desktop/mouse/drag", post(post_v1_desktop_mouse_drag)) + .route("/desktop/mouse/scroll", post(post_v1_desktop_mouse_scroll)) + .route( + "/desktop/keyboard/type", + post(post_v1_desktop_keyboard_type), + ) + .route( + "/desktop/keyboard/press", + post(post_v1_desktop_keyboard_press), + ) + .route( + "/desktop/keyboard/down", + post(post_v1_desktop_keyboard_down), + ) + .route("/desktop/keyboard/up", post(post_v1_desktop_keyboard_up)) + .route("/desktop/display/info", get(get_v1_desktop_display_info)) + .route("/desktop/windows", get(get_v1_desktop_windows)) + .route( + "/desktop/recording/start", + post(post_v1_desktop_recording_start), + 
) + .route( + "/desktop/recording/stop", + post(post_v1_desktop_recording_stop), + ) + .route("/desktop/recordings", get(get_v1_desktop_recordings)) + .route( + "/desktop/recordings/:id", + get(get_v1_desktop_recording).delete(delete_v1_desktop_recording), + ) + .route( + "/desktop/recordings/:id/download", + get(get_v1_desktop_recording_download), + ) + .route("/desktop/stream/start", post(post_v1_desktop_stream_start)) + .route("/desktop/stream/stop", post(post_v1_desktop_stream_stop)) + .route("/desktop/stream/ws", get(get_v1_desktop_stream_ws)) .route("/agents", get(get_v1_agents)) .route("/agents/:agent", get(get_v1_agent)) .route("/agents/:agent/install", post(post_v1_agent_install)) @@ -316,12 +380,40 @@ async fn opencode_unavailable() -> Response { pub async fn shutdown_servers(state: &Arc) { state.acp_proxy().shutdown_all().await; state.opencode_server_manager().shutdown().await; + state.desktop_runtime().shutdown().await; } #[derive(OpenApi)] #[openapi( paths( get_v1_health, + get_v1_desktop_status, + post_v1_desktop_start, + post_v1_desktop_stop, + get_v1_desktop_screenshot, + get_v1_desktop_screenshot_region, + get_v1_desktop_mouse_position, + post_v1_desktop_mouse_move, + post_v1_desktop_mouse_click, + post_v1_desktop_mouse_down, + post_v1_desktop_mouse_up, + post_v1_desktop_mouse_drag, + post_v1_desktop_mouse_scroll, + post_v1_desktop_keyboard_type, + post_v1_desktop_keyboard_press, + post_v1_desktop_keyboard_down, + post_v1_desktop_keyboard_up, + get_v1_desktop_display_info, + get_v1_desktop_windows, + post_v1_desktop_recording_start, + post_v1_desktop_recording_stop, + get_v1_desktop_recordings, + get_v1_desktop_recording, + get_v1_desktop_recording_download, + delete_v1_desktop_recording, + post_v1_desktop_stream_start, + post_v1_desktop_stream_stop, + get_v1_desktop_stream_ws, get_v1_agents, get_v1_agent, post_v1_agent_install, @@ -360,6 +452,37 @@ pub async fn shutdown_servers(state: &Arc) { components( schemas( HealthResponse, + DesktopState, + 
DesktopResolution, + DesktopErrorInfo, + DesktopProcessInfo, + DesktopStatusResponse, + DesktopStartRequest, + DesktopScreenshotQuery, + DesktopScreenshotFormat, + DesktopRegionScreenshotQuery, + DesktopMousePositionResponse, + DesktopMouseButton, + DesktopMouseMoveRequest, + DesktopMouseClickRequest, + DesktopMouseDownRequest, + DesktopMouseUpRequest, + DesktopMouseDragRequest, + DesktopMouseScrollRequest, + DesktopKeyboardTypeRequest, + DesktopKeyboardPressRequest, + DesktopKeyModifiers, + DesktopKeyboardDownRequest, + DesktopKeyboardUpRequest, + DesktopActionResponse, + DesktopDisplayInfoResponse, + DesktopWindowInfo, + DesktopWindowListResponse, + DesktopRecordingStartRequest, + DesktopRecordingStatus, + DesktopRecordingInfo, + DesktopRecordingListResponse, + DesktopStreamStatusResponse, ServerStatus, ServerStatusInfo, AgentCapabilities, @@ -381,12 +504,14 @@ pub async fn shutdown_servers(state: &Arc) { FsActionResponse, FsUploadBatchResponse, ProcessConfig, + ProcessOwner, ProcessCreateRequest, ProcessRunRequest, ProcessRunResponse, ProcessState, ProcessInfo, ProcessListResponse, + ProcessListQuery, ProcessLogsStream, ProcessLogsQuery, ProcessLogEntry, @@ -438,6 +563,12 @@ impl From for ApiError { } } +impl From for ApiError { + fn from(value: DesktopProblem) -> Self { + Self::Problem(value.to_problem_details()) + } +} + impl IntoResponse for ApiError { fn into_response(self) -> Response { let problem = match &self { @@ -476,6 +607,628 @@ async fn get_v1_health() -> Json { }) } +/// Get desktop runtime status. +/// +/// Returns the current desktop runtime state, dependency status, active +/// display metadata, and supervised process information. 
+#[utoipa::path( + get, + path = "/v1/desktop/status", + tag = "v1", + responses( + (status = 200, description = "Desktop runtime status", body = DesktopStatusResponse), + (status = 401, description = "Authentication required", body = ProblemDetails) + ) +)] +async fn get_v1_desktop_status( + State(state): State>, +) -> Result, ApiError> { + Ok(Json(state.desktop_runtime().status().await)) +} + +/// Start the private desktop runtime. +/// +/// Lazily launches the managed Xvfb/openbox stack, validates display health, +/// and returns the resulting desktop status snapshot. +#[utoipa::path( + post, + path = "/v1/desktop/start", + tag = "v1", + request_body = DesktopStartRequest, + responses( + (status = 200, description = "Desktop runtime status after start", body = DesktopStatusResponse), + (status = 400, description = "Invalid desktop start request", body = ProblemDetails), + (status = 409, description = "Desktop runtime is already transitioning", body = ProblemDetails), + (status = 501, description = "Desktop API unsupported on this platform", body = ProblemDetails), + (status = 503, description = "Desktop runtime could not be started", body = ProblemDetails) + ) +)] +async fn post_v1_desktop_start( + State(state): State>, + Json(body): Json, +) -> Result, ApiError> { + let status = state.desktop_runtime().start(body).await?; + Ok(Json(status)) +} + +/// Stop the private desktop runtime. +/// +/// Terminates the managed openbox/Xvfb/dbus processes owned by the desktop +/// runtime and returns the resulting status snapshot. 
+#[utoipa::path( + post, + path = "/v1/desktop/stop", + tag = "v1", + responses( + (status = 200, description = "Desktop runtime status after stop", body = DesktopStatusResponse), + (status = 409, description = "Desktop runtime is already transitioning", body = ProblemDetails) + ) +)] +async fn post_v1_desktop_stop( + State(state): State>, +) -> Result, ApiError> { + let status = state.desktop_runtime().stop().await?; + Ok(Json(status)) +} + +/// Capture a full desktop screenshot. +/// +/// Performs a health-gated full-frame screenshot of the managed desktop and +/// returns the requested image bytes. +#[utoipa::path( + get, + path = "/v1/desktop/screenshot", + tag = "v1", + params(DesktopScreenshotQuery), + responses( + (status = 200, description = "Desktop screenshot as image bytes"), + (status = 400, description = "Invalid screenshot query", body = ProblemDetails), + (status = 409, description = "Desktop runtime is not ready", body = ProblemDetails), + (status = 502, description = "Desktop runtime health or screenshot capture failed", body = ProblemDetails) + ) +)] +async fn get_v1_desktop_screenshot( + State(state): State>, + Query(query): Query, +) -> Result { + let screenshot = state.desktop_runtime().screenshot(query).await?; + Ok(( + [(header::CONTENT_TYPE, screenshot.content_type)], + Bytes::from(screenshot.bytes), + ) + .into_response()) +} + +/// Capture a desktop screenshot region. +/// +/// Performs a health-gated screenshot crop against the managed desktop and +/// returns the requested region image bytes. 
+#[utoipa::path( + get, + path = "/v1/desktop/screenshot/region", + tag = "v1", + params(DesktopRegionScreenshotQuery), + responses( + (status = 200, description = "Desktop screenshot region as image bytes"), + (status = 400, description = "Invalid screenshot region", body = ProblemDetails), + (status = 409, description = "Desktop runtime is not ready", body = ProblemDetails), + (status = 502, description = "Desktop runtime health or screenshot capture failed", body = ProblemDetails) + ) +)] +async fn get_v1_desktop_screenshot_region( + State(state): State>, + Query(query): Query, +) -> Result { + let screenshot = state.desktop_runtime().screenshot_region(query).await?; + Ok(( + [(header::CONTENT_TYPE, screenshot.content_type)], + Bytes::from(screenshot.bytes), + ) + .into_response()) +} + +/// Get the current desktop mouse position. +/// +/// Performs a health-gated mouse position query against the managed desktop. +#[utoipa::path( + get, + path = "/v1/desktop/mouse/position", + tag = "v1", + responses( + (status = 200, description = "Desktop mouse position", body = DesktopMousePositionResponse), + (status = 409, description = "Desktop runtime is not ready", body = ProblemDetails), + (status = 502, description = "Desktop runtime health or input check failed", body = ProblemDetails) + ) +)] +async fn get_v1_desktop_mouse_position( + State(state): State>, +) -> Result, ApiError> { + let position = state.desktop_runtime().mouse_position().await?; + Ok(Json(position)) +} + +/// Move the desktop mouse. +/// +/// Performs a health-gated absolute pointer move on the managed desktop and +/// returns the resulting mouse position. 
+#[utoipa::path( + post, + path = "/v1/desktop/mouse/move", + tag = "v1", + request_body = DesktopMouseMoveRequest, + responses( + (status = 200, description = "Desktop mouse position after move", body = DesktopMousePositionResponse), + (status = 400, description = "Invalid mouse move request", body = ProblemDetails), + (status = 409, description = "Desktop runtime is not ready", body = ProblemDetails), + (status = 502, description = "Desktop runtime health or input failed", body = ProblemDetails) + ) +)] +async fn post_v1_desktop_mouse_move( + State(state): State>, + Json(body): Json, +) -> Result, ApiError> { + let position = state.desktop_runtime().move_mouse(body).await?; + Ok(Json(position)) +} + +/// Click on the desktop. +/// +/// Performs a health-gated pointer move and click against the managed desktop +/// and returns the resulting mouse position. +#[utoipa::path( + post, + path = "/v1/desktop/mouse/click", + tag = "v1", + request_body = DesktopMouseClickRequest, + responses( + (status = 200, description = "Desktop mouse position after click", body = DesktopMousePositionResponse), + (status = 400, description = "Invalid mouse click request", body = ProblemDetails), + (status = 409, description = "Desktop runtime is not ready", body = ProblemDetails), + (status = 502, description = "Desktop runtime health or input failed", body = ProblemDetails) + ) +)] +async fn post_v1_desktop_mouse_click( + State(state): State>, + Json(body): Json, +) -> Result, ApiError> { + let position = state.desktop_runtime().click_mouse(body).await?; + Ok(Json(position)) +} + +/// Press and hold a desktop mouse button. +/// +/// Performs a health-gated optional pointer move followed by `xdotool mousedown` +/// and returns the resulting mouse position. 
+#[utoipa::path( + post, + path = "/v1/desktop/mouse/down", + tag = "v1", + request_body = DesktopMouseDownRequest, + responses( + (status = 200, description = "Desktop mouse position after button press", body = DesktopMousePositionResponse), + (status = 400, description = "Invalid mouse down request", body = ProblemDetails), + (status = 409, description = "Desktop runtime is not ready", body = ProblemDetails), + (status = 502, description = "Desktop runtime health or input failed", body = ProblemDetails) + ) +)] +async fn post_v1_desktop_mouse_down( + State(state): State>, + Json(body): Json, +) -> Result, ApiError> { + let position = state.desktop_runtime().mouse_down(body).await?; + Ok(Json(position)) +} + +/// Release a desktop mouse button. +/// +/// Performs a health-gated optional pointer move followed by `xdotool mouseup` +/// and returns the resulting mouse position. +#[utoipa::path( + post, + path = "/v1/desktop/mouse/up", + tag = "v1", + request_body = DesktopMouseUpRequest, + responses( + (status = 200, description = "Desktop mouse position after button release", body = DesktopMousePositionResponse), + (status = 400, description = "Invalid mouse up request", body = ProblemDetails), + (status = 409, description = "Desktop runtime is not ready", body = ProblemDetails), + (status = 502, description = "Desktop runtime health or input failed", body = ProblemDetails) + ) +)] +async fn post_v1_desktop_mouse_up( + State(state): State>, + Json(body): Json, +) -> Result, ApiError> { + let position = state.desktop_runtime().mouse_up(body).await?; + Ok(Json(position)) +} + +/// Drag the desktop mouse. +/// +/// Performs a health-gated drag gesture against the managed desktop and +/// returns the resulting mouse position. 
+#[utoipa::path( + post, + path = "/v1/desktop/mouse/drag", + tag = "v1", + request_body = DesktopMouseDragRequest, + responses( + (status = 200, description = "Desktop mouse position after drag", body = DesktopMousePositionResponse), + (status = 400, description = "Invalid mouse drag request", body = ProblemDetails), + (status = 409, description = "Desktop runtime is not ready", body = ProblemDetails), + (status = 502, description = "Desktop runtime health or input failed", body = ProblemDetails) + ) +)] +async fn post_v1_desktop_mouse_drag( + State(state): State>, + Json(body): Json, +) -> Result, ApiError> { + let position = state.desktop_runtime().drag_mouse(body).await?; + Ok(Json(position)) +} + +/// Scroll the desktop mouse wheel. +/// +/// Performs a health-gated scroll gesture at the requested coordinates and +/// returns the resulting mouse position. +#[utoipa::path( + post, + path = "/v1/desktop/mouse/scroll", + tag = "v1", + request_body = DesktopMouseScrollRequest, + responses( + (status = 200, description = "Desktop mouse position after scroll", body = DesktopMousePositionResponse), + (status = 400, description = "Invalid mouse scroll request", body = ProblemDetails), + (status = 409, description = "Desktop runtime is not ready", body = ProblemDetails), + (status = 502, description = "Desktop runtime health or input failed", body = ProblemDetails) + ) +)] +async fn post_v1_desktop_mouse_scroll( + State(state): State>, + Json(body): Json, +) -> Result, ApiError> { + let position = state.desktop_runtime().scroll_mouse(body).await?; + Ok(Json(position)) +} + +/// Type desktop keyboard text. +/// +/// Performs a health-gated `xdotool type` operation against the managed +/// desktop. 
+#[utoipa::path( + post, + path = "/v1/desktop/keyboard/type", + tag = "v1", + request_body = DesktopKeyboardTypeRequest, + responses( + (status = 200, description = "Desktop keyboard action result", body = DesktopActionResponse), + (status = 400, description = "Invalid keyboard type request", body = ProblemDetails), + (status = 409, description = "Desktop runtime is not ready", body = ProblemDetails), + (status = 502, description = "Desktop runtime health or input failed", body = ProblemDetails) + ) +)] +async fn post_v1_desktop_keyboard_type( + State(state): State>, + Json(body): Json, +) -> Result, ApiError> { + let response = state.desktop_runtime().type_text(body).await?; + Ok(Json(response)) +} + +/// Press a desktop keyboard shortcut. +/// +/// Performs a health-gated `xdotool key` operation against the managed +/// desktop. +#[utoipa::path( + post, + path = "/v1/desktop/keyboard/press", + tag = "v1", + request_body = DesktopKeyboardPressRequest, + responses( + (status = 200, description = "Desktop keyboard action result", body = DesktopActionResponse), + (status = 400, description = "Invalid keyboard press request", body = ProblemDetails), + (status = 409, description = "Desktop runtime is not ready", body = ProblemDetails), + (status = 502, description = "Desktop runtime health or input failed", body = ProblemDetails) + ) +)] +async fn post_v1_desktop_keyboard_press( + State(state): State>, + Json(body): Json, +) -> Result, ApiError> { + let response = state.desktop_runtime().press_key(body).await?; + Ok(Json(response)) +} + +/// Press and hold a desktop keyboard key. +/// +/// Performs a health-gated `xdotool keydown` operation against the managed +/// desktop. 
+#[utoipa::path( + post, + path = "/v1/desktop/keyboard/down", + tag = "v1", + request_body = DesktopKeyboardDownRequest, + responses( + (status = 200, description = "Desktop keyboard action result", body = DesktopActionResponse), + (status = 400, description = "Invalid keyboard down request", body = ProblemDetails), + (status = 409, description = "Desktop runtime is not ready", body = ProblemDetails), + (status = 502, description = "Desktop runtime health or input failed", body = ProblemDetails) + ) +)] +async fn post_v1_desktop_keyboard_down( + State(state): State>, + Json(body): Json, +) -> Result, ApiError> { + let response = state.desktop_runtime().key_down(body).await?; + Ok(Json(response)) +} + +/// Release a desktop keyboard key. +/// +/// Performs a health-gated `xdotool keyup` operation against the managed +/// desktop. +#[utoipa::path( + post, + path = "/v1/desktop/keyboard/up", + tag = "v1", + request_body = DesktopKeyboardUpRequest, + responses( + (status = 200, description = "Desktop keyboard action result", body = DesktopActionResponse), + (status = 400, description = "Invalid keyboard up request", body = ProblemDetails), + (status = 409, description = "Desktop runtime is not ready", body = ProblemDetails), + (status = 502, description = "Desktop runtime health or input failed", body = ProblemDetails) + ) +)] +async fn post_v1_desktop_keyboard_up( + State(state): State>, + Json(body): Json, +) -> Result, ApiError> { + let response = state.desktop_runtime().key_up(body).await?; + Ok(Json(response)) +} + +/// Get desktop display information. +/// +/// Performs a health-gated display query against the managed desktop and +/// returns the current display identifier and resolution. 
+#[utoipa::path( + get, + path = "/v1/desktop/display/info", + tag = "v1", + responses( + (status = 200, description = "Desktop display information", body = DesktopDisplayInfoResponse), + (status = 409, description = "Desktop runtime is not ready", body = ProblemDetails), + (status = 503, description = "Desktop runtime health or display query failed", body = ProblemDetails) + ) +)] +async fn get_v1_desktop_display_info( + State(state): State>, +) -> Result, ApiError> { + let info = state.desktop_runtime().display_info().await?; + Ok(Json(info)) +} + +/// List visible desktop windows. +/// +/// Performs a health-gated visible-window enumeration against the managed +/// desktop and returns the current window metadata. +#[utoipa::path( + get, + path = "/v1/desktop/windows", + tag = "v1", + responses( + (status = 200, description = "Visible desktop windows", body = DesktopWindowListResponse), + (status = 409, description = "Desktop runtime is not ready", body = ProblemDetails), + (status = 503, description = "Desktop runtime health or window query failed", body = ProblemDetails) + ) +)] +async fn get_v1_desktop_windows( + State(state): State>, +) -> Result, ApiError> { + let windows = state.desktop_runtime().list_windows().await?; + Ok(Json(windows)) +} + +/// Start desktop recording. +/// +/// Starts an ffmpeg x11grab recording against the managed desktop and returns +/// the created recording metadata. 
+#[utoipa::path( + post, + path = "/v1/desktop/recording/start", + tag = "v1", + request_body = DesktopRecordingStartRequest, + responses( + (status = 200, description = "Desktop recording started", body = DesktopRecordingInfo), + (status = 409, description = "Desktop runtime is not ready or a recording is already active", body = ProblemDetails), + (status = 502, description = "Desktop recording failed", body = ProblemDetails) + ) +)] +async fn post_v1_desktop_recording_start( + State(state): State>, + Json(body): Json, +) -> Result, ApiError> { + let recording = state.desktop_runtime().start_recording(body).await?; + Ok(Json(recording)) +} + +/// Stop desktop recording. +/// +/// Stops the active desktop recording and returns the finalized recording +/// metadata. +#[utoipa::path( + post, + path = "/v1/desktop/recording/stop", + tag = "v1", + responses( + (status = 200, description = "Desktop recording stopped", body = DesktopRecordingInfo), + (status = 409, description = "No active desktop recording", body = ProblemDetails), + (status = 502, description = "Desktop recording stop failed", body = ProblemDetails) + ) +)] +async fn post_v1_desktop_recording_stop( + State(state): State>, +) -> Result, ApiError> { + let recording = state.desktop_runtime().stop_recording().await?; + Ok(Json(recording)) +} + +/// List desktop recordings. +/// +/// Returns the current desktop recording catalog. +#[utoipa::path( + get, + path = "/v1/desktop/recordings", + tag = "v1", + responses( + (status = 200, description = "Desktop recordings", body = DesktopRecordingListResponse), + (status = 502, description = "Desktop recordings query failed", body = ProblemDetails) + ) +)] +async fn get_v1_desktop_recordings( + State(state): State>, +) -> Result, ApiError> { + let recordings = state.desktop_runtime().list_recordings().await?; + Ok(Json(recordings)) +} + +/// Get desktop recording metadata. +/// +/// Returns metadata for a single desktop recording. 
+#[utoipa::path( + get, + path = "/v1/desktop/recordings/{id}", + tag = "v1", + params( + ("id" = String, Path, description = "Desktop recording ID") + ), + responses( + (status = 200, description = "Desktop recording metadata", body = DesktopRecordingInfo), + (status = 404, description = "Unknown desktop recording", body = ProblemDetails) + ) +)] +async fn get_v1_desktop_recording( + State(state): State>, + Path(id): Path, +) -> Result, ApiError> { + let recording = state.desktop_runtime().get_recording(&id).await?; + Ok(Json(recording)) +} + +/// Download a desktop recording. +/// +/// Serves the recorded MP4 bytes for a completed desktop recording. +#[utoipa::path( + get, + path = "/v1/desktop/recordings/{id}/download", + tag = "v1", + params( + ("id" = String, Path, description = "Desktop recording ID") + ), + responses( + (status = 200, description = "Desktop recording as MP4 bytes"), + (status = 404, description = "Unknown desktop recording", body = ProblemDetails) + ) +)] +async fn get_v1_desktop_recording_download( + State(state): State>, + Path(id): Path, +) -> Result { + let path = state.desktop_runtime().recording_download_path(&id).await?; + let bytes = tokio::fs::read(&path) + .await + .map_err(|err| SandboxError::StreamError { + message: format!("failed to read desktop recording {}: {err}", path.display()), + })?; + Ok(([(header::CONTENT_TYPE, "video/mp4")], Bytes::from(bytes)).into_response()) +} + +/// Delete a desktop recording. +/// +/// Removes a completed desktop recording and its file from disk. 
+#[utoipa::path( + delete, + path = "/v1/desktop/recordings/{id}", + tag = "v1", + params( + ("id" = String, Path, description = "Desktop recording ID") + ), + responses( + (status = 204, description = "Desktop recording deleted"), + (status = 404, description = "Unknown desktop recording", body = ProblemDetails), + (status = 409, description = "Desktop recording is still active", body = ProblemDetails) + ) +)] +async fn delete_v1_desktop_recording( + State(state): State>, + Path(id): Path, +) -> Result { + state.desktop_runtime().delete_recording(&id).await?; + Ok(StatusCode::NO_CONTENT) +} + +/// Start desktop streaming. +/// +/// Enables desktop websocket streaming for the managed desktop. +#[utoipa::path( + post, + path = "/v1/desktop/stream/start", + tag = "v1", + responses( + (status = 200, description = "Desktop streaming started", body = DesktopStreamStatusResponse) + ) +)] +async fn post_v1_desktop_stream_start( + State(state): State>, +) -> Result, ApiError> { + Ok(Json(state.desktop_runtime().start_streaming().await)) +} + +/// Stop desktop streaming. +/// +/// Disables desktop websocket streaming for the managed desktop. +#[utoipa::path( + post, + path = "/v1/desktop/stream/stop", + tag = "v1", + responses( + (status = 200, description = "Desktop streaming stopped", body = DesktopStreamStatusResponse) + ) +)] +async fn post_v1_desktop_stream_stop( + State(state): State>, +) -> Result, ApiError> { + Ok(Json(state.desktop_runtime().stop_streaming().await)) +} + +/// Open a desktop websocket streaming session. +/// +/// Upgrades the connection to a websocket that streams JPEG desktop frames and +/// accepts mouse and keyboard control frames. 
+#[utoipa::path( + get, + path = "/v1/desktop/stream/ws", + tag = "v1", + params( + ("access_token" = Option, Query, description = "Bearer token alternative for WS auth") + ), + responses( + (status = 101, description = "WebSocket upgraded"), + (status = 409, description = "Desktop runtime or streaming session is not ready", body = ProblemDetails), + (status = 502, description = "Desktop stream failed", body = ProblemDetails) + ) +)] +async fn get_v1_desktop_stream_ws( + State(state): State>, + Query(_query): Query, + ws: WebSocketUpgrade, +) -> Result { + state.desktop_runtime().ensure_streaming_active().await?; + Ok(ws + .on_upgrade(move |socket| desktop_stream_ws_session(socket, state.desktop_runtime())) + .into_response()) +} + #[utoipa::path( get, path = "/v1/agents", @@ -1238,6 +1991,8 @@ async fn post_v1_processes( env: body.env.into_iter().collect(), tty: body.tty, interactive: body.interactive, + owner: RuntimeProcessOwner::User, + restart_policy: None, }) .await?; @@ -1298,6 +2053,7 @@ async fn post_v1_processes_run( get, path = "/v1/processes", tag = "v1", + params(ProcessListQuery), responses( (status = 200, description = "List processes", body = ProcessListResponse), (status = 501, description = "Process API unsupported on this platform", body = ProblemDetails) @@ -1305,12 +2061,16 @@ async fn post_v1_processes_run( )] async fn get_v1_processes( State(state): State>, + Query(query): Query, ) -> Result, ApiError> { if !process_api_supported() { return Err(process_api_not_supported().into()); } - let snapshots = state.process_runtime().list_processes().await; + let snapshots = state + .process_runtime() + .list_processes(query.owner.map(into_runtime_process_owner)) + .await; Ok(Json(ProcessListResponse { processes: snapshots.into_iter().map(map_process_snapshot).collect(), })) @@ -1691,6 +2451,46 @@ enum TerminalClientFrame { Close, } +#[derive(Debug, Deserialize)] +#[serde(tag = "type", rename_all = "camelCase")] +enum DesktopStreamClientFrame { + 
MoveMouse { + x: i32, + y: i32, + }, + MouseDown { + #[serde(default)] + x: Option, + #[serde(default)] + y: Option, + #[serde(default)] + button: Option, + }, + MouseUp { + #[serde(default)] + x: Option, + #[serde(default)] + y: Option, + #[serde(default)] + button: Option, + }, + Scroll { + x: i32, + y: i32, + #[serde(default)] + delta_x: Option, + #[serde(default)] + delta_y: Option, + }, + KeyDown { + key: String, + }, + KeyUp { + key: String, + }, + Close, +} + async fn process_terminal_ws_session( mut socket: WebSocket, runtime: Arc, @@ -1803,6 +2603,133 @@ async fn process_terminal_ws_session( } } +async fn desktop_stream_ws_session(mut socket: WebSocket, desktop_runtime: Arc) { + let display_info = match desktop_runtime.display_info().await { + Ok(info) => info, + Err(err) => { + let _ = send_ws_error(&mut socket, &err.to_error_info().message).await; + let _ = socket.close().await; + return; + } + }; + + if send_ws_json( + &mut socket, + json!({ + "type": "ready", + "width": display_info.resolution.width, + "height": display_info.resolution.height, + }), + ) + .await + .is_err() + { + return; + } + + let mut frame_tick = tokio::time::interval(Duration::from_millis(100)); + + loop { + tokio::select! 
{ + ws_in = socket.recv() => { + match ws_in { + Some(Ok(Message::Text(text))) => { + match serde_json::from_str::(&text) { + Ok(DesktopStreamClientFrame::MoveMouse { x, y }) => { + if let Err(err) = desktop_runtime + .move_mouse(DesktopMouseMoveRequest { x, y }) + .await + { + let _ = send_ws_error(&mut socket, &err.to_error_info().message).await; + } + } + Ok(DesktopStreamClientFrame::MouseDown { x, y, button }) => { + if let Err(err) = desktop_runtime + .mouse_down(DesktopMouseDownRequest { x, y, button }) + .await + { + let _ = send_ws_error(&mut socket, &err.to_error_info().message).await; + } + } + Ok(DesktopStreamClientFrame::MouseUp { x, y, button }) => { + if let Err(err) = desktop_runtime + .mouse_up(DesktopMouseUpRequest { x, y, button }) + .await + { + let _ = send_ws_error(&mut socket, &err.to_error_info().message).await; + } + } + Ok(DesktopStreamClientFrame::Scroll { x, y, delta_x, delta_y }) => { + if let Err(err) = desktop_runtime + .scroll_mouse(DesktopMouseScrollRequest { + x, + y, + delta_x, + delta_y, + }) + .await + { + let _ = send_ws_error(&mut socket, &err.to_error_info().message).await; + } + } + Ok(DesktopStreamClientFrame::KeyDown { key }) => { + if let Err(err) = desktop_runtime + .key_down(DesktopKeyboardDownRequest { key }) + .await + { + let _ = send_ws_error(&mut socket, &err.to_error_info().message).await; + } + } + Ok(DesktopStreamClientFrame::KeyUp { key }) => { + if let Err(err) = desktop_runtime + .key_up(DesktopKeyboardUpRequest { key }) + .await + { + let _ = send_ws_error(&mut socket, &err.to_error_info().message).await; + } + } + Ok(DesktopStreamClientFrame::Close) => { + let _ = socket.close().await; + break; + } + Err(err) => { + let _ = send_ws_error(&mut socket, &format!("invalid desktop stream frame: {err}")).await; + } + } + } + Some(Ok(Message::Ping(payload))) => { + let _ = socket.send(Message::Pong(payload)).await; + } + Some(Ok(Message::Close(_))) | None => break, + Some(Ok(Message::Binary(_))) | 
Some(Ok(Message::Pong(_))) => {} + Some(Err(_)) => break, + } + } + _ = frame_tick.tick() => { + let frame = desktop_runtime + .screenshot(DesktopScreenshotQuery { + format: Some(DesktopScreenshotFormat::Jpeg), + quality: Some(60), + scale: Some(1.0), + }) + .await; + match frame { + Ok(frame) => { + if socket.send(Message::Binary(frame.bytes.into())).await.is_err() { + break; + } + } + Err(err) => { + let _ = send_ws_error(&mut socket, &err.to_error_info().message).await; + let _ = socket.close().await; + break; + } + } + } + } + } +} + async fn send_ws_json(socket: &mut WebSocket, payload: Value) -> Result<(), ()> { socket .send(Message::Text( @@ -2171,6 +3098,14 @@ fn into_runtime_process_config(config: ProcessConfig) -> ProcessRuntimeConfig { } } +fn into_runtime_process_owner(owner: ProcessOwner) -> RuntimeProcessOwner { + match owner { + ProcessOwner::User => RuntimeProcessOwner::User, + ProcessOwner::Desktop => RuntimeProcessOwner::Desktop, + ProcessOwner::System => RuntimeProcessOwner::System, + } +} + fn map_process_snapshot(snapshot: ProcessSnapshot) -> ProcessInfo { ProcessInfo { id: snapshot.id, @@ -2179,6 +3114,11 @@ fn map_process_snapshot(snapshot: ProcessSnapshot) -> ProcessInfo { cwd: snapshot.cwd, tty: snapshot.tty, interactive: snapshot.interactive, + owner: match snapshot.owner { + RuntimeProcessOwner::User => ProcessOwner::User, + RuntimeProcessOwner::Desktop => ProcessOwner::Desktop, + RuntimeProcessOwner::System => ProcessOwner::System, + }, status: match snapshot.status { ProcessStatus::Running => ProcessState::Running, ProcessStatus::Exited => ProcessState::Exited, diff --git a/server/packages/sandbox-agent/src/router/support.rs b/server/packages/sandbox-agent/src/router/support.rs index 0e7a7b1..6bcc103 100644 --- a/server/packages/sandbox-agent/src/router/support.rs +++ b/server/packages/sandbox-agent/src/router/support.rs @@ -33,7 +33,8 @@ pub(super) async fn require_token( .and_then(|value| value.to_str().ok()) .and_then(|value| 
value.strip_prefix("Bearer ")); - let allow_query_token = request.uri().path().ends_with("/terminal/ws"); + let allow_query_token = request.uri().path().ends_with("/terminal/ws") + || request.uri().path().ends_with("/stream/ws"); let query_token = if allow_query_token { request .uri() diff --git a/server/packages/sandbox-agent/src/router/types.rs b/server/packages/sandbox-agent/src/router/types.rs index 6d40e2a..218ad77 100644 --- a/server/packages/sandbox-agent/src/router/types.rs +++ b/server/packages/sandbox-agent/src/router/types.rs @@ -425,6 +425,14 @@ pub enum ProcessState { Exited, } +#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq)] +#[serde(rename_all = "lowercase")] +pub enum ProcessOwner { + User, + Desktop, + System, +} + #[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema)] #[serde(rename_all = "camelCase")] pub struct ProcessInfo { @@ -435,6 +443,7 @@ pub struct ProcessInfo { pub cwd: Option, pub tty: bool, pub interactive: bool, + pub owner: ProcessOwner, pub status: ProcessState, #[serde(default, skip_serializing_if = "Option::is_none")] pub pid: Option, @@ -451,6 +460,13 @@ pub struct ProcessListResponse { pub processes: Vec, } +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, ToSchema, IntoParams)] +#[serde(rename_all = "camelCase")] +pub struct ProcessListQuery { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub owner: Option, +} + #[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema, ToSchema, PartialEq, Eq)] #[serde(rename_all = "lowercase")] pub enum ProcessLogsStream { diff --git a/server/packages/sandbox-agent/tests/support/docker.rs b/server/packages/sandbox-agent/tests/support/docker.rs new file mode 100644 index 0000000..9305d95 --- /dev/null +++ b/server/packages/sandbox-agent/tests/support/docker.rs @@ -0,0 +1,593 @@ +use std::collections::{BTreeMap, BTreeSet}; +use std::fs; +use std::io::{Read, Write}; +use std::net::TcpStream; +use 
std::path::{Path, PathBuf}; +use std::process::Command; +use std::sync::atomic::{AtomicU64, Ordering}; +use std::sync::OnceLock; +use std::thread; +use std::time::{Duration, SystemTime, UNIX_EPOCH}; + +use sandbox_agent::router::AuthConfig; +use serial_test::serial; +use tempfile::TempDir; + +const CONTAINER_PORT: u16 = 3000; +const DEFAULT_PATH: &str = "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"; +const DEFAULT_IMAGE_TAG: &str = "sandbox-agent-test:dev"; +const STANDARD_PATHS: &[&str] = &[ + "/usr/local/sbin", + "/usr/local/bin", + "/usr/sbin", + "/usr/bin", + "/sbin", + "/bin", +]; + +static IMAGE_TAG: OnceLock = OnceLock::new(); +static DOCKER_BIN: OnceLock = OnceLock::new(); +static CONTAINER_COUNTER: AtomicU64 = AtomicU64::new(0); + +#[derive(Clone)] +pub struct DockerApp { + base_url: String, +} + +impl DockerApp { + pub fn http_url(&self, path: &str) -> String { + format!("{}{}", self.base_url, path) + } + + pub fn ws_url(&self, path: &str) -> String { + let suffix = self + .base_url + .strip_prefix("http://") + .unwrap_or(&self.base_url); + format!("ws://{suffix}{path}") + } +} + +pub struct TestApp { + pub app: DockerApp, + install_dir: PathBuf, + _root: TempDir, + container_id: String, +} + +#[derive(Default)] +pub struct TestAppOptions { + pub env: BTreeMap, + pub extra_paths: Vec, + pub replace_path: bool, +} + +impl TestApp { + pub fn new(auth: AuthConfig) -> Self { + Self::with_setup(auth, |_| {}) + } + + pub fn with_setup(auth: AuthConfig, setup: F) -> Self + where + F: FnOnce(&Path), + { + Self::with_options(auth, TestAppOptions::default(), setup) + } + + pub fn with_options(auth: AuthConfig, options: TestAppOptions, setup: F) -> Self + where + F: FnOnce(&Path), + { + let root = tempfile::tempdir().expect("create docker test root"); + let layout = TestLayout::new(root.path()); + layout.create(); + setup(&layout.install_dir); + + let container_id = unique_container_id(); + let image = ensure_test_image(); + let env = 
build_env(&layout, &auth, &options); + let mounts = build_mounts(root.path(), &env); + let base_url = run_container(&container_id, &image, &mounts, &env, &auth); + + Self { + app: DockerApp { base_url }, + install_dir: layout.install_dir, + _root: root, + container_id, + } + } + + pub fn install_path(&self) -> &Path { + &self.install_dir + } + + pub fn root_path(&self) -> &Path { + self._root.path() + } +} + +impl Drop for TestApp { + fn drop(&mut self) { + let _ = Command::new(docker_bin()) + .args(["rm", "-f", &self.container_id]) + .output(); + } +} + +pub struct LiveServer { + base_url: String, +} + +impl LiveServer { + pub async fn spawn(app: DockerApp) -> Self { + Self { + base_url: app.base_url, + } + } + + pub fn http_url(&self, path: &str) -> String { + format!("{}{}", self.base_url, path) + } + + pub fn ws_url(&self, path: &str) -> String { + let suffix = self + .base_url + .strip_prefix("http://") + .unwrap_or(&self.base_url); + format!("ws://{suffix}{path}") + } + + pub async fn shutdown(self) {} +} + +struct TestLayout { + home: PathBuf, + xdg_data_home: PathBuf, + xdg_state_home: PathBuf, + appdata: PathBuf, + local_appdata: PathBuf, + install_dir: PathBuf, +} + +impl TestLayout { + fn new(root: &Path) -> Self { + let home = root.join("home"); + let xdg_data_home = root.join("xdg-data"); + let xdg_state_home = root.join("xdg-state"); + let appdata = root.join("appdata").join("Roaming"); + let local_appdata = root.join("appdata").join("Local"); + let install_dir = xdg_data_home.join("sandbox-agent").join("bin"); + Self { + home, + xdg_data_home, + xdg_state_home, + appdata, + local_appdata, + install_dir, + } + } + + fn create(&self) { + for dir in [ + &self.home, + &self.xdg_data_home, + &self.xdg_state_home, + &self.appdata, + &self.local_appdata, + &self.install_dir, + ] { + fs::create_dir_all(dir).expect("create docker test dir"); + } + } +} + +fn ensure_test_image() -> String { + IMAGE_TAG + .get_or_init(|| { + let repo_root = repo_root(); + let 
image_tag = std::env::var("SANDBOX_AGENT_TEST_IMAGE") + .unwrap_or_else(|_| DEFAULT_IMAGE_TAG.to_string()); + let output = Command::new(docker_bin()) + .args(["build", "--tag", &image_tag, "--file"]) + .arg( + repo_root + .join("docker") + .join("test-agent") + .join("Dockerfile"), + ) + .arg(&repo_root) + .output() + .expect("build sandbox-agent test image"); + if !output.status.success() { + panic!( + "failed to build sandbox-agent test image: {}", + String::from_utf8_lossy(&output.stderr) + ); + } + image_tag + }) + .clone() +} + +fn build_env( + layout: &TestLayout, + auth: &AuthConfig, + options: &TestAppOptions, +) -> BTreeMap { + let mut env = BTreeMap::new(); + env.insert( + "HOME".to_string(), + layout.home.to_string_lossy().to_string(), + ); + env.insert( + "USERPROFILE".to_string(), + layout.home.to_string_lossy().to_string(), + ); + env.insert( + "XDG_DATA_HOME".to_string(), + layout.xdg_data_home.to_string_lossy().to_string(), + ); + env.insert( + "XDG_STATE_HOME".to_string(), + layout.xdg_state_home.to_string_lossy().to_string(), + ); + env.insert( + "APPDATA".to_string(), + layout.appdata.to_string_lossy().to_string(), + ); + env.insert( + "LOCALAPPDATA".to_string(), + layout.local_appdata.to_string_lossy().to_string(), + ); + + for (key, value) in std::env::vars() { + if key == "PATH" { + continue; + } + if key == "XDG_STATE_HOME" || key == "HOME" || key == "USERPROFILE" { + continue; + } + if key.starts_with("SANDBOX_AGENT_") || key.starts_with("OPENCODE_COMPAT_") { + env.insert(key.clone(), rewrite_localhost_url(&key, &value)); + } + } + + if let Some(token) = auth.token.as_ref() { + env.insert("SANDBOX_AGENT_TEST_AUTH_TOKEN".to_string(), token.clone()); + } + + if options.replace_path { + env.insert( + "PATH".to_string(), + options.env.get("PATH").cloned().unwrap_or_default(), + ); + } else { + let mut custom_path_entries = + custom_path_entries(layout.install_dir.parent().expect("install base")); + 
custom_path_entries.extend(explicit_path_entries()); + custom_path_entries.extend( + options + .extra_paths + .iter() + .filter(|path| path.is_absolute() && path.exists()) + .cloned(), + ); + custom_path_entries.sort(); + custom_path_entries.dedup(); + + if custom_path_entries.is_empty() { + env.insert("PATH".to_string(), DEFAULT_PATH.to_string()); + } else { + let joined = custom_path_entries + .iter() + .map(|path| path.to_string_lossy().to_string()) + .collect::>() + .join(":"); + env.insert("PATH".to_string(), format!("{joined}:{DEFAULT_PATH}")); + } + } + + for (key, value) in &options.env { + if key == "PATH" { + continue; + } + env.insert(key.clone(), rewrite_localhost_url(key, value)); + } + + env +} + +fn build_mounts(root: &Path, env: &BTreeMap) -> Vec { + let mut mounts = BTreeSet::new(); + mounts.insert(root.to_path_buf()); + + for key in [ + "HOME", + "USERPROFILE", + "XDG_DATA_HOME", + "XDG_STATE_HOME", + "APPDATA", + "LOCALAPPDATA", + "SANDBOX_AGENT_DESKTOP_FAKE_STATE_DIR", + ] { + if let Some(value) = env.get(key) { + let path = PathBuf::from(value); + if path.is_absolute() { + mounts.insert(path); + } + } + } + + if let Some(path_value) = env.get("PATH") { + for entry in path_value.split(':') { + if entry.is_empty() || STANDARD_PATHS.contains(&entry) { + continue; + } + let path = PathBuf::from(entry); + if path.is_absolute() && path.exists() { + mounts.insert(path); + } + } + } + + mounts.into_iter().collect() +} + +fn run_container( + container_id: &str, + image: &str, + mounts: &[PathBuf], + env: &BTreeMap, + auth: &AuthConfig, +) -> String { + let mut args = vec![ + "run".to_string(), + "-d".to_string(), + "--rm".to_string(), + "--name".to_string(), + container_id.to_string(), + "-p".to_string(), + format!("127.0.0.1::{CONTAINER_PORT}"), + ]; + + #[cfg(unix)] + { + args.push("--user".to_string()); + args.push(format!("{}:{}", unsafe { libc::geteuid() }, unsafe { + libc::getegid() + })); + } + + if cfg!(target_os = "linux") { + 
args.push("--add-host".to_string()); + args.push("host.docker.internal:host-gateway".to_string()); + } + + for mount in mounts { + args.push("-v".to_string()); + args.push(format!("{}:{}", mount.display(), mount.display())); + } + + for (key, value) in env { + args.push("-e".to_string()); + args.push(format!("{key}={value}")); + } + + args.push(image.to_string()); + args.push("server".to_string()); + args.push("--host".to_string()); + args.push("0.0.0.0".to_string()); + args.push("--port".to_string()); + args.push(CONTAINER_PORT.to_string()); + match auth.token.as_ref() { + Some(token) => { + args.push("--token".to_string()); + args.push(token.clone()); + } + None => args.push("--no-token".to_string()), + } + + let output = Command::new(docker_bin()) + .args(&args) + .output() + .expect("start docker test container"); + if !output.status.success() { + panic!( + "failed to start docker test container: {}", + String::from_utf8_lossy(&output.stderr) + ); + } + + let port_output = Command::new(docker_bin()) + .args(["port", container_id, &format!("{CONTAINER_PORT}/tcp")]) + .output() + .expect("resolve mapped docker port"); + if !port_output.status.success() { + panic!( + "failed to resolve docker test port: {}", + String::from_utf8_lossy(&port_output.stderr) + ); + } + + let mapping = String::from_utf8(port_output.stdout) + .expect("docker port utf8") + .trim() + .to_string(); + let host_port = mapping.rsplit(':').next().expect("mapped host port").trim(); + let base_url = format!("http://127.0.0.1:{host_port}"); + wait_for_health(&base_url, auth.token.as_deref()); + base_url +} + +fn wait_for_health(base_url: &str, token: Option<&str>) { + let started = SystemTime::now(); + loop { + if probe_health(base_url, token) { + return; + } + + if started + .elapsed() + .unwrap_or_else(|_| Duration::from_secs(0)) + .gt(&Duration::from_secs(30)) + { + panic!("timed out waiting for sandbox-agent docker test server"); + } + thread::sleep(Duration::from_millis(200)); + } +} + +fn 
probe_health(base_url: &str, token: Option<&str>) -> bool { + let address = base_url.strip_prefix("http://").unwrap_or(base_url); + let mut stream = match TcpStream::connect(address) { + Ok(stream) => stream, + Err(_) => return false, + }; + let _ = stream.set_read_timeout(Some(Duration::from_secs(2))); + let _ = stream.set_write_timeout(Some(Duration::from_secs(2))); + + let mut request = + format!("GET /v1/health HTTP/1.1\r\nHost: {address}\r\nConnection: close\r\n"); + if let Some(token) = token { + request.push_str(&format!("Authorization: Bearer {token}\r\n")); + } + request.push_str("\r\n"); + + if stream.write_all(request.as_bytes()).is_err() { + return false; + } + + let mut response = String::new(); + if stream.read_to_string(&mut response).is_err() { + return false; + } + + response.starts_with("HTTP/1.1 200") || response.starts_with("HTTP/1.0 200") +} + +fn custom_path_entries(root: &Path) -> Vec { + let mut entries = Vec::new(); + if let Some(value) = std::env::var_os("PATH") { + for entry in std::env::split_paths(&value) { + if !entry.exists() { + continue; + } + if entry.starts_with(root) || entry.starts_with(std::env::temp_dir()) { + entries.push(entry); + } + } + } + entries.sort(); + entries.dedup(); + entries +} + +fn explicit_path_entries() -> Vec { + let mut entries = Vec::new(); + if let Some(value) = std::env::var_os("SANDBOX_AGENT_TEST_EXTRA_PATHS") { + for entry in std::env::split_paths(&value) { + if entry.is_absolute() && entry.exists() { + entries.push(entry); + } + } + } + entries +} + +fn rewrite_localhost_url(key: &str, value: &str) -> String { + if key.ends_with("_URL") || key.ends_with("_URI") { + return value + .replace("http://127.0.0.1", "http://host.docker.internal") + .replace("http://localhost", "http://host.docker.internal"); + } + value.to_string() +} + +fn unique_container_id() -> String { + let millis = SystemTime::now() + .duration_since(UNIX_EPOCH) + .map(|value| value.as_millis()) + .unwrap_or(0); + let counter = 
CONTAINER_COUNTER.fetch_add(1, Ordering::Relaxed); + format!( + "sandbox-agent-test-{}-{millis}-{counter}", + std::process::id() + ) +} + +fn repo_root() -> PathBuf { + PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("../../..") + .canonicalize() + .expect("repo root") +} + +fn docker_bin() -> &'static Path { + DOCKER_BIN + .get_or_init(|| { + if let Some(value) = std::env::var_os("SANDBOX_AGENT_TEST_DOCKER_BIN") { + let path = PathBuf::from(value); + if path.exists() { + return path; + } + } + + for candidate in [ + "/usr/local/bin/docker", + "/opt/homebrew/bin/docker", + "/usr/bin/docker", + ] { + let path = PathBuf::from(candidate); + if path.exists() { + return path; + } + } + + PathBuf::from("docker") + }) + .as_path() +} + +#[cfg(test)] +mod tests { + use super::*; + + struct EnvVarGuard { + key: &'static str, + old: Option, + } + + impl EnvVarGuard { + fn set(key: &'static str, value: &Path) -> Self { + let old = std::env::var_os(key); + std::env::set_var(key, value); + Self { key, old } + } + } + + impl Drop for EnvVarGuard { + fn drop(&mut self) { + match self.old.as_ref() { + Some(value) => std::env::set_var(self.key, value), + None => std::env::remove_var(self.key), + } + } + } + + #[test] + #[serial] + fn build_env_keeps_test_local_xdg_state_home() { + let root = tempfile::tempdir().expect("create docker support tempdir"); + let host_state = tempfile::tempdir().expect("create host xdg state tempdir"); + let _guard = EnvVarGuard::set("XDG_STATE_HOME", host_state.path()); + + let layout = TestLayout::new(root.path()); + layout.create(); + + let env = build_env(&layout, &AuthConfig::disabled(), &TestAppOptions::default()); + assert_eq!( + env.get("XDG_STATE_HOME"), + Some(&layout.xdg_state_home.to_string_lossy().to_string()) + ); + } +} diff --git a/server/packages/sandbox-agent/tests/v1_agent_process_matrix.rs b/server/packages/sandbox-agent/tests/v1_agent_process_matrix.rs index 029ca25..fc88c4c 100644 --- 
a/server/packages/sandbox-agent/tests/v1_agent_process_matrix.rs +++ b/server/packages/sandbox-agent/tests/v1_agent_process_matrix.rs @@ -1,37 +1,14 @@ use std::fs; use std::path::Path; -use axum::body::Body; -use axum::http::{Method, Request, StatusCode}; use futures::StreamExt; -use http_body_util::BodyExt; -use sandbox_agent::router::{build_router, AppState, AuthConfig}; -use sandbox_agent_agent_management::agents::AgentManager; +use reqwest::{Method, StatusCode}; +use sandbox_agent::router::AuthConfig; use serde_json::{json, Value}; -use tempfile::TempDir; -use tower::util::ServiceExt; -struct TestApp { - app: axum::Router, - _install_dir: TempDir, -} - -impl TestApp { - fn with_setup(setup: F) -> Self - where - F: FnOnce(&Path), - { - let install_dir = tempfile::tempdir().expect("create temp install dir"); - setup(install_dir.path()); - let manager = AgentManager::new(install_dir.path()).expect("create agent manager"); - let state = AppState::new(AuthConfig::disabled(), manager); - let app = build_router(state); - Self { - app, - _install_dir: install_dir, - } - } -} +#[path = "support/docker.rs"] +mod docker_support; +use docker_support::TestApp; fn write_executable(path: &Path, script: &str) { fs::write(path, script).expect("write executable"); @@ -101,28 +78,29 @@ fn setup_stub_agent_process_only(install_dir: &Path, agent: &str) { } async fn send_request( - app: &axum::Router, + app: &docker_support::DockerApp, method: Method, uri: &str, body: Option, ) -> (StatusCode, Vec) { - let mut builder = Request::builder().method(method).uri(uri); - let request_body = if let Some(body) = body { - builder = builder.header("content-type", "application/json"); - Body::from(body.to_string()) + let client = reqwest::Client::new(); + let response = if let Some(body) = body { + client + .request(method, app.http_url(uri)) + .header("content-type", "application/json") + .body(body.to_string()) + .send() + .await + .expect("request handled") } else { - Body::empty() + client 
+ .request(method, app.http_url(uri)) + .send() + .await + .expect("request handled") }; - - let request = builder.body(request_body).expect("build request"); - let response = app.clone().oneshot(request).await.expect("request handled"); let status = response.status(); - let bytes = response - .into_body() - .collect() - .await - .expect("collect body") - .to_bytes(); + let bytes = response.bytes().await.expect("collect body"); (status, bytes.to_vec()) } @@ -145,7 +123,7 @@ async fn agent_process_matrix_smoke_and_jsonrpc_conformance() { .chain(agent_process_only_agents.iter()) .copied() .collect(); - let test_app = TestApp::with_setup(|install_dir| { + let test_app = TestApp::with_setup(AuthConfig::disabled(), |install_dir| { for agent in native_agents { setup_stub_artifacts(install_dir, agent); } @@ -201,21 +179,15 @@ async fn agent_process_matrix_smoke_and_jsonrpc_conformance() { assert_eq!(new_json["id"], 2, "{agent}: session/new id"); assert_eq!(new_json["result"]["echoedMethod"], "session/new"); - let request = Request::builder() - .method(Method::GET) - .uri(format!("/v1/acp/{agent}-server")) - .body(Body::empty()) - .expect("build sse request"); - - let response = test_app - .app - .clone() - .oneshot(request) + let response = reqwest::Client::new() + .get(test_app.app.http_url(&format!("/v1/acp/{agent}-server"))) + .header("accept", "text/event-stream") + .send() .await .expect("sse response"); assert_eq!(response.status(), StatusCode::OK); - let mut stream = response.into_body().into_data_stream(); + let mut stream = response.bytes_stream(); let chunk = tokio::time::timeout(std::time::Duration::from_secs(5), async move { while let Some(item) = stream.next().await { let bytes = item.expect("sse chunk"); diff --git a/server/packages/sandbox-agent/tests/v1_api.rs b/server/packages/sandbox-agent/tests/v1_api.rs index fa572e6..02558a7 100644 --- a/server/packages/sandbox-agent/tests/v1_api.rs +++ b/server/packages/sandbox-agent/tests/v1_api.rs @@ -1,128 +1,19 
@@ use std::fs; use std::io::{Read, Write}; -use std::net::{SocketAddr, TcpListener, TcpStream}; +use std::net::{TcpListener, TcpStream}; use std::path::Path; use std::time::Duration; -use axum::body::Body; -use axum::http::{header, HeaderMap, Method, Request, StatusCode}; -use axum::Router; use futures::StreamExt; -use http_body_util::BodyExt; -use sandbox_agent::router::{build_router, AppState, AuthConfig}; -use sandbox_agent_agent_management::agents::AgentManager; +use reqwest::header::{self, HeaderMap, HeaderName, HeaderValue}; +use reqwest::{Method, StatusCode}; +use sandbox_agent::router::AuthConfig; use serde_json::{json, Value}; use serial_test::serial; -use tempfile::TempDir; -use tokio::sync::oneshot; -use tokio::task::JoinHandle; -use tower::util::ServiceExt; -struct TestApp { - app: Router, - install_dir: TempDir, -} - -impl TestApp { - fn new(auth: AuthConfig) -> Self { - Self::with_setup(auth, |_| {}) - } - - fn with_setup(auth: AuthConfig, setup: F) -> Self - where - F: FnOnce(&Path), - { - let install_dir = tempfile::tempdir().expect("create temp install dir"); - setup(install_dir.path()); - let manager = AgentManager::new(install_dir.path()).expect("create agent manager"); - let state = AppState::new(auth, manager); - let app = build_router(state); - Self { app, install_dir } - } - - fn install_path(&self) -> &Path { - self.install_dir.path() - } -} - -struct EnvVarGuard { - key: &'static str, - previous: Option, -} - -struct LiveServer { - address: SocketAddr, - shutdown_tx: Option>, - task: JoinHandle<()>, -} - -impl LiveServer { - async fn spawn(app: Router) -> Self { - let listener = tokio::net::TcpListener::bind("127.0.0.1:0") - .await - .expect("bind live server"); - let address = listener.local_addr().expect("live server address"); - let (shutdown_tx, shutdown_rx) = oneshot::channel::<()>(); - - let task = tokio::spawn(async move { - let server = - axum::serve(listener, app.into_make_service()).with_graceful_shutdown(async { - let _ = 
shutdown_rx.await; - }); - - let _ = server.await; - }); - - Self { - address, - shutdown_tx: Some(shutdown_tx), - task, - } - } - - fn http_url(&self, path: &str) -> String { - format!("http://{}{}", self.address, path) - } - - fn ws_url(&self, path: &str) -> String { - format!("ws://{}{}", self.address, path) - } - - async fn shutdown(mut self) { - if let Some(shutdown_tx) = self.shutdown_tx.take() { - let _ = shutdown_tx.send(()); - } - - let _ = tokio::time::timeout(Duration::from_secs(3), async { - let _ = self.task.await; - }) - .await; - } -} - -impl EnvVarGuard { - fn set(key: &'static str, value: &str) -> Self { - let previous = std::env::var_os(key); - std::env::set_var(key, value); - Self { key, previous } - } - - fn set_os(key: &'static str, value: &std::ffi::OsStr) -> Self { - let previous = std::env::var_os(key); - std::env::set_var(key, value); - Self { key, previous } - } -} - -impl Drop for EnvVarGuard { - fn drop(&mut self) { - if let Some(previous) = self.previous.as_ref() { - std::env::set_var(self.key, previous); - } else { - std::env::remove_var(self.key); - } - } -} +#[path = "support/docker.rs"] +mod docker_support; +use docker_support::{LiveServer, TestApp}; fn write_executable(path: &Path, script: &str) { fs::write(path, script).expect("write executable"); @@ -168,17 +59,18 @@ exit 0 } fn serve_registry_once(document: Value) -> String { - let listener = TcpListener::bind("127.0.0.1:0").expect("bind registry server"); - let address = listener.local_addr().expect("registry address"); + let listener = TcpListener::bind("0.0.0.0:0").expect("bind registry server"); + let port = listener.local_addr().expect("registry address").port(); let body = document.to_string(); - std::thread::spawn(move || { - if let Ok((mut stream, _)) = listener.accept() { - respond_json(&mut stream, &body); + std::thread::spawn(move || loop { + match listener.accept() { + Ok((mut stream, _)) => respond_json(&mut stream, &body), + Err(_) => break, } }); - 
format!("http://{address}/registry.json") + format!("http://127.0.0.1:{port}/registry.json") } fn respond_json(stream: &mut TcpStream, body: &str) { @@ -196,74 +88,96 @@ fn respond_json(stream: &mut TcpStream, body: &str) { } async fn send_request( - app: &Router, + app: &docker_support::DockerApp, method: Method, uri: &str, body: Option, headers: &[(&str, &str)], ) -> (StatusCode, HeaderMap, Vec) { - let mut builder = Request::builder().method(method).uri(uri); + let client = reqwest::Client::new(); + let mut builder = client.request(method, app.http_url(uri)); for (name, value) in headers { - builder = builder.header(*name, *value); + let header_name = HeaderName::from_bytes(name.as_bytes()).expect("header name"); + let header_value = HeaderValue::from_str(value).expect("header value"); + builder = builder.header(header_name, header_value); } - let request_body = if let Some(body) = body { - builder = builder.header(header::CONTENT_TYPE, "application/json"); - Body::from(body.to_string()) + let response = if let Some(body) = body { + builder + .header(header::CONTENT_TYPE, "application/json") + .body(body.to_string()) + .send() + .await + .expect("request handled") } else { - Body::empty() + builder.send().await.expect("request handled") }; - - let request = builder.body(request_body).expect("build request"); - let response = app.clone().oneshot(request).await.expect("request handled"); let status = response.status(); let headers = response.headers().clone(); - let bytes = response - .into_body() - .collect() - .await - .expect("collect body") - .to_bytes(); + let bytes = response.bytes().await.expect("collect body"); (status, headers, bytes.to_vec()) } async fn send_request_raw( - app: &Router, + app: &docker_support::DockerApp, method: Method, uri: &str, body: Option>, headers: &[(&str, &str)], content_type: Option<&str>, ) -> (StatusCode, HeaderMap, Vec) { - let mut builder = Request::builder().method(method).uri(uri); + let client = reqwest::Client::new(); + 
let mut builder = client.request(method, app.http_url(uri)); for (name, value) in headers { - builder = builder.header(*name, *value); + let header_name = HeaderName::from_bytes(name.as_bytes()).expect("header name"); + let header_value = HeaderValue::from_str(value).expect("header value"); + builder = builder.header(header_name, header_value); } - let request_body = if let Some(body) = body { + let response = if let Some(body) = body { if let Some(content_type) = content_type { builder = builder.header(header::CONTENT_TYPE, content_type); } - Body::from(body) + builder.body(body).send().await.expect("request handled") } else { - Body::empty() + builder.send().await.expect("request handled") }; - - let request = builder.body(request_body).expect("build request"); - let response = app.clone().oneshot(request).await.expect("request handled"); let status = response.status(); let headers = response.headers().clone(); - let bytes = response - .into_body() - .collect() - .await - .expect("collect body") - .to_bytes(); + let bytes = response.bytes().await.expect("collect body"); (status, headers, bytes.to_vec()) } +async fn launch_desktop_focus_window(app: &docker_support::DockerApp, display: &str) { + let command = r#"nohup xterm -geometry 80x24+40+40 -title 'Sandbox Desktop Test' -e sh -lc 'sleep 60' >/tmp/sandbox-agent-xterm.log 2>&1 < /dev/null & for _ in $(seq 1 50); do wid="$(xdotool search --onlyvisible --name 'Sandbox Desktop Test' 2>/dev/null | head -n 1 || true)"; if [ -n "$wid" ]; then xdotool windowactivate "$wid"; exit 0; fi; sleep 0.1; done; exit 1"#; + let (status, _, body) = send_request( + app, + Method::POST, + "/v1/processes/run", + Some(json!({ + "command": "sh", + "args": ["-lc", command], + "env": { + "DISPLAY": display, + }, + "timeoutMs": 10_000 + })), + &[], + ) + .await; + + assert_eq!( + status, + StatusCode::OK, + "unexpected desktop focus window launch response: {}", + String::from_utf8_lossy(&body) + ); + let parsed = parse_json(&body); + 
assert_eq!(parsed["exitCode"], 0); +} + fn parse_json(bytes: &[u8]) -> Value { if bytes.is_empty() { Value::Null @@ -284,7 +198,7 @@ fn initialize_payload() -> Value { }) } -async fn bootstrap_server(app: &Router, server_id: &str, agent: &str) { +async fn bootstrap_server(app: &docker_support::DockerApp, server_id: &str, agent: &str) { let initialize = initialize_payload(); let (status, _, _body) = send_request( app, @@ -297,17 +211,17 @@ async fn bootstrap_server(app: &Router, server_id: &str, agent: &str) { assert_eq!(status, StatusCode::OK); } -async fn read_first_sse_data(app: &Router, server_id: &str) -> String { - let request = Request::builder() - .method(Method::GET) - .uri(format!("/v1/acp/{server_id}")) - .body(Body::empty()) - .expect("build request"); - - let response = app.clone().oneshot(request).await.expect("sse response"); +async fn read_first_sse_data(app: &docker_support::DockerApp, server_id: &str) -> String { + let client = reqwest::Client::new(); + let response = client + .get(app.http_url(&format!("/v1/acp/{server_id}"))) + .header("accept", "text/event-stream") + .send() + .await + .expect("sse response"); assert_eq!(response.status(), StatusCode::OK); - let mut stream = response.into_body().into_data_stream(); + let mut stream = response.bytes_stream(); tokio::time::timeout(Duration::from_secs(5), async move { while let Some(chunk) = stream.next().await { let bytes = chunk.expect("stream chunk"); @@ -323,21 +237,21 @@ async fn read_first_sse_data(app: &Router, server_id: &str) -> String { } async fn read_first_sse_data_with_last_id( - app: &Router, + app: &docker_support::DockerApp, server_id: &str, last_event_id: u64, ) -> String { - let request = Request::builder() - .method(Method::GET) - .uri(format!("/v1/acp/{server_id}")) + let client = reqwest::Client::new(); + let response = client + .get(app.http_url(&format!("/v1/acp/{server_id}"))) + .header("accept", "text/event-stream") .header("last-event-id", last_event_id.to_string()) - 
.body(Body::empty()) - .expect("build request"); - - let response = app.clone().oneshot(request).await.expect("sse response"); + .send() + .await + .expect("sse response"); assert_eq!(response.status(), StatusCode::OK); - let mut stream = response.into_body().into_data_stream(); + let mut stream = response.bytes_stream(); tokio::time::timeout(Duration::from_secs(5), async move { while let Some(chunk) = stream.next().await { let bytes = chunk.expect("stream chunk"); @@ -375,5 +289,7 @@ mod acp_transport; mod config_endpoints; #[path = "v1_api/control_plane.rs"] mod control_plane; +#[path = "v1_api/desktop.rs"] +mod desktop; #[path = "v1_api/processes.rs"] mod processes; diff --git a/server/packages/sandbox-agent/tests/v1_api/config_endpoints.rs b/server/packages/sandbox-agent/tests/v1_api/config_endpoints.rs index 3aec8ca..e212c86 100644 --- a/server/packages/sandbox-agent/tests/v1_api/config_endpoints.rs +++ b/server/packages/sandbox-agent/tests/v1_api/config_endpoints.rs @@ -22,8 +22,9 @@ async fn mcp_config_requires_directory_and_name() { #[tokio::test] async fn mcp_config_crud_round_trip() { let test_app = TestApp::new(AuthConfig::disabled()); - let project = tempfile::tempdir().expect("tempdir"); - let directory = project.path().to_string_lossy().to_string(); + let project = test_app.root_path().join("mcp-config-project"); + fs::create_dir_all(&project).expect("create project dir"); + let directory = project.to_string_lossy().to_string(); let entry = json!({ "type": "local", @@ -99,8 +100,9 @@ async fn skills_config_requires_directory_and_name() { #[tokio::test] async fn skills_config_crud_round_trip() { let test_app = TestApp::new(AuthConfig::disabled()); - let project = tempfile::tempdir().expect("tempdir"); - let directory = project.path().to_string_lossy().to_string(); + let project = test_app.root_path().join("skills-config-project"); + fs::create_dir_all(&project).expect("create project dir"); + let directory = project.to_string_lossy().to_string(); let 
entry = json!({ "sources": [ diff --git a/server/packages/sandbox-agent/tests/v1_api/control_plane.rs b/server/packages/sandbox-agent/tests/v1_api/control_plane.rs index dc352ca..fdd4131 100644 --- a/server/packages/sandbox-agent/tests/v1_api/control_plane.rs +++ b/server/packages/sandbox-agent/tests/v1_api/control_plane.rs @@ -1,4 +1,5 @@ use super::*; +use std::collections::BTreeMap; #[tokio::test] async fn v1_health_removed_legacy_and_opencode_unmounted() { @@ -137,10 +138,19 @@ async fn v1_filesystem_endpoints_round_trip() { #[tokio::test] #[serial] async fn require_preinstall_blocks_missing_agent() { - let test_app = { - let _preinstall = EnvVarGuard::set("SANDBOX_AGENT_REQUIRE_PREINSTALL", "true"); - TestApp::new(AuthConfig::disabled()) - }; + let mut env = BTreeMap::new(); + env.insert( + "SANDBOX_AGENT_REQUIRE_PREINSTALL".to_string(), + "true".to_string(), + ); + let test_app = TestApp::with_options( + AuthConfig::disabled(), + docker_support::TestAppOptions { + env, + ..Default::default() + }, + |_| {}, + ); let (status, _, body) = send_request( &test_app.app, @@ -176,20 +186,26 @@ async fn lazy_install_runs_on_first_bootstrap() { ] })); - let _registry = EnvVarGuard::set("SANDBOX_AGENT_ACP_REGISTRY_URL", ®istry_url); - let test_app = TestApp::with_setup(AuthConfig::disabled(), |install_path| { - fs::create_dir_all(install_path.join("agent_processes")) - .expect("create agent processes dir"); - write_executable(&install_path.join("codex"), "#!/usr/bin/env sh\nexit 0\n"); - fs::create_dir_all(install_path.join("bin")).expect("create bin dir"); - write_fake_npm(&install_path.join("bin").join("npm")); - }); + let helper_bin_root = tempfile::tempdir().expect("helper bin tempdir"); + let helper_bin = helper_bin_root.path().join("bin"); + fs::create_dir_all(&helper_bin).expect("create helper bin dir"); + write_fake_npm(&helper_bin.join("npm")); - let original_path = std::env::var_os("PATH").unwrap_or_default(); - let mut paths = 
vec![test_app.install_path().join("bin")]; - paths.extend(std::env::split_paths(&original_path)); - let merged_path = std::env::join_paths(paths).expect("join PATH"); - let _path_guard = EnvVarGuard::set_os("PATH", merged_path.as_os_str()); + let mut env = BTreeMap::new(); + env.insert("SANDBOX_AGENT_ACP_REGISTRY_URL".to_string(), registry_url); + let test_app = TestApp::with_options( + AuthConfig::disabled(), + docker_support::TestAppOptions { + env, + extra_paths: vec![helper_bin.clone()], + ..Default::default() + }, + |install_path| { + fs::create_dir_all(install_path.join("agent_processes")) + .expect("create agent processes dir"); + write_executable(&install_path.join("codex"), "#!/usr/bin/env sh\nexit 0\n"); + }, + ); let (status, _, _) = send_request( &test_app.app, diff --git a/server/packages/sandbox-agent/tests/v1_api/desktop.rs b/server/packages/sandbox-agent/tests/v1_api/desktop.rs new file mode 100644 index 0000000..76d9389 --- /dev/null +++ b/server/packages/sandbox-agent/tests/v1_api/desktop.rs @@ -0,0 +1,494 @@ +use super::*; +use futures::{SinkExt, StreamExt}; +use serial_test::serial; +use std::collections::BTreeMap; +use tokio_tungstenite::connect_async; +use tokio_tungstenite::tungstenite::Message; + +fn png_dimensions(bytes: &[u8]) -> (u32, u32) { + assert!(bytes.starts_with(b"\x89PNG\r\n\x1a\n")); + let width = u32::from_be_bytes(bytes[16..20].try_into().expect("png width bytes")); + let height = u32::from_be_bytes(bytes[20..24].try_into().expect("png height bytes")); + (width, height) +} + +async fn recv_ws_message( + ws: &mut tokio_tungstenite::WebSocketStream< + tokio_tungstenite::MaybeTlsStream, + >, +) -> Message { + tokio::time::timeout(Duration::from_secs(5), ws.next()) + .await + .expect("timed out waiting for websocket frame") + .expect("websocket stream ended") + .expect("websocket frame") +} + +#[tokio::test] +#[serial] +async fn v1_desktop_status_reports_install_required_when_dependencies_are_missing() { + let temp = 
tempfile::tempdir().expect("create empty path tempdir"); + let mut env = BTreeMap::new(); + env.insert( + "PATH".to_string(), + temp.path().to_string_lossy().to_string(), + ); + + let test_app = TestApp::with_options( + AuthConfig::disabled(), + docker_support::TestAppOptions { + env, + replace_path: true, + ..Default::default() + }, + |_| {}, + ); + + let (status, _, body) = + send_request(&test_app.app, Method::GET, "/v1/desktop/status", None, &[]).await; + + assert_eq!(status, StatusCode::OK); + let parsed = parse_json(&body); + assert_eq!(parsed["state"], "install_required"); + assert!(parsed["missingDependencies"] + .as_array() + .expect("missingDependencies array") + .iter() + .any(|value| value == "Xvfb")); + assert_eq!( + parsed["installCommand"], + "sandbox-agent install desktop --yes" + ); +} + +#[tokio::test] +#[serial] +async fn v1_desktop_lifecycle_and_actions_work_with_real_runtime() { + let test_app = TestApp::new(AuthConfig::disabled()); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/desktop/start", + Some(json!({ + "width": 1440, + "height": 900, + "dpi": 96 + })), + &[], + ) + .await; + assert_eq!( + status, + StatusCode::OK, + "unexpected start response: {}", + String::from_utf8_lossy(&body) + ); + let parsed = parse_json(&body); + assert_eq!(parsed["state"], "active"); + let display = parsed["display"] + .as_str() + .expect("desktop display") + .to_string(); + assert!(display.starts_with(':')); + assert_eq!(parsed["resolution"]["width"], 1440); + assert_eq!(parsed["resolution"]["height"], 900); + + let (status, headers, body) = send_request_raw( + &test_app.app, + Method::GET, + "/v1/desktop/screenshot", + None, + &[], + None, + ) + .await; + assert_eq!(status, StatusCode::OK); + assert_eq!( + headers + .get(header::CONTENT_TYPE) + .and_then(|value| value.to_str().ok()), + Some("image/png") + ); + assert!(body.starts_with(b"\x89PNG\r\n\x1a\n")); + assert_eq!(png_dimensions(&body), (1440, 900)); + + let (status, 
headers, body) = send_request_raw( + &test_app.app, + Method::GET, + "/v1/desktop/screenshot?format=jpeg&quality=50", + None, + &[], + None, + ) + .await; + assert_eq!(status, StatusCode::OK); + assert_eq!( + headers + .get(header::CONTENT_TYPE) + .and_then(|value| value.to_str().ok()), + Some("image/jpeg") + ); + assert!(body.starts_with(&[0xff, 0xd8, 0xff])); + + let (status, headers, body) = send_request_raw( + &test_app.app, + Method::GET, + "/v1/desktop/screenshot?scale=0.5", + None, + &[], + None, + ) + .await; + assert_eq!(status, StatusCode::OK); + assert_eq!( + headers + .get(header::CONTENT_TYPE) + .and_then(|value| value.to_str().ok()), + Some("image/png") + ); + assert_eq!(png_dimensions(&body), (720, 450)); + + let (status, _, body) = send_request_raw( + &test_app.app, + Method::GET, + "/v1/desktop/screenshot/region?x=10&y=20&width=30&height=40", + None, + &[], + None, + ) + .await; + assert_eq!(status, StatusCode::OK); + assert!(body.starts_with(b"\x89PNG\r\n\x1a\n")); + + let (status, _, body) = send_request( + &test_app.app, + Method::GET, + "/v1/desktop/display/info", + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let display_info = parse_json(&body); + assert_eq!(display_info["display"], display); + assert_eq!(display_info["resolution"]["width"], 1440); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/desktop/mouse/move", + Some(json!({ "x": 400, "y": 300 })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let mouse = parse_json(&body); + assert_eq!(mouse["x"], 400); + assert_eq!(mouse["y"], 300); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/desktop/mouse/drag", + Some(json!({ + "startX": 100, + "startY": 110, + "endX": 220, + "endY": 230, + "button": "left" + })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let dragged = parse_json(&body); + assert_eq!(dragged["x"], 220); + assert_eq!(dragged["y"], 230); + + let (status, _, 
body) = send_request( + &test_app.app, + Method::POST, + "/v1/desktop/mouse/click", + Some(json!({ + "x": 220, + "y": 230, + "button": "left", + "clickCount": 1 + })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let clicked = parse_json(&body); + assert_eq!(clicked["x"], 220); + assert_eq!(clicked["y"], 230); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/desktop/mouse/down", + Some(json!({ + "x": 220, + "y": 230, + "button": "left" + })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let mouse_down = parse_json(&body); + assert_eq!(mouse_down["x"], 220); + assert_eq!(mouse_down["y"], 230); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/desktop/mouse/move", + Some(json!({ "x": 260, "y": 280 })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let moved_while_down = parse_json(&body); + assert_eq!(moved_while_down["x"], 260); + assert_eq!(moved_while_down["y"], 280); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/desktop/mouse/up", + Some(json!({ "button": "left" })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let mouse_up = parse_json(&body); + assert_eq!(mouse_up["x"], 260); + assert_eq!(mouse_up["y"], 280); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/desktop/mouse/scroll", + Some(json!({ + "x": 220, + "y": 230, + "deltaY": -3 + })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let scrolled = parse_json(&body); + assert_eq!(scrolled["x"], 220); + assert_eq!(scrolled["y"], 230); + + let (status, _, body) = + send_request(&test_app.app, Method::GET, "/v1/desktop/windows", None, &[]).await; + assert_eq!(status, StatusCode::OK); + assert!(parse_json(&body)["windows"].is_array()); + + let (status, _, body) = send_request( + &test_app.app, + Method::GET, + "/v1/desktop/mouse/position", + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); 
+ let position = parse_json(&body); + assert_eq!(position["x"], 220); + assert_eq!(position["y"], 230); + + launch_desktop_focus_window(&test_app.app, &display).await; + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/desktop/keyboard/type", + Some(json!({ "text": "hello world", "delayMs": 5 })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + assert_eq!(parse_json(&body)["ok"], true); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/desktop/keyboard/press", + Some(json!({ "key": "ctrl+l" })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + assert_eq!(parse_json(&body)["ok"], true); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/desktop/keyboard/press", + Some(json!({ + "key": "l", + "modifiers": { + "ctrl": true + } + })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + assert_eq!(parse_json(&body)["ok"], true); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/desktop/keyboard/down", + Some(json!({ "key": "shift" })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + assert_eq!(parse_json(&body)["ok"], true); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/desktop/keyboard/up", + Some(json!({ "key": "shift" })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + assert_eq!(parse_json(&body)["ok"], true); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/desktop/recording/start", + Some(json!({ "fps": 8 })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let recording = parse_json(&body); + let recording_id = recording["id"].as_str().expect("recording id").to_string(); + assert_eq!(recording["status"], "recording"); + + tokio::time::sleep(Duration::from_secs(2)).await; + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/desktop/recording/stop", + None, + &[], + ) + 
.await; + assert_eq!(status, StatusCode::OK); + let stopped_recording = parse_json(&body); + assert_eq!(stopped_recording["id"], recording_id); + assert_eq!(stopped_recording["status"], "completed"); + + let (status, _, body) = send_request( + &test_app.app, + Method::GET, + "/v1/desktop/recordings", + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + assert!(parse_json(&body)["recordings"].is_array()); + + let (status, headers, body) = send_request_raw( + &test_app.app, + Method::GET, + &format!("/v1/desktop/recordings/{recording_id}/download"), + None, + &[], + None, + ) + .await; + assert_eq!(status, StatusCode::OK); + assert_eq!( + headers + .get(header::CONTENT_TYPE) + .and_then(|value| value.to_str().ok()), + Some("video/mp4") + ); + assert!(body.windows(4).any(|window| window == b"ftyp")); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/desktop/stream/start", + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + assert_eq!(parse_json(&body)["active"], true); + + let (mut ws, _) = connect_async(test_app.app.ws_url("/v1/desktop/stream/ws")) + .await + .expect("connect desktop stream websocket"); + + let ready = recv_ws_message(&mut ws).await; + match ready { + Message::Text(text) => { + let value: Value = serde_json::from_str(&text).expect("desktop stream ready frame"); + assert_eq!(value["type"], "ready"); + assert_eq!(value["width"], 1440); + assert_eq!(value["height"], 900); + } + other => panic!("expected text ready frame, got {other:?}"), + } + + let frame = recv_ws_message(&mut ws).await; + match frame { + Message::Binary(bytes) => assert!(bytes.starts_with(&[0xff, 0xd8, 0xff])), + other => panic!("expected binary jpeg frame, got {other:?}"), + } + + ws.send(Message::Text( + json!({ + "type": "moveMouse", + "x": 320, + "y": 330 + }) + .to_string() + .into(), + )) + .await + .expect("send desktop stream mouse move"); + let _ = ws.close(None).await; + + let (status, _, body) = send_request( 
+ &test_app.app, + Method::POST, + "/v1/desktop/stream/stop", + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + assert_eq!(parse_json(&body)["active"], false); + + let (status, _, _) = send_request( + &test_app.app, + Method::DELETE, + &format!("/v1/desktop/recordings/{recording_id}"), + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::NO_CONTENT); + + let (status, _, body) = + send_request(&test_app.app, Method::POST, "/v1/desktop/stop", None, &[]).await; + assert_eq!(status, StatusCode::OK); + assert_eq!(parse_json(&body)["state"], "inactive"); +} diff --git a/server/packages/sandbox-agent/tests/v1_api/processes.rs b/server/packages/sandbox-agent/tests/v1_api/processes.rs index 3c02029..136a51c 100644 --- a/server/packages/sandbox-agent/tests/v1_api/processes.rs +++ b/server/packages/sandbox-agent/tests/v1_api/processes.rs @@ -2,6 +2,7 @@ use super::*; use base64::engine::general_purpose::STANDARD as BASE64; use base64::Engine; use futures::{SinkExt, StreamExt}; +use serial_test::serial; use tokio_tungstenite::connect_async; use tokio_tungstenite::tungstenite::Message; @@ -277,6 +278,98 @@ async fn v1_process_tty_input_and_logs() { assert_eq!(status, StatusCode::NO_CONTENT); } +#[tokio::test] +#[serial] +async fn v1_processes_owner_filter_separates_user_and_desktop_processes() { + let test_app = TestApp::new(AuthConfig::disabled()); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/processes", + Some(json!({ + "command": "sh", + "args": ["-lc", "sleep 30"], + "tty": false, + "interactive": false + })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let user_process_id = parse_json(&body)["id"] + .as_str() + .expect("process id") + .to_string(); + + let (status, _, body) = send_request( + &test_app.app, + Method::POST, + "/v1/desktop/start", + Some(json!({ + "width": 1024, + "height": 768 + })), + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + 
assert_eq!(parse_json(&body)["state"], "active"); + + let (status, _, body) = send_request( + &test_app.app, + Method::GET, + "/v1/processes?owner=user", + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let user_processes = parse_json(&body)["processes"] + .as_array() + .cloned() + .unwrap_or_default(); + assert!(user_processes + .iter() + .any(|process| process["id"] == user_process_id)); + assert!(user_processes + .iter() + .all(|process| process["owner"] == "user")); + + let (status, _, body) = send_request( + &test_app.app, + Method::GET, + "/v1/processes?owner=desktop", + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + let desktop_processes = parse_json(&body)["processes"] + .as_array() + .cloned() + .unwrap_or_default(); + assert!(desktop_processes.len() >= 2); + assert!(desktop_processes + .iter() + .all(|process| process["owner"] == "desktop")); + + let (status, _, _) = send_request( + &test_app.app, + Method::POST, + &format!("/v1/processes/{user_process_id}/kill"), + None, + &[], + ) + .await; + assert_eq!(status, StatusCode::OK); + + let (status, _, body) = + send_request(&test_app.app, Method::POST, "/v1/desktop/stop", None, &[]).await; + assert_eq!(status, StatusCode::OK); + assert_eq!(parse_json(&body)["state"], "inactive"); +} + #[tokio::test] async fn v1_process_not_found_returns_404() { let test_app = TestApp::new(AuthConfig::disabled()); @@ -413,22 +506,17 @@ async fn v1_process_logs_follow_sse_streams_entries() { .expect("process id") .to_string(); - let request = Request::builder() - .method(Method::GET) - .uri(format!( + let response = reqwest::Client::new() + .get(test_app.app.http_url(&format!( "/v1/processes/{process_id}/logs?stream=stdout&follow=true" - )) - .body(Body::empty()) - .expect("build request"); - let response = test_app - .app - .clone() - .oneshot(request) + ))) + .header("accept", "text/event-stream") + .send() .await .expect("sse response"); assert_eq!(response.status(), StatusCode::OK); 
- let mut stream = response.into_body().into_data_stream(); + let mut stream = response.bytes_stream(); let chunk = tokio::time::timeout(Duration::from_secs(5), async move { while let Some(chunk) = stream.next().await { let bytes = chunk.expect("stream chunk"); From 4252c705dfc093213cc8d232f7183e055c25d9a4 Mon Sep 17 00:00:00 2001 From: Nathan Flurry Date: Mon, 16 Mar 2026 17:56:50 -0700 Subject: [PATCH 29/48] chore: remove .context/ from git and add to .gitignore Co-Authored-By: Claude Opus 4.6 (1M context) --- .../CleanShot 2026-03-08 at 18.53.28@2x.png | Bin 112229 -> 0 bytes .context/attachments/PR instructions.md | 19 -- .context/attachments/Review request-v1.md | 101 -------- .context/attachments/Review request-v2.md | 101 -------- .context/attachments/Review request-v3.md | 101 -------- .context/attachments/Review request.md | 101 -------- .context/attachments/plan.md | 215 ------------------ .context/docker-test-image.stamp | 0 .context/docker-test-zgvGyf/bin/Xvfb | 15 -- .context/docker-test-zgvGyf/bin/dbus-launch | 4 - .context/docker-test-zgvGyf/bin/import | 3 - .context/docker-test-zgvGyf/bin/openbox | 6 - .context/docker-test-zgvGyf/bin/xdotool | 57 ----- .context/docker-test-zgvGyf/bin/xrandr | 5 - .../bin/agent_processes/mock-acp | 111 --------- .../bin/agent_processes/mock-acp | 111 --------- .../xdg-data/sandbox-agent/logs/log-03-08-26 | 4 - .../xdg-data/sandbox-agent/telemetry_id | 1 - .context/notes.md | 0 .../desktop-computer-use-api-enhancements.md | 215 ------------------ .context/proposal-revert-actions-to-queues.md | 202 ---------------- .../proposal-rivetkit-sandbox-resilience.md | 94 -------- .context/proposal-task-owner-git-auth.md | 200 ---------------- .context/todos.md | 0 .gitignore | 1 + 25 files changed, 1 insertion(+), 1666 deletions(-) delete mode 100644 .context/attachments/CleanShot 2026-03-08 at 18.53.28@2x.png delete mode 100644 .context/attachments/PR instructions.md delete mode 100644 .context/attachments/Review 
request-v1.md delete mode 100644 .context/attachments/Review request-v2.md delete mode 100644 .context/attachments/Review request-v3.md delete mode 100644 .context/attachments/Review request.md delete mode 100644 .context/attachments/plan.md delete mode 100644 .context/docker-test-image.stamp delete mode 100755 .context/docker-test-zgvGyf/bin/Xvfb delete mode 100755 .context/docker-test-zgvGyf/bin/dbus-launch delete mode 100755 .context/docker-test-zgvGyf/bin/import delete mode 100755 .context/docker-test-zgvGyf/bin/openbox delete mode 100755 .context/docker-test-zgvGyf/bin/xdotool delete mode 100755 .context/docker-test-zgvGyf/bin/xrandr delete mode 100755 .context/docker-test-zgvGyf/xdg-data/Library/Application Support/sandbox-agent/bin/agent_processes/mock-acp delete mode 100755 .context/docker-test-zgvGyf/xdg-data/sandbox-agent/bin/agent_processes/mock-acp delete mode 100644 .context/docker-test-zgvGyf/xdg-data/sandbox-agent/logs/log-03-08-26 delete mode 100644 .context/docker-test-zgvGyf/xdg-data/sandbox-agent/telemetry_id delete mode 100644 .context/notes.md delete mode 100644 .context/plans/desktop-computer-use-api-enhancements.md delete mode 100644 .context/proposal-revert-actions-to-queues.md delete mode 100644 .context/proposal-rivetkit-sandbox-resilience.md delete mode 100644 .context/proposal-task-owner-git-auth.md delete mode 100644 .context/todos.md diff --git a/.context/attachments/CleanShot 2026-03-08 at 18.53.28@2x.png b/.context/attachments/CleanShot 2026-03-08 at 18.53.28@2x.png deleted file mode 100644 index 955a813aedbc9474109f9d654f2184ce9a58a8cf..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 112229 zcmeAS@N?(olHy`uVBq!ia0y~yV41+cz_^Tqje&td`E;Ef0|S#*W=KRygs+cPa(=E} zVoH8es$NBI0Rsrw*jE%JCTFLXC?ut(XXe=|z2CiGNg*@ERw>-n*TA>HIW;5GqpB!1 zxXLdixhgx^GDXSWj?1RPsv@@_H?<^Dp&~aYuh^=>RtapbRbH_bNLXJ<0j#7X+g2&U zH$cHTzbI9~OwT~iK*^3v!KNrB%__*n4XU{)CCyeTqokz3N?*Ucyj-u`STDaQUEk2s 
z(o)~RNZ-gvw4zL=M_V}pPZko50cS0)HBdWR$h{shC?|>2B93J*(xBjA~h$%B{MfQuQ)S5 z&sNFM(98mC8dy5CIJL+*KQ}iuuf$d<~`3=E9kna<7up3cq+0Y&*~nK`Kp3>p(_C!F1>X#f1>f%Jc)`~6imT}IB5#hSrm(fQ+48)W8AoN> zm)P+=fB5&DnH+XLigJl9=7=??bpA!PS)$Aw4U{hM|7<$cRp zCVXi7q2{!UCphgn-8aitEo?r$)cNz_=I3@(*5~AU$WD_{KFOt7DOh46HZyTv>CTJy zjf>CDWMS9n+jwKSgP{Ae$BQ()p0r+HkznK-5}~ZN^TFL;&v|dp64Lkf{4js();Ahg zjvV&LkDk4wp^+yhbZVkx{PeF^nZMmzbM4VTsgn7--kmmMQD6A+!LIeD*K_Bb{^!19 z<$mT5&!TMJiLk$8U|{G_JV7UB}LmSV#~~LzW86c{Y*;Pu8CD%^S|32KeO8S_gv2T=l@j9yZ@SD!Nh;( zTR9CF)ef-CU=U7V@@YWj+)#bsuvSOmrt9HebSZ?UCI)VgKXM2@y2>pc4XjJVJ{G8B zu>h)z+v5)(l!-;rr~wU}BPR^DcpNx*ka69*ycMgAwp;XB&%~B64I1z3Gm1^n`*5?Z zrETH+hJ!b@)S0e2^UYQ4^^AYbv-SolOHG_Lr)9~J$tzZ`&Mv$2^xcbVi_P13q#4!K z)t@{~OFrARbj8Y*JMU~Sy!N%J$=M+D70*svWnI?SVe2Qne*1FC`-a9B_8+W%PAhwV zqie@w_or6#4I1xDGwwauFthxQ?*BE5nrw6;e{7CeM;DH-NuPa41WDzs+aii;f~5vB|nXBM$U}bS+X!D-(Q`%oyYn9 zKjx>=w{~Y+_sQ95T>sg^8KcD%wN`BF)~(aCex_aj`uX$mEw|rI`R(oP_2D%CXOK_O zGJHX)Py*vioB7c`%g;%aUapXCT)JluPjoiv#WANCmd1fUMh83{BP5wNlCL! z^~znB7ayNfEx%>wn}fBx3k&|o?bJ2Ukho}Q$id0kzP_HpY+ZEgWzWu{|4XeB8D9oX zd|JA`jOn7`T!DLvLW&C?e)#g~(D(Z_so_`lP1!%0b&0p<^y_J;rB8St1GmTXgZr{` z%46)SdhX72OIE!Wv?{!?aN?c&L0q$&k1;YbIuITp{eAHp@!r|yYg{j_WZC&YXzqrFl9eao=9E_RNi;p!z!5TK zs*1@MmAU1^vK1zQXQoc|u-L_~-Y=UcE78A0 z0hFKw?B<<8E2SN{St1;=82fguF2BzxVNS;cI#3thrY7Be0;{-ts09t9FmXT zw9CHq=d<7hy_Q~sGvB^^S+ZZ2yXyqIpGdsDz0dyKz7p>Qeicd9`KT37z{&`~BG1Kyz7aQ$Q#=gG3 z#zAXs{YGyO4-K`uR>5&)#&6}q119ueeDeH>yhKWF{Flq+VGpxUZJEiz#W(f1-p{Y> zuaEy^?zhQ&{8xoPV2;R>r>DkeUZD7stl<6`Yf?=&~7Z|`ooF*#1=pZep(zUPhY zSDk0Ct^am$_qBc7xI8x6R$a`?ZO*;ae}t*&?8+94UGv|5;;Z0O*>m^zh2(2@KQ57u z`11MtC-yy8#C+dB@_nrC6&f<>!-vD$<4=Aqwv{xmHT%QlyykM>`}>`n7AXlyZ@##+ zI=|$#wM5^R==f-_LlbK)Tx!lQ*SPOF`C`ZWeLtCOt2*y8xkugiEd8~-fBpKsns~-#o-vVa;Yq5dEWh=Ba^?e z8g8t9xIoeQ;?`>Y+3Bj!x;2g}w|{$lW1{n=qIB8n?{E9()&83M)sMr`aQY>t^4!}_ zFEdlWm`gQjUQU~98h^Lo#{s)H_jh~e*~yz-N}0IKY|^EQ*6%Yzj?~WWvCg}j`eB=8 z@zON02`NG~hfs6ax;#)GfBczSjWO-c5zQN0>UvARn{MQu9b)(Yhi#8Usq&wfi?dkY 
za$OTMbeW~yw~gtNm(YYsQ|^jCjnUo5aevXeyStPvRD9y%g(fUJyXUZ5f*?~+q9dbR z&6oa9H=g+&Sy#gCc|}0A%Vn{l@GJ{cb9Wc5KffMtU;FOKvVXj%VsE%?Ikwg~{jkGD zqaeZ8da*wi>l|olEng6OXPM{SvVJF-2&dvBF@bmPlY|M zkqIpmET-N|oTzjw=%&lF_b0hNmv4QsJ@s)^&#uCl&E7Y<#Y6(#h1KqN1qf+HYaQ>G zJMvEUc5Yk8&dJOCwhQLxvi%7YeDp}{*YB+%;mL=lM8Ddi|E=Hf&qVQBM}O?f4vv;zzpAoL zjcu=LZR+elYyJFQzHp=BB7LJ3@%a~&4OVC#^7OgmDk!VoCp&f8mg)e;RZVPUgqQnlj|{(4UYW_cN7K~vXY~}NNk3%E?|xr!)+W`}Hvisp z`RLtOlUh1dtg3!2u-WSWzfojqlD1rD`Ee&r$5R>NhK??!Yh(7^&{A6IYFT<@x4@O=zONxo^u1_#{xA!-r&Aq=p1zRVwEcqJwb4sViKhvVTrMGg9y^4Q! z*ym-;^PnkAlPb1dRhg%8)XCF9Xk};c^80*+Q@jcT6+aahD=d1rB*Xfm`n>=ApZ9P2 z=i9yO*UV0{z$4Zn&P?-u?dM-A@vVBBFs)7I@4S{=UxSq!?33l^55sJ2pqH;&i8VmhWg> zr0Od8DAC6CS1gD0#&azvgLD7fa+zI`zIyd4#YMBOw6dz03a&FoX_+*xp3K1QA^zcx z%dT4zu3ZyU6z0C4w2kS~(@uv;??P)>xG%a_Fvwcg9C+2ZdOtnAQSy;UfD+URM>qKf?zp8%ded?*Tn{!%b zO%n&+9a2qw0K?ZEx1_ESacLr9AUe`I{@QuiTiHhUMPh6u9obLEECu zR_3nhvP;YB-mjCrR`=Cdxc9c%+T$l%EVB0t6siSFt?&q(+*P#i*Icpp*G?_nA9O3S zuXKmi)&LW|r6vA_{}**!*`T-O^5T#wwTpCZl!P987-;F7IJ(Jo|1Fm{TaPlobh>fF zd;0CY$=rSNwn=+qtKaKzdt93Q1Z!SoJpcZ?l0qlHjp6}`-fhqJyz2k@S#Uy2{1l_B zu1juf1pjVH{4!^!??hXrNr?ydoR6;2%3^%^&1NpAApnDEL-MDEFCc`OQE3{#KVi z`q21y(e0=!Ni9JRQU)0uzj$Socw0S!qyN0_FN&SRk-tPx+5L3xu~S?%{uhlFtq8ty zZEaMVsJ~16{5p}FC9#udY|H=dxB7YC3+wAb!nc`alXgw$5OGanEyxbxU}OK0P*}zH z?V0#|!_#)xT{CL}>-ete*-U2mb6cgG0}MG>vVhw6SN?wix6 z!PeI9eq%{3Cu7z{efx;(0rs2MoZ(rs=!;|OOW%7g&F!)J=9;a?R$k8EEU3u;_I~!) 
z?~~XscUlGcX%=+8P!?=encDk0>`_V3q>!z0U#o0gIal5o}qq;lRZChVk;vd@uD05XS0m#!Xpd+McB(TdhVQOA->zJvif(>t%?1n zUp?OFEj8IBw&$IQIIfsQgRi)o#XZm})9eF=AR%ZDwj^3QK zGG@A8+Wp%3zOycR-4fKleXg(K&~mkwZEL4@1-VSP1&)UxZr0t~@)v&DYdlfrQcZa| zzZYYT!>XwML)qRlcC5D4aP5drzmqq4nd+i`7m-UpMHG+lNE>_JYdkx3QU9+Wk_8P% z7Vr8T`C+@|BlAC>p0aOR9~?a`X#eD&l{ZE9e(pXSl32FBXP?kqCDWit8QHlR?necS zCVPC%(t8{5qeXS@Ceu@5a!y{0GZee(J|9)T_SGe2hLM%07w1%cH+5O-(nT68JBua< zbp+0?=ypu{yXQ_$wpO%boZKdjOI{y>r(a`P!kwyYb1!N4_bK|Td+a7jm0HO@yL0=A z_%T5bAxS$Uv~p`UxZL_v$ImqB)8WWor@tHP_AyQRG2`8hmBv-yWiuX!FWxtA#r*yT zm7uFr%W*@ATjP8mVdqnDlE@-c0bx&n_xiL5I>@3iGz{5674 zlgw^b#cx$nIJ@Jop7eQ%M2@*h$1P%K-qO(fak;5dg>7o|-u7ofOM185p7Vi4NH8?X zyZWM@=+cEduUxyfSlIP*jC~$gleLNK5e|RZ#Ofyw|94;5RQK__hC=+JZDL=ocS5?J z8)oVS?Qzk1v{Otmrt7=^-!ox4Kc@ysZqAt|yw5+&_~&|0hb#qt14RYR)8_Gi-mmdk z_+WxScGV&izpAoDdO@1``(NA54)M;MQzG(yPsbjsxl=SuBm38KdWfHL)Ah-V@P41O zDYsy*dgvZIsrbH^ZlagYd;HF#T+}u15dJ)+rZV4!iX@ zeUGbk*}RoQ@U(+qoLBxj*=Y00>w``odvdAfX`p{#l5_qX4*5@iCZ3wPKwMEFc1MNE zwL9CYIE{iQFMS$qyXwB@#41gj4O5=hxK65^U)yr))~(GfD%FA+-&j-x8&!OsoEMv5 zx6$*q$oE^58dQGS+UzkXIr7!4?Zcwj*embvE)|-fVwUw!M`7iBj_asnAqU=12ZvFP zz6q13iM`o`S&%sg+_5yC1k7FWFZ4-S2X3?vme= zy|S4uwQhPSV%BLQj~1L;-5Xe!a92DN7iv_oJ9FHBj#BlDB>`GqCQBco#->X zw(4k5@6;{A5rVGVu^BsOFioo3zQW?u`dWsdj&h{3OuFT=yP@o1l+*4)O>c9fi*omRI=7j;d-q#zTJN6AL4Jp) zvxvNuD=yx2!skzTX2xpwk8$@bQq}%2Ip-XEmb%R8`8!voN$X8(&o2ASr1jfuy>rl^ zjb`f?h)uY%Xjz0uhhDLK%@wzdjnAD$Bt?6@j8ygYv^MSB_iN_T<;xOuC8ub2d$fuO z<^~1+il3XiXm9(Gv$mH4UT;(t&K10?@=nq2-Hfk$gZ;iJeSUNM`5kTZmC79N_dI9Z zQC)H3@9Xejm1M8rmg}vX0{8cOy;pm=C})xG#0O}tg&(zo35+k9<}J4heYQJa@b zoXcHF+8;cF(;eP)U75Bi{kfTpbY3{LWJbrTtaU4nax86l z;wxxtoPIyqIP~R9 zCj2&}YNBN2c}L$v9aSF6rZIbdC?_0#BtOBw=XTUBC56tdZvB$|GtR|6O+EUq^mUl- z_O1<1E}bv0uwL(%OMWZ9=-I>O-%p;es7p(laB05RpSOPdN|*J%zjpC3`-bxCe6Oow zlXfRQd9m?Y@kjoTK2xlj++2?*oN4ha+HVoKY>{VX=cD)OPlJT|JSC=fZSUuQ`=@++ zi0$du-nwy}bN`kFg+v7=F8o@T{UZO1(~T9u?~n9$tk^Dm{M-_cu2`P@nQ6bzZDODF zXUC5gv|+ESED;X78jhzcFeXfMVz8)5(09H5Chv0>V`HO)e^cw~)zRI>Qx-XQGvB^& 
zbJCAR^X}a`URC0we#azlQPr%idXna=gX3J5cU)VcApGLO2EL*tJw<{i&qUl@W|RNc zoa4Xz#5j$qn=S>dKDgB*kc*f9_z}&^OZ8p{a>Orva`cJwL~Ey*30(qh)Aasalv=2? zDD>Dp8`oEx&DGhd#_2>B9SHQ8#Fi)~!2xOK{0 zua5js2cgeza&NDT^IxK+yZr8fS;Ecl7R2tpa{gf4tEpA7rM16o`wv{b`){|#u_)Fh z-3yf_&bY2?Df<1@o{3qj4jjon9T1kR-Lvw&@0I9k^{K^s_I#OPR2#OrKKjxP<#xV= z`|pe|Ixq6oRamMLT(wf^lg})5*>L^no-=1kR%Y39DF#)W2-F-0aK)z_T+q=8f z-;Ql~oxj2S&NAOU!l%XlCy5?i!nf6Y@)fobNlaj{p9qFa|OGj`|k;8Hm=Ltb}{48K7aN3 zo9^{{WM-|q(VY_X`@V^J_)R&NCijANkFH}}Ez=LOTUlT9aIzN)nsm5QZS&QOe(BZb z>)oU{jrmJnwbqwQcWj^B|M0BE?JG7xEI&Mt{wu7A`7OSc!_PFpQLQ-#YrX_HA`Kl(gm%v?~(#c|%Z=1pga z-qOn4;?uW;Cme|o6g2qu?BtjDw}02jcN{8q?XV8_E>vsI%$JY<6;RgTu~T!oh^bHeZOPp!n|FD5NFR$pt}=`9u#GUafNQS7?Y^GjvrCVATa{Q8nBU%0S1Ol!EuFB=DDt`2?tNR$;$wpSlj34)WzS5} zQd{)xmQi?&|C!v3442TUH)ok;y^nf+$jvtX&+k)n7pWipC4Trx{9Ze>wkCschCL|w zJkC40C~yY9HIB~vZ7Mx?pMPrJ0YF<12rHZ)w(lViTmg4*Ft zV0zob!0qw&P2-2R294RR4FdgfZw)z;nXMmuTzq?@hVI7LKP6%lZXHd2FdKdHd%;}6 z1jdzfZ3Xtps$H1J(Zlnup?%S=MzcG+S1fIw&A#aF*K4Nso}Gv5{oc4TzddtJ?$m*U z&KJuMDEthwX!UhFsE(2R4uit4P2TQ*iP{CJ#&)ak$GA5#M@g5R`}5aHK&AG0X zH|9((JNH@tDNC;#bNKaTcR#f3tcpIfN&l(s$6M$I8Z>Hy3VrRrEI)o>$UzIMjOU;k zqUZb_`TeNXIHI=|4w~->|Hb*?D{6BXG3|Qc9Os6HE9c||?#rQvFU;O`CX8Ye)_oCv z@EUal59*8rCZ7d!g+bwIudvS+BX*%y#bko!xn8OtxSozW7YMUuhJ&>_C>hi{)KtU8 zpqgPG+HnaK%$NKd)}Kcm%ZD#;Xe{%4FiUX4o;zPZeEZtky`6<8bDI zqpBZvi%!@grOv|EcJS_T`Hcr<3vZt_4#?1Qcg$6t6I*gYTJ}N0n#Sg9l74LdAyy%W zjSq-ME*D;CYJNz2$}_g#3zsaAIM!lvMK$d}{PVUuvxH=HJzc8`^@2Wc3cP-ypm09N z`eWF=x5b+2ot)fjx5Y zToN7SvgSx7mK8R%ooIbwbHVzM{L?A7%9d;q$@X8K6nrP}%C)Oo=c}J7Kg3d}EFki< z`GsTShwg7p%zFJu zYVU^&%3aPEFkjkqs=#PJ%|k z@P(|cKVCfc|L}78{0A2fHc$Um^tS5n>M)0{-WQDS4br)~_D$aUZLk zvUu`##{GHsn&+@_yYb2Qoco&gWXhFA0oMgRf9||+RV3NTLH5p8sgx_b7u-8Ku|?Im zs`M>m`zouD@EH?l%Q)Hc>1p`y@1yRsF)wdr z?t8_uY?Pnc%gEe zlCCN1%fiAJ;&PpjnqrnbU2^F1b>_WR#}z!@^M?1T&#|ouyprKKwV69hdW%7d>fz=~ zH?o=+ZLoT*axeCetkLJ zIu~1>{N~0+35$w?Zu_dsZM+ei(@dx7#%6{4eERtD;OzYUy@`k26K+g6sG;=nWt4vS z)P^g{WeRe$ew*yc(mbm3&cZ3FY2nq$7fVeGjFxlk$`W?l8FFC0&4jqPZDM-&{_S`@ 
zO|!ttH#38`>Pg1p0wZm1+e0=N!zc5qn=Ej7-8$zHlf`8IN%;{5S*urF@HqA47=O^OCkCYMVJW}0Wr^OmsvZ3`Jm)qhKRMRc#gws! z=}DEhdhb;?iyc)F?HymXy=t~!oO9Vd!z5Pj;p(N;kL}PaRz3Sh&&leKe~QPs1QdL* zjNP+h14sV9@4PkJA4FHqJD6}_`~BK}yV~E6=5D`t=yv{I&F`_N)x&!mzC5yzJGdfn zd(zuoTQ`VEE?l{~z5Ae`ahk6?>)R>Q!ULI=mriU=lWTU&KOn7bz94`9!WVMV%{`ah zGrpV1O6}RLsJ)xv?x9+NmlL@CZ`GcSc;6M1tof&jNigSwYi6S2(p6IzuiD1d$hfb= zzo&U~vhpUQpc6W~CCcyHiN;lRMXl0k>|ZXYcgw&ci&@~3CQqoPciL5iul>UmulMIyRNei$?fY$;Z8oUfSNy2_{ms4JnU|NxPP;FC z)tbZH{_go_zfN&ir$sz)*!NEKqTZ>rlBkfT8EZw&a)X|x#G39~q2tnYv~Iu2yPCeX z+4CBHKM_Blp4DCasyis)%gePZwD*{8zmg-!SB^H&e_xYP@!Otnzoch)FkSjN<;|O% zJwK1V{_%Bs{F=*0tK#c-GyMJiefzcb(k2{{k$-v>6Sh<{Y96p>IREdZ{0xgkE2Si> z2UoY-9ej2*`hxa0{W*JOKYS{#Z*LVjTof!j>DxQ!$N$z|FfmJd+NshbX~xF8CHvOI zIl><}`q~!jF{umrEi6vqSoHUZUe>!6=2;ViW-;GasAsX*eK;bh>&~2uin6h~1lDB&K-cvUKm$s}{R5g?S#z zZajTfzH@142M3RPNRCNQ<;M>zPapleey^9>{iD~WfAsqE`1t&~Hv39*O>gH~+r0g` zVqMWY*2jzMwrwvDuDZCWb^oKq{PrPw#b@WGKYMcP&$rW>3U|M~D#+*yS+u)f{gd0v z+;@k+?~tq&6FInZ)}ptE=1erb!R%^Vvf+nC1r`OU(JfAOwC{2^)b0}-lmsL z=kD&l&vDo}{om0VM{ZN&oqCTIKHbmi%F0TUFh_56ey~;$xVusLS?yobMb^jWe9i8Y zx9)m-dz!Ke9 zYW{nEuGzZ&YFnAELhI!B7uesQcTQO#zp6*=2Mdd$o~CI0`RJAyFTvNBjHTZHf1X)! 
zCFGEk{0aB#d2W9fG8dEGCDf(!OzKI>b0rlGmg9bN92h&j@~mXrcw~C4yz;rfaW~A~ z%xzZ<;6xmp!>XlW+Hs^$AT+FZqo4P-*WBXSrJ4cBv z=7H>1o4r#OO*q3D9W8fcFGeAMm8He~;wDjTK|#sP&(>jUBNBZ*y}i6PWrl|_os78c zp>|7c)9Q&`)2EB?n`6%JyiQun^xcHFY2}>9W*+i;*P{!4 ze_x15ac=*>@X5QdeWsGG&7Rr`Q6UXitG6F=f8BU)pJK+E{3X-%c&z+&=bU)`;>bsf zt|X>ACQ&)}cM99?E_1eWRh>F*x)Zm1%Ww8Y?mpr!?yU>2ZDo?H>*H8d>Rr6LpyIdT z4pWl@k6yOAi|e_THVY^1VVan)cl^P@+7E~BP8JjqY1m^Ve9X>3ymQL0lAYaA9Fx}F z_m~)cNXBQy=5Jk9a@-!iQ`WUKq$qrRaev>$7cZVLDN7!H{-vya^G?QlE3bYywSM6T zr;pET3r}TFt-O)9Yr_gl_r3Z}|L*?Te?T&8Dd-!CpX$t|938P|^9E-mwTaGUjh z;*}Q{&C0*^@Be>SZPAHkn_IiAXXe-cylP`~MlR3zRn7Iykx$A)GN&_5;&wS^EBl_OvyTqR}tOiqOe7KqYesTLT zNi|>9Z_`v3&a0VZ zFM5-NX63e*{?mW`4dQk@^?K`+s-^ReGX)9n)K*lmeJ0yBJ2ofy+N^gk9?cWIe*IzN zA(tx+Q?7p2xphE;#isa!P5SoQsIT`}HU9lOxy}EUQ*(4V;% zcILA`nKJd@!Hb+#d##O(HYcuJ<>(ryx#+NRKt!UWTPm|q_v#4~rZwLABy`JDS0wNs zThA+rwpmRw6_@$))6&B&Dt;}v=h*G#W%*h8@!MOcf@Zv4EpSs;YG2*F7Xoq~PT?Y} zxP5O}z7Kz)k*d4EJN=A(8BeDr>yFwxDX-hhIb5B1-%E3I|1`ZC{OZoXzwUwI>krP= zx9j_}=+EhILaSO8zP28}SE4NR=<(IF0q2t$S8`r_DmFnWWBdO8+QqZ-4xF2*XmlxS zP3){$SlT*$Vlu==CV_9a?R@&R7c!Irg zV@i~7re$y6x`Lv@10Q!Ru6TI(dthvEZz1z9g)`P1tHW~}=iA9JYrfUJFsVauV_iot z*MW17KNgf+Uzk0^oabGhfz;}Y5z+0Jgp^lF%?*5go%8(J=q`(24(|j!uYdmR{=Xo; zhv!p+do!o#6tnFK*B#&Zopd|D&-tRh5nE#)U&fXx3zuzV3+*div2MizQ{5G+|5fVQ z-CfMGGz(7%Rz8_pawmdKcx&mx67%Nm;!e&?0#DxePD(Pi;gJn^p3$(=tNHEjZ4c(p z|Mz**-uf&>kJ^WmRpfdV1wCW0e2x;Hz{VrdF>h~`Qro=l#xvWsJFo7mt=^dXI!r`V zbmjEvuPw-I&*}pwDm^uIH(`~w!P9HtWy0_}Sg>M#ZIk97h@V9B1#kYBC72cJ8&Mw=dTqyDBPw|SCYmWRgpLM^; zXh**Nk1uc9cKqbdxppbgD{cRN+ty9No)#a^)@0xR7bP60;kQ|`Tu;>fy#vd?TydE@ zvwLowPu(XGJ%4`3p?*k+68J=>$Hto%hwB>sor4wpD{QG_OTYR?cTQ7%Km(yjO_AdJH zp_ln#oVTE$l(5_RT8^oGc3Vu#P5D$+9!*{UuifGkFV~0fM>}RXyt%(u{#cd%j~^v1 z%iGRu*n7F~_lLe0f)^RH`R2B)+$Q(1*-fRX9>Jm?q1nEQ)x=k!x{b;J*UVm|NPZ??euwNjm561o~k~kX`gIQxfi;2hVm3D z{hu|-&FJ?slbvoxc6w&v$+DcX(-*I3U63N=kz`f6Y3G+Ul9xADU)P&$_I1xQmLSg-vzFF)!ad6+xqDMap|2%xyK4IRpvTFicC5(%InM|27CBWj5lEeuO-zk6g*zml# z9zJii;^r^YE~_5g+5J8%Y{I6mXd`BEnKZXYkDV~k9%SByzEKN?Z(U6;gP$&G<$RX 
z9%??_U^aiZ%eTh!%=V6tU*3Oa`Kpn3i*(FG_Qx;ov!>hZn&Z6d!2kbctJ@=AU){aW z>7#%B*4xK1HblkjWeHm!={-B)>!kVvS2QOVemKCm^WHV#iQCK)A4xvUeEaI^mdNYj zKTgO0Z)0p`JL$b8oj-zC7Is3IlPh)YYm+HD=}dH!7YoY=ua|!^zhkw=YaSB zJ}kd`V;B4HhilL8J5)Yp-?O;&N3ZN_HZW_tbW7t?`oV|J784@a{JXwMKAGHIeZ172 z!F7+!InzZ~LYgnm^(fKsZBI^?%_^-ksBSx8Y9Mw}H0sZ$GIggWXMtyK#mu$~zGpZD ze$x~x&0sh0nsQ=Q$$^!7g4W#I#9ZmRv*ORW+lI+!IG(7@Z`*!ua*Lv{P9N_F&QHtU zUeLX1kPqAaP=&o5TT8F5SpG3ir@>CIl^=~pUu1y#k! z`aBi=k?>J0r+@z!wZmD88Fy!yzOXM)a9Pq5vT5@sgOm#nE3<4@J-;bDp)PjI4XK^U z#v5mP>AtN~cTrH})9jAw5oEe_`~9Bgf-a3oT|o{)(Yku-6P2~BqF&{FyAg1|!QSqp z#k2|Er)=t2EAu>XiEr_v$wg}nr2l{T`~AbQ*XvK8eN}yLPo{F!w>P=rHj~skJC=0{ zAM?IiY<2GYd!~4=^EvA}IF?8}nzp~#{M4p5vln&mjQjG=e8S7!hW6*vG%v~-=JELb z-6deU|E*8LlEUqxDmQwvy^o&MmtpkJ-6pMjW!ldqpT#?QT;+WneElXjJhlCof5Eq6 z*G<(kukX3M^txxbFw)`9LDUi3bw*5zDY=`=>nBG{@w=&YxZU*atZUciD_^>%<~uV< z_TL%%e~tOi>yvH8XX<2MTRVOF$ulR8Tu}6FnsleI(@#cY#}RvlhO(LK-6|_9yTb(? zMe6?jyS9MkX@j2dDc6t zt4QYFF0pBECa+kmd)hnmxQU_=OK?@p`xkZv#h+YSS58_qDdwd3NztupT}oODWLKV8 za$8%CWvN~%$EVM(&Sjk2^rHOddB|Rv&(g`XmW}tz#P^ObBc?t%{>1&B$I_XS1;&zs zZ{If`YO@jv{uJ%8z`W8}bn!%+S#JAxGpzjC`s#T};Oi_!4-SvxC)U4Q|0H6)PJckl zg|4TkpQ?Se+1l{s#Nz%(Gt=i!KJKmVv}NV$)vd-$+g5gaJl~c3jAwS*R*B6WEmUV*md%k{GY4W`FOat^?hA1_m!(<;TMClw>MVFrLAqB zu>DJKvigOzIIHZxAO7vVvHyIXTKipAk>0mX4!<6rWH(H6;aT-k`QE_@?(?&kJucbD zs%n~d!q2AHqdc8u_mu@F4zi>R#eFi?xv@D>KZ(ia;>u!7B58ga(J*Id4VZ~havY;jYkxM1#ozpvEF}2~zkqQ%& zYlj}ZZk@0};L1$hf^AnE;~#hAH7ZI7+@8s^^DGDN_hobMWCg1IDOr{MP*{+M`=#)@ zDP7Y_4CL6iy*s|avH78_SeuW~+&!8?s}`PWYPytE!lj&eI-{-F*Y@p|GzI1NLJ@al z9z1{Ecjbx7i!FLLCPueeeiqk~yLi+-l-tqCq3V@?y-NK`mW&fBF{m}tXtYnlono^C87LG zM%91u$tlmR%HBlO{QN4pH6qHT!+X2Wyp}gPUd>CCf^I*%S3dtruI;}skNJPRp8r3` zR{Z>nh?qUT`#ThV-H!jC5VqN4~nwc(SmW{i)^IyPvzBp5Bo2 zGN|O`B~kysKlROOj;1K~{r!0I_|wu`RiA<#bZk5dr+H16G@h_CmHW)u3h$kgdjdnl zw0P~;Kb{`y8WB>Lm-e2|%GN0^_ui4{T@zQm{oia>r7_2;+ROLT#y6X18XV1?@>=fi za;N(8&zY5<9IOo2S8*=Qz8`4z(GsIsuGy$Q?~n37KSAf9${z>WH)K8aT6yc$$L4;! 
zLsz1gNA5p7ulk#0$OQlOyJ{T86dtSjOqi{H!F>6LO?#)fx^VdJukF*3^Spj*cQ$+N zZ!_1J&a(7N25IN^ald;fcgf&&;qPg^9vz$a`WvPllP%WRu;`_949~qSd7_gh3*|oB zzO?8l>k`hpyOK`U$Gc}GTRsx2>5bdR7BYE1_Z{YoJB#B(mOoMGt?7G}x%&44iF%RW zSs$KFvbC5O+H~WborIKdqvu|>MH_E1rY_Smy7$eMp;9Py$&#d_3oq};JHb)%S*3o_ zB$EuL<_*Ue9p9(K-Kp9>VY(21^D;3#-d+)(D-E02cvnfuJXloR+PzrJQSP3>vrFsp z9aoIh{(Or^E2UdsloEj%Wiy)D`I z&{FTzU{Gq^^?zUW$6K%SADr9$Zc}CczproX4d1UemVf=KGK=wLyL|m7PsY9>i8+m%(HZVAZ^a}e6+^uOkr7!`HaXulxICr_&utlLh}H z+iqrw-pnzZytC5%^`7Wuen;Eo>y_3k`d#_1a?54a9rF}3-ED1CoMc>gYkXr_(r^Dq zVEw*N^NpEb%s<5NXY&7Awd+Su3L4c2JKfkhZF{EpCcP)?Ri`SR57H8EwU(>;9D5~X z=F+s+7bBnd9sFO#cHI7_OzO)_v)boDJNIiyR+dgHp2;+w&-UT>>1%b*t-SS3(@V^b z*?q74pMM)T{1k1Ue4KjUwxz3f|EbdNd5aS(cC6GZHz`iC+{EJgIp^r%w6-E+^o06$ z&Vl{$Rg&ND)$gyFKKX>u6OG8`Elc-&{boI*^!M9}SN;BvuE*D9mUaD-GR~XZwYU1a znf1S<5|+z-A13F|t-KbrRQ2eOr|tQw3_`2&}iD80M2QQ)pM%YnV81g-Y!`>cpxyI~9L zEP%r=&TTZ9&>zsV+qQ=bN>c-QJ`y^UT)jZ{4lS z*IR!&-MulXp2>|*x|3tO(UNCrO8$>qo7H45^_F(r-^e2O%qHf(p@d*vQu&quk;{D< z?2|9Qi12>oFY1#rX>Dm~le4qwqGp~K!ta_Ha(UU@s+2vhdsOlmvl0)9II}`)*#&c>C*L^UF$9NNHhno)uv(*` zQ)$+mE%Ph_LwcF~<9&V}2)mH+)pOoqhOGiBoF4Dqy;E6p^md1XQ^w1CTe+7mxaw%@ zudA@Nj611(^X~UEyX*IUXgYgAUE{0oOV1Slp3HYqFK6B~3$#71Df<4|$HzI>#Haq> z^GWO7JnQSrY?XAbNbzYXEe@L`ID6BS3-Ygy%h$Cm^`1UQ)lKub=kr(9Vk`4wt&X11 z%)4Y^dic&9nbi+ezi$6(;Vs`%?a?vCOXIDCc8X>F90Q`&2<_hL_(F z<#)GPPpwNkAN#U)-NHXN+K#cko>9An`F8iw%(H#@>zsecv)Wz%v;V?tZjULy{Db{A zwz;VMXRmJyu8^NG?Jsw#wEdLnWv?v+?EG)psqfr8bMmXX@r!;u-*0;P^jVCKsV__1 zpYPr~-Z!#8iOyTypi-7A?R4LVKOn8^M(XaRpZ;{~KWb!`pVZEAS~PKQk!H7iZj;H| zZPw>^wb%x(u-tfZp2n3qH>S*)(ct^~{M*}JckYOaPEg~O>)5~NgV~pZwjUqepXV0# z&hyM1E6!;>K{@yK@_v-;l(@H8`&jw=M#e49VL3b0zp2R|E8HxvC4KinXeEoHDQ`z% zXVYpsmQE2y5m~v$1_Sve)0QMZo><{C-%M6heOuDyi;pk3eLMP4h+U@NzOUn0j7?c^ zJkRR~&Q2lBO-xP!Zq{3tJ_&nNGEXpC=yGApwubc=tDRo2Qh%y(3IVI+pAO~&$xwz%?_L}*|+V1T|r5iQ}V`{ zWhKSUHxJ8(9y2QW__@K`Rk5q6>%j{-hTsHo71JFLU#_=5esQw;xvEAzx8|rL6IfI> zew#nK^OZrpU~GLNjxbUYCciwOoGoE1j0vhYlSwW)}G`!uIghYwtier5FQ4%b6{k 
zH`~rSB9wIY&!3t%H#XkY($ZYKbcy$h_m;-WbI)9B^SJxTQ19WAv+qh?nK)J^XsaLe z)BABq|5)0Tc`O^fx@Kf6JPeNgW&c$@LPPZWp^d>gfmU=hdEwNv%TAP5tFPblYu44r zKSWRQWuDE6I`jX>Wq-p;ui%#RPdyTaug^6pwQ7lQ8M5ns!xt znnLi@LZyAB%g@f@ZQkB*6O!_FO~fZ|7lXq)r)jGxSY8f${eIsUon7TGTWxpqCw*Lh z`oX(Lo&Rjv7N%eS=W)ufak2W+R?)PD{7ZXyKNL4^50>j%_4(t?eQZb4XMcPy81+S= zLY;4x;g@A6Ehf4Pdd3M}sI*wTX&s9;{~Pz^?_w8bu2>*@vr91jl9J;2o=<;#&b)iu zub%&g(>Jrtl`BDK%&vSwsFH?5bC z&-k?J!LyfrZ>qSZGQ!P#w+aTb@rz8Tcq(&+m$T3DN5yT&H%n`dTzUNZqK9>S?3+bm zO;eaA6%_T`&*UiowKqsG?Mkfm@hzSXQf@CZ&j0xw`Q_E!=vnt(Z8T+EX*uy|+>?OS z;=w!>6Rtcr%|6GWvN>-zZ~nyNfzhWf9qs=3Bf0+;Pbjy?A$iXq1}jfbe5{^RwmbXX z_La|XuDMN5w=6p}Of32<1 z9=$>h=Vbg^e!D)digSM5r^3zU`ulv3ZlC$(?)5hguE*ci{uXqbcPvQq%6|ExR`yS- zdU<(PJaSet6TirRIjd$|DgXBP%9e`V;a@*|I3yq?EhgCMbxY*Ug@3jlp*Id`sB>yh zJLtULJtM+G>(uED`RDyQ8rnZ=hWj||df!;>;qXg*>FqhkHvg}Zl-@4C=*s4R43l>% zDm#N*`o*sIFX`XEsivf4b3#gl@)y3N1YR3!D`*bf5>QxV0#6;?5}%>o{3 zw=yfmO1fWlP5Pshrb0LZzz6UmhtMfK}m#t#+&|GhtY2^2(_$qaGWo=mi90x-R_eB2pL^ap-_t?p^JmjfunlOTXic#hbU>akcz)^q5lCrL!Hg z&FA~<?JM8IeOiQ^N6f3H#U3C|BdBPY@MPHt((y9b_i&hLIaTOoW(wyChz zm(A+e^+S(x9BX^|r1-n+UR@GD*K3z?VyW()lZTg{ILvw5U)HH6k<)Loe^AyV%_Fbw zdN{mle#iLEAT0dAY!^lUa`gidUB9?)ZQFaVK}wvtQg`l=KZn~Vo-;xy(Hx7egGqmNwdCNHLvc z*t1P=+Mz5L7WRdAVwlW#@h7gSwX&ONGyl+rHx6R^Zq#xe)nmNC_T#W)aOalAr}ckX zUz^pSqF3?ZgCGkF+o|hzg?}t|O-gvY;0429h0xIsBApO)72FyZfgrk_nI7r&Oz)f}tPsek_1tX{A* z%dMhf$JGy24-Po01eH2gv~zq~E>^GQyL8bWnY+omPw|F3IS7gOD<*!nozL;pZi>2< z(5kGHhgWUe<#khM{_pQEIN$rpPTW?q)6s^j9+FJ)Xbt z`>l;PpRQ55#MJdJYsKOYW%(cWyJY+hZamw!{d(YZ>HbOi&lZ_|SGu!(FL!}7M!D5< zV4dyn7t(7ruV$ZlAzkq4iRbL!FC~PVWw@@aSheZDZi;b4prG}h&bjxNeEM6?{h?l~ zTYTmFD4RGz#+Pjmoj26|EXs)4n{a2dB>VTAmU4}sVyV*)T@7s4dwB56#}~bC-TRa; z1a&MpWwQEVX-L0<$t+EwMc*yvoLcYx^;hZ(iB~-}>9;)B-jQ_td(6Q)PwP{-tW$23 zs8-C~jcS&1w;%HzdhwuR&L1I`K)wepFFazNKW5t7t$a!I6Gva^i|dPxC8K^HSQzKc z6vn$rljjY?((`Li`yEyM6n1KLPD}SO@v0q*CT~}-YT2czbVP7A|Ivf@^?BCY{+8kT z#9ff`wdmF+TN$hAQY@1g%QMs1R%N}`wAJO+~JFcU?G_*Bt#X^)DSUo=Ybz5}6=Z^Wx#= 
zGaKK!^4WfTGI`6}N0-g)pZ|P5?`U{@o#ytfp*@1aL6izMByvF2*Ox+*e-rZQuUt%?P&!x9dHg4H0+OJyq!L*>rm1T+l|Ihko z_C8MR3NuuwzpW)>y=7vV-Dy{c0t+_29em4zjF)a^oggmwlQmRS>HiJC8IvAe)NN9l z5F`EIPPW7)qf-fYwz6KltMu^VdVL-B$qRbla-V%*sBy23YgZ8Oz8gEZ<8$t@7%%P2 zoFnrvVcmfvGdy@cUw3wAyS}i*mVGY&rOlH(e6n1`xx$Sv_+_x(OtqLXwdqQgaZ!f) z4&|q6b5uSqZ&aGFhWL%*69@bVo09^LnNvTAie z$n!R~qU54$yBMb_i99TIl=<#% z`OOwXuKUa1K3R2NQAuci%@@JApXX1T6}vtayrub0QiyY^s*Os-x@}+Q>9;nhh zof5f~s?8NC>NewHU&ji&f{ZopvYqZ#^|wk-hBSSVoh|4!(alPzs!KChC+UH}68&GC zQe9~UZ><#(3}Hiq(4V;Zd(9D$c!d{Xx&7 zLtbyX&O220vhB5ISzI2XU^)49fY+grR7c)de@-md&=FW(%>BXG)oJdMFUHBvwr8g7 zy0SmnWS0a}KzP!jb#H=qJf0_6@#@*d5OXDURpxW7ogY-2-*O%Ix)QBny3UK;e6F}x zhPzqsLD4uptBQ+yQCH*oR+Ox1-MELVd%Ns`i#5VrQg<}xo@Z25i4Ya!pWDvfCA@v{ z#DC=*y`ou{+)HWMoAh;7n*M5$3CqsUXe__~|3F$!@6jg9@^i=D#qK`w{WyDk%~w%2 zKB*jIw~!^V_m=c+>oQE*(;*eLF>9+ysNJ$TcDK(1_2dueLhCUfrKYy0)i>o#)5-HP&gC)jcP z*mswP=9^Y(md#~p-sjwMJUEm*42nuFDVXnM@KoOv_}~u5^?>>=xvT@Tf?wW<&RZgT z-6kN;ZKA7+*OxE+?j6hZ%a+eQ#@v6WyIP&cG1-$ZXtGAq;~Q}$T%THQHeY)9s^mj? 
zze{PU?Iy48hj$ePIA2Dd&@?{W-J&hasXaS9_s~3N^?;z)4$JPG`@q}fGQpq0>m>K8 zH7dNVKUYrq=eDv;eC3u<2chZFE`cuJT;wk<7n#u1)U3Fu``q%HpFh(p9{zm(bY-q+ z@)FMV`#;8?nP=?2YTHYT_p9|!R=8f)qq)k&)Y#Z);V^G+kf!p*jB{_S&w0NQTy|5w{tsu> zmshH~>9*(o|H}UJSu*;y=xoO88BaHHd(`ht<@xvVNz9Cxy#kk(T9@fZH#JXTD@#6} zQ1aeq_soOL>-BE;mUgPJP2BNlX<=h}tn!~PRy%^X_ysIbm)Yy|(0keCdt0}16kK$8 zAJs1vELXFo_eIKwr7MJQ%|DgkurZs*E_Rn}w%OXB_22F>`>cCq_rHGm_nB|^t}ol_ zx_4odg9nGHi}+sY4RbAas1?3ZssAt3E+TV#W8uQtNmfiO9xD_%zs7~H-gfKW?=N=$ zzw?|iidx(9q{CI?`H#fs=k`9ISN+^Lyl!6Q{O`Z(46SarXmgkB*kr_-)+;a8X4kGP zHBY?h;4Gf}_1!=1yjJ`<6SC_4(Sz^Yw*30NaFO9+y#6$cW32TLGq^UmmfVZmv-$g>sT=3^ zH%y5SIPhtm?(!7*qyN?(;r2+nACQ`}ap%VC>UF=RYhFj+w^Qw@wmr@Be&_3TH$>*F zmJqgfoU-8W!+!syhg+{7E1UZ@)M8Oy?`E|wgS?BjdACaH+MgS^NZ#4AGhEqYx?j$m z*S7UDnI`R+ZYYxeZ|;Ayf0HK0M0IuL-|3&V{k|i2bojc@mLIzBwe3$?X*_Ro$>(H) zn2-C{``bNctyu8*{xRhXf%V%oKh024;Phac)PSIBd^wLi|7vDt@@SoPemeK>;Mv-# z2F(@@dl&59U46oTrYG}}!~Hf*?fWZ#1&d<>GoUadB5u`pKsvy*uOJP>zSX9 z^MwotFF(0CTV|yi&G&fys@*BwZN44v|Gi1#|2E|4escG1n!3m@t!e7r>r+-_Tk$O4 zE>Tn3nAB1sj8BxxP%?P$q0VIY*?;WX9yly@fYtX1sZG;9sA?s+s9eUS=Nc75-$< zYk4@VS6ppz-$}QIy~i1~tUP>tnl-iMvsSk{O=5SfmRKeuE$i<1W&74d>(dY8ZUvwJ zdWR!3qHbC6VkKQ(R?U+Ur{ATthG?JWnrxVw!er7HWwr3u#EH>O-&f5p*mIFNetwr~ zX4wlLFP1}lKLk!F`swrH)7Jy?WeuT~1;#sWr+7FzImvl<)bQMT*Y5h-HLOW!@nxZp z5izR|{eR1BZBbDrdi~$AX8t*aH`C|U@_iQ5v3UCO|-apHZNiQ zo%VH-{62Ged53$_wVn4qud}V5c<#S(cW>Xf zG9MXxy>H)|_Sj9B&>8(@`7_12rw?_uwgxh0KCictdwc5J!u9g|-{)FQ4?bVn$8o{$ z|3#}6lRlmMIp_1t7csxSzqnseb?@@eHHx79-bm@hgK5%%quvkt{q0&^yTui0G3yIy}-=zL>Ss&U!-d%W#;58m&p+%ZW|{j<1U&)lL@ zuiZ~if81{WW1mRB-@;EnXD5nIxHNh4hmU{F6%1cExVcH~nmKW?Z@R$7b1ikd*?I5& zy-~7>`D~)}wVb>C0?w40#v9WT?Ujip5f|>gU9ef5f%$f~ zbu42<@Qp@YQ`VdbZ2N5+-Yind*xY+2z9_-JRQuoGzjG~8(#1Em@U*--o_(Ug-;dig`QQG?q)7W{Ge~bjhYMlt{0y?Qn{BKD={xrpxhdD}$FCy@+l9 z^{Yz8zU_t#gFo~$f!o)=eg)<&gw)q#V{=Qp1}xUQ}6@QfKO z+G!aG@N3idjG%=p|bp5wc70;0Hp z79KG%EN;D0y`rnb4AaYv<*Gg$WL)T;^6p;wf94-*t7CoA%2rvO7d(CP?_FVkhn&-f zCXI=r6ViO=_1urM=l^zhwqcnIpOwZrmg4RWt5^0bUb22*d?9nn1eUqF(znewMo+ht 
zzPxT9TPV+}cg5cqtV`pcA}G!(qv~_ZWQusNvtV)8F@K!{*A|;b{QKAbc;%l^;B`;XpLzwdo(^*sDx(68vSk9H33lV#G?4c0JSYBs;uCa)jW@~$>I z&__{qPC{G1rPF!+`z`i+eu#Ow_dB;RUfrux{wnjeb7Gq0;Z2_xY*alU`@%W+t=*Tc z^BVjTj~$BKu6TAr$D>0$PeM~3t-UIrbobTN9q+qXC8AB3^mf%4-E@6>;NaYdt$$2= zPH#zwDii8D8`TznyI>0AEk>)ETE~2wF0F7oaPRom+lkj^E#zytaI0R3mDMccDwj+q z=cBpKcW>Nz>pofEL-2I7=VJGy6i>Oh=%1XEE_E;9wg~iMzf`_{cDMcdwFhTS_PbJ9 z{h}`@fvI)zX}9T#XJ!NzxP}<4P%bDp-yq}1&ih~RjYUMO@9k|DUtZ!dKHbmL`6eLU zC#|B)T0~L*jLzlk+%;*O@o#VL_pJ;O>QFA2t}9$FHR;i}nq8TvPrCkW{=P8rsMnS^ zcPeicZd`2o!aP1LK#Qb;PQi+o zPwf})+9lQh_xAboKTc|CX>WXTExPda+U*x*ioQLwoxdUI+1#c1kJp;tw|QLk>A*^s zwor#(=imQh>AzQDY%|x_AU$HCr@zDeYYpMk*ExE7%Xf8iZ7BL$Bx4iR>fmwm+WVgu zqPS-WFG<`{A}q7Fmt*H|gQ6oLDa)rQaq`U4JJYySgE8A|?SXe|UmK{|$|v7w)LnCV z!Mb-E#S=wVthYU-qbk2{oz4-DS_b9GJ}(lVT{x*{zRNi)&Ro8d(@fy`RFa%Jk? zZ#MBH91;4pGRv)}@ffSVbs4DwY|A?%HoCJZyBmy?%BE6 z`{VDa?1AFj9=IH@^U*wiygxs_X_~Nl!4ntZ7)9Yro4pLTB`Mi(iNAT6Cvkad$A^D$ z^-oQ6ZkI7Qwze(`lQOYlejnfa#f5j*u2&nX?|%#4Z~N0v$by@3;-fF`Pr6DhHJp=r z?LvFq!;Irr;ny#)?|-tl^ZH-mp5lG=w?dt4d0*5%tm|^0UmjzTJL!%hC$zAGQ`2P{ z>;EjTPg~-_9G6hOrC8RU>pEqxwcWve$?ggLFA5i3^%U=$ z{_yzUjJ$le#6-s}avM*tb5APLG}1kJ;ezcFudu}Yt8Q~_HnDr2;s3O2Pl?V&%_rXv zAKQ>8Z5`}tQx;>rkB8eWB5=X?OOZ?3`kj394{*#r$;Kc3f9sXGEtj4)&kH$qs`x?3 z?}x_Gf`(5YCnPic|N1Y;s%^~-^HQNxZch@we$$QEw_7S%?X265z0$%)!3n9$cRY3X zUlc5v@b8@GpC9Yx9rIL#Pv48_jj>vFMv~Ry=AG^*OPtsm)enhtE<9Yzv~;Oek6h-F z;PZdjqRT=Lwu?MdTe?wu4%^JklONctHzfbh`uV_%>A9pyXb`Oj|JySr>ta5RTzmGY~f3sNI)ruNRi!lr-vMo)yrzK-8ID>7d>r9I=NEUmv%`+bH_gb*wDi&9SW^y-Nf*yu+S+wdb%yaOmPMY|Psvt)c-}j^ z@N{hdf;@N8brQyWv9`WHmb~N0+TE9Mj79ccR1J4rb(;ODqBI5VVihGe*{IvQ1A;F% zo%*Dm@cqY3*3j;M_xA31yvuOI_p1*Ne|>%7!g`17)zYUL8vWL!?68@;V&&bcKs%RD z%k=V?GQD*FO^iL?b=A2&>-f8SvV4#0W8U64qN8M~!szKBG=T+~`mu`hPTB9bD^{;P z`t9u|spqTz=^gpn9_n!G(;@vplSHi=P1hfnH~TNzzL3j1t$z{oN(RPs>C@X91U7e6 zy_z-AYLU>pBRLwidJH7KT0C`22rQ$z-AIhTd>NQQUp-9#K0R^bn&b6$KTXQaH!6oZ$r;UfdF9r1hVl!I^A;Of z{CM;u{qxto+!H^WUY*i0asJMU-+tM~M_#*lefpw3#hA%>6KBNQYZ-YLr`O8v{I^qB 
zB8>6n;pNjGHRoDieDLkTbNfHqBHFtOiw>_op=YAJQ-f9Zc%O2k?DZ{b+J7z_SZ?I7 zIz{{Uzc{PrDIVV5NgP|Hm)$Aavu~Hw{r@i~@5sHCVN-TH#OBB8?*)4fcOTE6-z_v( z+%rmb%0q@b-_B)ho8-5)_0j~c=_>{N*WPM=8ogc7_GpjHGxeuW)07nrCGXYoOYwy- zZn1O^k1=JRZ5{jLdHHpP1zt^|$w%&br#)^wI{AV17XDccQoGg^m8Aq9ynfy3<4XRo zas2?}Otl|e-7S!^Z({Gs4_{-g zAC}4=x}#ai@#9a$yS1S!UZvCpd&zClIsE!fZu7KA|F`XJ?`D*~i{Cy+6QlBj6}<<3 zg2FEI(+us0sTm~?PZZUBo+ceIX-SjWr9<1kDlU0C%dt{0A$;3}*@yh=CAiizWlfc? zY8LyTb@9$+?p!w`A-9;e`X3kTlWzWA{c&^t-KL|)MTNKf=O-;UV~hCzLvab)tc$1n za=RQ@s~9Q}@98u)M>s{zlJOeWhK%dKqhC!^Fnh!J8))DqF9d zxtZ(2{K(Gd<>m`IpQcO??>n;QQ^^v|-4{v<)F&}1O8jwLq08kGQ8sN>&e4Tt_W!EC zCe{SKc8RLvs{0YZ>6W(q(ckU-ix(Bx)$A2%el_dDvu}%f?@yi9r!sx>T))LqF?MIW z7BMm3iz{QuUvIh`enfa1!32vmK#(q%$+=?Wy=(m3g*No%hMJa7|wm&Pv}eWU?s(g$ zRP%gZtHr)`j3*^JpFQSdv5}kNKeOZPnSfWt?GHbdC7yAy<>G2wm3+10)6cUj-iK_u z9c^?-O1`PlE@Q9Qj47TLf&24j$n#fP;t4cM7whd6Dn*loKE9RCJ23I??h6-hafVO0 zdu)!J$@lM@-J>KlJvle5645w&x-oQhb8qamOFC*Pv%a42y0q~8Ro-d`??8|CX?*6N zzLecDHWodrn|s3ZOvAGE{4d_HgedMPiE2xneer$iV)c}^V?iBBd-=XiQ_HcBYhJWS zW=elTL0Nw@Yq8Xd1AaM)tbAL)JKERj&8b}c@zg8c9glypN~CfcG-p`*WQj8=s_)_X zyX}(flZ+fk`{w<;FFP$xoc>q#vhK#!n-`Yc2r$uVGIaIaQFg_rME2F;hqk3JK3f~) z9&`B;uxHiFoI}3l_Z$4ujm`4zcZOz{KXfQH@9AU>i1vN)GM}xfB!RWZVC5N$OFJU% zHp8+Y>aYxm$79`?SgMek7P_D#>B9GC=UNP}_%|*QXY^HC)B1SBp9E988@o6IvKJ&r zZ0nkP@rspfiIl9+*|46(o3rQLyY9E)JYFy|Lgz|CTQ~ zS9VU#FnO-@^;Go7E9__NKYf4Dd3;~zKHu37-Y5LBp6i|%D#7VC?YOFz;I`_5O}{vz zZv3iwd2I2GNO7n2O00s+-p`qIcVr$cxnuR4OQ$|p)#%2_wM#6-z19VW&1s#W+7fGO zbN%7b3HzY>4hgf zoEDsK{9aa`P(7Jz*R91fPCj^3X{`0(;->P4Z%Y?w^vhnFl=C7qy1;2J+v6ukEnb-< zX!hT?s+OtSkfJn){hIUnNk-&8u8!JU$BDYqJKi`{Fud;8mhEh_S^FEb;4Uo4v9t98_P&#{BTZ$12UTK9@y zkbF5Sh&}S2+NPOK7bdkfJ*`mNE2N&4{X|$BccH%lROoA(?v<$IIr8sd)5?;^857;Z zFM4OM{_tm&Zou*WS$eN~6?R7ap2e@YSj?>3#c- zv>-`6d3)x^n=f4~E6{d(rgq@Kh8)}GEnhufZius!JZPePW2?-=I(xf2S6X=8??_C} zaH+q_HYHuspxj6DuJiGWFG~+jp42*5&X`A7?7(b0w)cCtH!Zw3_s2=u@Z_*Smao6> zx4Rxs$e(s+rP8q%;;AX=CUc#9w+k|F?AHCzW>WF9_iV+>vr{u(eJiS$n)5BpXTAl~ 
z|Br{|AD_K`|A_kT?Z&|?MWnCgyGVVWpJ4bnAXGTSOZ$tLZu6d*-A|IQ?@VCfQWfXh zyhiWBL4yMMS+k?s{xVJ!pMug}h8OxOjGhS*tV>#!n3SoWK60mv>4}Q^qMiCJ*UjY} zFHGs4;Ki)(eMGEB|DZDW^27Hpw{e&I;m!<5Cl6rHt zJs^6ota+SSo8#S1i-$ak?04-eKfiCbUoIs7ex1|njZq;gvkli@Ib>vRd}~&Dz!A>F zcV9c*;J!DTjjdaBCC`(vl}sF|pa0E>PC2>4;p(-&Kb}rrpZs*5+YbqbeJ^H7p4h7P z@a^{dZU+`PIu#lH+V%T_{0FB@lf$pm{|TLbFQm)XR$qR=`@5hB+lwt`JKpNmm06!z z#*y{v^;3e0>}1;JgJ~C%Q>uh^PqyvZ6=pv#dM9&TFRvAsOY-Hd5e8<=s}i*D+L_!p zxbaJTy?*;_z4`8W`vtShWRCw@I%i_M+={CO%KyIL44IpAV2Nh(#gavGe{22H>{d#AUb1YmCP%skgZy=ljXvkk z>`^c6xch96ZGfgzz=cYY68443JMa9_6yC6VA=mF4OYt@&8^y{FBGJ=7xGCt@v`d`}qTl&AoBA zcZlzu+rIJNUiArUzdY=H9`BHvsVWo2Y;$4p-gVr?>*n?EF`Q}<8O}Zb_~D+#LR;p% zIAH(hfqKP*-}l`vtT<4ezmL)VuV6%zT+GfN)fq*zFW%Y3@BMBU>(pR1rdrzz6*3>b zlq^`jUizAf@fDqo+}}zMN~8sBk7Bfr4gR%Yno+pKvyx}^vztp~Di=I5WIcU-UTvv< z-Oo3#F9Zf~9X#Np+~vAZfOV5V$wPzUNlkBT#G7X6spvXSWh#+oy?N7j^%v)Z*{YdS zcci{bt*AIy@IroG6uf!z{A`QX|45rMOBZ|_x<(x@ev)B-0S6cyg0r(?7@+bi<#d)ZNuo$g653=>oOhIQ>e{C z-Q^F``|q{>UN@VGJyuY*ozL-ck>g>b4Fv&L_7*N|N=mrGk|GxV;J`xVJMu1mUTRiS zvp>B4|5Nha?qtJdv8KX%*U2qerhfQ(_R?ex);jU$ht7U7F`eJU@W;uc$W0<^t^0}f zt=FC>o+y`Jlf}P9O!!ZgvQvq0Yx5lM>l1EDv;=zk3%(ETaP`|Yq3*{I>4?qCCB2-j znm0BF1l0r?+j|~8hfiA*(>#spO!BSl?rq|? 
zkKI{)W4_PgiyU+P&ablNule)lu&tgdqvz_Yi#AlSeB$nUP;z5q>SguAy`T52Gf%zs zRKh&*(g90%KKVoHudng!5tlZLW|H-CJ)y64>|rzek^?1NvOnF>25Kg-s5E+aAK1X{ zaoOAJ!;7cW6%Q0-OCOzGn7Y&>CEYCd2K(B1x|bImi_Wia;h8q~=NZo(pX2NL78zci z;X0*lR~uW4FV~+RjW>7XWhp9^Flfr;9q=q%Jpb& zR9d@ZewyWMaru7_<0q{Bp75;q`T}>!M^aWT%vu^sSwAEv@pidaP34R#?RYd{dVXm@ z)?|SNn>}TdCd$bAbO{u9%}5qWE4L7L)!6#ti&)2nw^z<8S&46%$k4p?=S+zUe_xxI zJ-og?kA2Nd4$UT>BV`JQ&Pyz55LtL5<%0IYJ_Cn4N3+|vauoHcC$tNSEUBrD2$|8w zT;i0wBKO~=`FqZ+(U}t3=T_z?_oa$?%9->o`MmAirRv!Qk8Vulj`K@f*0F5q>eGqu z-4z5n;=Tn5O}KQ~xZtnuc6a;mWM|p1J^%A;ZguBbtE!5c-z~d-pmy*7bJ7{PmRnm2 z-u&p?k@M@!i#cm&Uwi!EUoG=z9O-_kYr+jy6A!VpFb5(x;7_!fw{z@=Xv)x3I6_G_iEcD zrc0HN_bgE72-LSVmW`E_eUYwq^MNAkl7lW!so|+|SRowk@?|=aXN4fJr9DqEooE)Wgpw_1%QtNVlvK>!^=!^#92n&5WAX zz`E#=vx2{+!TJR&QVBqo(ac6pVn>j3BX57)IqH4Y7=Sz*E&rx2ypF3ydi+r$r>2gW= zO}KMr$ftRkF-1QkXI*JcSipa(fr)J?(=-;*zAYLb&G-LdaIc70)U{q~xW#%=h@!{y z$rn!u|FvU@(a8?JRO`}v;`oGIUR__WX}dLVSsRw6u!YInepqVLsH9POX-cMqeWq7Q zt>C1Zp43M7$aPm76E%buUJ^AoGTLm=wD8)}r&CjuPyK0FJKO9+czEWIgI7<$r0W!Z(Php)_^B*;3$ z{I`~fQN-{2^8fR%1XyU!v|z1%{dz-h-;|3_Q)e8yv}(d?{i5|2^5Ts< z9NCLR6INHJPH5u2>pm|}yhcaGc*(O%jYkTyw~ME7`6Mppy31wG)yrm*ccWw3-Q62r zJX?Kn`nm_U6^YxeAG}Bp*mlaPT+p~*hoAvnkxvF^rY{9{oC!Bh3A;Pt2`*!xD%5IC+ zB#E?Ke)-^^?Zw*pyJCMW%I1CQAap2{Pc~TWUbELyr+k0&|F0i@FW=>ywnAEHQDj&d z-M$-RvA!U41)qx`?HtS6An@ z7L}OI1%+QKs-unVe*XAuqgWXG@Ppv%D4i`k3U}OnXIv+~jb+`*^NMcq@%J;G=HJja zn(p(bCW(bf(PHkm7iE7;kC-0a$~swQny#9v`r(HK4^qy4d=c}}Hv2@xiH_R&r3@l@ zCc>LH9?{TTEpx&XwCmbidiCPpN9E_AKY#v#RE7L)f$QhboO<{@z1eSlYDBHPy>ouZ z3t63rW9J0db8>d>IUl@p$@U<_l`S&M0$+iQhemH7X?x_!Ul_*HIk z@4dTz9yZfd6t!#bpL?3gz;ODCg-e-tDyx5J-^#=*5-$>Ex_iaW#ZA6dlUOCIOymCj z+`n*<4!iJkU#ITPqHO;n!iHPy_DsEA#Czw_}Mq3@|ZlO{gh@#S0|z?pJ&$pe8IeB%ba;%r}`}X`Rw+)KEL~$3T^ws!V|yUVP%&6b)@|J zoyq<#{m0+!+__Rve&2U9HNSm5*XM}UuY9r1BK_;9?-y@G)Zg0pdFQ=M_0zIgMv!6_ zv1mlQ@Uk8L_c3p86{FldQ)%@+g9`H(hk8CvE%<6(xMrE)wlho;MV~gV-p4rq&5h3L z_dEID@|wQL%s=wAI4$u|%f%O^UpBYswm7)BET%obn zp*Y?8*H1y`eGHo<6P5(9YX1B2AVr|$>a@}s^=C~aihMH)zHGQHb^2n7*fRyz14|_j zRTtUb+xo$ 
zwfL@=Gj?2X-jScT_w~LN3PJJi2Ok{vXJsywRh$&kC)Ygj>Egt#Hy=EGzQVS0&bvv; z7xyeay?#g6!iQ2^y0;%b?Ej})enG>PIGeL{8=H%dXC7}koc8U`PmQ#ncg1_J zO;fvfx|-SX-o~Gc?&LjuBt5}L{Hl2G)-!FZEDL2iJ3S+cp6`t}v3<2bh$(1K!NVe} zkI%*DS-y=4bh7v@^{l+s?AzOWowB#hj^E?kbFbj@T94Bwy7tApIzQq_;n+-;C5zkn zCtW?%Sm~V0qk3TO`LAtJ!S5zxDYk-CK1}8GQ1YpMU2oHO>{gb@(F$r&+hJz`+!L6z*3+Wqg~coB+XXk5Wai{7bLW35S-qe6-Z?)B$$PPZaURPj@xQnI zs68c`N5ZmNXV#&z3?DNIAI<`U$tN7$C$}oKPjtL)H!<49eL_>;!hieZR0`)!U;Ebj zc6EZ!1_%Fk;TpXzKR-KVy=fZ^tvhA!OwpOSIVJqjbZ`ELGeu_$UQ;(R*F1AZ<<`s) zDXvhzF84G3O5DfP4thU4ahxwj?cVyvgZJ+4)m`4HCH_dUXJ_yc{`c48s`R4tM5E2) zDmvZ@uU~W}^Q^l(-;~6?-!6T=xM}01O+{@9=eyn?jyLfsdCwK7H!bhMAkw_&WlV&Uv4= zJzKpXSSRmD`qz%Ti@NVTbY{PB_|tZlT`gVSUWQd~o;MwN*!+9r%gLpM+6+&17S6HE z_A8mM@%i@FTJGJGOkM5s-O|*K_04V(nfAO>(d$#(zsr{|zSn!Zxz=o2!qGX_hYGh; z9)A9A5ySrW2#KG2`Rj@m0{(oRKHpwwXRA-^N2HNmHUp%csPk{=~AieOS;$o zK&dU_wT0`E8wJ4?JyZAg&Nko-UKT57-E!A@+sA4fslPc+dxVZ}Dz?*Y7nvMa|I^>* zTc+gV3VHn(GWJonAAC5EhYQQP$i7{>=il<{eRnr&%Oy#p4fKM#|M-^x{-zf)>f z6U&$2hJXy;4ZD9lzW%cJ-vo1m_pYAP118wpIqS^){JFi5Y4_BxW#12-GSwBCz*NGy zGNMb2=j%o1Ct_RkOtdf7rf@jUHWT{)^;4eB-Wua6(?V{%n8c;26m#G2V(a#^dAWn*95)tOIs3oSmqtp1nt z{^q2n9}C~7pC}LXJgLd#z2~uWiuB#@z4@;76V@kr2E^u`Jy7-C_RHIhoik?qf7N~d z;zk#pWLie)oI88<8Ws7qXQVDgWCoru1;u)YI&I$GNRG&Sn4hHtoT$WtUm5 z{iqfz4%e&iPp#a#@$#beAMdx``1nj)-|h4Eq{-)=S8XiDvW(G#X}VS8`Va@9g7}IT z(j1%qoIbYTcGH%eCz~%{U$=@mV8)k#pgEZZtpNue-los{!@KYI2ip@qMweu-^t)JD zIh|2__44zDOb12x{I=!yS-B5)eS4dHZ-QkbABVY@)-i@m%@@M!7U`vlT>ScY>-NPZ zC5)RS5A1zgzQ1{1r0Y}TV+WUBKXC9v>~?zw)7z)imo9T&do{q~(`Ik!qYM|n_AWfs z<-xK0`x|GsEF+=GlNUXj-qaMRafQG261V8}jti`dul<-1WW(6MzwkjYqWAQvsFL!{F8+Hzu1^2-=IDQmK5@@SM`e~D?z{ei zNw9C;jgE4)(v8lygVKbK9y`1Jo}}*EE^)z0lY%u?+>xCtbb4{;+5Y-oPNk&#m-qjl zAI#EN5$L$1Dz56^%8anUKR=VIKc!w~w>rsrG5hF#R^mu+|^ zt@&X|&#u+y+nZH<()p@S_zj9H_l{X8-^=*#!9q$%P7oPBmn_F{PjJ)mI zX-=nJStzQuMmm@uO=a|*Z>eqNZ*e&3;Ps^Tf-fA``z3Ch*Bc2K=-xYSJz>#t$)Nr9 z@AEcR-s%uf{ujikId4BlmiW^~=iVPh`zsoQ&h*x&9sRx8*8k&o9WUkZJ^%YGo^B~T ze0KZA-YJrI_b1)7UY?>j_xxAC=!kd7lP=H&Ob>)qKTPHJ*sQJf;d{53)9P!BP4;ti 
zs%e|Oz1MbTkIt7(JZsz!PukQWr2EzV!->1*3-^AsP%8N&A}X@r_`7sY!Q#@YI~O*H zojJD7&u2f|R|ySXv9#Z7TqQTx7^(NTFF(wh>)~l0D3QljkRH9&WBaQM(ZRi2-|uO4 z4L{$sRYjhWdH2?fyjQ)&Cw1j1EDiF#>+wWwWAyC#ogO;9p1z(5oiVbLCJViA@LI4z zq$O~9$}Evsk9m46ckEUuy}7om$oV|a68XPrtoLqI2rS#;v}^LA4=sB+G@3m;7e+X4 zp46mTA=NASa%Sa;6w52WLeDQ+xb2m9<&#gJxDEf`dcFA31>^c3XAhXV%zUt7qwu2l z?WbBh#pND3&pgU`U1x{ctbF138C~Km=e{-Dt10k`@gCXrk?B(0wC!(YlwP}^(O%}9 zbYV_J4@hf>uW|MPzsx8EbP0W z9;;pPe+2zBE(ZGcY>%s+nVJ`SG54eTziOM{lkqu4dn7(j=koaYCp`GYs=sRvN57H! zxc;Nk+#fan&Ra3@iItoa*`;@_Yyb0uhXVd=O$(@5En}>=EBo;-(-n3#84o`EjLCnX zvQrUrLi2#C-ve#&2`u;PDMsI)TQWT}O zOn=WbL(i(RMB@zsIxg(ax`+DmMADaUTDwP7-u5q#QE64knOQwz*0&E;&$$qxYj818 zXu$*F&?OQ*Yg5Zjl|T0>@myY)%oki3XjYQOT(xUgM3VdWz$4y@9fdb^=5~KLzCP$_ zDrdCjzmG4!M(lcPpmdDIg-P(m%zt&g|Pyr1Hj`qO2~^xk8gv-U>(J1D6GnYnh z6clc63HW$yetbuo+R?tw>uc8J#8f|?^kUtv$)*uoAN~DtcsYm1>+9=|Y*d~kM8r*Jf9#9JGtOiVbu|NK5C zw`u=-R?0LVTbE}cG10c@P{f}7e+5|QD(w4z;;zA^ihyfj-;n2`6=yPR+i4=xoB<*X!b$ilf*hT73;WEIkJ=|3y#TsQjE8^L+1<4wM z|2T>#&&XVoIxVon(xYyFHtvLer1_Fbtk}c3tFqz2Bn0e%Y1x#Jp(FfAnPK#jsaOk6+x~9xoqZ_wDd+pS*>8 zn5?EtROudLf4`^FYxmde{2h{!H+Np#_W96U5$Vm(Vwx{)>Ut3^_)TZmqU~DNyh^!?LYD@Sy=ar+=skk1hz_Q$TBFP=Eov$gx% zpZ|~uWZbz7JR=*>Wzr-XHZ*zzx?`t9&hA|j?CjMEYP_4EXHWUqo4eh z6OG^WluI7vwsP)F`sjV+{^tjiPW-4$e)(+I8)Nh9H(8eG?~3~~t0)@VT*}M?u8Inr z2Z~R5eZ0K-xk5W1w|m3V%qZ49R-*H2{xWZC+RsXW8;c&{9DKPq z5)*izOix)fUp2SRYSEQ7v%3RIK2^-+*d#f7(z``lQkea|Xnyz~w}j)u(=?B5OpNOz zf)9S$#<6*uv&dYf6F%ph;_?(<`Q^Dg|9zqXIcd&#Q6QYy`Z65FWL6Az-`T< zE0MSaia^7gZa-;%ZKIyEzCgnBF7ZzWZjOuCBK1FBj%H3mN9s ze`b!cuD275GwpmDclS(j=Caf3E)Q35D8{nAv3)Z^%)sI3wWlx7D6g_DlTcs9pA>SE zBga0hm$`k_!s7OVt5Y{mhzj9pKhG`Cyyol9%n~Nn1)o_A4$V2U$w+&e?LXljDO;wU zOMhmRlx^%ed*)qkSX=S$i3?0GF)nor4rZRUOX`QR?R?cdtC9`YbAnbnGS#2S{vr9M zj9q5$9`*1B$NN%q-h@8lkh*|72P-Q03}fxD_KLJ!Y$_KI+o znj5-A_x!jc^)ZekI6SV@l*gi%_E}4Vr6uaC7GD2#;Z}@$bw68VrRa-y!NwOgCbnjZy|`kv zBZe;|{H^T!%|p}P zF1~Q9O32z=UwT@)uSG`Qj;DPuwmxE>HvLZFdxz*BKkmq~9zL9I{CLHVB|24o9A$O? 
zR^I33koYVX+y19Xscg^dqtZHyyW}@c`{KkBxjl{b@adbX=d-%ol*`LXKKxeNzqyB7 zQQ_!2|CqG*>tt{2tUWEcKEh~|On9uVe$}4+`}x=J|Fb;!S(lLc$Di#|ADw3X*i*mg zgqZ#J8rCI0CZq=%SrmV}lQAvWN2BZR;U_DN4eA~}S+POo*2V5KiiYP@YIwFcs4(4j zE!Zs~{c_SOyAp+6wRZJO8cOW8Jc+?Fz_3q-Nqla@loaE~*Y8&{sJ1@+adv)OV&*I< z{=2(bls|v*@bq`_c2-up`&UPPwM&0A@9x_jaqW|sg4IkG3T%5H!MNG6`PnqSH+DP3 zA8TtZ{Ki_a=oN?C6S0I}P1y@hEd1S(8logW+xO-IZtVq$FC*D{rM2UJ3(s{3% z&FwCJcyF$l%#wyLUv@QT^haE{a+oidku7_(^Xx7q<||%LdNmI1n=60dx!vv=MGN=U zDROicnFczE-8aoG@Yo`twM&vO_Q1K6PEXH^4LO0kmYPc}S~oRpTG-Uk9;b^PmtL&p zz4}q>8JnccjoNkdpE_}Pd|Wg^paj&h3ohz?^j_8OgJoQf(JUWXm*v>n zXCIz(JjOZcx9qvsZ!*MZUz|8=TA%dv8m&7!BftIl^x6M7AMejuvuF3SUcW10tLAmq z<;7-+jX_&qNdN2P7ZCq$GcV7~a9iH3hUVYj-L^meALGH8`6XgnTU=dg$7-kIzRa*$&2+BX%;Sg5u_!9`Aw zrh^@OeD3Tnu-o<~&&_=IY}<D5t68|$SHd=5EhzF?TKaLb=JV$+ zS()!*(ux24V{zgwv2p=Hb?>JU+rHg9$hIf?E>H2p2n%H|lk4*xI{a2HIPUt?>)^ZC z;~G|9`Co_ImmWKlVL3%0BrcgRNaxGXqBP}D1Le#Q0*p7j@Bc7Xi#4ltIMFfR*rLUv z?9I7q`IebW83zI}N3sAFf%k!>gXgpNPD>zcG*<8$<(BFn&Q zYR9zoZQRpm+3wQ)C(-N1i%!s`RJiC%HUOktXIVB)bmv(8MBVM~_&@L-bbj{Jua zUyNI>i<%s~AJP11PR@@%Ry&kCKeP!i?0IqVZjFpuz412Ld+oW$UoVcm>$b#jahvRo zTTxMwpRVz%-}ShBqjqJ@A&V7f*fdN^_*!p>o_*ftw$wpsuA}w!HAxf7?U(tiYw0_9 zqg36`>EMCv*2zgi&fB%0ookqux1w)fUPP!PmxJDoms6N_Z*7&8)7bs1LN4I?!-L$> z-x8gL=el1@iMYI3g6UmD+uXZOIqyQRZ(;xT&$yc-{#vr4jO(Hl-<}AM;x|!B6Ri4G zGn4%ETa_QW2<>Vc(p4E54r$IR5y1=kFW) zQZH++o6D`J(3yQ?_k<(W7aP~TdSGWE!^cq`=$UExiM#vA!WTC-7hCV1liRzux6A0G zQisNM>vLg%@>ch)_R z-B%}HW{VS#tvB9OT*Q8F{z~N=hhFC;ohZ8}eok0eZ%KoQ{+o`iNB_#MT({`(>Gm=^ z;f)F(e`I%6&V8G^!fMNW)m}fY?Vc&|9e4IH6>3;*%fH$2x4QiJJuk-0hL$_n#$Hw{ z7hL4>VCuY?;gIH}v~Gn&=lpy1x1{MvT)B-+;#T#K=IdRa-RqjJCpoAW7fpHd;#sOe zxqPc;uT!(hWXY=C?0e7dEzmw&k$ReO&(7xzyZ31x^s-cucsZqO#l_2)`~LlVxBkuS zetrkL$=XlqtBaM+R9>4?xx7OmJvz8U>(ET$;(sw3jGi&YMt6fZep^Wh1FRf zBuvwEmYv^|mGC|B8Bb@>_JwYLX8pe{kh}3;!bI(|v&|)+a_??2{3doe@|9VaYJR-Q zD~Y0gU5U>>D#xv`3f>l+Q@6*%JmmR~8#a=QWR(@M8SaopA+R{eL5tv!|Dta?#+42 z<>Vr$`{wuWU*{}*Es~zaS38Fk2tM^~U(L6nInJDU%U&Ma|0Odb?oB!T`uS$#7@rxI 
zm5(IeCs&pny7RM}&1%QdsO$Wt2Y)1d_;qV?PP~1I!p?$SKC_JG_D#ETd79wT%-!XA zM>i@jw{A?D?Amv6$?0WtA5H6&6<&Wel>Me}zIf}wh2BC((`8oieDGnKbatA7K$q^d z_`et1SFE+vGxL}H*k@GuU@`xFXXn_rn#D=^r?lOjTV{8EbY;54tg}#fow4CLm18r` zhMn1yb?!j1$lNpb&8#XL_wQKJ5*c)veb=L-&$_}VPF9^0@1!Da^T__bIH$++qVq|A zD;P5`x0PD>xQFD-IsY~3`H65W4JN%(Ch@%uBEOmTRUeMIQ~XxxiHM)e{fn|a)>#h! zmYgh+e_6O?7iVLg{TVIOUDZ(=Qvd5s2@5;4A~5(0r}E`l4J^%qUwM5qa$mc!9~HWo zWH+BrWnyj0R;fhe$cdAtFi$lV-1~}y>vY%0d)ez3B;PLU(e`UTDdkq4%`7x&jmBIP zLAk|}cD6@XY+$U*pAdUhhT-T!yJ@F(N_@(_puYIr>f`(O{gZojH!;F^)5ABpi#SZb z@c+K?T;^Q(!xOWtYM*KDbYJvm)vphV(IESmB9 zWz02!b6VQ0CVTHm=)NtslGSm2b2z}#&v~u-yuC+`sI2%|^XuFsM$;=h#b4Vgs&JOe z?XOAe-T9;TJMYh&j^F+%m8)i~znHoBPwW%*Bd*xC%1&U>i(`tHX7p6pQ}id|&)G@U zhL)EZ(!EW4j{f|xA!p6@!>yCA_8P3#Wche}rQJa$UgN4OrwmS?i<)q9jk0a)66Mg) z4Hp_kcv>!8&3DiB=62p9dgAoCmPwC-XPNR!<~)@<=d;*AwCSk)k#pDMTqhm#{~nwd z;&5rj%*j*Y8xowfN>1wZE@t);4?BF~z}tq+(rbBn&+xozZT{(_mu>fNOQ!fYv+aQ~ zFWlB)B5vI)oShJ?P|WSy=;}%Sv+~t7>&sR}`?0O{ZBS9*tg&R-7sfQnAz@MWlZ&s{A4|61IlJ~? zEXR&N0TMbTHY}0%E}uIW7S}V+f;V!p_JemBZTr{rKARx?_IUrowd>jxuhqTS!gj%= zg>ScT-BPZXmXy&J^k zzM!%3R#dds7cJ!=$s>1eUTbJ*Ug~XUA9d@^3vQ{xORJB{+sAeO(!1w=;P2C#X7Pfn zU%v26nG&j?Y&=`>W*Xa!*KA?pj3@Qln5ygY<2&9fp+vQ^$WaeZ5qkK zvB#udy}q&XbZdbI&)T^2e^#nYnbzM`R@1$9{}jbrvFVxl^0U>kRW9J#Kj8;QO)l#a ziADS4j700p@AE8K{v`2to79`T>FzV;-ZIxa zdGmMK)r*Z+$iMvAdO}+}S)g1Zu3CLT*g|aq7in2f)sja&MmI0`2(CDJ@08usIFTn$ z7uI}A{a|EzTR8CG`VGdvT{{;XZvM5n*COOcL+#GS;ujcDp#}-rn(?!IHVfWT;jH0ZrFBUSdT(_1vk?p6|O~`G+4frap0>c(m=#oD!3-+Z5mME6jhI=QD-PgeM?u!|z$=n41lo&feyK zICX3Lhil9BelSjm;@j|eu3#lAE6+W-%Bbmj2i;5TUcY>oWEsP}iciiW`NgIboI7*l zbsN2%9J<^irp@jC^v0{_ob1W6kBra0ZCSj3Ij>tON9)T>iGyKp`p(IvEKgU`)MRXb z-RJBmc=GJdsLgU!UG{1999(O;az)IyoZ>ij+I!>Y8-h!MeP)$EotjZ{@#&FiZ_d?; zhlR@@-97tz(&>f5AGa%RZ&z4wSyDsmr65n+qPgeWW+;_=;@EF@z}4fyB;g53Jy-wR zSEn;=58inEV|~+>$8hJF@5J?ZQHS8jLqzuIsAbopE*>AK&+)9UvJUVDAdV-Kb!Ad!T6r49Ev zJbni+-n34|fe8x|P-W!u2)vXMg+WP*v!jqd_))fs1JiHj#y>wlpU!-G+Iy+jRF(XDhf_~Z zI{NA86wOVUmzVur_-o?zj~&W=6Y8Iye!g_~=k@zj4!7|dR*Gnac<|Z0P&i-zcUP(5 
z!!IW!nQ~^&d3Y)WQTistD@<70`e1*Kk;Tg=lQ-{+{k`}9`jVMODKmWAw(PDyS6=>P zqI=rk?+?#K2Jic0wEA^&f4Znn#Ds72c&qn6zgN9EY<=YG@8Zke@7Nx!UUY`defRl_ zCEnumpLgYlwHC$f`y_3DPPY7>sMK-4$?r0LsciE#k;&|T;vv0&_X1l6^A^Ti2lk~g zSEsT~>e9coG&?+Xmd6OY%qyv+CO{Fe%iK1v@x zX|F$}dUn>&P^tefEq3lI&Hg{L?+1%q{UguJ%iD4@TBgTW1W7{a8j{d?$yulFaNgL{JG$4SolKFmT{Z!7oV9*oD&$J=nXf^ zx2}ee$7hUFA3RX3`@7rr@0L9m{#O34%(wEhc*v4@?~#$^`#qnRcs_P3>~><*)SFeS z=8%5YW3gNB(?{NV6VrFwDhDqQdT{&lvB(`2o%>@rGLJ}ac%S)e<}bNllQrXOb=hSa z)|FlV`q|(9b%+zygnnyb(KPrshsQePE_6)KwE0IG-pr z#>~IAo@vtlH4NUCdHWxqi_vfX)O+&tA-7(pr(E61f9|N}gsu+j_B?;&sqM7VJOjhi zEd}|vwn*MqKk_U%xBWkFh5d)z8k;hm%4G*W?|QwSzhJSf+ja339uGJBrgBO@TAua3 zXL_9V#@tJf-G3|F-(tvZFmnLM*|$XL6Mv803};=EHr?zyyNZCUkf{{wk~`aHySxP5>_s2K=JVmR{ ziD)kRy>D@z^0u@4Zkq*eH@%!3eHoH27;+Q#Z*IJ^-hg$%g)bnWz=kqj9$4-(zUDh`|q^dZ+7QSe)aumMUFF)b06Jsc-OqJVao3_k4@^_ z_%k#WE}r|g?e6#b=d$q*zp8G1>G^lBXtpVm5x4JaeqO8#O`Ht53Y&8h=HGRRo_@yT z?5wR=y`MK3t4&+b=7+e^8zq?PmFLZVI$tRwZZT45p&Paazj88Uy(y7o@ z9uE(-K7D?6_STxsPaYd4Z>qf%^I1&yNzC0dzn4t*o2J}n;WRDtZ`zp|dh_eQ%{(*H zSUq6d8=EI4(K#|T$-ln^Eh~|<+~*KcLi3g6&OIAFK-W`_vf9mpGrJwK0^GG~+nr~D0 z`)|3VQN@Hxp1t8~A_D!a*Y1%wtNCGYb5m;ff0<_%U#|o&joSJu?!x=q_1{m4is{$? 
z`%`pw>-Ce8Q?;{g{if{S!pHqgf5OYl%enn5e!tuO{!rZKKX1N8Z%XN`|21Dvd-69~ ztE`#+lS^M;_nf_YU6<$M9gq7aM{RxWaW=|yY0y%yq6dxaCvR=dPWia#=%v-qUvH}W z`)fw|1IFLGpG;a77oqh=;Ny(szKPO#IqIpWr(WHrxvbcK)0VjVr{>%&W>HU4&5GZX zf8AL9H^URrvPSh=D~@O`=h*T0(`o%rZ(m(CDtzQpS%2Vv%JDwg#PyzwkM%x1!Q@_* zD4Vy-vunD>!=3;CB=5GV_|M!}?$#rrXaE0C@d5pt%sI=wuC5B&{w^{r`|6U0Z^J{?>d(*5lT;tJ%CA~JdF#$yYv&XmlWh9OQ!DHQ7 z@86Pl`Pl2|Zc*#Ne}6vn>;C4FG_2Xd^ZUi(kgjLPEuTzq-m@hubARxgBT19Y-#`5H z_tw^X|HZ$)yn8$S(f$wHmAn4w&HKBva^~}9J}VEu>vp-*?}l7Av-dR1mjAuBZn8X| zq*n5=9<#v7_lj5L9x`g18+U8ZPMHP&4{uF{{cR z@zrpHym8r_^Oeuzt(UreU3DpjUGlpn^Op-OSA^fs3CY=>AS(9$e))g*cROC|*}r($ zE*J7}@%H~R&m;`9wwQYuCr;I#rzdTeW6*!^@5bZvFE~EGI_dcV8^-$24AFIOr-nc2 zJT7k>*SPy`oj_Q0%q^dJHLo&vda)mUc59WB!%m~6XBU^h|8qjQXfKyl=Q>TT32}Fq zmG%@oxb<6A+;pO(R#|JT!0dZ3?z(4PU;BFdL}~s~ncp9eUs_me?axzi_+t(@eVe}* zQuUj-{a)Q|;m|tL*y;WH(Hn#KLLaJ3eck$e{^Y!@-z(#GUEKer zVsCiNZAYu@keZyEi*~LF+o*kSsj0c0s`}->Lv*ACq*8Bu5DfBU z4TyPB{rhWeaLIg~c!T2jU(QEYyk3|2nzJBp#$TR?ANGE?pRmaF@P7XeSMj)leO$Aq zpIE#7{=>bqx*D$h-T6G^f~>}3dr9f-`F}qi_bn=YeT}KwyX@}&1IRf^e|Ig~AN~FB_p(WxcQ?FDRCe?D$He;R*zd2`kgQa@9ni0QusW9H>$y+wY_ z#gmyTel8V%x>9|fhtzqO4ljqQkpEKu;{Q&6Gi$YRyVT1E>i4dCvQC(tw^KE);D9{y zp3hQq78~=2vHh8ByMy_&#*L(F7bRq8s7fuk@wDpw-SQ`L*=^!!tUmKCqq$^*PxP1V zAy9Vo$CRXlKYj*BVFV>0M;c+gvVB7rl zuMfnZnywT5kpEryt2J$#UtRgOIC0ydgumYPu5&+5JoUAATlU^vr76$P&24;9_S?~6 zmTB>_E$?p_3LF=k5U{SXneBJ;Uxy$#Y!<=N6iI z-G1|8G54)|g?A-*%=Ud~x-iqf%+_#0{~^D}Vo%;PoLTeYci8`yMGbS=ToTKdg_p4` zdGjTl-|ox(8D-l}C+${T5mQj-9J#4t~>7o zRoJ`4Ns48qa%ot`rq_#(_1etl?Pf2mII<#8D@RsJ?9Fe-H!r?Fn|&!TeeokNSBEMe zSK}KYo9wtAeR{f9Z?gU}pO2q6%f496#wVw9b=CC^dmrx#+@`IU|MdJK*CQIgcJGk1 zV&s)Jn-aJ8{+HjX>K4xgjq~qc{daS3(Wcx-oF4Dwd$+yF*}H3Nc51fT>}YOiY<%*3vXA~s*gH-?8nd)s_n47p@x$X)t(Q-o+?IR0C;X80wAmpm zH)XR0osHVcRR8kL&Bg8y?y_<}JH4Qh+19??p?Ci}`9;j7Ja%rqQVUO?v77G9XL)0` z;nHe5)=Qn{4Tp-hn*ZJ_+F|W9-8XU9zg_RIdxplYE9&l2VePXz)j!#1!YAb@zUl-y z(cAj_=e^QZG`^X0ZK;-R)t+TeGqQT`_}|!b`d{6M4F_zhGVQ*1P}nt9Ip{htN~ z>0@7SyniQj|8UW)qXrGqPb{u-L03K40N6zHE+>=W8Jb}SK(o``6IzJ 
zhlS^7+1Kw2SrOoPE3Vl0z_x_Vysxf13pH{tIwd2T^Y$O_*FPn9EPlS(+*x!Zv2tl2 z*Q&4Q?T%OTZ_7*jxKv5upqQf1LuK0i>^g}y|V25*WVUPS~rDVNZs)5fBUnN+bx_P-^*U9Y`;@AJKna&Uf{}RQ{f6?%qm-LEp=u(wvF zNsnIUOwx%}6ZP;<_dRmNJLKDno3GBycc{D|XI0RtIr&}ml&w`~qwMu=UDjRB1iCZ4c&fior;aU6c%$XHC%yVz)^IagUD2?#wE^7U(W3+97_e&LjAH z3+EBz%tv4JTOQwUa_!z+(i!QqC+TQc>e?S^Q|0}q7qC9s@on8{Q|5;!yjz9;*FG^V z{&q)a?nDt`{E!iysYXytw+;- zhRfE@Z>w#qSUxq|&irOt{zWm~Z|<|>v-cMS`pC?-J{7wyX6FnG5B|4_uiAHaEjIiZ z;=pypaZh;MYe8ZD`!DwVg9Q)WKKwbdvdIM)BfVi$2R|K zR0(L4zP+_yllf6;{X|BK_665cD;h1zYIYTWHw#vWlNvLkr_h>j`FUtEKO-wQ$Zoy4l^{}Q^Xr}|c( z7D|4!>#NUoKi_+a<#+dM2h210c(m^Qt8KZ=ALHwuUY+qtxh{lh(mUqI&!?}*dBJ@! zC_O*LF=qQ|9&?r>uHMrs|Ch{PHKV}VZeQwUx7b~FV#3!&CzOaK`N&M3cYAvqXVA(H zAGN&(jR#w9mWMlB`up_JG(n|Fx&@2>l|>cozqfh%d6t}YdsdbD$o!W5V8%0XTV8{l zu>Gok=gwW<@&2EGzy$u&_m5otyL!{FFK@*^yGyk^FS&h#N$6kwl|1&E@5|@cRq>by z=iMv*Z!R-)vL$B3e5v^^F7&VA$QhQ;&b4=baB@CAW?WG|L0Dhz&t1l^Q}p-y%y?dW zzWn>?cE-K4%~;rj>;62w%UeGGBdh9+d>!ZQLLL; z^qB8iUd@?98(n9A6t+LTDq>~OgS{?J9iHdUrTeq0Ecv>h@%``0q&I!2W*g(U3x zK3#p<(O9+6*SFHZ`Sc#~_j%`?ir9bU{Z5=9r7!#Ac5_4rd*`EL+{JJ2c2ueDlm7Sb zfTQQy?IUKH^&U6XVto0w{NCr!PV)pFJi4l? 
z_5Wo2n|Hy7Z(SCh5Hq{{bYeHt|JohJ3;wM8dqg3i$$u-)I5cr2?U#SH?RV&s zw_M`eR*0=X`?kdJ@$o5w<+f7|mhl9BEic=5j5pD8LOoYm?mg4*cQT}v6%volwBD_s z(>7gG`u)!TzkAP|oC6&J$zcAym-mTXgRS6%FBvap$lu%c@z!ZpgWJdXysysfYY_P+ zUS)khI&ZJ(2j_cy&9~*ABquy;+kL)j(X{z;bp^X0cFw!oSZS4WmywH2y7QCU3|&6k zFZbJ>A3xUr{p~>dyFEYu8MEajp5Ab$=I!^(f)flHHeTMfs&w`Yqub4yXR0=wZkk#A z?e~NH_V%5Mvu2jZI9|!M*J<_51aDtcbMEi`;uODUTNt6EIXCv_Jdoem zxlL+m$fdJW%yVzCq`z2d|BO}o+M{1Tm(O3aCvVBzyWXrSl^>r@|9ibl^k(g)>FiPOI*^#1L#wMrBAR~ni1#jaORRNYmg=AUr+ z`lnqNKcC-qvqEb+pUu;GzpZ`-UQT>HkEfNlWOA(Q*CS@dz5h$*l;>=#-1go=Xiw2p zt!>kv9}|4~(a&R(@@s}8^?O3i&E!_iUjP2(QfBG@FY?PD7sc*V^Pax`i}{zM&#qYT z-Hx&9c695JOO{K$t$#b^^RxMXo*NwhcwGGB<-=^3mjB&5(frY_x7YW)RQ>ezdVIU; z1n!~CwCOS zwzF7tsOA0tQ{MU~C;QvHv)}#I=eXZrm&X=%lGc8wrrf@oo*?{C=vS@!?vlz0cj}(U z)@^BfZZ@Iyn%i5B0LGK|zx@2YHG65*)rjhE!X6I}cvclZ)Blir!R2V%h1q6#la6+a zoqBR|@|T9jg`29rW(m*v(C2$Nbj3%r{Oe1XEMoFZU@rXqbOZ0sP=`esbCLAp^VfJk3CGG>-BaQJQ~RY_Yj%eH zy{+AQzP(!AIZ0%W(vPWmOY+v0|N1`t_q)BvtFP2HerY*)*Zq$D|2LZ_opSqQ_4vyD zBUKCKj$HU~=fh=}%WvQN%)DE@r{rSV>95jrtx8uV7?;m^CQU98^D6`lw5?=_z_iDmTspY?vOPwSooZ>|U(P2D~3N(;C8{=FYA@m}0WT*l|FQn&TlVv-rkwI1FBYeu<+(?Y$-|Jiv{qL-nea?pijzzz7 zxBoa_W_x*Su>C~~zUVszth+7NAKCKz>-8;D`|bZ(2-s>da&|g#)WrKYny3e{8EukN z2->p$qg&+8qNPuF-(D%5e!5#LP5NL?xkpX75o6}mE|t=(t3C4@+HUm3GM?Nz|MICR z({!~8RCb-aDE#r)*ZSMtle!v8a&NCr&FPr%>RIA<>G>Jeam(L!y>8;xn-W)*wP@$N zP{xmU%gdhT|2g!|YO;5q-L3`VEy71ug|3tl^Ywq{y?#HpZ;0*J2b`7p2N*P$f1CW` zP-pZ1oxj&fxb3`X_F{j}j*RJoO6C>0cXnL-wrn<1w?9E&@P|8xhsC;|OOg)m_t~}Q z+qQ-$=S~!yo-WJC*__JLv8ni~)n>O#k=|;y)d}&N6V*cD@&Kv{C>N=>`K6PTLGVNheJHQALrM;dtQA0?*D1u@6}$f z-uC}ce`L`N&E)uR4&whZqPG;Tx?Uy2TlZ{cUCHjh4_=;2`>brmC!^l4e5*9FqkPLA z%Vl>v)v~XxnHXid>h!s})_)hIFipy4U&+D1$|W+Pl3V;|CC`uh)kWW9cc*{30cySc z^?77b5;@=gw_+$`)bu+igYWM8skv?6C$5v{Wa4pO4rJomD?SKRoF66uQw%-jKw)D(@clAkk-He5mXk1#gT34iv=1>Mv!)!AF}&7tUfOX20?FYUE5 ze*C}lA;kYw(f!+#(`9G;e(EuOZS^UBzsanLpgX?7?^X?tKd-wJ6CG%g|?tUw-fA>=Asg%Uc8P&{H z!G2E?_b+D(TEMq%@8cyFpY`5}YKQUc$zPs&=iRXv+wT{qKbZUGn(ZEKbI%sP&{{m9!@vCZGs 
zXB)fFw>Mnghrb0gp43m?|L-sJ$Ggdogy&{tUfS||UWnO6x0=s+dv^a`o0KmnEUxZ$ ztLO3C`)QA6@7}BV1=K`S#Pox*HmI?nP^x-#S*{ zIy0bbj!MR#0{?A})ti19+1$Ec@WgW-q9R_OtMFahpYi4I8ICP`JxewT-@WuVVX@4N z-OKwQYWyfU-F~pQ_Lg9abgZ3_M8FN^-4#D?GEdN8QIYA*yL7}MCSAK?`3Xt;JEi%n zX8cM{PO?6C`R=ZxM>@mX4gSB`79SzCZvO7d4?8%8nNxk#6kZzbbC5l3CRP5q#Qa{_ zZH?VuecT;h-F<%6;b-v8lpA*|q`y75X1JVH#oC_3@!Q6)>nzq(v$4LKkrSNx`^C38 zViTgaWr=Fc-dnjf*8O&G?~b$OQ@a|TO#2qs>dSRg#HClx{$8{=cfxbg@VJ-HUv{0o(O~=Lw^8a7 z+wb+qCtA)#nzh>6cxU|&dsl~|e$nmrlTw(oto81kOjCHcqO5vB7de>~P8P*b4o;pM_ z+ey5bu6a?XC$jkY&yNNYoBkh?pJx6>`%Q@ck+XTfLsy6Cax*!6d~{UUXoh9=x_CF4 ziFwntXYNfF$dSK1@05<=ZrhLR^fI}h%)DE*Oj%*4`XSrje?B*Ex}o#Ht@iCrn;Ef> zj~y%yVG`O`dSjOD3BJ?o-OtaflRoq>ciAo%ErDg#?^>owzC85p&9@^ursnp)MYg$Y z%fIJs!hG_r`=qlmuKkdode5r;#h;HEzxw?{*gs8Gn7h<6VZoelIw$XBNZ(i+T^FsuayaGwlHYdo zi~nzzMd|5qOP~1b^=>tjP#@>^`ZvkPJooe~+-$g2t3R`$MOC`&Uh?tY{@LbitP&>@ z8rmBjH#ts>p01qGS5CI}KKqCCh&Yuzosn@mt=!XFn(E#WEEJ3rV}-i4RN zi%z&BsTL6?mOsr~i#hAJvzMBJ;yPPpX8Tzi^}B7G843-0<(qSVyPLW&S+7rdw}SoI z48@e`?95BOXPNbCPR?_B&EFy@-<(uEjW;dw#%3#p1qlLa*V&m%t@U|I-WJ^EZ9I9z zb=Qp4M|=<7{8+ieaD{26Ufi}j<;u0y8{REzJ-EwuhKm%a10GyvuwnXpMLD)@WmPvb z?cbbCIn`agXaA$!#s&APe!q2?xAI1{tUC*a|74S+M31s$$8J4E5aWy z3r_g(Z*lBywg|^HoYEz`lM-gkRkHol?c{K%NNG2({cID=^>=;udC;OVM{i5c&7iaY%Ps%? 
zc%1Ba-l^j2s?bfPXEySk0rhz*Q~O_)tY&;)udwyim5-GN8W+#J{;;CbHgdVo%}s{t z|My(ozTT$b|GFXtyNagOJM&&$h-<6;GO_Z@`&|cD9Y0vHW%t7>Nz0vhN)d!axU6Dcy9M1N2zj& z_w|PF`qA4@9ctx%vf=QjQ=Lb2tqbURq?7e{fNQ@Bd$~ z*BgcPtH*ClYF*)=ZuQ&OYt`GE;gj9OPrZrUT(dsp?cY+{hpPi8tNp7kc<|!W&f;e$ z7%M+{{QUHkIk){oqI|7}dFcy=^arQ>Y(FmI@+d02yev*?<-8?n=_?*}9@y4r`_AIN z^tVkxmS-<7z7fhYrReD?&wYP>{w=V0bW`(Ch{L2`>GPSF*X%D_`+aQ{|II(=7brR} zsp`F}{_)}NxfX?cbYeCHTsWpxl3mg3aR1|>&*we2r_cLXeChAblv_QKjJI9?8%axU}?tNF1?-wroJ5g`l+x`FbRK4F=O+5Sg!v7TwDpR%ITTQaN8+^lO z)-DmfV`Aq2e=OFC*l}TXwB=j(Gv-Y;y?52DTX~mX%DwevaelRhZsE5zGhfWInO)L9 z!9PUaHpelp=4Wbf%KLq#r^WgoFY}c)-OF|FW$8E1@)z8z!|aql_+9xMtMGBZe93+7 z$_ESEnH+zJE%*Bywe9bx?tk@unwbUfb~axABXI9U(`V-Q4-}W*V#@@tqj@t$_}`9@ zJCl;s=WTiI`2Tmsb-howRXHn_w)qfU0djGm)|DBrGYwNbSxSPL@|DO70`QyI_jVwaeN13|0 zT=s2Ey(Aw0>*JpzS0{EAq#fUWH}O!&Yi+&jjDKCMqgMRxvwlCR{{M6Pg^BCbTc&r< zGWo_LVXd?K?ec2*lPe78v$&{O+g|^h@%-GS)z8;GzUUjV>KmV+n>p9CHu0E*INyrn z6YmQ2m%Y0qDXhQyt53Lt6QjZ=9%;j%u6YGcK9w>29p3z_7=DHAugjb8UuJ519;R z%elA5((FUNe8<7L@^6K1ZQr3^ySr*?%#|G*xguu&Sup3t`#(oy-LD^)o4)OO-0L50 z_D*Y7M=vkSxLdp`H(-(rQ&6O6uCKMS-oFLS_Oo(sY+&q1SyWpz(;;Eiu4i)}pSR*x zG|;_kyH7gxz$I4WrE;fZCr#x(&EfMWMS6pp_v>fTd7Y6-&0F5~CT;w4;7R-L>NPuO zx9@G?_1=D`;CCeZ>&W+i^Bod)PM<4Yk&x+gqcgtD>f;^DlVZmYCI6S%y=R;B@2(xv zjL9=+-F(biD6GqAR9f_1sc?GC%nGd&sW-jP`a}k8)STQ_UH<+3!AZvEev8s|z8=y{ zU#9a;_UX}2+bT}K-E_N7gFDAHoxSze)|2wy%5N`Ee`A$${oY2E6ZcGRK1qCcBl!5; z^rPqa-@UnJyvmM2dQS4riMyv3K_C3}nd8X>L7mD+$jpY3*H(%apUcc?Pn|0sYd4GpTCF z)Z>RY#lF!``_8#+%?IawFN`B1 zy6&&(@9*sTa(vf|hB&XOTDm_!J)Nv;|6j+u)KYqy_uqxy{@PX0WVnbs%DTDF!sUNdsj5)qvUk8QcP)w)YK?lJRkPSNdWvqe-|y{R_crP2WXEi*ndvjbAaVZ7=n#cTudZgFC~WP&ymXp- z^)}W zWdqA4bq7Yx+BTm?q|pi3*po*Ci_t6w)+If8abbPf!?4Zs! 
zvFv+9w_cLhzELe6Am0zzQg15 z$^}1;G}ZmO@cC@LVs(1)rs{8ZHh;fYtiSntdETboCsls?8_o1lJ1OQe< zZ?}rqem-k%Qu!$*YG;z`#DvYS*X<74`RS?ct^I%g82|lvT)wen+nplsTiNgT?oF@H zzu}O5eI4%}{jS+}%Wn5>(+xkT{P+6&`b+-ye?vr0s<>@dG4Rs)?ByXCz~Uv!|pDc74{>)!w3Pvu2IiUwRAzz)B2@Vd#_lVCO%pcvRr$;SD}0Ss{QV_mwZxL%X~DP-OujllPi}}PY1`> z|BiiNUUX%J(qm=kHoni7rHspBqO7L!S)HiwuerJ|)>_x+QfbPYADnfw*45R|Yv&dJP3x8K*I(fyz^}g6|6}2%pS|7c^5u6vt~}`6wydr9heqimPj$eTem+$@EUR)=-GNB< zuB%KdosavK-1&IC(Dr^sY3&zk-`#51bYhK0fy4*6jS9aTgD9_&vmuly!DjX?Y!6N)rbH8bKn2x z7kBN(AKbg1zuSEy_Raoa|9{E{xKwui>G~Gu_W%3!_>i9qynfElJe#;tIbaRflHKoj zPPd9Jo%(;G_B8$FkNYy8+;^{x*zL#idmZM*mXmxc}__uzmLH`ue|~ z^L_uG**(`dozM2$3*~2_i`HpPC~7yp+_26-Gw0pgg$6U8M$1g){Iu4^%gjFD&78Wb zVynqhW&O*lo0j=%&rz}y-oNw7CGVA|mHXN3zTI429dP})Jim?hSuPcw$JOsz#aA4a zOlK2GZRFpQ_pHdi>h0C=i$!vuUrzlGDt-T-)`%~9VdDGfjOxER-)DH}#%;P{-rF4c zZ{_lNV%l@tciG5vum}HMbYk%(pM7__nJ>G>>u_G_x3<2&^y^}dTLpzvmM#md$^EeN zdHjdlXWvwQHs5Ww`MlllANTwJahy)*Uj6T+YRUZMqR$9U$@%BM7PiJ~d6=i<9nLa-_!){imx_rfhM#g!)CH(e(K5Y5&M954> z{M6IiUB>UuS-+n%skY^a>128DBaQxr&%!Tj$?#uL&1pTHYrH1^D7VOr?I|WUzh_Te z_@S+M<<$6k;g03ivLg9ApE@6Qcz*Zu(&>Bna$Y^Z9Q5{f`G?njm($J$O1zG~ZOxLBpA*v0I^SHs{J30o&a1QA*_NsK&Ybi})qSN^>Fa5;c6~3Jzt1lIu8gS1 zjm1vi+}Y)83N}uit2LqMIqyuilAE9IpWSPl(j9Ks9zIjy`?sm*pU(EZr_!(Q#VYiv zdl z|2fdImbrUv|Jh}I88?{f7IH0N(%BN{k|g+ZlZVu=<&)LeX9$?t_GvBI_m`dJ(D#?O z^^IyobZf)^Je|w(?8TSx+RwQQ`RmuuMqVp+_%_7Yr?x7n~N8$ zXIVBqzR-2an#-$7UvIJ48~;vNQk}0(YFWn5R;>wLV)ubvDvIP=@;90*A;ZIb;H zyC5mN{^XzO>tm)q+G{l@G4|uv$dA+WFU@!y!hV~Hx7110W9j~!$?LR_danP-sm@X{ z`C;%X2QAyLm%a-Ibu#}+~rH3v^uR4ex>bL<#1{r-=S3}rXAGZ z_heGU;|<#DcUdW@H=ZlIohz=L=XpiccXAL&QEyJ)*JTmr7tG$HnxLEQ@_froSBJM>N=oIOTYBy0z3MrO3`$N2 zoZo+1Z+F0Q?e$BlzP`F#_xruSec`>d^YdI6yY(&U6jpD$bov?VCA()ck}cl*uB>bR zHN|V`50NFUTORjWUs}0*{=12Xp4~YAVOdMOUZhj~zn|*0f4^S8R~%R6)}=EujbB{m6wY{e=fSaiMsYLbw|u*oo&Wx8+kW#STIEwd2fE8XJ^J(U|9}1W z>fgtz|E$TD-*jeLLFFvrWm#8N94vjSW%cEPbLQ7sSFfz|m45!+#rDXu-*5HywY<8v z*4yTUO3tF6r}g)LSlRzMsC89vz0t_&E2zY=ka;f7tH_v>#S65cr*;~JQvH0bs(|Vs@CH(yKbgz}g-2H_YZZp5!|NrlL 
z`TG?ILe^EA|L^zz_v61)y`J;g&dbYc-+erPzuMh< zcXQs~!u@!6+F7Z*=+hYv`~Pie@YnZ#^yshh3f1|0I(HR4XcWy@y>{EDOEZkC`Fj7n zSnRa4`A^u|s*dS!jcg)$o zEPwysW7bEPfA`+IXV=q3&F?#^gJe(frDm*K>b-x-dHer!CS~w1;M~WmqxC;KdH-+e z&rR#Y*Dm____(oreYaW8jf6>MyAt2-HhX+=qW=clj@t+PYHnRzHFfnKyJg|$#WVl@ z`sx?eT6yC4==pW6;UKGwUmv_`(nGwz@0>B#wp^@mzN z?R+*Xdqvb&sg~f`Ve2AJ{>fW*T~1;8&Fijbq|c_FI)0~p=Nf?pZ@1sSS8ZE+tR(8w zQSo{4&pc1DO!DhCKKDR)N!d5Ky}#dC$IWjjyR`B0)H{BD3z&DFyHomHHcs+X#fRs& z=Bw#uw_bc)d(h_QTA}$%Le|Ap{`xjMKf~hmw)OkJY5o0t-oEjC)}<{gujhxY3Ak8o zSN_h;j$2aW-BfP>38~Y(ZJqX7w;gAER<$SE{P9vp9q+|{v$eg_j1qgK+wY$!let!Z z?5>M+(V3mk=ekFKkk`|kJljUnT>K7iufg`$!u}Upb8r1xzdvHj48NRfNA{n8!r0GN z_N?m7#P*jzpU?Z8H~C$7s_dk{B{DmOHktTw9hcZ-^||z9aXPrGd0d{6I+gkk5#(87}|{s-gq3ll9&6&jsq5}LVr%zp7B3N zlH2*&i&M|%)wapin^gR2k~e5x?sj(8ms6|Pv-U0AzIe_5TwB{82manuxLTL~?&1Tv znibq{&oocp!0jce@mj9x17m;SjfW}hotHbbj4wZ*$yT-Ev~peK%|e5K7X?Q}H_luw zEB`%*%bUO8-eNt2P0l-fVw!%GUXS9h0HYJX{#zPh2PeRPpb zNYk0twdc;CsA1=q^a`+>wc1Zt!?KBMeNB%1rZWZ)GAiF$s@zuC_dGo8*OfK<0uLtc z)1Q2Lv-*{FcaPTHE@}NgSu}TUWx@nc!_%kJU#h&5SiH`HIhR9!{hn`CDbmLS_ls}K zd#c$R`|{+jk1E_H%)h=XPx*Urs`l=t%kgUx_Wrr#eTmI}&rH>+$*#e3R{Ou7{^H;7 zcib$ty;FDER)6Jt%Fe-aG-CC%=NGl)9K@eBJ$iBTdHe^KIV*l-oWCAl?YpAspy>DJ zgU&1Dv{yWi`5eJn^K0eu3df}aIj^!Mc&B_no^1Cqe*T>`nz~V2PV8@YpQiq6ncA8& zlcqm8T2XgyX8JF$FiE{W`P6HVXQn&<6m3aa^8Z}*`^$-2vno}LufA^oxn<$DJLi1l z^PV%;p6xzw@8AC^LGjhD|BdV~y7*)lH(f}XC$M8j?CmXfYI<%H%+>tnd0D^NQFw^G zlUMeWV(d|_$uYN?*<@!3OlDU3oqS>Xw-@&g&;GOF{>u)*ozcOKV*J&0k@<$yecj_{JbrpNR^UKV`x?wp;u)dGwcSWo5VVoSTk! 
z518BCU0nUZHhQt~eZA+l&1ZuDPS9%n{>?Y^*t5wWh5sC%-y`WeJ!jIxlzB^h9Z8bYgKgl$)q#(e_llU z+ije={K?(qe%lO=`57?_lT@u|l-rk{64~|TQJEzB>YblhBGYzWS)9FY@3eO>YNmgV ze$$>{yW<8q%A?9Dwh4L(2MlH;lU^YQqz zYsH4qU(Qv(4{UC8XPei4cY$F{g=2Pon@`(hpJOIbKkt3>X1&C{#`nC6#_f-V{A+XH z+x>NFEwDbP94nWk_O0pfjCH$yWxcA|zFXLf`wYL`6Ujo8jNg-FRa&HYEH;+jxU%INZ;Kuz{b{Ch%bTVLI`HIa;U4f*H78chl=Sm&H9@R5kgJa1i^;eR-?V4MD#KeZno z3e+3q-fOOR@Z5V;EMcy+K2O^@yU#q^KFm&(V~x3YX6mo6^+)v=AMk$7wq@OO$>02T z75A%zLo6O-Bxsy0dm6dI=ep!s|L+@29~Ma(+_t%Wp=9TQS671lzUeZ>B*vUKNdEWk zxBQVq+#eM~uG=qK*FEKPq#2jW$?HXeCH|@RGNMycr7}F?diV=@!{*3m*zWnTsCz~B zcJ;N(Uv3YOIGAwwf(5!`NUF_qAM>>c&ZDFy_=oC$8URNSXG5yr6t?H4VQ9m zy>Z#owdYNzdE7L=<#Wnr$uGC$Pg|r@nR}YU$7w-M;YpjDMva~hd`}koFrGKR=A+gV zqM&+n{;6HFTh^prmOkHQ8e!rd`y&3eOkDS}vvW?0C9!fbh0oEIgmygpR~Qm1Qi zbE!l#c5{At+?Oe`LCcS;JbdP?^ap~$cKRD`G#NcG2#ZN6{>XV}&#DECj)Ix-p5Iq* z;yXG~JJHg*=7>Av(v5T7EB^~dzc0@Rqv%AYYL8oiuduitTd?)65m`JkPJ$b=dXw{{OV@6|>Eco_W*K8}EB&$+57bipyu&|*|keTWw@5qe!Dx}ZoZkaK|qt}p#$6RJxR^h`S9ttcBQ6&>9&AL*MB~rE1hs8 zp<(i}I<@8tE^$t#B`*Tvo*5o8u5O?CPgm#1%$v3>d4;!D&g>M zzlZtnUuK!NamUYPy~c`a{}bP(PLKEFFRQ#!-}{UE>4gdW_FtwozB|8JFM_K`E;&ua zA>hN+v~JyMO%Ar!vwk`f3=3@?A7)K!Ni4Yj^kMtoMrEs;ySDQCZ#H`@?9S0ukSW>u z>w{IaPJ-p#g{J&V%W@v3ud$m@8Nqk(6_>ZkQ&sjEuPZWV1!`T#_CllPJ8qy>&_3BRbh*F$@4Im zS0-|{N7hZ-vOxRv<*+|W{FB5V?=#?1`NAu_+~@0S>lXXWtlJ1%hd*RNpz`|I1%k9*E=UjNvmspFM8w|>{};48hleov0h z-gL?IdhB!iUvIC+b9yhed-Cf@bxHvDwzx*8*lVd%_}8SL6=heh7L)g$v*3#P>2iag zK{LKJ>|@Hk9igb~pIsmES~q;QP35HRPlRsFJMz{kZg2J1Pis1#8vjVGZRkn*m9^#m z!I=NgKTIk-Ghw^U{?J*eacZvJbEoZme%a^mxBm18+P0OMchYtI-|W7>Z_l33Jto>p z;Zc*OCojlevp~_EH!anIttg_YOZ8U-Q@-8@--xNw|FS>N_e3;@otmXr+5Z7GF1S z%#SdgvnJxdKDBa-&x&6E?&w=FkbEdskF2pZb>{_fJQ+X2FreOH967Z!7iK)*UsD z@$6mu#%{@qCs%rRUi+`vw%|zJjSGHx1;@Rw?b-9dBlG7Tuc=y@l_|Yh3Ovy)OAD54 z`)j*qsej|TCFURAI%Ync!T2L_U&OD_Th+7mE*YB~?t8uE@rmCH54ZjP=*V7~v@|Me z*P5riEss;R8x1V39(cl>cVlH)PL{mINuJ|(Pyab^UN)sc>_uVv^hc_ynTZqoe9CWh z^j|u3v4mQw?&&(EdFzg!+CSyUuIt`=FJF4-cX)ZT@2?cAJ^S1K^Zl&NesH_- 
zyU`n7^Lrv3r(bXWyFo+x3is!;9_`-`e!MSjW%GD0lbD)J%)FI;-06pIzq9>*PrT!_ z*R~5goZI_999FXTxP9UD_iu`)eUgrfW&Sgtk>B~)YhI)9?~2S7llin~)ts52=f^ei z%f^F?Z9ewvhB3YOjd`Igh-r+dX92%$-3zthYeuMEAL-Tba-1_$5{^O!uF^tX=gd$^F)j)XQ@Bem`Kg|Nq19%l!$a zIkO)8{ZZG~pKWKdUrQ(|f7y7h07iK6{KIU$}FL%QJnBks3<{x+$G=A)RvwDKszN5?X`&$Lx{gTe};cl|5p)tG0f4-%*?wmI!IW-R1f0hgX{h`;EacT10nQL9Puy0S~ z5#Dm+lZrw~`r;VpyAzmxzc5U`yiYtcK|$s4N2zxXJ8JUfH=Xgh`6=q>0rMF}osSps z&Mceovc)cL+oS94ps5BA;Zs4A<#+7q4|}lp4EL)=oY~sN3Oi<(7br%Y+OaL3Z$+We z(!X(B%NB@Vy;dkR$LDs~B*|Mhd1l<4ba=VsQi-NRWw&}}YR+@@O+1issq68ktqbDY zG`y!j$axV{eYdo(E++S0OLx)fO-!A-DK#|#;szO=TKqm5C#)U^Dct0eoSt;%!P#kX zj8)qfF_hj}lCIsx!@DEkz=FAr^J@6^-;MWFoxOI-QHJjCzdaW#W**DsUy%HX&wQ0_ zgPotwq=yR`CcQkAzy1A&gFT( zH_bl$U3c>KlEUc`H|sRSc*_b5?KU5NQly{y&d26_*p(f>vNj?V(j@9Z@-#z_wEik^|<=5nn+-u9_zB$eKT~X?r{BjzvK6_XlCYz{+3U# z9N4a6xlN1D^6If<<9+&-tU@yy*7Ot=v>&XSaY6lXYiYx#1nZJFA|A)DDS0;@36t}% zI@Z5uW=(o_W6Jq|iurBpPN#mh>#t0_(9+nb?K3C6zr6O)`OVzM(HsYP!bD;_C8szB zCdR(!sVi7KWBVM|!llaAywf+FI9>JUy<5~Cec!0K#GQ(tFYi5mxzXVLzIcOTspWME zZnrKpZ+Q3X--h&UPq{v&wLcZkuKDG`E@o)tvu`_pWZH>(HPuyrw#EFNvC%v=S^M6F zoiob|7PH;mw0V#LW^tpdKvG%Gz?|@=^z;`^JezoWC`=Gh0)% zOP{v%t=8`fo>o?$D&n!>mdVc+?q7wk-0yr#NWQ(Vw!Np6ztW(xsCNC`eT%0oKXB~X z{};-S6WR>+Y`J)z`;$o8)|?lKPDh@|>WN%=CL1$*`t91~Jly+bOp*fB?%)3UVdXnp zP#vLB?K3Z?sO!vRZ#4$ll`@RFADEr*mw{8z>tAd$+kG}o^X0Fd+@t?Di0?>wa)Hi$ zZ~Gq&2PK7>Kgs^_1Pv;97*DTK`JDNQ-#lK_V*|%T;Z-m8$7*YxDOc^>z3^qxLYWCa zB;{UnMbAi|B%bmrm}{{^*Cg%SnfIr$KCbf*{LJ=X{#%yDle^D~$Nb&JnH}TPF~g|b zcgO8(+dU2+Dc$+vG>`3$w51a{XFs{2b^l4o=ibySOHVZEuj}77bIQt`7XdMq8`8hO zy;&bFb35kZwOOb5tooW1#91e;o#=Wmrqb_`WX9pHrt2;2X_m2F60>i9c*!^MXOqym zDWWmcIcu(%d$@_39;nlHs{bWW-~Xyj(SCnRnG^W^v5&Y-?{{@FAJmHnpc3`%Nw?E?Y+NZ~FFLa{6cRHv1(`l)! 
zLepN&vf?v8v&rd^@^txIP0P*(ANrN%=^u6O)>|LF{jUEeY_m1i%-s2>tiJVIuKrG@ z`{z|2NZl&D-JEm#MD>(OH!n=9ow3>`<#$=+O)0BOtg@dsw-!mB`??|&WyZp`i zq?ucnWPUEvnLO!z?*2PL40~?W>5KkZ_wSzEgdJ-dQ|HAPFK)1hL)zx$*tmlb^4N#|T8N_lePX>pOF|h)0o*$oBPoU9XtA37$W%4zddS?GU2ip3!^4;FCbGdxM^DP#X@Bbu!{I1K?nXUUR zm+tk{(4Mg3>@wwqjfIh>c1Cuu9<@EqUoi8nW=_u92l@B9c#XB?O9DcIHs0TOl_@{w z+M(MGTdeZ=cAvNV&1R-JwPnN38waOqopR5V-hS_>``rCT2@VQ9dkzM6>hHaCygp=a z*3^jEyJm9OyjYgpf4`!p^hVn;o8OW*Ix5WOOBA_vKWl4wzUhG@X!Q6w|80ZG>E?fW z_s7oo9OPxy(ZBP^Q>&YuQyu;t+i)O{cRrU$sp9=2<{O81*wj7ab$c9lCgty0kGFpk zCP#`{{5fmz_N*H?eX{wSv;M79zxmP*iPlzT`-gp(ZeN%_JzAjrQ(g4d#|go))r-^L zUHIe6eb~(LIA>SwFYztC>-ufI&f5iVShzFiv4`+%W4G8f9f~>UFU{yZ%Y< zd-01)JcalAE)QQDCF-WO`uQ4dZ|d(gm~`CE3|_t$%TPn2pj$R695&MIVk z%$;lY>O=on)k|llA4_$-Zoeva-lNoe-qX{)UOqOQ|D)gQk%(E?4T0IKsw)dVR@oOH zI&9pyC-Ft#J>lK;PnL?9yjeW4E+Z#uPu^Yse|t}FzF+tIcdeD#QQMH5h;v<$zdFpd zx9T5`zWDA~pRBiX8^5>pheJmmI&LmMG;8N^&A|1aHK!FNXJ7G`vF8&#VxBG_!Zw zL;2q)C3ej)KVmlh?CFE<2mBA|$)#MJSrcNScYUQ_j-YG#{p$7dckRq%j!%m})6V+9 z>E|PpMWwH=Repaq|4i@(~5dIlt(IKQyE5Zsng}QT@Py+5JH3YtyPd z(FYeYC(ce*zI6G#jr;RFtJ<^Ex+g#5+;nC_*}AVBCIuf8Di*i@k3T;9*=|3}ugoqL zIX$tnHBXnxqzmWv#71pgb4X0BafhaHTC+Ye|n3}CNXMEGl<v^|J7Bg-PO2$@33=}%T41G-`Xn5c0Nh|Jt38iC$e&qsGZ)M4e@OJ@0gy>Sy_{F zn&ZOJw1q};KC91O3Oo~(tn9f(UYKpO$%dlIm6LMsw46S+b^6qVv|~q(ubJy}`swtS z{1snA(|_y`6K}Iyv+>Qhj87`D5$jvt?oZ5U-MJy-P+i8iGYgiy|FoJlXv6$zPvws< z5__tcE|<%#GCO?tDTP-vwfSqhQVtkh5tXl%dbM)z65*d)e{1ht-@nAP@>NLs-|epx z?tD|&xHUaEch)mGu>(f_FZ$0|@7Pqk+e~{}Vz2zB=7pU$GoR0lIWuW?MQh>PrD~I= zAK%zl^RHF>@0X4C?Q`QJPJ2eOZCc2q{B*~Lo0T({8_aZwQgDqd5OuB3x_&n%Rd@Ei ziSDwZ2Yeo->(}>el1XQs6dzV5?KeyPgpv0mxtaOXN^S1zOi@i=d za_8$$lPcGp5!ZPg`;hDRniaYe9-Wx_?M86j`j5@w4<^X|w(f2=zj1r*jmnN)+be5j zyi+1nX1y{!GSND@Hw{H&qB%Z&? 
z)BJ8x_xy<(XAQ#(q&~dLuDh`#sxb3vW90V5Qf41AQ?eV6$j-5sp0}1&X!h&h#(e7} zE$o90ZFDRysN3AuSsN?ByE!kues9+_-QQhen@vwQv3`EpB0MYLbl6&(RqG-%FC6M= z*coH2^Gr4-ZQhmHTOU6=b&ZMhxOe-#n#sjsy{!E`Vjc?= z(`A;1w%KwA&epwuX5!n9_HN1BZv6k<;}*D{f0k8pWuoNTX)3n&exKg_!{B&3-=mu@ z#Y=4Y-~CzaURY9c_tw^w5G`KNu%wZE>R-VhJ0?iWoWJ*Z>Agm)<^?l6qxR)k)_%Pj z^!`WOmge8@>JMx;D~?o9l$)R6`b}_&MseodhJp=qtQY@$xh3PG(w$$^a~@Z8)T9)b zroGcq&41g=QO_bL>CCX7f9?OidGo3ty4pv~nQ3bOLCrrfQeoqx z*%c12)We0uJoYT_s!!Qfdt~Q)!_$XPi~dQz4NA$&7w9gCc@&p@ZOt+__uMUclM27x z&9{GbW8>uV6q`S7_fHypsQ-M{{K|_HZC}pY|NpW6&A(H78d~Lk?VFYJG-aJ0zn#b4 zA059|bc=dy)cKThyYF`W?J)UV{aq7|^-Dibci)zO)BGo+j9l2hAMVEW-`{S(vZ2n_ zeZ?2|H5yAk|E$O>PCY%_?ve7k^*dK3ghhY)WRNXCSu7?@;o{8n`EPBx4)>eqEW1_u zdRgnkP7x{l!bRU@{;%8rEvoAO-txcIk2@!;Yx3{;{Oxwhd?4Zs(OfRP({- zbH^c_*vZH4`d@hXQ1#%KXSbXGmt4Z4Rq@Li?OWH)@8sZrul%h4r@)%J zyH=9N71QTbs_nf{y!fm1t&*qBTC716-fHH>AMH}hI`m0F@5JuiE{V&9RxL3G+23_Oede>+f~t zpKY03L4Txip2@xQshm%`gunT`6K9w;BbRq}?W8c>btRD;A3w83%``i+dF%U|Q3?-d z8+=wvm*&;G)GB6^@z;z#4WHWV1lc;sR=SxzV7Oz}&v?v5Q%<&ygi z>dt(bRWtLZeRY}G^4ObOyq$k{M5le*zVlrp=c&61tm4%N5;jMKS44!**r3O)a%7|V zF7t;=xw#qSzF1wJkZZGUYLoW*XEV11a9GRTXld0>H=Df4VbYgg`OWsvx8}?|#5P4p z`p3j{VZFZW{C8be{WpuQ+j(y=oBUFAIjfLaZO<>c>idk>?V@I$GLDYRdEPF%z|O8P z?XzI~waD8{ns1Lt>4{sPRe87ZKTF!)r)Rckrhnj>+14v~C~=vDbYIf5$WJXRBx{|G z*RcxuT{WpJ`ntGnUdhIZdpnuKqoyd$S;McvE@^(Ie+%!~iHqHib}>h7*kJncL7Qxd z(6n`bZKC7mon+$t@^!yY2 z(F(1J96XD3CQN$9S$N84n)jTlmJiE%9zL7RI_c!>jUGo%aeU@k_j*ImlEUdTZq;4k zJ(+%C`i?b@Z!13C+ali+y5aw{Elm?^>^kdAZ(FH+&iu3C#Ni*po3`3;9Gb%Y!94No z_L=3LJ+7NGPpio%>aE%7d+$NorVpeU*`a;pP8RtLyf~Hsx>Y(GF(~dTUks=MO`%On#3%%a_LHdmqhS z@>(AYep~} zH~GbPZ_jROn|mku(WzwLSXl)Y_GW7{zM~6RbH$oXc#JmwELs+9`0JbegUcP(4dy?K zcNR6w^ygBU8U8`pQseXy_tcml3k{5RG~3@?cGAe^)=Exp@jWj4`X8T96V9#tVk&Yj z@JsIGACm$UPM*zg^q-^JT9i3qt;3-w4M%Rrg^9GfC*FS;Q9Se6jKasvJ8u6KebpoK zF8AThxT@6W+UuB3zS|#bb7NU<{+?WY=}B*IZ{4`X_x!Z8s5T&{LK#nyK_;=7v2-PfmUk8pyQ-+++vZccG}v{ z)~O=6{lT-vOq|QzX1$+eWcZ2u=iz6fo{#e`@fEZ4OG+L1uuzZx_fOt#<@w&KD*HT7 
zrF(u~ee;^)%<~5OZ8x^|p7*^`^MF@PUvGQp>uZghie)VljvrpAz3l7`5sx6h0@Jmn*$YiYpN&Aw?E@0CAF=)QGdgf`$0lnD%I!b`f#yDN9Jlz_%L@PM87yeIjHdpd(cWh?U{IK4qOLyn_{@j!4-@|jN=7;G`y1&>b#`u?7{G{XVTiI`0 zv2AKy_^?QE_JaRrrH8i0Pt3W`A)e-%^J4LSlP?e2d~0TWxU^N>*J!Khk={qXH$Q#a z!CY@S!Lgn#+yCcTi=9Dr)4v=)863kZ^g`|K{9C2lO|&L7cu%rX^qYCos4u!dW$w;b zuU0$1?pgBo({WAXn|+6WWk$Zg5PvK4WL5v3-9Nl1?YRB&_V@I;wPg$LHco%|#KL;c zYq{Jot)`E*mwp+&1eF*pk_wZLs{h!&#(CRRAJ5Nh;_sf#Jo`Gf`rOP71quy|>X;N^wFVywvw1*GQhc%|mZf#O*Kbz!qSa<98O^&yDg{Mg*PQ7`}MfrZ& z4GY;NJ(E|n3eB&1mdyE7Y{v8@^4I74E?0cb%clOxBk_z-`_jzo{C26@COT#zZp)ce zWz(YH@J>5?slfKQ;gV(h7?mT%6T04YHK%{F7PP;A%iSzh>|5_MOBS7G!8;lc-J;kO zb>53kZc{vU_JVVI=4Yt3X6A+bH4Ed7h^l_j$iD2UW?zEVXW?{C>54f& z{dUw;MMUjd(0Op>L6$D&4=OXKeYyQrPisvN`+iU6%1)#7<2;jix<90GHuxI9vXf?= z6!WO(%OR04i3Jb#{+Zgs#JR(}=K*^?|6%54*_r9nnNA*S@c+!Ul-*n5%jOl|Z z)1F%YzV?ITgSkBH`%P-M>lQ!y5^mAqSQj4hxA(SO%wKago;Q1DS4ZD`e8_iO{uA?4 zr+AsAEH#{Ox_uOz%>VM#Qe_W)FG=R5hKCmHXPGyN?++vu-Ij7*HaYWIbQ`Puw$DA) zmJ|9nIXFekUjOvg{9`M38=aq-vNq?1>ed$CpWa>S4PkSPCeJvs)_oFx+U6_4%UJ7W zdQz?X7CRJOwccI3`0dTw;z>Jh-+#Gl`fVEp`+#TZQI?F79;XyL(m$L2wk~=bzImnd zp6PS%o2LZnocgFNQX{l@(lcHoeZBpE{H8cwS!lJQpCh@hXisLw^_9`r>-!64hNaxN zz2?SkUKP`AUO|4XlZ01Kx!`{&LFJUxy$72lcP$NXWNGx}3S4pG*qKN%6_&_q+10OA z7;me{IJNJ6&nMLvvp!QQavPV*6Mwy#%(71cvz`gEZQfp_&)w4eA$K4D=HF`k1qZLh z8tEeQ1{HQCy%`KIG1$yT}m}g=jIV$dRx_Ra?n1ddXuhO z%DC%&{JM>tEL^hY%KZ-iX5R;>=6LET$P1(NJ?;NVuNtU&2)vbLq z^ZpKrM0=CZu?0_sxF)ObD7?*l=+<9zk>GXPa``7*urpYwdbu_J8vEq%FMqcsahwX? 
zqAkpp_rGIlOqtcQdv;Sro^p9Jf4;Y4k7v$;|Bk!2r1s=gR^Dl}|D>=#RIiYC_5RK(4As;ss>_V1RxF=j98OlMoE_`f;2y>;%N z-hWP?qN5puFYQrWbLLF(?Ixc4k2ZB||DD=WtU19UT+i0UqNg%vroeZh_JO3mUK%CLx z{wiW7w&dm9@AuTj_V|h!?K<${=ks|B*Y(@KYk%sed`&Y)jfubEvR_qKagbp2e8KmJ zA6{p?;5<_&_Lu5>gOBxF?$`aESUXAJ-s6%vTeGjvda}9f=9%eIA2TIB&HD0EZO?;r z*jWI}OB`SKs=qq<{L=jMZMHak*lh zx4VA7yH|a)tgtI;>M8c$A0Jr0*>JeSy!F|)&BYHYukgNzxw$g=!4{sd4;SC;{LEBx z!7hAljHmtI@B6p><+Bb*wJXedox6>5LSFSlR+G9vKMoj+`TX6s`^#zlm7b3k=5w(; zlMYizwQ230&VRARbMo62_c{xu|NWaCSM#s()y2P>?`@-50uL*5`(1Rhma(tX(T&>C z5mEW<{?fOv_Ma|E;^uc@O6{DilYVxV=hIVD zm;e24f8e#U!<*^z>%V2Z+jClPUbN8i4T_2LPA;3d`8mrk0h{BUvD{)h3-*4$H#_Uf zigWKzKQvqzH*?=1yVkSZ2mVJydhy#_IeMo`?^N2wMXe$G{*;AmbZXraoD-%K|L$wF zn$WA$e_FdU+hxlQ`d@s%U%&jKeEplbTiWXy(^dcOnc<-8XWx6F?5yba{f|`F9`U;A zU~RoK^759+!kaVsU+i}*eqZ~|@aD(g_kPxRDe!+-k#=@g7NhSKi14qW4u@2r?>NsQSs~D+ul6df8$B#Wc4tGpUba* zd;8hwZuxmzvzZ5KC;iR5vn$)^0N0We!ugvQ9_La4EvR>%#Z}EUTX6!5Kf_v!)U-=2 z(%a=ewg{P8J(G~Un!VbBvEcODZHt^|vLD;|C_(3TVT8Tx_In>yL_=3ZoIF44?s3)D z-SZvT`2DY346a!r}>16QBEUia)!I z?_-Kc@4u8B1^XXug*y3b;`X-jJ-E#2Z~1eILp@tvjmK22Es2NQ9QI6l!rR!>a%aOM zwYB!GJXS^NQ`HOtPgJjb`pR?<>`~Ro-zug&TwW|Nk{7Vf@!+)#~U;9LP`|Qm{FWs+auR3t( z{gIQw%)N(1KX8V{UC9r)Z$E8=0hh{!qX)!~w>G~_+jZf`xtv0)vx!e9?NRvBH`~B4 zdEP-0g$Dh9MJwg^2Dg7RskveB-_DQyC3~8`+CsOs(w{eVCcNAEe&2zMOV<^wOaJua zUXB&-g`(0u`TIUv-7R~)cHI)D^L^IuDjv=Ja3DcNz~N5A!fn~Nr%jb$J4$vAuAm#<%z=xssq*`RXb2s=r0P<_(lN z7~1T+z-W>vuJY>-OZGdE1^@%{t3^l*#Iy zPsGC;g&LRK>ugSmhqTvN%wT`c>fU|a(>$70h~>pQe|uq1shVGZKA%&(cVw1VRAWoe zrkxgh?SC9LQJwTxusH4PqJz!sm-l|Z|9&r5u)@Qqz2N5|FHV+vU4vk zvOG9d{MTARWp~yo9-sEUt)KAu>hGn!?%cD>Z+;2C9#<}Vw=O+h_^NPH|0BCQ`yOp$ zKjQq~wxxfzeM9R@AMsn??yjk*Om+Chz*U&~o9&yl*&pAnKQdmf;QsS$Px1MMGmX<+ zSh;K2S-)rJ@A2G|qhiuiV_5gbV%Lt?*;|zFZ+`l;ko}*BH1kx=`Tmn{Dx4_n?b8ZK zV~@+*)j4_j4(3Wv`<4GpX7B3$Vtg{{<^}Jl#+E;-2ZbL^k-TISey8e4ShY#vsgUOq zjDL0owKDUZ^f2c;wn_H+Du<>+KOSF9{$sOnZ?e&+cKNyw^Hm#n`|Gr^cm2H{SFb$n zPxgt%@1>7BXQj_8JDu+N4b&IC$34C3Yu(LAbDw{`(q?SN@4dUsXM9;4<8#U(Qcc|^ 
z=3!z(qutRfyA4={9wo(mI=A~h+Y}eQSru$E-`*)c$LGUvGjXy+w5sCrq@oVp6_-<% zeY2TpSoFlbY@7M(YK;k>9vx?jJK$q*?q#&v`-uJhThAIyEdHC2)%jGr@Jv{uQr_&# zlCum>=a+v}m5VE?bV!`GVdkD>&V$PRcE^@AY0s%_&s6pK9=TIA%Fkf#^lH}`f25{c zo8EsDGi%|a6-ATzcSKYrJUXy8e!t^`y*%9Z(|(-%ux;mF*|>YjvTI`a8dc3BF14MF zcf9^5fvY=t~?SpX~a)K{ZD$|C?HyTK>17_z(UctUd$_A1m9qh1;7eHEz?#!!kBH_8#2T zVKetRe#+F@e_Bnp<>)5!ng1<5CN|~kpEoku@a4h>pPfI8m#OBg`+Ma;GEeM|cKN!B zYxmRlvAgr${&V?)+ir*d%^Qj`x&7`LevH+-#o*^^T#_Sh!*%-hqF*r|+`lY-aI>tY zSpKTsrnl>#q%B|YK5A3zHmR`xZ@w|aY$y>ufA|&4rGr7weU`H<>NmTe!{^>`CQ1LY zd1Ao4nP$?ZQ)j=9{QXUK&S_ttr~|rLpa0}k?zFk8ae3Lg6Z+G*XYV<1c1t|xqa!CL z#|S;Y)qSj0be`(P`DxMR7w@UCZCf*y*ET0pA+xnsecox|Ek?SY=O6>U*K=M8^7}C_ zubK6}Us->a$>R@CEdG|5edS}{J7dow*8L_w4fEFRkhUs}l?h^cv-pYR6Hz0xZ*z{g z-I|%9esz6Xbo&JTFKgGO-eLQmvv09Vg|k}D*=ill!^_@&yK#Jq=qtTx;m&Nv_xtM3 z9x2~>Lx@FvU)$G*6@7ngH$J%}dcSVkksDf^mZHhYyX%s95}ubA?>n!}t)ka4M}Nk= zt81%UwySWZRf1LmpWQF+mSuZb=&9-cHHo%|qM5$^{eJ)Qo}I-RUtV0y+?=2IadU0o z&F@uTUk8_(t-dpDy2z4$KikE0<6O8aS041w$&!A4$j3W2ZV_Ag>b2XJJv}}B;uFuw zOvYk`FGSoWG8G$dz85>|e}0BeevxnQ>^TowPp3W8P=4sdmeA(2A{j4j! z!}mRTniH(%zij`FFUBjo%>$P7AFp{dS=yvP;cm_4-hZ_*{+W65h5IL3^T$mKzPx$Q zfg&fJ-pFI8osRcPPv7XX;R@?%uij3xw6pWxU9>gIJoD)MBa36Vj>N4~R#wjcy0bTY zb==|2n{IA+SpVf}c>G0^ZClQyb}sf=yrE7^YENp$)m5R(-|zcwr&qG z65pH?abuxg1AEdk$*&cP(HG97&6E3nXpJKGy*)Ooxwd!Pf;KvCnKAu{?lY^cZPN{= z9hsV2?I8D&_v+5vZzBA}%#P-OVmjuwc7R>CvvIF@2jqFFLFGZzHFG&nA0c1?x&@CC+&PBJ$1u#B0Tl zaXdXWeX*+dG=X1hE#7X{_%zw%?4x6{e-kb*^NpOh^4y-^0=G)H$C`b7ceGA^0`E4} zyX!x-v|XQ`IP=4|JvMUF;$E}|_}AUrqGOvk^Qm#Z!~JU;9!}izDOLVf?)JNTpI)4- z=6fb{&Iztu|7~ke6*0V@=>GcJ-zQ(|FE8`ezOrfkl4D^zPpo_({@wnELTuSj(YTG~ zR+|zhe%W34I%Hp~b}`qI8s>=4kKGuU+f?IqKgZuIeJ;Cj+21;aZy(+;-K~AU_ut|b zTYuLrFnX}@`Uh=g?O9H#-6BtTnjOfh3ga5>c8S@D}8Y5$6-cG;*~b_-8@Uac0HbLfP|>-7?b4Vp_0 zC$`*v{4zc1q5gEuwB0?cw@llZyX)L+>*I%3ZK=!md?xL^K4ouDv5AdO-P2E^GZ?2o zG0(Vl>hxB=Df_Gxrf)bJxoA~t%&$`sr+rTQyl%M0xR&{6Jo~$S&+i=wvY&Hj#nWkF zd71noOOisC&u8|FYzxDn_s|BSF z*BC<;GHWM%b^IG8zlASD_JUQwpHhb3r)O?d`_>KGK@v9e-a_$~BlKC8? 
z;$JV%uJqkqc8}BG>8YuU*KWVr=D*PUBWKj>m$&oxKMn5Nx1{p(vr7vcn>l8SOn;hv z?$6JACpmunyI=SFsx>cnm-3t!i^D%EHcZxviZ5W?YO`NdHew-D?&;DZZM#yIr=L!r zf1M#Zzo|tyChcQpF!TK?&3kj|&AuA%dj78D^_@%K zd}qZY4zts!=2*eB^3BwhbrZqo)nOhc#i7B}skDG=iOa)~B&@Pv!6~*ZA&nv5PKE4; zM+L^Id5tchpjlL)Wr(mJUC}8yrd1AF)v2rEmw>|;o1+{SfsPyrw3s>rn=Is9zf^w4 zP=%>`jjrkkff5SVGyD_+Swb48Xw32Qfu$aF*N8N3YH!e*a4K%*D{)O+X9X%v&|q5S z;3eXxx(v6IZ#p}OdZe!R`Pz-+C_z^TK@SnePz6=4yw z1~x~jbTw#A5NZwX#O>sOZvp|VL4B#azFv#IELZ*RW$@3!w^6q?BsSNtpLa-c!`-sm z7tif}^T_qz=l}ouudR&^w{Vj37SvGY$l3XH+Lr70s;lEan}43M^-T5`t=}APcD-H~ zvaY7`z4P_IUxfeud_F(*|Kj35hh)ocG?v}TRPR4!U;p3N{?7;JJ`vL@JLLsM5#N?h z)}OI*&Gg!vnLQO88TJDF>bz6sIi?)@H7n`Gmv9fBSJ(3={Q7nK{h#>WS*D_=osu+P zKP~A$ zle+iK>2)UCuRc1n`x|rA_O8wQp3Vx(T)jT4@rdfY(?{nT?EU|+-L7-8e~w$XnBJH7 zSA#8BB33N8{cgwQ?my@6{|{Ko#V*n};aCvMDGsJJ4vRV`FaHr=d~Rm?r7tfp|M@-X zi_P~x+$Gm6pZ|!I04@I7ksJL#&bHOPLDT~@#Ad_fz5maoStfS-J{&QdJVU4B;?wXg z-}im1um9^hm4DTinKeJ3{hX=Qo*Mt|XVc76RX3;4J=W3Q@QwLn!Kv#3{-y67tKWrg zIK=*Q&+Bv6FJG_Ue@^T4hBuWP!`E-IkX^S!X}!)a@8sI6H+RaUJ^z#{)*kKl=;;Zy zb1NtN+gTpv)xY}k)as&T^LcG8wYPqHonPd)TjRq?_gOa+VuXISFFoZdzSVQD>4&!= z&1}40u{*_B_ifF-?q_@D=|d6iM5T`#bSE6B_FBXfo2mCJsWbm0<7pnEfi@m$~?D;3{bsmeEqa^xMyjN=+o_^@psx#Wt zxhCq*Ezr9!AXa{>XJ*~v*ME!V&ykKY`x3H3BWCv*w#OTa^iMtCWNh*KO>xn>VAq-9 z;%}FHY`t_M!KnJ)9}k;Pk&P^^3x4wLknfk1{i&;$^L6I#v#Xg-_IiiZYVtY0*>c&R z|H{u5e_rf(|GxAwwx6|6LL-a*0zbhb2YLPyBEuAWY2hd`TGtLkI&Ea zPeg4ya%A=t|K4~}56jQ<=5VIO&iEPPSbWyIu>8=4C3OZB9$BCxgj{WZkjZ>e4|e&Qf~*e{7Os4% z9lo@!-oz+$v0Lwn{LWu~wc(JKIIR<*Cb_u&h8|8;81g=`6 zG2zgu==tL7WWDc%G)}Q?J8D=Ez#8=JsnYAxN&g;AIy~u2vFGJeAUB+zUlwwy)ad1x z?#1#RR_DFeFR=Asy|gNS-`C^&?^fH$M!zrk%WMAYql#^hYe-{AL#g8H*mEpN;wg&8 zGp>K>UM>9Y*4o+kIi4u&(J=IllAOqEE2_nHU2pTj9U8CwByuCmLmQWHE!j}D=sfqW z{aO>aHgrVmcO^eqz_sMfY1O=+snY9qJlT-*dFuIxpIMEke*Lsu_GxjguFt)$#qt_H z`%L`ySm}$})im_jvmKi^yU_5+$pdM33=E4oS6rSN z9<;aWYv%sHpeq1>bt0X-v#Zl`o|6B9;I&bux027>zWDwA`h`6^i$O9&o=Mm?3foD10I6L!cSc=y&mfxx?0XDXV#0_ z%b=rs{(ir=SpR3sq8-=IS-|0_37nat*|v7_5b%ais?mo7^j^{$T@bQUSjnI 
z?n;YaFWm0EUO%nYb-wT1S3je-=J}?7I2pk%7t#3d-UsuwQClbdGDw@e=GQT4!^=mf z$2h6aEirmqe}M7J>GSrN&qNx(yzAJUp?7_4&bkUWy346e{l9gi?7wff^S^vu z6>3uVr=qI*LG+fy!)(@G$ExSZl=&=9{!`Fj{`%Q(c3v5e%gaUQD1AHr|F8V?$GiVk z-!6_jJ+0)Cr+VO0FZRk(QT3pOZ|vt9D4OMIfV#Su3y;hCXUOkjLL_1LnndFvmP)p~5rn)e?xX3qX@?{${XcQP;AeSdAbp=9o% zW1BTfo`!~B7X13^;=bSSyzSRVKi~Sff9CbtUoTr2_O6&6bF1Zu<{BAk@JT`;UpE;P zK3aaqF0Arju)l5Zx%{hLt3uX#InTIXUj3+ZMNU;}_(CUEeICEZ9lzWzu+2ZPwdZuj zt0_&vZ}zl5x(_-N=IQCF-{ja&X6x(B4wI=f$`2Y&8bOsA5G8B zHNO6M;*0D>dME9t=|n!7zoh-!@pjP*3)|(qR$pJUtSt9dMM^}Y$Zrv|pL;uON?r*3 zdOAIR(I(+vedZm@_~u##mcILap?y)@*_p=2vp2(z**Ups-l;!@lAk?e++}M`KF>J4 z$1G;vZ{sZwUtV5*dBJvew{NOtR=v{Z%fi;`D}2!U5_rGvGjFf7dEc+EZy%rJ|9-dL ze(U=iC%dOT(ad=u`J%eO)aH|i)%!o6ZSDWvt$V$8pZvV3Xa48j%V&BL)y^|lqpZLKYFxk(`&aS3YTT#c7Iz|w^cFp zQ852?L-{|mem;8e-G0GDWqYpHEh|#@9@=Iar{?^%Z^iSczt8Xgx1{`jrTQ1?3*Vdk zA4UEDUA4Zhoz?E!jpWE@Yo6?7v)`J^XDuhc>0s@l#D9+i7u$V1CijoIo&E9rXJ?~- zvzi*%U}QJp&h2Pyqs}bF&|E&})$-5pcE5L- zm&N)o{^x^JaTPc9f3R%&BI0Tv5Mdd7%ArW-R993!Pp4U6-&5`{p@-Y~mtT*sp8D_S z^Z5tF%`N$_j>eoy>* z*FR&j`uta`=WaiAC;#BY1t+y9a`S#Y*uLw+xifDLtrOvX4mwS(U9RSUFwfh9qJXdV zX`iOY7)z z$-(g<&k26{x+j5Y8^qhM@n5L=`1AS8i|%q|lO|L)GrjLRt*?B~WCrUK+c^)PZ<@fc zW2A5#(%{R8Na;jzcTnZ-xp);$A|RpeVJDE z?UiiQyAF2M?A6nr|2);J@?q=e%_TRB{3Ubj%kO@F`B&&}{r%c|+9zLscyMHyFUO&i zi~H^UqH`F-=GFB||K=>sxj*mUkH`IAra$srmwV{k>~3=p*AvY)2kIYxz5c>h&f}WM zp^rZ{K3wP z%Mz^r-`!QVba{0NKTC7N32(laGamPS&SYcyz*}+rMZf(rZR^sKZ{ZyJKiQ|8*FA6B zE?2!L+ri@3)Yd7pu9|C!spbXjFv+ahGj~mS{pye&KCg+l{#)aF^#(%Xp65fuJg+Lw z^b`6Qb9?`q%P)kZ=hR81zFHrs|Hdq>-n;O0V%e|6?^?UX6qtV|p4gB$vB*ZvecjGi zUQ)JMA&y6U?IO#4qR)nuKRQvhbDOD;(4FrGnCt4KH$PJEnVvME>)@)LujB*GOCCjk z`(IqaaHom=P@wqyPafOvmCcra!aMD1-u}1u|Lr|ptn0t&UR`?n|Es>{G3U7!I{ve@ znUPbm%ANmKr~1`H>*hytR-C;W-umfjvc%-=GvsHlcHFDXk=(r@TV?wGm+!?trQFrD zQH+?eTH{w^%u^pt=LPj^4%Obh{eF+PyIkp%&%zgvC-3Pvnbq_ELYr1jkj^bZgO8;z zkG{?L`|Ins)dy;y?|!$8PuBX^tbMP;q#B=Szf#GBLaW*FY6 z`f>62n}p2ofoHgH{hqkk?em*YjhP>xFo&F&oxi6Md_2bv11^<_0}6X(%Ws`r&dfjK zg4u00QQi$RCt&6EFC^JM=#X#T#}-IIS>F(|+D 
z{88H=dgi4rS6av!^BeCj-<{93HK8F*?$qpCYkd2P*GBA{BPCy+b4#c3jPw*|(a4Y6 zUaM)h8|)Ik>BZKQcBiFN@-$nR2IwHDo{$HXdJ@e7CXfEcSDJ1*Jdx8$F;PeP$!CN5 zbzCYp8C#Yn&TF1I|H>N8XklIR{+jRas^2a33t#$~-@f64Lx=6xDU&l#ipK}6`@ywo zUpUuQl zX+zc7S&asTA|B##C4!}Izb%npA30fg&!$iEIiEfVc~*UE6=&-2&%1FTGqyrmJ1#Gg zwP)v>g5R52&vAtDv}$(x+B`m#we4r-a;vp7RQ@+Gac<5iyL%?A@yaCK#1wm$)Hjyt z^Cee4Ft41dx2ErcxU2806Cc3yvT2d`9_-b5ogVp;$5BVP-(}B=NLQa89%)0Rwf-(P zm5Cd=JeRNDk$*UqZK;t@;@_Pb6DF;3ZauqSR)6I;{nsWU9`^rsOq*q9ebG}TiVdk)^e1r27rmwN~JXz(rWh>|u z=CreOpQoGpY;&=ldtU6CV-0E5tVIj#Mrtn!z=aXUIEDpnu2`hW- zB*oQ)R{dLDbfD?ZyqphAZhsCx`0Kh_-}OU`h)1`WUIMFEnp|Q!DF3KD@zG>Xd}CJk zhvj_Iy>t8bxZB4+{}5%8`|M25rlV6%PTqUcSMvUk?RSd2tJ=d8O)@H$REoS@e4o!zrE?qg#RC$!4S|eM;{nMrub0a1) zU49U~WAj9=DPlSk4nGNw+jOxh&`Fc2SMkn7ogBY}UAJ>Kn_kYIxQx^8`x@iSSrJaj zFAhm9_Pm_->{_wn3+K*_WpAVUGv^(vbi6lV*_Cf%ZjC1Mr~AD{J?78N6sV5 z@34L-=HxZXmslYwe0r*OSjA?W>}zY(c;ci#zUkRxu)<_>NWhUAqqLsyzc!s-&vE9= zW9jTLdllb8)lA8HJqc$Ar&_}sP0w!0U#mKORn33i6WycP$GA>T_T$u&uloMVmEW#l z|4FAM+?TU;cn`5^OX_qQir@NksFnNdTJzmXx+gZoti9h5{^$D#p2w<|5zBV{+I=-- z{`AQ)r#Evid;4IocO;YWhlR>qJ}&~_%`g5dX;ZObyUN#;fBPo;S+#zDv`yRo~Wx-#B2i(k$`0?qnv;ry;LD9RBO?Dt)|1Vw%FiGghyf9QG!B6t9~g zUSYZMbd*3>pbq=A@axCrmZs0w4EQm-yryW*e0hzR{A)KBS~gYcvliJf&zC%6y#Dd= z{>5vz-Rhd2C*!VrK3_D$dl{dk)Um05n7wcCSuQ!C#g+Q=F#rAP+OucPbDtayl6?H= z?}kaO2a?xZxX>PRX`QbA(Z5GjByT^xZ2R|DhsW!RPNQi)^~e5x-S_*gx9&L$<+EH{ zEVb3&F5PZ)enH|C_ZRLL>cEE=?U3Kqa`vRJeDjwE_szmmhoyHYD4d^bZhzt0#lC~i z>OKMI?ZVwzGv?r7M(=CJA zZ}sPGQacWb$gS1i(Xh)_bW7OWvs{hFQ6~Bf*^|$Q-MNzbl<}Cz%ys7Xr96(y#V1rs znUyihMr{5NH^a`eI%wHPPW21f`FmdOZ)X=?=-gi8@!2x&t<_drIsN0D9L4`CpD)v8 z)X!rL3jD(Cd2(hDo0ELbuVjrsY#)mpGz|Klq@EZ5uubWiY=P_ij1-42?<2&vR(&nM zCvhX}POV zPJ3f9|4-}MbNsPsnV(<1u3Xcgn_;9mL2vh;qN)w257$XeFkT=d_32@|N$RPT{M!@z z_SndszV2)3G&6MfwM52!Y)n66;{WoWxj6aJkE6d1PyCP;ti5B;`)1?XYuA=PIrrLT zh7b4gUNZ;oFZ-hQDV6?s*uK2g)yn8@qOzSA_u*c30k)UB&3>l}|C(}V!;K4T12?Bh zoz;j^odEWVN!(O9 zjo0?c-Gr<=8y~A%l%K7CzV|zGVr9d;Y2nwH*bkkVX>Z?ZwB>Vu`i%u=mvkndEWe$* 
z-N7RKUcszszON7J#nxE0$;E`%I>k;&tVq-Eo#%VMbUyE~JvAB=o*mfEzBEF^$9wwP zDSIB@^!;~1ZB@w1Nn+=w%Kx6ZP_V6;^L(||l8T=b=i9&Y2+~=zWa&w+Yg2kpa>$Q5zH!wOUJozOz`})E)Z@)~= z5jK;5wzM?!<;I|c$FE;uo}xYb((=}=pR?|7UUYc*QKnYoe}|67S0qkb8}YI|)perr z8lL3ORv&~X$M4wu?2PwEo^mPKZTC2@@VBHUs_4$?u2;}`e|xf+*nQnXX8tJ=3uK$z zPyR_PPM1_$CtusYgn8eqN9=pr-510^I~WyJWpuvn*=PN4KHSHda=X+6m|lLGmyr1| zZ};DC4iZ;>uL@ngto8BUvzIWKXHhPfVPg^M9RHZ&jo6l+<53 z0n6X~`}Hh3kGI9~$D?V7Q{)!^(b_H6BN{PNDr2&&RhgHzd&14fFVp`NJP2)p{?A%KEcfKGQ_mj1cz?nD_QMO) zKY#kqV0+?E*$$Pbsk6(reb?Y`o+x=e@lD#ik1Ebl|9qwRODk{iv+rx(wC(lk*CJ+` zkEA;VpL{Ptgn51&rj}4hd!>^I$!P*Z}PeY>}j1`X`k3ZtHWk<{rb3=CDx>F zCf|C0-Y2qNd_U!w_WVqne%e^O?#G66eo3kPt71%yZBMt&(+z20-0-yV^Xwa|)C2hR zzrC6&c24Zi4I?$(o|F4^C2lgUn`?e0RXBXto_gn732ZzuZH9Hbg&&l@KUbhuGecTD z*}yL{{*Z{8n(V!B9f5y4pT}#2{7ado=Q_)i|9#4h6=f%;l{bFcE_ygXN+K%o{z>T{ z%a@&sT*_ z$@?{?Utj(^l}qt3AGhlU&CmYozJ8({-(H?#sorqPcoJh(fP`dJ!9tBqmyN&gd15Fa z_Vwf*<8^249VVM+`EaKn@=omuQCN7-TUGnTmi+sQQ~DN)KRdPS>}j3gfOBoFQ=QZ< zZ52IbbiP9V>iOGU64MpL+)t)_>iXev`Ol(Xp~A}z*3EsdA?0~};fgJf_IYpmd%ILB z_tLxy!=Fs^rxs1lPTH{Jslnv!KA<|`G3&NvA8$+CikeZw=F=E`{~z-uk*vLmL z_p02M`ek_7{lO=-Uzs)+KO|hqw4C{LlHtePwm38QiOkCvoY`j-;x=))-_1=gBh%-< zUEW{g!h2xivXDMSoBSEFKkBL-;;wKlS@OG4FsCU zMDDU+_TOSZ**-kFor17F!;?&sZ+){olx5Ge=>A$J2#>WA|vLDXi1(-`3iCwmbD+fV}a?+Lb+qTq+vN zpEfX+Bt*VC|G%uj`p0%F@`(vl}1 zsg0Z-yYP!yDu;O=&%;Ak*IDm8QRRB^+LD-WCl>d!RtSr*urX_#H2j(J?&A6r?_KvN z-#gf<+Ep|EWPWtVEgSKVx%+zd`Cr<)_FUuin*84zxm=#zoX4=0{fX;JhfSx=I_nfX zEia#&pDmO0Vd@^nAGaP`3iGYo5>@Q$;Tb3CuGhYFfllx}9lHSbNmHgjEf+cVK1jiF z-lU(~r_S#+uz{hadHdUyQx*UtN{UeDQQtd#Rlb3#JaG53sR z74N4s{M_gLFj-t^zM4=5&$sr)8&7npin<@3H+^PiyyEzZB&u4~{!OyUGVoec1+XOKD)c( zVlq?78^=kxKVP0welPswGV|STy)5AcnVaoU{<{WT(56W!Kv)%~^( zoTv8JAe^iG`5zHLqUS!e zy;XE)e)sX?p$a!|s0Fqsq>7rq=SbIPnVk2(Z0(>2%(9 zc^~^0oo)49b$#vclXtA{{{43Q{(YI-_d>3po4v-_%rNcTzqU6wzWj)1mrr5;_&)OE zQR%Dip5INr94PaC?psmuWI2uB>IsR)drz^;PBGLJ^J6;Rk{*41B7Euy|?`zuW|WX%gwLv6nkH*xo>G){q4zq_3LXQyIVb_c^j@3Z*3Nj2%BN~>+kjW 
z`nPPHwu|LG6Zd?r)cHBDb?2dko|&hbUYXy@SR5e3VHLA4$MWnv)5*vB<&PhVi(jPb zJ+0-?4*txKrH@^0jTt*{$yH^1sQUTo=@xPBN8Lty9270D#nwKZ`sIo6#bntlm%iBqR=)y|gNx$pKC+r-Zt3q$yw!iwJ?rxC znv+Rc`=Yk$9XrT+=UR`!5$oUIcvp(GN5w2Y%iX#A@w&4C7BdXQgueVZ@b~-W-0k<0 zoOe7v9@t*KtW#M1*7tSI;@h@_A8uThr*u!J#}iTcJr*HcgJivn+x|GPJGK;KFxmTJeOYwvW|9% znsL5y-Jst1^VMnnl^^X^KjRfXaHhMW?Lkfkm#>k}icP1O?rfZ5{zP%%krVts-6Wr} zEV~=Ik;`XFz^4^EmKySV*m&(aS90B9`UasMjj!)&R=8}xU%7m3)|7%AmIt>vN}gPF zXH8rn%FrTWB{sE8~#SEKdVXo)pn{+wasE z|2-$=HRG0N%_c#Q8uOpKc}4#fG3Fgg-B|m|g?n%0xk>Y4LKr5`E!Vkr=h5nL8OD=; z*X{dJSody$?&&nM6@ee0w(#e=@V>g<&E=Kq=`iW-QPl~qq5`+ByxY6K;&92c%;lFf z!gA*wJ+V*7-C@$hO-u3*nWZKj688x)V7v8TUhTii)v_mk>^4)o{4RF)r9-XUcf$`B zPcwVtd*GuEC*#YAxDbb}*;6`|6-0OW@918x!X(tOqcrPkhgOwV{dU`HNufvg|M|qd zaSm5i;*Smm-klbYE?eKKnB2?kJ3USK0Mqr%4-?XJ>hg9^l#XRtwXOK-1*fBKB7D!X zmj3J(i=Hn&Pu@*Du3%p6w8!i=(x!K2FIZ@J$|LDa{mcnjTbOn}Tv&NFqpak{=f~yQ*OMkhcv$gDT3r!h z?YiF*YjI`wn@imoj0RiVwVye;F|t!1s<_WWi$ih3q$ z9XKm>(ssp*+P`1F38zb^dVk?(72)sa ztI@f!TvFdtqul(-Qcs7X{ke+I7(3fn#*0O-6Tf3Bz}H}?|EtH_;a2YT*rvbp^mAkw zPu@Krcd&hrzx~#%d!Lu4{+UtSw(UmJ=?53xGjy(0-Fz~Ay{xN4lHTzN%MQ*HpObGo zZ_|Ows-IV{=S=%_;AHOlKo*sKo-W@%KFpTgc3G-CKJMb}>|0ewTeZcRgsg5^FFxL4 z!4}|=bIksK4X18|!ThwdbB?a6TOR!STJD?7Y$>Km`Hz(t3#{j;DC~SCbD8JU3<)v4 zpLfzG@-ud}dR@Bl{TTaf>DdC-2AZCtuPfS*TKMtul(%x~`^!CG&zr9EEbG}Fsb4-H z-M9*230g%kGvL||*Va2e3OWTxmCV=vl*%#)FV<1;o3_c5>C*Q5ddqEp9#J?oMU~NW zn|L{+e5hUdCF_?hk2BxQFbq7n_g~Q=KW~Ra+YgzaPMT$&J?rq+mdR6t{cL-88K}pX zs+sMmJ-vj#Z%s3wg@HtS&-{&>wPttluUlk1^tQETIM*Dt58#};?N5NBZL1|;k7mjL>A6;CYh>LW zwAPo-IH-2++Sb+bvI2kotpD=dYTr5W+WiFPdWCPTT5rYUCB&-cZ+&}Y?N_hG>lbb) z%KdigwDL;#?RiILt;;yE`8;ds#5*=R)0*ngEcm#B`t;v7bJyE3326r|YX8l@;k8$A%9>8ldZEwH&o7oVPGkAfDP8q-+uNI} z3ZY-VWxWawvi|f=RXs-Q{gMZo?+&=`2*0x>(l|dZ=&8+S>1Qnu{j%RXDe2CC!yC!J zqQ}kR^Wj>z&vHf6d+gt6eEp_olM@~h9wzDG!CXFjsSiUW+eSqzeir3*^{e>y{$0w z*>Ap8H)fs)VyO(?6jSh-?Z)i%sG7^uR;n0Hm~_nQitjb=X)7<(DxJ09@$s$fG02Eq zR&ysvLVx}gw);N!`rUW>>=NP^4*O;qS8-#5THoqZH`KMywU};OU3By6iW#YY9A*ib 
zH_JcCd?1^DM0Z)mb1q-gRIg33Hzj|bV4F~R!f5%=YkM>|eU*5!Vntc#!%s>E&#h1H zc{uHB=VaX%$4-Bm6UG*lxn-fw(=V;MCI8RgT9&%Ib(vJ;u}@L$CyiWomcE^}NBzK! z!V`+^A|Khb+_`n1T-%v_V@aFzN*oOo}Nd7$lE@Mk`=_}N^0rzb9z&aM+azx<|e zo9VlnYp2!n_L+UFTQ(<%P3Thdg}TptMPeKK+v+{cmMyM1nY;d-6?;)^>O2p5sXIRM z#jh1p19$yhxoCU%cD+hFb$^aZwKusZ)@_?^@paz`@uoGt${%w}HkU*#5qEcRdTdyE zu(~NR@oh))f-`&BV$3?vH)X zo${w`UJ`!7m9?XVP5Q>V1F`XUAOG1jW$hxN2^$)x%~&E>^OEb{G^Q7iKTLX`GOv(D zMKj=zXpH7%?J3fag~Z&K)daen*2%T-y?Ls;A>@VK#59?6ULhXM@>{0Q$ZzXiv1Yn< zK~UfOyoeU#Gqmd%uy< zUu*I2e=m~zIIm8&EHPTHczH^yo+cwF)8eP*J02XG*O@wb_L)y=3RB&rCq+DeJnb*b zjcHHkbbLA{I3dN}_2#K8*=a3{*TrsrFZx0E;{VkDtqf6}rPn?Ne9N6K5gb*^d#gBA z_Q$I@5AJ2&7LGf8D%PxBqL8)bW$NF}yJi}zY&m(<(cJ92n*EI%(|=rIbv)hTed%B3 zBX7sc5&tF3e(C$e&H|{vQ4@P-S$oN12}S|27UwsJ&>hjMTw!BUazF(8;d8UxlLn1n*x+zI>y2BH>qTTQB)^63lGsjZO z`R(1c?AIo;c~5T$xp;eZ^mLo~oBg>e(?n`GR@^aeI9#^1c}J%As$KmCla^jqdTYTX zbni5y*w3B9&Czygi@C3=Ycp~RPdv!{^Jw6ryL+B)u9lkBqWwrM*W^i>Y*~w6HWrH2X78M}Xp)$zM8?wuzCK2IGtatZYY&&H zvq**ug$amB#XqUO-diVfHT>cEBF?Tp`-d$zOb#?Eojx8uvFOn0GLiQ6(fd}UNBYm1 zt(e{JJA)x6!Ar*}=+;=e>QI&4u>blm;TSE`!x)jQ2~QmoniE$25#r0g@lwJraR z>+b2@Q?qdK6>5rZAHh%qfZ&m-zs~=;j#4Qb(MwxdW_F+Dg7jvq;kE3r~8+B=1lL) zVow_lpP8JT+OknUtM>PV#h%y8Gn76buQr~3Fm0#aLc_kNjji{bKTqk=zx#Y%d7MU0 z7*kN@8RnwDt-q$GfRn)o4G~7pm!6MQt{J{yR+FFKyLI26Wi@|IMD|U1of`iy^3Rib zhquR1FwfucY{F-*g2K;UN(!ESjtMQ_-Y!d=7t18Ja!y|OCZ;f_cLqmF#E#8E6BfJ+TXym1hr`9M zUR=BRohMJ==FJ1MI$O8s>-^zi=ei-jSZrT_bJnS_4%dolU$}OzaHuWxN`A#8J%>B+ zM#EnD$p!{Tp1f~69kgwen!?hf60=ixo^-m+D0pn`>dJSg*DT2VXk1vc?M{#T#z^IP z))zK!n|xwZqGI<+rp(3vrNY9NFi*X&<}ct`kn~tv{LdZjs(l+WKlYwF{I&g6d}sFE zU8TyWrI_u86JGDnc!(FGXypVfh%0iVg#ozVU%yh+n*)=6( zt~uAXFSlvBdFPql=?ClG@Vs;P-BjQEqIsM5IaNi- z?~HgLnXPH*ubgvd&TDVxy4HE@TwBk%+`P1=(PuB0r$f>8wT0JjI9{3Xx^BnQgOQ5Y zPv6L1JpX}%cHf2!gO4|h{9h|8oIGsY<6b0tP)f4+)}r5gW0Pik$0YweVwpLk$V*A@ zma&WajPhgaPQ1N3Wn)X($0Y%WT8a*QUfO;+q9M0#!u7|GzZ_WEn)5*e4SH6z5Q=Ee$hKT(4nr{z0r|GwL+za#$L zqlWbDACnWG&Aw6n^_}b~{>q=14s-6)5$AsSxlTJiW`6v%FI;<99B_EBZTak9cjg4= z2i$)u8+Aoqy?>Kk6-4%g@%?5!l&Lw 
ztLdIR*;wS1H~I8g#zQM(7AVU23HnIAYkGON@Nw}ahln2KPj)p^WH##jm}PY2X2R~N z_Zg&TYxu2NlN>9=xN<_RlJCt;LZXifCcl4Tn%=qec9_8NyI*F1cyZat_hnk*%7ytY zWv}DBohyEvWaYnfZ+qqEIVGEWWp z!NDeL=Ss!+az6jyw3+*3e(W@wAi(!@LTdBv22E%r=XR+i`a_!wx>ipW%5H zHsZT@e||dgD*VL1`CEO}1t)kV%RbY4w%BZUpv9|OTPxN#I7?Z^Cw+eQ_#VSUhU|!& z>f7(;?GBJXd8=#^hsV;Z*Ej5(_SGQoeoTXDueG>>&9S2^zD>Ovf9lud-LxzwrUL=P0v2Z=tbRFa;r+OWPxGA!zR|o zuJc;6!xBx;_^xm}JFCKVvi;ANZ&^1C3LfXjPZRquKTRs1r#$MBuyOC;b2Rdc8^ZHJzw!Ig(dbhxacq5@yr7cRjImp4(>^ zyQmu#NsIQC-w|EG%q!&kkzLo#z2u$Cg+mEESKR&QS@o{U*|qW`*M`_l6^6CPwVs&Y zsz_cHx7X@#^4-$6)fRqM|16}e%W_We^Pj18-kbkw@?-v!-}V?RI45g;p(Kob=Cu^h zesR%~pHstGJ(pj6b*P)mYDeyE1)Gp#@AjrQ&q@0E>Ho{)@|n-hNNzdECVlB#=;}*p zYc_?>a$R$7@6)=oOT#_7gMx0(XmH7mSS;=nnUVPUs$k{seUkhwjb(x>#rCXN(P7G~ z!?91B|G*aO)f=b0<;;}K4KGMH4LZ8Ev)g7NWA@w9YYVGNUrXdaV%6Mho0j7t&19l= zcDDKSt6Tqj-7c1Qu{Vj@At3mEw#hB^7t7TaChG6=@SSC5+~2tE_{L=S%V#8wJJl7M zZk<>2o!xe6=k%-h@9m75+|_%cPv^&}iokbSYbHL4z2@^sDx)Ui=kxhrE->>)gzkwv zZl$IWXdmX9)#HDA`g*&cUph@vA0OQ;d-`(5Goz<|n--crp2+y0kM;Mj)$3OT9^UI* z_pDJ|#xgl~(zhAz5!&v*`DA^B8hO{oTwdn4bn6az$Gw|o3i~XWsN7Z@|1wc-?cCBF z?{4YS8y7zIx48JsIP+0^$&;+mm)DFB8cx~DWt#rh=@P@~^*@eJ=DfNx`ue$2wKKx~ z_I}r6_GYaytIIr{bI6#nvm#m2#x0Zm5pPed_>2nein!T^#c3DH%b#zW`BD1q={1Tq zQ8#@4OuFS$a{l5xuH23L|9tXZu#$DvoLtuEz8lxRz3u!!^_t$+tgD+IKG^Z^ zR`v_SjsH3Jp7~MmOEUdJZ*3mm(L;A)x0hL*4`8T!AkA*_?=Qb|v4Y9@bB}hNoohTj ztMJdQKhfoTzvhOlDV}YfEp#h?#`GEvH}m^dvnzRW-gq12=j?dM=4fqD-S=JHM?p=6 zC2><$qOqh%m7K;EaG&@}Bd14+_4(_^o~&*?d!*d7>5zQ(zs$?aE`EA?x}x@y!UZw4 z>!$*Ycg1bp)x4L*!nR)?G+P!%?iBfWFyVy! 
zty9q^{J#|$geSb1d3BcX$2VDtkDbl0aaiAcDZQWFqEG(2TiZeu`vZUO z{3pS%psw)gsj1&YH>X}+o!_Ih>{s-D4R*1B+Wg&9|CQE%RJ+W3T1MHDg+-W+Wr@Gt zzZ*8oHeFs8F4fH?vf;U__+{1UvC~$qxuH<@rXZs4*E5||?QA9?t!ZM(Ek_$HKTQqK zbdL*Bk=!ypyK?c5BMTfo9hL8I{$MNh{U5txQZMHlzKUBdiQT;;;hSuTZF9ww;Ip$+|2Q;d7BUCcze+cL6VtLIeO{TC zBWrCGvktqx?Ge3ClOH7pR-8Ji{_Kiz?o8ibnH(NxzIx>EHJ+t%@b;1EW@6{u4Iidm ziOy%gx75=i$ug=W`u;wn>epqROImZB%_cKha9#YrdD}}97L|~-0j~`b&d)ijn8bF; z{`Hs3FMG}7+Ks1P+?p++c8$$*xlvmt&#{}+biXYY)edWEO3$^<1Xpk~W_PW6x2Idz zo!&6`TXU#tL();g(i3<%V`_|XN3ph8B9W# zjwU*+m(J%}+w4=f?(fe@N3Trezc)=k{@uQ$Me!%EpFZ$8Gp^b5GsAo1{M}#0UQT`C z=j@Pl?qveMo^ZF4nX2G~32fcx7VducQhoC0UGnKpAGTx$J14Gw8)Ldf-u+0|t+>5Y zPaQUHf7^9gqtt%G<(=hIRlOzj9dx7jq?~V2FpIQ{`99&}q>$*{Z`KwTHB}^v?s~5K znK4>#z7>1w!BX+hee4qhex^j$=B>QhW$i6^<9lJeNcfY0u+PleGV5eKwrcI zm!RKvM#_ARrgF}PwN0@X!_{7fZ_T>;AZ~L{_MJi_qZFx2bIojH{0)9Gh|g?04;mk` z^X$+uczRh;`reb|oE4{zd^L+}srY<0$V_vF<*QW-R`#cSK6=&s@%*cP-5Wi{a~33| zmacy%X_N5ySHzXCJd^#RW~)zEO3=C7ZvD!7vH!zYGi2CPVvaO_>)fGj{bSmWrkD2) zPk;GgzW!?%q)Z?gUD+7|*V9-FW!;RKs_& zmy9zHv#l-)sR+ONxtYJ6wb|`xv;8{56R*RJ{C5BQ7O4pxx`#jMwUwd5V2 zdUj89SIqaUrZw6reMdQ$o)1cUqM4eTz-RKbAU9_1ff=u)xgWIG6z&bvT7F>1vt_|2 z(${zzuzA)z=*xQCn&I54V}D626IAvW9zFEn>EuS0*WoKlCH76;t=7~QJu_Tt`?}b} zznNnmPP)GGOu4Dqy6XCx%S>OX$k4q@iF_mZrz?y!nHB3 z?rm@2HroYncVAcWjVs(XC%iWNhEI&;x@1{_snfOl&rWSjxzxNudgprc9|wNh*tH?R2XX8-?v`Ye+|rTE2~&$>=+ule+fb%(W;;r7!{{Ox;v`%+^s%=mt9 zb4c3#uZ)u!_?fP(2vlAbyFBl}#4SZn3$6nuQX z@azkpsulX8{r&#sJ8S0h=3LzW`Rdj!Z1-g<^S9q!^*aAV`L{QbHs5r7X1fUJ$=5tK zdf=OXK`#6HI_LPhpRN~8E` zt{d0AqSUuMt<=do5M$c?xUK7SH~+=vS|{H)_Q$jCb&1|*eZ9|fvRdk{d!04~p(5E< z9V`*mZ?|S>#Yf5*SCt&xvF=XQlw&plD@vC|U)xk;cE7Rn?!mZ^`~Uy7+7Z;N9UkfH zdF}L#^+|bu-faG|$ZylM^;-@tuD`l=x3%F#O#y#~LR0Q%=NB6$AA4du@9*7P**k?N zc)flc-d0kmx+wPdH^;!scVf*-liu-G`z9Xw@UZ0F&7DPNJ`r0Hhm|-8MqCy=(8d-X zp}5)T?A5ELvifW5kGdY18eeucG<=%J9*3iAS55;RkD;Z+F%C>A52>3NSviQqk z{-;?hR{b!&ETqNM<&f&?!0GYy$J2uA-~RHeKg+OWPJGcWU!P#wV=wgXU;d_(YFju@ z*BGj&FFO&w!EEF5`BFJ@SHn<_?kEsQaD2O_?GN{^9nV|8%>V!Ac;)K;E{CJ84O5u| 
zE~lN{!`h;jlHn4z802b)NTmfVOE$UOo_swlI#)EtGnw<>yzO=CRlCIlw{PAR=5;XT zpVZ3r7-yb%utaPYdmFcY-xsbsn>lPrhCO)-SKMx$T144OkNINoFSR)%>~DU z{q1}CY`54t2L0akdYyarMlt3B<=WqGw}USIm}QyFw)gwH2r1L7kW6v+%i$pH$S0>X z7P&m=JlMRlbT;#f7weTkn|?E1ihIBO7M*u(uY+Zs@YyI(f(l@|H0vJImVbCoC{b9z zvV^7K%T^4}zz;OZwqo*fh`Joza4jAwEy0i30XwUag=I>^6ov`=P(vN;ZLk7H*92d8 z2TqUdD((xvw6UF)0=92LLxa!+A%;uWal3fUOAZajOy7LQsCrkZ(U_EwKmbz^lf$nl z+%7hiWmHvI`%>58HT)PUuoD)5QUK)Wnl1+>P>eBn#N&4HhGohNSeBUXWzG1DoSTt- z?jY#l;K1p@$?!58w~McpuuN&#vTQrU+Wokrf|EmoQIk=jb}f<{p-zQT2WIs(2u=9M zru#tt{}1-7D}&p=WnSsncCRWsYFCM-?+k-RG5x47<}2ogIf8 zeZSxBo@4N8A;k3o8wE8UJ$ku*Qc?kE#_DYJh7B)1>+j?Fd#~cN@84xpzswHX^6$o` zr~h0JyYGwnaw>bhxNOOXgSXagi4FZ>w&CZq+2XPF&g&O(%GU&3dHyWAYulZo(?ZiL zr^lA__HNwz`if@ZEYY>=r!TR&y83{c}YJ<_ROz`U8~n=P2IJlHvPQq_cJG#Z%U4uv~TmWqq4T&VsC$Q z+xs=lFFNA#hYP=6$%Wi**w%RJ8ru}+&ixUorfZ|NE;{Hu@1t1#vGmI;-yUSIzQ1z0 z(gZQadmWdroKL$Dm7TaEB30`BMG-LAjaqW8Y{dNgy{`;asr@20b} zB&KtEge*=wyWo{^dW~DVOi94OgcpV1EU(n;e`dBf@w~nN;rObPcIuY5wpvf|vv_%A zNy3-2=Jzi=_qJQU{ho~L<>mhUyziG)7M`DX>Y!BjjC~<2B8^?3w5Rn;yi4ND*0k~T2oIn4@%4unD zOkO^>Y!RotACD|Y&H3x?YM*XIwf;W6n3?Uu>-GD?KAfMj?0txuPidZ3q_Czglh6^% zGjsPh{M-0$*S3b!`y$<{-`_l7r*1hTd8(i7-!DrN7Jj$?_ksCtrF*>8j~9y->pwqO zc5{_0chrW2eU)psf%EZHA;whswJqU6+wPV{pJw9uRvaVy;M(&2UR{jEJ~!T5R^Ix; z^;x#^aN_CIb(U{79+yiEvYRab<}kngBA-uxc7Iveb!*-!uKVe2cG6kf1Fx%WdT?a+ zgMeARQl{6;G>=8A-D>@?^VO^Df7^Z-Y&?9e^y71HZC~5-7N32h-))i!zx-wHsg{-J zd{v{fr(F}>_SoM-@yfol<*UNi*S)gq(mt;i7bgArOx5eP(w5u^Ieok4l zR;#XS{x-&X=>R^JJjtmSH|R)J34@(x!-QK(h_I-Uss~A)%MGU1s4R8 zc7-?p5?#jhd~Utn+}d+HSsYjR^GLaDzgJW|b4ssxzS;cG=Pldgs$RPOjuDg*aH>9; zIz4c6+S!cFXa9T7ulXZ5_wr(QR}HDqC*MNE7(!Qv_5QkBZg048a@x5$p3`Hl7;m_{ zXY+$8ZQX}_>wT;29vom^dH$^V<+9tk?#Vy9PPIOt?{5F$?TzSPVrBO&vmrrHpI-OV2>-V~)&#i3x(p9qi zj?>}znu*iurNwmiU!4{mcs9y(rCaZ;E$^ycw_cj{;~{&=lpS`x)AMGXY7}Ymkr!cf zT`);$!IXl+u2c6n1U)Ex8_6)v^?i}Nrlh*7iz+% zq81>c8?og?o$-{dO10*>{x6Sx)UjClaFIsl?CU>X?u)K`95Qo9s!AC1zrQPFH{1Oy zDVn*q^b#l*GhOG+e)4_Yig%YDINU1R{WjAB%ZZ#6LezLvp)5_&wozF^m_cuqvCBl9i7gD&OVtHV?JZr+HcGGWUX(0zfhrk 
zmR-I^KyyjIzk7ti$A?=^>+QbqOlL#o&rMIaWL$Kr`nNMZ^UjV!Bgyr8(%1KzqqgVyT8FOnx*k)i8lNojx|iRdW8Ks0m*4)*UUQNuO7{`Z z{BIW$+evj4JYXTNjbI@l(i=MrD{k@f6cYjc6387g}w zTrIwA$NQag((k;ZS1`IqdS|_kQ7R-OkKQOFR#(>NTl(dZXfT?@HtC>NDi){xrtPSsE2Qt}cD9 z9sB#uWsMitHI6?yIsek^{5=nI)AyWQ@b|}T$>QI0%caz9kN@R-+-O+%mo@w}crn%P zMCV(T&*#4Mf7fpu@!-#K`T8@-2X6PAt@kn7Xm+c_Igg!Pt|s8ZvsT7s?%*=1{blm&$Kw7Ms%b0T=NJ1fyd`{W=3{@0 zi3WDLJFiaLGUeken@XeF6Kj5E9uL@_13D{b?f!ee8owW(Uz1Z%=x!Ny=*R1no zZWo=_U735k%<;zk|3Bu&NtpHg`}KPLgUCnkw@s}nJhc5oX5!)RjnC${{eB}HbN|pE z);(^8*7fiAmgmNLy>i$Yy**R7MtoYM`~J-dA1sU0K3_fQpK+w);TO~MrL6`(-syhN zm)!Exs`RzcG3B25!fHMX)aOe)$*n#iex2?6-SVm579aH%yIgTTM(McF=iHBL&pp## zb=E@fRqM9Cx)Ocv*YSHY0#|tbc=mi=x!v2H*XM0HYj&G0SNPCZcK$k}ZU2hSRsDQA z@mrQ`{=b#N{r9cI@6NmyRjnI!wY)S)dGjOTjMb)W zwX3bJ-j3k265pB*EyqU89oU#;SCJee)Kf1T29K6~s{O7R-* z>NuC_adkVt)>nN<*!Aes={K9tGe+8k$CPfZExp((ZgBCno;@@-2WO@Z`sQ}-<9{%9#-zmzr1E0 zUS?1Jzi9Y(<#n61Q{9ag19sP(-TFyMm!cQD1@^zY+r*o$(Kv;FQ!evvGoL+E-163( zJ@ir}Ot?09essp$(reQk*Gjj{S(iRf@>b8h>U)(XOE)jvC@AAT<<*>zS zVEH`0ubh(m{vYONFKxR2GJTHYUP*TOYZo?MZ+`wQ(B|6Do;jN@_^rx`YK^#1LOZy!4QF&WB5-uk`w`<=75 zmb_c=S7LGcJl%QGhUXW*cb{oqp}XbxE9Qlg^L^6~{W!RALEe4KMZxO7UMzmZ%*fJr zXp7qF>iYK7EXmC&AC+$PbA}bNz2330so|&E_HJW8Yg0Y1H@j<|soi{bAwTQt3b|SD zYa~zQw;t|f`}&&opo&?<{<4gE4cim7i<6h1+n2($KYT{Ot>)`ap35dY{5jGj!{PD3s_OCC?MdQCg%&9K+q+J; zjxnp~pS-Dj#^uy*-EC72D&L)PVzyc4lUSZcXk+{o*W+h|6a#;9^srkJw``rzs7A< zg@;c*n#PGN&;95((&KjIn#_0I z<;nf`CF?W3JCix>GkGj{qqgQGT85>_+_z}pwYoLO`^G`f4;S6#6c@ASb#iieq}Q$Onr+ze z;7+kIr>XpxCzDGKG_tdvlvG-*pF2(6@oj~0Qjg-kndzDP|GdgR)43`9!PB#{_!@lOR5W!`7AfbGCaO|Zl%u__dQ>N3K%9xT;OEz+Z!HVZCk?e zfQ8d`-?^BXvFELfpQxMf3}lJ3oR;8c|L%uy(JvQ`z`7mZUafBGjks_9z9{~wjknUG z8qMcxy!H2WyxZ~m-I1EWcHzGFkDe~^3J!mAe%6Z@GHN}`4HbCgYaTGNhPO>zx_5SO zxhIE*T~ovD@|1d$8%Gj<9X#TBCU*B^PA^~Sx6bhfhDxXI9Qu6zWKBn6kG}n9z23R! 
z@02{hTOK_9ao_fni)Hz%{uKUxWA!cfo{ho-srmx``vrGb?bF}L$>&hX<7a%`_umuY zS9ew}E}!RY*DCWpl38l$%f%V;_J6-jj$QrpR`)@LwYEQ2DqiX0FYYmHS@i4c>(5pz ze=PVuv0ZMFo%!92rpmW9owjRQja(+I3(O7oHaKJMo6mSP7(CR&)!?vGaKar{M!#se zx*v-7F4Uh;e{lQb*EM|$Co?CMN)%t&vF4uh&nasx=TDPtp6t9Ky1ydRV*aG_sxj`n z-tBtrJo#qTRZH++*^lFYO3zMW4&!TO4mTkiC(?9|RZ**K-Oq2=tu zu;^S_mC}fIzOqK1w-)`z=8Yyh+|>PZR1J9_226jxL%&DXy6swIy6nEXo0IzNp4|{Q z$gK0{!HknxPnPL-Hn1phbMPo^IdIJUz}JhV*Z;C_zt4ANis>{@<^7iHOq#se*;>S6 zvQ5)umK!?9L?76?e)oH=eUF~$T?_0x&c^p7n|X1w%D3nL4r#4%p2PQS%iSl8(k(mr z)oN4Br~eQWk1N=>ciQ&5-)^(V#J3eRS`;npPnX!4=p_AXGUwd3y>jmlzGPq9`Cy%6 z?19+AVBv;`cP8%?J1e@Q=C<%U zhfOw91k7Gs_cshzSav3 z>yBTSFr2pQ?H=Xg7~iyWt9961=6D@aeSvW$BvQ?Os#04$djyluz;S+sS9M z{UKL{*x^^pe@dOv%40vmyj{;C=YGZC(nF@(w!OU-IC+_G?rmp@PR)5w)y#PrITLx> zAF(_WW1Rl+R`&YC*9&*w+9>dH(cJZCub&QZKO*_Vtahk+XK~niUkNR?zyO>wS29 zoonxm>_;|gdMCfd2_-F#PXDZPyL$P#xw`F&$Ce)Sw@GYbHCyM@W52y&<_&Z}lsd)V6GGE2x zwLPJa@BLRWv19mtJ+9i+V#(pHCz*dvIMidf>HYL64O2=av~M>(c~$h$;QUX4>Vk%y zKkU7Ig$^bLZ;dV6cK+v*Oy$+ikg}r7L2&wNpZc0v!a^48j3qXlPo^w%?G|$tXk%Yk z^5f$&OOdqIYqxRjO=t2G+@kd3)!`Pk*?gCh_+|uHsGV!MSN`^z?U#E?Zoa?wJ7-60 z_jHkuob9~jcfXq7C_mBcpYq09`2LNWs=UK`|7tP?Puq6#WIyt-UMrA0?Rm(NHCxT} zr@U_BuCDsL>&GN-=Z8OBj!O55KVDeiG5P&0al_j?%=hSQFZrxyGlj2M)8*Lv%8N%A zZm5Y$KWxNZ_CnSqZ^7FWdmgw+SLfZ}UbrDcJkLT+;ikcXbG7g7lDKGThh}{7joR>-W$Lc`^{2kPblp>N-c!6TAb*JiPe+=c)j|il1{0o?X3L`+bgqhxGmP zPq=^I(2L#kaMKp`XE{7ePtF$2pA{ZgTB;%Qt8-=kq~4FdeNV67zo4zIcY6QEm)^YP z(_^Y4{UU0_JDYFY#r*lXD$-9%qgZUt9g~Un$Jn>nZ~vEPUH@?D^rAyM3h$(c@8-W$ za`=Jd<8?>AfoA9r|9o_azqsXy(C3Z%I>L<34(j_u3-U z%#&S%PORfRKF7qhX0E1)*x&1O?+e^NdwcHY0K0v8lZE@%UuA#IGgpu=(JAG=NnTWY3G>P2rCyK+Esh} zy8L4U)_FzqQk{zZ~DQ`U-9V^>M9FMB4(s(=)`~I z+FQ=)V`MumHsr{1S67{C%_(g2o;v+oyWPG0-fWXQXGKoW{m^Fou(m9UNr-zA&(+tR z2b+^WPg><~xO4M_TXR0VOPb?fv8S6gerK%x3L!IYB^`mEjE}w;hk^Ig77t8MaIkS09@*SnScQf1tYbUJp);QL5?FqYd?X@S}t{ZFA@7}1H zRj}c%;hDPziaeWdx-V-t%RFWzV44!QPC9wn$=}QD%&QMCQ|(D$&b!pqoB#Hi`uxLx zO9OKvB>D3^MI?UT+c9T{!mZhx66P4?-Dh(adVR5d@`vxa5rNCLSSYS}Kl@h))3SRC 
zDYLAeEH;w;+>y+xWGp1>3=kq+TT4}rTb6rZu6n)F(%AZTOc&^eurv2GA z!Cm&2g6=aG&b88yI$kY1_w&}&sSPc^?7o~(R`iJ!Y~TL&%ihO*0TyQ(e?Afw^{>0V zW9LcLU5ozzxRlbWyU=KUY1s7ui<|c*n0G7-%vN1;-pKLrOUonoocexSoS0g%VExgl zyexT=_ssYGHt70Po&Serw%jqfQ~UlV0w~)7h6=ai8ga zY|mnyJgdz4v3Vx1Vmkv}WAC2arzAY#j&F|CBV`M&C+5A% z7CR#zT52<%w5=_^Jmtruk}I|O^HOI%7JE5~S+T47&=St*BE_9Yqy%pG?7NY3vf|R) zc}2VLguN?wnq{|6V`f^^_aEQ8cg|n5uRiJ!M>hZFudb4lZ?&Cfe5!Zt@vDNPq2V|8 zY`9QvF1cxLqK}CTyIoR?id!P{!)LD_#Y?fM2yDGu^YruG?_V~5Wi4;43C}R!k>a!4 z*O=|pA4B(uvuHFR!qOqrZG>6O)+qmK;5A5ZRiUF^-5DR84Uk?UdjOiqt?w~ol}=Cym! zX7*OG^F!97l&!wMHavc&wnY27df@%AZRT5k^P1l=xWblF?L6<@tfRX27L!sX53W@? zEK+o*+;Y+69UBb!Pi>mo((q)NsOkUa6?vbI%u>$MVRX~m$S%`3!~Y5YXU=_%dpr{^ z9bY)3dhsFO%kEF6-FLs6x?4YVk7#}_bLG~?JxZ-=i**j{Zjs_S<@s@=glV&}-@M6( zPJg$MWn3=A5nA|ihqP16(Na@+MvrChcg0@IFEHDgXfSj3uY|&f`}{x8nbx4PGuQ6d zlglBMo_fqW$L@SOan!=h&9l4Ed|%BmC(kf3p! z@gqa>snhHCWPMKhdTm=yh5B)xYkM;8&#bw-$E&$PVPr`(?AZZOYfWLLD)TfE}Ohv~CpYUJZ1ET^eV4HjyPoYrG*`=rtG$Q0>q z??1Soi+phI!&Kj28~-dSn8nBL9u)g@Pq)?SEw%6B_Ez8iGpXR?QUBMV1hQiB9;-LX zr*>VRS?Hj;@!-^Wfx=7w7V%l6R(H>u?X$1Qwy|QL_WDi#7rvjl!tYxvw%%z0nePa%xs;$YksX+{q|6{LspY}uIzir zJp{9P4zIO%M_o*Sxh648mx`WtRN;nw$9zVxVAhwPq-OK$)Awlyxa z{KmaqOPj?Phgq!iuh_wpeP_?jtxX#ju3htM-R>FTjc2<*I#lS$iRnc+{NC=d-coDI zg;(tNwgk^+=Hr>y)z!r@Yh~D)h{R?m0oC~?tmgw%c^*zWwj;{y-OktRCKRL!pW9lw zXnRvgtYY7u2aARM?JPTjW;tz1omp|%KbPOuY`=4(<@Ns%v)kQmL_TqmRwC}|A{`|Ro{?4~G>2iC`THZ@N z+$-P|<i?7#kMcy}RnI6xTtP9*67&A8Y2YR>haUHGO7yEp3r` z_It}dg&)$%Qo<*9tX=oKS6EfTHn;kbcC}*SAJ4~z!nz&nS#E6d@SSqa_}D?|Z0#jK zex}b~+FcbV(K7A+#Vv+*s>`#(vK1#qSEjrdC${F@40HsTz$HQ5ADuW)3j^& z^{wvv-TY09Rv6x5>nJo>F3$b@or7$IL%D3;dVv|-jQ8xA8Q%motk&;&ef)^tu1Bj@ z$L2fTjysb4bou-`r~G|}UH9sK>#qKJiTBLxm|cHPt!B#SIC*#Jt5t^4ZIhEdCpko0 z_WLC=K9P+OsGKq}nRUx!-WUHpG*-;n{OewsHt1yXh2P&X%LiT9{a8KXeSLA&-qvG# zm+U%KDSlts^S5g8&9;5%wf~OY|9m4P<1mYgcFIrhYf0Byw$A+7bNl!bhxGW0X^k$s zHw&>yl-sq=zg=GQ*N`)K$>DbSlHYU74@u6*Tf-^9rPhJ}uf=YTKks-vuW`d}HL5w7R0#vglI8x3fmO-dyTt zeyh9BKqqI~xu>i<4U0A<@CogDx-CicIs2Ad^PioYo87c*+hw5%Pn^{IeyADOl{`o; 
z;mE#gciN4R)S-)FQ_x(=p{M%ctr?7o^c4vmR z-tL}xb*m~2)BeQH-f5B1t-Fn9Pwb))S2cy5f0y4n7piPtvh3WMht8kHb$khJ z__1fq-STIWv$yKabUkV7^c^jHE1T}srk~QgJzIwRZ1w!V1^@pk`W`Ei4L?yBzwGI` z<@<^b`V0N~q^hvf#ZQ1|x{hAl^BtAReiPW(ch6{!c`Q_2(D5p7vW509`E38cef(An z1%-ZhR@R)5Nxft8be@7%A7m+9z(hffVx~!REEssy&geWV4b#@~Z;ANEJXhh0`RpwR zOj)+BR5(}j?PhwTRo05$|HpT|W^=4zQ4toOed)5Fee<`(yd_`FZycz2enWf%SKQ`} zQ}iPrv-+l%UP(LnUq*PT-o6#-0xo(g4js3u&$Ah2WtH2pEGhr-<;Rt)b4&{*cgi?g zzu)1ku|q|{Q?DstK@WeC;HHw7L0Q*!1WsUz=UiMYkuISl@NG?e|Gs3=7sqxiFP{!M zR3h`U)um~(g$rU$vM=3eJL~0;G?VpP^z5_ieRY;9zjeFz*8XdR?M9o#uc^mpab{W- zAC)xzGE=eSC#1W%Ws-~nC4P?)1P*K&W<+x_((dut|a z_#yr6PVtq`hqzn#C*FCoOi|(IF`=0Y{(M_panJs9&*i7DCqxV7Oycm!e8#cQ$ZPX6 zv3!oZztpaVPGe6#zREXa#fgplXJ?!GK8fn`Rd|$|DL5g5Z>H?z8kRfrZO_W=;}zsk^Vj zr{Yrfd{3~yUFW^Z-?>GcN$dC6OsXn*J#F&*BhRYi{$EMgd#iHz(4v z;qgA1{>2=>wh643owVa=wtim4ePiw>%~l2m**~5xjv=SBl&>9oT|MNP=)bWjGuSx36tKij&4wVWGcf)4>jj1%V`?@Ie^y92`4R=`W5Hh47w)zsam1q zc2D;@pB;_;wt24`H%ur`KE%}AH`l82^c=DOTPzscw`5A4nbDAPx^I)z|1H}@ruF?0 zxOEqFsEXB@uUVfD-I!+9zoTG+FI(=?8OxVEsJ#4b6W27iIM3uxo&Gn?XLgE3s|Tyy znBkpbRdCw!wEFZ5yyuT_bgN$~d7gFe!tL)F=ksbT-|sT|c4tHC&pGRPmZ-2YY^wc{ za%;&;YtI={bM9<8^L^j}6A4rncB7=W1T=64KJ& z=y<#6@;BT4Kd;?3%YC2->NK@)3kqiMIcqduWBZx=*W(sFlB_?&TqI}sGkxRYXVz-p zZv0Z%>vTd}tJj2AENlARf1w-0)@o!F>Y6fLux^K91cecO*<2upG}khsB$hsVRz!4^yx2{O+Og% z#4q2N`OA#mNaxo2GyAu2dfae1{pSK-N@Y!6_=mDvtm{3sGcP~ad-1qK)noGn78QZ{ zDsnZv>Vo{i`ty02+VA{vR2DUfNnFFoCt-eJ`eK&)*(RG49%UU#)Z2W+%IAMh+uCAJ zP7h{z`IO6EyMLIMp1Gd(Q`*emPE|kYo%ZiHS+y*UCu;6H`nMfkwCBbDeML)-?q4J{ zAtraJ=dC3Tiw~|0U{MhdU-^=^^6mv~oAy(_c~^GKG?{k7#=E)N6@2S`4uvEs zZOwArV0O<_DQs50&gIju%XpW+h z?_B6;RVcOgiIULr<>7fbtIZ7vY ze_hh6`beoqQ~6QT46VyPMe4b~F6Hp0$o&+Uus|zfnc`iAqFKKjZ~KH;8x{uKTfzE6 z&|OUUh1aH!r<+dhm#eriP5x`cGmqe~>ux)rem9@3Zazn)=g3y|$oMC+OG`bL$(zcZ zHYm(1I56{ltM57Gzw9^C#ggP!?S5jxlo;KYe#6D>>10qYSDe2kbC%K67Uq+4*H7Bl zJ%{_EosZQ+GRA zUHFuB>Dx`6`i&Z$r;m&4uLcbe9aD(>AM9_fyXnt`61Bs>IG8ej`3WuCwC&7MnWt8l-F%lQntwF< zc0(@nwTZ?Pripn3iE6VIuW0K##jRe?@%iPHOxK{P#)nS#7RoSkt~vAR2=mLYyT9Ab 
zR*wi#4Lp*1c6#dJsfw12KX)Yg{P5P>d#3FuQ+(04BZBjM{#aP+GIGiUZQA2GJL&ta zmiJv#j;soh@mQg4wrts&2$81Whi!Jx7VYHS;pAg+qx@d&_P^VVWbP;=Brzu*I{bFi zuI9r>_O#0`ohCP%Ns{kf4X4Km=VecWzh=KuS}&pGxBA*z<=3x^JQ{x=UNZTbo<7qg zl_@{g>CgIfTD$!~v#G7yHNjfx`!X?Z>2oU6{49RTZs1H?c+)b`XJ292EZ)hT?}bdZ z-z~d+!bWwXXld)Jw8?#2Qf&qFR@pXA=KAU)@u$K2&NHd-P658_cAt+Pd#W?#v4z>C zO>zaNzZbnbuvOQ5O{HIVebk%@dACe_CB2*+ia38h2r_=T%gpcY+3&{Nq}AReacv8<+4Ap9t&DYiVc{@1CSpM78vv`h?BF~Nthb_}9e!^0R+>Fht%UA4qB~@zb zc}#z0bds>S`OcpwR!2|D7Wi?r_qULAi>rN)wZ>$bD2{$->BwnLeZMo9h4^=T+?!f= z>21-+-?zGX-tUS{N({MO@_1(YPVoa4Pm6@7vu#xOouzOqNtYqdaLaR(IWf!pvZnvF z3o{Hqvz7OQ(t*5TJi$2uk ztLNsR@Nuq0(xMW7yNKpMzj@1e9(*#+imRBLbuQJ>E(AQUYt<~pL zALYHjnpJpeN|nNylV-`M=IAqW&JkDRz40KY%W9%qWJ<)8^T*xz?e|PhjC_%@LT-~^ z;JZ(t(;>899lRx0+)=AxEOxIbx%<~Izl=up9fc}#@3vmA`g&-Szkt|VSr?u!wV#<4 znXj*P*z-wn&gGmLJoi4C$o}N5efM)tehaU#--^_m55L74y6%d+HQD>XA*O5L>R&`> z+IKu!#uu4e$$Nf@WT^P`6Um2;>u$LGXg=%mEtyg}UQP~&DsofZ*$G zwJqlFFW)G-b9@H(JC=AG0rQlZ1!uk6CeM!f68`htj>*S_g!#7ZX$tm`^jOBXW0lV5 z=HCmN`Oirj@0KZ#@!MfiI<>|B2`AG8>-R~Ywbqv?$yf-b?%cpqO>17t`!( zTqaR#PHSE4J^1_E+gr2T*S}r;{Kxluiy!Y=b7{uo-YJJBb9#U_ikmIDvLVqn+XNl!~D8G6JHhme8iJq^a@W`F$&>03_!zJJ!=a`jsD#$zjD*BsNy$V!~{Qz2(x)Qt*bpZA;8 zq^~<@Upr<0e@VMsMZ>SV#oKGoa+_t{U)WsR_IS1aew8gJw?7dwEH9O4udlD?8=O-xz{UGh}t#oNy>y`o&^@HS52hzb|+TK4W2Z+}x-) z*PG1lCv_|2-n>`$`)l~Bz`ap^A2)4Lzjx%)la-fOoLV;fR!M_#h*JIcx7)V}W-nQF zF4aIv$9KWxKKs67eX`kd#y9yl-Fn zj=!e$Q~sLX%wrD5SEJwe@Ui~=)UBU!=0$mh-M?97cQW0xsy%K8{6F|7aJ3t6)X7<# zy$^l$V(Xr+`ngjnJJI-QYwyOCUteB=7H_jB|LC&`_u5=>Q7P+UP5evI9ha|6}!6%D*A=#HRc)S=64F36MR#SvGmltDNplX9X4yV0(z?YLxG!I5aPvg(!iJGweW}5V>)1S9SCMU@z9@C4j zo|w5KZ(pth=tzqPy@nghdhhPA|NDroyXKj5{Jebot?%!aG6+|{xN_Nl`}^pRcB~T9 zk1X+=%&($rr?%d$`hBR@EeYRWS>XM76ZDy~3ta-7b^IYai46YneTW%Nm#>phwNRgs&KGjp-bd4B0>#*RNfv0i+1LYYrJC!}_n z38zPf_~yvkO_4K{{x4_eU#6|M|4jIl3Gy8rd-L{aZc*T872uz*AL%0-S^DeK)6=c_ z8z-9<8U{XTzOZcHkKB!~`)=nick7i7oXyO~WAj;6L2*;e9r5QIPJM;0pIu{KyhNb* z;zh-`w<7mj9M@hWCvimC`9TNw>wVh#OhTJiUEcBC{zIMreYUNT(>zqS3t!nGYY1r$fX~;y#;=n!$kAU 
zluZAcwLNjQ$s&f~jh%9t$rtZ^-xmEc{=2xayAreeo(HR^npB)HUbx+DYw3yNugvo^ zKHmuS*;D`etT|_L)s&M>^-*&+E$J+pI;DZ-(X|=n$5s0rZ}gOA)Mt3St@*w#{o?BN zdxTj1`|M9QE_P@8bNlx7I|p7=&$ZfeJ}_U;eCNJ@oh6fw3C$EQR=BuqcCJ`OZ`I#Y ztIs`;eCP78o6o-yymtU#@*B?=oUqJ35!}rDXzA`we%A->srPjjH}hq@ovVE5 zis;LYI#Q2bUtf7lNZ_fT-Afgz_iK39?Ryqw_4`io#5txC`=#D*?pl#=8*Gshef*hS zUqj22A6c@wo8NBRsQK97U8UN$`*p`<9W6lJrTd=UD|L7{Ndb7!Y))2W$v?$x!sRSVZ&Srs_9?r~}Ol!-SQPFO6cueW->=kubb==%8h4f;0& zPZZ8SDtn>(uzSm~%$d^*{eON5ma?m{h&^v{;KwcR4~IS<53k;7Z<~6*#^$B6)t+Ma z-*c`Vl!-rhu!r-0&-H?58?M}CpCZcGxpVf)SLb%mP-1Q0qc`#1zL0h$(DqeP21bDi zr{1!E3!WYtwQ0GNS;J~Z*JI+R?4@mAzimSo1bK zb2`R#qcGL-(Vb7Lg6FJRw!`F3xyEvx*}w07?|EHzVaCI_Y@?S4pQ-m-~VBe@BQ+%g5if#_IXC# zxT9TBXjHDi6SB>Gvi}uP$t7oJcLh}X$lUPY4wtF9FCX5r*$vu|Jm|O;Poo;u@Fe{5R$2rCo(n zFHEgeD?BjOpm?kAHdhWazuE1a+mFjtKZ&WISgBYWBi%0)!R>fottRBZkCeAhWPxv?WA}L(2^#-y<699pz2`2^cD#7Jgo~zH(>y zg~yLhl~2%2`_x|hQFZz)qXb^{`#Z0*q*`U(&AilNB(B`WSPNRUb<1e!S@*+J@5hC0 z41aTdi&;RPj0aE2iH2UeH`}e>?GXOx)+DAImz z%iZ$(Y&m-;t3BOh(QedNao}bCUpC&1?RT>-%&%X^6R{}$W9N%IJBweG+cA1&M^xzc zR{fThwu!%1Jlla$W>4aBw_d5hvr$_+&E?OyevK`7${%rbZj-3O>mw=^$)2Bo%xX(4 zJ+@N7uYHr0TGPdQ)#guLS8i<%{`&vl_v~4Q$!;p)p)tSqG$in?Ydbh~;{Si&_t*I5 zaP9qiExNMs^O2{?_cr=0e_-YM_EzbO)B5`#WJefh9{lq&PET04T|DnfLccBNoZtfj zIX8os`7CSA2`_8@b>-Rnj`utNvAw=L$Fex%+nUIQ*>ZWZg`2vvTet4$ZBAwjcS`#p zdgNryFUi!1#|ovd_PmM`U3ci+?Jb7=%J*Ga`Txx9@YsGQtmf?F8H@Xt@ZYPqH}kvW zePwIj-MgI2cka0NY5)Jf{rmsgNExU3EGu+>HgT)%LLLeEtm`YK3sly|{A54-{F`~^ zqazV#Qs+&*I9GJ}rZvhIa^Ie1FWGQC^11w-_2L&=K0baqJAa>Mg<-)`q1xqcy|WI6 z3O+IHUf;Cn{=-$_sV0+{J>zz>9hcqFvh79@cbs_$=c~?2gO&H6H!}*a<+yHWs22KA z+sEEuTJcXS>2>u>BZCvx%`?iaI&gN1{&QEmr*1OF^UX4EO?f^YbmYcVIpZpm+S7Ax zJk5+tUgx>ya)QCR>uW;9G<#4th}@5+XMdyEqjh5Q=)C@vQMdH6zLwHDfk!#Nv`))Yi3W&Jezs^*V^L!Tc0>+ z@vghM>440g#_xw2K()i$w9>MuBa}RG--0X3zbhF7vlb&x6 zowhJm*6N=9%J+mb^wYAvhmM&pcbejQ`GVQn+yw=>3yg#(6!Zqhtv)_AHCK8{@z%*Z zk7?WJ?Xc5)-n~WY$drbV9UC2f_a&{&bpUNupR~P^tGKn-K5eOag5m$i{q{_bcaO_d z>uk-ss;0uQ_3S_0NaoXfyyDM0g*H6(V9j~HW7fr`rwdCWj`xM_X8W|`=d#%^9=6Nr 
zgp|$dQxfQAIQ@&mM0b|Mg^co(N5wZzI4^nZ^sR)8={q?~t{5JVun(WzUAvIa)j?1r zoMq9v4O6oh2Bn?bbZy1kh3#w$7k*iISCRcW$AABsAxrt%zFrNB&hI@eH10M<;bElZqeGOI$x*lD4OoJ(e;k#wyv;r-F|Sb*LC2& zJfo_D=m+V|4fg~XUDSAUW53^>y(ZQ3*w5X&BP&!)bFZC=KI?DyZ-!Ml&+3)m*+TqX z7dUV@1V8N7@8uAzo%khx|39e{bqAZ|Uo|h>dvb-KS>*bZLxs`zZa7|RmU49vToJ($ zlEx%t^5pJWDXILuPt&TBE-dI`&5@ki)|z~I>-G5eXWt%|uUjB?_PcI*rBB|&n6t&7 zY@e9Qp8WL9?w7{g9zRtD&o5Wjf;ONuChpw$5{_I%bqaZ>yg-z~W!0o}MQ8qZ@Zt9NjP_*5#e z{N7W1e%VB2H;MHw(VQNg0+|f1;FB~Oja(c!Jy=<)Pknj$<$QgfqienBMSuIh5*g>2 z9(wE)`lDcPSYfa=uh^mCy8iw@PTu*R?*+q_51zu{8icsPXj|U@@r?CUR}HQ zn^nv)`^ejGzpt?@l!+~WYdUl1(r>x<=2YFUNDk - ### **#1 Empty input causes crash** - - If the input field is empty when page loads, the app will crash. - - File: src/ui/Input.tsx - - ### **#2 Dead code** - - The getUserData function is now unused. It should be deleted. - - File: src/core/UserData.ts - - -Use this list when evaluating issues in Steps 5 and 6 (these are false positives, do NOT flag): - -- Pre-existing issues -- Something that appears to be a bug but is actually correct -- Pedantic nitpicks that a senior engineer would not flag -- Issues that a linter will catch (do not run the linter to verify) -- General code quality concerns (e.g., lack of test coverage, general security issues) unless explicitly required in CLAUDE.md or AGENTS.md -- Issues mentioned in CLAUDE.md or AGENTS.md but explicitly silenced in the code (e.g., via a lint ignore comment) - -Notes: - -- All subagents should be explicitly instructed not to post comments themselves. Only you, the main agent, should post comments. -- Do not use the AskUserQuestion tool. Your goal should be to complete the entire review without user intervention. -- Use gh CLI to interact with GitHub (e.g., fetch pull requests, create comments). Do not use web fetch. -- You must cite and link each issue in inline comments (e.g., if referring to a CLAUDE.md or AGENTS.md rule, include a link to it). 
- -## Fallback: if you don't have access to subagents - -If you don't have subagents, perform all the steps above yourself sequentially instead of launching agents. Do each review axis (CLAUDE.md compliance, bug scan, introduced problems) yourself, and validate each issue yourself. - -## Fallback: if you don't have access to the workspace diff tool - -If you don't have access to the mcp__conductor__GetWorkspaceDiff tool, use the following git commands to get the diff: - -```bash -# Get the merge base between this branch and the target -MERGE_BASE=$(git merge-base origin/main HEAD) - -# Get the committed diff against the merge base -git diff $MERGE_BASE HEAD - -# Get any uncommitted changes (staged and unstaged) -git diff HEAD -``` - -Review the combination of both outputs: the first shows all committed changes on this branch relative to the target, and the second shows any uncommitted work in progress. - -No need to mention in your report whether or not you used one of the fallback strategies; it's usually irrelevant. - diff --git a/.context/attachments/Review request-v2.md b/.context/attachments/Review request-v2.md deleted file mode 100644 index 0a800c7..0000000 --- a/.context/attachments/Review request-v2.md +++ /dev/null @@ -1,101 +0,0 @@ -## Code Review Instructions - -1. Launch a haiku agent to return a list of file paths (not their contents) for all relevant CLAUDE.md files including: - - - The root CLAUDE.md file, if it exists - - Any CLAUDE.md files in directories containing files modified by the workspace diff (use mcp__conductor__GetWorkspaceDiff with stat option) - -2. If this workspace has an associated PR, read the title and description (but not the changes). This will be helpful context. - -3. In parallel with step 2, launch a sonnet agent to view the changes, using mcp__conductor__GetWorkspaceDiff, and return a summary of the changes - -4. Launch 4 agents in parallel to independently review the changes using mcp__conductor__GetWorkspaceDiff. 
Each agent should return the list of issues, where each issue includes a description and the reason it was flagged (e.g. "CLAUDE.md adherence", "bug"). The agents should do the following: - - Agents 1 + 2: CLAUDE.md or AGENTS.md compliance sonnet agents - Audit changes for CLAUDE.md or AGENTS.md compliance in parallel. Note: When evaluating CLAUDE.md or AGENTS.md compliance for a file, you should only consider CLAUDE.md or AGENTS.md files that share a file path with the file or parents. - - Agent 3: Opus bug agent - Scan for obvious bugs. Focus only on the diff itself without reading extra context. Flag only significant bugs; ignore nitpicks and likely false positives. Do not flag issues that you cannot validate without looking at context outside of the git diff. - - Agent 4: Opus bug agent - Look for problems that exist in the introduced code. This could be security issues, incorrect logic, etc. Only look for issues that fall within the changed code. - - **CRITICAL: We only want HIGH SIGNAL issues.** This means: - - - Objective bugs that will cause incorrect behavior at runtime - - Clear, unambiguous CLAUDE.md violations where you can quote the exact rule being broken - - We do NOT want: - - - Subjective concerns or "suggestions" - - Style preferences not explicitly required by CLAUDE.md - - Potential issues that "might" be problems - - Anything requiring interpretation or judgment calls - - If you are not certain an issue is real, do not flag it. False positives erode trust and waste reviewer time. - - In addition to the above, each subagent should be told the PR title and description. This will help provide context regarding the author's intent. - -5. For each issue found in the previous step, launch parallel subagents to validate the issue. These subagents should get the PR title and description along with a description of the issue. The agent's job is to review the issue to validate that the stated issue is truly an issue with high confidence. 
For example, if an issue such as "variable is not defined" was flagged, the subagent's job would be to validate that is actually true in the code. Another example would be CLAUDE.md issues. The agent should validate that the CLAUDE.md rule that was violated is scoped for this file and is actually violated. Use Opus subagents for bugs and logic issues, and sonnet agents for CLAUDE.md violations. - -6. Filter out any issues that were not validated in step 5. This step will give us our list of high signal issues for our review. - -7. Post inline comments for each issue using mcp__conductor__DiffComment: - - **IMPORTANT: Only post ONE comment per unique issue.** - -8. Write out a list of issues found, along with the location of the comment. For example: - - - ### **#1 Empty input causes crash** - - If the input field is empty when page loads, the app will crash. - - File: src/ui/Input.tsx - - ### **#2 Dead code** - - The getUserData function is now unused. It should be deleted. - - File: src/core/UserData.ts - - -Use this list when evaluating issues in Steps 5 and 6 (these are false positives, do NOT flag): - -- Pre-existing issues -- Something that appears to be a bug but is actually correct -- Pedantic nitpicks that a senior engineer would not flag -- Issues that a linter will catch (do not run the linter to verify) -- General code quality concerns (e.g., lack of test coverage, general security issues) unless explicitly required in CLAUDE.md or AGENTS.md -- Issues mentioned in CLAUDE.md or AGENTS.md but explicitly silenced in the code (e.g., via a lint ignore comment) - -Notes: - -- All subagents should be explicitly instructed not to post comments themselves. Only you, the main agent, should post comments. -- Do not use the AskUserQuestion tool. Your goal should be to complete the entire review without user intervention. -- Use gh CLI to interact with GitHub (e.g., fetch pull requests, create comments). Do not use web fetch. 
-- You must cite and link each issue in inline comments (e.g., if referring to a CLAUDE.md or AGENTS.md rule, include a link to it). - -## Fallback: if you don't have access to subagents - -If you don't have subagents, perform all the steps above yourself sequentially instead of launching agents. Do each review axis (CLAUDE.md compliance, bug scan, introduced problems) yourself, and validate each issue yourself. - -## Fallback: if you don't have access to the workspace diff tool - -If you don't have access to the mcp__conductor__GetWorkspaceDiff tool, use the following git commands to get the diff: - -```bash -# Get the merge base between this branch and the target -MERGE_BASE=$(git merge-base origin/main HEAD) - -# Get the committed diff against the merge base -git diff $MERGE_BASE HEAD - -# Get any uncommitted changes (staged and unstaged) -git diff HEAD -``` - -Review the combination of both outputs: the first shows all committed changes on this branch relative to the target, and the second shows any uncommitted work in progress. - -No need to mention in your report whether or not you used one of the fallback strategies; it's usually irrelevant. - diff --git a/.context/attachments/Review request-v3.md b/.context/attachments/Review request-v3.md deleted file mode 100644 index 0a800c7..0000000 --- a/.context/attachments/Review request-v3.md +++ /dev/null @@ -1,101 +0,0 @@ -## Code Review Instructions - -1. Launch a haiku agent to return a list of file paths (not their contents) for all relevant CLAUDE.md files including: - - - The root CLAUDE.md file, if it exists - - Any CLAUDE.md files in directories containing files modified by the workspace diff (use mcp__conductor__GetWorkspaceDiff with stat option) - -2. If this workspace has an associated PR, read the title and description (but not the changes). This will be helpful context. - -3. 
In parallel with step 2, launch a sonnet agent to view the changes, using mcp__conductor__GetWorkspaceDiff, and return a summary of the changes - -4. Launch 4 agents in parallel to independently review the changes using mcp__conductor__GetWorkspaceDiff. Each agent should return the list of issues, where each issue includes a description and the reason it was flagged (e.g. "CLAUDE.md adherence", "bug"). The agents should do the following: - - Agents 1 + 2: CLAUDE.md or AGENTS.md compliance sonnet agents - Audit changes for CLAUDE.md or AGENTS.md compliance in parallel. Note: When evaluating CLAUDE.md or AGENTS.md compliance for a file, you should only consider CLAUDE.md or AGENTS.md files that share a file path with the file or parents. - - Agent 3: Opus bug agent - Scan for obvious bugs. Focus only on the diff itself without reading extra context. Flag only significant bugs; ignore nitpicks and likely false positives. Do not flag issues that you cannot validate without looking at context outside of the git diff. - - Agent 4: Opus bug agent - Look for problems that exist in the introduced code. This could be security issues, incorrect logic, etc. Only look for issues that fall within the changed code. - - **CRITICAL: We only want HIGH SIGNAL issues.** This means: - - - Objective bugs that will cause incorrect behavior at runtime - - Clear, unambiguous CLAUDE.md violations where you can quote the exact rule being broken - - We do NOT want: - - - Subjective concerns or "suggestions" - - Style preferences not explicitly required by CLAUDE.md - - Potential issues that "might" be problems - - Anything requiring interpretation or judgment calls - - If you are not certain an issue is real, do not flag it. False positives erode trust and waste reviewer time. - - In addition to the above, each subagent should be told the PR title and description. This will help provide context regarding the author's intent. - -5. 
For each issue found in the previous step, launch parallel subagents to validate the issue. These subagents should get the PR title and description along with a description of the issue. The agent's job is to review the issue to validate that the stated issue is truly an issue with high confidence. For example, if an issue such as "variable is not defined" was flagged, the subagent's job would be to validate that is actually true in the code. Another example would be CLAUDE.md issues. The agent should validate that the CLAUDE.md rule that was violated is scoped for this file and is actually violated. Use Opus subagents for bugs and logic issues, and sonnet agents for CLAUDE.md violations. - -6. Filter out any issues that were not validated in step 5. This step will give us our list of high signal issues for our review. - -7. Post inline comments for each issue using mcp__conductor__DiffComment: - - **IMPORTANT: Only post ONE comment per unique issue.** - -8. Write out a list of issues found, along with the location of the comment. For example: - - - ### **#1 Empty input causes crash** - - If the input field is empty when page loads, the app will crash. - - File: src/ui/Input.tsx - - ### **#2 Dead code** - - The getUserData function is now unused. It should be deleted. - - File: src/core/UserData.ts - - -Use this list when evaluating issues in Steps 5 and 6 (these are false positives, do NOT flag): - -- Pre-existing issues -- Something that appears to be a bug but is actually correct -- Pedantic nitpicks that a senior engineer would not flag -- Issues that a linter will catch (do not run the linter to verify) -- General code quality concerns (e.g., lack of test coverage, general security issues) unless explicitly required in CLAUDE.md or AGENTS.md -- Issues mentioned in CLAUDE.md or AGENTS.md but explicitly silenced in the code (e.g., via a lint ignore comment) - -Notes: - -- All subagents should be explicitly instructed not to post comments themselves. 
Only you, the main agent, should post comments. -- Do not use the AskUserQuestion tool. Your goal should be to complete the entire review without user intervention. -- Use gh CLI to interact with GitHub (e.g., fetch pull requests, create comments). Do not use web fetch. -- You must cite and link each issue in inline comments (e.g., if referring to a CLAUDE.md or AGENTS.md rule, include a link to it). - -## Fallback: if you don't have access to subagents - -If you don't have subagents, perform all the steps above yourself sequentially instead of launching agents. Do each review axis (CLAUDE.md compliance, bug scan, introduced problems) yourself, and validate each issue yourself. - -## Fallback: if you don't have access to the workspace diff tool - -If you don't have access to the mcp__conductor__GetWorkspaceDiff tool, use the following git commands to get the diff: - -```bash -# Get the merge base between this branch and the target -MERGE_BASE=$(git merge-base origin/main HEAD) - -# Get the committed diff against the merge base -git diff $MERGE_BASE HEAD - -# Get any uncommitted changes (staged and unstaged) -git diff HEAD -``` - -Review the combination of both outputs: the first shows all committed changes on this branch relative to the target, and the second shows any uncommitted work in progress. - -No need to mention in your report whether or not you used one of the fallback strategies; it's usually irrelevant. - diff --git a/.context/attachments/Review request.md b/.context/attachments/Review request.md deleted file mode 100644 index 0a800c7..0000000 --- a/.context/attachments/Review request.md +++ /dev/null @@ -1,101 +0,0 @@ -## Code Review Instructions - -1. 
Launch a haiku agent to return a list of file paths (not their contents) for all relevant CLAUDE.md files including: - - - The root CLAUDE.md file, if it exists - - Any CLAUDE.md files in directories containing files modified by the workspace diff (use mcp__conductor__GetWorkspaceDiff with stat option) - -2. If this workspace has an associated PR, read the title and description (but not the changes). This will be helpful context. - -3. In parallel with step 2, launch a sonnet agent to view the changes, using mcp__conductor__GetWorkspaceDiff, and return a summary of the changes - -4. Launch 4 agents in parallel to independently review the changes using mcp__conductor__GetWorkspaceDiff. Each agent should return the list of issues, where each issue includes a description and the reason it was flagged (e.g. "CLAUDE.md adherence", "bug"). The agents should do the following: - - Agents 1 + 2: CLAUDE.md or AGENTS.md compliance sonnet agents - Audit changes for CLAUDE.md or AGENTS.md compliance in parallel. Note: When evaluating CLAUDE.md or AGENTS.md compliance for a file, you should only consider CLAUDE.md or AGENTS.md files that share a file path with the file or parents. - - Agent 3: Opus bug agent - Scan for obvious bugs. Focus only on the diff itself without reading extra context. Flag only significant bugs; ignore nitpicks and likely false positives. Do not flag issues that you cannot validate without looking at context outside of the git diff. - - Agent 4: Opus bug agent - Look for problems that exist in the introduced code. This could be security issues, incorrect logic, etc. Only look for issues that fall within the changed code. 
- - **CRITICAL: We only want HIGH SIGNAL issues.** This means: - - - Objective bugs that will cause incorrect behavior at runtime - - Clear, unambiguous CLAUDE.md violations where you can quote the exact rule being broken - - We do NOT want: - - - Subjective concerns or "suggestions" - - Style preferences not explicitly required by CLAUDE.md - - Potential issues that "might" be problems - - Anything requiring interpretation or judgment calls - - If you are not certain an issue is real, do not flag it. False positives erode trust and waste reviewer time. - - In addition to the above, each subagent should be told the PR title and description. This will help provide context regarding the author's intent. - -5. For each issue found in the previous step, launch parallel subagents to validate the issue. These subagents should get the PR title and description along with a description of the issue. The agent's job is to review the issue to validate that the stated issue is truly an issue with high confidence. For example, if an issue such as "variable is not defined" was flagged, the subagent's job would be to validate that is actually true in the code. Another example would be CLAUDE.md issues. The agent should validate that the CLAUDE.md rule that was violated is scoped for this file and is actually violated. Use Opus subagents for bugs and logic issues, and sonnet agents for CLAUDE.md violations. - -6. Filter out any issues that were not validated in step 5. This step will give us our list of high signal issues for our review. - -7. Post inline comments for each issue using mcp__conductor__DiffComment: - - **IMPORTANT: Only post ONE comment per unique issue.** - -8. Write out a list of issues found, along with the location of the comment. For example: - - - ### **#1 Empty input causes crash** - - If the input field is empty when page loads, the app will crash. - - File: src/ui/Input.tsx - - ### **#2 Dead code** - - The getUserData function is now unused. 
It should be deleted. - - File: src/core/UserData.ts - - -Use this list when evaluating issues in Steps 5 and 6 (these are false positives, do NOT flag): - -- Pre-existing issues -- Something that appears to be a bug but is actually correct -- Pedantic nitpicks that a senior engineer would not flag -- Issues that a linter will catch (do not run the linter to verify) -- General code quality concerns (e.g., lack of test coverage, general security issues) unless explicitly required in CLAUDE.md or AGENTS.md -- Issues mentioned in CLAUDE.md or AGENTS.md but explicitly silenced in the code (e.g., via a lint ignore comment) - -Notes: - -- All subagents should be explicitly instructed not to post comments themselves. Only you, the main agent, should post comments. -- Do not use the AskUserQuestion tool. Your goal should be to complete the entire review without user intervention. -- Use gh CLI to interact with GitHub (e.g., fetch pull requests, create comments). Do not use web fetch. -- You must cite and link each issue in inline comments (e.g., if referring to a CLAUDE.md or AGENTS.md rule, include a link to it). - -## Fallback: if you don't have access to subagents - -If you don't have subagents, perform all the steps above yourself sequentially instead of launching agents. Do each review axis (CLAUDE.md compliance, bug scan, introduced problems) yourself, and validate each issue yourself. 
- -## Fallback: if you don't have access to the workspace diff tool - -If you don't have access to the mcp__conductor__GetWorkspaceDiff tool, use the following git commands to get the diff: - -```bash -# Get the merge base between this branch and the target -MERGE_BASE=$(git merge-base origin/main HEAD) - -# Get the committed diff against the merge base -git diff $MERGE_BASE HEAD - -# Get any uncommitted changes (staged and unstaged) -git diff HEAD -``` - -Review the combination of both outputs: the first shows all committed changes on this branch relative to the target, and the second shows any uncommitted work in progress. - -No need to mention in your report whether or not you used one of the fallback strategies; it's usually irrelevant. - diff --git a/.context/attachments/plan.md b/.context/attachments/plan.md deleted file mode 100644 index 2749e27..0000000 --- a/.context/attachments/plan.md +++ /dev/null @@ -1,215 +0,0 @@ -# Desktop Computer Use API Enhancements - -## Context - -Competitive analysis of Daytona, Cloudflare Sandbox SDK, and CUA revealed significant gaps in our desktop computer use API. Both Daytona and Cloudflare have or are building screenshot compression, hotkey combos, mouseDown/mouseUp, keyDown/keyUp, per-component process health, and live desktop streaming. CUA additionally has window management and accessibility trees. We have none of these. This plan closes the most impactful gaps across 7 tasks. - -## Execution Order - -``` -Sprint 1 (parallel, no dependencies): Tasks 1, 2, 3, 4 -Sprint 2 (foundational refactor): Task 5 -Sprint 3 (parallel, depend on #5): Tasks 6, 7 -``` - ---- - -## Task 1: Unify keyboard press with object modifiers - -**What**: Change `DesktopKeyboardPressRequest` to accept a `modifiers` object instead of requiring DSL strings like `"ctrl+c"`. - -**Files**: -- `server/packages/sandbox-agent/src/desktop_types.rs` — Add `DesktopKeyModifiers { ctrl, shift, alt, cmd }` struct (all `Option`). 
Add `modifiers: Option` to `DesktopKeyboardPressRequest`. -- `server/packages/sandbox-agent/src/desktop_runtime.rs` — Modify `press_key_args()` (~line 1349) to build xdotool key string from modifiers object. If modifiers present, construct `"ctrl+shift+a"` style string. `cmd` maps to `super`. -- `server/packages/sandbox-agent/src/router.rs` — Add `DesktopKeyModifiers` to OpenAPI schemas list. -- `docs/openapi.json` — Regenerate. - -**Backward compatible**: Old `{"key": "ctrl+a"}` still works. New form: `{"key": "a", "modifiers": {"ctrl": true}}`. - -**Test**: Unit test that `press_key_args("a", Some({ctrl: true, shift: true}))` produces `["key", "--", "ctrl+shift+a"]`. Integration test with both old and new request shapes. - ---- - -## Task 2: Add mouseDown/mouseUp and keyDown/keyUp endpoints - -**What**: 4 new endpoints for low-level press/release control. - -**Endpoints**: -- `POST /v1/desktop/mouse/down` — `xdotool mousedown BUTTON` (optional x,y moves first) -- `POST /v1/desktop/mouse/up` — `xdotool mouseup BUTTON` -- `POST /v1/desktop/keyboard/down` — `xdotool keydown KEY` -- `POST /v1/desktop/keyboard/up` — `xdotool keyup KEY` - -**Files**: -- `server/packages/sandbox-agent/src/desktop_types.rs` — Add `DesktopMouseDownRequest`, `DesktopMouseUpRequest` (x/y optional, button optional), `DesktopKeyboardDownRequest`, `DesktopKeyboardUpRequest` (key: String). -- `server/packages/sandbox-agent/src/desktop_runtime.rs` — Add 4 public methods following existing `click_mouse()` / `press_key()` patterns. -- `server/packages/sandbox-agent/src/router.rs` — Add 4 routes, 4 handlers with utoipa annotations. -- `sdks/typescript/src/client.ts` — Add `mouseDownDesktop()`, `mouseUpDesktop()`, `keyDownDesktop()`, `keyUpDesktop()`. -- `docs/openapi.json` — Regenerate. - -**Test**: Integration test: mouseDown → mousemove → mouseUp sequence. keyDown → keyUp sequence. 
- ---- - -## Task 3: Screenshot compression - -**What**: Add format, quality, and scale query params to screenshot endpoints. - -**Params**: `format` (png|jpeg|webp, default png), `quality` (1-100, default 85), `scale` (0.1-1.0, default 1.0). - -**Files**: -- `server/packages/sandbox-agent/src/desktop_types.rs` — Add `DesktopScreenshotFormat` enum. Add `format`, `quality`, `scale` fields to `DesktopScreenshotQuery` and `DesktopRegionScreenshotQuery`. -- `server/packages/sandbox-agent/src/desktop_runtime.rs` — After capturing PNG via `import`, pipe through ImageMagick `convert` if format != png or scale != 1.0: `convert png:- -resize {scale*100}% -quality {quality} {format}:-`. Add a `run_command_with_stdin()` helper (or modify existing `run_command_output`) to pipe bytes into a command's stdin. -- `server/packages/sandbox-agent/src/router.rs` — Modify screenshot handlers to pass format/quality/scale, return dynamic `Content-Type` header. -- `sdks/typescript/src/client.ts` — Update `takeDesktopScreenshot()` to accept format/quality/scale. -- `docs/openapi.json` — Regenerate. - -**Dependencies**: ImageMagick `convert` already installed in Docker. Verify WebP delegate availability. - -**Test**: Integration tests: request `?format=jpeg&quality=50`, verify `Content-Type: image/jpeg` and JPEG magic bytes. Verify default still returns PNG. Verify `?scale=0.5` returns a smaller image. - ---- - -## Task 4: Window listing API - -**What**: New endpoint to list open windows. - -**Endpoint**: `GET /v1/desktop/windows` - -**Files**: -- `server/packages/sandbox-agent/src/desktop_types.rs` — Add `DesktopWindowInfo { id, title, x, y, width, height, is_active }` and `DesktopWindowListResponse`. -- `server/packages/sandbox-agent/src/desktop_runtime.rs` — Add `list_windows()` method using xdotool (already installed): - 1. `xdotool search --onlyvisible --name ""` → window IDs - 2. `xdotool getwindowname {id}` + `xdotool getwindowgeometry {id}` per window - 3. 
`xdotool getactivewindow` → is_active flag - 4. Add `parse_window_geometry()` helper. -- `server/packages/sandbox-agent/src/router.rs` — Add route, handler, OpenAPI annotations. -- `sdks/typescript/src/client.ts` — Add `listDesktopWindows()`. -- `docs/openapi.json` — Regenerate. - -**No new Docker dependencies** — xdotool already installed. - -**Test**: Integration test: start desktop, verify `GET /v1/desktop/windows` returns 200 with a list (may be empty if no GUI apps open, which is fine). - ---- - -## Task 5: Unify desktop processes into process runtime with owner flag - -**What**: Desktop processes (Xvfb, openbox, dbus) get registered in the general process runtime with an `owner` field, gaining log streaming, SSE, and unified lifecycle for free. - -**Files**: - -- `server/packages/sandbox-agent/src/process_runtime.rs`: - - Add `ProcessOwner` enum: `User`, `Desktop`, `System`. - - Add `RestartPolicy` enum: `Never`, `Always`, `OnFailure`. - - Add `owner: ProcessOwner` and `restart_policy: Option` to `ProcessStartSpec`, `ManagedProcess`, and `ProcessSnapshot`. - - Modify `list_processes()` to accept optional owner filter. - - Add auto-restart logic in `watch_exit()`: if restart_policy is Always (or OnFailure and exit code != 0), re-spawn the process using stored spec. Need to store the original `ProcessStartSpec` on `ManagedProcess`. - -- `server/packages/sandbox-agent/src/router/types.rs`: - - Add `owner` to `ProcessInfo` response. - - Add `ProcessListQuery { owner: Option }`. - -- `server/packages/sandbox-agent/src/router.rs`: - - Modify `get_v1_processes` to accept `Query` and filter. - - Pass `ProcessRuntime` into `DesktopRuntime::new()`. - - Add `ProcessOwner`, `RestartPolicy` to OpenAPI schemas. - -- `server/packages/sandbox-agent/src/desktop_runtime.rs` — **Major refactor**: - - Remove `ManagedDesktopChild` struct. - - `DesktopRuntime` takes `ProcessRuntime` as constructor param. 
- - `start_xvfb_locked()` and `start_openbox_locked()` call `process_runtime.start_process(ProcessStartSpec { owner: Desktop, restart_policy: Some(Always), ... })` instead of spawning directly. - - Store returned process IDs in state instead of `Child` handles. - - `stop` calls `process_runtime.stop_process()` / `kill_process()`. - - `processes_locked()` queries process runtime for desktop-owned processes. - - dbus-launch remains a direct one-shot spawn (it's not a long-running process, just produces env vars). - -- `sdks/typescript/src/client.ts` — Add `owner` filter option to `listProcesses()`. -- `docs/openapi.json` — Regenerate. - -**Risks**: -- Lock ordering: desktop runtime holds Mutex, process runtime uses RwLock. Release desktop Mutex before calling process runtime, or restructure. -- `log_path` field in `DesktopProcessInfo` no longer applies (logs are in-memory now). Remove or deprecate. - -**Test**: Integration: start desktop, `GET /v1/processes?owner=desktop` returns Xvfb+openbox. `GET /v1/processes?owner=user` excludes them. Desktop process logs are streamable via `GET /v1/processes/{id}/logs?follow=true`. Existing desktop lifecycle tests still pass. - ---- - -## Task 6: Screen recording API (ffmpeg x11grab) - -**What**: 6 endpoints for recording the desktop to MP4. - -**Endpoints**: -- `POST /v1/desktop/recording/start` — Start ffmpeg recording -- `POST /v1/desktop/recording/stop` — Stop recording (SIGTERM → wait → SIGKILL) -- `GET /v1/desktop/recordings` — List recordings -- `GET /v1/desktop/recordings/{id}` — Get recording metadata -- `GET /v1/desktop/recordings/{id}/download` — Serve MP4 file -- `DELETE /v1/desktop/recordings/{id}` — Delete recording - -**Files**: -- **New**: `server/packages/sandbox-agent/src/desktop_recording.rs` — Recording state, ffmpeg process management. `start_recording()` spawns ffmpeg via process runtime (owner=Desktop): `ffmpeg -f x11grab -video_size WxH -i :99 -c:v libx264 -preset ultrafast -r 30 {path}`. 
Recordings stored in `{state_dir}/recordings/`. -- `server/packages/sandbox-agent/src/desktop_types.rs` — Add recording request/response types. -- `server/packages/sandbox-agent/src/desktop_runtime.rs` — Wire recording manager, expose through desktop runtime. -- `server/packages/sandbox-agent/src/router.rs` — Add 6 routes + handlers. -- `server/packages/sandbox-agent/src/desktop_install.rs` — Add `ffmpeg` to dependency detection (soft: only error when recording is requested). -- `docker/runtime/Dockerfile` and `docker/test-agent/Dockerfile` — Add `ffmpeg` to apt-get. -- `sdks/typescript/src/client.ts` — Add 6 recording methods. -- `docs/openapi.json` — Regenerate. - -**Depends on**: Task 5 (ffmpeg runs as desktop-owned process). - -**Test**: Integration: start desktop → start recording → wait 2s → stop → list → download (verify MP4 magic bytes) → delete. - ---- - -## Task 7: Neko WebRTC desktop streaming + React component - -**What**: Integrate neko for WebRTC desktop streaming, mirroring the ProcessTerminal + Ghostty pattern. - -### Server side - -- **New**: `server/packages/sandbox-agent/src/desktop_streaming.rs` — Manages neko process via process runtime (owner=Desktop). Neko connects to existing Xvfb display, runs GStreamer pipeline for H.264 encoding. -- `server/packages/sandbox-agent/src/router.rs`: - - `GET /v1/desktop/stream/ws` — WebSocket proxy to neko's internal WebSocket. Upgrade request, bridge bidirectionally. - - `POST /v1/desktop/stream/start` / `POST /v1/desktop/stream/stop` — Lifecycle control. -- `docker/runtime/Dockerfile` and `docker/test-agent/Dockerfile` — Add neko binary + GStreamer packages (`gstreamer1.0-plugins-base`, `gstreamer1.0-plugins-good`, `gstreamer1.0-x`, `libgstreamer1.0-0`). Consider making this an optional Docker stage to avoid bloating the base image. 
- -### TypeScript SDK - -- **New**: `sdks/typescript/src/desktop-stream.ts` — `DesktopStreamSession` class ported from neko's `base.ts` (~500 lines): - - WebSocket for signaling (SDP offer/answer, ICE candidates) - - `RTCPeerConnection` for video stream - - `RTCDataChannel` for binary input (mouse: 7 bytes, keyboard: 11 bytes) - - Events: `onTrack(stream)`, `onConnect()`, `onDisconnect()`, `onError()` -- `sdks/typescript/src/client.ts` — Add `connectDesktopStream()` returning `DesktopStreamSession`, `buildDesktopStreamWebSocketUrl()`, `startDesktopStream()`, `stopDesktopStream()`. -- `sdks/typescript/src/index.ts` — Export `DesktopStreamSession`. - -### React SDK - -- **New**: `sdks/react/src/DesktopViewer.tsx` — Following `ProcessTerminal.tsx` pattern: - ``` - Props: client (Pick), height, className, style, onConnect, onDisconnect, onError - ``` - - `useEffect` → `client.connectDesktopStream()` → wire `onTrack` to `