mirror of
https://github.com/harivansh-afk/sandbox-agent.git
synced 2026-04-15 07:04:48 +00:00
Add header status pill showing task/session/sandbox state
Surface aggregate status (error, provisioning, running, ready, no sandbox) as a colored pill in the transcript panel header. Integrates task runtime status, session status, and sandbox availability via the sandboxProcesses interest topic so the pill accurately reflects unreachable sandboxes. Includes mock tasks demonstrating error, provisioning, and running states, unit tests for deriveHeaderStatus, and workspace-dashboard integration. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
098b8113f3
commit
5bd85e4a28
77 changed files with 2329 additions and 4134 deletions
|
|
@ -39,6 +39,10 @@ services:
|
||||||
STRIPE_SECRET_KEY: "${STRIPE_SECRET_KEY:-}"
|
STRIPE_SECRET_KEY: "${STRIPE_SECRET_KEY:-}"
|
||||||
STRIPE_WEBHOOK_SECRET: "${STRIPE_WEBHOOK_SECRET:-}"
|
STRIPE_WEBHOOK_SECRET: "${STRIPE_WEBHOOK_SECRET:-}"
|
||||||
STRIPE_PRICE_TEAM: "${STRIPE_PRICE_TEAM:-}"
|
STRIPE_PRICE_TEAM: "${STRIPE_PRICE_TEAM:-}"
|
||||||
|
FOUNDRY_SANDBOX_PROVIDER: "${FOUNDRY_SANDBOX_PROVIDER:-local}"
|
||||||
|
E2B_API_KEY: "${E2B_API_KEY:-}"
|
||||||
|
E2B_TEMPLATE: "${E2B_TEMPLATE:-}"
|
||||||
|
HF_E2B_TEMPLATE: "${HF_E2B_TEMPLATE:-${E2B_TEMPLATE:-}}"
|
||||||
DAYTONA_ENDPOINT: "${DAYTONA_ENDPOINT:-}"
|
DAYTONA_ENDPOINT: "${DAYTONA_ENDPOINT:-}"
|
||||||
DAYTONA_API_KEY: "${DAYTONA_API_KEY:-}"
|
DAYTONA_API_KEY: "${DAYTONA_API_KEY:-}"
|
||||||
HF_DAYTONA_ENDPOINT: "${HF_DAYTONA_ENDPOINT:-}"
|
HF_DAYTONA_ENDPOINT: "${HF_DAYTONA_ENDPOINT:-}"
|
||||||
|
|
@ -52,6 +56,7 @@ services:
|
||||||
- "../../../task/rivet-checkout:/task/rivet-checkout:ro"
|
- "../../../task/rivet-checkout:/task/rivet-checkout:ro"
|
||||||
# Reuse the host Codex auth profile for local sandbox-agent Codex sessions in dev.
|
# Reuse the host Codex auth profile for local sandbox-agent Codex sessions in dev.
|
||||||
- "${HOME}/.codex:/root/.codex"
|
- "${HOME}/.codex:/root/.codex"
|
||||||
|
- "/var/run/docker.sock:/var/run/docker.sock"
|
||||||
# Keep backend dependency installs Linux-native instead of using host node_modules.
|
# Keep backend dependency installs Linux-native instead of using host node_modules.
|
||||||
- "foundry_backend_root_node_modules:/app/node_modules"
|
- "foundry_backend_root_node_modules:/app/node_modules"
|
||||||
- "foundry_backend_backend_node_modules:/app/foundry/packages/backend/node_modules"
|
- "foundry_backend_backend_node_modules:/app/foundry/packages/backend/node_modules"
|
||||||
|
|
|
||||||
|
|
@ -13,18 +13,19 @@
|
||||||
"start": "bun dist/index.js start"
|
"start": "bun dist/index.js start"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@daytonaio/sdk": "0.141.0",
|
"@e2b/code-interpreter": "^2.3.3",
|
||||||
"@hono/node-server": "^1.19.7",
|
"@hono/node-server": "^1.19.7",
|
||||||
"@hono/node-ws": "^1.3.0",
|
"@hono/node-ws": "^1.3.0",
|
||||||
"@iarna/toml": "^2.2.5",
|
"@iarna/toml": "^2.2.5",
|
||||||
"@sandbox-agent/foundry-shared": "workspace:*",
|
"@sandbox-agent/foundry-shared": "workspace:*",
|
||||||
"@sandbox-agent/persist-rivet": "workspace:*",
|
"@sandbox-agent/persist-rivet": "workspace:*",
|
||||||
"better-auth": "^1.5.5",
|
"better-auth": "^1.5.5",
|
||||||
|
"dockerode": "^4.0.9",
|
||||||
"drizzle-kit": "^0.31.8",
|
"drizzle-kit": "^0.31.8",
|
||||||
"drizzle-orm": "^0.44.5",
|
"drizzle-orm": "^0.44.5",
|
||||||
"hono": "^4.11.9",
|
"hono": "^4.11.9",
|
||||||
"pino": "^10.3.1",
|
"pino": "^10.3.1",
|
||||||
"rivetkit": "2.1.6",
|
"rivetkit": "https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a",
|
||||||
"sandbox-agent": "workspace:*",
|
"sandbox-agent": "workspace:*",
|
||||||
"uuid": "^13.0.0",
|
"uuid": "^13.0.0",
|
||||||
"ws": "^8.19.0",
|
"ws": "^8.19.0",
|
||||||
|
|
|
||||||
|
|
@ -1,24 +1,15 @@
|
||||||
import type { AppConfig } from "@sandbox-agent/foundry-shared";
|
import type { AppConfig } from "@sandbox-agent/foundry-shared";
|
||||||
import type { BackendDriver } from "../driver.js";
|
import type { BackendDriver } from "../driver.js";
|
||||||
import type { NotificationService } from "../notifications/index.js";
|
import type { NotificationService } from "../notifications/index.js";
|
||||||
import type { ProviderRegistry } from "../providers/index.js";
|
|
||||||
import type { AppShellServices } from "../services/app-shell-runtime.js";
|
import type { AppShellServices } from "../services/app-shell-runtime.js";
|
||||||
|
|
||||||
let runtimeConfig: AppConfig | null = null;
|
let runtimeConfig: AppConfig | null = null;
|
||||||
let providerRegistry: ProviderRegistry | null = null;
|
|
||||||
let notificationService: NotificationService | null = null;
|
let notificationService: NotificationService | null = null;
|
||||||
let runtimeDriver: BackendDriver | null = null;
|
let runtimeDriver: BackendDriver | null = null;
|
||||||
let appShellServices: AppShellServices | null = null;
|
let appShellServices: AppShellServices | null = null;
|
||||||
|
|
||||||
export function initActorRuntimeContext(
|
export function initActorRuntimeContext(config: AppConfig, notifications?: NotificationService, driver?: BackendDriver, appShell?: AppShellServices): void {
|
||||||
config: AppConfig,
|
|
||||||
providers: ProviderRegistry,
|
|
||||||
notifications?: NotificationService,
|
|
||||||
driver?: BackendDriver,
|
|
||||||
appShell?: AppShellServices,
|
|
||||||
): void {
|
|
||||||
runtimeConfig = config;
|
runtimeConfig = config;
|
||||||
providerRegistry = providers;
|
|
||||||
notificationService = notifications ?? null;
|
notificationService = notifications ?? null;
|
||||||
runtimeDriver = driver ?? null;
|
runtimeDriver = driver ?? null;
|
||||||
appShellServices = appShell ?? null;
|
appShellServices = appShell ?? null;
|
||||||
|
|
@ -26,12 +17,11 @@ export function initActorRuntimeContext(
|
||||||
|
|
||||||
export function getActorRuntimeContext(): {
|
export function getActorRuntimeContext(): {
|
||||||
config: AppConfig;
|
config: AppConfig;
|
||||||
providers: ProviderRegistry;
|
|
||||||
notifications: NotificationService | null;
|
notifications: NotificationService | null;
|
||||||
driver: BackendDriver;
|
driver: BackendDriver;
|
||||||
appShell: AppShellServices;
|
appShell: AppShellServices;
|
||||||
} {
|
} {
|
||||||
if (!runtimeConfig || !providerRegistry) {
|
if (!runtimeConfig) {
|
||||||
throw new Error("Actor runtime context not initialized");
|
throw new Error("Actor runtime context not initialized");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -45,7 +35,6 @@ export function getActorRuntimeContext(): {
|
||||||
|
|
||||||
return {
|
return {
|
||||||
config: runtimeConfig,
|
config: runtimeConfig,
|
||||||
providers: providerRegistry,
|
|
||||||
notifications: notificationService,
|
notifications: notificationService,
|
||||||
driver: runtimeDriver,
|
driver: runtimeDriver,
|
||||||
appShell: appShellServices,
|
appShell: appShellServices,
|
||||||
|
|
|
||||||
|
|
@ -1,15 +1,4 @@
|
||||||
import {
|
import { authUserKey, taskKey, historyKey, projectBranchSyncKey, projectKey, projectPrSyncKey, taskSandboxKey, workspaceKey } from "./keys.js";
|
||||||
authUserKey,
|
|
||||||
taskKey,
|
|
||||||
taskStatusSyncKey,
|
|
||||||
historyKey,
|
|
||||||
projectBranchSyncKey,
|
|
||||||
projectKey,
|
|
||||||
projectPrSyncKey,
|
|
||||||
sandboxInstanceKey,
|
|
||||||
workspaceKey,
|
|
||||||
} from "./keys.js";
|
|
||||||
import type { ProviderId } from "@sandbox-agent/foundry-shared";
|
|
||||||
|
|
||||||
export function actorClient(c: any) {
|
export function actorClient(c: any) {
|
||||||
return c.client();
|
return c.client();
|
||||||
|
|
@ -86,30 +75,12 @@ export async function getOrCreateProjectBranchSync(c: any, workspaceId: string,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
export function getSandboxInstance(c: any, workspaceId: string, providerId: ProviderId, sandboxId: string) {
|
export function getTaskSandbox(c: any, workspaceId: string, sandboxId: string) {
|
||||||
return actorClient(c).sandboxInstance.get(sandboxInstanceKey(workspaceId, providerId, sandboxId));
|
return actorClient(c).taskSandbox.get(taskSandboxKey(workspaceId, sandboxId));
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function getOrCreateSandboxInstance(
|
export async function getOrCreateTaskSandbox(c: any, workspaceId: string, sandboxId: string, createWithInput?: Record<string, unknown>) {
|
||||||
c: any,
|
return await actorClient(c).taskSandbox.getOrCreate(taskSandboxKey(workspaceId, sandboxId), {
|
||||||
workspaceId: string,
|
|
||||||
providerId: ProviderId,
|
|
||||||
sandboxId: string,
|
|
||||||
createWithInput: Record<string, unknown>,
|
|
||||||
) {
|
|
||||||
return await actorClient(c).sandboxInstance.getOrCreate(sandboxInstanceKey(workspaceId, providerId, sandboxId), { createWithInput });
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function getOrCreateTaskStatusSync(
|
|
||||||
c: any,
|
|
||||||
workspaceId: string,
|
|
||||||
repoId: string,
|
|
||||||
taskId: string,
|
|
||||||
sandboxId: string,
|
|
||||||
sessionId: string,
|
|
||||||
createWithInput: Record<string, unknown>,
|
|
||||||
) {
|
|
||||||
return await actorClient(c).taskStatusSync.getOrCreate(taskStatusSyncKey(workspaceId, repoId, taskId, sandboxId, sessionId), {
|
|
||||||
createWithInput,
|
createWithInput,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
@ -122,10 +93,6 @@ export function selfProjectBranchSync(c: any) {
|
||||||
return actorClient(c).projectBranchSync.getForId(c.actorId);
|
return actorClient(c).projectBranchSync.getForId(c.actorId);
|
||||||
}
|
}
|
||||||
|
|
||||||
export function selfTaskStatusSync(c: any) {
|
|
||||||
return actorClient(c).taskStatusSync.getForId(c.actorId);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function selfHistory(c: any) {
|
export function selfHistory(c: any) {
|
||||||
return actorClient(c).history.getForId(c.actorId);
|
return actorClient(c).history.getForId(c.actorId);
|
||||||
}
|
}
|
||||||
|
|
@ -142,10 +109,6 @@ export function selfProject(c: any) {
|
||||||
return actorClient(c).project.getForId(c.actorId);
|
return actorClient(c).project.getForId(c.actorId);
|
||||||
}
|
}
|
||||||
|
|
||||||
export function selfSandboxInstance(c: any) {
|
|
||||||
return actorClient(c).sandboxInstance.getForId(c.actorId);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function selfAuthUser(c: any) {
|
export function selfAuthUser(c: any) {
|
||||||
return actorClient(c).authUser.getForId(c.actorId);
|
return actorClient(c).authUser.getForId(c.actorId);
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,12 +1,11 @@
|
||||||
import { authUser } from "./auth-user/index.js";
|
import { authUser } from "./auth-user/index.js";
|
||||||
import { setup } from "rivetkit";
|
import { setup } from "rivetkit";
|
||||||
import { taskStatusSync } from "./task-status-sync/index.js";
|
|
||||||
import { task } from "./task/index.js";
|
import { task } from "./task/index.js";
|
||||||
import { history } from "./history/index.js";
|
import { history } from "./history/index.js";
|
||||||
import { projectBranchSync } from "./project-branch-sync/index.js";
|
import { projectBranchSync } from "./project-branch-sync/index.js";
|
||||||
import { projectPrSync } from "./project-pr-sync/index.js";
|
import { projectPrSync } from "./project-pr-sync/index.js";
|
||||||
import { project } from "./project/index.js";
|
import { project } from "./project/index.js";
|
||||||
import { sandboxInstance } from "./sandbox-instance/index.js";
|
import { taskSandbox } from "./sandbox/index.js";
|
||||||
import { workspace } from "./workspace/index.js";
|
import { workspace } from "./workspace/index.js";
|
||||||
import { logger } from "../logging.js";
|
import { logger } from "../logging.js";
|
||||||
|
|
||||||
|
|
@ -27,23 +26,21 @@ export const registry = setup({
|
||||||
workspace,
|
workspace,
|
||||||
project,
|
project,
|
||||||
task,
|
task,
|
||||||
sandboxInstance,
|
taskSandbox,
|
||||||
history,
|
history,
|
||||||
projectPrSync,
|
projectPrSync,
|
||||||
projectBranchSync,
|
projectBranchSync,
|
||||||
taskStatusSync,
|
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
export * from "./context.js";
|
export * from "./context.js";
|
||||||
export * from "./events.js";
|
export * from "./events.js";
|
||||||
export * from "./auth-user/index.js";
|
export * from "./auth-user/index.js";
|
||||||
export * from "./task-status-sync/index.js";
|
|
||||||
export * from "./task/index.js";
|
export * from "./task/index.js";
|
||||||
export * from "./history/index.js";
|
export * from "./history/index.js";
|
||||||
export * from "./keys.js";
|
export * from "./keys.js";
|
||||||
export * from "./project-branch-sync/index.js";
|
export * from "./project-branch-sync/index.js";
|
||||||
export * from "./project-pr-sync/index.js";
|
export * from "./project-pr-sync/index.js";
|
||||||
export * from "./project/index.js";
|
export * from "./project/index.js";
|
||||||
export * from "./sandbox-instance/index.js";
|
export * from "./sandbox/index.js";
|
||||||
export * from "./workspace/index.js";
|
export * from "./workspace/index.js";
|
||||||
|
|
|
||||||
|
|
@ -16,8 +16,8 @@ export function taskKey(workspaceId: string, repoId: string, taskId: string): Ac
|
||||||
return ["ws", workspaceId, "project", repoId, "task", taskId];
|
return ["ws", workspaceId, "project", repoId, "task", taskId];
|
||||||
}
|
}
|
||||||
|
|
||||||
export function sandboxInstanceKey(workspaceId: string, providerId: string, sandboxId: string): ActorKey {
|
export function taskSandboxKey(workspaceId: string, sandboxId: string): ActorKey {
|
||||||
return ["ws", workspaceId, "provider", providerId, "sandbox", sandboxId];
|
return ["ws", workspaceId, "sandbox", sandboxId];
|
||||||
}
|
}
|
||||||
|
|
||||||
export function historyKey(workspaceId: string, repoId: string): ActorKey {
|
export function historyKey(workspaceId: string, repoId: string): ActorKey {
|
||||||
|
|
@ -31,8 +31,3 @@ export function projectPrSyncKey(workspaceId: string, repoId: string): ActorKey
|
||||||
export function projectBranchSyncKey(workspaceId: string, repoId: string): ActorKey {
|
export function projectBranchSyncKey(workspaceId: string, repoId: string): ActorKey {
|
||||||
return ["ws", workspaceId, "project", repoId, "branch-sync"];
|
return ["ws", workspaceId, "project", repoId, "branch-sync"];
|
||||||
}
|
}
|
||||||
|
|
||||||
export function taskStatusSyncKey(workspaceId: string, repoId: string, taskId: string, sandboxId: string, sessionId: string): ActorKey {
|
|
||||||
// Include sandbox + session so multiple sandboxes/sessions can be tracked per task.
|
|
||||||
return ["ws", workspaceId, "project", repoId, "task", taskId, "status-sync", sandboxId, sessionId];
|
|
||||||
}
|
|
||||||
|
|
|
||||||
|
|
@ -126,12 +126,24 @@ async function ensureProjectSyncActors(c: any, localPath: string): Promise<void>
|
||||||
}
|
}
|
||||||
|
|
||||||
const prSync = await getOrCreateProjectPrSync(c, c.state.workspaceId, c.state.repoId, localPath, 30_000);
|
const prSync = await getOrCreateProjectPrSync(c, c.state.workspaceId, c.state.repoId, localPath, 30_000);
|
||||||
await prSync.start();
|
|
||||||
|
|
||||||
const branchSync = await getOrCreateProjectBranchSync(c, c.state.workspaceId, c.state.repoId, localPath, 5_000);
|
const branchSync = await getOrCreateProjectBranchSync(c, c.state.workspaceId, c.state.repoId, localPath, 5_000);
|
||||||
await branchSync.start();
|
|
||||||
|
|
||||||
c.state.syncActorsStarted = true;
|
c.state.syncActorsStarted = true;
|
||||||
|
|
||||||
|
void prSync.start().catch((error: unknown) => {
|
||||||
|
logActorWarning("project.sync", "starting pr sync actor failed", {
|
||||||
|
workspaceId: c.state.workspaceId,
|
||||||
|
repoId: c.state.repoId,
|
||||||
|
error: resolveErrorMessage(error),
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
void branchSync.start().catch((error: unknown) => {
|
||||||
|
logActorWarning("project.sync", "starting branch sync actor failed", {
|
||||||
|
workspaceId: c.state.workspaceId,
|
||||||
|
repoId: c.state.repoId,
|
||||||
|
error: resolveErrorMessage(error),
|
||||||
|
});
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
async function ensureRepoActionJobsTable(c: any): Promise<void> {
|
async function ensureRepoActionJobsTable(c: any): Promise<void> {
|
||||||
|
|
@ -316,13 +328,17 @@ async function ensureProjectReadyForRead(c: any): Promise<string> {
|
||||||
throw new Error("project remoteUrl is not initialized");
|
throw new Error("project remoteUrl is not initialized");
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!c.state.localPath || !c.state.syncActorsStarted) {
|
if (!c.state.localPath) {
|
||||||
const result = await projectActions.ensure(c, { remoteUrl: c.state.remoteUrl });
|
const result = await projectActions.ensure(c, { remoteUrl: c.state.remoteUrl });
|
||||||
const localPath = result?.localPath ?? c.state.localPath;
|
c.state.localPath = result?.localPath ?? c.state.localPath;
|
||||||
if (!localPath) {
|
}
|
||||||
|
|
||||||
|
if (!c.state.localPath) {
|
||||||
throw new Error("project local repo is not initialized");
|
throw new Error("project local repo is not initialized");
|
||||||
}
|
}
|
||||||
return localPath;
|
|
||||||
|
if (!c.state.syncActorsStarted) {
|
||||||
|
await ensureProjectSyncActors(c, c.state.localPath);
|
||||||
}
|
}
|
||||||
|
|
||||||
return c.state.localPath;
|
return c.state.localPath;
|
||||||
|
|
@ -428,7 +444,6 @@ async function ensureProjectMutation(c: any, cmd: EnsureProjectCommand): Promise
|
||||||
})
|
})
|
||||||
.run();
|
.run();
|
||||||
|
|
||||||
await ensureProjectSyncActors(c, localPath);
|
|
||||||
return { localPath };
|
return { localPath };
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -563,24 +578,25 @@ async function registerTaskBranchMutation(c: any, cmd: RegisterTaskBranchCommand
|
||||||
await driver.git.fetch(localPath, { githubToken: auth?.githubToken ?? null });
|
await driver.git.fetch(localPath, { githubToken: auth?.githubToken ?? null });
|
||||||
const baseRef = await driver.git.remoteDefaultBaseRef(localPath);
|
const baseRef = await driver.git.remoteDefaultBaseRef(localPath);
|
||||||
const normalizedBase = normalizeBaseBranchName(baseRef);
|
const normalizedBase = normalizeBaseBranchName(baseRef);
|
||||||
|
let branchAvailableInRepo = false;
|
||||||
|
|
||||||
if (requireExistingRemote) {
|
if (requireExistingRemote) {
|
||||||
try {
|
try {
|
||||||
headSha = await driver.git.revParse(localPath, `origin/${branchName}`);
|
headSha = await driver.git.revParse(localPath, `origin/${branchName}`);
|
||||||
|
branchAvailableInRepo = true;
|
||||||
} catch {
|
} catch {
|
||||||
throw new Error(`Remote branch not found: ${branchName}`);
|
throw new Error(`Remote branch not found: ${branchName}`);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
await driver.git.ensureRemoteBranch(localPath, branchName, { githubToken: auth?.githubToken ?? null });
|
|
||||||
await driver.git.fetch(localPath, { githubToken: auth?.githubToken ?? null });
|
|
||||||
try {
|
try {
|
||||||
headSha = await driver.git.revParse(localPath, `origin/${branchName}`);
|
headSha = await driver.git.revParse(localPath, `origin/${branchName}`);
|
||||||
|
branchAvailableInRepo = true;
|
||||||
} catch {
|
} catch {
|
||||||
headSha = await driver.git.revParse(localPath, baseRef);
|
headSha = await driver.git.revParse(localPath, baseRef);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (await driver.stack.available(localPath).catch(() => false)) {
|
if (branchAvailableInRepo && (await driver.stack.available(localPath).catch(() => false))) {
|
||||||
let stackRows = await driver.stack.listStack(localPath).catch(() => []);
|
let stackRows = await driver.stack.listStack(localPath).catch(() => []);
|
||||||
let stackRow = stackRows.find((entry) => entry.branchName === branchName);
|
let stackRow = stackRows.find((entry) => entry.branchName === branchName);
|
||||||
|
|
||||||
|
|
@ -872,6 +888,10 @@ async function applyPrSyncResultMutation(c: any, body: PrSyncResult): Promise<vo
|
||||||
|
|
||||||
async function applyBranchSyncResultMutation(c: any, body: BranchSyncResult): Promise<void> {
|
async function applyBranchSyncResultMutation(c: any, body: BranchSyncResult): Promise<void> {
|
||||||
const incoming = new Set(body.items.map((item) => item.branchName));
|
const incoming = new Set(body.items.map((item) => item.branchName));
|
||||||
|
const reservedRows = await c.db.select({ branchName: taskIndex.branchName }).from(taskIndex).where(isNotNull(taskIndex.branchName)).all();
|
||||||
|
const reservedBranches = new Set(
|
||||||
|
reservedRows.map((row) => row.branchName).filter((branchName): branchName is string => typeof branchName === "string" && branchName.length > 0),
|
||||||
|
);
|
||||||
|
|
||||||
for (const item of body.items) {
|
for (const item of body.items) {
|
||||||
const existing = await c.db
|
const existing = await c.db
|
||||||
|
|
@ -916,7 +936,7 @@ async function applyBranchSyncResultMutation(c: any, body: BranchSyncResult): Pr
|
||||||
const existingRows = await c.db.select({ branchName: branches.branchName }).from(branches).all();
|
const existingRows = await c.db.select({ branchName: branches.branchName }).from(branches).all();
|
||||||
|
|
||||||
for (const row of existingRows) {
|
for (const row of existingRows) {
|
||||||
if (incoming.has(row.branchName)) {
|
if (incoming.has(row.branchName) || reservedBranches.has(row.branchName)) {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
await c.db.delete(branches).where(eq(branches.branchName, row.branchName)).run();
|
await c.db.delete(branches).where(eq(branches.branchName, row.branchName)).run();
|
||||||
|
|
@ -952,7 +972,7 @@ export async function runProjectWorkflow(ctx: any): Promise<void> {
|
||||||
if (msg.name === "project.command.createTask") {
|
if (msg.name === "project.command.createTask") {
|
||||||
const result = await loopCtx.step({
|
const result = await loopCtx.step({
|
||||||
name: "project-create-task",
|
name: "project-create-task",
|
||||||
timeout: 60_000,
|
timeout: 5 * 60_000,
|
||||||
run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskCommand),
|
run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskCommand),
|
||||||
});
|
});
|
||||||
await msg.complete(result);
|
await msg.complete(result);
|
||||||
|
|
@ -1018,7 +1038,7 @@ export const projectActions = {
|
||||||
return expectQueueResponse<TaskRecord>(
|
return expectQueueResponse<TaskRecord>(
|
||||||
await self.send(projectWorkflowQueueName("project.command.createTask"), cmd, {
|
await self.send(projectWorkflowQueueName("project.command.createTask"), cmd, {
|
||||||
wait: true,
|
wait: true,
|
||||||
timeout: 60_000,
|
timeout: 5 * 60_000,
|
||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -1,5 +0,0 @@
|
||||||
import { db } from "rivetkit/db/drizzle";
|
|
||||||
import * as schema from "./schema.js";
|
|
||||||
import migrations from "./migrations.js";
|
|
||||||
|
|
||||||
export const sandboxInstanceDb = db({ schema, migrations });
|
|
||||||
|
|
@ -1,6 +0,0 @@
|
||||||
import { defineConfig } from "rivetkit/db/drizzle";
|
|
||||||
|
|
||||||
export default defineConfig({
|
|
||||||
out: "./src/actors/sandbox-instance/db/drizzle",
|
|
||||||
schema: "./src/actors/sandbox-instance/db/schema.ts",
|
|
||||||
});
|
|
||||||
|
|
@ -1,27 +0,0 @@
|
||||||
CREATE TABLE `sandbox_instance` (
|
|
||||||
`id` integer PRIMARY KEY NOT NULL,
|
|
||||||
`metadata_json` text NOT NULL,
|
|
||||||
`status` text NOT NULL,
|
|
||||||
`updated_at` integer NOT NULL
|
|
||||||
);
|
|
||||||
--> statement-breakpoint
|
|
||||||
CREATE TABLE `sandbox_session_events` (
|
|
||||||
`id` text PRIMARY KEY NOT NULL,
|
|
||||||
`session_id` text NOT NULL,
|
|
||||||
`event_index` integer NOT NULL,
|
|
||||||
`created_at` integer NOT NULL,
|
|
||||||
`connection_id` text NOT NULL,
|
|
||||||
`sender` text NOT NULL,
|
|
||||||
`payload_json` text NOT NULL
|
|
||||||
);
|
|
||||||
--> statement-breakpoint
|
|
||||||
CREATE UNIQUE INDEX `sandbox_session_events_session_id_event_index_unique` ON `sandbox_session_events` (`session_id`,`event_index`);--> statement-breakpoint
|
|
||||||
CREATE TABLE `sandbox_sessions` (
|
|
||||||
`id` text PRIMARY KEY NOT NULL,
|
|
||||||
`agent` text NOT NULL,
|
|
||||||
`agent_session_id` text NOT NULL,
|
|
||||||
`last_connection_id` text NOT NULL,
|
|
||||||
`created_at` integer NOT NULL,
|
|
||||||
`destroyed_at` integer,
|
|
||||||
`session_init_json` text
|
|
||||||
);
|
|
||||||
|
|
@ -1,180 +0,0 @@
|
||||||
{
|
|
||||||
"version": "6",
|
|
||||||
"dialect": "sqlite",
|
|
||||||
"id": "130486c5-6208-4d00-b367-e02b9def953a",
|
|
||||||
"prevId": "00000000-0000-0000-0000-000000000000",
|
|
||||||
"tables": {
|
|
||||||
"sandbox_instance": {
|
|
||||||
"name": "sandbox_instance",
|
|
||||||
"columns": {
|
|
||||||
"id": {
|
|
||||||
"name": "id",
|
|
||||||
"type": "integer",
|
|
||||||
"primaryKey": true,
|
|
||||||
"notNull": true,
|
|
||||||
"autoincrement": false
|
|
||||||
},
|
|
||||||
"metadata_json": {
|
|
||||||
"name": "metadata_json",
|
|
||||||
"type": "text",
|
|
||||||
"primaryKey": false,
|
|
||||||
"notNull": true,
|
|
||||||
"autoincrement": false
|
|
||||||
},
|
|
||||||
"status": {
|
|
||||||
"name": "status",
|
|
||||||
"type": "text",
|
|
||||||
"primaryKey": false,
|
|
||||||
"notNull": true,
|
|
||||||
"autoincrement": false
|
|
||||||
},
|
|
||||||
"updated_at": {
|
|
||||||
"name": "updated_at",
|
|
||||||
"type": "integer",
|
|
||||||
"primaryKey": false,
|
|
||||||
"notNull": true,
|
|
||||||
"autoincrement": false
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"indexes": {},
|
|
||||||
"foreignKeys": {},
|
|
||||||
"compositePrimaryKeys": {},
|
|
||||||
"uniqueConstraints": {},
|
|
||||||
"checkConstraints": {}
|
|
||||||
},
|
|
||||||
"sandbox_session_events": {
|
|
||||||
"name": "sandbox_session_events",
|
|
||||||
"columns": {
|
|
||||||
"id": {
|
|
||||||
"name": "id",
|
|
||||||
"type": "text",
|
|
||||||
"primaryKey": true,
|
|
||||||
"notNull": true,
|
|
||||||
"autoincrement": false
|
|
||||||
},
|
|
||||||
"session_id": {
|
|
||||||
"name": "session_id",
|
|
||||||
"type": "text",
|
|
||||||
"primaryKey": false,
|
|
||||||
"notNull": true,
|
|
||||||
"autoincrement": false
|
|
||||||
},
|
|
||||||
"event_index": {
|
|
||||||
"name": "event_index",
|
|
||||||
"type": "integer",
|
|
||||||
"primaryKey": false,
|
|
||||||
"notNull": true,
|
|
||||||
"autoincrement": false
|
|
||||||
},
|
|
||||||
"created_at": {
|
|
||||||
"name": "created_at",
|
|
||||||
"type": "integer",
|
|
||||||
"primaryKey": false,
|
|
||||||
"notNull": true,
|
|
||||||
"autoincrement": false
|
|
||||||
},
|
|
||||||
"connection_id": {
|
|
||||||
"name": "connection_id",
|
|
||||||
"type": "text",
|
|
||||||
"primaryKey": false,
|
|
||||||
"notNull": true,
|
|
||||||
"autoincrement": false
|
|
||||||
},
|
|
||||||
"sender": {
|
|
||||||
"name": "sender",
|
|
||||||
"type": "text",
|
|
||||||
"primaryKey": false,
|
|
||||||
"notNull": true,
|
|
||||||
"autoincrement": false
|
|
||||||
},
|
|
||||||
"payload_json": {
|
|
||||||
"name": "payload_json",
|
|
||||||
"type": "text",
|
|
||||||
"primaryKey": false,
|
|
||||||
"notNull": true,
|
|
||||||
"autoincrement": false
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"indexes": {
|
|
||||||
"sandbox_session_events_session_id_event_index_unique": {
|
|
||||||
"name": "sandbox_session_events_session_id_event_index_unique",
|
|
||||||
"columns": ["session_id", "event_index"],
|
|
||||||
"isUnique": true
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"foreignKeys": {},
|
|
||||||
"compositePrimaryKeys": {},
|
|
||||||
"uniqueConstraints": {},
|
|
||||||
"checkConstraints": {}
|
|
||||||
},
|
|
||||||
"sandbox_sessions": {
|
|
||||||
"name": "sandbox_sessions",
|
|
||||||
"columns": {
|
|
||||||
"id": {
|
|
||||||
"name": "id",
|
|
||||||
"type": "text",
|
|
||||||
"primaryKey": true,
|
|
||||||
"notNull": true,
|
|
||||||
"autoincrement": false
|
|
||||||
},
|
|
||||||
"agent": {
|
|
||||||
"name": "agent",
|
|
||||||
"type": "text",
|
|
||||||
"primaryKey": false,
|
|
||||||
"notNull": true,
|
|
||||||
"autoincrement": false
|
|
||||||
},
|
|
||||||
"agent_session_id": {
|
|
||||||
"name": "agent_session_id",
|
|
||||||
"type": "text",
|
|
||||||
"primaryKey": false,
|
|
||||||
"notNull": true,
|
|
||||||
"autoincrement": false
|
|
||||||
},
|
|
||||||
"last_connection_id": {
|
|
||||||
"name": "last_connection_id",
|
|
||||||
"type": "text",
|
|
||||||
"primaryKey": false,
|
|
||||||
"notNull": true,
|
|
||||||
"autoincrement": false
|
|
||||||
},
|
|
||||||
"created_at": {
|
|
||||||
"name": "created_at",
|
|
||||||
"type": "integer",
|
|
||||||
"primaryKey": false,
|
|
||||||
"notNull": true,
|
|
||||||
"autoincrement": false
|
|
||||||
},
|
|
||||||
"destroyed_at": {
|
|
||||||
"name": "destroyed_at",
|
|
||||||
"type": "integer",
|
|
||||||
"primaryKey": false,
|
|
||||||
"notNull": false,
|
|
||||||
"autoincrement": false
|
|
||||||
},
|
|
||||||
"session_init_json": {
|
|
||||||
"name": "session_init_json",
|
|
||||||
"type": "text",
|
|
||||||
"primaryKey": false,
|
|
||||||
"notNull": false,
|
|
||||||
"autoincrement": false
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"indexes": {},
|
|
||||||
"foreignKeys": {},
|
|
||||||
"compositePrimaryKeys": {},
|
|
||||||
"uniqueConstraints": {},
|
|
||||||
"checkConstraints": {}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"views": {},
|
|
||||||
"enums": {},
|
|
||||||
"_meta": {
|
|
||||||
"schemas": {},
|
|
||||||
"tables": {},
|
|
||||||
"columns": {}
|
|
||||||
},
|
|
||||||
"internal": {
|
|
||||||
"indexes": {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,13 +0,0 @@
|
||||||
{
|
|
||||||
"version": "7",
|
|
||||||
"dialect": "sqlite",
|
|
||||||
"entries": [
|
|
||||||
{
|
|
||||||
"idx": 0,
|
|
||||||
"version": "6",
|
|
||||||
"when": 1773376224446,
|
|
||||||
"tag": "0000_smooth_sauron",
|
|
||||||
"breakpoints": true
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
@ -1,48 +0,0 @@
|
||||||
// This file is generated by src/actors/_scripts/generate-actor-migrations.ts.
|
|
||||||
// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql).
|
|
||||||
// Do not hand-edit this file.
|
|
||||||
|
|
||||||
const journal = {
|
|
||||||
entries: [
|
|
||||||
{
|
|
||||||
idx: 0,
|
|
||||||
when: 1773376224446,
|
|
||||||
tag: "0000_smooth_sauron",
|
|
||||||
breakpoints: true,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
} as const;
|
|
||||||
|
|
||||||
export default {
|
|
||||||
journal,
|
|
||||||
migrations: {
|
|
||||||
m0000: `CREATE TABLE \`sandbox_instance\` (
|
|
||||||
\`id\` integer PRIMARY KEY NOT NULL,
|
|
||||||
\`metadata_json\` text NOT NULL,
|
|
||||||
\`status\` text NOT NULL,
|
|
||||||
\`updated_at\` integer NOT NULL
|
|
||||||
);
|
|
||||||
--> statement-breakpoint
|
|
||||||
CREATE TABLE \`sandbox_session_events\` (
|
|
||||||
\`id\` text PRIMARY KEY NOT NULL,
|
|
||||||
\`session_id\` text NOT NULL,
|
|
||||||
\`event_index\` integer NOT NULL,
|
|
||||||
\`created_at\` integer NOT NULL,
|
|
||||||
\`connection_id\` text NOT NULL,
|
|
||||||
\`sender\` text NOT NULL,
|
|
||||||
\`payload_json\` text NOT NULL
|
|
||||||
);
|
|
||||||
--> statement-breakpoint
|
|
||||||
CREATE UNIQUE INDEX \`sandbox_session_events_session_id_event_index_unique\` ON \`sandbox_session_events\` (\`session_id\`,\`event_index\`);--> statement-breakpoint
|
|
||||||
CREATE TABLE \`sandbox_sessions\` (
|
|
||||||
\`id\` text PRIMARY KEY NOT NULL,
|
|
||||||
\`agent\` text NOT NULL,
|
|
||||||
\`agent_session_id\` text NOT NULL,
|
|
||||||
\`last_connection_id\` text NOT NULL,
|
|
||||||
\`created_at\` integer NOT NULL,
|
|
||||||
\`destroyed_at\` integer,
|
|
||||||
\`session_init_json\` text
|
|
||||||
);
|
|
||||||
`,
|
|
||||||
} as const,
|
|
||||||
};
|
|
||||||
|
|
@ -1,38 +0,0 @@
|
||||||
import { integer, sqliteTable, text, uniqueIndex } from "rivetkit/db/drizzle";
|
|
||||||
|
|
||||||
// SQLite is per sandbox-instance actor instance.
// Callers in this package pin `id` to a single fixed row id, so in practice the
// table holds one row describing this actor's sandbox.
export const sandboxInstance = sqliteTable("sandbox_instance", {
  id: integer("id").primaryKey(),
  // Structured by the provider/runtime metadata serializer for this actor.
  metadataJson: text("metadata_json").notNull(),
  // Coarse sandbox status string; health updates pack "<status>:<message>" here.
  status: text("status").notNull(),
  // Last-write timestamp (epoch milliseconds via Date.now()).
  updatedAt: integer("updated_at").notNull(),
});
|
|
||||||
|
|
||||||
// Persist sandbox-agent sessions/events in SQLite instead of actor state so they survive
// serverless actor evictions and backend restarts.
export const sandboxSessions = sqliteTable("sandbox_sessions", {
  id: text("id").notNull().primaryKey(),
  // Which agent runs the session (e.g. "claude" | "codex" | "opencode" per the command types).
  agent: text("agent").notNull(),
  // The agent-side session identifier, distinct from our row id.
  agentSessionId: text("agent_session_id").notNull(),
  lastConnectionId: text("last_connection_id").notNull(),
  // Epoch milliseconds (writers use Date.now()).
  createdAt: integer("created_at").notNull(),
  // Null while the session is live; set once the session is destroyed.
  destroyedAt: integer("destroyed_at"),
  // Structured by the sandbox-agent ACP session bootstrap payload.
  sessionInitJson: text("session_init_json"),
});
|
|
||||||
|
|
||||||
// Per-session event log. Each event carries a monotonically assigned index
// within its session, enforced unique by the composite index below.
export const sandboxSessionEvents = sqliteTable(
  "sandbox_session_events",
  {
    id: text("id").notNull().primaryKey(),
    sessionId: text("session_id").notNull(),
    // Position of the event within its session.
    eventIndex: integer("event_index").notNull(),
    // Epoch milliseconds.
    createdAt: integer("created_at").notNull(),
    connectionId: text("connection_id").notNull(),
    sender: text("sender").notNull(),
    // Structured by the sandbox-agent session event envelope.
    payloadJson: text("payload_json").notNull(),
  },
  (table) => [uniqueIndex("sandbox_session_events_session_id_event_index_unique").on(table.sessionId, table.eventIndex)],
);
|
|
||||||
|
|
@ -1,640 +0,0 @@
|
||||||
import { setTimeout as delay } from "node:timers/promises";
|
|
||||||
import { eq } from "drizzle-orm";
|
|
||||||
import { actor, queue } from "rivetkit";
|
|
||||||
import { Loop, workflow } from "rivetkit/workflow";
|
|
||||||
import type { ProviderId } from "@sandbox-agent/foundry-shared";
|
|
||||||
import type {
|
|
||||||
ProcessCreateRequest,
|
|
||||||
ProcessInfo,
|
|
||||||
ProcessLogFollowQuery,
|
|
||||||
ProcessLogsResponse,
|
|
||||||
ProcessSignalQuery,
|
|
||||||
SessionEvent,
|
|
||||||
SessionRecord,
|
|
||||||
} from "sandbox-agent";
|
|
||||||
import { sandboxInstanceDb } from "./db/db.js";
|
|
||||||
import { sandboxInstance as sandboxInstanceTable } from "./db/schema.js";
|
|
||||||
import { SandboxInstancePersistDriver } from "./persist.js";
|
|
||||||
import { getActorRuntimeContext } from "../context.js";
|
|
||||||
import { selfSandboxInstance } from "../handles.js";
|
|
||||||
import { logActorWarning, resolveErrorMessage } from "../logging.js";
|
|
||||||
import { expectQueueResponse } from "../../services/queue.js";
|
|
||||||
|
|
||||||
export interface SandboxInstanceInput {
|
|
||||||
workspaceId: string;
|
|
||||||
providerId: ProviderId;
|
|
||||||
sandboxId: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface SandboxAgentConnection {
|
|
||||||
endpoint: string;
|
|
||||||
token?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
// The sandbox_instance table holds exactly one row per actor; this is its fixed id.
const SANDBOX_ROW_ID = 1;
// createSession retry policy: up to 3 attempts with linear backoff (1s, 2s, ...).
const CREATE_SESSION_MAX_ATTEMPTS = 3;
const CREATE_SESSION_RETRY_BASE_MS = 1_000;
// Workflow step timeout for session creation (10 minutes).
const CREATE_SESSION_STEP_TIMEOUT_MS = 10 * 60_000;
|
|
||||||
|
|
||||||
function normalizeStatusFromEventPayload(payload: unknown): "running" | "idle" | "error" | null {
|
|
||||||
if (payload && typeof payload === "object") {
|
|
||||||
const envelope = payload as {
|
|
||||||
error?: unknown;
|
|
||||||
method?: unknown;
|
|
||||||
result?: unknown;
|
|
||||||
};
|
|
||||||
|
|
||||||
if (envelope.error) {
|
|
||||||
return "error";
|
|
||||||
}
|
|
||||||
|
|
||||||
if (envelope.result && typeof envelope.result === "object") {
|
|
||||||
const stopReason = (envelope.result as { stopReason?: unknown }).stopReason;
|
|
||||||
if (typeof stopReason === "string" && stopReason.length > 0) {
|
|
||||||
return "idle";
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (typeof envelope.method === "string") {
|
|
||||||
const lowered = envelope.method.toLowerCase();
|
|
||||||
if (lowered.includes("error") || lowered.includes("failed")) {
|
|
||||||
return "error";
|
|
||||||
}
|
|
||||||
if (lowered.includes("ended") || lowered.includes("complete") || lowered.includes("stopped")) {
|
|
||||||
return "idle";
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
function stringifyJson(value: unknown): string {
|
|
||||||
return JSON.stringify(value, (_key, item) => {
|
|
||||||
if (typeof item === "bigint") return item.toString();
|
|
||||||
return item;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function parseMetadata(metadataJson: string): Record<string, unknown> {
|
|
||||||
try {
|
|
||||||
const parsed = JSON.parse(metadataJson) as unknown;
|
|
||||||
if (parsed && typeof parsed === "object") return parsed as Record<string, unknown>;
|
|
||||||
return {};
|
|
||||||
} catch {
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Read the sandbox-agent endpoint/token previously persisted in the
// sandbox_instance metadata row. Returns null when there is no row, the
// metadata has no usable endpoint, or the DB read fails (best-effort cache).
async function loadPersistedAgentConfig(c: any): Promise<SandboxAgentConnection | null> {
  try {
    const row = await c.db
      .select({ metadataJson: sandboxInstanceTable.metadataJson })
      .from(sandboxInstanceTable)
      .where(eq(sandboxInstanceTable.id, SANDBOX_ROW_ID))
      .get();

    if (row?.metadataJson) {
      const metadata = parseMetadata(row.metadataJson);
      const endpoint = typeof metadata.agentEndpoint === "string" ? metadata.agentEndpoint.trim() : "";
      const token = typeof metadata.agentToken === "string" ? metadata.agentToken.trim() : "";
      if (endpoint) {
        // Omit the token key entirely when blank rather than persisting "".
        return token ? { endpoint, token } : { endpoint };
      }
    }
  } catch {
    // Persisted metadata is only an optimization; treat any failure as a miss.
    return null;
  }
  return null;
}
|
|
||||||
|
|
||||||
// Resolve a signed preview endpoint for the sandbox-agent inside a Daytona
// sandbox, starting the sandbox first if it is not already started/running.
async function loadFreshDaytonaAgentConfig(c: any): Promise<SandboxAgentConnection> {
  const { config, driver } = getActorRuntimeContext();
  const daytona = driver.daytona.createClient({
    apiUrl: config.providers.daytona.endpoint,
    apiKey: config.providers.daytona.apiKey,
  });
  const sandbox = await daytona.getSandbox(c.state.sandboxId);
  const state = String(sandbox.state ?? "unknown").toLowerCase();
  if (state !== "started" && state !== "running") {
    // 60 here is presumably a start timeout in seconds — TODO confirm against the Daytona client.
    await daytona.startSandbox(c.state.sandboxId, 60);
  }
  // Port 2468 — presumably the sandbox-agent's listen port inside the sandbox; verify against the agent config.
  const preview = await daytona.getPreviewEndpoint(c.state.sandboxId, 2468);
  return preview.token ? { endpoint: preview.url, token: preview.token } : { endpoint: preview.url };
}
|
|
||||||
|
|
||||||
// Ask the configured provider to (re)establish a sandbox-agent connection
// for this actor's sandbox.
async function loadFreshProviderAgentConfig(c: any): Promise<SandboxAgentConnection> {
  const { providers } = getActorRuntimeContext();
  const provider = providers.get(c.state.providerId);
  return await provider.ensureSandboxAgent({
    workspaceId: c.state.workspaceId,
    sandboxId: c.state.sandboxId,
  });
}
|
|
||||||
|
|
||||||
async function loadAgentConfig(c: any): Promise<SandboxAgentConnection> {
|
|
||||||
const persisted = await loadPersistedAgentConfig(c);
|
|
||||||
if (c.state.providerId === "daytona") {
|
|
||||||
// Keep one stable signed preview endpoint per sandbox-instance actor.
|
|
||||||
// Rotating preview URLs on every call fragments SDK client state (sessions/events)
|
|
||||||
// because client caching keys by endpoint.
|
|
||||||
if (persisted) {
|
|
||||||
return persisted;
|
|
||||||
}
|
|
||||||
return await loadFreshDaytonaAgentConfig(c);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Local sandboxes are tied to the current backend process, so the sandbox-agent
|
|
||||||
// token can rotate on restart. Always refresh from the provider instead of
|
|
||||||
// trusting persisted metadata.
|
|
||||||
if (c.state.providerId === "local") {
|
|
||||||
return await loadFreshProviderAgentConfig(c);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (persisted) {
|
|
||||||
return persisted;
|
|
||||||
}
|
|
||||||
|
|
||||||
return await loadFreshProviderAgentConfig(c);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Derive a coarse session status purely from persisted data (no sandbox
// round-trip):
// - unknown session   -> "error"
// - destroyed session -> "idle"
// - otherwise scan the fetched events from newest to oldest for the first
//   status-bearing payload, defaulting to "idle" when none carries one.
async function derivePersistedSessionStatus(
  persist: SandboxInstancePersistDriver,
  sessionId: string,
): Promise<{ id: string; status: "running" | "idle" | "error" }> {
  const session = await persist.getSession(sessionId);
  if (!session) {
    return { id: sessionId, status: "error" };
  }

  if (session.destroyedAt) {
    return { id: sessionId, status: "idle" };
  }

  // Only a tail window of the event log is inspected; 25 events is assumed
  // to be enough to contain the latest status-bearing event.
  const events = await persist.listEvents({
    sessionId,
    limit: 25,
  });

  // Walk backwards so the most recent signal wins.
  for (let index = events.items.length - 1; index >= 0; index -= 1) {
    const event = events.items[index];
    if (!event) continue;
    const status = normalizeStatusFromEventPayload(event.payload);
    if (status) {
      return { id: sessionId, status };
    }
  }

  return { id: sessionId, status: "idle" };
}
|
|
||||||
|
|
||||||
function isTransientSessionCreateError(detail: string): boolean {
|
|
||||||
const lowered = detail.toLowerCase();
|
|
||||||
if (lowered.includes("timed out") || lowered.includes("timeout") || lowered.includes("504") || lowered.includes("gateway timeout")) {
|
|
||||||
// ACP timeout errors are expensive and usually deterministic for the same
|
|
||||||
// request; immediate retries spawn additional sessions/processes and make
|
|
||||||
// recovery harder.
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
return (
|
|
||||||
lowered.includes("502") || lowered.includes("503") || lowered.includes("bad gateway") || lowered.includes("econnreset") || lowered.includes("econnrefused")
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Body of the "sandboxInstance.command.ensure" queue message: full metadata
// snapshot plus optional sandbox-agent connection details to persist with it.
interface EnsureSandboxCommand {
  metadata: Record<string, unknown>;
  status: string;
  agentEndpoint?: string;
  agentToken?: string;
}

// Health probe result; persisted packed as "<status>:<message>" in the status column.
interface HealthSandboxCommand {
  status: string;
  message: string;
}

// Parameters for creating a new agent session inside the sandbox.
interface CreateSessionCommand {
  prompt: string;
  cwd?: string;
  agent?: "claude" | "codex" | "opencode";
}

// Outcome of a createSession command; id is null (and error set) when creation failed.
interface CreateSessionResult {
  id: string | null;
  status: "running" | "idle" | "error";
  error?: string;
}

// Cursor-paged session listing parameters.
interface ListSessionsCommand {
  cursor?: string;
  limit?: number;
}

// Cursor-paged event listing parameters for one session.
interface ListSessionEventsCommand {
  sessionId: string;
  cursor?: string;
  limit?: number;
}

// Forward a prompt to an existing session.
interface SendPromptCommand {
  sessionId: string;
  prompt: string;
  notification?: boolean;
}

// Request the derived status of one session.
interface SessionStatusCommand {
  sessionId: string;
}

// Shared shape for the cancel/destroy session commands.
interface SessionControlCommand {
  sessionId: string;
}
|
|
||||||
|
|
||||||
// Every mutation is funneled through the actor's workflow loop via these
// queues so writes are serialized per actor instance.
const SANDBOX_INSTANCE_QUEUE_NAMES = [
  "sandboxInstance.command.ensure",
  "sandboxInstance.command.updateHealth",
  "sandboxInstance.command.destroy",
  "sandboxInstance.command.createSession",
  "sandboxInstance.command.sendPrompt",
  "sandboxInstance.command.cancelSession",
  "sandboxInstance.command.destroySession",
] as const;

type SandboxInstanceQueueName = (typeof SANDBOX_INSTANCE_QUEUE_NAMES)[number];

// Identity helper that exists purely to type-check queue-name literals at the
// call site.
function sandboxInstanceWorkflowQueueName(name: SandboxInstanceQueueName): SandboxInstanceQueueName {
  return name;
}
|
|
||||||
|
|
||||||
// Build a sandbox-agent client bound to this actor's SQLite persistence and
// to the current (possibly freshly resolved) agent endpoint/token.
async function getSandboxAgentClient(c: any) {
  const { driver } = getActorRuntimeContext();
  const persist = new SandboxInstancePersistDriver(c.db);
  const { endpoint, token } = await loadAgentConfig(c);
  return driver.sandboxAgent.createClient({
    endpoint,
    token,
    persist,
  });
}
|
|
||||||
|
|
||||||
// Push the current process list to all connected actor clients; called after
// every process mutation.
async function broadcastProcessesUpdated(c: any): Promise<void> {
  const client = await getSandboxAgentClient(c);
  const { processes } = await client.listProcesses();
  c.broadcast("processesUpdated", {
    type: "processesUpdated",
    processes,
  });
}
|
|
||||||
|
|
||||||
// Upsert the singleton sandbox row with the latest metadata/status. The agent
// endpoint/token are folded into the metadata JSON so later calls can reuse
// the connection without re-asking the provider.
async function ensureSandboxMutation(c: any, command: EnsureSandboxCommand): Promise<void> {
  const now = Date.now();
  const metadata = {
    ...command.metadata,
    // Normalize absent values to explicit nulls in the persisted JSON.
    agentEndpoint: command.agentEndpoint ?? null,
    agentToken: command.agentToken ?? null,
  };

  const metadataJson = stringifyJson(metadata);
  await c.db
    .insert(sandboxInstanceTable)
    .values({
      id: SANDBOX_ROW_ID,
      metadataJson,
      status: command.status,
      updatedAt: now,
    })
    .onConflictDoUpdate({
      target: sandboxInstanceTable.id,
      set: {
        metadataJson,
        status: command.status,
        updatedAt: now,
      },
    })
    .run();
}
|
|
||||||
|
|
||||||
// Record a health probe result. Status and message are packed into the single
// status column as "<status>:<message>". No-op if the sandbox row is absent.
async function updateHealthMutation(c: any, command: HealthSandboxCommand): Promise<void> {
  await c.db
    .update(sandboxInstanceTable)
    .set({
      status: `${command.status}:${command.message}`,
      updatedAt: Date.now(),
    })
    .where(eq(sandboxInstanceTable.id, SANDBOX_ROW_ID))
    .run();
}
|
|
||||||
|
|
||||||
// Remove the singleton sandbox row; session/event tables are left untouched.
async function destroySandboxMutation(c: any): Promise<void> {
  await c.db.delete(sandboxInstanceTable).where(eq(sandboxInstanceTable.id, SANDBOX_ROW_ID)).run();
}
|
|
||||||
|
|
||||||
// Create an agent session via the sandbox-agent, retrying transient gateway
// failures with linear backoff. Never throws: failures are reported as an
// error-shaped CreateSessionResult so the workflow can always complete the
// queue message.
async function createSessionMutation(c: any, command: CreateSessionCommand): Promise<CreateSessionResult> {
  let lastDetail = "sandbox-agent createSession failed";
  let attemptsMade = 0;

  for (let attempt = 1; attempt <= CREATE_SESSION_MAX_ATTEMPTS; attempt += 1) {
    attemptsMade = attempt;
    try {
      // Rebuilt per attempt so a refreshed endpoint/token can take effect
      // between retries.
      const client = await getSandboxAgentClient(c);

      const session = await client.createSession({
        prompt: command.prompt,
        cwd: command.cwd,
        agent: command.agent,
      });

      return { id: session.id, status: session.status };
    } catch (error) {
      const detail = error instanceof Error ? error.message : String(error);
      lastDetail = detail;
      const retryable = isTransientSessionCreateError(detail);
      const canRetry = retryable && attempt < CREATE_SESSION_MAX_ATTEMPTS;

      if (!canRetry) {
        break;
      }

      // Linear backoff: 1s, 2s, ...
      const waitMs = CREATE_SESSION_RETRY_BASE_MS * attempt;
      logActorWarning("sandbox-instance", "createSession transient failure; retrying", {
        workspaceId: c.state.workspaceId,
        providerId: c.state.providerId,
        sandboxId: c.state.sandboxId,
        attempt,
        maxAttempts: CREATE_SESSION_MAX_ATTEMPTS,
        waitMs,
        error: detail,
      });
      await delay(waitMs);
    }
  }

  const attemptLabel = attemptsMade === 1 ? "attempt" : "attempts";
  return {
    id: null,
    status: "error",
    error: `sandbox-agent createSession failed after ${attemptsMade} ${attemptLabel}: ${lastDetail}`,
  };
}
|
|
||||||
|
|
||||||
// Forward a prompt to an existing agent session via the sandbox-agent client.
async function sendPromptMutation(c: any, command: SendPromptCommand): Promise<void> {
  const client = await getSandboxAgentClient(c);
  await client.sendPrompt({
    sessionId: command.sessionId,
    prompt: command.prompt,
    notification: command.notification,
  });
}
|
|
||||||
|
|
||||||
// Cancel the in-flight turn for a session (the session itself stays alive).
async function cancelSessionMutation(c: any, command: SessionControlCommand): Promise<void> {
  const client = await getSandboxAgentClient(c);
  await client.cancelSession(command.sessionId);
}
|
|
||||||
|
|
||||||
// Permanently tear down a session on the sandbox-agent side.
async function destroySessionMutation(c: any, command: SessionControlCommand): Promise<void> {
  const client = await getSandboxAgentClient(c);
  await client.destroySession(command.sessionId);
}
|
|
||||||
|
|
||||||
async function runSandboxInstanceWorkflow(ctx: any): Promise<void> {
|
|
||||||
await ctx.loop("sandbox-instance-command-loop", async (loopCtx: any) => {
|
|
||||||
const msg = await loopCtx.queue.next("next-sandbox-instance-command", {
|
|
||||||
names: [...SANDBOX_INSTANCE_QUEUE_NAMES],
|
|
||||||
completable: true,
|
|
||||||
});
|
|
||||||
if (!msg) {
|
|
||||||
return Loop.continue(undefined);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (msg.name === "sandboxInstance.command.ensure") {
|
|
||||||
await loopCtx.step("sandbox-instance-ensure", async () => ensureSandboxMutation(loopCtx, msg.body as EnsureSandboxCommand));
|
|
||||||
await msg.complete({ ok: true });
|
|
||||||
return Loop.continue(undefined);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (msg.name === "sandboxInstance.command.updateHealth") {
|
|
||||||
await loopCtx.step("sandbox-instance-update-health", async () => updateHealthMutation(loopCtx, msg.body as HealthSandboxCommand));
|
|
||||||
await msg.complete({ ok: true });
|
|
||||||
return Loop.continue(undefined);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (msg.name === "sandboxInstance.command.destroy") {
|
|
||||||
await loopCtx.step("sandbox-instance-destroy", async () => destroySandboxMutation(loopCtx));
|
|
||||||
await msg.complete({ ok: true });
|
|
||||||
return Loop.continue(undefined);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (msg.name === "sandboxInstance.command.createSession") {
|
|
||||||
const result = await loopCtx.step({
|
|
||||||
name: "sandbox-instance-create-session",
|
|
||||||
timeout: CREATE_SESSION_STEP_TIMEOUT_MS,
|
|
||||||
run: async () => createSessionMutation(loopCtx, msg.body as CreateSessionCommand),
|
|
||||||
});
|
|
||||||
await msg.complete(result);
|
|
||||||
return Loop.continue(undefined);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (msg.name === "sandboxInstance.command.sendPrompt") {
|
|
||||||
await loopCtx.step("sandbox-instance-send-prompt", async () => sendPromptMutation(loopCtx, msg.body as SendPromptCommand));
|
|
||||||
await msg.complete({ ok: true });
|
|
||||||
return Loop.continue(undefined);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (msg.name === "sandboxInstance.command.cancelSession") {
|
|
||||||
await loopCtx.step("sandbox-instance-cancel-session", async () => cancelSessionMutation(loopCtx, msg.body as SessionControlCommand));
|
|
||||||
await msg.complete({ ok: true });
|
|
||||||
return Loop.continue(undefined);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (msg.name === "sandboxInstance.command.destroySession") {
|
|
||||||
await loopCtx.step("sandbox-instance-destroy-session", async () => destroySessionMutation(loopCtx, msg.body as SessionControlCommand));
|
|
||||||
await msg.complete({ ok: true });
|
|
||||||
}
|
|
||||||
|
|
||||||
return Loop.continue(undefined);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Actor that owns a single provider sandbox: persists its metadata/status in
// per-actor SQLite, proxies sandbox-agent process/session APIs, and serializes
// all mutations through the workflow queue loop above.
export const sandboxInstance = actor({
  db: sandboxInstanceDb,
  // One queue per command type; consumed by runSandboxInstanceWorkflow.
  queues: Object.fromEntries(SANDBOX_INSTANCE_QUEUE_NAMES.map((name) => [name, queue()])),
  options: {
    name: "Sandbox Instance",
    icon: "box",
    actionTimeout: 5 * 60_000,
  },
  // Actor identity: which workspace/provider/sandbox this instance manages.
  createState: (_c, input: SandboxInstanceInput) => ({
    workspaceId: input.workspaceId,
    providerId: input.providerId,
    sandboxId: input.sandboxId,
  }),
  actions: {
    // Expose the resolved sandbox-agent endpoint/token to callers.
    async sandboxAgentConnection(c: any): Promise<SandboxAgentConnection> {
      return await loadAgentConfig(c);
    },

    // Process-management proxies. Mutating calls broadcast the refreshed
    // process list to connected clients afterwards.
    async createProcess(c: any, request: ProcessCreateRequest): Promise<ProcessInfo> {
      const client = await getSandboxAgentClient(c);
      const created = await client.createProcess(request);
      await broadcastProcessesUpdated(c);
      return created;
    },

    async listProcesses(c: any): Promise<{ processes: ProcessInfo[] }> {
      const client = await getSandboxAgentClient(c);
      return await client.listProcesses();
    },

    async getProcessLogs(c: any, request: { processId: string; query?: ProcessLogFollowQuery }): Promise<ProcessLogsResponse> {
      const client = await getSandboxAgentClient(c);
      return await client.getProcessLogs(request.processId, request.query);
    },

    async stopProcess(c: any, request: { processId: string; query?: ProcessSignalQuery }): Promise<ProcessInfo> {
      const client = await getSandboxAgentClient(c);
      const stopped = await client.stopProcess(request.processId, request.query);
      await broadcastProcessesUpdated(c);
      return stopped;
    },

    async killProcess(c: any, request: { processId: string; query?: ProcessSignalQuery }): Promise<ProcessInfo> {
      const client = await getSandboxAgentClient(c);
      const killed = await client.killProcess(request.processId, request.query);
      await broadcastProcessesUpdated(c);
      return killed;
    },

    async deleteProcess(c: any, request: { processId: string }): Promise<void> {
      const client = await getSandboxAgentClient(c);
      await client.deleteProcess(request.processId);
      await broadcastProcessesUpdated(c);
    },

    // Raw provider-side sandbox state. Only Daytona is queried directly;
    // every other provider reports "unknown".
    async providerState(c: any): Promise<{ providerId: ProviderId; sandboxId: string; state: string; at: number }> {
      const at = Date.now();
      const { config, driver } = getActorRuntimeContext();

      if (c.state.providerId === "daytona") {
        const daytona = driver.daytona.createClient({
          apiUrl: config.providers.daytona.endpoint,
          apiKey: config.providers.daytona.apiKey,
        });
        const sandbox = await daytona.getSandbox(c.state.sandboxId);
        const state = String(sandbox.state ?? "unknown").toLowerCase();
        return { providerId: c.state.providerId, sandboxId: c.state.sandboxId, state, at };
      }

      return {
        providerId: c.state.providerId,
        sandboxId: c.state.sandboxId,
        state: "unknown",
        at,
      };
    },

    // The mutations below enqueue a workflow command and wait for completion
    // so all writes stay serialized through the workflow loop.
    async ensure(c, command: EnsureSandboxCommand): Promise<void> {
      const self = selfSandboxInstance(c);
      await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.ensure"), command, {
        wait: true,
        timeout: 60_000,
      });
    },

    async updateHealth(c, command: HealthSandboxCommand): Promise<void> {
      const self = selfSandboxInstance(c);
      await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.updateHealth"), command, {
        wait: true,
        timeout: 60_000,
      });
    },

    async destroy(c): Promise<void> {
      const self = selfSandboxInstance(c);
      await self.send(
        sandboxInstanceWorkflowQueueName("sandboxInstance.command.destroy"),
        {},
        {
          wait: true,
          timeout: 60_000,
        },
      );
    },

    // Session creation can take minutes (agent bootstrap), hence the longer
    // wait timeout and the typed queue response.
    async createSession(c: any, command: CreateSessionCommand): Promise<CreateSessionResult> {
      const self = selfSandboxInstance(c);
      return expectQueueResponse<CreateSessionResult>(
        await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.createSession"), command, {
          wait: true,
          timeout: 5 * 60_000,
        }),
      );
    },

    // Reads prefer the live sandbox-agent but fall back to the SQLite copy so
    // listings keep working while the sandbox is unreachable.
    async listSessions(c: any, command?: ListSessionsCommand): Promise<{ items: SessionRecord[]; nextCursor?: string }> {
      const persist = new SandboxInstancePersistDriver(c.db);
      try {
        const client = await getSandboxAgentClient(c);

        const page = await client.listSessions({
          cursor: command?.cursor,
          limit: command?.limit,
        });

        return {
          items: page.items,
          nextCursor: page.nextCursor,
        };
      } catch (error) {
        logActorWarning("sandbox-instance", "listSessions remote read failed; using persisted fallback", {
          workspaceId: c.state.workspaceId,
          providerId: c.state.providerId,
          sandboxId: c.state.sandboxId,
          error: resolveErrorMessage(error),
        });
        return await persist.listSessions({
          cursor: command?.cursor,
          limit: command?.limit,
        });
      }
    },

    // Events are served straight from SQLite; the persist driver is the
    // system of record for the event log.
    async listSessionEvents(c: any, command: ListSessionEventsCommand): Promise<{ items: SessionEvent[]; nextCursor?: string }> {
      const persist = new SandboxInstancePersistDriver(c.db);
      return await persist.listEvents({
        sessionId: command.sessionId,
        cursor: command.cursor,
        limit: command.limit,
      });
    },

    async sendPrompt(c, command: SendPromptCommand): Promise<void> {
      const self = selfSandboxInstance(c);
      await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.sendPrompt"), command, {
        wait: true,
        timeout: 5 * 60_000,
      });
    },

    async cancelSession(c, command: SessionControlCommand): Promise<void> {
      const self = selfSandboxInstance(c);
      await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.cancelSession"), command, {
        wait: true,
        timeout: 60_000,
      });
    },

    async destroySession(c, command: SessionControlCommand): Promise<void> {
      const self = selfSandboxInstance(c);
      await self.send(sandboxInstanceWorkflowQueueName("sandboxInstance.command.destroySession"), command, {
        wait: true,
        timeout: 60_000,
      });
    },

    // Status is derived from persisted data only (no sandbox round-trip).
    async sessionStatus(c, command: SessionStatusCommand): Promise<{ id: string; status: "running" | "idle" | "error" }> {
      return await derivePersistedSessionStatus(new SandboxInstancePersistDriver(c.db), command.sessionId);
    },
  },
  run: workflow(runSandboxInstanceWorkflow),
});
|
|
||||||
|
|
@ -1,266 +0,0 @@
|
||||||
import { and, asc, count, eq } from "drizzle-orm";
|
|
||||||
import type { ListEventsRequest, ListPage, ListPageRequest, SessionEvent, SessionPersistDriver, SessionRecord } from "sandbox-agent";
|
|
||||||
import { sandboxSessionEvents, sandboxSessions } from "./db/schema.js";
|
|
||||||
|
|
||||||
// Retention caps for the per-actor SQLite store; overridable via
// SandboxInstancePersistDriverOptions.
const DEFAULT_MAX_SESSIONS = 1024;
const DEFAULT_MAX_EVENTS_PER_SESSION = 500;
// Default page size for cursor-based listings.
const DEFAULT_LIST_LIMIT = 100;
|
|
||||||
|
|
||||||
function normalizeCap(value: number | undefined, fallback: number): number {
|
|
||||||
if (!Number.isFinite(value) || (value ?? 0) < 1) {
|
|
||||||
return fallback;
|
|
||||||
}
|
|
||||||
return Math.floor(value as number);
|
|
||||||
}
|
|
||||||
|
|
||||||
function parseCursor(cursor: string | undefined): number {
|
|
||||||
if (!cursor) return 0;
|
|
||||||
const parsed = Number.parseInt(cursor, 10);
|
|
||||||
if (!Number.isFinite(parsed) || parsed < 0) return 0;
|
|
||||||
return parsed;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function resolveEventListOffset(params: { cursor?: string; total: number; limit: number }): number {
|
|
||||||
if (params.cursor != null) {
|
|
||||||
return parseCursor(params.cursor);
|
|
||||||
}
|
|
||||||
return Math.max(0, params.total - params.limit);
|
|
||||||
}
|
|
||||||
|
|
||||||
function safeStringify(value: unknown): string {
|
|
||||||
return JSON.stringify(value, (_key, item) => {
|
|
||||||
if (typeof item === "bigint") return item.toString();
|
|
||||||
return item;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function safeParseJson<T>(value: string | null | undefined, fallback: T): T {
|
|
||||||
if (!value) return fallback;
|
|
||||||
try {
|
|
||||||
return JSON.parse(value) as T;
|
|
||||||
} catch {
|
|
||||||
return fallback;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Optional retention overrides for the persist driver; values < 1 or
// non-finite fall back to the module defaults (see normalizeCap).
export interface SandboxInstancePersistDriverOptions {
  maxSessions?: number;
  maxEventsPerSession?: number;
}
|
|
||||||
|
|
||||||
// SQLite-backed implementation of the sandbox-agent SessionPersistDriver.
// Stores sessions and their event log in the actor's per-instance database,
// with size caps to bound growth.
export class SandboxInstancePersistDriver implements SessionPersistDriver {
  // Effective retention caps, normalized from options in the constructor.
  private readonly maxSessions: number;
  private readonly maxEventsPerSession: number;

  constructor(
    private readonly db: any,
    options: SandboxInstancePersistDriverOptions = {},
  ) {
    this.maxSessions = normalizeCap(options.maxSessions, DEFAULT_MAX_SESSIONS);
    this.maxEventsPerSession = normalizeCap(options.maxEventsPerSession, DEFAULT_MAX_EVENTS_PER_SESSION);
  }
|
|
||||||
|
|
||||||
  // Load one session row by id; null when absent. JSON columns are decoded
  // defensively: a malformed sessionInitJson becomes undefined rather than throwing.
  async getSession(id: string): Promise<SessionRecord | null> {
    const row = await this.db
      .select({
        id: sandboxSessions.id,
        agent: sandboxSessions.agent,
        agentSessionId: sandboxSessions.agentSessionId,
        lastConnectionId: sandboxSessions.lastConnectionId,
        createdAt: sandboxSessions.createdAt,
        destroyedAt: sandboxSessions.destroyedAt,
        sessionInitJson: sandboxSessions.sessionInitJson,
      })
      .from(sandboxSessions)
      .where(eq(sandboxSessions.id, id))
      .get();

    if (!row) return null;

    return {
      id: row.id,
      agent: row.agent,
      agentSessionId: row.agentSessionId,
      lastConnectionId: row.lastConnectionId,
      createdAt: row.createdAt,
      // SQL NULL -> undefined to match the SessionRecord shape.
      destroyedAt: row.destroyedAt ?? undefined,
      sessionInit: safeParseJson(row.sessionInitJson, undefined),
    };
  }
|
|
||||||
|
|
||||||
  // Offset-cursor page over sessions ordered by (createdAt, id). nextCursor is
  // the next offset as a string, omitted once the final row has been returned.
  async listSessions(request: ListPageRequest = {}): Promise<ListPage<SessionRecord>> {
    const offset = parseCursor(request.cursor);
    const limit = normalizeCap(request.limit, DEFAULT_LIST_LIMIT);

    const rows = await this.db
      .select({
        id: sandboxSessions.id,
        agent: sandboxSessions.agent,
        agentSessionId: sandboxSessions.agentSessionId,
        lastConnectionId: sandboxSessions.lastConnectionId,
        createdAt: sandboxSessions.createdAt,
        destroyedAt: sandboxSessions.destroyedAt,
        sessionInitJson: sandboxSessions.sessionInitJson,
      })
      .from(sandboxSessions)
      .orderBy(asc(sandboxSessions.createdAt), asc(sandboxSessions.id))
      .limit(limit)
      .offset(offset)
      .all();

    const items = rows.map((row) => ({
      id: row.id,
      agent: row.agent,
      agentSessionId: row.agentSessionId,
      lastConnectionId: row.lastConnectionId,
      createdAt: row.createdAt,
      destroyedAt: row.destroyedAt ?? undefined,
      sessionInit: safeParseJson(row.sessionInitJson, undefined),
    }));

    // NOTE(review): the total is read in a separate query from the page, so
    // offsets can drift under concurrent writes — confirm this is acceptable.
    const totalRow = await this.db.select({ c: count() }).from(sandboxSessions).get();
    const total = Number(totalRow?.c ?? 0);

    const nextOffset = offset + items.length;
    return {
      items,
      nextCursor: nextOffset < total ? String(nextOffset) : undefined,
    };
  }
|
|
||||||
|
|
||||||
async updateSession(session: SessionRecord): Promise<void> {
|
|
||||||
const now = Date.now();
|
|
||||||
await this.db
|
|
||||||
.insert(sandboxSessions)
|
|
||||||
.values({
|
|
||||||
id: session.id,
|
|
||||||
agent: session.agent,
|
|
||||||
agentSessionId: session.agentSessionId,
|
|
||||||
lastConnectionId: session.lastConnectionId,
|
|
||||||
createdAt: session.createdAt ?? now,
|
|
||||||
destroyedAt: session.destroyedAt ?? null,
|
|
||||||
sessionInitJson: session.sessionInit ? safeStringify(session.sessionInit) : null,
|
|
||||||
})
|
|
||||||
.onConflictDoUpdate({
|
|
||||||
target: sandboxSessions.id,
|
|
||||||
set: {
|
|
||||||
agent: session.agent,
|
|
||||||
agentSessionId: session.agentSessionId,
|
|
||||||
lastConnectionId: session.lastConnectionId,
|
|
||||||
createdAt: session.createdAt ?? now,
|
|
||||||
destroyedAt: session.destroyedAt ?? null,
|
|
||||||
sessionInitJson: session.sessionInit ? safeStringify(session.sessionInit) : null,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
.run();
|
|
||||||
|
|
||||||
// Evict oldest sessions beyond cap.
|
|
||||||
const totalRow = await this.db.select({ c: count() }).from(sandboxSessions).get();
|
|
||||||
const total = Number(totalRow?.c ?? 0);
|
|
||||||
const overflow = total - this.maxSessions;
|
|
||||||
if (overflow <= 0) return;
|
|
||||||
|
|
||||||
const toRemove = await this.db
|
|
||||||
.select({ id: sandboxSessions.id })
|
|
||||||
.from(sandboxSessions)
|
|
||||||
.orderBy(asc(sandboxSessions.createdAt), asc(sandboxSessions.id))
|
|
||||||
.limit(overflow)
|
|
||||||
.all();
|
|
||||||
|
|
||||||
for (const row of toRemove) {
|
|
||||||
await this.db.delete(sandboxSessionEvents).where(eq(sandboxSessionEvents.sessionId, row.id)).run();
|
|
||||||
await this.db.delete(sandboxSessions).where(eq(sandboxSessions.id, row.id)).run();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async listEvents(request: ListEventsRequest): Promise<ListPage<SessionEvent>> {
|
|
||||||
const limit = normalizeCap(request.limit, DEFAULT_LIST_LIMIT);
|
|
||||||
const totalRow = await this.db.select({ c: count() }).from(sandboxSessionEvents).where(eq(sandboxSessionEvents.sessionId, request.sessionId)).get();
|
|
||||||
const total = Number(totalRow?.c ?? 0);
|
|
||||||
const offset = resolveEventListOffset({
|
|
||||||
cursor: request.cursor,
|
|
||||||
total,
|
|
||||||
limit,
|
|
||||||
});
|
|
||||||
|
|
||||||
const rows = await this.db
|
|
||||||
.select({
|
|
||||||
id: sandboxSessionEvents.id,
|
|
||||||
sessionId: sandboxSessionEvents.sessionId,
|
|
||||||
eventIndex: sandboxSessionEvents.eventIndex,
|
|
||||||
createdAt: sandboxSessionEvents.createdAt,
|
|
||||||
connectionId: sandboxSessionEvents.connectionId,
|
|
||||||
sender: sandboxSessionEvents.sender,
|
|
||||||
payloadJson: sandboxSessionEvents.payloadJson,
|
|
||||||
})
|
|
||||||
.from(sandboxSessionEvents)
|
|
||||||
.where(eq(sandboxSessionEvents.sessionId, request.sessionId))
|
|
||||||
.orderBy(asc(sandboxSessionEvents.eventIndex), asc(sandboxSessionEvents.id))
|
|
||||||
.limit(limit)
|
|
||||||
.offset(offset)
|
|
||||||
.all();
|
|
||||||
|
|
||||||
const items: SessionEvent[] = rows.map((row) => ({
|
|
||||||
id: row.id,
|
|
||||||
eventIndex: row.eventIndex,
|
|
||||||
sessionId: row.sessionId,
|
|
||||||
createdAt: row.createdAt,
|
|
||||||
connectionId: row.connectionId,
|
|
||||||
sender: row.sender as any,
|
|
||||||
payload: safeParseJson(row.payloadJson, null),
|
|
||||||
}));
|
|
||||||
|
|
||||||
const nextOffset = offset + items.length;
|
|
||||||
return {
|
|
||||||
items,
|
|
||||||
nextCursor: nextOffset < total ? String(nextOffset) : undefined,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async insertEvent(event: SessionEvent): Promise<void> {
|
|
||||||
await this.db
|
|
||||||
.insert(sandboxSessionEvents)
|
|
||||||
.values({
|
|
||||||
id: event.id,
|
|
||||||
sessionId: event.sessionId,
|
|
||||||
eventIndex: event.eventIndex,
|
|
||||||
createdAt: event.createdAt,
|
|
||||||
connectionId: event.connectionId,
|
|
||||||
sender: event.sender,
|
|
||||||
payloadJson: safeStringify(event.payload),
|
|
||||||
})
|
|
||||||
.onConflictDoUpdate({
|
|
||||||
target: sandboxSessionEvents.id,
|
|
||||||
set: {
|
|
||||||
sessionId: event.sessionId,
|
|
||||||
eventIndex: event.eventIndex,
|
|
||||||
createdAt: event.createdAt,
|
|
||||||
connectionId: event.connectionId,
|
|
||||||
sender: event.sender,
|
|
||||||
payloadJson: safeStringify(event.payload),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
.run();
|
|
||||||
|
|
||||||
// Trim oldest events beyond cap.
|
|
||||||
const totalRow = await this.db.select({ c: count() }).from(sandboxSessionEvents).where(eq(sandboxSessionEvents.sessionId, event.sessionId)).get();
|
|
||||||
const total = Number(totalRow?.c ?? 0);
|
|
||||||
const overflow = total - this.maxEventsPerSession;
|
|
||||||
if (overflow <= 0) return;
|
|
||||||
|
|
||||||
const toRemove = await this.db
|
|
||||||
.select({ id: sandboxSessionEvents.id })
|
|
||||||
.from(sandboxSessionEvents)
|
|
||||||
.where(eq(sandboxSessionEvents.sessionId, event.sessionId))
|
|
||||||
.orderBy(asc(sandboxSessionEvents.eventIndex), asc(sandboxSessionEvents.id))
|
|
||||||
.limit(overflow)
|
|
||||||
.all();
|
|
||||||
|
|
||||||
for (const row of toRemove) {
|
|
||||||
await this.db
|
|
||||||
.delete(sandboxSessionEvents)
|
|
||||||
.where(and(eq(sandboxSessionEvents.sessionId, event.sessionId), eq(sandboxSessionEvents.id, row.id)))
|
|
||||||
.run();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
401
foundry/packages/backend/src/actors/sandbox/index.ts
Normal file
401
foundry/packages/backend/src/actors/sandbox/index.ts
Normal file
|
|
@ -0,0 +1,401 @@
|
||||||
|
import { actor } from "rivetkit";
|
||||||
|
import { e2b, sandboxActor } from "rivetkit/sandbox";
|
||||||
|
import { existsSync } from "node:fs";
|
||||||
|
import Dockerode from "dockerode";
|
||||||
|
import { SandboxAgent } from "sandbox-agent";
|
||||||
|
import { getActorRuntimeContext } from "../context.js";
|
||||||
|
import { workspaceKey } from "../keys.js";
|
||||||
|
import { resolveSandboxProviderId } from "../../sandbox-config.js";
|
||||||
|
|
||||||
|
const SANDBOX_REPO_CWD = "/home/sandbox/workspace/repo";
|
||||||
|
const DEFAULT_LOCAL_SANDBOX_IMAGE = "rivetdev/sandbox-agent:full";
|
||||||
|
const DEFAULT_LOCAL_SANDBOX_PORT = 2468;
|
||||||
|
const dockerClient = new Dockerode({ socketPath: "/var/run/docker.sock" });
|
||||||
|
|
||||||
|
function parseTaskSandboxKey(key: readonly string[]): { workspaceId: string; taskId: string } {
|
||||||
|
if (key.length !== 4 || key[0] !== "ws" || key[2] !== "sandbox") {
|
||||||
|
throw new Error(`Invalid task sandbox key: ${JSON.stringify(key)}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
workspaceId: key[1]!,
|
||||||
|
taskId: key[3]!,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function preferredDockerHost(): string {
|
||||||
|
if (process.env.FOUNDRY_DOCKER_HOST?.trim()) {
|
||||||
|
return process.env.FOUNDRY_DOCKER_HOST.trim();
|
||||||
|
}
|
||||||
|
|
||||||
|
return existsSync("/.dockerenv") ? "host.docker.internal" : "127.0.0.1";
|
||||||
|
}
|
||||||
|
|
||||||
|
function preferredPublicDockerHost(): string {
|
||||||
|
if (process.env.FOUNDRY_PUBLIC_SANDBOX_HOST?.trim()) {
|
||||||
|
return process.env.FOUNDRY_PUBLIC_SANDBOX_HOST.trim();
|
||||||
|
}
|
||||||
|
|
||||||
|
return "127.0.0.1";
|
||||||
|
}
|
||||||
|
|
||||||
|
function localSandboxAgentPort(): number {
|
||||||
|
const raw = process.env.FOUNDRY_LOCAL_SANDBOX_PORT?.trim() ?? process.env.HF_LOCAL_SANDBOX_PORT?.trim() ?? "";
|
||||||
|
const parsed = Number(raw);
|
||||||
|
if (Number.isInteger(parsed) && parsed > 0 && parsed <= 65535) {
|
||||||
|
return parsed;
|
||||||
|
}
|
||||||
|
return DEFAULT_LOCAL_SANDBOX_PORT;
|
||||||
|
}
|
||||||
|
|
||||||
|
function sandboxEnvPairs(): string[] {
|
||||||
|
const openAiApiKey = process.env.OPENAI_API_KEY;
|
||||||
|
const entries = [
|
||||||
|
["ANTHROPIC_API_KEY", process.env.ANTHROPIC_API_KEY],
|
||||||
|
["CLAUDE_API_KEY", process.env.CLAUDE_API_KEY ?? process.env.ANTHROPIC_API_KEY],
|
||||||
|
["OPENAI_API_KEY", openAiApiKey],
|
||||||
|
// Codex ACP prefers CODEX_API_KEY when present. In dev we want that to be the
|
||||||
|
// actual OpenAI API key, not an unrelated local Codex auth token.
|
||||||
|
["CODEX_API_KEY", openAiApiKey ?? process.env.CODEX_API_KEY],
|
||||||
|
["GH_TOKEN", process.env.GH_TOKEN ?? process.env.GITHUB_TOKEN],
|
||||||
|
["GITHUB_TOKEN", process.env.GITHUB_TOKEN ?? process.env.GH_TOKEN],
|
||||||
|
["E2B_API_KEY", process.env.E2B_API_KEY],
|
||||||
|
];
|
||||||
|
|
||||||
|
return entries
|
||||||
|
.filter((entry): entry is [string, string] => typeof entry[1] === "string" && entry[1].trim().length > 0)
|
||||||
|
.map(([key, value]) => `${key}=${value}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
function sandboxEnvObject(): Record<string, string> {
|
||||||
|
return Object.fromEntries(
|
||||||
|
sandboxEnvPairs().map((entry) => {
|
||||||
|
const [key, ...rest] = entry.split("=");
|
||||||
|
return [key!, rest.join("=")];
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function modeIdForAgent(agent?: string | null): string | null {
|
||||||
|
switch (agent) {
|
||||||
|
case "codex":
|
||||||
|
return "full-access";
|
||||||
|
case "claude":
|
||||||
|
return "acceptEdits";
|
||||||
|
default:
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function getPublishedDockerPort(sandboxId: string, containerPort: number): Promise<number> {
|
||||||
|
const info = await dockerClient.getContainer(sandboxId).inspect();
|
||||||
|
const hostPort = info.NetworkSettings?.Ports?.[`${containerPort}/tcp`]?.[0]?.HostPort;
|
||||||
|
if (!hostPort) {
|
||||||
|
throw new Error(`docker sandbox-agent port ${containerPort} is not published`);
|
||||||
|
}
|
||||||
|
return Number(hostPort);
|
||||||
|
}
|
||||||
|
|
||||||
|
function createLocalSandboxProvider(image: string): any {
|
||||||
|
const agentPort = localSandboxAgentPort();
|
||||||
|
const backendHost = preferredDockerHost();
|
||||||
|
const publicHost = preferredPublicDockerHost();
|
||||||
|
|
||||||
|
return {
|
||||||
|
name: "docker",
|
||||||
|
|
||||||
|
async create(_context: any): Promise<string> {
|
||||||
|
const container = await dockerClient.createContainer({
|
||||||
|
Image: image,
|
||||||
|
Cmd: ["server", "--no-token", "--host", "0.0.0.0", "--port", String(agentPort)],
|
||||||
|
Env: sandboxEnvPairs(),
|
||||||
|
ExposedPorts: {
|
||||||
|
[`${agentPort}/tcp`]: {},
|
||||||
|
},
|
||||||
|
HostConfig: {
|
||||||
|
AutoRemove: true,
|
||||||
|
PortBindings: {
|
||||||
|
[`${agentPort}/tcp`]: [{ HostPort: "0" }],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
await container.start();
|
||||||
|
return container.id;
|
||||||
|
},
|
||||||
|
|
||||||
|
async destroy(sandboxId: string): Promise<void> {
|
||||||
|
const container = dockerClient.getContainer(sandboxId);
|
||||||
|
try {
|
||||||
|
await container.stop({ t: 5 });
|
||||||
|
} catch {}
|
||||||
|
try {
|
||||||
|
await container.remove({ force: true });
|
||||||
|
} catch {}
|
||||||
|
},
|
||||||
|
|
||||||
|
async getUrl(sandboxId: string): Promise<string> {
|
||||||
|
const hostPort = await getPublishedDockerPort(sandboxId, agentPort);
|
||||||
|
return `http://${publicHost}:${hostPort}`;
|
||||||
|
},
|
||||||
|
|
||||||
|
async connectAgent(sandboxId: string, connectOptions: any): Promise<any> {
|
||||||
|
const hostPort = await getPublishedDockerPort(sandboxId, agentPort);
|
||||||
|
return await SandboxAgent.connect({
|
||||||
|
baseUrl: `http://${backendHost}:${hostPort}`,
|
||||||
|
...connectOptions,
|
||||||
|
});
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function sanitizeActorResult(value: unknown, seen = new WeakSet<object>()): unknown {
|
||||||
|
if (typeof value === "function" || value === undefined) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (value && typeof value === "object") {
|
||||||
|
const maybeToRecord = (value as { toRecord?: unknown }).toRecord;
|
||||||
|
if (typeof maybeToRecord === "function") {
|
||||||
|
return sanitizeActorResult(maybeToRecord.call(value), seen);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (value === null || typeof value !== "object") {
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (value instanceof Date) {
|
||||||
|
return value.toISOString();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Array.isArray(value)) {
|
||||||
|
return value.map((entry) => sanitizeActorResult(entry, seen)).filter((entry) => entry !== undefined);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (seen.has(value)) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
seen.add(value);
|
||||||
|
|
||||||
|
const next: Record<string, unknown> = {};
|
||||||
|
for (const [key, entry] of Object.entries(value)) {
|
||||||
|
const sanitized = sanitizeActorResult(entry, seen);
|
||||||
|
if (sanitized !== undefined) {
|
||||||
|
next[key] = sanitized;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return next;
|
||||||
|
}
|
||||||
|
|
||||||
|
const baseTaskSandbox = sandboxActor({
|
||||||
|
createProvider: async (c) => {
|
||||||
|
const { config } = getActorRuntimeContext();
|
||||||
|
const { workspaceId, taskId } = parseTaskSandboxKey(c.key);
|
||||||
|
const workspace = await c.client().workspace.getOrCreate(workspaceKey(workspaceId), {
|
||||||
|
createWithInput: workspaceId,
|
||||||
|
});
|
||||||
|
const task = await workspace.getTask({ workspaceId, taskId });
|
||||||
|
const providerId = resolveSandboxProviderId(config, task.providerId);
|
||||||
|
|
||||||
|
if (providerId === "e2b") {
|
||||||
|
return e2b({
|
||||||
|
create: () => ({
|
||||||
|
template: config.providers.e2b.template ?? "sandbox-agent-full-0.3.x",
|
||||||
|
envs: sandboxEnvObject(),
|
||||||
|
}),
|
||||||
|
installAgents: ["claude", "codex"],
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return createLocalSandboxProvider(config.providers.local.image ?? process.env.HF_LOCAL_SANDBOX_IMAGE ?? DEFAULT_LOCAL_SANDBOX_IMAGE);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
async function broadcastProcesses(c: any, actions: Record<string, (...args: any[]) => Promise<any>>): Promise<void> {
|
||||||
|
try {
|
||||||
|
const listed = await actions.listProcesses(c);
|
||||||
|
c.broadcast("processesUpdated", {
|
||||||
|
type: "processesUpdated",
|
||||||
|
processes: listed.processes ?? [],
|
||||||
|
});
|
||||||
|
} catch {
|
||||||
|
// Process broadcasts are best-effort. Callers still receive the primary action result.
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function providerForConnection(c: any): Promise<any | null> {
|
||||||
|
if (c.state.sandboxDestroyed || !c.state.sandboxId) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (c.vars.provider) {
|
||||||
|
return c.vars.provider;
|
||||||
|
}
|
||||||
|
|
||||||
|
const providerFactory = baseTaskSandbox.config.actions as Record<string, unknown>;
|
||||||
|
void providerFactory;
|
||||||
|
const { config } = getActorRuntimeContext();
|
||||||
|
const { workspaceId, taskId } = parseTaskSandboxKey(c.key);
|
||||||
|
const workspace = await c.client().workspace.getOrCreate(workspaceKey(workspaceId), {
|
||||||
|
createWithInput: workspaceId,
|
||||||
|
});
|
||||||
|
const task = await workspace.getTask({ workspaceId, taskId });
|
||||||
|
const providerId = resolveSandboxProviderId(config, task.providerId);
|
||||||
|
|
||||||
|
const provider =
|
||||||
|
providerId === "e2b"
|
||||||
|
? e2b({
|
||||||
|
create: () => ({
|
||||||
|
template: config.providers.e2b.template ?? "sandbox-agent-full-0.3.x",
|
||||||
|
envs: sandboxEnvObject(),
|
||||||
|
}),
|
||||||
|
installAgents: ["claude", "codex"],
|
||||||
|
})
|
||||||
|
: createLocalSandboxProvider(config.providers.local.image ?? process.env.HF_LOCAL_SANDBOX_IMAGE ?? DEFAULT_LOCAL_SANDBOX_IMAGE);
|
||||||
|
|
||||||
|
c.vars.provider = provider;
|
||||||
|
return provider;
|
||||||
|
}
|
||||||
|
|
||||||
|
const baseActions = baseTaskSandbox.config.actions as Record<string, (c: any, ...args: any[]) => Promise<any>>;
|
||||||
|
|
||||||
|
export const taskSandbox = actor({
|
||||||
|
...baseTaskSandbox.config,
|
||||||
|
options: {
|
||||||
|
...baseTaskSandbox.config.options,
|
||||||
|
actionTimeout: 10 * 60_000,
|
||||||
|
},
|
||||||
|
actions: {
|
||||||
|
...baseActions,
|
||||||
|
async createSession(c: any, request: any): Promise<any> {
|
||||||
|
const session = await baseActions.createSession(c, request);
|
||||||
|
const sessionId = typeof request?.id === "string" && request.id.length > 0 ? request.id : session?.id;
|
||||||
|
const modeId = modeIdForAgent(request?.agent);
|
||||||
|
if (sessionId && modeId) {
|
||||||
|
try {
|
||||||
|
await baseActions.rawSendSessionMethod(c, sessionId, "session/set_mode", { modeId });
|
||||||
|
} catch {
|
||||||
|
// Session mode updates are best-effort.
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return sanitizeActorResult(session);
|
||||||
|
},
|
||||||
|
|
||||||
|
async resumeSession(c: any, sessionId: string): Promise<any> {
|
||||||
|
return sanitizeActorResult(await baseActions.resumeSession(c, sessionId));
|
||||||
|
},
|
||||||
|
|
||||||
|
async resumeOrCreateSession(c: any, request: any): Promise<any> {
|
||||||
|
return sanitizeActorResult(await baseActions.resumeOrCreateSession(c, request));
|
||||||
|
},
|
||||||
|
|
||||||
|
async getSession(c: any, sessionId: string): Promise<any> {
|
||||||
|
return sanitizeActorResult(await baseActions.getSession(c, sessionId));
|
||||||
|
},
|
||||||
|
|
||||||
|
async listSessions(c: any, query?: any): Promise<any> {
|
||||||
|
return sanitizeActorResult(await baseActions.listSessions(c, query));
|
||||||
|
},
|
||||||
|
|
||||||
|
async destroySession(c: any, sessionId: string): Promise<any> {
|
||||||
|
return sanitizeActorResult(await baseActions.destroySession(c, sessionId));
|
||||||
|
},
|
||||||
|
|
||||||
|
async sendPrompt(c: any, request: { sessionId: string; prompt: string }): Promise<any> {
|
||||||
|
const text = typeof request?.prompt === "string" ? request.prompt.trim() : "";
|
||||||
|
if (!text) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const session = await baseActions.resumeSession(c, request.sessionId);
|
||||||
|
if (!session || typeof session.prompt !== "function") {
|
||||||
|
throw new Error(`session '${request.sessionId}' not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return sanitizeActorResult(await session.prompt([{ type: "text", text }]));
|
||||||
|
},
|
||||||
|
|
||||||
|
async createProcess(c: any, request: any): Promise<any> {
|
||||||
|
const created = await baseActions.createProcess(c, request);
|
||||||
|
await broadcastProcesses(c, baseActions);
|
||||||
|
return created;
|
||||||
|
},
|
||||||
|
|
||||||
|
async runProcess(c: any, request: any): Promise<any> {
|
||||||
|
const result = await baseActions.runProcess(c, request);
|
||||||
|
await broadcastProcesses(c, baseActions);
|
||||||
|
return result;
|
||||||
|
},
|
||||||
|
|
||||||
|
async stopProcess(c: any, processId: string, query?: any): Promise<any> {
|
||||||
|
const stopped = await baseActions.stopProcess(c, processId, query);
|
||||||
|
await broadcastProcesses(c, baseActions);
|
||||||
|
return stopped;
|
||||||
|
},
|
||||||
|
|
||||||
|
async killProcess(c: any, processId: string, query?: any): Promise<any> {
|
||||||
|
const killed = await baseActions.killProcess(c, processId, query);
|
||||||
|
await broadcastProcesses(c, baseActions);
|
||||||
|
return killed;
|
||||||
|
},
|
||||||
|
|
||||||
|
async deleteProcess(c: any, processId: string): Promise<void> {
|
||||||
|
await baseActions.deleteProcess(c, processId);
|
||||||
|
await broadcastProcesses(c, baseActions);
|
||||||
|
},
|
||||||
|
|
||||||
|
async sandboxAgentConnection(c: any): Promise<{ endpoint: string; token?: string }> {
|
||||||
|
const provider = await providerForConnection(c);
|
||||||
|
if (!provider || !c.state.sandboxId) {
|
||||||
|
return { endpoint: "mock://terminal-unavailable" };
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
return {
|
||||||
|
endpoint: await provider.getUrl(c.state.sandboxId),
|
||||||
|
};
|
||||||
|
} catch {
|
||||||
|
return { endpoint: "mock://terminal-unavailable" };
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
async providerState(c: any): Promise<{ providerId: "e2b" | "local"; sandboxId: string; state: string; at: number }> {
|
||||||
|
const { config } = getActorRuntimeContext();
|
||||||
|
const { taskId } = parseTaskSandboxKey(c.key);
|
||||||
|
const at = Date.now();
|
||||||
|
const providerId = resolveSandboxProviderId(config, c.state.providerName === "e2b" ? "e2b" : c.state.providerName === "docker" ? "local" : null);
|
||||||
|
|
||||||
|
if (c.state.sandboxDestroyed) {
|
||||||
|
return { providerId, sandboxId: taskId, state: "destroyed", at };
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!c.state.sandboxId) {
|
||||||
|
return { providerId, sandboxId: taskId, state: "pending", at };
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const health = await baseActions.getHealth(c);
|
||||||
|
return {
|
||||||
|
providerId,
|
||||||
|
sandboxId: taskId,
|
||||||
|
state: health.status === "ok" ? "running" : "degraded",
|
||||||
|
at,
|
||||||
|
};
|
||||||
|
} catch {
|
||||||
|
return {
|
||||||
|
providerId,
|
||||||
|
sandboxId: taskId,
|
||||||
|
state: "error",
|
||||||
|
at,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
async repoCwd(): Promise<{ cwd: string }> {
|
||||||
|
return { cwd: SANDBOX_REPO_CWD };
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
export { SANDBOX_REPO_CWD };
|
||||||
|
|
@ -1,110 +0,0 @@
|
||||||
import { actor, queue } from "rivetkit";
|
|
||||||
import { workflow } from "rivetkit/workflow";
|
|
||||||
import type { ProviderId } from "@sandbox-agent/foundry-shared";
|
|
||||||
import { getTask, getSandboxInstance, selfTaskStatusSync } from "../handles.js";
|
|
||||||
import { logActorWarning, resolveErrorMessage, resolveErrorStack } from "../logging.js";
|
|
||||||
import { type PollingControlState, runWorkflowPollingLoop } from "../polling.js";
|
|
||||||
|
|
||||||
export interface TaskStatusSyncInput {
|
|
||||||
workspaceId: string;
|
|
||||||
repoId: string;
|
|
||||||
taskId: string;
|
|
||||||
providerId: ProviderId;
|
|
||||||
sandboxId: string;
|
|
||||||
sessionId: string;
|
|
||||||
intervalMs: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface SetIntervalCommand {
|
|
||||||
intervalMs: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface TaskStatusSyncState extends PollingControlState {
|
|
||||||
workspaceId: string;
|
|
||||||
repoId: string;
|
|
||||||
taskId: string;
|
|
||||||
providerId: ProviderId;
|
|
||||||
sandboxId: string;
|
|
||||||
sessionId: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
const CONTROL = {
|
|
||||||
start: "task.status_sync.control.start",
|
|
||||||
stop: "task.status_sync.control.stop",
|
|
||||||
setInterval: "task.status_sync.control.set_interval",
|
|
||||||
force: "task.status_sync.control.force",
|
|
||||||
} as const;
|
|
||||||
|
|
||||||
async function pollSessionStatus(c: { state: TaskStatusSyncState }): Promise<void> {
|
|
||||||
const sandboxInstance = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, c.state.sandboxId);
|
|
||||||
const status = await sandboxInstance.sessionStatus({ sessionId: c.state.sessionId });
|
|
||||||
|
|
||||||
const parent = getTask(c, c.state.workspaceId, c.state.repoId, c.state.taskId);
|
|
||||||
await parent.syncWorkbenchSessionStatus({
|
|
||||||
sessionId: c.state.sessionId,
|
|
||||||
status: status.status,
|
|
||||||
at: Date.now(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
export const taskStatusSync = actor({
|
|
||||||
queues: {
|
|
||||||
[CONTROL.start]: queue(),
|
|
||||||
[CONTROL.stop]: queue(),
|
|
||||||
[CONTROL.setInterval]: queue(),
|
|
||||||
[CONTROL.force]: queue(),
|
|
||||||
},
|
|
||||||
options: {
|
|
||||||
name: "Task Status Sync",
|
|
||||||
icon: "signal",
|
|
||||||
// Polling actors rely on timer-based wakeups; sleeping would pause the timer and stop polling.
|
|
||||||
noSleep: true,
|
|
||||||
},
|
|
||||||
createState: (_c, input: TaskStatusSyncInput): TaskStatusSyncState => ({
|
|
||||||
workspaceId: input.workspaceId,
|
|
||||||
repoId: input.repoId,
|
|
||||||
taskId: input.taskId,
|
|
||||||
providerId: input.providerId,
|
|
||||||
sandboxId: input.sandboxId,
|
|
||||||
sessionId: input.sessionId,
|
|
||||||
intervalMs: input.intervalMs,
|
|
||||||
running: true,
|
|
||||||
}),
|
|
||||||
actions: {
|
|
||||||
async start(c): Promise<void> {
|
|
||||||
const self = selfTaskStatusSync(c);
|
|
||||||
await self.send(CONTROL.start, {}, { wait: true, timeout: 15_000 });
|
|
||||||
},
|
|
||||||
|
|
||||||
async stop(c): Promise<void> {
|
|
||||||
const self = selfTaskStatusSync(c);
|
|
||||||
await self.send(CONTROL.stop, {}, { wait: true, timeout: 15_000 });
|
|
||||||
},
|
|
||||||
|
|
||||||
async setIntervalMs(c, payload: SetIntervalCommand): Promise<void> {
|
|
||||||
const self = selfTaskStatusSync(c);
|
|
||||||
await self.send(CONTROL.setInterval, payload, { wait: true, timeout: 15_000 });
|
|
||||||
},
|
|
||||||
|
|
||||||
async force(c): Promise<void> {
|
|
||||||
const self = selfTaskStatusSync(c);
|
|
||||||
await self.send(CONTROL.force, {}, { wait: true, timeout: 5 * 60_000 });
|
|
||||||
},
|
|
||||||
},
|
|
||||||
run: workflow(async (ctx) => {
|
|
||||||
await runWorkflowPollingLoop<TaskStatusSyncState>(ctx, {
|
|
||||||
loopName: "task-status-sync-loop",
|
|
||||||
control: CONTROL,
|
|
||||||
onPoll: async (loopCtx) => {
|
|
||||||
try {
|
|
||||||
await pollSessionStatus(loopCtx);
|
|
||||||
} catch (error) {
|
|
||||||
logActorWarning("task-status-sync", "poll failed", {
|
|
||||||
error: resolveErrorMessage(error),
|
|
||||||
stack: resolveErrorStack(error),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
},
|
|
||||||
});
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
|
|
@ -139,7 +139,7 @@ export const task = actor({
|
||||||
const self = selfTask(c);
|
const self = selfTask(c);
|
||||||
const result = await self.send(taskWorkflowQueueName("task.command.initialize"), cmd ?? {}, {
|
const result = await self.send(taskWorkflowQueueName("task.command.initialize"), cmd ?? {}, {
|
||||||
wait: true,
|
wait: true,
|
||||||
timeout: 60_000,
|
timeout: 5 * 60_000,
|
||||||
});
|
});
|
||||||
return expectQueueResponse<TaskRecord>(result);
|
return expectQueueResponse<TaskRecord>(result);
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -1,15 +1,15 @@
|
||||||
// @ts-nocheck
|
// @ts-nocheck
|
||||||
import { randomUUID } from "node:crypto";
|
import { randomUUID } from "node:crypto";
|
||||||
import { basename } from "node:path";
|
import { basename, dirname } from "node:path";
|
||||||
import { asc, eq } from "drizzle-orm";
|
import { asc, eq } from "drizzle-orm";
|
||||||
import { getActorRuntimeContext } from "../context.js";
|
import { getActorRuntimeContext } from "../context.js";
|
||||||
import { getOrCreateTaskStatusSync, getOrCreateProject, getOrCreateWorkspace, getSandboxInstance, selfTask } from "../handles.js";
|
import { getOrCreateProject, getOrCreateTaskSandbox, getOrCreateWorkspace, getTaskSandbox, selfTask } from "../handles.js";
|
||||||
|
import { SANDBOX_REPO_CWD } from "../sandbox/index.js";
|
||||||
|
import { resolveSandboxProviderId } from "../../sandbox-config.js";
|
||||||
import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js";
|
import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js";
|
||||||
import { task as taskTable, taskRuntime, taskWorkbenchSessions } from "./db/schema.js";
|
import { task as taskTable, taskRuntime, taskSandboxes, taskWorkbenchSessions } from "./db/schema.js";
|
||||||
import { getCurrentRecord } from "./workflow/common.js";
|
import { getCurrentRecord } from "./workflow/common.js";
|
||||||
|
|
||||||
const STATUS_SYNC_INTERVAL_MS = 1_000;
|
|
||||||
|
|
||||||
function emptyGitState() {
|
function emptyGitState() {
|
||||||
return {
|
return {
|
||||||
fileChanges: [],
|
fileChanges: [],
|
||||||
|
|
@ -56,18 +56,22 @@ async function ensureTaskRuntimeCacheColumns(c: any): Promise<void> {
|
||||||
}
|
}
|
||||||
|
|
||||||
function defaultModelForAgent(agentType: string | null | undefined) {
|
function defaultModelForAgent(agentType: string | null | undefined) {
|
||||||
return agentType === "codex" ? "gpt-4o" : "claude-sonnet-4";
|
return agentType === "codex" ? "gpt-5.3-codex" : "claude-sonnet-4";
|
||||||
|
}
|
||||||
|
|
||||||
|
function isCodexModel(model: string) {
|
||||||
|
return model.startsWith("gpt-") || model.startsWith("o");
|
||||||
}
|
}
|
||||||
|
|
||||||
function agentKindForModel(model: string) {
|
function agentKindForModel(model: string) {
|
||||||
if (model === "gpt-4o" || model === "o3") {
|
if (isCodexModel(model)) {
|
||||||
return "Codex";
|
return "Codex";
|
||||||
}
|
}
|
||||||
return "Claude";
|
return "Claude";
|
||||||
}
|
}
|
||||||
|
|
||||||
export function agentTypeForModel(model: string) {
|
export function agentTypeForModel(model: string) {
|
||||||
if (model === "gpt-4o" || model === "o3") {
|
if (isCodexModel(model)) {
|
||||||
return "codex";
|
return "codex";
|
||||||
}
|
}
|
||||||
return "claude";
|
return "claude";
|
||||||
|
|
@ -290,6 +294,121 @@ function shellFragment(parts: string[]): string {
|
||||||
return parts.join(" && ");
|
return parts.join(" && ");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function stableSandboxId(c: any): string {
|
||||||
|
return c.state.taskId;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function getTaskSandboxRuntime(
|
||||||
|
c: any,
|
||||||
|
record: any,
|
||||||
|
): Promise<{
|
||||||
|
sandbox: any;
|
||||||
|
sandboxId: string;
|
||||||
|
providerId: string;
|
||||||
|
switchTarget: string;
|
||||||
|
cwd: string;
|
||||||
|
}> {
|
||||||
|
const { config } = getActorRuntimeContext();
|
||||||
|
const sandboxId = stableSandboxId(c);
|
||||||
|
const providerId = resolveSandboxProviderId(config, record.providerId ?? c.state.providerId ?? null);
|
||||||
|
const sandbox = await getOrCreateTaskSandbox(c, c.state.workspaceId, sandboxId, {});
|
||||||
|
const actorId = typeof sandbox.resolve === "function" ? await sandbox.resolve().catch(() => null) : null;
|
||||||
|
const switchTarget = providerId === "local" ? `sandbox://local/${sandboxId}` : `sandbox://e2b/${sandboxId}`;
|
||||||
|
const now = Date.now();
|
||||||
|
|
||||||
|
await c.db
|
||||||
|
.insert(taskSandboxes)
|
||||||
|
.values({
|
||||||
|
sandboxId,
|
||||||
|
providerId,
|
||||||
|
sandboxActorId: typeof actorId === "string" ? actorId : null,
|
||||||
|
switchTarget,
|
||||||
|
cwd: SANDBOX_REPO_CWD,
|
||||||
|
statusMessage: "sandbox ready",
|
||||||
|
createdAt: now,
|
||||||
|
updatedAt: now,
|
||||||
|
})
|
||||||
|
.onConflictDoUpdate({
|
||||||
|
target: taskSandboxes.sandboxId,
|
||||||
|
set: {
|
||||||
|
providerId,
|
||||||
|
sandboxActorId: typeof actorId === "string" ? actorId : null,
|
||||||
|
switchTarget,
|
||||||
|
cwd: SANDBOX_REPO_CWD,
|
||||||
|
updatedAt: now,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.run();
|
||||||
|
|
||||||
|
await c.db
|
||||||
|
.update(taskRuntime)
|
||||||
|
.set({
|
||||||
|
activeSandboxId: sandboxId,
|
||||||
|
activeSwitchTarget: switchTarget,
|
||||||
|
activeCwd: SANDBOX_REPO_CWD,
|
||||||
|
updatedAt: now,
|
||||||
|
})
|
||||||
|
.where(eq(taskRuntime.id, 1))
|
||||||
|
.run();
|
||||||
|
|
||||||
|
return {
|
||||||
|
sandbox,
|
||||||
|
sandboxId,
|
||||||
|
providerId,
|
||||||
|
switchTarget,
|
||||||
|
cwd: SANDBOX_REPO_CWD,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async function ensureSandboxRepo(c: any, sandbox: any, record: any): Promise<void> {
|
||||||
|
if (!record.branchName) {
|
||||||
|
throw new Error("cannot prepare a sandbox repo before the task branch exists");
|
||||||
|
}
|
||||||
|
|
||||||
|
const { driver } = getActorRuntimeContext();
|
||||||
|
const auth = await resolveWorkspaceGithubAuth(c, c.state.workspaceId);
|
||||||
|
let repoLocalPath = c.state.repoLocalPath;
|
||||||
|
if (!repoLocalPath) {
|
||||||
|
const project = await getOrCreateProject(c, c.state.workspaceId, c.state.repoId, c.state.repoRemote);
|
||||||
|
const ensured = await project.ensure({ remoteUrl: c.state.repoRemote });
|
||||||
|
repoLocalPath = ensured.localPath;
|
||||||
|
c.state.repoLocalPath = repoLocalPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
const baseRef = await driver.git.remoteDefaultBaseRef(repoLocalPath);
|
||||||
|
const sandboxRepoRoot = dirname(SANDBOX_REPO_CWD);
|
||||||
|
const script = [
|
||||||
|
"set -euo pipefail",
|
||||||
|
`mkdir -p ${JSON.stringify(sandboxRepoRoot)}`,
|
||||||
|
"git config --global credential.helper '!f() { echo username=x-access-token; echo password=${GH_TOKEN:-$GITHUB_TOKEN}; }; f'",
|
||||||
|
`if [ ! -d ${JSON.stringify(`${SANDBOX_REPO_CWD}/.git`)} ]; then rm -rf ${JSON.stringify(SANDBOX_REPO_CWD)} && git clone ${JSON.stringify(
|
||||||
|
c.state.repoRemote,
|
||||||
|
)} ${JSON.stringify(SANDBOX_REPO_CWD)}; fi`,
|
||||||
|
`cd ${JSON.stringify(SANDBOX_REPO_CWD)}`,
|
||||||
|
"git fetch origin --prune",
|
||||||
|
`if git show-ref --verify --quiet refs/remotes/origin/${JSON.stringify(record.branchName).slice(1, -1)}; then target_ref=${JSON.stringify(
|
||||||
|
`origin/${record.branchName}`,
|
||||||
|
)}; else target_ref=${JSON.stringify(baseRef)}; fi`,
|
||||||
|
`git checkout -B ${JSON.stringify(record.branchName)} \"$target_ref\"`,
|
||||||
|
];
|
||||||
|
const result = await sandbox.runProcess({
|
||||||
|
command: "bash",
|
||||||
|
args: ["-lc", script.join("; ")],
|
||||||
|
cwd: "/",
|
||||||
|
env: auth?.githubToken
|
||||||
|
? {
|
||||||
|
GH_TOKEN: auth.githubToken,
|
||||||
|
GITHUB_TOKEN: auth.githubToken,
|
||||||
|
}
|
||||||
|
: undefined,
|
||||||
|
timeoutMs: 5 * 60_000,
|
||||||
|
});
|
||||||
|
|
||||||
|
if ((result.exitCode ?? 0) !== 0) {
|
||||||
|
throw new Error(`sandbox repo preparation failed (${result.exitCode ?? 1}): ${[result.stdout, result.stderr].filter(Boolean).join("")}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
async function executeInSandbox(
|
async function executeInSandbox(
|
||||||
c: any,
|
c: any,
|
||||||
params: {
|
params: {
|
||||||
|
|
@ -299,14 +418,20 @@ async function executeInSandbox(
|
||||||
label: string;
|
label: string;
|
||||||
},
|
},
|
||||||
): Promise<{ exitCode: number; result: string }> {
|
): Promise<{ exitCode: number; result: string }> {
|
||||||
const { providers } = getActorRuntimeContext();
|
const record = await ensureWorkbenchSeeded(c);
|
||||||
const provider = providers.get(c.state.providerId);
|
const runtime = await getTaskSandboxRuntime(c, record);
|
||||||
return await provider.executeCommand({
|
await ensureSandboxRepo(c, runtime.sandbox, record);
|
||||||
workspaceId: c.state.workspaceId,
|
const response = await runtime.sandbox.runProcess({
|
||||||
sandboxId: params.sandboxId,
|
command: "bash",
|
||||||
command: `bash -lc ${JSON.stringify(shellFragment([`cd ${JSON.stringify(params.cwd)}`, params.command]))}`,
|
args: ["-lc", shellFragment([`cd ${JSON.stringify(params.cwd)}`, params.command])],
|
||||||
label: params.label,
|
cwd: "/",
|
||||||
|
timeoutMs: 5 * 60_000,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
exitCode: response.exitCode ?? 0,
|
||||||
|
result: [response.stdout, response.stderr].filter(Boolean).join(""),
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
function parseGitStatus(output: string): Array<{ path: string; type: "M" | "A" | "D" }> {
|
function parseGitStatus(output: string): Array<{ path: string; type: "M" | "A" | "D" }> {
|
||||||
|
|
@ -500,13 +625,13 @@ async function writeCachedGitState(c: any, gitState: { fileChanges: Array<any>;
|
||||||
}
|
}
|
||||||
|
|
||||||
async function readSessionTranscript(c: any, record: any, sessionId: string) {
|
async function readSessionTranscript(c: any, record: any, sessionId: string) {
|
||||||
const sandboxId = record.activeSandboxId ?? record.sandboxes?.[0]?.sandboxId ?? null;
|
const sandboxId = record.activeSandboxId ?? stableSandboxId(c);
|
||||||
if (!sandboxId) {
|
if (!sandboxId) {
|
||||||
return [];
|
return [];
|
||||||
}
|
}
|
||||||
|
|
||||||
const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, sandboxId);
|
const sandbox = getTaskSandbox(c, c.state.workspaceId, sandboxId);
|
||||||
const page = await sandbox.listSessionEvents({
|
const page = await sandbox.getEvents({
|
||||||
sessionId,
|
sessionId,
|
||||||
limit: 100,
|
limit: 100,
|
||||||
});
|
});
|
||||||
|
|
@ -715,6 +840,24 @@ export async function buildSessionDetail(c: any, tabId: string): Promise<any> {
|
||||||
throw new Error(`Unknown workbench session tab: ${tabId}`);
|
throw new Error(`Unknown workbench session tab: ${tabId}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (!meta.sandboxSessionId) {
|
||||||
|
return buildSessionDetailFromMeta(record, meta);
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const transcript = await readSessionTranscript(c, record, meta.sandboxSessionId);
|
||||||
|
if (JSON.stringify(meta.transcript ?? []) !== JSON.stringify(transcript)) {
|
||||||
|
await writeSessionTranscript(c, meta.tabId, transcript);
|
||||||
|
return buildSessionDetailFromMeta(record, {
|
||||||
|
...meta,
|
||||||
|
transcript,
|
||||||
|
transcriptUpdatedAt: Date.now(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Session detail reads should degrade to cached transcript data if the live sandbox is unavailable.
|
||||||
|
}
|
||||||
|
|
||||||
return buildSessionDetailFromMeta(record, meta);
|
return buildSessionDetailFromMeta(record, meta);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -843,42 +986,16 @@ export async function renameWorkbenchBranch(c: any, value: string): Promise<void
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function createWorkbenchSession(c: any, model?: string): Promise<{ tabId: string }> {
|
export async function createWorkbenchSession(c: any, model?: string): Promise<{ tabId: string }> {
|
||||||
let record = await ensureWorkbenchSeeded(c);
|
|
||||||
if (record.activeSessionId) {
|
|
||||||
const existingSessions = await listSessionMetaRows(c);
|
|
||||||
if (existingSessions.length === 0) {
|
|
||||||
await ensureSessionMeta(c, {
|
|
||||||
tabId: record.activeSessionId,
|
|
||||||
sandboxSessionId: record.activeSessionId,
|
|
||||||
model: model ?? defaultModelForAgent(record.agentType),
|
|
||||||
sessionName: "Session 1",
|
|
||||||
status: "ready",
|
|
||||||
});
|
|
||||||
await broadcastTaskUpdate(c, { sessionId: record.activeSessionId });
|
|
||||||
return { tabId: record.activeSessionId };
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const tabId = `tab-${randomUUID()}`;
|
const tabId = `tab-${randomUUID()}`;
|
||||||
|
const record = await ensureWorkbenchSeeded(c);
|
||||||
await ensureSessionMeta(c, {
|
await ensureSessionMeta(c, {
|
||||||
tabId,
|
tabId,
|
||||||
model: model ?? defaultModelForAgent(record.agentType),
|
model: model ?? defaultModelForAgent(record.agentType),
|
||||||
|
sandboxSessionId: tabId,
|
||||||
status: record.activeSandboxId ? "pending_session_create" : "pending_provision",
|
status: record.activeSandboxId ? "pending_session_create" : "pending_provision",
|
||||||
created: false,
|
created: false,
|
||||||
});
|
});
|
||||||
|
await ensureWorkbenchSession(c, tabId, model);
|
||||||
const providerId = record.providerId ?? c.state.providerId ?? getActorRuntimeContext().providers.defaultProviderId();
|
|
||||||
const self = selfTask(c);
|
|
||||||
if (!record.activeSandboxId && !String(record.status ?? "").startsWith("init_")) {
|
|
||||||
await self.send("task.command.provision", { providerId }, { wait: false });
|
|
||||||
}
|
|
||||||
await self.send(
|
|
||||||
"task.command.workbench.ensure_session",
|
|
||||||
{ tabId, ...(model ? { model } : {}) },
|
|
||||||
{
|
|
||||||
wait: false,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
await broadcastTaskUpdate(c, { sessionId: tabId });
|
await broadcastTaskUpdate(c, { sessionId: tabId });
|
||||||
return { tabId };
|
return { tabId };
|
||||||
}
|
}
|
||||||
|
|
@ -890,39 +1007,7 @@ export async function ensureWorkbenchSession(c: any, tabId: string, model?: stri
|
||||||
}
|
}
|
||||||
|
|
||||||
const record = await ensureWorkbenchSeeded(c);
|
const record = await ensureWorkbenchSeeded(c);
|
||||||
if (!record.activeSandboxId) {
|
if (meta.sandboxSessionId && meta.status === "ready") {
|
||||||
await updateSessionMeta(c, tabId, {
|
|
||||||
status: "pending_provision",
|
|
||||||
errorMessage: null,
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!meta.sandboxSessionId && record.activeSessionId && meta.status === "pending_provision") {
|
|
||||||
const existingTabForActiveSession = await readSessionMetaBySandboxSessionId(c, record.activeSessionId);
|
|
||||||
if (existingTabForActiveSession && existingTabForActiveSession.tabId !== tabId) {
|
|
||||||
await updateSessionMeta(c, existingTabForActiveSession.tabId, {
|
|
||||||
closed: 1,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
await updateSessionMeta(c, tabId, {
|
|
||||||
sandboxSessionId: record.activeSessionId,
|
|
||||||
status: "ready",
|
|
||||||
errorMessage: null,
|
|
||||||
created: 1,
|
|
||||||
});
|
|
||||||
await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", {
|
|
||||||
sessionId: record.activeSessionId,
|
|
||||||
});
|
|
||||||
await broadcastTaskUpdate(c, { sessionId: tabId });
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (meta.sandboxSessionId) {
|
|
||||||
await updateSessionMeta(c, tabId, {
|
|
||||||
status: "ready",
|
|
||||||
errorMessage: null,
|
|
||||||
});
|
|
||||||
await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", {
|
await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", {
|
||||||
sessionId: meta.sandboxSessionId,
|
sessionId: meta.sandboxSessionId,
|
||||||
});
|
});
|
||||||
|
|
@ -930,40 +1015,31 @@ export async function ensureWorkbenchSession(c: any, tabId: string, model?: stri
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const activeSandbox = (record.sandboxes ?? []).find((candidate: any) => candidate.sandboxId === record.activeSandboxId) ?? null;
|
|
||||||
const cwd = activeSandbox?.cwd ?? record.sandboxes?.[0]?.cwd ?? null;
|
|
||||||
if (!cwd) {
|
|
||||||
await updateSessionMeta(c, tabId, {
|
|
||||||
status: "error",
|
|
||||||
errorMessage: "cannot create session without a sandbox cwd",
|
|
||||||
});
|
|
||||||
await broadcastTaskUpdate(c, { sessionId: tabId });
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
await updateSessionMeta(c, tabId, {
|
await updateSessionMeta(c, tabId, {
|
||||||
|
sandboxSessionId: meta.sandboxSessionId ?? tabId,
|
||||||
status: "pending_session_create",
|
status: "pending_session_create",
|
||||||
errorMessage: null,
|
errorMessage: null,
|
||||||
});
|
});
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId);
|
const runtime = await getTaskSandboxRuntime(c, record);
|
||||||
const created = await sandbox.createSession({
|
await ensureSandboxRepo(c, runtime.sandbox, record);
|
||||||
prompt: "",
|
await runtime.sandbox.createSession({
|
||||||
cwd,
|
id: meta.sandboxSessionId ?? tabId,
|
||||||
agent: agentTypeForModel(model ?? meta.model ?? defaultModelForAgent(record.agentType)),
|
agent: agentTypeForModel(model ?? meta.model ?? defaultModelForAgent(record.agentType)),
|
||||||
|
model: model ?? meta.model ?? defaultModelForAgent(record.agentType),
|
||||||
|
sessionInit: {
|
||||||
|
cwd: runtime.cwd,
|
||||||
|
},
|
||||||
});
|
});
|
||||||
if (!created.id) {
|
|
||||||
throw new Error(created.error ?? "sandbox-agent session creation failed");
|
|
||||||
}
|
|
||||||
|
|
||||||
await updateSessionMeta(c, tabId, {
|
await updateSessionMeta(c, tabId, {
|
||||||
sandboxSessionId: created.id,
|
sandboxSessionId: meta.sandboxSessionId ?? tabId,
|
||||||
status: "ready",
|
status: "ready",
|
||||||
errorMessage: null,
|
errorMessage: null,
|
||||||
});
|
});
|
||||||
await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", {
|
await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", {
|
||||||
sessionId: created.id,
|
sessionId: meta.sandboxSessionId ?? tabId,
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
await updateSessionMeta(c, tabId, {
|
await updateSessionMeta(c, tabId, {
|
||||||
|
|
@ -1030,26 +1106,17 @@ export async function changeWorkbenchModel(c: any, sessionId: string, model: str
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function sendWorkbenchMessage(c: any, sessionId: string, text: string, attachments: Array<any>): Promise<void> {
|
export async function sendWorkbenchMessage(c: any, sessionId: string, text: string, attachments: Array<any>): Promise<void> {
|
||||||
const record = await ensureWorkbenchSeeded(c);
|
|
||||||
if (!record.activeSandboxId) {
|
|
||||||
throw new Error("cannot send message without an active sandbox");
|
|
||||||
}
|
|
||||||
|
|
||||||
const meta = await requireReadySessionMeta(c, sessionId);
|
const meta = await requireReadySessionMeta(c, sessionId);
|
||||||
const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId);
|
const record = await ensureWorkbenchSeeded(c);
|
||||||
const prompt = [text.trim(), ...attachments.map((attachment: any) => `@ ${attachment.filePath}:${attachment.lineNumber}\n${attachment.lineContent}`)]
|
const runtime = await getTaskSandboxRuntime(c, record);
|
||||||
.filter(Boolean)
|
await ensureSandboxRepo(c, runtime.sandbox, record);
|
||||||
.join("\n\n");
|
const prompt = [text.trim(), ...attachments.map((attachment: any) => `@ ${attachment.filePath}:${attachment.lineNumber}\n${attachment.lineContent}`)].filter(
|
||||||
if (!prompt) {
|
Boolean,
|
||||||
|
);
|
||||||
|
if (prompt.length === 0) {
|
||||||
throw new Error("message text is required");
|
throw new Error("message text is required");
|
||||||
}
|
}
|
||||||
|
|
||||||
await sandbox.sendPrompt({
|
|
||||||
sessionId: meta.sandboxSessionId,
|
|
||||||
prompt,
|
|
||||||
notification: true,
|
|
||||||
});
|
|
||||||
|
|
||||||
await updateSessionMeta(c, sessionId, {
|
await updateSessionMeta(c, sessionId, {
|
||||||
unread: 0,
|
unread: 0,
|
||||||
created: 1,
|
created: 1,
|
||||||
|
|
@ -1068,32 +1135,28 @@ export async function sendWorkbenchMessage(c: any, sessionId: string, text: stri
|
||||||
.where(eq(taskRuntime.id, 1))
|
.where(eq(taskRuntime.id, 1))
|
||||||
.run();
|
.run();
|
||||||
|
|
||||||
const sync = await getOrCreateTaskStatusSync(c, c.state.workspaceId, c.state.repoId, c.state.taskId, record.activeSandboxId, meta.sandboxSessionId, {
|
await syncWorkbenchSessionStatus(c, meta.sandboxSessionId, "running", Date.now());
|
||||||
workspaceId: c.state.workspaceId,
|
|
||||||
repoId: c.state.repoId,
|
try {
|
||||||
taskId: c.state.taskId,
|
await runtime.sandbox.sendPrompt({
|
||||||
providerId: c.state.providerId,
|
|
||||||
sandboxId: record.activeSandboxId,
|
|
||||||
sessionId: meta.sandboxSessionId,
|
sessionId: meta.sandboxSessionId,
|
||||||
intervalMs: STATUS_SYNC_INTERVAL_MS,
|
prompt: prompt.join("\n\n"),
|
||||||
});
|
});
|
||||||
await sync.setIntervalMs({ intervalMs: STATUS_SYNC_INTERVAL_MS });
|
await syncWorkbenchSessionStatus(c, meta.sandboxSessionId, "idle", Date.now());
|
||||||
await sync.start();
|
} catch (error) {
|
||||||
await sync.force();
|
await updateSessionMeta(c, sessionId, {
|
||||||
await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", {
|
status: "error",
|
||||||
sessionId: meta.sandboxSessionId,
|
errorMessage: error instanceof Error ? error.message : String(error),
|
||||||
});
|
});
|
||||||
await broadcastTaskUpdate(c, { sessionId });
|
await syncWorkbenchSessionStatus(c, meta.sandboxSessionId, "error", Date.now());
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function stopWorkbenchSession(c: any, sessionId: string): Promise<void> {
|
export async function stopWorkbenchSession(c: any, sessionId: string): Promise<void> {
|
||||||
const record = await ensureWorkbenchSeeded(c);
|
|
||||||
if (!record.activeSandboxId) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
const meta = await requireReadySessionMeta(c, sessionId);
|
const meta = await requireReadySessionMeta(c, sessionId);
|
||||||
const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId);
|
const sandbox = getTaskSandbox(c, c.state.workspaceId, stableSandboxId(c));
|
||||||
await sandbox.cancelSession({ sessionId: meta.sandboxSessionId });
|
await sandbox.destroySession(meta.sandboxSessionId);
|
||||||
await updateSessionMeta(c, sessionId, {
|
await updateSessionMeta(c, sessionId, {
|
||||||
thinkingSinceMs: null,
|
thinkingSinceMs: null,
|
||||||
});
|
});
|
||||||
|
|
@ -1177,9 +1240,9 @@ export async function closeWorkbenchSession(c: any, sessionId: string): Promise<
|
||||||
if (!meta) {
|
if (!meta) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (record.activeSandboxId && meta.sandboxSessionId) {
|
if (meta.sandboxSessionId) {
|
||||||
const sandbox = getSandboxInstance(c, c.state.workspaceId, c.state.providerId, record.activeSandboxId);
|
const sandbox = getTaskSandbox(c, c.state.workspaceId, stableSandboxId(c));
|
||||||
await sandbox.destroySession({ sessionId: meta.sandboxSessionId });
|
await sandbox.destroySession(meta.sandboxSessionId);
|
||||||
}
|
}
|
||||||
await updateSessionMeta(c, sessionId, {
|
await updateSessionMeta(c, sessionId, {
|
||||||
closed: 1,
|
closed: 1,
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,6 @@
|
||||||
// @ts-nocheck
|
// @ts-nocheck
|
||||||
import { eq } from "drizzle-orm";
|
import { eq } from "drizzle-orm";
|
||||||
import { getActorRuntimeContext } from "../../context.js";
|
import { getTaskSandbox } from "../../handles.js";
|
||||||
import { getOrCreateTaskStatusSync } from "../../handles.js";
|
|
||||||
import { logActorWarning, resolveErrorMessage } from "../../logging.js";
|
import { logActorWarning, resolveErrorMessage } from "../../logging.js";
|
||||||
import { task as taskTable, taskRuntime } from "../db/schema.js";
|
import { task as taskTable, taskRuntime } from "../db/schema.js";
|
||||||
import { TASK_ROW_ID, appendHistory, getCurrentRecord, setTaskState } from "./common.js";
|
import { TASK_ROW_ID, appendHistory, getCurrentRecord, setTaskState } from "./common.js";
|
||||||
|
|
@ -25,21 +24,27 @@ async function withTimeout<T>(promise: Promise<T>, timeoutMs: number, label: str
|
||||||
|
|
||||||
export async function handleAttachActivity(loopCtx: any, msg: any): Promise<void> {
|
export async function handleAttachActivity(loopCtx: any, msg: any): Promise<void> {
|
||||||
const record = await getCurrentRecord(loopCtx);
|
const record = await getCurrentRecord(loopCtx);
|
||||||
const { providers } = getActorRuntimeContext();
|
let target = record.sandboxes.find((sandbox: any) => sandbox.sandboxId === record.activeSandboxId)?.switchTarget ?? "";
|
||||||
const activeSandbox = record.activeSandboxId ? (record.sandboxes.find((sb: any) => sb.sandboxId === record.activeSandboxId) ?? null) : null;
|
|
||||||
const provider = providers.get(activeSandbox?.providerId ?? record.providerId);
|
if (record.activeSandboxId) {
|
||||||
const target = await provider.attachTarget({
|
try {
|
||||||
workspaceId: loopCtx.state.workspaceId,
|
const sandbox = getTaskSandbox(loopCtx, loopCtx.state.workspaceId, record.activeSandboxId);
|
||||||
sandboxId: record.activeSandboxId ?? "",
|
const connection = await sandbox.sandboxAgentConnection();
|
||||||
});
|
if (typeof connection?.endpoint === "string" && connection.endpoint.length > 0) {
|
||||||
|
target = connection.endpoint;
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Best effort; keep the last known switch target if the sandbox actor is unavailable.
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
await appendHistory(loopCtx, "task.attach", {
|
await appendHistory(loopCtx, "task.attach", {
|
||||||
target: target.target,
|
target,
|
||||||
sessionId: record.activeSessionId,
|
sessionId: record.activeSessionId,
|
||||||
});
|
});
|
||||||
|
|
||||||
await msg.complete({
|
await msg.complete({
|
||||||
target: target.target,
|
target,
|
||||||
sessionId: record.activeSessionId,
|
sessionId: record.activeSessionId,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
@ -71,63 +76,14 @@ export async function handleArchiveActivity(loopCtx: any, msg: any): Promise<voi
|
||||||
await setTaskState(loopCtx, "archive_stop_status_sync", "stopping status sync");
|
await setTaskState(loopCtx, "archive_stop_status_sync", "stopping status sync");
|
||||||
const record = await getCurrentRecord(loopCtx);
|
const record = await getCurrentRecord(loopCtx);
|
||||||
|
|
||||||
if (record.activeSandboxId && record.activeSessionId) {
|
|
||||||
try {
|
|
||||||
const sync = await getOrCreateTaskStatusSync(
|
|
||||||
loopCtx,
|
|
||||||
loopCtx.state.workspaceId,
|
|
||||||
loopCtx.state.repoId,
|
|
||||||
loopCtx.state.taskId,
|
|
||||||
record.activeSandboxId,
|
|
||||||
record.activeSessionId,
|
|
||||||
{
|
|
||||||
workspaceId: loopCtx.state.workspaceId,
|
|
||||||
repoId: loopCtx.state.repoId,
|
|
||||||
taskId: loopCtx.state.taskId,
|
|
||||||
providerId: record.providerId,
|
|
||||||
sandboxId: record.activeSandboxId,
|
|
||||||
sessionId: record.activeSessionId,
|
|
||||||
intervalMs: 2_000,
|
|
||||||
},
|
|
||||||
);
|
|
||||||
await withTimeout(sync.stop(), 15_000, "task status sync stop");
|
|
||||||
} catch (error) {
|
|
||||||
logActorWarning("task.commands", "failed to stop status sync during archive", {
|
|
||||||
workspaceId: loopCtx.state.workspaceId,
|
|
||||||
repoId: loopCtx.state.repoId,
|
|
||||||
taskId: loopCtx.state.taskId,
|
|
||||||
sandboxId: record.activeSandboxId,
|
|
||||||
sessionId: record.activeSessionId,
|
|
||||||
error: resolveErrorMessage(error),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (record.activeSandboxId) {
|
if (record.activeSandboxId) {
|
||||||
await setTaskState(loopCtx, "archive_release_sandbox", "releasing sandbox");
|
await setTaskState(loopCtx, "archive_release_sandbox", "releasing sandbox");
|
||||||
const { providers } = getActorRuntimeContext();
|
void withTimeout(getTaskSandbox(loopCtx, loopCtx.state.workspaceId, record.activeSandboxId).destroy(), 45_000, "sandbox destroy").catch((error) => {
|
||||||
const activeSandbox = record.sandboxes.find((sb: any) => sb.sandboxId === record.activeSandboxId) ?? null;
|
|
||||||
const provider = providers.get(activeSandbox?.providerId ?? record.providerId);
|
|
||||||
const workspaceId = loopCtx.state.workspaceId;
|
|
||||||
const repoId = loopCtx.state.repoId;
|
|
||||||
const taskId = loopCtx.state.taskId;
|
|
||||||
const sandboxId = record.activeSandboxId;
|
|
||||||
|
|
||||||
// Do not block archive finalization on provider stop. Some provider stop calls can
|
|
||||||
// run longer than the synchronous archive UX budget.
|
|
||||||
void withTimeout(
|
|
||||||
provider.releaseSandbox({
|
|
||||||
workspaceId,
|
|
||||||
sandboxId,
|
|
||||||
}),
|
|
||||||
45_000,
|
|
||||||
"provider releaseSandbox",
|
|
||||||
).catch((error) => {
|
|
||||||
logActorWarning("task.commands", "failed to release sandbox during archive", {
|
logActorWarning("task.commands", "failed to release sandbox during archive", {
|
||||||
workspaceId,
|
workspaceId: loopCtx.state.workspaceId,
|
||||||
repoId,
|
repoId: loopCtx.state.repoId,
|
||||||
taskId,
|
taskId: loopCtx.state.taskId,
|
||||||
sandboxId,
|
sandboxId: record.activeSandboxId,
|
||||||
error: resolveErrorMessage(error),
|
error: resolveErrorMessage(error),
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
@ -150,13 +106,7 @@ export async function killDestroySandboxActivity(loopCtx: any): Promise<void> {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const { providers } = getActorRuntimeContext();
|
await getTaskSandbox(loopCtx, loopCtx.state.workspaceId, record.activeSandboxId).destroy();
|
||||||
const activeSandbox = record.sandboxes.find((sb: any) => sb.sandboxId === record.activeSandboxId) ?? null;
|
|
||||||
const provider = providers.get(activeSandbox?.providerId ?? record.providerId);
|
|
||||||
await provider.destroySandbox({
|
|
||||||
workspaceId: loopCtx.state.workspaceId,
|
|
||||||
sandboxId: record.activeSandboxId,
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function killWriteDbActivity(loopCtx: any, msg: any): Promise<void> {
|
export async function killWriteDbActivity(loopCtx: any, msg: any): Promise<void> {
|
||||||
|
|
|
||||||
|
|
@ -1,21 +1,13 @@
|
||||||
import { Loop } from "rivetkit/workflow";
|
import { Loop } from "rivetkit/workflow";
|
||||||
import { getActorRuntimeContext } from "../../context.js";
|
|
||||||
import { logActorWarning, resolveErrorMessage } from "../../logging.js";
|
import { logActorWarning, resolveErrorMessage } from "../../logging.js";
|
||||||
import { getCurrentRecord } from "./common.js";
|
import { getCurrentRecord } from "./common.js";
|
||||||
import {
|
import {
|
||||||
initAssertNameActivity,
|
initAssertNameActivity,
|
||||||
initBootstrapDbActivity,
|
initBootstrapDbActivity,
|
||||||
initCompleteActivity,
|
initCompleteActivity,
|
||||||
initCreateSandboxActivity,
|
|
||||||
initCreateSessionActivity,
|
|
||||||
initEnqueueProvisionActivity,
|
initEnqueueProvisionActivity,
|
||||||
initEnsureAgentActivity,
|
|
||||||
initEnsureNameActivity,
|
initEnsureNameActivity,
|
||||||
initExposeSandboxActivity,
|
|
||||||
initFailedActivity,
|
initFailedActivity,
|
||||||
initStartSandboxInstanceActivity,
|
|
||||||
initStartStatusSyncActivity,
|
|
||||||
initWriteDbActivity,
|
|
||||||
} from "./init.js";
|
} from "./init.js";
|
||||||
import {
|
import {
|
||||||
handleArchiveActivity,
|
handleArchiveActivity,
|
||||||
|
|
@ -27,7 +19,6 @@ import {
|
||||||
killDestroySandboxActivity,
|
killDestroySandboxActivity,
|
||||||
killWriteDbActivity,
|
killWriteDbActivity,
|
||||||
} from "./commands.js";
|
} from "./commands.js";
|
||||||
import { idleNotifyActivity, idleSubmitPrActivity, statusUpdateActivity } from "./status-sync.js";
|
|
||||||
import { TASK_QUEUE_NAMES } from "./queue.js";
|
import { TASK_QUEUE_NAMES } from "./queue.js";
|
||||||
import {
|
import {
|
||||||
changeWorkbenchModel,
|
changeWorkbenchModel,
|
||||||
|
|
@ -63,7 +54,6 @@ const commandHandlers: Record<TaskQueueName, WorkflowHandler> = {
|
||||||
await loopCtx.step("init-enqueue-provision", async () => initEnqueueProvisionActivity(loopCtx, body));
|
await loopCtx.step("init-enqueue-provision", async () => initEnqueueProvisionActivity(loopCtx, body));
|
||||||
await loopCtx.removed("init-dispatch-provision-v2", "step");
|
await loopCtx.removed("init-dispatch-provision-v2", "step");
|
||||||
const currentRecord = await loopCtx.step("init-read-current-record", async () => getCurrentRecord(loopCtx));
|
const currentRecord = await loopCtx.step("init-read-current-record", async () => getCurrentRecord(loopCtx));
|
||||||
|
|
||||||
try {
|
try {
|
||||||
await msg.complete(currentRecord);
|
await msg.complete(currentRecord);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|
@ -74,38 +64,23 @@ const commandHandlers: Record<TaskQueueName, WorkflowHandler> = {
|
||||||
},
|
},
|
||||||
|
|
||||||
"task.command.provision": async (loopCtx, msg) => {
|
"task.command.provision": async (loopCtx, msg) => {
|
||||||
const body = msg.body;
|
|
||||||
await loopCtx.removed("init-failed", "step");
|
await loopCtx.removed("init-failed", "step");
|
||||||
await loopCtx.removed("init-failed-v2", "step");
|
await loopCtx.removed("init-failed-v2", "step");
|
||||||
try {
|
try {
|
||||||
await loopCtx.step("init-ensure-name", async () => initEnsureNameActivity(loopCtx));
|
await loopCtx.step({
|
||||||
|
name: "init-ensure-name",
|
||||||
|
timeout: 5 * 60_000,
|
||||||
|
run: async () => initEnsureNameActivity(loopCtx),
|
||||||
|
});
|
||||||
await loopCtx.step("init-assert-name", async () => initAssertNameActivity(loopCtx));
|
await loopCtx.step("init-assert-name", async () => initAssertNameActivity(loopCtx));
|
||||||
|
await loopCtx.removed("init-create-sandbox", "step");
|
||||||
const sandbox = await loopCtx.step({
|
await loopCtx.removed("init-ensure-agent", "step");
|
||||||
name: "init-create-sandbox",
|
await loopCtx.removed("init-start-sandbox-instance", "step");
|
||||||
timeout: 180_000,
|
await loopCtx.removed("init-expose-sandbox", "step");
|
||||||
run: async () => initCreateSandboxActivity(loopCtx, body),
|
await loopCtx.removed("init-create-session", "step");
|
||||||
});
|
await loopCtx.removed("init-write-db", "step");
|
||||||
const agent = await loopCtx.step({
|
await loopCtx.removed("init-start-status-sync", "step");
|
||||||
name: "init-ensure-agent",
|
await loopCtx.step("init-complete", async () => initCompleteActivity(loopCtx, msg.body));
|
||||||
timeout: 180_000,
|
|
||||||
run: async () => initEnsureAgentActivity(loopCtx, body, sandbox),
|
|
||||||
});
|
|
||||||
const sandboxInstanceReady = await loopCtx.step({
|
|
||||||
name: "init-start-sandbox-instance",
|
|
||||||
timeout: 60_000,
|
|
||||||
run: async () => initStartSandboxInstanceActivity(loopCtx, body, sandbox, agent),
|
|
||||||
});
|
|
||||||
await loopCtx.step("init-expose-sandbox", async () => initExposeSandboxActivity(loopCtx, body, sandbox, sandboxInstanceReady));
|
|
||||||
const session = await loopCtx.step({
|
|
||||||
name: "init-create-session",
|
|
||||||
timeout: 180_000,
|
|
||||||
run: async () => initCreateSessionActivity(loopCtx, body, sandbox, sandboxInstanceReady),
|
|
||||||
});
|
|
||||||
|
|
||||||
await loopCtx.step("init-write-db", async () => initWriteDbActivity(loopCtx, body, sandbox, session, sandboxInstanceReady));
|
|
||||||
await loopCtx.step("init-start-status-sync", async () => initStartStatusSyncActivity(loopCtx, body, sandbox, session));
|
|
||||||
await loopCtx.step("init-complete", async () => initCompleteActivity(loopCtx, body, sandbox, session));
|
|
||||||
await msg.complete({ ok: true });
|
await msg.complete({ ok: true });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
await loopCtx.step("init-failed-v3", async () => initFailedActivity(loopCtx, error));
|
await loopCtx.step("init-failed-v3", async () => initFailedActivity(loopCtx, error));
|
||||||
|
|
@ -172,7 +147,7 @@ const commandHandlers: Record<TaskQueueName, WorkflowHandler> = {
|
||||||
try {
|
try {
|
||||||
const created = await loopCtx.step({
|
const created = await loopCtx.step({
|
||||||
name: "workbench-create-session",
|
name: "workbench-create-session",
|
||||||
timeout: 30_000,
|
timeout: 5 * 60_000,
|
||||||
run: async () => createWorkbenchSession(loopCtx, msg.body?.model),
|
run: async () => createWorkbenchSession(loopCtx, msg.body?.model),
|
||||||
});
|
});
|
||||||
await msg.complete(created);
|
await msg.complete(created);
|
||||||
|
|
@ -277,18 +252,6 @@ const commandHandlers: Record<TaskQueueName, WorkflowHandler> = {
|
||||||
});
|
});
|
||||||
await msg.complete({ ok: true });
|
await msg.complete({ ok: true });
|
||||||
},
|
},
|
||||||
|
|
||||||
"task.status_sync.result": async (loopCtx, msg) => {
|
|
||||||
const transitionedToIdle = await loopCtx.step("status-update", async () => statusUpdateActivity(loopCtx, msg.body));
|
|
||||||
|
|
||||||
if (transitionedToIdle) {
|
|
||||||
const { config } = getActorRuntimeContext();
|
|
||||||
if (config.auto_submit) {
|
|
||||||
await loopCtx.step("idle-submit-pr", async () => idleSubmitPrActivity(loopCtx));
|
|
||||||
}
|
|
||||||
await loopCtx.step("idle-notify", async () => idleNotifyActivity(loopCtx));
|
|
||||||
}
|
|
||||||
},
|
|
||||||
};
|
};
|
||||||
|
|
||||||
export async function runTaskWorkflow(ctx: any): Promise<void> {
|
export async function runTaskWorkflow(ctx: any): Promise<void> {
|
||||||
|
|
|
||||||
|
|
@ -1,39 +1,14 @@
|
||||||
// @ts-nocheck
|
// @ts-nocheck
|
||||||
import { desc, eq } from "drizzle-orm";
|
import { eq } from "drizzle-orm";
|
||||||
import { resolveCreateFlowDecision } from "../../../services/create-flow.js";
|
import { resolveCreateFlowDecision } from "../../../services/create-flow.js";
|
||||||
import { resolveWorkspaceGithubAuth } from "../../../services/github-auth.js";
|
import { resolveWorkspaceGithubAuth } from "../../../services/github-auth.js";
|
||||||
import { getActorRuntimeContext } from "../../context.js";
|
import { getActorRuntimeContext } from "../../context.js";
|
||||||
import { getOrCreateTaskStatusSync, getOrCreateHistory, getOrCreateProject, getOrCreateSandboxInstance, getSandboxInstance, selfTask } from "../../handles.js";
|
import { getOrCreateHistory, getOrCreateProject, selfTask } from "../../handles.js";
|
||||||
import { logActorWarning, resolveErrorMessage } from "../../logging.js";
|
import { logActorWarning, resolveErrorMessage } from "../../logging.js";
|
||||||
import { task as taskTable, taskRuntime, taskSandboxes } from "../db/schema.js";
|
import { defaultSandboxProviderId } from "../../../sandbox-config.js";
|
||||||
import { TASK_ROW_ID, appendHistory, buildAgentPrompt, collectErrorMessages, resolveErrorDetail, setTaskState } from "./common.js";
|
import { task as taskTable, taskRuntime } from "../db/schema.js";
|
||||||
|
import { TASK_ROW_ID, appendHistory, collectErrorMessages, resolveErrorDetail, setTaskState } from "./common.js";
|
||||||
import { taskWorkflowQueueName } from "./queue.js";
|
import { taskWorkflowQueueName } from "./queue.js";
|
||||||
import { enqueuePendingWorkbenchSessions } from "../workbench.js";
|
|
||||||
|
|
||||||
const DEFAULT_INIT_CREATE_SANDBOX_ACTIVITY_TIMEOUT_MS = 180_000;
|
|
||||||
|
|
||||||
function getInitCreateSandboxActivityTimeoutMs(): number {
|
|
||||||
const raw = process.env.HF_INIT_CREATE_SANDBOX_ACTIVITY_TIMEOUT_MS;
|
|
||||||
if (!raw) {
|
|
||||||
return DEFAULT_INIT_CREATE_SANDBOX_ACTIVITY_TIMEOUT_MS;
|
|
||||||
}
|
|
||||||
const parsed = Number(raw);
|
|
||||||
if (!Number.isFinite(parsed) || parsed <= 0) {
|
|
||||||
return DEFAULT_INIT_CREATE_SANDBOX_ACTIVITY_TIMEOUT_MS;
|
|
||||||
}
|
|
||||||
return Math.floor(parsed);
|
|
||||||
}
|
|
||||||
|
|
||||||
function debugInit(loopCtx: any, message: string, context?: Record<string, unknown>): void {
|
|
||||||
loopCtx.log.debug({
|
|
||||||
msg: message,
|
|
||||||
scope: "task.init",
|
|
||||||
workspaceId: loopCtx.state.workspaceId,
|
|
||||||
repoId: loopCtx.state.repoId,
|
|
||||||
taskId: loopCtx.state.taskId,
|
|
||||||
...(context ?? {}),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async function ensureTaskRuntimeCacheColumns(db: any): Promise<void> {
|
async function ensureTaskRuntimeCacheColumns(db: any): Promise<void> {
|
||||||
await db.execute(`ALTER TABLE task_runtime ADD COLUMN git_state_json text`).catch(() => {});
|
await db.execute(`ALTER TABLE task_runtime ADD COLUMN git_state_json text`).catch(() => {});
|
||||||
|
|
@ -42,35 +17,15 @@ async function ensureTaskRuntimeCacheColumns(db: any): Promise<void> {
|
||||||
await db.execute(`ALTER TABLE task_runtime ADD COLUMN provision_stage_updated_at integer`).catch(() => {});
|
await db.execute(`ALTER TABLE task_runtime ADD COLUMN provision_stage_updated_at integer`).catch(() => {});
|
||||||
}
|
}
|
||||||
|
|
||||||
async function withActivityTimeout<T>(timeoutMs: number, label: string, run: () => Promise<T>): Promise<T> {
|
|
||||||
let timer: ReturnType<typeof setTimeout> | null = null;
|
|
||||||
try {
|
|
||||||
return await Promise.race([
|
|
||||||
run(),
|
|
||||||
new Promise<T>((_, reject) => {
|
|
||||||
timer = setTimeout(() => {
|
|
||||||
reject(new Error(`${label} timed out after ${timeoutMs}ms`));
|
|
||||||
}, timeoutMs);
|
|
||||||
}),
|
|
||||||
]);
|
|
||||||
} finally {
|
|
||||||
if (timer) {
|
|
||||||
clearTimeout(timer);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise<void> {
|
export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise<void> {
|
||||||
const providerId = body?.providerId ?? loopCtx.state.providerId;
|
|
||||||
const { config } = getActorRuntimeContext();
|
const { config } = getActorRuntimeContext();
|
||||||
|
const providerId = body?.providerId ?? loopCtx.state.providerId ?? defaultSandboxProviderId(config);
|
||||||
const now = Date.now();
|
const now = Date.now();
|
||||||
const db = loopCtx.db;
|
|
||||||
const initialStatusMessage = loopCtx.state.branchName && loopCtx.state.title ? "provisioning" : "naming";
|
const initialStatusMessage = loopCtx.state.branchName && loopCtx.state.title ? "provisioning" : "naming";
|
||||||
|
|
||||||
try {
|
await ensureTaskRuntimeCacheColumns(loopCtx.db);
|
||||||
await ensureTaskRuntimeCacheColumns(db);
|
|
||||||
|
|
||||||
await db
|
await loopCtx.db
|
||||||
.insert(taskTable)
|
.insert(taskTable)
|
||||||
.values({
|
.values({
|
||||||
id: TASK_ROW_ID,
|
id: TASK_ROW_ID,
|
||||||
|
|
@ -97,7 +52,7 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise<
|
||||||
})
|
})
|
||||||
.run();
|
.run();
|
||||||
|
|
||||||
await db
|
await loopCtx.db
|
||||||
.insert(taskRuntime)
|
.insert(taskRuntime)
|
||||||
.values({
|
.values({
|
||||||
id: TASK_ROW_ID,
|
id: TASK_ROW_ID,
|
||||||
|
|
@ -126,10 +81,6 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise<
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
.run();
|
.run();
|
||||||
} catch (error) {
|
|
||||||
const detail = resolveErrorMessage(error);
|
|
||||||
throw new Error(`task init bootstrap db failed: ${detail}`);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Promise<void> {
|
export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Promise<void> {
|
||||||
|
|
@ -143,12 +94,13 @@ export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Pro
|
||||||
})
|
})
|
||||||
.where(eq(taskRuntime.id, TASK_ROW_ID))
|
.where(eq(taskRuntime.id, TASK_ROW_ID))
|
||||||
.run();
|
.run();
|
||||||
|
|
||||||
const self = selfTask(loopCtx);
|
const self = selfTask(loopCtx);
|
||||||
try {
|
try {
|
||||||
await self.send(taskWorkflowQueueName("task.command.provision"), body, {
|
await self.send(taskWorkflowQueueName("task.command.provision"), body, {
|
||||||
wait: false,
|
wait: false,
|
||||||
});
|
});
|
||||||
} catch (error: unknown) {
|
} catch (error) {
|
||||||
logActorWarning("task.init", "background provision command failed", {
|
logActorWarning("task.init", "background provision command failed", {
|
||||||
workspaceId: loopCtx.state.workspaceId,
|
workspaceId: loopCtx.state.workspaceId,
|
||||||
repoId: loopCtx.state.repoId,
|
repoId: loopCtx.state.repoId,
|
||||||
|
|
@ -196,13 +148,12 @@ export async function initEnsureNameActivity(loopCtx: any): Promise<void> {
|
||||||
error: resolveErrorMessage(error),
|
error: resolveErrorMessage(error),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
const remoteBranches = (await driver.git.listRemoteBranches(repoLocalPath, { githubToken: auth?.githubToken ?? null })).map(
|
const remoteBranches = (await driver.git.listRemoteBranches(repoLocalPath, { githubToken: auth?.githubToken ?? null })).map(
|
||||||
(branch: any) => branch.branchName,
|
(branch: any) => branch.branchName,
|
||||||
);
|
);
|
||||||
|
|
||||||
const project = await getOrCreateProject(loopCtx, loopCtx.state.workspaceId, loopCtx.state.repoId, loopCtx.state.repoRemote);
|
const project = await getOrCreateProject(loopCtx, loopCtx.state.workspaceId, loopCtx.state.repoId, loopCtx.state.repoRemote);
|
||||||
const reservedBranches = await project.listReservedBranches({});
|
const reservedBranches = await project.listReservedBranches({});
|
||||||
|
|
||||||
const resolved = resolveCreateFlowDecision({
|
const resolved = resolveCreateFlowDecision({
|
||||||
task: loopCtx.state.task,
|
task: loopCtx.state.task,
|
||||||
explicitTitle: loopCtx.state.explicitTitle ?? undefined,
|
explicitTitle: loopCtx.state.explicitTitle ?? undefined,
|
||||||
|
|
@ -256,388 +207,42 @@ export async function initAssertNameActivity(loopCtx: any): Promise<void> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function initCreateSandboxActivity(loopCtx: any, body: any): Promise<any> {
|
export async function initCompleteActivity(loopCtx: any, body: any): Promise<void> {
|
||||||
await setTaskState(loopCtx, "init_create_sandbox", "creating sandbox");
|
|
||||||
await loopCtx.db
|
|
||||||
.update(taskRuntime)
|
|
||||||
.set({
|
|
||||||
provisionStage: "sandbox_allocated",
|
|
||||||
provisionStageUpdatedAt: Date.now(),
|
|
||||||
updatedAt: Date.now(),
|
|
||||||
})
|
|
||||||
.where(eq(taskRuntime.id, TASK_ROW_ID))
|
|
||||||
.run();
|
|
||||||
const { providers } = getActorRuntimeContext();
|
|
||||||
const providerId = body?.providerId ?? loopCtx.state.providerId;
|
|
||||||
const provider = providers.get(providerId);
|
|
||||||
const timeoutMs = getInitCreateSandboxActivityTimeoutMs();
|
|
||||||
const startedAt = Date.now();
|
|
||||||
|
|
||||||
debugInit(loopCtx, "init_create_sandbox started", {
|
|
||||||
providerId,
|
|
||||||
timeoutMs,
|
|
||||||
supportsSessionReuse: provider.capabilities().supportsSessionReuse,
|
|
||||||
});
|
|
||||||
|
|
||||||
if (provider.capabilities().supportsSessionReuse) {
|
|
||||||
const runtime = await loopCtx.db.select({ activeSandboxId: taskRuntime.activeSandboxId }).from(taskRuntime).where(eq(taskRuntime.id, TASK_ROW_ID)).get();
|
|
||||||
|
|
||||||
const existing = await loopCtx.db
|
|
||||||
.select({ sandboxId: taskSandboxes.sandboxId })
|
|
||||||
.from(taskSandboxes)
|
|
||||||
.where(eq(taskSandboxes.providerId, providerId))
|
|
||||||
.orderBy(desc(taskSandboxes.updatedAt))
|
|
||||||
.limit(1)
|
|
||||||
.get();
|
|
||||||
|
|
||||||
const sandboxId = runtime?.activeSandboxId ?? existing?.sandboxId ?? null;
|
|
||||||
if (sandboxId) {
|
|
||||||
debugInit(loopCtx, "init_create_sandbox attempting resume", { sandboxId });
|
|
||||||
try {
|
|
||||||
const resumed = await withActivityTimeout(timeoutMs, "resumeSandbox", async () =>
|
|
||||||
provider.resumeSandbox({
|
|
||||||
workspaceId: loopCtx.state.workspaceId,
|
|
||||||
sandboxId,
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
|
|
||||||
debugInit(loopCtx, "init_create_sandbox resume succeeded", {
|
|
||||||
sandboxId: resumed.sandboxId,
|
|
||||||
durationMs: Date.now() - startedAt,
|
|
||||||
});
|
|
||||||
return resumed;
|
|
||||||
} catch (error) {
|
|
||||||
logActorWarning("task.init", "resume sandbox failed; creating a new sandbox", {
|
|
||||||
workspaceId: loopCtx.state.workspaceId,
|
|
||||||
repoId: loopCtx.state.repoId,
|
|
||||||
taskId: loopCtx.state.taskId,
|
|
||||||
sandboxId,
|
|
||||||
error: resolveErrorMessage(error),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
debugInit(loopCtx, "init_create_sandbox creating fresh sandbox", {
|
|
||||||
branchName: loopCtx.state.branchName,
|
|
||||||
});
|
|
||||||
|
|
||||||
try {
|
|
||||||
const auth = await resolveWorkspaceGithubAuth(loopCtx, loopCtx.state.workspaceId);
|
|
||||||
const sandbox = await withActivityTimeout(timeoutMs, "createSandbox", async () =>
|
|
||||||
provider.createSandbox({
|
|
||||||
workspaceId: loopCtx.state.workspaceId,
|
|
||||||
repoId: loopCtx.state.repoId,
|
|
||||||
repoRemote: loopCtx.state.repoRemote,
|
|
||||||
branchName: loopCtx.state.branchName,
|
|
||||||
taskId: loopCtx.state.taskId,
|
|
||||||
githubToken: auth?.githubToken ?? null,
|
|
||||||
debug: (message, context) => debugInit(loopCtx, message, context),
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
|
|
||||||
debugInit(loopCtx, "init_create_sandbox create succeeded", {
|
|
||||||
sandboxId: sandbox.sandboxId,
|
|
||||||
durationMs: Date.now() - startedAt,
|
|
||||||
});
|
|
||||||
return sandbox;
|
|
||||||
} catch (error) {
|
|
||||||
debugInit(loopCtx, "init_create_sandbox failed", {
|
|
||||||
durationMs: Date.now() - startedAt,
|
|
||||||
error: resolveErrorMessage(error),
|
|
||||||
});
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function initEnsureAgentActivity(loopCtx: any, body: any, sandbox: any): Promise<any> {
|
|
||||||
await setTaskState(loopCtx, "init_ensure_agent", "ensuring sandbox agent");
|
|
||||||
await loopCtx.db
|
|
||||||
.update(taskRuntime)
|
|
||||||
.set({
|
|
||||||
provisionStage: "agent_installing",
|
|
||||||
provisionStageUpdatedAt: Date.now(),
|
|
||||||
updatedAt: Date.now(),
|
|
||||||
})
|
|
||||||
.where(eq(taskRuntime.id, TASK_ROW_ID))
|
|
||||||
.run();
|
|
||||||
const { providers } = getActorRuntimeContext();
|
|
||||||
const providerId = body?.providerId ?? loopCtx.state.providerId;
|
|
||||||
const provider = providers.get(providerId);
|
|
||||||
return await provider.ensureSandboxAgent({
|
|
||||||
workspaceId: loopCtx.state.workspaceId,
|
|
||||||
sandboxId: sandbox.sandboxId,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function initStartSandboxInstanceActivity(loopCtx: any, body: any, sandbox: any, agent: any): Promise<any> {
|
|
||||||
await setTaskState(loopCtx, "init_start_sandbox_instance", "starting sandbox runtime");
|
|
||||||
await loopCtx.db
|
|
||||||
.update(taskRuntime)
|
|
||||||
.set({
|
|
||||||
provisionStage: "agent_starting",
|
|
||||||
provisionStageUpdatedAt: Date.now(),
|
|
||||||
updatedAt: Date.now(),
|
|
||||||
})
|
|
||||||
.where(eq(taskRuntime.id, TASK_ROW_ID))
|
|
||||||
.run();
|
|
||||||
try {
|
|
||||||
const providerId = body?.providerId ?? loopCtx.state.providerId;
|
|
||||||
const sandboxInstance = await getOrCreateSandboxInstance(loopCtx, loopCtx.state.workspaceId, providerId, sandbox.sandboxId, {
|
|
||||||
workspaceId: loopCtx.state.workspaceId,
|
|
||||||
providerId,
|
|
||||||
sandboxId: sandbox.sandboxId,
|
|
||||||
});
|
|
||||||
|
|
||||||
await sandboxInstance.ensure({
|
|
||||||
metadata: sandbox.metadata,
|
|
||||||
status: "ready",
|
|
||||||
agentEndpoint: agent.endpoint,
|
|
||||||
agentToken: agent.token,
|
|
||||||
});
|
|
||||||
|
|
||||||
const actorId = typeof (sandboxInstance as any).resolve === "function" ? await (sandboxInstance as any).resolve() : null;
|
|
||||||
|
|
||||||
return {
|
|
||||||
ok: true as const,
|
|
||||||
actorId: typeof actorId === "string" ? actorId : null,
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
const detail = error instanceof Error ? error.message : String(error);
|
|
||||||
return {
|
|
||||||
ok: false as const,
|
|
||||||
error: `sandbox-instance ensure failed: ${detail}`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function initCreateSessionActivity(loopCtx: any, body: any, sandbox: any, sandboxInstanceReady: any): Promise<any> {
|
|
||||||
await setTaskState(loopCtx, "init_create_session", "creating agent session");
|
|
||||||
await loopCtx.db
|
|
||||||
.update(taskRuntime)
|
|
||||||
.set({
|
|
||||||
provisionStage: "session_creating",
|
|
||||||
provisionStageUpdatedAt: Date.now(),
|
|
||||||
updatedAt: Date.now(),
|
|
||||||
})
|
|
||||||
.where(eq(taskRuntime.id, TASK_ROW_ID))
|
|
||||||
.run();
|
|
||||||
if (!sandboxInstanceReady.ok) {
|
|
||||||
return {
|
|
||||||
id: null,
|
|
||||||
status: "error",
|
|
||||||
error: sandboxInstanceReady.error ?? "sandbox instance is not ready",
|
|
||||||
} as const;
|
|
||||||
}
|
|
||||||
|
|
||||||
const { config } = getActorRuntimeContext();
|
|
||||||
const providerId = body?.providerId ?? loopCtx.state.providerId;
|
|
||||||
const sandboxInstance = getSandboxInstance(loopCtx, loopCtx.state.workspaceId, providerId, sandbox.sandboxId);
|
|
||||||
|
|
||||||
const cwd = sandbox.metadata && typeof (sandbox.metadata as any).cwd === "string" ? ((sandbox.metadata as any).cwd as string) : undefined;
|
|
||||||
|
|
||||||
return await sandboxInstance.createSession({
|
|
||||||
prompt: typeof loopCtx.state.initialPrompt === "string" ? loopCtx.state.initialPrompt : buildAgentPrompt(loopCtx.state.task),
|
|
||||||
cwd,
|
|
||||||
agent: (loopCtx.state.agentType ?? config.default_agent) as any,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function initExposeSandboxActivity(loopCtx: any, body: any, sandbox: any, sandboxInstanceReady?: { actorId?: string | null }): Promise<void> {
|
|
||||||
const providerId = body?.providerId ?? loopCtx.state.providerId;
|
|
||||||
const now = Date.now();
|
const now = Date.now();
|
||||||
const db = loopCtx.db;
|
|
||||||
const activeCwd = sandbox.metadata && typeof (sandbox.metadata as any).cwd === "string" ? ((sandbox.metadata as any).cwd as string) : null;
|
|
||||||
const sandboxActorId = typeof sandboxInstanceReady?.actorId === "string" && sandboxInstanceReady.actorId.length > 0 ? sandboxInstanceReady.actorId : null;
|
|
||||||
|
|
||||||
await db
|
|
||||||
.insert(taskSandboxes)
|
|
||||||
.values({
|
|
||||||
sandboxId: sandbox.sandboxId,
|
|
||||||
providerId,
|
|
||||||
sandboxActorId,
|
|
||||||
switchTarget: sandbox.switchTarget,
|
|
||||||
cwd: activeCwd,
|
|
||||||
statusMessage: "sandbox ready",
|
|
||||||
createdAt: now,
|
|
||||||
updatedAt: now,
|
|
||||||
})
|
|
||||||
.onConflictDoUpdate({
|
|
||||||
target: taskSandboxes.sandboxId,
|
|
||||||
set: {
|
|
||||||
providerId,
|
|
||||||
sandboxActorId,
|
|
||||||
switchTarget: sandbox.switchTarget,
|
|
||||||
cwd: activeCwd,
|
|
||||||
statusMessage: "sandbox ready",
|
|
||||||
updatedAt: now,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
.run();
|
|
||||||
|
|
||||||
await db
|
|
||||||
.update(taskRuntime)
|
|
||||||
.set({
|
|
||||||
activeSandboxId: sandbox.sandboxId,
|
|
||||||
activeSwitchTarget: sandbox.switchTarget,
|
|
||||||
activeCwd,
|
|
||||||
statusMessage: "sandbox ready",
|
|
||||||
updatedAt: now,
|
|
||||||
})
|
|
||||||
.where(eq(taskRuntime.id, TASK_ROW_ID))
|
|
||||||
.run();
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function initWriteDbActivity(
|
|
||||||
loopCtx: any,
|
|
||||||
body: any,
|
|
||||||
sandbox: any,
|
|
||||||
session: any,
|
|
||||||
sandboxInstanceReady?: { actorId?: string | null },
|
|
||||||
): Promise<void> {
|
|
||||||
await setTaskState(loopCtx, "init_write_db", "persisting task runtime");
|
|
||||||
const providerId = body?.providerId ?? loopCtx.state.providerId;
|
|
||||||
const { config } = getActorRuntimeContext();
|
const { config } = getActorRuntimeContext();
|
||||||
const now = Date.now();
|
const providerId = body?.providerId ?? loopCtx.state.providerId ?? defaultSandboxProviderId(config);
|
||||||
const db = loopCtx.db;
|
|
||||||
const sessionId = session?.id ?? null;
|
|
||||||
const sessionHealthy = Boolean(sessionId) && session?.status !== "error";
|
|
||||||
const activeSessionId = sessionHealthy ? sessionId : null;
|
|
||||||
const statusMessage = sessionHealthy ? "session created" : session?.status === "error" ? (session.error ?? "session create failed") : "session unavailable";
|
|
||||||
|
|
||||||
const activeCwd = sandbox.metadata && typeof (sandbox.metadata as any).cwd === "string" ? ((sandbox.metadata as any).cwd as string) : null;
|
|
||||||
const sandboxActorId = typeof sandboxInstanceReady?.actorId === "string" && sandboxInstanceReady.actorId.length > 0 ? sandboxInstanceReady.actorId : null;
|
|
||||||
|
|
||||||
await db
|
|
||||||
.update(taskTable)
|
|
||||||
.set({
|
|
||||||
providerId,
|
|
||||||
status: sessionHealthy ? "running" : "error",
|
|
||||||
agentType: loopCtx.state.agentType ?? config.default_agent,
|
|
||||||
updatedAt: now,
|
|
||||||
})
|
|
||||||
.where(eq(taskTable.id, TASK_ROW_ID))
|
|
||||||
.run();
|
|
||||||
|
|
||||||
await db
|
|
||||||
.insert(taskSandboxes)
|
|
||||||
.values({
|
|
||||||
sandboxId: sandbox.sandboxId,
|
|
||||||
providerId,
|
|
||||||
sandboxActorId,
|
|
||||||
switchTarget: sandbox.switchTarget,
|
|
||||||
cwd: activeCwd,
|
|
||||||
statusMessage,
|
|
||||||
createdAt: now,
|
|
||||||
updatedAt: now,
|
|
||||||
})
|
|
||||||
.onConflictDoUpdate({
|
|
||||||
target: taskSandboxes.sandboxId,
|
|
||||||
set: {
|
|
||||||
providerId,
|
|
||||||
sandboxActorId,
|
|
||||||
switchTarget: sandbox.switchTarget,
|
|
||||||
cwd: activeCwd,
|
|
||||||
statusMessage,
|
|
||||||
updatedAt: now,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
.run();
|
|
||||||
|
|
||||||
await db
|
|
||||||
.insert(taskRuntime)
|
|
||||||
.values({
|
|
||||||
id: TASK_ROW_ID,
|
|
||||||
activeSandboxId: sandbox.sandboxId,
|
|
||||||
activeSessionId,
|
|
||||||
activeSwitchTarget: sandbox.switchTarget,
|
|
||||||
activeCwd,
|
|
||||||
statusMessage,
|
|
||||||
provisionStage: sessionHealthy ? "ready" : "error",
|
|
||||||
provisionStageUpdatedAt: now,
|
|
||||||
updatedAt: now,
|
|
||||||
})
|
|
||||||
.onConflictDoUpdate({
|
|
||||||
target: taskRuntime.id,
|
|
||||||
set: {
|
|
||||||
activeSandboxId: sandbox.sandboxId,
|
|
||||||
activeSessionId,
|
|
||||||
activeSwitchTarget: sandbox.switchTarget,
|
|
||||||
activeCwd,
|
|
||||||
statusMessage,
|
|
||||||
provisionStage: sessionHealthy ? "ready" : "error",
|
|
||||||
provisionStageUpdatedAt: now,
|
|
||||||
updatedAt: now,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
.run();
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function initStartStatusSyncActivity(loopCtx: any, body: any, sandbox: any, session: any): Promise<void> {
|
|
||||||
const sessionId = session?.id ?? null;
|
|
||||||
if (!sessionId || session?.status === "error") {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
await setTaskState(loopCtx, "init_start_status_sync", "starting session status sync");
|
|
||||||
const providerId = body?.providerId ?? loopCtx.state.providerId;
|
|
||||||
const sync = await getOrCreateTaskStatusSync(loopCtx, loopCtx.state.workspaceId, loopCtx.state.repoId, loopCtx.state.taskId, sandbox.sandboxId, sessionId, {
|
|
||||||
workspaceId: loopCtx.state.workspaceId,
|
|
||||||
repoId: loopCtx.state.repoId,
|
|
||||||
taskId: loopCtx.state.taskId,
|
|
||||||
providerId,
|
|
||||||
sandboxId: sandbox.sandboxId,
|
|
||||||
sessionId,
|
|
||||||
intervalMs: 2_000,
|
|
||||||
});
|
|
||||||
|
|
||||||
await sync.start();
|
|
||||||
await sync.force();
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function initCompleteActivity(loopCtx: any, body: any, sandbox: any, session: any): Promise<void> {
|
|
||||||
const providerId = body?.providerId ?? loopCtx.state.providerId;
|
|
||||||
const sessionId = session?.id ?? null;
|
|
||||||
const sessionHealthy = Boolean(sessionId) && session?.status !== "error";
|
|
||||||
if (sessionHealthy) {
|
|
||||||
await setTaskState(loopCtx, "init_complete", "task initialized");
|
await setTaskState(loopCtx, "init_complete", "task initialized");
|
||||||
|
await loopCtx.db
|
||||||
|
.update(taskRuntime)
|
||||||
|
.set({
|
||||||
|
statusMessage: "ready",
|
||||||
|
provisionStage: "ready",
|
||||||
|
provisionStageUpdatedAt: now,
|
||||||
|
updatedAt: now,
|
||||||
|
})
|
||||||
|
.where(eq(taskRuntime.id, TASK_ROW_ID))
|
||||||
|
.run();
|
||||||
|
|
||||||
const history = await getOrCreateHistory(loopCtx, loopCtx.state.workspaceId, loopCtx.state.repoId);
|
const history = await getOrCreateHistory(loopCtx, loopCtx.state.workspaceId, loopCtx.state.repoId);
|
||||||
await history.append({
|
await history.append({
|
||||||
kind: "task.initialized",
|
kind: "task.initialized",
|
||||||
taskId: loopCtx.state.taskId,
|
taskId: loopCtx.state.taskId,
|
||||||
branchName: loopCtx.state.branchName,
|
branchName: loopCtx.state.branchName,
|
||||||
payload: { providerId, sandboxId: sandbox.sandboxId, sessionId },
|
payload: { providerId },
|
||||||
});
|
});
|
||||||
|
|
||||||
loopCtx.state.initialized = true;
|
loopCtx.state.initialized = true;
|
||||||
await enqueuePendingWorkbenchSessions(loopCtx);
|
|
||||||
const self = selfTask(loopCtx);
|
|
||||||
await self.send(taskWorkflowQueueName("task.command.workbench.refresh_derived"), {}, { wait: false });
|
|
||||||
if (sessionId) {
|
|
||||||
await self.send(taskWorkflowQueueName("task.command.workbench.refresh_session_transcript"), { sessionId }, { wait: false });
|
|
||||||
}
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const detail = session?.status === "error" ? (session.error ?? "session create failed") : "session unavailable";
|
|
||||||
await setTaskState(loopCtx, "error", detail);
|
|
||||||
await appendHistory(loopCtx, "task.error", {
|
|
||||||
detail,
|
|
||||||
messages: [detail],
|
|
||||||
});
|
|
||||||
loopCtx.state.initialized = false;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function initFailedActivity(loopCtx: any, error: unknown): Promise<void> {
|
export async function initFailedActivity(loopCtx: any, error: unknown): Promise<void> {
|
||||||
const now = Date.now();
|
const now = Date.now();
|
||||||
const detail = resolveErrorDetail(error);
|
const detail = resolveErrorDetail(error);
|
||||||
const messages = collectErrorMessages(error);
|
const messages = collectErrorMessages(error);
|
||||||
const db = loopCtx.db;
|
const { config } = getActorRuntimeContext();
|
||||||
const { config, providers } = getActorRuntimeContext();
|
const providerId = loopCtx.state.providerId ?? defaultSandboxProviderId(config);
|
||||||
const providerId = loopCtx.state.providerId ?? providers.defaultProviderId();
|
|
||||||
|
|
||||||
await db
|
await loopCtx.db
|
||||||
.insert(taskTable)
|
.insert(taskTable)
|
||||||
.values({
|
.values({
|
||||||
id: TASK_ROW_ID,
|
id: TASK_ROW_ID,
|
||||||
|
|
@ -664,7 +269,7 @@ export async function initFailedActivity(loopCtx: any, error: unknown): Promise<
|
||||||
})
|
})
|
||||||
.run();
|
.run();
|
||||||
|
|
||||||
await db
|
await loopCtx.db
|
||||||
.insert(taskRuntime)
|
.insert(taskRuntime)
|
||||||
.values({
|
.values({
|
||||||
id: TASK_ROW_ID,
|
id: TASK_ROW_ID,
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
// @ts-nocheck
|
// @ts-nocheck
|
||||||
import { eq } from "drizzle-orm";
|
import { eq } from "drizzle-orm";
|
||||||
import { getActorRuntimeContext } from "../../context.js";
|
import { getTaskSandbox } from "../../handles.js";
|
||||||
|
import { resolveWorkspaceGithubAuth } from "../../../services/github-auth.js";
|
||||||
import { taskRuntime, taskSandboxes } from "../db/schema.js";
|
import { taskRuntime, taskSandboxes } from "../db/schema.js";
|
||||||
import { TASK_ROW_ID, appendHistory, getCurrentRecord } from "./common.js";
|
import { TASK_ROW_ID, appendHistory, getCurrentRecord } from "./common.js";
|
||||||
|
|
||||||
|
|
@ -22,15 +23,11 @@ export async function pushActiveBranchActivity(loopCtx: any, options: PushActive
|
||||||
}
|
}
|
||||||
|
|
||||||
const activeSandbox = record.sandboxes.find((sandbox: any) => sandbox.sandboxId === activeSandboxId) ?? null;
|
const activeSandbox = record.sandboxes.find((sandbox: any) => sandbox.sandboxId === activeSandboxId) ?? null;
|
||||||
const providerId = activeSandbox?.providerId ?? record.providerId;
|
|
||||||
const cwd = activeSandbox?.cwd ?? null;
|
const cwd = activeSandbox?.cwd ?? null;
|
||||||
if (!cwd) {
|
if (!cwd) {
|
||||||
throw new Error("cannot push: active sandbox cwd is not set");
|
throw new Error("cannot push: active sandbox cwd is not set");
|
||||||
}
|
}
|
||||||
|
|
||||||
const { providers } = getActorRuntimeContext();
|
|
||||||
const provider = providers.get(providerId);
|
|
||||||
|
|
||||||
const now = Date.now();
|
const now = Date.now();
|
||||||
await loopCtx.db
|
await loopCtx.db
|
||||||
.update(taskRuntime)
|
.update(taskRuntime)
|
||||||
|
|
@ -52,15 +49,23 @@ export async function pushActiveBranchActivity(loopCtx: any, options: PushActive
|
||||||
`git push -u origin ${JSON.stringify(branchName)}`,
|
`git push -u origin ${JSON.stringify(branchName)}`,
|
||||||
].join("; ");
|
].join("; ");
|
||||||
|
|
||||||
const result = await provider.executeCommand({
|
const sandbox = getTaskSandbox(loopCtx, loopCtx.state.workspaceId, activeSandboxId);
|
||||||
workspaceId: loopCtx.state.workspaceId,
|
const auth = await resolveWorkspaceGithubAuth(loopCtx, loopCtx.state.workspaceId);
|
||||||
sandboxId: activeSandboxId,
|
const result = await sandbox.runProcess({
|
||||||
command: ["bash", "-lc", JSON.stringify(script)].join(" "),
|
command: "bash",
|
||||||
label: `git push ${branchName}`,
|
args: ["-lc", script],
|
||||||
|
cwd: "/",
|
||||||
|
env: auth?.githubToken
|
||||||
|
? {
|
||||||
|
GH_TOKEN: auth.githubToken,
|
||||||
|
GITHUB_TOKEN: auth.githubToken,
|
||||||
|
}
|
||||||
|
: undefined,
|
||||||
|
timeoutMs: 5 * 60_000,
|
||||||
});
|
});
|
||||||
|
|
||||||
if (result.exitCode !== 0) {
|
if ((result.exitCode ?? 0) !== 0) {
|
||||||
throw new Error(`git push failed (${result.exitCode}): ${result.result}`);
|
throw new Error(`git push failed (${result.exitCode ?? 1}): ${[result.stdout, result.stderr].filter(Boolean).join("")}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
const updatedAt = Date.now();
|
const updatedAt = Date.now();
|
||||||
|
|
|
||||||
|
|
@ -26,7 +26,6 @@ export const TASK_QUEUE_NAMES = [
|
||||||
"task.command.workbench.close_session",
|
"task.command.workbench.close_session",
|
||||||
"task.command.workbench.publish_pr",
|
"task.command.workbench.publish_pr",
|
||||||
"task.command.workbench.revert_file",
|
"task.command.workbench.revert_file",
|
||||||
"task.status_sync.result",
|
|
||||||
] as const;
|
] as const;
|
||||||
|
|
||||||
export function taskWorkflowQueueName(name: string): string {
|
export function taskWorkflowQueueName(name: string): string {
|
||||||
|
|
|
||||||
|
|
@ -1,148 +0,0 @@
|
||||||
// @ts-nocheck
|
|
||||||
import { eq } from "drizzle-orm";
|
|
||||||
import { getActorRuntimeContext } from "../../context.js";
|
|
||||||
import { logActorWarning, resolveErrorMessage } from "../../logging.js";
|
|
||||||
import { resolveWorkspaceGithubAuth } from "../../../services/github-auth.js";
|
|
||||||
import { task as taskTable, taskRuntime, taskSandboxes } from "../db/schema.js";
|
|
||||||
import { TASK_ROW_ID, appendHistory, resolveErrorDetail } from "./common.js";
|
|
||||||
import { pushActiveBranchActivity } from "./push.js";
|
|
||||||
|
|
||||||
function mapSessionStatus(status: "running" | "idle" | "error") {
|
|
||||||
if (status === "idle") return "idle";
|
|
||||||
if (status === "error") return "error";
|
|
||||||
return "running";
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function statusUpdateActivity(loopCtx: any, body: any): Promise<boolean> {
|
|
||||||
const newStatus = mapSessionStatus(body.status);
|
|
||||||
const wasIdle = loopCtx.state.previousStatus === "idle";
|
|
||||||
const didTransition = newStatus === "idle" && !wasIdle;
|
|
||||||
const isDuplicateStatus = loopCtx.state.previousStatus === newStatus;
|
|
||||||
|
|
||||||
if (isDuplicateStatus) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
const db = loopCtx.db;
|
|
||||||
const runtime = await db
|
|
||||||
.select({
|
|
||||||
activeSandboxId: taskRuntime.activeSandboxId,
|
|
||||||
activeSessionId: taskRuntime.activeSessionId,
|
|
||||||
})
|
|
||||||
.from(taskRuntime)
|
|
||||||
.where(eq(taskRuntime.id, TASK_ROW_ID))
|
|
||||||
.get();
|
|
||||||
|
|
||||||
const isActive = runtime?.activeSandboxId === body.sandboxId && runtime?.activeSessionId === body.sessionId;
|
|
||||||
|
|
||||||
if (isActive) {
|
|
||||||
await db.update(taskTable).set({ status: newStatus, updatedAt: body.at }).where(eq(taskTable.id, TASK_ROW_ID)).run();
|
|
||||||
|
|
||||||
await db
|
|
||||||
.update(taskRuntime)
|
|
||||||
.set({ statusMessage: `session:${body.status}`, updatedAt: body.at })
|
|
||||||
.where(eq(taskRuntime.id, TASK_ROW_ID))
|
|
||||||
.run();
|
|
||||||
}
|
|
||||||
|
|
||||||
await db
|
|
||||||
.update(taskSandboxes)
|
|
||||||
.set({ statusMessage: `session:${body.status}`, updatedAt: body.at })
|
|
||||||
.where(eq(taskSandboxes.sandboxId, body.sandboxId))
|
|
||||||
.run();
|
|
||||||
|
|
||||||
await appendHistory(loopCtx, "task.status", {
|
|
||||||
status: body.status,
|
|
||||||
sessionId: body.sessionId,
|
|
||||||
sandboxId: body.sandboxId,
|
|
||||||
});
|
|
||||||
|
|
||||||
if (isActive) {
|
|
||||||
loopCtx.state.previousStatus = newStatus;
|
|
||||||
|
|
||||||
const { driver } = getActorRuntimeContext();
|
|
||||||
if (loopCtx.state.branchName) {
|
|
||||||
driver.tmux.setWindowStatus(loopCtx.state.branchName, newStatus);
|
|
||||||
}
|
|
||||||
return didTransition;
|
|
||||||
}
|
|
||||||
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function idleSubmitPrActivity(loopCtx: any): Promise<void> {
|
|
||||||
const { driver } = getActorRuntimeContext();
|
|
||||||
const db = loopCtx.db;
|
|
||||||
|
|
||||||
const self = await db.select({ prSubmitted: taskTable.prSubmitted }).from(taskTable).where(eq(taskTable.id, TASK_ROW_ID)).get();
|
|
||||||
|
|
||||||
if (self && self.prSubmitted) return;
|
|
||||||
|
|
||||||
const auth = await resolveWorkspaceGithubAuth(loopCtx, loopCtx.state.workspaceId);
|
|
||||||
|
|
||||||
try {
|
|
||||||
await driver.git.fetch(loopCtx.state.repoLocalPath, { githubToken: auth?.githubToken ?? null });
|
|
||||||
} catch (error) {
|
|
||||||
logActorWarning("task.status-sync", "fetch before PR submit failed", {
|
|
||||||
workspaceId: loopCtx.state.workspaceId,
|
|
||||||
repoId: loopCtx.state.repoId,
|
|
||||||
taskId: loopCtx.state.taskId,
|
|
||||||
error: resolveErrorMessage(error),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!loopCtx.state.branchName || !loopCtx.state.title) {
|
|
||||||
throw new Error("cannot submit PR before task has a branch and title");
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
await pushActiveBranchActivity(loopCtx, {
|
|
||||||
reason: "auto_submit_idle",
|
|
||||||
historyKind: "task.push.auto",
|
|
||||||
});
|
|
||||||
|
|
||||||
const pr = await driver.github.createPr(loopCtx.state.repoLocalPath, loopCtx.state.branchName, loopCtx.state.title, undefined, {
|
|
||||||
githubToken: auth?.githubToken ?? null,
|
|
||||||
});
|
|
||||||
|
|
||||||
await db.update(taskTable).set({ prSubmitted: 1, updatedAt: Date.now() }).where(eq(taskTable.id, TASK_ROW_ID)).run();
|
|
||||||
|
|
||||||
await appendHistory(loopCtx, "task.step", {
|
|
||||||
step: "pr_submit",
|
|
||||||
taskId: loopCtx.state.taskId,
|
|
||||||
branchName: loopCtx.state.branchName,
|
|
||||||
prUrl: pr.url,
|
|
||||||
prNumber: pr.number,
|
|
||||||
});
|
|
||||||
|
|
||||||
await appendHistory(loopCtx, "task.pr_created", {
|
|
||||||
taskId: loopCtx.state.taskId,
|
|
||||||
branchName: loopCtx.state.branchName,
|
|
||||||
prUrl: pr.url,
|
|
||||||
prNumber: pr.number,
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
const detail = resolveErrorDetail(error);
|
|
||||||
await db
|
|
||||||
.update(taskRuntime)
|
|
||||||
.set({
|
|
||||||
statusMessage: `pr submit failed: ${detail}`,
|
|
||||||
updatedAt: Date.now(),
|
|
||||||
})
|
|
||||||
.where(eq(taskRuntime.id, TASK_ROW_ID))
|
|
||||||
.run();
|
|
||||||
|
|
||||||
await appendHistory(loopCtx, "task.pr_create_failed", {
|
|
||||||
taskId: loopCtx.state.taskId,
|
|
||||||
branchName: loopCtx.state.branchName,
|
|
||||||
error: detail,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function idleNotifyActivity(loopCtx: any): Promise<void> {
|
|
||||||
const { notifications } = getActorRuntimeContext();
|
|
||||||
if (notifications && loopCtx.state.branchName) {
|
|
||||||
await notifications.agentIdle(loopCtx.state.branchName);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,4 +1,5 @@
|
||||||
// @ts-nocheck
|
// @ts-nocheck
|
||||||
|
import { setTimeout as delay } from "node:timers/promises";
|
||||||
import { desc, eq } from "drizzle-orm";
|
import { desc, eq } from "drizzle-orm";
|
||||||
import { Loop } from "rivetkit/workflow";
|
import { Loop } from "rivetkit/workflow";
|
||||||
import type {
|
import type {
|
||||||
|
|
@ -37,6 +38,7 @@ import type {
|
||||||
import { getActorRuntimeContext } from "../context.js";
|
import { getActorRuntimeContext } from "../context.js";
|
||||||
import { getTask, getOrCreateHistory, getOrCreateProject, selfWorkspace } from "../handles.js";
|
import { getTask, getOrCreateHistory, getOrCreateProject, selfWorkspace } from "../handles.js";
|
||||||
import { logActorWarning, resolveErrorMessage } from "../logging.js";
|
import { logActorWarning, resolveErrorMessage } from "../logging.js";
|
||||||
|
import { availableSandboxProviderIds, defaultSandboxProviderId } from "../../sandbox-config.js";
|
||||||
import { normalizeRemoteUrl, repoIdFromRemote } from "../../services/repo.js";
|
import { normalizeRemoteUrl, repoIdFromRemote } from "../../services/repo.js";
|
||||||
import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js";
|
import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js";
|
||||||
import { taskLookup, repos, providerProfiles, taskSummaries } from "./db/schema.js";
|
import { taskLookup, repos, providerProfiles, taskSummaries } from "./db/schema.js";
|
||||||
|
|
@ -258,6 +260,24 @@ async function requireWorkbenchTask(c: any, taskId: string) {
|
||||||
return getTask(c, c.state.workspaceId, repoId, taskId);
|
return getTask(c, c.state.workspaceId, repoId, taskId);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async function waitForWorkbenchTaskReady(task: any, timeoutMs = 5 * 60_000): Promise<any> {
|
||||||
|
const startedAt = Date.now();
|
||||||
|
|
||||||
|
for (;;) {
|
||||||
|
const record = await task.get();
|
||||||
|
if (record?.branchName && record?.title) {
|
||||||
|
return record;
|
||||||
|
}
|
||||||
|
if (record?.status === "error") {
|
||||||
|
throw new Error("task initialization failed before the workbench session was ready");
|
||||||
|
}
|
||||||
|
if (Date.now() - startedAt > timeoutMs) {
|
||||||
|
throw new Error("timed out waiting for task initialization");
|
||||||
|
}
|
||||||
|
await delay(1_000);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Reads the workspace sidebar snapshot from the workspace actor's local SQLite
|
* Reads the workspace sidebar snapshot from the workspace actor's local SQLite
|
||||||
* only. Task actors push summary updates into `task_summaries`, so clients do
|
* only. Task actors push summary updates into `task_summaries`, so clients do
|
||||||
|
|
@ -343,8 +363,8 @@ async function addRepoMutation(c: any, input: AddRepoInput): Promise<RepoRecord>
|
||||||
async function createTaskMutation(c: any, input: CreateTaskInput): Promise<TaskRecord> {
|
async function createTaskMutation(c: any, input: CreateTaskInput): Promise<TaskRecord> {
|
||||||
assertWorkspace(c, input.workspaceId);
|
assertWorkspace(c, input.workspaceId);
|
||||||
|
|
||||||
const { providers } = getActorRuntimeContext();
|
const { config } = getActorRuntimeContext();
|
||||||
const providerId = input.providerId ?? providers.defaultProviderId();
|
const providerId = input.providerId ?? defaultSandboxProviderId(config);
|
||||||
|
|
||||||
const repoId = input.repoId;
|
const repoId = input.repoId;
|
||||||
const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, repoId)).get();
|
const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, repoId)).get();
|
||||||
|
|
@ -411,8 +431,8 @@ async function createTaskMutation(c: any, input: CreateTaskInput): Promise<TaskR
|
||||||
|
|
||||||
async function refreshProviderProfilesMutation(c: any, command?: RefreshProviderProfilesCommand): Promise<void> {
|
async function refreshProviderProfilesMutation(c: any, command?: RefreshProviderProfilesCommand): Promise<void> {
|
||||||
const body = command ?? {};
|
const body = command ?? {};
|
||||||
const { providers } = getActorRuntimeContext();
|
const { config } = getActorRuntimeContext();
|
||||||
const providerIds: ProviderId[] = body.providerId ? [body.providerId] : providers.availableProviderIds();
|
const providerIds: ProviderId[] = body.providerId ? [body.providerId] : availableSandboxProviderIds(config);
|
||||||
|
|
||||||
for (const providerId of providerIds) {
|
for (const providerId of providerIds) {
|
||||||
await c.db
|
await c.db
|
||||||
|
|
@ -456,7 +476,7 @@ export async function runWorkspaceWorkflow(ctx: any): Promise<void> {
|
||||||
if (msg.name === "workspace.command.createTask") {
|
if (msg.name === "workspace.command.createTask") {
|
||||||
const result = await loopCtx.step({
|
const result = await loopCtx.step({
|
||||||
name: "workspace-create-task",
|
name: "workspace-create-task",
|
||||||
timeout: 60_000,
|
timeout: 5 * 60_000,
|
||||||
run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskInput),
|
run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskInput),
|
||||||
});
|
});
|
||||||
await msg.complete(result);
|
await msg.complete(result);
|
||||||
|
|
@ -546,7 +566,7 @@ export const workspaceActions = {
|
||||||
return expectQueueResponse<TaskRecord>(
|
return expectQueueResponse<TaskRecord>(
|
||||||
await self.send(workspaceWorkflowQueueName("workspace.command.createTask"), input, {
|
await self.send(workspaceWorkflowQueueName("workspace.command.createTask"), input, {
|
||||||
wait: true,
|
wait: true,
|
||||||
timeout: 60_000,
|
timeout: 5 * 60_000,
|
||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
},
|
},
|
||||||
|
|
@ -603,8 +623,21 @@ export const workspaceActions = {
|
||||||
...(input.branch ? { explicitBranchName: input.branch } : {}),
|
...(input.branch ? { explicitBranchName: input.branch } : {}),
|
||||||
...(input.model ? { agentType: agentTypeForModel(input.model) } : {}),
|
...(input.model ? { agentType: agentTypeForModel(input.model) } : {}),
|
||||||
});
|
});
|
||||||
|
const task = await requireWorkbenchTask(c, created.taskId);
|
||||||
|
await waitForWorkbenchTaskReady(task);
|
||||||
|
const session = await task.createWorkbenchSession({
|
||||||
|
taskId: created.taskId,
|
||||||
|
...(input.model ? { model: input.model } : {}),
|
||||||
|
});
|
||||||
|
await task.sendWorkbenchMessage({
|
||||||
|
taskId: created.taskId,
|
||||||
|
tabId: session.tabId,
|
||||||
|
text: input.task,
|
||||||
|
attachments: [],
|
||||||
|
});
|
||||||
return {
|
return {
|
||||||
taskId: created.taskId,
|
taskId: created.taskId,
|
||||||
|
tabId: session.tabId,
|
||||||
};
|
};
|
||||||
},
|
},
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,19 +1,5 @@
|
||||||
import type { BranchSnapshot } from "./integrations/git/index.js";
|
import type { BranchSnapshot } from "./integrations/git/index.js";
|
||||||
import type { PullRequestSnapshot } from "./integrations/github/index.js";
|
import type { PullRequestSnapshot } from "./integrations/github/index.js";
|
||||||
import type { SandboxSession, SandboxAgentClientOptions, SandboxSessionCreateRequest } from "./integrations/sandbox-agent/client.js";
|
|
||||||
import type {
|
|
||||||
ListEventsRequest,
|
|
||||||
ListPage,
|
|
||||||
ListPageRequest,
|
|
||||||
ProcessCreateRequest,
|
|
||||||
ProcessInfo,
|
|
||||||
ProcessLogFollowQuery,
|
|
||||||
ProcessLogsResponse,
|
|
||||||
ProcessSignalQuery,
|
|
||||||
SessionEvent,
|
|
||||||
SessionRecord,
|
|
||||||
} from "sandbox-agent";
|
|
||||||
import type { DaytonaClientOptions, DaytonaCreateSandboxOptions, DaytonaPreviewEndpoint, DaytonaSandbox } from "./integrations/daytona/client.js";
|
|
||||||
import {
|
import {
|
||||||
validateRemote,
|
validateRemote,
|
||||||
ensureCloned,
|
ensureCloned,
|
||||||
|
|
@ -36,8 +22,6 @@ import {
|
||||||
gitSpiceTrackBranch,
|
gitSpiceTrackBranch,
|
||||||
} from "./integrations/git-spice/index.js";
|
} from "./integrations/git-spice/index.js";
|
||||||
import { listPullRequests, createPr, starRepository } from "./integrations/github/index.js";
|
import { listPullRequests, createPr, starRepository } from "./integrations/github/index.js";
|
||||||
import { SandboxAgentClient } from "./integrations/sandbox-agent/client.js";
|
|
||||||
import { DaytonaClient } from "./integrations/daytona/client.js";
|
|
||||||
|
|
||||||
export interface GitDriver {
|
export interface GitDriver {
|
||||||
validateRemote(remoteUrl: string, options?: { githubToken?: string | null }): Promise<void>;
|
validateRemote(remoteUrl: string, options?: { githubToken?: string | null }): Promise<void>;
|
||||||
|
|
@ -79,40 +63,6 @@ export interface GithubDriver {
|
||||||
starRepository(repoFullName: string, options?: { githubToken?: string | null }): Promise<void>;
|
starRepository(repoFullName: string, options?: { githubToken?: string | null }): Promise<void>;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface SandboxAgentClientLike {
|
|
||||||
createSession(request: string | SandboxSessionCreateRequest): Promise<SandboxSession>;
|
|
||||||
sessionStatus(sessionId: string): Promise<SandboxSession>;
|
|
||||||
listSessions(request?: ListPageRequest): Promise<ListPage<SessionRecord>>;
|
|
||||||
listEvents(request: ListEventsRequest): Promise<ListPage<SessionEvent>>;
|
|
||||||
createProcess(request: ProcessCreateRequest): Promise<ProcessInfo>;
|
|
||||||
listProcesses(): Promise<{ processes: ProcessInfo[] }>;
|
|
||||||
getProcessLogs(processId: string, query?: ProcessLogFollowQuery): Promise<ProcessLogsResponse>;
|
|
||||||
stopProcess(processId: string, query?: ProcessSignalQuery): Promise<ProcessInfo>;
|
|
||||||
killProcess(processId: string, query?: ProcessSignalQuery): Promise<ProcessInfo>;
|
|
||||||
deleteProcess(processId: string): Promise<void>;
|
|
||||||
sendPrompt(request: { sessionId: string; prompt: string; notification?: boolean }): Promise<void>;
|
|
||||||
cancelSession(sessionId: string): Promise<void>;
|
|
||||||
destroySession(sessionId: string): Promise<void>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface SandboxAgentDriver {
|
|
||||||
createClient(options: SandboxAgentClientOptions): SandboxAgentClientLike;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface DaytonaClientLike {
|
|
||||||
createSandbox(options: DaytonaCreateSandboxOptions): Promise<DaytonaSandbox>;
|
|
||||||
getSandbox(sandboxId: string): Promise<DaytonaSandbox>;
|
|
||||||
startSandbox(sandboxId: string, timeoutSeconds?: number): Promise<void>;
|
|
||||||
stopSandbox(sandboxId: string, timeoutSeconds?: number): Promise<void>;
|
|
||||||
deleteSandbox(sandboxId: string): Promise<void>;
|
|
||||||
executeCommand(sandboxId: string, command: string): Promise<{ exitCode: number; result: string }>;
|
|
||||||
getPreviewEndpoint(sandboxId: string, port: number): Promise<DaytonaPreviewEndpoint>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface DaytonaDriver {
|
|
||||||
createClient(options: DaytonaClientOptions): DaytonaClientLike;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface TmuxDriver {
|
export interface TmuxDriver {
|
||||||
setWindowStatus(branchName: string, status: string): number;
|
setWindowStatus(branchName: string, status: string): number;
|
||||||
}
|
}
|
||||||
|
|
@ -121,15 +71,10 @@ export interface BackendDriver {
|
||||||
git: GitDriver;
|
git: GitDriver;
|
||||||
stack: StackDriver;
|
stack: StackDriver;
|
||||||
github: GithubDriver;
|
github: GithubDriver;
|
||||||
sandboxAgent: SandboxAgentDriver;
|
|
||||||
daytona: DaytonaDriver;
|
|
||||||
tmux: TmuxDriver;
|
tmux: TmuxDriver;
|
||||||
}
|
}
|
||||||
|
|
||||||
export function createDefaultDriver(): BackendDriver {
|
export function createDefaultDriver(): BackendDriver {
|
||||||
const sandboxAgentClients = new Map<string, SandboxAgentClient>();
|
|
||||||
const daytonaClients = new Map<string, DaytonaClient>();
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
git: {
|
git: {
|
||||||
validateRemote,
|
validateRemote,
|
||||||
|
|
@ -157,33 +102,6 @@ export function createDefaultDriver(): BackendDriver {
|
||||||
createPr,
|
createPr,
|
||||||
starRepository,
|
starRepository,
|
||||||
},
|
},
|
||||||
sandboxAgent: {
|
|
||||||
createClient: (opts) => {
|
|
||||||
if (opts.persist) {
|
|
||||||
return new SandboxAgentClient(opts);
|
|
||||||
}
|
|
||||||
const key = `${opts.endpoint}|${opts.token ?? ""}|${opts.agent ?? ""}`;
|
|
||||||
const cached = sandboxAgentClients.get(key);
|
|
||||||
if (cached) {
|
|
||||||
return cached;
|
|
||||||
}
|
|
||||||
const created = new SandboxAgentClient(opts);
|
|
||||||
sandboxAgentClients.set(key, created);
|
|
||||||
return created;
|
|
||||||
},
|
|
||||||
},
|
|
||||||
daytona: {
|
|
||||||
createClient: (opts) => {
|
|
||||||
const key = `${opts.apiUrl ?? ""}|${opts.apiKey ?? ""}|${opts.target ?? ""}`;
|
|
||||||
const cached = daytonaClients.get(key);
|
|
||||||
if (cached) {
|
|
||||||
return cached;
|
|
||||||
}
|
|
||||||
const created = new DaytonaClient(opts);
|
|
||||||
daytonaClients.set(key, created);
|
|
||||||
return created;
|
|
||||||
},
|
|
||||||
},
|
|
||||||
tmux: {
|
tmux: {
|
||||||
setWindowStatus: () => 0,
|
setWindowStatus: () => 0,
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -7,7 +7,6 @@ import { workspaceKey } from "./actors/keys.js";
|
||||||
import { loadConfig } from "./config/backend.js";
|
import { loadConfig } from "./config/backend.js";
|
||||||
import { createBackends, createNotificationService } from "./notifications/index.js";
|
import { createBackends, createNotificationService } from "./notifications/index.js";
|
||||||
import { createDefaultDriver } from "./driver.js";
|
import { createDefaultDriver } from "./driver.js";
|
||||||
import { createProviderRegistry } from "./providers/index.js";
|
|
||||||
import { createClient } from "rivetkit/client";
|
import { createClient } from "rivetkit/client";
|
||||||
import { initBetterAuthService } from "./services/better-auth.js";
|
import { initBetterAuthService } from "./services/better-auth.js";
|
||||||
import { createDefaultAppShellServices } from "./services/app-shell-runtime.js";
|
import { createDefaultAppShellServices } from "./services/app-shell-runtime.js";
|
||||||
|
|
@ -69,15 +68,14 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
|
||||||
return undefined;
|
return undefined;
|
||||||
};
|
};
|
||||||
|
|
||||||
config.providers.daytona.endpoint = envFirst("HF_DAYTONA_ENDPOINT", "DAYTONA_ENDPOINT") ?? config.providers.daytona.endpoint;
|
config.providers.e2b.apiKey = envFirst("E2B_API_KEY") ?? config.providers.e2b.apiKey;
|
||||||
config.providers.daytona.apiKey = envFirst("HF_DAYTONA_API_KEY", "DAYTONA_API_KEY") ?? config.providers.daytona.apiKey;
|
config.providers.e2b.template = envFirst("HF_E2B_TEMPLATE", "E2B_TEMPLATE") ?? config.providers.e2b.template;
|
||||||
|
|
||||||
const driver = createDefaultDriver();
|
const driver = createDefaultDriver();
|
||||||
const providers = createProviderRegistry(config, driver);
|
|
||||||
const backends = await createBackends(config.notify);
|
const backends = await createBackends(config.notify);
|
||||||
const notifications = createNotificationService(backends);
|
const notifications = createNotificationService(backends);
|
||||||
const appShellServices = createDefaultAppShellServices();
|
const appShellServices = createDefaultAppShellServices();
|
||||||
initActorRuntimeContext(config, providers, notifications, driver, appShellServices);
|
initActorRuntimeContext(config, notifications, driver, appShellServices);
|
||||||
|
|
||||||
const actorClient = createClient({
|
const actorClient = createClient({
|
||||||
endpoint: `http://127.0.0.1:${config.backend.port}/v1/rivet`,
|
endpoint: `http://127.0.0.1:${config.backend.port}/v1/rivet`,
|
||||||
|
|
|
||||||
|
|
@ -1,113 +0,0 @@
|
||||||
import { Daytona, type Image } from "@daytonaio/sdk";
|
|
||||||
|
|
||||||
export interface DaytonaSandbox {
|
|
||||||
id: string;
|
|
||||||
state?: string;
|
|
||||||
snapshot?: string;
|
|
||||||
labels?: Record<string, string>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface DaytonaCreateSandboxOptions {
|
|
||||||
image: string | Image;
|
|
||||||
envVars?: Record<string, string>;
|
|
||||||
labels?: Record<string, string>;
|
|
||||||
autoStopInterval?: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface DaytonaPreviewEndpoint {
|
|
||||||
url: string;
|
|
||||||
token?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface DaytonaClientOptions {
|
|
||||||
apiUrl?: string;
|
|
||||||
apiKey?: string;
|
|
||||||
target?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
function normalizeApiUrl(input?: string): string | undefined {
|
|
||||||
if (!input) return undefined;
|
|
||||||
const trimmed = input.replace(/\/+$/, "");
|
|
||||||
if (trimmed.endsWith("/api")) {
|
|
||||||
return trimmed;
|
|
||||||
}
|
|
||||||
return `${trimmed}/api`;
|
|
||||||
}
|
|
||||||
|
|
||||||
export class DaytonaClient {
|
|
||||||
private readonly daytona: Daytona;
|
|
||||||
|
|
||||||
constructor(options: DaytonaClientOptions) {
|
|
||||||
const apiUrl = normalizeApiUrl(options.apiUrl);
|
|
||||||
this.daytona = new Daytona({
|
|
||||||
_experimental: {},
|
|
||||||
...(apiUrl ? { apiUrl } : {}),
|
|
||||||
...(options.apiKey ? { apiKey: options.apiKey } : {}),
|
|
||||||
...(options.target ? { target: options.target } : {}),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async createSandbox(options: DaytonaCreateSandboxOptions): Promise<DaytonaSandbox> {
|
|
||||||
const sandbox = await this.daytona.create({
|
|
||||||
image: options.image,
|
|
||||||
envVars: options.envVars,
|
|
||||||
labels: options.labels,
|
|
||||||
...(options.autoStopInterval !== undefined ? { autoStopInterval: options.autoStopInterval } : {}),
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
id: sandbox.id,
|
|
||||||
state: sandbox.state,
|
|
||||||
snapshot: sandbox.snapshot,
|
|
||||||
labels: (sandbox as any).labels,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async getSandbox(sandboxId: string): Promise<DaytonaSandbox> {
|
|
||||||
const sandbox = await this.daytona.get(sandboxId);
|
|
||||||
return {
|
|
||||||
id: sandbox.id,
|
|
||||||
state: sandbox.state,
|
|
||||||
snapshot: sandbox.snapshot,
|
|
||||||
labels: (sandbox as any).labels,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async startSandbox(sandboxId: string, timeoutSeconds?: number): Promise<void> {
|
|
||||||
const sandbox = await this.daytona.get(sandboxId);
|
|
||||||
await sandbox.start(timeoutSeconds);
|
|
||||||
}
|
|
||||||
|
|
||||||
async stopSandbox(sandboxId: string, timeoutSeconds?: number): Promise<void> {
|
|
||||||
const sandbox = await this.daytona.get(sandboxId);
|
|
||||||
await sandbox.stop(timeoutSeconds);
|
|
||||||
}
|
|
||||||
|
|
||||||
async deleteSandbox(sandboxId: string): Promise<void> {
|
|
||||||
const sandbox = await this.daytona.get(sandboxId);
|
|
||||||
await this.daytona.delete(sandbox);
|
|
||||||
}
|
|
||||||
|
|
||||||
async executeCommand(sandboxId: string, command: string): Promise<{ exitCode: number; result: string }> {
|
|
||||||
const sandbox = await this.daytona.get(sandboxId);
|
|
||||||
const response = await sandbox.process.executeCommand(command);
|
|
||||||
return {
|
|
||||||
exitCode: response.exitCode,
|
|
||||||
result: response.result,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async getPreviewEndpoint(sandboxId: string, port: number): Promise<DaytonaPreviewEndpoint> {
|
|
||||||
const sandbox = await this.daytona.get(sandboxId);
|
|
||||||
// Use signed preview URLs for server-to-sandbox communication.
|
|
||||||
// The standard preview link may redirect to an interactive Auth0 flow from non-browser clients.
|
|
||||||
// Signed preview URLs work for direct HTTP access.
|
|
||||||
//
|
|
||||||
// Request a longer-lived URL so sessions can run for several minutes without refresh.
|
|
||||||
const preview = await sandbox.getSignedPreviewUrl(port, 6 * 60 * 60);
|
|
||||||
return {
|
|
||||||
url: preview.url,
|
|
||||||
token: preview.token,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -87,7 +87,7 @@ export interface BranchSnapshot {
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function fetch(repoPath: string, options?: GitAuthOptions): Promise<void> {
|
export async function fetch(repoPath: string, options?: GitAuthOptions): Promise<void> {
|
||||||
await execFileAsync("git", ["-C", repoPath, "fetch", "--prune"], {
|
await execFileAsync("git", ["-C", repoPath, "fetch", "--prune", "--no-auto-gc"], {
|
||||||
timeout: DEFAULT_GIT_FETCH_TIMEOUT_MS,
|
timeout: DEFAULT_GIT_FETCH_TIMEOUT_MS,
|
||||||
env: gitEnv(options),
|
env: gitEnv(options),
|
||||||
});
|
});
|
||||||
|
|
|
||||||
|
|
@ -1,512 +0,0 @@
|
||||||
import type {
|
|
||||||
AgentEndpoint,
|
|
||||||
AttachTarget,
|
|
||||||
AttachTargetRequest,
|
|
||||||
CreateSandboxRequest,
|
|
||||||
DestroySandboxRequest,
|
|
||||||
EnsureAgentRequest,
|
|
||||||
ExecuteSandboxCommandRequest,
|
|
||||||
ExecuteSandboxCommandResult,
|
|
||||||
ProviderCapabilities,
|
|
||||||
ReleaseSandboxRequest,
|
|
||||||
ResumeSandboxRequest,
|
|
||||||
SandboxHandle,
|
|
||||||
SandboxHealth,
|
|
||||||
SandboxHealthRequest,
|
|
||||||
SandboxProvider,
|
|
||||||
} from "../provider-api/index.js";
|
|
||||||
import type { DaytonaDriver } from "../../driver.js";
|
|
||||||
import { Image } from "@daytonaio/sdk";
|
|
||||||
import { readFileSync } from "node:fs";
|
|
||||||
import { homedir } from "node:os";
|
|
||||||
import { resolve } from "node:path";
|
|
||||||
|
|
||||||
export interface DaytonaProviderConfig {
|
|
||||||
endpoint?: string;
|
|
||||||
apiKey?: string;
|
|
||||||
image: string;
|
|
||||||
target?: string;
|
|
||||||
/**
|
|
||||||
* Auto-stop interval in minutes. If omitted, Daytona's default applies.
|
|
||||||
* Set to `0` to disable auto-stop.
|
|
||||||
*/
|
|
||||||
autoStopInterval?: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
export class DaytonaProvider implements SandboxProvider {
|
|
||||||
constructor(
|
|
||||||
private readonly config: DaytonaProviderConfig,
|
|
||||||
private readonly daytona?: DaytonaDriver,
|
|
||||||
) {}
|
|
||||||
|
|
||||||
private static readonly SANDBOX_AGENT_PORT = 2468;
|
|
||||||
private static readonly SANDBOX_AGENT_VERSION = "0.3.0";
|
|
||||||
private static readonly DEFAULT_ACP_REQUEST_TIMEOUT_MS = 120_000;
|
|
||||||
private static readonly AGENT_IDS = ["codex", "claude"] as const;
|
|
||||||
private static readonly PASSTHROUGH_ENV_KEYS = [
|
|
||||||
"ANTHROPIC_API_KEY",
|
|
||||||
"CLAUDE_API_KEY",
|
|
||||||
"OPENAI_API_KEY",
|
|
||||||
"CODEX_API_KEY",
|
|
||||||
"OPENCODE_API_KEY",
|
|
||||||
"CEREBRAS_API_KEY",
|
|
||||||
"GH_TOKEN",
|
|
||||||
"GITHUB_TOKEN",
|
|
||||||
] as const;
|
|
||||||
|
|
||||||
private getRequestTimeoutMs(): number {
|
|
||||||
const parsed = Number(process.env.HF_DAYTONA_REQUEST_TIMEOUT_MS ?? "120000");
|
|
||||||
if (!Number.isFinite(parsed) || parsed <= 0) {
|
|
||||||
return 120_000;
|
|
||||||
}
|
|
||||||
return Math.floor(parsed);
|
|
||||||
}
|
|
||||||
|
|
||||||
private getAcpRequestTimeoutMs(): number {
|
|
||||||
const parsed = Number(process.env.HF_SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS ?? DaytonaProvider.DEFAULT_ACP_REQUEST_TIMEOUT_MS.toString());
|
|
||||||
if (!Number.isFinite(parsed) || parsed <= 0) {
|
|
||||||
return DaytonaProvider.DEFAULT_ACP_REQUEST_TIMEOUT_MS;
|
|
||||||
}
|
|
||||||
return Math.floor(parsed);
|
|
||||||
}
|
|
||||||
|
|
||||||
private async withTimeout<T>(label: string, fn: () => Promise<T>): Promise<T> {
|
|
||||||
const timeoutMs = this.getRequestTimeoutMs();
|
|
||||||
let timer: ReturnType<typeof setTimeout> | null = null;
|
|
||||||
|
|
||||||
try {
|
|
||||||
return await Promise.race([
|
|
||||||
fn(),
|
|
||||||
new Promise<T>((_, reject) => {
|
|
||||||
timer = setTimeout(() => {
|
|
||||||
reject(new Error(`daytona ${label} timed out after ${timeoutMs}ms`));
|
|
||||||
}, timeoutMs);
|
|
||||||
}),
|
|
||||||
]);
|
|
||||||
} finally {
|
|
||||||
if (timer) {
|
|
||||||
clearTimeout(timer);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private getClient() {
|
|
||||||
const apiKey = this.config.apiKey?.trim();
|
|
||||||
if (!apiKey) {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
const endpoint = this.config.endpoint?.trim();
|
|
||||||
|
|
||||||
return this.daytona?.createClient({
|
|
||||||
...(endpoint ? { apiUrl: endpoint } : {}),
|
|
||||||
apiKey,
|
|
||||||
target: this.config.target,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
private requireClient() {
|
|
||||||
const client = this.getClient();
|
|
||||||
if (client) {
|
|
||||||
return client;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!this.daytona) {
|
|
||||||
throw new Error("daytona provider requires backend daytona driver");
|
|
||||||
}
|
|
||||||
|
|
||||||
throw new Error(
|
|
||||||
"daytona provider is not configured: missing apiKey. " +
|
|
||||||
"Set HF_DAYTONA_API_KEY (or DAYTONA_API_KEY). " +
|
|
||||||
"Optionally set HF_DAYTONA_ENDPOINT (or DAYTONA_ENDPOINT).",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
private async ensureStarted(sandboxId: string): Promise<void> {
|
|
||||||
const client = this.requireClient();
|
|
||||||
|
|
||||||
const sandbox = await this.withTimeout("get sandbox", () => client.getSandbox(sandboxId));
|
|
||||||
const state = String(sandbox.state ?? "unknown").toLowerCase();
|
|
||||||
if (state === "started" || state === "running") {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// If the sandbox is stopped (or any non-started state), try starting it.
|
|
||||||
// Daytona preserves the filesystem across stop/start, which is what we rely on for faster git setup.
|
|
||||||
await this.withTimeout("start sandbox", () => client.startSandbox(sandboxId, 60));
|
|
||||||
}
|
|
||||||
|
|
||||||
private buildEnvVars(): Record<string, string> {
|
|
||||||
const envVars: Record<string, string> = {};
|
|
||||||
|
|
||||||
for (const key of DaytonaProvider.PASSTHROUGH_ENV_KEYS) {
|
|
||||||
const value = process.env[key];
|
|
||||||
if (value) {
|
|
||||||
envVars[key] = value;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return envVars;
|
|
||||||
}
|
|
||||||
|
|
||||||
private buildShellExports(extra: Record<string, string> = {}): string[] {
|
|
||||||
const merged = {
|
|
||||||
...this.buildEnvVars(),
|
|
||||||
...extra,
|
|
||||||
};
|
|
||||||
|
|
||||||
return Object.entries(merged).map(([key, value]) => {
|
|
||||||
const encoded = Buffer.from(value, "utf8").toString("base64");
|
|
||||||
return `export ${key}="$(printf %s ${JSON.stringify(encoded)} | base64 -d)"`;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Builds the Daytona image definition for new sandboxes.
// Use Daytona image build + snapshot caching so base tooling (git + sandbox-agent)
// is prepared once and reused for subsequent sandboxes.
// NOTE(review): per-agent installs tolerate failure (`|| true`) on purpose —
// a missing agent plugin should not break the base image build.
private buildSnapshotImage() {
  return Image.base(this.config.image).runCommands(
    "apt-get update && apt-get install -y curl ca-certificates git openssh-client nodejs npm",
    `curl -fsSL https://releases.rivet.dev/sandbox-agent/${DaytonaProvider.SANDBOX_AGENT_VERSION}/install.sh | sh`,
    `bash -lc 'export PATH="$HOME/.local/bin:$PATH"; sandbox-agent install-agent codex || true; sandbox-agent install-agent claude || true'`,
  );
}
|
|
||||||
|
|
||||||
private async runCheckedCommand(sandboxId: string, command: string, label: string): Promise<void> {
|
|
||||||
const client = this.requireClient();
|
|
||||||
|
|
||||||
const result = await this.withTimeout(`execute command (${label})`, () => client.executeCommand(sandboxId, command));
|
|
||||||
if (result.exitCode !== 0) {
|
|
||||||
throw new Error(`daytona ${label} failed (${result.exitCode}): ${result.result}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Wrap a string in POSIX single quotes. Each embedded `'` is replaced with
 * the standard `'"'"'` sequence (close quote, double-quoted quote, reopen).
 */
private shellSingleQuote(value: string): string {
  const escaped = value.split("'").join(`'\"'\"'`);
  return `'${escaped}'`;
}
|
|
||||||
|
|
||||||
/**
 * Best-effort read of the host's Codex auth profile (~/.codex/auth.json).
 * Returns null when the file is missing or unreadable rather than failing
 * sandbox provisioning.
 */
private readLocalCodexAuth(): string | null {
  try {
    return readFileSync(resolve(homedir(), ".codex", "auth.json"), "utf8");
  } catch {
    return null;
  }
}
|
|
||||||
|
|
||||||
// Builds a newline-joined POSIX shell script that clones the task repo into
// repoDir and checks out the task branch. Token-based HTTP auth is only
// injected for GitHub HTTPS remotes.
private buildCloneRepoScript(req: CreateSandboxRequest, repoDir: string): string {
  const usesGithubHttpAuth = req.repoRemote.startsWith("https://github.com/");
  const githubPath = usesGithubHttpAuth ? req.repoRemote.slice("https://github.com/".length) : "";

  const lines = [
    "set -eu",
    // Never let git prompt interactively inside the sandbox.
    "export GIT_TERMINAL_PROMPT=0",
    "export GIT_ASKPASS=/bin/echo",
    // Prefer the request's token; fall back to GH_TOKEN, then GITHUB_TOKEN.
    `TOKEN=${JSON.stringify(req.githubToken ?? "")}`,
    'if [ -z "$TOKEN" ]; then',
    ' if [ -n "${GH_TOKEN:-}" ]; then TOKEN="$GH_TOKEN"; else TOKEN="${GITHUB_TOKEN:-}"; fi',
    "fi",
    'AUTH_REMOTE=""',
    ...(usesGithubHttpAuth ? ['if [ -n "$TOKEN" ]; then', ` AUTH_REMOTE="https://x-access-token:${"$"}TOKEN@github.com/${githubPath}"`, "fi"] : []),
    // mkdir -p then rmdir: ensure the parent directory exists while leaving
    // the leaf absent so `git clone` can create it.
    `rm -rf "${repoDir}"`,
    `mkdir -p "${repoDir}"`,
    `rmdir "${repoDir}"`,
    // Foundry test repos can be private, so clone/fetch must use the sandbox's GitHub token when available.
    ...(usesGithubHttpAuth
      ? ['if [ -n "$AUTH_REMOTE" ]; then', ` git clone "$AUTH_REMOTE" "${repoDir}"`, "else", ` git clone "${req.repoRemote}" "${repoDir}"`, "fi"]
      : [`git clone "${req.repoRemote}" "${repoDir}"`]),
    `cd "${repoDir}"`,
    // After an authenticated clone, reset origin to the token-free remote so
    // the credential is not persisted in git config.
    ...(usesGithubHttpAuth ? ['if [ -n "$AUTH_REMOTE" ]; then', ` git remote set-url origin "${req.repoRemote}"`, "fi"] : []),
    // The task branch may not exist remotely yet (agent push creates it). Base off current branch (default branch).
    `if git show-ref --verify --quiet "refs/remotes/origin/${req.branchName}"; then git checkout -B "${req.branchName}" "origin/${req.branchName}"; else git checkout -B "${req.branchName}" "$(git branch --show-current 2>/dev/null || echo main)"; fi`,
    // Identity is required for commits; ignore failures on read-only setups.
    `git config user.email "foundry@local" >/dev/null 2>&1 || true`,
    `git config user.name "Foundry" >/dev/null 2>&1 || true`,
  ];

  return lines.join("\n");
}
|
|
||||||
|
|
||||||
/** Stable provider identifier used by the registry. */
id() {
  const providerId = "daytona" as const;
  return providerId;
}
|
|
||||||
|
|
||||||
/** Daytona sandboxes run remotely and survive stop/resume with state intact. */
capabilities(): ProviderCapabilities {
  const caps: ProviderCapabilities = {
    remote: true,
    supportsSessionReuse: true,
  };
  return caps;
}
|
|
||||||
|
|
||||||
async validateConfig(input: unknown): Promise<Record<string, unknown>> {
|
|
||||||
return (input as Record<string, unknown> | undefined) ?? {};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Creates a fresh Daytona sandbox for a task: builds/reuses the snapshot
// image, labels the sandbox for later lookup, installs the git/node
// toolchain if the snapshot lacks it, and clones the task repo.
// Emits timing/debug events through req.debug when provided.
async createSandbox(req: CreateSandboxRequest): Promise<SandboxHandle> {
  const client = this.requireClient();
  // Debug sink is optional; default to a no-op so call sites stay simple.
  const emitDebug = req.debug ?? (() => {});

  emitDebug("daytona.createSandbox.start", {
    workspaceId: req.workspaceId,
    repoId: req.repoId,
    taskId: req.taskId,
    branchName: req.branchName,
  });

  const createStartedAt = Date.now();
  const sandbox = await this.withTimeout("create sandbox", () =>
    client.createSandbox({
      image: this.buildSnapshotImage(),
      envVars: this.buildEnvVars(),
      // Labels let resumeSandbox reconstruct the working directory later.
      labels: {
        "foundry.workspace": req.workspaceId,
        "foundry.task": req.taskId,
        "foundry.repo_id": req.repoId,
        "foundry.repo_remote": req.repoRemote,
        "foundry.branch": req.branchName,
      },
      autoStopInterval: this.config.autoStopInterval,
    }),
  );
  emitDebug("daytona.createSandbox.created", {
    sandboxId: sandbox.id,
    durationMs: Date.now() - createStartedAt,
    state: sandbox.state ?? null,
  });

  // Canonical repo path inside the sandbox; must match resumeSandbox.
  const repoDir = `/home/daytona/foundry/${req.workspaceId}/${req.repoId}/${req.taskId}/repo`;

  // Prepare a working directory for the agent. This must succeed for the task to work.
  // The install is a no-op when git and npx already exist (snapshot hit).
  const installStartedAt = Date.now();
  await this.runCheckedCommand(
    sandbox.id,
    [
      "bash",
      "-lc",
      `'set -euo pipefail; export DEBIAN_FRONTEND=noninteractive; if command -v git >/dev/null 2>&1 && command -v npx >/dev/null 2>&1; then exit 0; fi; apt-get update -y >/tmp/apt-update.log 2>&1; apt-get install -y git openssh-client ca-certificates nodejs npm >/tmp/apt-install.log 2>&1'`,
    ].join(" "),
    "install git + node toolchain",
  );
  emitDebug("daytona.createSandbox.install_toolchain.done", {
    sandboxId: sandbox.id,
    durationMs: Date.now() - installStartedAt,
  });

  const cloneStartedAt = Date.now();
  await this.runCheckedCommand(sandbox.id, ["bash", "-lc", this.shellSingleQuote(this.buildCloneRepoScript(req, repoDir))].join(" "), "clone repo");
  emitDebug("daytona.createSandbox.clone_repo.done", {
    sandboxId: sandbox.id,
    durationMs: Date.now() - cloneStartedAt,
  });

  return {
    sandboxId: sandbox.id,
    switchTarget: `daytona://${sandbox.id}`,
    metadata: {
      endpoint: this.config.endpoint ?? null,
      image: this.config.image,
      snapshot: sandbox.snapshot ?? null,
      remote: true,
      state: sandbox.state ?? null,
      cwd: repoDir,
    },
  };
}
|
|
||||||
|
|
||||||
async resumeSandbox(req: ResumeSandboxRequest): Promise<SandboxHandle> {
|
|
||||||
const client = this.requireClient();
|
|
||||||
|
|
||||||
await this.ensureStarted(req.sandboxId);
|
|
||||||
|
|
||||||
// Reconstruct cwd from sandbox labels written at create time.
|
|
||||||
const info = await this.withTimeout("resume get sandbox", () => client.getSandbox(req.sandboxId));
|
|
||||||
const labels = info.labels ?? {};
|
|
||||||
const workspaceId = labels["foundry.workspace"] ?? req.workspaceId;
|
|
||||||
const repoId = labels["foundry.repo_id"] ?? "";
|
|
||||||
const taskId = labels["foundry.task"] ?? "";
|
|
||||||
const cwd = repoId && taskId ? `/home/daytona/foundry/${workspaceId}/${repoId}/${taskId}/repo` : null;
|
|
||||||
|
|
||||||
return {
|
|
||||||
sandboxId: req.sandboxId,
|
|
||||||
switchTarget: `daytona://${req.sandboxId}`,
|
|
||||||
metadata: {
|
|
||||||
resumed: true,
|
|
||||||
endpoint: this.config.endpoint ?? null,
|
|
||||||
...(cwd ? { cwd } : {}),
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async destroySandbox(_req: DestroySandboxRequest): Promise<void> {
|
|
||||||
const client = this.getClient();
|
|
||||||
if (!client) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
await this.withTimeout("delete sandbox", () => client.deleteSandbox(_req.sandboxId));
|
|
||||||
} catch (error) {
|
|
||||||
// Ignore not-found style cleanup failures.
|
|
||||||
const text = error instanceof Error ? error.message : String(error);
|
|
||||||
if (text.toLowerCase().includes("not found")) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async releaseSandbox(req: ReleaseSandboxRequest): Promise<void> {
|
|
||||||
const client = this.getClient();
|
|
||||||
if (!client) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
await this.withTimeout("stop sandbox", () => client.stopSandbox(req.sandboxId, 60));
|
|
||||||
} catch (error) {
|
|
||||||
const text = error instanceof Error ? error.message : String(error);
|
|
||||||
if (text.toLowerCase().includes("not found")) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Idempotently provision the sandbox-agent server inside the sandbox and
// return its preview endpoint + token. Each install step below is a no-op
// when the tool is already present, so repeat calls are cheap.
async ensureSandboxAgent(req: EnsureAgentRequest): Promise<AgentEndpoint> {
  const client = this.requireClient();
  const acpRequestTimeoutMs = this.getAcpRequestTimeoutMs();
  // Env exports injected into the shell that launches sandbox-agent.
  const sandboxAgentExports = this.buildShellExports({
    SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS: acpRequestTimeoutMs.toString(),
  });
  // Mirror the host Codex auth profile into the sandbox when present so
  // Codex sessions reuse the operator's login rather than API keys.
  const codexAuth = this.readLocalCodexAuth();
  const codexAuthSetup = codexAuth
    ? [
        'mkdir -p "$HOME/.codex" "$HOME/.config/codex"',
        `printf %s ${JSON.stringify(Buffer.from(codexAuth, "utf8").toString("base64"))} | base64 -d > "$HOME/.codex/auth.json"`,
        'cp "$HOME/.codex/auth.json" "$HOME/.config/codex/auth.json"',
        // API-key env vars would override the copied auth profile.
        "unset OPENAI_API_KEY CODEX_API_KEY",
      ]
    : [];

  await this.ensureStarted(req.sandboxId);

  await this.runCheckedCommand(
    req.sandboxId,
    [
      "bash",
      "-lc",
      `'set -euo pipefail; if command -v curl >/dev/null 2>&1; then exit 0; fi; export DEBIAN_FRONTEND=noninteractive; apt-get update -y >/tmp/apt-update.log 2>&1; apt-get install -y curl ca-certificates >/tmp/apt-install.log 2>&1'`,
    ].join(" "),
    "install curl",
  );

  await this.runCheckedCommand(
    req.sandboxId,
    [
      "bash",
      "-lc",
      `'set -euo pipefail; if command -v npx >/dev/null 2>&1; then exit 0; fi; export DEBIAN_FRONTEND=noninteractive; apt-get update -y >/tmp/apt-update.log 2>&1; apt-get install -y nodejs npm >/tmp/apt-install.log 2>&1'`,
    ].join(" "),
    "install node toolchain",
  );

  // (Re)install sandbox-agent only when the pinned version is not present.
  await this.runCheckedCommand(
    req.sandboxId,
    [
      "bash",
      "-lc",
      `'set -euo pipefail; export PATH="$HOME/.local/bin:$PATH"; if sandbox-agent --version 2>/dev/null | grep -q "${DaytonaProvider.SANDBOX_AGENT_VERSION}"; then exit 0; fi; curl -fsSL https://releases.rivet.dev/sandbox-agent/${DaytonaProvider.SANDBOX_AGENT_VERSION}/install.sh | sh'`,
    ].join(" "),
    "install sandbox-agent",
  );

  for (const agentId of DaytonaProvider.AGENT_IDS) {
    try {
      await this.runCheckedCommand(
        req.sandboxId,
        ["bash", "-lc", `'export PATH="$HOME/.local/bin:$PATH"; sandbox-agent install-agent ${agentId}'`].join(" "),
        `install agent ${agentId}`,
      );
    } catch {
      // Some sandbox-agent builds may not ship every agent plugin; treat this as best-effort.
    }
  }

  // Start the server in the background unless one is already running.
  await this.runCheckedCommand(
    req.sandboxId,
    [
      "bash",
      "-lc",
      this.shellSingleQuote(
        [
          "set -euo pipefail",
          'export PATH="$HOME/.local/bin:$PATH"',
          ...sandboxAgentExports,
          ...codexAuthSetup,
          "command -v sandbox-agent >/dev/null 2>&1",
          "if pgrep -x sandbox-agent >/dev/null; then exit 0; fi",
          `nohup sandbox-agent server --no-token --host 0.0.0.0 --port ${DaytonaProvider.SANDBOX_AGENT_PORT} >/tmp/sandbox-agent.log 2>&1 &`,
        ].join("\n"),
      ),
    ].join(" "),
    "start sandbox-agent",
  );

  // Poll the health endpoint for up to ~45s; on failure, dump the tail of
  // the agent log to stderr for diagnosis.
  await this.runCheckedCommand(
    req.sandboxId,
    [
      "bash",
      "-lc",
      `'for i in $(seq 1 45); do curl -fsS "http://127.0.0.1:${DaytonaProvider.SANDBOX_AGENT_PORT}/v1/health" >/dev/null && exit 0; sleep 1; done; echo "sandbox-agent failed to become healthy" >&2; tail -n 80 /tmp/sandbox-agent.log >&2; exit 1'`,
    ].join(" "),
    "wait for sandbox-agent health",
  );

  const preview = await this.withTimeout("get preview endpoint", () => client.getPreviewEndpoint(req.sandboxId, DaytonaProvider.SANDBOX_AGENT_PORT));

  return {
    endpoint: preview.url,
    token: preview.token,
  };
}
|
|
||||||
|
|
||||||
async health(req: SandboxHealthRequest): Promise<SandboxHealth> {
|
|
||||||
const client = this.getClient();
|
|
||||||
if (!client) {
|
|
||||||
return {
|
|
||||||
status: "degraded",
|
|
||||||
message: "daytona driver not configured",
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const sandbox = await this.withTimeout("health get sandbox", () => client.getSandbox(req.sandboxId));
|
|
||||||
const state = String(sandbox.state ?? "unknown");
|
|
||||||
if (state.toLowerCase().includes("error")) {
|
|
||||||
return {
|
|
||||||
status: "down",
|
|
||||||
message: `daytona sandbox in error state: ${state}`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
status: "healthy",
|
|
||||||
message: `daytona sandbox state: ${state}`,
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
const text = error instanceof Error ? error.message : String(error);
|
|
||||||
return {
|
|
||||||
status: "down",
|
|
||||||
message: `daytona sandbox health check failed: ${text}`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async attachTarget(req: AttachTargetRequest): Promise<AttachTarget> {
|
|
||||||
return {
|
|
||||||
target: `daytona://${req.sandboxId}`,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async executeCommand(req: ExecuteSandboxCommandRequest): Promise<ExecuteSandboxCommandResult> {
|
|
||||||
const client = this.requireClient();
|
|
||||||
await this.ensureStarted(req.sandboxId);
|
|
||||||
return await this.withTimeout(`execute command (${req.label ?? "command"})`, () => client.executeCommand(req.sandboxId, req.command));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,77 +0,0 @@
|
||||||
import type { ProviderId } from "@sandbox-agent/foundry-shared";
|
|
||||||
import type { AppConfig } from "@sandbox-agent/foundry-shared";
|
|
||||||
import type { BackendDriver } from "../driver.js";
|
|
||||||
import { DaytonaProvider } from "./daytona/index.js";
|
|
||||||
import { LocalProvider } from "./local/index.js";
|
|
||||||
import type { SandboxProvider } from "./provider-api/index.js";
|
|
||||||
|
|
||||||
/** Lookup surface over the sandbox providers constructed for this process. */
export interface ProviderRegistry {
  /** Resolve a provider by id. */
  get(providerId: ProviderId): SandboxProvider;
  /** Ids of every provider this registry was built with. */
  availableProviderIds(): ProviderId[];
  /** Provider used when a caller does not request one explicitly. */
  defaultProviderId(): ProviderId;
}
|
|
||||||
|
|
||||||
export function createProviderRegistry(config: AppConfig, driver?: BackendDriver): ProviderRegistry {
|
|
||||||
const gitDriver = driver?.git ?? {
|
|
||||||
validateRemote: async () => {
|
|
||||||
throw new Error("local provider requires backend git driver");
|
|
||||||
},
|
|
||||||
ensureCloned: async () => {
|
|
||||||
throw new Error("local provider requires backend git driver");
|
|
||||||
},
|
|
||||||
fetch: async () => {
|
|
||||||
throw new Error("local provider requires backend git driver");
|
|
||||||
},
|
|
||||||
listRemoteBranches: async () => {
|
|
||||||
throw new Error("local provider requires backend git driver");
|
|
||||||
},
|
|
||||||
remoteDefaultBaseRef: async () => {
|
|
||||||
throw new Error("local provider requires backend git driver");
|
|
||||||
},
|
|
||||||
revParse: async () => {
|
|
||||||
throw new Error("local provider requires backend git driver");
|
|
||||||
},
|
|
||||||
ensureRemoteBranch: async () => {
|
|
||||||
throw new Error("local provider requires backend git driver");
|
|
||||||
},
|
|
||||||
diffStatForBranch: async () => {
|
|
||||||
throw new Error("local provider requires backend git driver");
|
|
||||||
},
|
|
||||||
conflictsWithMain: async () => {
|
|
||||||
throw new Error("local provider requires backend git driver");
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
const local = new LocalProvider(
|
|
||||||
{
|
|
||||||
rootDir: config.providers.local.rootDir,
|
|
||||||
sandboxAgentPort: config.providers.local.sandboxAgentPort,
|
|
||||||
},
|
|
||||||
gitDriver,
|
|
||||||
);
|
|
||||||
const daytona = new DaytonaProvider(
|
|
||||||
{
|
|
||||||
endpoint: config.providers.daytona.endpoint,
|
|
||||||
apiKey: config.providers.daytona.apiKey,
|
|
||||||
image: config.providers.daytona.image,
|
|
||||||
},
|
|
||||||
driver?.daytona,
|
|
||||||
);
|
|
||||||
|
|
||||||
const map: Record<ProviderId, SandboxProvider> = {
|
|
||||||
local,
|
|
||||||
daytona,
|
|
||||||
};
|
|
||||||
|
|
||||||
return {
|
|
||||||
get(providerId: ProviderId): SandboxProvider {
|
|
||||||
return map[providerId];
|
|
||||||
},
|
|
||||||
availableProviderIds(): ProviderId[] {
|
|
||||||
return Object.keys(map) as ProviderId[];
|
|
||||||
},
|
|
||||||
defaultProviderId(): ProviderId {
|
|
||||||
return config.providers.daytona.apiKey ? "daytona" : "local";
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
@ -1,235 +0,0 @@
|
||||||
import { randomUUID } from "node:crypto";
|
|
||||||
import { execFile } from "node:child_process";
|
|
||||||
import { existsSync, mkdirSync, rmSync } from "node:fs";
|
|
||||||
import { homedir } from "node:os";
|
|
||||||
import { dirname, resolve } from "node:path";
|
|
||||||
import { promisify } from "node:util";
|
|
||||||
import { InMemorySessionPersistDriver, SandboxAgent } from "sandbox-agent";
|
|
||||||
import type {
|
|
||||||
AgentEndpoint,
|
|
||||||
AttachTarget,
|
|
||||||
AttachTargetRequest,
|
|
||||||
CreateSandboxRequest,
|
|
||||||
DestroySandboxRequest,
|
|
||||||
EnsureAgentRequest,
|
|
||||||
ExecuteSandboxCommandRequest,
|
|
||||||
ExecuteSandboxCommandResult,
|
|
||||||
ProviderCapabilities,
|
|
||||||
ReleaseSandboxRequest,
|
|
||||||
ResumeSandboxRequest,
|
|
||||||
SandboxHandle,
|
|
||||||
SandboxHealth,
|
|
||||||
SandboxHealthRequest,
|
|
||||||
SandboxProvider,
|
|
||||||
} from "../provider-api/index.js";
|
|
||||||
import type { GitDriver } from "../../driver.js";
|
|
||||||
|
|
||||||
const execFileAsync = promisify(execFile);
|
|
||||||
const DEFAULT_SANDBOX_AGENT_PORT = 2468;
|
|
||||||
|
|
||||||
export interface LocalProviderConfig {
|
|
||||||
rootDir?: string;
|
|
||||||
sandboxAgentPort?: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
function expandHome(value: string): string {
|
|
||||||
if (value === "~") {
|
|
||||||
return homedir();
|
|
||||||
}
|
|
||||||
if (value.startsWith("~/")) {
|
|
||||||
return resolve(homedir(), value.slice(2));
|
|
||||||
}
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function branchExists(repoPath: string, branchName: string): Promise<boolean> {
|
|
||||||
try {
|
|
||||||
await execFileAsync("git", ["-C", repoPath, "show-ref", "--verify", `refs/remotes/origin/${branchName}`]);
|
|
||||||
return true;
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function checkoutBranch(repoPath: string, branchName: string, git: GitDriver): Promise<void> {
|
|
||||||
await git.fetch(repoPath);
|
|
||||||
const targetRef = (await branchExists(repoPath, branchName)) ? `origin/${branchName}` : await git.remoteDefaultBaseRef(repoPath);
|
|
||||||
await execFileAsync("git", ["-C", repoPath, "checkout", "-B", branchName, targetRef], {
|
|
||||||
env: process.env as Record<string, string>,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Runs task sandboxes directly on the host filesystem, with a single shared
// in-process sandbox-agent server handling all local sandboxes.
export class LocalProvider implements SandboxProvider {
  // Lazily-started sandbox-agent SDK, shared across all local sandboxes.
  private sdkPromise: Promise<SandboxAgent> | null = null;

  constructor(
    private readonly config: LocalProviderConfig,
    private readonly git: GitDriver,
  ) {}

  // Root under which every local sandbox stores its state; "~" is expanded.
  private rootDir(): string {
    return expandHome(this.config.rootDir?.trim() || "~/.local/share/foundry/local-sandboxes");
  }

  private sandboxRoot(workspaceId: string, sandboxId: string): string {
    return resolve(this.rootDir(), workspaceId, sandboxId);
  }

  private repoDir(workspaceId: string, sandboxId: string): string {
    return resolve(this.sandboxRoot(workspaceId, sandboxId), "repo");
  }

  // Handle for a local sandbox, addressed by repo path via the local:// scheme.
  // NOTE(review): workspaceId is currently unused here — confirm it can be dropped.
  private sandboxHandle(workspaceId: string, sandboxId: string, repoDir: string): SandboxHandle {
    return {
      sandboxId,
      switchTarget: `local://${repoDir}`,
      metadata: {
        cwd: repoDir,
        repoDir,
      },
    };
  }

  // Start (once) the shared sandbox-agent server and best-effort install the
  // claude/codex agents into it.
  private async sandboxAgent(): Promise<SandboxAgent> {
    if (!this.sdkPromise) {
      const sandboxAgentHome = resolve(this.rootDir(), ".sandbox-agent-home");
      mkdirSync(sandboxAgentHome, { recursive: true });
      // Prefer the real HOME so agent CLIs can find user credentials.
      const spawnHome = process.env.HOME?.trim() || sandboxAgentHome;
      this.sdkPromise = SandboxAgent.start({
        persist: new InMemorySessionPersistDriver(),
        spawn: {
          enabled: true,
          host: "127.0.0.1",
          port: this.config.sandboxAgentPort ?? DEFAULT_SANDBOX_AGENT_PORT,
          log: "silent",
          // Forward only the credential env vars the agents need.
          env: {
            HOME: spawnHome,
            ...(process.env.ANTHROPIC_API_KEY ? { ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY } : {}),
            ...(process.env.CLAUDE_API_KEY ? { CLAUDE_API_KEY: process.env.CLAUDE_API_KEY } : {}),
            ...(process.env.OPENAI_API_KEY ? { OPENAI_API_KEY: process.env.OPENAI_API_KEY } : {}),
            ...(process.env.CODEX_API_KEY ? { CODEX_API_KEY: process.env.CODEX_API_KEY } : {}),
            ...(process.env.GH_TOKEN ? { GH_TOKEN: process.env.GH_TOKEN } : {}),
            ...(process.env.GITHUB_TOKEN ? { GITHUB_TOKEN: process.env.GITHUB_TOKEN } : {}),
          },
        },
      }).then(async (sdk) => {
        for (const agentName of ["claude", "codex"] as const) {
          try {
            const agent = await sdk.getAgent(agentName, { config: true });
            if (!agent.installed) {
              await sdk.installAgent(agentName);
            }
          } catch {
            // The local provider can still function if the agent is already available
            // through the user's PATH or the install check is unsupported.
          }
        }
        return sdk;
      });
    }
    return this.sdkPromise;
  }

  /** Stable provider identifier used by the registry. */
  id() {
    return "local" as const;
  }

  /** Local sandboxes are on-host and keep session state across calls. */
  capabilities(): ProviderCapabilities {
    return {
      remote: false,
      supportsSessionReuse: true,
    };
  }

  // No provider-specific validation yet; nullish input normalizes to {}.
  async validateConfig(input: unknown): Promise<Record<string, unknown>> {
    return (input as Record<string, unknown> | undefined) ?? {};
  }

  // Clone the task repo under the sandbox root and check out the task branch.
  async createSandbox(req: CreateSandboxRequest): Promise<SandboxHandle> {
    const sandboxId = req.taskId || `local-${randomUUID()}`;
    const repoDir = this.repoDir(req.workspaceId, sandboxId);
    mkdirSync(dirname(repoDir), { recursive: true });
    await this.git.ensureCloned(req.repoRemote, repoDir, { githubToken: req.githubToken });
    await checkoutBranch(repoDir, req.branchName, this.git);
    return this.sandboxHandle(req.workspaceId, sandboxId, repoDir);
  }

  // Resume simply re-derives the handle; the repo must still exist on disk.
  async resumeSandbox(req: ResumeSandboxRequest): Promise<SandboxHandle> {
    const repoDir = this.repoDir(req.workspaceId, req.sandboxId);
    if (!existsSync(repoDir)) {
      throw new Error(`local sandbox repo is missing: ${repoDir}`);
    }
    return this.sandboxHandle(req.workspaceId, req.sandboxId, repoDir);
  }

  // Remove the sandbox directory tree; `force` makes this idempotent.
  async destroySandbox(req: DestroySandboxRequest): Promise<void> {
    rmSync(this.sandboxRoot(req.workspaceId, req.sandboxId), {
      force: true,
      recursive: true,
    });
  }

  async releaseSandbox(_req: ReleaseSandboxRequest): Promise<void> {
    // Local sandboxes stay warm on disk to preserve session state and repo context.
  }

  // Expose the shared in-process sandbox-agent server's endpoint.
  async ensureSandboxAgent(_req: EnsureAgentRequest): Promise<AgentEndpoint> {
    const sdk = await this.sandboxAgent();
    // NOTE(review): baseUrl/token are read off the SDK via a cast — confirm
    // they are part of its public surface.
    const { baseUrl, token } = sdk as unknown as {
      baseUrl?: string;
      token?: string;
    };
    if (!baseUrl) {
      throw new Error("sandbox-agent baseUrl is unavailable");
    }
    return token ? { endpoint: baseUrl, token } : { endpoint: baseUrl };
  }

  // "down" when the repo is missing or any check throws; otherwise maps the
  // sandbox-agent health status to healthy/degraded.
  async health(req: SandboxHealthRequest): Promise<SandboxHealth> {
    try {
      const repoDir = this.repoDir(req.workspaceId, req.sandboxId);
      if (!existsSync(repoDir)) {
        return {
          status: "down",
          message: "local sandbox repo is missing",
        };
      }
      const sdk = await this.sandboxAgent();
      const health = await sdk.getHealth();
      return {
        status: health.status === "ok" ? "healthy" : "degraded",
        message: health.status,
      };
    } catch (error) {
      return {
        status: "down",
        message: error instanceof Error ? error.message : String(error),
      };
    }
  }

  // Local attachment is just the repo directory path.
  async attachTarget(req: AttachTargetRequest): Promise<AttachTarget> {
    return { target: this.repoDir(req.workspaceId, req.sandboxId) };
  }

  // Run the command via bash in the sandbox's repo dir, mapping child_process
  // failures into the { exitCode, result } shape instead of throwing.
  async executeCommand(req: ExecuteSandboxCommandRequest): Promise<ExecuteSandboxCommandResult> {
    const cwd = this.repoDir(req.workspaceId, req.sandboxId);
    try {
      const { stdout, stderr } = await execFileAsync("bash", ["-lc", req.command], {
        cwd,
        env: process.env as Record<string, string>,
        maxBuffer: 1024 * 1024 * 16, // 16 MiB of combined output
      });
      return {
        exitCode: 0,
        result: [stdout, stderr].filter(Boolean).join(""),
      };
    } catch (error) {
      const detail = error as { stdout?: string; stderr?: string; code?: number };
      return {
        exitCode: typeof detail.code === "number" ? detail.code : 1,
        result: [detail.stdout, detail.stderr, error instanceof Error ? error.message : String(error)].filter(Boolean).join(""),
      };
    }
  }
}
|
|
||||||
|
|
@ -1,100 +0,0 @@
|
||||||
import type { ProviderId } from "@sandbox-agent/foundry-shared";
|
|
||||||
|
|
||||||
/** Static traits of a provider, used for routing and UI affordances. */
export interface ProviderCapabilities {
  /** True when sandboxes run off-host (e.g. Daytona), false for local. */
  remote: boolean;
  /** True when sessions can be resumed against an existing sandbox. */
  supportsSessionReuse: boolean;
}

/** Inputs for provisioning a brand-new sandbox for a task. */
export interface CreateSandboxRequest {
  workspaceId: string;
  repoId: string;
  /** Git remote URL of the task repository. */
  repoRemote: string;
  /** Task branch to check out (created from the default branch if absent). */
  branchName: string;
  taskId: string;
  /** Token used for authenticated clone/fetch of private repos. */
  githubToken?: string | null;
  /** Optional sink for timing/debug events during provisioning. */
  debug?: (message: string, context?: Record<string, unknown>) => void;
  /** Provider-specific options (opaque to callers). */
  options?: Record<string, unknown>;
}

/** Inputs for reattaching to a previously created sandbox. */
export interface ResumeSandboxRequest {
  workspaceId: string;
  sandboxId: string;
  /** Provider-specific options (opaque to callers). */
  options?: Record<string, unknown>;
}

/** Inputs for permanently deleting a sandbox. */
export interface DestroySandboxRequest {
  workspaceId: string;
  sandboxId: string;
}

/** Inputs for releasing a sandbox's resources without deleting its state. */
export interface ReleaseSandboxRequest {
  workspaceId: string;
  sandboxId: string;
}

/** Inputs for provisioning/starting the in-sandbox agent server. */
export interface EnsureAgentRequest {
  workspaceId: string;
  sandboxId: string;
}

/** Inputs for a sandbox health probe. */
export interface SandboxHealthRequest {
  workspaceId: string;
  sandboxId: string;
}

/** Inputs for resolving an attach target for a sandbox. */
export interface AttachTargetRequest {
  workspaceId: string;
  sandboxId: string;
}

/** Inputs for running an arbitrary command inside a sandbox. */
export interface ExecuteSandboxCommandRequest {
  workspaceId: string;
  sandboxId: string;
  /** Shell command text executed inside the sandbox. */
  command: string;
  /** Human-readable label used in timeouts/errors. */
  label?: string;
}

/** Result of creating or resuming a sandbox. */
export interface SandboxHandle {
  sandboxId: string;
  /** URI-style target callers use to switch into the sandbox. */
  switchTarget: string;
  /** Provider-specific extras (e.g. cwd, endpoint, image). */
  metadata: Record<string, unknown>;
}

/** Location (and optional auth token) of the in-sandbox agent server. */
export interface AgentEndpoint {
  endpoint: string;
  token?: string;
}

/** Aggregate health verdict with a human-readable explanation. */
export interface SandboxHealth {
  status: "healthy" | "degraded" | "down";
  message: string;
}

/** Resolved attach target for a sandbox. */
export interface AttachTarget {
  target: string;
}

/** Exit code plus combined output of an in-sandbox command. */
export interface ExecuteSandboxCommandResult {
  exitCode: number;
  result: string;
}

/** Contract every sandbox provider (local, daytona, …) implements. */
export interface SandboxProvider {
  id(): ProviderId;
  capabilities(): ProviderCapabilities;
  validateConfig(input: unknown): Promise<Record<string, unknown>>;

  createSandbox(req: CreateSandboxRequest): Promise<SandboxHandle>;
  resumeSandbox(req: ResumeSandboxRequest): Promise<SandboxHandle>;
  destroySandbox(req: DestroySandboxRequest): Promise<void>;
  /**
   * Release resources for a sandbox without deleting its filesystem/state.
   * For remote providers, this typically maps to "stop"/"suspend".
   */
  releaseSandbox(req: ReleaseSandboxRequest): Promise<void>;

  ensureSandboxAgent(req: EnsureAgentRequest): Promise<AgentEndpoint>;
  health(req: SandboxHealthRequest): Promise<SandboxHealth>;
  attachTarget(req: AttachTargetRequest): Promise<AttachTarget>;
  executeCommand(req: ExecuteSandboxCommandRequest): Promise<ExecuteSandboxCommandResult>;
}
|
|
||||||
39
foundry/packages/backend/src/sandbox-config.ts
Normal file
39
foundry/packages/backend/src/sandbox-config.ts
Normal file
|
|
@ -0,0 +1,39 @@
|
||||||
|
import type { AppConfig, ProviderId } from "@sandbox-agent/foundry-shared";
|
||||||
|
|
||||||
|
function hasE2BApiKey(config: AppConfig): boolean {
|
||||||
|
return Boolean(config.providers.e2b.apiKey?.trim());
|
||||||
|
}
|
||||||
|
|
||||||
|
function forcedSandboxProviderId(): ProviderId | null {
|
||||||
|
const raw = process.env.FOUNDRY_SANDBOX_PROVIDER?.trim() ?? process.env.HF_SANDBOX_PROVIDER?.trim() ?? null;
|
||||||
|
if (raw === "local" || raw === "e2b") {
|
||||||
|
return raw;
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function defaultSandboxProviderId(config: AppConfig): ProviderId {
|
||||||
|
const forced = forcedSandboxProviderId();
|
||||||
|
if (forced === "local") {
|
||||||
|
return "local";
|
||||||
|
}
|
||||||
|
if (forced === "e2b") {
|
||||||
|
if (!hasE2BApiKey(config)) {
|
||||||
|
throw new Error("FOUNDRY_SANDBOX_PROVIDER=e2b requires E2B_API_KEY to be configured.");
|
||||||
|
}
|
||||||
|
return "e2b";
|
||||||
|
}
|
||||||
|
return hasE2BApiKey(config) ? "e2b" : "local";
|
||||||
|
}
|
||||||
|
|
||||||
|
export function availableSandboxProviderIds(config: AppConfig): ProviderId[] {
|
||||||
|
return hasE2BApiKey(config) ? ["e2b", "local"] : ["local"];
|
||||||
|
}
|
||||||
|
|
||||||
|
export function resolveSandboxProviderId(config: AppConfig, requested?: ProviderId | null): ProviderId {
|
||||||
|
if (requested === "e2b" && !hasE2BApiKey(config)) {
|
||||||
|
throw new Error("E2B provider is not configured. Set E2B_API_KEY before selecting the e2b provider.");
|
||||||
|
}
|
||||||
|
|
||||||
|
return requested ?? defaultSandboxProviderId(config);
|
||||||
|
}
|
||||||
|
|
@ -1,205 +0,0 @@
|
||||||
import { mkdirSync, rmSync, writeFileSync } from "node:fs";
|
|
||||||
import { tmpdir } from "node:os";
|
|
||||||
import { resolve } from "node:path";
|
|
||||||
import { describe, expect, it } from "vitest";
|
|
||||||
import type { DaytonaClientLike, DaytonaDriver } from "../src/driver.js";
|
|
||||||
import type { DaytonaCreateSandboxOptions } from "../src/integrations/daytona/client.js";
|
|
||||||
import { DaytonaProvider } from "../src/providers/daytona/index.js";
|
|
||||||
|
|
||||||
class RecordingDaytonaClient implements DaytonaClientLike {
|
|
||||||
createSandboxCalls: DaytonaCreateSandboxOptions[] = [];
|
|
||||||
executedCommands: string[] = [];
|
|
||||||
|
|
||||||
async createSandbox(options: DaytonaCreateSandboxOptions) {
|
|
||||||
this.createSandboxCalls.push(options);
|
|
||||||
return {
|
|
||||||
id: "sandbox-1",
|
|
||||||
state: "started",
|
|
||||||
snapshot: "snapshot-foundry",
|
|
||||||
labels: {},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async getSandbox(sandboxId: string) {
|
|
||||||
return {
|
|
||||||
id: sandboxId,
|
|
||||||
state: "started",
|
|
||||||
snapshot: "snapshot-foundry",
|
|
||||||
labels: {},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async startSandbox(_sandboxId: string, _timeoutSeconds?: number) {}
|
|
||||||
|
|
||||||
async stopSandbox(_sandboxId: string, _timeoutSeconds?: number) {}
|
|
||||||
|
|
||||||
async deleteSandbox(_sandboxId: string) {}
|
|
||||||
|
|
||||||
async executeCommand(_sandboxId: string, command: string) {
|
|
||||||
this.executedCommands.push(command);
|
|
||||||
return { exitCode: 0, result: "" };
|
|
||||||
}
|
|
||||||
|
|
||||||
async getPreviewEndpoint(sandboxId: string, port: number) {
|
|
||||||
return {
|
|
||||||
url: `https://preview.example/sandbox/${sandboxId}/port/${port}`,
|
|
||||||
token: "preview-token",
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function createProviderWithClient(client: DaytonaClientLike): DaytonaProvider {
|
|
||||||
const daytonaDriver: DaytonaDriver = {
|
|
||||||
createClient: () => client,
|
|
||||||
};
|
|
||||||
|
|
||||||
return new DaytonaProvider(
|
|
||||||
{
|
|
||||||
apiKey: "test-key",
|
|
||||||
image: "ubuntu:24.04",
|
|
||||||
},
|
|
||||||
daytonaDriver,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
describe("daytona provider snapshot image behavior", () => {
|
|
||||||
it("creates sandboxes using a snapshot-capable image recipe", async () => {
|
|
||||||
const client = new RecordingDaytonaClient();
|
|
||||||
const provider = createProviderWithClient(client);
|
|
||||||
|
|
||||||
const handle = await provider.createSandbox({
|
|
||||||
workspaceId: "default",
|
|
||||||
repoId: "repo-1",
|
|
||||||
repoRemote: "https://github.com/acme/repo.git",
|
|
||||||
branchName: "feature/test",
|
|
||||||
taskId: "task-1",
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(client.createSandboxCalls).toHaveLength(1);
|
|
||||||
const createCall = client.createSandboxCalls[0];
|
|
||||||
if (!createCall) {
|
|
||||||
throw new Error("expected create sandbox call");
|
|
||||||
}
|
|
||||||
|
|
||||||
expect(typeof createCall.image).not.toBe("string");
|
|
||||||
if (typeof createCall.image === "string") {
|
|
||||||
throw new Error("expected daytona image recipe object");
|
|
||||||
}
|
|
||||||
|
|
||||||
const dockerfile = createCall.image.dockerfile;
|
|
||||||
expect(dockerfile).toContain("apt-get install -y curl ca-certificates git openssh-client nodejs npm");
|
|
||||||
expect(dockerfile).toContain("sandbox-agent/0.3.0/install.sh");
|
|
||||||
const installAgentLines = dockerfile.match(/sandbox-agent install-agent [a-z0-9-]+/gi) ?? [];
|
|
||||||
expect(installAgentLines.length).toBeGreaterThanOrEqual(2);
|
|
||||||
const commands = client.executedCommands.join("\n");
|
|
||||||
expect(commands).toContain("GIT_TERMINAL_PROMPT=0");
|
|
||||||
expect(commands).toContain("GIT_ASKPASS=/bin/echo");
|
|
||||||
expect(commands).not.toContain("[[");
|
|
||||||
expect(commands).not.toContain("GIT_AUTH_ARGS=()");
|
|
||||||
expect(commands).not.toContain("${GIT_AUTH_ARGS[@]}");
|
|
||||||
expect(commands).not.toContain(".extraheader");
|
|
||||||
|
|
||||||
expect(handle.metadata.snapshot).toBe("snapshot-foundry");
|
|
||||||
expect(handle.metadata.image).toBe("ubuntu:24.04");
|
|
||||||
expect(handle.metadata.cwd).toBe("/home/daytona/foundry/default/repo-1/task-1/repo");
|
|
||||||
expect(client.executedCommands.length).toBeGreaterThan(0);
|
|
||||||
});
|
|
||||||
|
|
||||||
it("starts sandbox-agent with ACP timeout env override", async () => {
|
|
||||||
const previous = process.env.HF_SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS;
|
|
||||||
const previousHome = process.env.HOME;
|
|
||||||
const tempHome = resolve(tmpdir(), `daytona-provider-test-${Date.now()}`);
|
|
||||||
mkdirSync(resolve(tempHome, ".codex"), { recursive: true });
|
|
||||||
writeFileSync(resolve(tempHome, ".codex", "auth.json"), JSON.stringify({ access_token: "test-token" }));
|
|
||||||
process.env.HOME = tempHome;
|
|
||||||
process.env.HF_SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS = "240000";
|
|
||||||
|
|
||||||
try {
|
|
||||||
const client = new RecordingDaytonaClient();
|
|
||||||
const provider = createProviderWithClient(client);
|
|
||||||
|
|
||||||
await provider.ensureSandboxAgent({
|
|
||||||
workspaceId: "default",
|
|
||||||
sandboxId: "sandbox-1",
|
|
||||||
});
|
|
||||||
|
|
||||||
const startCommand = client.executedCommands.find(
|
|
||||||
(command) => command.includes("export SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS=") && command.includes("sandbox-agent server --no-token"),
|
|
||||||
);
|
|
||||||
|
|
||||||
const joined = client.executedCommands.join("\n");
|
|
||||||
expect(joined).toContain("sandbox-agent/0.3.0/install.sh");
|
|
||||||
expect(joined).toContain("SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS");
|
|
||||||
expect(joined).toContain("apt-get install -y nodejs npm");
|
|
||||||
expect(joined).toContain("sandbox-agent server --no-token --host 0.0.0.0 --port 2468");
|
|
||||||
expect(joined).toContain('mkdir -p "$HOME/.codex" "$HOME/.config/codex"');
|
|
||||||
expect(joined).toContain("unset OPENAI_API_KEY CODEX_API_KEY");
|
|
||||||
expect(joined).not.toContain('rm -f "$HOME/.codex/auth.json"');
|
|
||||||
expect(startCommand).toBeTruthy();
|
|
||||||
} finally {
|
|
||||||
if (previous === undefined) {
|
|
||||||
delete process.env.HF_SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS;
|
|
||||||
} else {
|
|
||||||
process.env.HF_SANDBOX_AGENT_ACP_REQUEST_TIMEOUT_MS = previous;
|
|
||||||
}
|
|
||||||
if (previousHome === undefined) {
|
|
||||||
delete process.env.HOME;
|
|
||||||
} else {
|
|
||||||
process.env.HOME = previousHome;
|
|
||||||
}
|
|
||||||
rmSync(tempHome, { force: true, recursive: true });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
it("fails with explicit timeout when daytona createSandbox hangs", async () => {
|
|
||||||
const previous = process.env.HF_DAYTONA_REQUEST_TIMEOUT_MS;
|
|
||||||
process.env.HF_DAYTONA_REQUEST_TIMEOUT_MS = "120";
|
|
||||||
|
|
||||||
const hangingClient: DaytonaClientLike = {
|
|
||||||
createSandbox: async () => await new Promise(() => {}),
|
|
||||||
getSandbox: async (sandboxId) => ({ id: sandboxId, state: "started" }),
|
|
||||||
startSandbox: async () => {},
|
|
||||||
stopSandbox: async () => {},
|
|
||||||
deleteSandbox: async () => {},
|
|
||||||
executeCommand: async () => ({ exitCode: 0, result: "" }),
|
|
||||||
getPreviewEndpoint: async (sandboxId, port) => ({
|
|
||||||
url: `https://preview.example/sandbox/${sandboxId}/port/${port}`,
|
|
||||||
token: "preview-token",
|
|
||||||
}),
|
|
||||||
};
|
|
||||||
|
|
||||||
try {
|
|
||||||
const provider = createProviderWithClient(hangingClient);
|
|
||||||
await expect(
|
|
||||||
provider.createSandbox({
|
|
||||||
workspaceId: "default",
|
|
||||||
repoId: "repo-1",
|
|
||||||
repoRemote: "https://github.com/acme/repo.git",
|
|
||||||
branchName: "feature/test",
|
|
||||||
taskId: "task-timeout",
|
|
||||||
}),
|
|
||||||
).rejects.toThrow("daytona create sandbox timed out after 120ms");
|
|
||||||
} finally {
|
|
||||||
if (previous === undefined) {
|
|
||||||
delete process.env.HF_DAYTONA_REQUEST_TIMEOUT_MS;
|
|
||||||
} else {
|
|
||||||
process.env.HF_DAYTONA_REQUEST_TIMEOUT_MS = previous;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
it("executes backend-managed sandbox commands through provider API", async () => {
|
|
||||||
const client = new RecordingDaytonaClient();
|
|
||||||
const provider = createProviderWithClient(client);
|
|
||||||
|
|
||||||
const result = await provider.executeCommand({
|
|
||||||
workspaceId: "default",
|
|
||||||
sandboxId: "sandbox-1",
|
|
||||||
command: "echo backend-push",
|
|
||||||
label: "manual push",
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(result.exitCode).toBe(0);
|
|
||||||
expect(client.executedCommands).toContain("echo backend-push");
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
@ -3,7 +3,6 @@ import { join } from "node:path";
|
||||||
import { ConfigSchema, type AppConfig } from "@sandbox-agent/foundry-shared";
|
import { ConfigSchema, type AppConfig } from "@sandbox-agent/foundry-shared";
|
||||||
import type { BackendDriver } from "../../src/driver.js";
|
import type { BackendDriver } from "../../src/driver.js";
|
||||||
import { initActorRuntimeContext } from "../../src/actors/context.js";
|
import { initActorRuntimeContext } from "../../src/actors/context.js";
|
||||||
import { createProviderRegistry } from "../../src/providers/index.js";
|
|
||||||
import { createDefaultAppShellServices } from "../../src/services/app-shell-runtime.js";
|
import { createDefaultAppShellServices } from "../../src/services/app-shell-runtime.js";
|
||||||
|
|
||||||
export function createTestConfig(overrides?: Partial<AppConfig>): AppConfig {
|
export function createTestConfig(overrides?: Partial<AppConfig>): AppConfig {
|
||||||
|
|
@ -21,7 +20,8 @@ export function createTestConfig(overrides?: Partial<AppConfig>): AppConfig {
|
||||||
backup_retention_days: 7,
|
backup_retention_days: 7,
|
||||||
},
|
},
|
||||||
providers: {
|
providers: {
|
||||||
daytona: { image: "ubuntu:24.04" },
|
local: {},
|
||||||
|
e2b: {},
|
||||||
},
|
},
|
||||||
...overrides,
|
...overrides,
|
||||||
});
|
});
|
||||||
|
|
@ -29,7 +29,6 @@ export function createTestConfig(overrides?: Partial<AppConfig>): AppConfig {
|
||||||
|
|
||||||
export function createTestRuntimeContext(driver: BackendDriver, configOverrides?: Partial<AppConfig>): { config: AppConfig } {
|
export function createTestRuntimeContext(driver: BackendDriver, configOverrides?: Partial<AppConfig>): { config: AppConfig } {
|
||||||
const config = createTestConfig(configOverrides);
|
const config = createTestConfig(configOverrides);
|
||||||
const providers = createProviderRegistry(config, driver);
|
initActorRuntimeContext(config, undefined, driver, createDefaultAppShellServices());
|
||||||
initActorRuntimeContext(config, providers, undefined, driver, createDefaultAppShellServices());
|
|
||||||
return { config };
|
return { config };
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,23 +1,10 @@
|
||||||
import type {
|
import type { BackendDriver, GitDriver, GithubDriver, StackDriver, TmuxDriver } from "../../src/driver.js";
|
||||||
BackendDriver,
|
|
||||||
DaytonaClientLike,
|
|
||||||
DaytonaDriver,
|
|
||||||
GitDriver,
|
|
||||||
GithubDriver,
|
|
||||||
StackDriver,
|
|
||||||
SandboxAgentDriver,
|
|
||||||
SandboxAgentClientLike,
|
|
||||||
TmuxDriver,
|
|
||||||
} from "../../src/driver.js";
|
|
||||||
import type { ListEventsRequest, ListPage, ListPageRequest, ProcessInfo, ProcessLogsResponse, SessionEvent, SessionRecord } from "sandbox-agent";
|
|
||||||
|
|
||||||
export function createTestDriver(overrides?: Partial<BackendDriver>): BackendDriver {
|
export function createTestDriver(overrides?: Partial<BackendDriver>): BackendDriver {
|
||||||
return {
|
return {
|
||||||
git: overrides?.git ?? createTestGitDriver(),
|
git: overrides?.git ?? createTestGitDriver(),
|
||||||
stack: overrides?.stack ?? createTestStackDriver(),
|
stack: overrides?.stack ?? createTestStackDriver(),
|
||||||
github: overrides?.github ?? createTestGithubDriver(),
|
github: overrides?.github ?? createTestGithubDriver(),
|
||||||
sandboxAgent: overrides?.sandboxAgent ?? createTestSandboxAgentDriver(),
|
|
||||||
daytona: overrides?.daytona ?? createTestDaytonaDriver(),
|
|
||||||
tmux: overrides?.tmux ?? createTestTmuxDriver(),
|
tmux: overrides?.tmux ?? createTestTmuxDriver(),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
@ -63,79 +50,6 @@ export function createTestGithubDriver(overrides?: Partial<GithubDriver>): Githu
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export function createTestSandboxAgentDriver(overrides?: Partial<SandboxAgentDriver>): SandboxAgentDriver {
|
|
||||||
return {
|
|
||||||
createClient: (_opts) => createTestSandboxAgentClient(),
|
|
||||||
...overrides,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createTestSandboxAgentClient(overrides?: Partial<SandboxAgentClientLike>): SandboxAgentClientLike {
|
|
||||||
const defaultProcess: ProcessInfo = {
|
|
||||||
id: "process-1",
|
|
||||||
command: "bash",
|
|
||||||
args: ["-lc", "echo test"],
|
|
||||||
createdAtMs: Date.now(),
|
|
||||||
cwd: "/workspace",
|
|
||||||
exitCode: null,
|
|
||||||
exitedAtMs: null,
|
|
||||||
interactive: true,
|
|
||||||
pid: 123,
|
|
||||||
status: "running",
|
|
||||||
tty: true,
|
|
||||||
};
|
|
||||||
const defaultLogs: ProcessLogsResponse = {
|
|
||||||
processId: defaultProcess.id,
|
|
||||||
stream: "combined",
|
|
||||||
entries: [],
|
|
||||||
};
|
|
||||||
return {
|
|
||||||
createSession: async (_prompt) => ({ id: "test-session-1", status: "running" }),
|
|
||||||
sessionStatus: async (sessionId) => ({ id: sessionId, status: "running" }),
|
|
||||||
listSessions: async (_request?: ListPageRequest): Promise<ListPage<SessionRecord>> => ({
|
|
||||||
items: [],
|
|
||||||
nextCursor: undefined,
|
|
||||||
}),
|
|
||||||
listEvents: async (_request: ListEventsRequest): Promise<ListPage<SessionEvent>> => ({
|
|
||||||
items: [],
|
|
||||||
nextCursor: undefined,
|
|
||||||
}),
|
|
||||||
createProcess: async () => defaultProcess,
|
|
||||||
listProcesses: async () => ({ processes: [defaultProcess] }),
|
|
||||||
getProcessLogs: async () => defaultLogs,
|
|
||||||
stopProcess: async () => ({ ...defaultProcess, status: "exited", exitCode: 0, exitedAtMs: Date.now() }),
|
|
||||||
killProcess: async () => ({ ...defaultProcess, status: "exited", exitCode: 137, exitedAtMs: Date.now() }),
|
|
||||||
deleteProcess: async () => {},
|
|
||||||
sendPrompt: async (_request) => {},
|
|
||||||
cancelSession: async (_sessionId) => {},
|
|
||||||
destroySession: async (_sessionId) => {},
|
|
||||||
...overrides,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createTestDaytonaDriver(overrides?: Partial<DaytonaDriver>): DaytonaDriver {
|
|
||||||
return {
|
|
||||||
createClient: (_opts) => createTestDaytonaClient(),
|
|
||||||
...overrides,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createTestDaytonaClient(overrides?: Partial<DaytonaClientLike>): DaytonaClientLike {
|
|
||||||
return {
|
|
||||||
createSandbox: async () => ({ id: "sandbox-test-1", state: "started" }),
|
|
||||||
getSandbox: async (sandboxId) => ({ id: sandboxId, state: "started" }),
|
|
||||||
startSandbox: async () => {},
|
|
||||||
stopSandbox: async () => {},
|
|
||||||
deleteSandbox: async () => {},
|
|
||||||
executeCommand: async () => ({ exitCode: 0, result: "" }),
|
|
||||||
getPreviewEndpoint: async (sandboxId, port) => ({
|
|
||||||
url: `https://preview.example/sandbox/${sandboxId}/port/${port}`,
|
|
||||||
token: "preview-token",
|
|
||||||
}),
|
|
||||||
...overrides,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createTestTmuxDriver(overrides?: Partial<TmuxDriver>): TmuxDriver {
|
export function createTestTmuxDriver(overrides?: Partial<TmuxDriver>): TmuxDriver {
|
||||||
return {
|
return {
|
||||||
setWindowStatus: () => 0,
|
setWindowStatus: () => 0,
|
||||||
|
|
|
||||||
|
|
@ -1,14 +1,5 @@
|
||||||
import { describe, expect, it } from "vitest";
|
import { describe, expect, it } from "vitest";
|
||||||
import {
|
import { taskKey, historyKey, projectBranchSyncKey, projectKey, projectPrSyncKey, taskSandboxKey, workspaceKey } from "../src/actors/keys.js";
|
||||||
taskKey,
|
|
||||||
taskStatusSyncKey,
|
|
||||||
historyKey,
|
|
||||||
projectBranchSyncKey,
|
|
||||||
projectKey,
|
|
||||||
projectPrSyncKey,
|
|
||||||
sandboxInstanceKey,
|
|
||||||
workspaceKey,
|
|
||||||
} from "../src/actors/keys.js";
|
|
||||||
|
|
||||||
describe("actor keys", () => {
|
describe("actor keys", () => {
|
||||||
it("prefixes every key with workspace namespace", () => {
|
it("prefixes every key with workspace namespace", () => {
|
||||||
|
|
@ -16,11 +7,10 @@ describe("actor keys", () => {
|
||||||
workspaceKey("default"),
|
workspaceKey("default"),
|
||||||
projectKey("default", "repo"),
|
projectKey("default", "repo"),
|
||||||
taskKey("default", "repo", "task"),
|
taskKey("default", "repo", "task"),
|
||||||
sandboxInstanceKey("default", "daytona", "sbx"),
|
taskSandboxKey("default", "sbx"),
|
||||||
historyKey("default", "repo"),
|
historyKey("default", "repo"),
|
||||||
projectPrSyncKey("default", "repo"),
|
projectPrSyncKey("default", "repo"),
|
||||||
projectBranchSyncKey("default", "repo"),
|
projectBranchSyncKey("default", "repo"),
|
||||||
taskStatusSyncKey("default", "repo", "task", "sandbox-1", "session-1"),
|
|
||||||
];
|
];
|
||||||
|
|
||||||
for (const key of keys) {
|
for (const key of keys) {
|
||||||
|
|
|
||||||
|
|
@ -1,52 +0,0 @@
|
||||||
import { describe, expect, it } from "vitest";
|
|
||||||
import { ConfigSchema, type AppConfig } from "@sandbox-agent/foundry-shared";
|
|
||||||
import { createProviderRegistry } from "../src/providers/index.js";
|
|
||||||
|
|
||||||
function makeConfig(): AppConfig {
|
|
||||||
return ConfigSchema.parse({
|
|
||||||
auto_submit: true,
|
|
||||||
notify: ["terminal"],
|
|
||||||
workspace: { default: "default" },
|
|
||||||
backend: {
|
|
||||||
host: "127.0.0.1",
|
|
||||||
port: 7741,
|
|
||||||
dbPath: "~/.local/share/foundry/task.db",
|
|
||||||
opencode_poll_interval: 2,
|
|
||||||
github_poll_interval: 30,
|
|
||||||
backup_interval_secs: 3600,
|
|
||||||
backup_retention_days: 7,
|
|
||||||
},
|
|
||||||
providers: {
|
|
||||||
local: {},
|
|
||||||
daytona: { image: "ubuntu:24.04" },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
describe("provider registry", () => {
|
|
||||||
it("defaults to local when daytona is not configured", () => {
|
|
||||||
const registry = createProviderRegistry(makeConfig());
|
|
||||||
expect(registry.defaultProviderId()).toBe("local");
|
|
||||||
});
|
|
||||||
|
|
||||||
it("prefers daytona when an api key is configured", () => {
|
|
||||||
const registry = createProviderRegistry(
|
|
||||||
ConfigSchema.parse({
|
|
||||||
...makeConfig(),
|
|
||||||
providers: {
|
|
||||||
...makeConfig().providers,
|
|
||||||
daytona: {
|
|
||||||
...makeConfig().providers.daytona,
|
|
||||||
apiKey: "test-token",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
expect(registry.defaultProviderId()).toBe("daytona");
|
|
||||||
});
|
|
||||||
|
|
||||||
it("returns the built-in provider", () => {
|
|
||||||
const registry = createProviderRegistry(makeConfig());
|
|
||||||
expect(registry.get("daytona").id()).toBe("daytona");
|
|
||||||
});
|
|
||||||
});
|
|
||||||
50
foundry/packages/backend/test/sandbox-config.test.ts
Normal file
50
foundry/packages/backend/test/sandbox-config.test.ts
Normal file
|
|
@ -0,0 +1,50 @@
|
||||||
|
import { describe, expect, it } from "vitest";
|
||||||
|
import { ConfigSchema, type AppConfig } from "@sandbox-agent/foundry-shared";
|
||||||
|
import { availableSandboxProviderIds, defaultSandboxProviderId, resolveSandboxProviderId } from "../src/sandbox-config.js";
|
||||||
|
|
||||||
|
function makeConfig(overrides?: Partial<AppConfig>): AppConfig {
|
||||||
|
return ConfigSchema.parse({
|
||||||
|
auto_submit: true,
|
||||||
|
notify: ["terminal"],
|
||||||
|
workspace: { default: "default" },
|
||||||
|
backend: {
|
||||||
|
host: "127.0.0.1",
|
||||||
|
port: 7741,
|
||||||
|
dbPath: "~/.local/share/foundry/task.db",
|
||||||
|
opencode_poll_interval: 2,
|
||||||
|
github_poll_interval: 30,
|
||||||
|
backup_interval_secs: 3600,
|
||||||
|
backup_retention_days: 7,
|
||||||
|
},
|
||||||
|
providers: {
|
||||||
|
local: {},
|
||||||
|
e2b: {},
|
||||||
|
},
|
||||||
|
...overrides,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("sandbox config", () => {
|
||||||
|
it("defaults to local when e2b is not configured", () => {
|
||||||
|
const config = makeConfig();
|
||||||
|
expect(defaultSandboxProviderId(config)).toBe("local");
|
||||||
|
expect(availableSandboxProviderIds(config)).toEqual(["local"]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("prefers e2b when an api key is configured", () => {
|
||||||
|
const config = makeConfig({
|
||||||
|
providers: {
|
||||||
|
local: {},
|
||||||
|
e2b: { apiKey: "test-token" },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
expect(defaultSandboxProviderId(config)).toBe("e2b");
|
||||||
|
expect(availableSandboxProviderIds(config)).toEqual(["e2b", "local"]);
|
||||||
|
expect(resolveSandboxProviderId(config, "e2b")).toBe("e2b");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("rejects selecting e2b without an api key", () => {
|
||||||
|
const config = makeConfig();
|
||||||
|
expect(() => resolveSandboxProviderId(config, "e2b")).toThrow("E2B provider is not configured");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
@ -1,21 +0,0 @@
|
||||||
import { describe, expect, it } from "vitest";
|
|
||||||
import { resolveEventListOffset } from "../src/actors/sandbox-instance/persist.js";
|
|
||||||
|
|
||||||
describe("sandbox-instance persist event offset", () => {
|
|
||||||
it("returns newest tail when cursor is omitted", () => {
|
|
||||||
expect(resolveEventListOffset({ total: 180, limit: 50 })).toBe(130);
|
|
||||||
});
|
|
||||||
|
|
||||||
it("returns zero when total rows are below page size", () => {
|
|
||||||
expect(resolveEventListOffset({ total: 20, limit: 50 })).toBe(0);
|
|
||||||
});
|
|
||||||
|
|
||||||
it("uses explicit cursor when provided", () => {
|
|
||||||
expect(resolveEventListOffset({ cursor: "7", total: 180, limit: 50 })).toBe(7);
|
|
||||||
});
|
|
||||||
|
|
||||||
it("normalizes invalid cursors to zero", () => {
|
|
||||||
expect(resolveEventListOffset({ cursor: "-3", total: 180, limit: 50 })).toBe(0);
|
|
||||||
expect(resolveEventListOffset({ cursor: "not-a-number", total: 180, limit: 50 })).toBe(0);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
@ -56,7 +56,7 @@ describe("workspace isolation", () => {
|
||||||
workspaceId: "alpha",
|
workspaceId: "alpha",
|
||||||
repoId: repoA.repoId,
|
repoId: repoA.repoId,
|
||||||
task: "task A",
|
task: "task A",
|
||||||
providerId: "daytona",
|
providerId: "local",
|
||||||
explicitBranchName: "feature/a",
|
explicitBranchName: "feature/a",
|
||||||
explicitTitle: "A",
|
explicitTitle: "A",
|
||||||
});
|
});
|
||||||
|
|
@ -65,7 +65,7 @@ describe("workspace isolation", () => {
|
||||||
workspaceId: "beta",
|
workspaceId: "beta",
|
||||||
repoId: repoB.repoId,
|
repoId: repoB.repoId,
|
||||||
task: "task B",
|
task: "task B",
|
||||||
providerId: "daytona",
|
providerId: "local",
|
||||||
explicitBranchName: "feature/b",
|
explicitBranchName: "feature/b",
|
||||||
explicitTitle: "B",
|
explicitTitle: "B",
|
||||||
});
|
});
|
||||||
|
|
|
||||||
|
|
@ -140,7 +140,7 @@ JSON Output:
|
||||||
"tasks": {
|
"tasks": {
|
||||||
"total": 4,
|
"total": 4,
|
||||||
"byStatus": { "queued": 0, "running": 1, "idle": 2, "archived": 1, "killed": 0, "error": 0 },
|
"byStatus": { "queued": 0, "running": 1, "idle": 2, "archived": 1, "killed": 0, "error": 0 },
|
||||||
"byProvider": { "daytona": 4 }
|
"byProvider": { "local": 4 }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
`);
|
`);
|
||||||
|
|
@ -169,7 +169,7 @@ JSON Output:
|
||||||
"taskId": "...",
|
"taskId": "...",
|
||||||
"repoId": "...",
|
"repoId": "...",
|
||||||
"branchName": "feature/foo",
|
"branchName": "feature/foo",
|
||||||
"payloadJson": "{\\"providerId\\":\\"daytona\\"}",
|
"payloadJson": "{\\"providerId\\":\\"local\\"}",
|
||||||
"createdAt": 1770607522229
|
"createdAt": 1770607522229
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
|
|
||||||
|
|
@ -69,7 +69,8 @@ describe("backend manager", () => {
|
||||||
backup_retention_days: 7,
|
backup_retention_days: 7,
|
||||||
},
|
},
|
||||||
providers: {
|
providers: {
|
||||||
daytona: { image: "ubuntu:24.04" },
|
local: {},
|
||||||
|
e2b: {},
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -32,7 +32,8 @@ describe("resolveTuiTheme", () => {
|
||||||
backup_retention_days: 7,
|
backup_retention_days: 7,
|
||||||
},
|
},
|
||||||
providers: {
|
providers: {
|
||||||
daytona: { image: "ubuntu:24.04" },
|
local: {},
|
||||||
|
e2b: {},
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -11,7 +11,7 @@ const sample: TaskRecord = {
|
||||||
branchName: "feature/test",
|
branchName: "feature/test",
|
||||||
title: "Test Title",
|
title: "Test Title",
|
||||||
task: "Do test",
|
task: "Do test",
|
||||||
providerId: "daytona",
|
providerId: "local",
|
||||||
status: "running",
|
status: "running",
|
||||||
statusMessage: null,
|
statusMessage: null,
|
||||||
activeSandboxId: "sandbox-1",
|
activeSandboxId: "sandbox-1",
|
||||||
|
|
@ -19,8 +19,8 @@ const sample: TaskRecord = {
|
||||||
sandboxes: [
|
sandboxes: [
|
||||||
{
|
{
|
||||||
sandboxId: "sandbox-1",
|
sandboxId: "sandbox-1",
|
||||||
providerId: "daytona",
|
providerId: "local",
|
||||||
switchTarget: "daytona://sandbox-1",
|
switchTarget: "sandbox://local/sandbox-1",
|
||||||
cwd: null,
|
cwd: null,
|
||||||
createdAt: 1,
|
createdAt: 1,
|
||||||
updatedAt: 1,
|
updatedAt: 1,
|
||||||
|
|
|
||||||
|
|
@ -18,7 +18,8 @@ describe("cli workspace resolution", () => {
|
||||||
backup_retention_days: 7,
|
backup_retention_days: 7,
|
||||||
},
|
},
|
||||||
providers: {
|
providers: {
|
||||||
daytona: { image: "ubuntu:24.04" },
|
local: {},
|
||||||
|
e2b: {},
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -43,7 +43,7 @@ import type {
|
||||||
} from "@sandbox-agent/foundry-shared";
|
} from "@sandbox-agent/foundry-shared";
|
||||||
import type { ProcessCreateRequest, ProcessInfo, ProcessLogFollowQuery, ProcessLogsResponse, ProcessSignalQuery } from "sandbox-agent";
|
import type { ProcessCreateRequest, ProcessInfo, ProcessLogFollowQuery, ProcessLogsResponse, ProcessSignalQuery } from "sandbox-agent";
|
||||||
import { createMockBackendClient } from "./mock/backend-client.js";
|
import { createMockBackendClient } from "./mock/backend-client.js";
|
||||||
import { sandboxInstanceKey, taskKey, workspaceKey } from "./keys.js";
|
import { taskKey, taskSandboxKey, workspaceKey } from "./keys.js";
|
||||||
|
|
||||||
export type TaskAction = "push" | "sync" | "merge" | "archive" | "kill";
|
export type TaskAction = "push" | "sync" | "merge" | "archive" | "kill";
|
||||||
|
|
||||||
|
|
@ -137,23 +137,26 @@ interface TaskHandle {
|
||||||
connect(): ActorConn;
|
connect(): ActorConn;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface SandboxInstanceHandle {
|
interface TaskSandboxHandle {
|
||||||
connect(): ActorConn;
|
connect(): ActorConn;
|
||||||
createSession(input: {
|
createSession(input: {
|
||||||
prompt: string;
|
id?: string;
|
||||||
|
agent: string;
|
||||||
|
model?: string;
|
||||||
|
sessionInit?: {
|
||||||
cwd?: string;
|
cwd?: string;
|
||||||
agent?: AgentType | "opencode";
|
};
|
||||||
}): Promise<{ id: string | null; status: "running" | "idle" | "error"; error?: string }>;
|
}): Promise<{ id: string }>;
|
||||||
listSessions(input?: { cursor?: string; limit?: number }): Promise<{ items: SandboxSessionRecord[]; nextCursor?: string }>;
|
listSessions(input?: { cursor?: string; limit?: number }): Promise<{ items: SandboxSessionRecord[]; nextCursor?: string }>;
|
||||||
listSessionEvents(input: { sessionId: string; cursor?: string; limit?: number }): Promise<{ items: SandboxSessionEventRecord[]; nextCursor?: string }>;
|
getEvents(input: { sessionId: string; cursor?: string; limit?: number }): Promise<{ items: SandboxSessionEventRecord[]; nextCursor?: string }>;
|
||||||
createProcess(input: ProcessCreateRequest): Promise<SandboxProcessRecord>;
|
createProcess(input: ProcessCreateRequest): Promise<SandboxProcessRecord>;
|
||||||
listProcesses(): Promise<{ processes: SandboxProcessRecord[] }>;
|
listProcesses(): Promise<{ processes: SandboxProcessRecord[] }>;
|
||||||
getProcessLogs(input: { processId: string; query?: ProcessLogFollowQuery }): Promise<ProcessLogsResponse>;
|
getProcessLogs(processId: string, query?: ProcessLogFollowQuery): Promise<ProcessLogsResponse>;
|
||||||
stopProcess(input: { processId: string; query?: ProcessSignalQuery }): Promise<SandboxProcessRecord>;
|
stopProcess(processId: string, query?: ProcessSignalQuery): Promise<SandboxProcessRecord>;
|
||||||
killProcess(input: { processId: string; query?: ProcessSignalQuery }): Promise<SandboxProcessRecord>;
|
killProcess(processId: string, query?: ProcessSignalQuery): Promise<SandboxProcessRecord>;
|
||||||
deleteProcess(input: { processId: string }): Promise<void>;
|
deleteProcess(processId: string): Promise<void>;
|
||||||
sendPrompt(input: { sessionId: string; prompt: string; notification?: boolean }): Promise<void>;
|
rawSendSessionMethod(sessionId: string, method: string, params: Record<string, unknown>): Promise<unknown>;
|
||||||
sessionStatus(input: { sessionId: string }): Promise<{ id: string; status: "running" | "idle" | "error" }>;
|
destroySession(sessionId: string): Promise<void>;
|
||||||
sandboxAgentConnection(): Promise<{ endpoint: string; token?: string }>;
|
sandboxAgentConnection(): Promise<{ endpoint: string; token?: string }>;
|
||||||
providerState(): Promise<{ providerId: ProviderId; sandboxId: string; state: string; at: number }>;
|
providerState(): Promise<{ providerId: ProviderId; sandboxId: string; state: string; at: number }>;
|
||||||
}
|
}
|
||||||
|
|
@ -166,8 +169,10 @@ interface RivetClient {
|
||||||
get(key?: string | string[]): TaskHandle;
|
get(key?: string | string[]): TaskHandle;
|
||||||
getOrCreate(key?: string | string[], opts?: { createWithInput?: unknown }): TaskHandle;
|
getOrCreate(key?: string | string[], opts?: { createWithInput?: unknown }): TaskHandle;
|
||||||
};
|
};
|
||||||
sandboxInstance: {
|
taskSandbox: {
|
||||||
getOrCreate(key?: string | string[], opts?: { createWithInput?: unknown }): SandboxInstanceHandle;
|
get(key?: string | string[]): TaskSandboxHandle;
|
||||||
|
getOrCreate(key?: string | string[], opts?: { createWithInput?: unknown }): TaskSandboxHandle;
|
||||||
|
getForId(actorId: string): TaskSandboxHandle;
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -423,8 +428,8 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
|
||||||
|
|
||||||
const task = async (workspaceId: string, repoId: string, taskId: string): Promise<TaskHandle> => client.task.get(taskKey(workspaceId, repoId, taskId));
|
const task = async (workspaceId: string, repoId: string, taskId: string): Promise<TaskHandle> => client.task.get(taskKey(workspaceId, repoId, taskId));
|
||||||
|
|
||||||
const sandboxByKey = async (workspaceId: string, providerId: ProviderId, sandboxId: string): Promise<SandboxInstanceHandle> => {
|
const sandboxByKey = async (workspaceId: string, _providerId: ProviderId, sandboxId: string): Promise<TaskSandboxHandle> => {
|
||||||
return (client as any).sandboxInstance.get(sandboxInstanceKey(workspaceId, providerId, sandboxId));
|
return (client as any).taskSandbox.get(taskSandboxKey(workspaceId, sandboxId));
|
||||||
};
|
};
|
||||||
|
|
||||||
function isActorNotFoundError(error: unknown): boolean {
|
function isActorNotFoundError(error: unknown): boolean {
|
||||||
|
|
@ -432,7 +437,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
|
||||||
return message.includes("Actor not found");
|
return message.includes("Actor not found");
|
||||||
}
|
}
|
||||||
|
|
||||||
const sandboxByActorIdFromTask = async (workspaceId: string, providerId: ProviderId, sandboxId: string): Promise<SandboxInstanceHandle | null> => {
|
const sandboxByActorIdFromTask = async (workspaceId: string, providerId: ProviderId, sandboxId: string): Promise<TaskSandboxHandle | null> => {
|
||||||
const ws = await workspace(workspaceId);
|
const ws = await workspace(workspaceId);
|
||||||
const rows = await ws.listTasks({ workspaceId });
|
const rows = await ws.listTasks({ workspaceId });
|
||||||
const candidates = [...rows].sort((a, b) => b.updatedAt - a.updatedAt);
|
const candidates = [...rows].sort((a, b) => b.updatedAt - a.updatedAt);
|
||||||
|
|
@ -451,7 +456,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
|
||||||
(sb as any).sandboxActorId.length > 0,
|
(sb as any).sandboxActorId.length > 0,
|
||||||
) as { sandboxActorId?: string } | undefined;
|
) as { sandboxActorId?: string } | undefined;
|
||||||
if (sandbox?.sandboxActorId) {
|
if (sandbox?.sandboxActorId) {
|
||||||
return (client as any).sandboxInstance.getForId(sandbox.sandboxActorId);
|
return (client as any).taskSandbox.getForId(sandbox.sandboxActorId);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const message = error instanceof Error ? error.message : String(error);
|
const message = error instanceof Error ? error.message : String(error);
|
||||||
|
|
@ -469,7 +474,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
|
||||||
workspaceId: string,
|
workspaceId: string,
|
||||||
providerId: ProviderId,
|
providerId: ProviderId,
|
||||||
sandboxId: string,
|
sandboxId: string,
|
||||||
run: (handle: SandboxInstanceHandle) => Promise<T>,
|
run: (handle: TaskSandboxHandle) => Promise<T>,
|
||||||
): Promise<T> => {
|
): Promise<T> => {
|
||||||
const handle = await sandboxByKey(workspaceId, providerId, sandboxId);
|
const handle = await sandboxByKey(workspaceId, providerId, sandboxId);
|
||||||
try {
|
try {
|
||||||
|
|
@ -511,16 +516,32 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
|
||||||
|
|
||||||
const getWorkbenchCompat = async (workspaceId: string): Promise<TaskWorkbenchSnapshot> => {
|
const getWorkbenchCompat = async (workspaceId: string): Promise<TaskWorkbenchSnapshot> => {
|
||||||
const summary = await (await workspace(workspaceId)).getWorkspaceSummary({ workspaceId });
|
const summary = await (await workspace(workspaceId)).getWorkspaceSummary({ workspaceId });
|
||||||
const tasks = await Promise.all(
|
const tasks = (
|
||||||
|
await Promise.all(
|
||||||
summary.taskSummaries.map(async (taskSummary) => {
|
summary.taskSummaries.map(async (taskSummary) => {
|
||||||
const detail = await (await task(workspaceId, taskSummary.repoId, taskSummary.id)).getTaskDetail();
|
let detail;
|
||||||
|
try {
|
||||||
|
detail = await (await task(workspaceId, taskSummary.repoId, taskSummary.id)).getTaskDetail();
|
||||||
|
} catch (error) {
|
||||||
|
if (isActorNotFoundError(error)) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
const sessionDetails = await Promise.all(
|
const sessionDetails = await Promise.all(
|
||||||
detail.sessionsSummary.map(async (session) => {
|
detail.sessionsSummary.map(async (session) => {
|
||||||
|
try {
|
||||||
const full = await (await task(workspaceId, detail.repoId, detail.id)).getSessionDetail({ sessionId: session.id });
|
const full = await (await task(workspaceId, detail.repoId, detail.id)).getSessionDetail({ sessionId: session.id });
|
||||||
return [session.id, full] as const;
|
return [session.id, full] as const;
|
||||||
|
} catch (error) {
|
||||||
|
if (isActorNotFoundError(error)) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
const sessionDetailsById = new Map(sessionDetails);
|
const sessionDetailsById = new Map(sessionDetails.filter((entry): entry is readonly [string, WorkbenchSessionDetail] => entry !== null));
|
||||||
return {
|
return {
|
||||||
id: detail.id,
|
id: detail.id,
|
||||||
repoId: detail.repoId,
|
repoId: detail.repoId,
|
||||||
|
|
@ -552,7 +573,8 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
|
||||||
minutesUsed: detail.minutesUsed,
|
minutesUsed: detail.minutesUsed,
|
||||||
};
|
};
|
||||||
}),
|
}),
|
||||||
);
|
)
|
||||||
|
).filter((task): task is TaskWorkbenchSnapshot["tasks"][number] => task !== null);
|
||||||
|
|
||||||
const projects = summary.repos
|
const projects = summary.repos
|
||||||
.map((repo) => ({
|
.map((repo) => ({
|
||||||
|
|
@ -639,8 +661,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
|
||||||
|
|
||||||
if (!entry.disposeConnPromise) {
|
if (!entry.disposeConnPromise) {
|
||||||
entry.disposeConnPromise = (async () => {
|
entry.disposeConnPromise = (async () => {
|
||||||
const handle = await sandboxByKey(workspaceId, providerId, sandboxId);
|
const conn = await connectSandbox(workspaceId, providerId, sandboxId);
|
||||||
const conn = (handle as any).connect();
|
|
||||||
const unsubscribeEvent = conn.on("processesUpdated", () => {
|
const unsubscribeEvent = conn.on("processesUpdated", () => {
|
||||||
const current = sandboxProcessSubscriptions.get(key);
|
const current = sandboxProcessSubscriptions.get(key);
|
||||||
if (!current) {
|
if (!current) {
|
||||||
|
|
@ -958,17 +979,22 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
|
||||||
}): Promise<{ id: string; status: "running" | "idle" | "error" }> {
|
}): Promise<{ id: string; status: "running" | "idle" | "error" }> {
|
||||||
const created = await withSandboxHandle(input.workspaceId, input.providerId, input.sandboxId, async (handle) =>
|
const created = await withSandboxHandle(input.workspaceId, input.providerId, input.sandboxId, async (handle) =>
|
||||||
handle.createSession({
|
handle.createSession({
|
||||||
prompt: input.prompt,
|
agent: input.agent ?? "claude",
|
||||||
|
sessionInit: {
|
||||||
cwd: input.cwd,
|
cwd: input.cwd,
|
||||||
agent: input.agent,
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
if (input.prompt.trim().length > 0) {
|
||||||
|
await withSandboxHandle(input.workspaceId, input.providerId, input.sandboxId, async (handle) =>
|
||||||
|
handle.rawSendSessionMethod(created.id, "session/prompt", {
|
||||||
|
prompt: [{ type: "text", text: input.prompt }],
|
||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
if (!created.id) {
|
|
||||||
throw new Error(created.error ?? "sandbox session creation failed");
|
|
||||||
}
|
}
|
||||||
return {
|
return {
|
||||||
id: created.id,
|
id: created.id,
|
||||||
status: created.status,
|
status: "idle",
|
||||||
};
|
};
|
||||||
},
|
},
|
||||||
|
|
||||||
|
|
@ -987,7 +1013,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
|
||||||
sandboxId: string,
|
sandboxId: string,
|
||||||
input: { sessionId: string; cursor?: string; limit?: number },
|
input: { sessionId: string; cursor?: string; limit?: number },
|
||||||
): Promise<{ items: SandboxSessionEventRecord[]; nextCursor?: string }> {
|
): Promise<{ items: SandboxSessionEventRecord[]; nextCursor?: string }> {
|
||||||
return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.listSessionEvents(input));
|
return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.getEvents(input));
|
||||||
},
|
},
|
||||||
|
|
||||||
async createSandboxProcess(input: {
|
async createSandboxProcess(input: {
|
||||||
|
|
@ -1010,7 +1036,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
|
||||||
processId: string,
|
processId: string,
|
||||||
query?: ProcessLogFollowQuery,
|
query?: ProcessLogFollowQuery,
|
||||||
): Promise<ProcessLogsResponse> {
|
): Promise<ProcessLogsResponse> {
|
||||||
return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.getProcessLogs({ processId, query }));
|
return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.getProcessLogs(processId, query));
|
||||||
},
|
},
|
||||||
|
|
||||||
async stopSandboxProcess(
|
async stopSandboxProcess(
|
||||||
|
|
@ -1020,7 +1046,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
|
||||||
processId: string,
|
processId: string,
|
||||||
query?: ProcessSignalQuery,
|
query?: ProcessSignalQuery,
|
||||||
): Promise<SandboxProcessRecord> {
|
): Promise<SandboxProcessRecord> {
|
||||||
return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.stopProcess({ processId, query }));
|
return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.stopProcess(processId, query));
|
||||||
},
|
},
|
||||||
|
|
||||||
async killSandboxProcess(
|
async killSandboxProcess(
|
||||||
|
|
@ -1030,11 +1056,11 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
|
||||||
processId: string,
|
processId: string,
|
||||||
query?: ProcessSignalQuery,
|
query?: ProcessSignalQuery,
|
||||||
): Promise<SandboxProcessRecord> {
|
): Promise<SandboxProcessRecord> {
|
||||||
return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.killProcess({ processId, query }));
|
return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.killProcess(processId, query));
|
||||||
},
|
},
|
||||||
|
|
||||||
async deleteSandboxProcess(workspaceId: string, providerId: ProviderId, sandboxId: string, processId: string): Promise<void> {
|
async deleteSandboxProcess(workspaceId: string, providerId: ProviderId, sandboxId: string, processId: string): Promise<void> {
|
||||||
await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.deleteProcess({ processId }));
|
await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.deleteProcess(processId));
|
||||||
},
|
},
|
||||||
|
|
||||||
subscribeSandboxProcesses(workspaceId: string, providerId: ProviderId, sandboxId: string, listener: () => void): () => void {
|
subscribeSandboxProcesses(workspaceId: string, providerId: ProviderId, sandboxId: string, listener: () => void): () => void {
|
||||||
|
|
@ -1050,10 +1076,8 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
|
||||||
notification?: boolean;
|
notification?: boolean;
|
||||||
}): Promise<void> {
|
}): Promise<void> {
|
||||||
await withSandboxHandle(input.workspaceId, input.providerId, input.sandboxId, async (handle) =>
|
await withSandboxHandle(input.workspaceId, input.providerId, input.sandboxId, async (handle) =>
|
||||||
handle.sendPrompt({
|
handle.rawSendSessionMethod(input.sessionId, "session/prompt", {
|
||||||
sessionId: input.sessionId,
|
prompt: [{ type: "text", text: input.prompt }],
|
||||||
prompt: input.prompt,
|
|
||||||
notification: input.notification,
|
|
||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
},
|
},
|
||||||
|
|
@ -1064,7 +1088,10 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
|
||||||
sandboxId: string,
|
sandboxId: string,
|
||||||
sessionId: string,
|
sessionId: string,
|
||||||
): Promise<{ id: string; status: "running" | "idle" | "error" }> {
|
): Promise<{ id: string; status: "running" | "idle" | "error" }> {
|
||||||
return await withSandboxHandle(workspaceId, providerId, sandboxId, async (handle) => handle.sessionStatus({ sessionId }));
|
return {
|
||||||
|
id: sessionId,
|
||||||
|
status: "idle",
|
||||||
|
};
|
||||||
},
|
},
|
||||||
|
|
||||||
async sandboxProviderState(
|
async sandboxProviderState(
|
||||||
|
|
|
||||||
|
|
@ -12,8 +12,8 @@ export function taskKey(workspaceId: string, repoId: string, taskId: string): Ac
|
||||||
return ["ws", workspaceId, "project", repoId, "task", taskId];
|
return ["ws", workspaceId, "project", repoId, "task", taskId];
|
||||||
}
|
}
|
||||||
|
|
||||||
export function sandboxInstanceKey(workspaceId: string, providerId: string, sandboxId: string): ActorKey {
|
export function taskSandboxKey(workspaceId: string, sandboxId: string): ActorKey {
|
||||||
return ["ws", workspaceId, "provider", providerId, "sandbox", sandboxId];
|
return ["ws", workspaceId, "sandbox", sandboxId];
|
||||||
}
|
}
|
||||||
|
|
||||||
export function historyKey(workspaceId: string, repoId: string): ActorKey {
|
export function historyKey(workspaceId: string, repoId: string): ActorKey {
|
||||||
|
|
@ -27,8 +27,3 @@ export function projectPrSyncKey(workspaceId: string, repoId: string): ActorKey
|
||||||
export function projectBranchSyncKey(workspaceId: string, repoId: string): ActorKey {
|
export function projectBranchSyncKey(workspaceId: string, repoId: string): ActorKey {
|
||||||
return ["ws", workspaceId, "project", repoId, "branch-sync"];
|
return ["ws", workspaceId, "project", repoId, "branch-sync"];
|
||||||
}
|
}
|
||||||
|
|
||||||
export function taskStatusSyncKey(workspaceId: string, repoId: string, taskId: string, sandboxId: string, sessionId: string): ActorKey {
|
|
||||||
// Include sandbox + session so multiple sandboxes/sessions can be tracked per task.
|
|
||||||
return ["ws", workspaceId, "project", repoId, "task", taskId, "status-sync", sandboxId, sessionId];
|
|
||||||
}
|
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,4 @@
|
||||||
|
import type { WorkbenchModelId } from "@sandbox-agent/foundry-shared";
|
||||||
import { injectMockLatency } from "./mock/latency.js";
|
import { injectMockLatency } from "./mock/latency.js";
|
||||||
import rivetDevFixture from "../../../scripts/data/rivet-dev.json" with { type: "json" };
|
import rivetDevFixture from "../../../scripts/data/rivet-dev.json" with { type: "json" };
|
||||||
|
|
||||||
|
|
@ -58,7 +59,7 @@ export interface MockFoundryOrganizationSettings {
|
||||||
slug: string;
|
slug: string;
|
||||||
primaryDomain: string;
|
primaryDomain: string;
|
||||||
seatAccrualMode: "first_prompt";
|
seatAccrualMode: "first_prompt";
|
||||||
defaultModel: "claude-sonnet-4" | "claude-opus-4" | "gpt-4o" | "o3";
|
defaultModel: WorkbenchModelId;
|
||||||
autoImportRepos: boolean;
|
autoImportRepos: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -177,7 +178,7 @@ function buildRivetOrganization(): MockFoundryOrganization {
|
||||||
slug: "rivet",
|
slug: "rivet",
|
||||||
primaryDomain: "rivet.dev",
|
primaryDomain: "rivet.dev",
|
||||||
seatAccrualMode: "first_prompt",
|
seatAccrualMode: "first_prompt",
|
||||||
defaultModel: "o3",
|
defaultModel: "gpt-5.3-codex",
|
||||||
autoImportRepos: true,
|
autoImportRepos: true,
|
||||||
},
|
},
|
||||||
github: {
|
github: {
|
||||||
|
|
|
||||||
|
|
@ -9,12 +9,6 @@ const QUEUED_STATUSES = new Set<TaskStatus>([
|
||||||
"init_enqueue_provision",
|
"init_enqueue_provision",
|
||||||
"init_ensure_name",
|
"init_ensure_name",
|
||||||
"init_assert_name",
|
"init_assert_name",
|
||||||
"init_create_sandbox",
|
|
||||||
"init_ensure_agent",
|
|
||||||
"init_start_sandbox_instance",
|
|
||||||
"init_create_session",
|
|
||||||
"init_write_db",
|
|
||||||
"init_start_status_sync",
|
|
||||||
"init_complete",
|
"init_complete",
|
||||||
"archive_stop_status_sync",
|
"archive_stop_status_sync",
|
||||||
"archive_release_sandbox",
|
"archive_release_sandbox",
|
||||||
|
|
|
||||||
|
|
@ -26,8 +26,12 @@ export const MODEL_GROUPS: ModelGroup[] = [
|
||||||
{
|
{
|
||||||
provider: "OpenAI",
|
provider: "OpenAI",
|
||||||
models: [
|
models: [
|
||||||
{ id: "gpt-4o", label: "GPT-4o" },
|
{ id: "gpt-5.3-codex", label: "GPT-5.3 Codex" },
|
||||||
{ id: "o3", label: "o3" },
|
{ id: "gpt-5.4", label: "GPT-5.4" },
|
||||||
|
{ id: "gpt-5.2-codex", label: "GPT-5.2 Codex" },
|
||||||
|
{ id: "gpt-5.1-codex-max", label: "GPT-5.1 Codex Max" },
|
||||||
|
{ id: "gpt-5.2", label: "GPT-5.2" },
|
||||||
|
{ id: "gpt-5.1-codex-mini", label: "GPT-5.1 Codex Mini" },
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
];
|
];
|
||||||
|
|
@ -334,7 +338,7 @@ export function buildInitialTasks(): Task[] {
|
||||||
sessionId: "t2",
|
sessionId: "t2",
|
||||||
sessionName: "Test coverage",
|
sessionName: "Test coverage",
|
||||||
agent: "Codex",
|
agent: "Codex",
|
||||||
model: "gpt-4o",
|
model: "gpt-5.3-codex",
|
||||||
status: "idle",
|
status: "idle",
|
||||||
thinkingSinceMs: null,
|
thinkingSinceMs: null,
|
||||||
unread: true,
|
unread: true,
|
||||||
|
|
@ -1083,7 +1087,7 @@ export function buildInitialTasks(): Task[] {
|
||||||
sessionId: "t10",
|
sessionId: "t10",
|
||||||
sessionName: "Namespace fix",
|
sessionName: "Namespace fix",
|
||||||
agent: "Codex",
|
agent: "Codex",
|
||||||
model: "gpt-4o",
|
model: "gpt-5.3-codex",
|
||||||
status: "idle",
|
status: "idle",
|
||||||
thinkingSinceMs: null,
|
thinkingSinceMs: null,
|
||||||
unread: true,
|
unread: true,
|
||||||
|
|
@ -1120,6 +1124,109 @@ export function buildInitialTasks(): Task[] {
|
||||||
fileTree: [],
|
fileTree: [],
|
||||||
minutesUsed: 3,
|
minutesUsed: 3,
|
||||||
},
|
},
|
||||||
|
|
||||||
|
// ── Status demo tasks ──────────────────────────────────────────────
|
||||||
|
{
|
||||||
|
id: "status-error",
|
||||||
|
repoId: "sandbox-agent",
|
||||||
|
title: "Fix broken auth middleware (error demo)",
|
||||||
|
status: "error",
|
||||||
|
runtimeStatus: "error",
|
||||||
|
statusMessage: "session:error",
|
||||||
|
repoName: "rivet-dev/sandbox-agent",
|
||||||
|
updatedAtMs: minutesAgo(2),
|
||||||
|
branch: "fix/auth-middleware",
|
||||||
|
pullRequest: null,
|
||||||
|
tabs: [
|
||||||
|
{
|
||||||
|
id: "status-error-tab",
|
||||||
|
sessionId: "status-error-session",
|
||||||
|
sessionName: "Auth fix",
|
||||||
|
agent: "Claude",
|
||||||
|
model: "claude-sonnet-4",
|
||||||
|
status: "error",
|
||||||
|
thinkingSinceMs: null,
|
||||||
|
unread: false,
|
||||||
|
created: true,
|
||||||
|
errorMessage: "Sandbox process exited unexpectedly (exit code 137). The sandbox may have run out of memory.",
|
||||||
|
draft: { text: "", attachments: [], updatedAtMs: null },
|
||||||
|
transcript: [],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
fileChanges: [],
|
||||||
|
diffs: {},
|
||||||
|
fileTree: [],
|
||||||
|
minutesUsed: 1,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: "status-provisioning",
|
||||||
|
repoId: "sandbox-agent",
|
||||||
|
title: "Add rate limiting to API gateway (provisioning demo)",
|
||||||
|
status: "new",
|
||||||
|
runtimeStatus: "init_enqueue_provision",
|
||||||
|
statusMessage: "Queueing sandbox provisioning.",
|
||||||
|
repoName: "rivet-dev/sandbox-agent",
|
||||||
|
updatedAtMs: minutesAgo(0),
|
||||||
|
branch: null,
|
||||||
|
pullRequest: null,
|
||||||
|
tabs: [
|
||||||
|
{
|
||||||
|
id: "status-prov-tab",
|
||||||
|
sessionId: null,
|
||||||
|
sessionName: "Session 1",
|
||||||
|
agent: "Claude",
|
||||||
|
model: "claude-sonnet-4",
|
||||||
|
status: "pending_provision",
|
||||||
|
thinkingSinceMs: null,
|
||||||
|
unread: false,
|
||||||
|
created: false,
|
||||||
|
draft: { text: "", attachments: [], updatedAtMs: null },
|
||||||
|
transcript: [],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
fileChanges: [],
|
||||||
|
diffs: {},
|
||||||
|
fileTree: [],
|
||||||
|
minutesUsed: 0,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: "status-running",
|
||||||
|
repoId: "sandbox-agent",
|
||||||
|
title: "Refactor WebSocket handler (running demo)",
|
||||||
|
status: "running",
|
||||||
|
runtimeStatus: "running",
|
||||||
|
repoName: "rivet-dev/sandbox-agent",
|
||||||
|
updatedAtMs: minutesAgo(1),
|
||||||
|
branch: "refactor/ws-handler",
|
||||||
|
pullRequest: null,
|
||||||
|
tabs: [
|
||||||
|
{
|
||||||
|
id: "status-run-tab",
|
||||||
|
sessionId: "status-run-session",
|
||||||
|
sessionName: "WS refactor",
|
||||||
|
agent: "Codex",
|
||||||
|
model: "gpt-5.3-codex",
|
||||||
|
status: "running",
|
||||||
|
thinkingSinceMs: Date.now() - 12_000,
|
||||||
|
unread: false,
|
||||||
|
created: true,
|
||||||
|
draft: { text: "", attachments: [], updatedAtMs: null },
|
||||||
|
transcript: transcriptFromLegacyMessages("status-run-tab", [
|
||||||
|
{
|
||||||
|
id: "sr1",
|
||||||
|
role: "user",
|
||||||
|
agent: null,
|
||||||
|
createdAtMs: minutesAgo(3),
|
||||||
|
lines: ["Refactor the WebSocket handler to use a connection pool pattern."],
|
||||||
|
},
|
||||||
|
]),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
fileChanges: [],
|
||||||
|
diffs: {},
|
||||||
|
fileTree: [],
|
||||||
|
minutesUsed: 2,
|
||||||
|
},
|
||||||
];
|
];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -171,7 +171,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => {
|
||||||
"4. git push the branch to origin",
|
"4. git push the branch to origin",
|
||||||
"5. Stop when done (agent should go idle).",
|
"5. Stop when done (agent should go idle).",
|
||||||
].join("\n"),
|
].join("\n"),
|
||||||
providerId: "daytona",
|
providerId: "local",
|
||||||
explicitTitle: `test(e2e): ${runId}`,
|
explicitTitle: `test(e2e): ${runId}`,
|
||||||
explicitBranchName: `e2e/${runId}`,
|
explicitBranchName: `e2e/${runId}`,
|
||||||
});
|
});
|
||||||
|
|
@ -185,7 +185,7 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => {
|
||||||
try {
|
try {
|
||||||
const namedAndProvisioned = await poll<TaskRecord>(
|
const namedAndProvisioned = await poll<TaskRecord>(
|
||||||
"task naming + sandbox provisioning",
|
"task naming + sandbox provisioning",
|
||||||
// Cold Daytona snapshot/image preparation can exceed 5 minutes on first run.
|
// Cold local sandbox startup can exceed a few minutes on first run.
|
||||||
8 * 60_000,
|
8 * 60_000,
|
||||||
1_000,
|
1_000,
|
||||||
async () => client.getTask(workspaceId, created.taskId),
|
async () => client.getTask(workspaceId, created.taskId),
|
||||||
|
|
@ -301,17 +301,17 @@ describe("e2e: backend -> sandbox-agent -> git -> PR", () => {
|
||||||
|
|
||||||
if (sandboxId) {
|
if (sandboxId) {
|
||||||
await poll<{ providerId: string; sandboxId: string; state: string; at: number }>(
|
await poll<{ providerId: string; sandboxId: string; state: string; at: number }>(
|
||||||
"daytona sandbox to stop",
|
"sandbox to stop",
|
||||||
2 * 60_000,
|
2 * 60_000,
|
||||||
2_000,
|
2_000,
|
||||||
async () => client.sandboxProviderState(workspaceId, "daytona", sandboxId!),
|
async () => client.sandboxProviderState(workspaceId, "local", sandboxId!),
|
||||||
(s) => {
|
(s) => {
|
||||||
const st = String(s.state).toLowerCase();
|
const st = String(s.state).toLowerCase();
|
||||||
return st.includes("stopped") || st.includes("suspended") || st.includes("paused");
|
return st.includes("destroyed") || st.includes("stopped") || st.includes("suspended") || st.includes("paused");
|
||||||
},
|
},
|
||||||
).catch(async (err) => {
|
).catch(async (err) => {
|
||||||
const dump = await debugDump(client, workspaceId, created.taskId);
|
const dump = await debugDump(client, workspaceId, created.taskId);
|
||||||
const state = await client.sandboxProviderState(workspaceId, "daytona", sandboxId!).catch(() => null);
|
const state = await client.sandboxProviderState(workspaceId, "local", sandboxId!).catch(() => null);
|
||||||
throw new Error(`${err instanceof Error ? err.message : String(err)}\n` + `sandbox state: ${state ? state.state : "unknown"}\n` + `${dump}`);
|
throw new Error(`${err instanceof Error ? err.message : String(err)}\n` + `sandbox state: ${state ? state.state : "unknown"}\n` + `${dump}`);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,8 @@
|
||||||
import { execFile } from "node:child_process";
|
|
||||||
import { promisify } from "node:util";
|
|
||||||
import { describe, expect, it } from "vitest";
|
import { describe, expect, it } from "vitest";
|
||||||
import type { TaskWorkbenchSnapshot, WorkbenchAgentTab, WorkbenchTask, WorkbenchModelId, WorkbenchTranscriptEvent } from "@sandbox-agent/foundry-shared";
|
import type { TaskWorkbenchSnapshot, WorkbenchAgentTab, WorkbenchTask, WorkbenchModelId, WorkbenchTranscriptEvent } from "@sandbox-agent/foundry-shared";
|
||||||
import { createBackendClient } from "../../src/backend-client.js";
|
import { createBackendClient } from "../../src/backend-client.js";
|
||||||
|
|
||||||
const RUN_WORKBENCH_E2E = process.env.HF_ENABLE_DAEMON_WORKBENCH_E2E === "1";
|
const RUN_WORKBENCH_E2E = process.env.HF_ENABLE_DAEMON_WORKBENCH_E2E === "1";
|
||||||
const execFileAsync = promisify(execFile);
|
|
||||||
|
|
||||||
function requiredEnv(name: string): string {
|
function requiredEnv(name: string): string {
|
||||||
const value = process.env[name]?.trim();
|
const value = process.env[name]?.trim();
|
||||||
|
|
@ -20,8 +17,12 @@ function workbenchModelEnv(name: string, fallback: WorkbenchModelId): WorkbenchM
|
||||||
switch (value) {
|
switch (value) {
|
||||||
case "claude-sonnet-4":
|
case "claude-sonnet-4":
|
||||||
case "claude-opus-4":
|
case "claude-opus-4":
|
||||||
case "gpt-4o":
|
case "gpt-5.3-codex":
|
||||||
case "o3":
|
case "gpt-5.4":
|
||||||
|
case "gpt-5.2-codex":
|
||||||
|
case "gpt-5.1-codex-max":
|
||||||
|
case "gpt-5.2":
|
||||||
|
case "gpt-5.1-codex-mini":
|
||||||
return value;
|
return value;
|
||||||
default:
|
default:
|
||||||
return fallback;
|
return fallback;
|
||||||
|
|
@ -32,16 +33,6 @@ async function sleep(ms: number): Promise<void> {
|
||||||
await new Promise((resolve) => setTimeout(resolve, ms));
|
await new Promise((resolve) => setTimeout(resolve, ms));
|
||||||
}
|
}
|
||||||
|
|
||||||
async function seedSandboxFile(workspaceId: string, taskId: string, filePath: string, content: string): Promise<void> {
|
|
||||||
const repoPath = `/root/.local/share/foundry/local-sandboxes/${workspaceId}/${taskId}/repo`;
|
|
||||||
const script = [
|
|
||||||
`cd ${JSON.stringify(repoPath)}`,
|
|
||||||
`mkdir -p ${JSON.stringify(filePath.includes("/") ? filePath.slice(0, filePath.lastIndexOf("/")) : ".")}`,
|
|
||||||
`printf '%s\\n' ${JSON.stringify(content)} > ${JSON.stringify(filePath)}`,
|
|
||||||
].join(" && ");
|
|
||||||
await execFileAsync("docker", ["exec", "foundry-backend-1", "bash", "-lc", script]);
|
|
||||||
}
|
|
||||||
|
|
||||||
async function poll<T>(label: string, timeoutMs: number, intervalMs: number, fn: () => Promise<T>, isDone: (value: T) => boolean): Promise<T> {
|
async function poll<T>(label: string, timeoutMs: number, intervalMs: number, fn: () => Promise<T>, isDone: (value: T) => boolean): Promise<T> {
|
||||||
const startedAt = Date.now();
|
const startedAt = Date.now();
|
||||||
let lastValue: T;
|
let lastValue: T;
|
||||||
|
|
@ -148,7 +139,7 @@ describe("e2e(client): workbench flows", () => {
|
||||||
const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet";
|
const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet";
|
||||||
const workspaceId = process.env.HF_E2E_WORKSPACE?.trim() || "default";
|
const workspaceId = process.env.HF_E2E_WORKSPACE?.trim() || "default";
|
||||||
const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO");
|
const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO");
|
||||||
const model = workbenchModelEnv("HF_E2E_MODEL", "gpt-4o");
|
const model = workbenchModelEnv("HF_E2E_MODEL", "gpt-5.3-codex");
|
||||||
const runId = `wb-${Date.now().toString(36)}`;
|
const runId = `wb-${Date.now().toString(36)}`;
|
||||||
const expectedFile = `${runId}.txt`;
|
const expectedFile = `${runId}.txt`;
|
||||||
const expectedInitialReply = `WORKBENCH_READY_${runId}`;
|
const expectedInitialReply = `WORKBENCH_READY_${runId}`;
|
||||||
|
|
@ -192,17 +183,6 @@ describe("e2e(client): workbench flows", () => {
|
||||||
expect(findTab(initialCompleted, primaryTab.id).sessionId).toBeTruthy();
|
expect(findTab(initialCompleted, primaryTab.id).sessionId).toBeTruthy();
|
||||||
expect(transcriptIncludesAgentText(findTab(initialCompleted, primaryTab.id).transcript, expectedInitialReply)).toBe(true);
|
expect(transcriptIncludesAgentText(findTab(initialCompleted, primaryTab.id).transcript, expectedInitialReply)).toBe(true);
|
||||||
|
|
||||||
await seedSandboxFile(workspaceId, created.taskId, expectedFile, runId);
|
|
||||||
|
|
||||||
const fileSeeded = await poll(
|
|
||||||
"seeded sandbox file reflected in workbench",
|
|
||||||
30_000,
|
|
||||||
1_000,
|
|
||||||
async () => findTask(await client.getWorkbench(workspaceId), created.taskId),
|
|
||||||
(task) => task.fileChanges.some((file) => file.path === expectedFile),
|
|
||||||
);
|
|
||||||
expect(fileSeeded.fileChanges.some((file) => file.path === expectedFile)).toBe(true);
|
|
||||||
|
|
||||||
await client.renameWorkbenchTask(workspaceId, {
|
await client.renameWorkbenchTask(workspaceId, {
|
||||||
taskId: created.taskId,
|
taskId: created.taskId,
|
||||||
value: `Workbench E2E ${runId} Renamed`,
|
value: `Workbench E2E ${runId} Renamed`,
|
||||||
|
|
@ -227,7 +207,11 @@ describe("e2e(client): workbench flows", () => {
|
||||||
await client.updateWorkbenchDraft(workspaceId, {
|
await client.updateWorkbenchDraft(workspaceId, {
|
||||||
taskId: created.taskId,
|
taskId: created.taskId,
|
||||||
tabId: secondTab.tabId,
|
tabId: secondTab.tabId,
|
||||||
text: `Reply with exactly: ${expectedReply}`,
|
text: [
|
||||||
|
`Create a file named ${expectedFile} in the repo root.`,
|
||||||
|
`Write exactly this single line into the file: ${runId}`,
|
||||||
|
`Then reply with exactly: ${expectedReply}`,
|
||||||
|
].join("\n"),
|
||||||
attachments: [
|
attachments: [
|
||||||
{
|
{
|
||||||
id: `${expectedFile}:1`,
|
id: `${expectedFile}:1`,
|
||||||
|
|
@ -245,8 +229,19 @@ describe("e2e(client): workbench flows", () => {
|
||||||
await client.sendWorkbenchMessage(workspaceId, {
|
await client.sendWorkbenchMessage(workspaceId, {
|
||||||
taskId: created.taskId,
|
taskId: created.taskId,
|
||||||
tabId: secondTab.tabId,
|
tabId: secondTab.tabId,
|
||||||
text: `Reply with exactly: ${expectedReply}`,
|
text: [
|
||||||
attachments: [],
|
`Create a file named ${expectedFile} in the repo root.`,
|
||||||
|
`Write exactly this single line into the file: ${runId}`,
|
||||||
|
`Then reply with exactly: ${expectedReply}`,
|
||||||
|
].join("\n"),
|
||||||
|
attachments: [
|
||||||
|
{
|
||||||
|
id: `${expectedFile}:1`,
|
||||||
|
filePath: expectedFile,
|
||||||
|
lineNumber: 1,
|
||||||
|
lineContent: runId,
|
||||||
|
},
|
||||||
|
],
|
||||||
});
|
});
|
||||||
|
|
||||||
const withSecondReply = await poll(
|
const withSecondReply = await poll(
|
||||||
|
|
@ -256,12 +251,15 @@ describe("e2e(client): workbench flows", () => {
|
||||||
async () => findTask(await client.getWorkbench(workspaceId), created.taskId),
|
async () => findTask(await client.getWorkbench(workspaceId), created.taskId),
|
||||||
(task) => {
|
(task) => {
|
||||||
const tab = findTab(task, secondTab.tabId);
|
const tab = findTab(task, secondTab.tabId);
|
||||||
return tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, expectedReply);
|
return (
|
||||||
|
tab.status === "idle" && transcriptIncludesAgentText(tab.transcript, expectedReply) && task.fileChanges.some((file) => file.path === expectedFile)
|
||||||
|
);
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
const secondTranscript = findTab(withSecondReply, secondTab.tabId).transcript;
|
const secondTranscript = findTab(withSecondReply, secondTab.tabId).transcript;
|
||||||
expect(transcriptIncludesAgentText(secondTranscript, expectedReply)).toBe(true);
|
expect(transcriptIncludesAgentText(secondTranscript, expectedReply)).toBe(true);
|
||||||
|
expect(withSecondReply.fileChanges.some((file) => file.path === expectedFile)).toBe(true);
|
||||||
|
|
||||||
await client.setWorkbenchSessionUnread(workspaceId, {
|
await client.setWorkbenchSessionUnread(workspaceId, {
|
||||||
taskId: created.taskId,
|
taskId: created.taskId,
|
||||||
|
|
|
||||||
|
|
@ -30,8 +30,12 @@ function workbenchModelEnv(name: string, fallback: WorkbenchModelId): WorkbenchM
|
||||||
switch (value) {
|
switch (value) {
|
||||||
case "claude-sonnet-4":
|
case "claude-sonnet-4":
|
||||||
case "claude-opus-4":
|
case "claude-opus-4":
|
||||||
case "gpt-4o":
|
case "gpt-5.3-codex":
|
||||||
case "o3":
|
case "gpt-5.4":
|
||||||
|
case "gpt-5.2-codex":
|
||||||
|
case "gpt-5.1-codex-max":
|
||||||
|
case "gpt-5.2":
|
||||||
|
case "gpt-5.1-codex-mini":
|
||||||
return value;
|
return value;
|
||||||
default:
|
default:
|
||||||
return fallback;
|
return fallback;
|
||||||
|
|
@ -191,7 +195,7 @@ describe("e2e(client): workbench load", () => {
|
||||||
const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet";
|
const endpoint = process.env.HF_E2E_BACKEND_ENDPOINT?.trim() || "http://127.0.0.1:7741/v1/rivet";
|
||||||
const workspaceId = process.env.HF_E2E_WORKSPACE?.trim() || "default";
|
const workspaceId = process.env.HF_E2E_WORKSPACE?.trim() || "default";
|
||||||
const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO");
|
const repoRemote = requiredEnv("HF_E2E_GITHUB_REPO");
|
||||||
const model = workbenchModelEnv("HF_E2E_MODEL", "gpt-4o");
|
const model = workbenchModelEnv("HF_E2E_MODEL", "gpt-5.3-codex");
|
||||||
const taskCount = intEnv("HF_LOAD_TASK_COUNT", 3);
|
const taskCount = intEnv("HF_LOAD_TASK_COUNT", 3);
|
||||||
const extraSessionCount = intEnv("HF_LOAD_EXTRA_SESSION_COUNT", 2);
|
const extraSessionCount = intEnv("HF_LOAD_EXTRA_SESSION_COUNT", 2);
|
||||||
const pollIntervalMs = intEnv("HF_LOAD_POLL_INTERVAL_MS", 2_000);
|
const pollIntervalMs = intEnv("HF_LOAD_POLL_INTERVAL_MS", 2_000);
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
import { describe, expect, it } from "vitest";
|
import { describe, expect, it } from "vitest";
|
||||||
import { taskKey, taskStatusSyncKey, historyKey, projectBranchSyncKey, projectKey, projectPrSyncKey, sandboxInstanceKey, workspaceKey } from "../src/keys.js";
|
import { taskKey, historyKey, projectBranchSyncKey, projectKey, projectPrSyncKey, taskSandboxKey, workspaceKey } from "../src/keys.js";
|
||||||
|
|
||||||
describe("actor keys", () => {
|
describe("actor keys", () => {
|
||||||
it("prefixes every key with workspace namespace", () => {
|
it("prefixes every key with workspace namespace", () => {
|
||||||
|
|
@ -7,11 +7,10 @@ describe("actor keys", () => {
|
||||||
workspaceKey("default"),
|
workspaceKey("default"),
|
||||||
projectKey("default", "repo"),
|
projectKey("default", "repo"),
|
||||||
taskKey("default", "repo", "task"),
|
taskKey("default", "repo", "task"),
|
||||||
sandboxInstanceKey("default", "daytona", "sbx"),
|
taskSandboxKey("default", "sbx"),
|
||||||
historyKey("default", "repo"),
|
historyKey("default", "repo"),
|
||||||
projectPrSyncKey("default", "repo"),
|
projectPrSyncKey("default", "repo"),
|
||||||
projectBranchSyncKey("default", "repo"),
|
projectBranchSyncKey("default", "repo"),
|
||||||
taskStatusSyncKey("default", "repo", "task", "sandbox-1", "session-1"),
|
|
||||||
];
|
];
|
||||||
|
|
||||||
for (const key of keys) {
|
for (const key of keys) {
|
||||||
|
|
|
||||||
|
|
@ -10,7 +10,7 @@ const sample: TaskRecord = {
|
||||||
branchName: "feature/test",
|
branchName: "feature/test",
|
||||||
title: "Test Title",
|
title: "Test Title",
|
||||||
task: "Do test",
|
task: "Do test",
|
||||||
providerId: "daytona",
|
providerId: "local",
|
||||||
status: "running",
|
status: "running",
|
||||||
statusMessage: null,
|
statusMessage: null,
|
||||||
activeSandboxId: "sandbox-1",
|
activeSandboxId: "sandbox-1",
|
||||||
|
|
@ -18,9 +18,9 @@ const sample: TaskRecord = {
|
||||||
sandboxes: [
|
sandboxes: [
|
||||||
{
|
{
|
||||||
sandboxId: "sandbox-1",
|
sandboxId: "sandbox-1",
|
||||||
providerId: "daytona",
|
providerId: "local",
|
||||||
sandboxActorId: null,
|
sandboxActorId: null,
|
||||||
switchTarget: "daytona://sandbox-1",
|
switchTarget: "sandbox://local/sandbox-1",
|
||||||
cwd: null,
|
cwd: null,
|
||||||
createdAt: 1,
|
createdAt: 1,
|
||||||
updatedAt: 1,
|
updatedAt: 1,
|
||||||
|
|
@ -73,8 +73,8 @@ describe("summary helpers", () => {
|
||||||
it("summarizes by status and provider", () => {
|
it("summarizes by status and provider", () => {
|
||||||
const rows: TaskRecord[] = [
|
const rows: TaskRecord[] = [
|
||||||
sample,
|
sample,
|
||||||
{ ...sample, taskId: "task-2", status: "idle", providerId: "daytona" },
|
{ ...sample, taskId: "task-2", status: "idle", providerId: "local" },
|
||||||
{ ...sample, taskId: "task-3", status: "error", providerId: "daytona" },
|
{ ...sample, taskId: "task-3", status: "error", providerId: "local" },
|
||||||
];
|
];
|
||||||
|
|
||||||
const summary = summarizeTasks(rows);
|
const summary = summarizeTasks(rows);
|
||||||
|
|
@ -82,6 +82,6 @@ describe("summary helpers", () => {
|
||||||
expect(summary.byStatus.running).toBe(1);
|
expect(summary.byStatus.running).toBe(1);
|
||||||
expect(summary.byStatus.idle).toBe(1);
|
expect(summary.byStatus.idle).toBe(1);
|
||||||
expect(summary.byStatus.error).toBe(1);
|
expect(summary.byStatus.error).toBe(1);
|
||||||
expect(summary.byProvider.daytona).toBe(3);
|
expect(summary.byProvider.local).toBe(3);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
|
||||||
|
|
@ -1,20 +1,45 @@
|
||||||
import { memo, useCallback, useEffect, useMemo, useState } from "react";
|
import { memo, useEffect, useMemo, useState } from "react";
|
||||||
import { useStyletron } from "baseui";
|
import { useStyletron } from "baseui";
|
||||||
import { useFoundryTokens } from "../app/theme";
|
import { useFoundryTokens } from "../app/theme";
|
||||||
import { isMockFrontendClient } from "../lib/env";
|
import { isMockFrontendClient } from "../lib/env";
|
||||||
import { interestManager } from "../lib/interest";
|
import { interestManager } from "../lib/interest";
|
||||||
import type { FoundryOrganization, TaskWorkbenchSnapshot, WorkbenchTask } from "@sandbox-agent/foundry-shared";
|
import type {
|
||||||
|
FoundryOrganization,
|
||||||
|
TaskStatus,
|
||||||
|
TaskWorkbenchSnapshot,
|
||||||
|
WorkbenchSandboxSummary,
|
||||||
|
WorkbenchSessionSummary,
|
||||||
|
WorkbenchTaskStatus,
|
||||||
|
} from "@sandbox-agent/foundry-shared";
|
||||||
import type { DebugInterestTopic } from "@sandbox-agent/foundry-client";
|
import type { DebugInterestTopic } from "@sandbox-agent/foundry-client";
|
||||||
|
import { describeTaskState } from "../features/tasks/status";
|
||||||
|
|
||||||
interface DevPanelProps {
|
interface DevPanelProps {
|
||||||
workspaceId: string;
|
workspaceId: string;
|
||||||
snapshot: TaskWorkbenchSnapshot;
|
snapshot: TaskWorkbenchSnapshot;
|
||||||
organization?: FoundryOrganization | null;
|
organization?: FoundryOrganization | null;
|
||||||
|
focusedTask?: DevPanelFocusedTask | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface DevPanelFocusedTask {
|
||||||
|
id: string;
|
||||||
|
repoId: string;
|
||||||
|
title: string | null;
|
||||||
|
status: WorkbenchTaskStatus;
|
||||||
|
runtimeStatus?: TaskStatus | null;
|
||||||
|
statusMessage?: string | null;
|
||||||
|
branch?: string | null;
|
||||||
|
activeSandboxId?: string | null;
|
||||||
|
activeSessionId?: string | null;
|
||||||
|
sandboxes?: WorkbenchSandboxSummary[];
|
||||||
|
sessions?: WorkbenchSessionSummary[];
|
||||||
}
|
}
|
||||||
|
|
||||||
interface TopicInfo {
|
interface TopicInfo {
|
||||||
label: string;
|
label: string;
|
||||||
key: string;
|
key: string;
|
||||||
|
/** Parsed params portion of the cache key, or empty if none. */
|
||||||
|
params: string;
|
||||||
listenerCount: number;
|
listenerCount: number;
|
||||||
hasConnection: boolean;
|
hasConnection: boolean;
|
||||||
status: "loading" | "connected" | "error";
|
status: "loading" | "connected" | "error";
|
||||||
|
|
@ -36,6 +61,12 @@ function topicLabel(topic: DebugInterestTopic): string {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Extract the params portion of a cache key (everything after the first `:`) */
|
||||||
|
function topicParams(topic: DebugInterestTopic): string {
|
||||||
|
const idx = topic.cacheKey.indexOf(":");
|
||||||
|
return idx >= 0 ? topic.cacheKey.slice(idx + 1) : "";
|
||||||
|
}
|
||||||
|
|
||||||
function timeAgo(ts: number | null): string {
|
function timeAgo(ts: number | null): string {
|
||||||
if (!ts) return "never";
|
if (!ts) return "never";
|
||||||
const seconds = Math.floor((Date.now() - ts) / 1000);
|
const seconds = Math.floor((Date.now() - ts) / 1000);
|
||||||
|
|
@ -46,17 +77,14 @@ function timeAgo(ts: number | null): string {
|
||||||
return `${Math.floor(minutes / 60)}h`;
|
return `${Math.floor(minutes / 60)}h`;
|
||||||
}
|
}
|
||||||
|
|
||||||
function taskStatusLabel(task: WorkbenchTask): string {
|
|
||||||
if (task.status === "archived") return "archived";
|
|
||||||
const hasRunning = task.tabs?.some((tab) => tab.status === "running");
|
|
||||||
if (hasRunning) return "running";
|
|
||||||
return task.status ?? "idle";
|
|
||||||
}
|
|
||||||
|
|
||||||
function statusColor(status: string, t: ReturnType<typeof useFoundryTokens>): string {
|
function statusColor(status: string, t: ReturnType<typeof useFoundryTokens>): string {
|
||||||
|
if (status === "new" || status.startsWith("init_") || status.startsWith("archive_") || status.startsWith("kill_") || status.startsWith("pending_")) {
|
||||||
|
return t.statusWarning;
|
||||||
|
}
|
||||||
switch (status) {
|
switch (status) {
|
||||||
case "connected":
|
case "connected":
|
||||||
case "running":
|
case "running":
|
||||||
|
case "ready":
|
||||||
return t.statusSuccess;
|
return t.statusSuccess;
|
||||||
case "loading":
|
case "loading":
|
||||||
return t.statusWarning;
|
return t.statusWarning;
|
||||||
|
|
@ -97,7 +125,15 @@ function installStatusColor(status: string, t: ReturnType<typeof useFoundryToken
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organization }: DevPanelProps) {
|
/** Format elapsed thinking time as a compact string. */
|
||||||
|
function thinkingLabel(sinceMs: number | null, now: number): string | null {
|
||||||
|
if (!sinceMs) return null;
|
||||||
|
const elapsed = Math.floor((now - sinceMs) / 1000);
|
||||||
|
if (elapsed < 1) return "thinking";
|
||||||
|
return `thinking ${elapsed}s`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organization, focusedTask }: DevPanelProps) {
|
||||||
const [css] = useStyletron();
|
const [css] = useStyletron();
|
||||||
const t = useFoundryTokens();
|
const t = useFoundryTokens();
|
||||||
const [now, setNow] = useState(Date.now());
|
const [now, setNow] = useState(Date.now());
|
||||||
|
|
@ -112,6 +148,7 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza
|
||||||
return interestManager.listDebugTopics().map((topic) => ({
|
return interestManager.listDebugTopics().map((topic) => ({
|
||||||
label: topicLabel(topic),
|
label: topicLabel(topic),
|
||||||
key: topic.cacheKey,
|
key: topic.cacheKey,
|
||||||
|
params: topicParams(topic),
|
||||||
listenerCount: topic.listenerCount,
|
listenerCount: topic.listenerCount,
|
||||||
hasConnection: topic.status === "connected",
|
hasConnection: topic.status === "connected",
|
||||||
status: topic.status,
|
status: topic.status,
|
||||||
|
|
@ -119,9 +156,9 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza
|
||||||
}));
|
}));
|
||||||
}, [now]);
|
}, [now]);
|
||||||
|
|
||||||
const tasks = snapshot.tasks ?? [];
|
|
||||||
const repos = snapshot.repos ?? [];
|
const repos = snapshot.repos ?? [];
|
||||||
const projects = snapshot.projects ?? [];
|
const focusedTaskStatus = focusedTask?.runtimeStatus ?? focusedTask?.status ?? null;
|
||||||
|
const focusedTaskState = describeTaskState(focusedTaskStatus, focusedTask?.statusMessage ?? null);
|
||||||
|
|
||||||
const mono = css({
|
const mono = css({
|
||||||
fontFamily: "ui-monospace, SFMono-Regular, 'SF Mono', Consolas, monospace",
|
fontFamily: "ui-monospace, SFMono-Regular, 'SF Mono', Consolas, monospace",
|
||||||
|
|
@ -203,7 +240,13 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza
|
||||||
{topic.label}
|
{topic.label}
|
||||||
</span>
|
</span>
|
||||||
<span className={`${mono} ${css({ color: statusColor(topic.status, t) })}`}>{topic.status}</span>
|
<span className={`${mono} ${css({ color: statusColor(topic.status, t) })}`}>{topic.status}</span>
|
||||||
<span className={`${mono} ${css({ color: t.textMuted })}`}>{topic.key.length > 24 ? `...${topic.key.slice(-20)}` : topic.key}</span>
|
{topic.params && (
|
||||||
|
<span
|
||||||
|
className={`${mono} ${css({ color: t.textMuted, overflow: "hidden", textOverflow: "ellipsis", whiteSpace: "nowrap", maxWidth: "100px" })}`}
|
||||||
|
>
|
||||||
|
{topic.params}
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
<span className={`${mono} ${css({ color: t.textTertiary })}`}>{timeAgo(topic.lastRefresh)}</span>
|
<span className={`${mono} ${css({ color: t.textTertiary })}`}>{timeAgo(topic.lastRefresh)}</span>
|
||||||
</div>
|
</div>
|
||||||
))}
|
))}
|
||||||
|
|
@ -214,44 +257,150 @@ export const DevPanel = memo(function DevPanel({ workspaceId, snapshot, organiza
|
||||||
<Section label="Snapshot" t={t} css={css}>
|
<Section label="Snapshot" t={t} css={css}>
|
||||||
<div className={css({ display: "flex", gap: "10px", fontSize: "10px" })}>
|
<div className={css({ display: "flex", gap: "10px", fontSize: "10px" })}>
|
||||||
<Stat label="repos" value={repos.length} t={t} css={css} />
|
<Stat label="repos" value={repos.length} t={t} css={css} />
|
||||||
<Stat label="projects" value={projects.length} t={t} css={css} />
|
<Stat label="tasks" value={(snapshot.tasks ?? []).length} t={t} css={css} />
|
||||||
<Stat label="tasks" value={tasks.length} t={t} css={css} />
|
|
||||||
</div>
|
</div>
|
||||||
</Section>
|
</Section>
|
||||||
|
|
||||||
{/* Tasks */}
|
<Section label="Focused Task" t={t} css={css}>
|
||||||
{tasks.length > 0 && (
|
{focusedTask ? (
|
||||||
<Section label="Tasks" t={t} css={css}>
|
<div className={css({ display: "flex", flexDirection: "column", gap: "3px", fontSize: "10px" })}>
|
||||||
{tasks.slice(0, 10).map((task) => {
|
<div className={css({ display: "flex", alignItems: "center", gap: "6px" })}>
|
||||||
const status = taskStatusLabel(task);
|
|
||||||
return (
|
|
||||||
<div
|
|
||||||
key={task.id}
|
|
||||||
className={css({
|
|
||||||
display: "flex",
|
|
||||||
alignItems: "center",
|
|
||||||
gap: "6px",
|
|
||||||
padding: "1px 0",
|
|
||||||
fontSize: "10px",
|
|
||||||
})}
|
|
||||||
>
|
|
||||||
<span
|
<span
|
||||||
className={css({
|
className={css({
|
||||||
width: "5px",
|
width: "5px",
|
||||||
height: "5px",
|
height: "5px",
|
||||||
borderRadius: "50%",
|
borderRadius: "50%",
|
||||||
backgroundColor: statusColor(status, t),
|
backgroundColor: statusColor(focusedTaskStatus ?? focusedTask.status, t),
|
||||||
flexShrink: 0,
|
flexShrink: 0,
|
||||||
})}
|
})}
|
||||||
/>
|
/>
|
||||||
<span className={css({ color: t.textPrimary, flex: 1, overflow: "hidden", textOverflow: "ellipsis", whiteSpace: "nowrap" })}>
|
<span className={css({ color: t.textPrimary, flex: 1, overflow: "hidden", textOverflow: "ellipsis", whiteSpace: "nowrap" })}>
|
||||||
{task.title || task.id.slice(0, 12)}
|
{focusedTask.title || focusedTask.id.slice(0, 12)}
|
||||||
</span>
|
</span>
|
||||||
<span className={`${mono} ${css({ color: statusColor(status, t) })}`}>{status}</span>
|
<span className={`${mono} ${css({ color: statusColor(focusedTaskStatus ?? focusedTask.status, t) })}`}>
|
||||||
<span className={`${mono} ${css({ color: t.textMuted })}`}>{task.tabs?.length ?? 0} tabs</span>
|
{focusedTaskStatus ?? focusedTask.status}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
<div className={`${mono} ${css({ color: t.textMuted })}`}>{focusedTaskState.detail}</div>
|
||||||
|
<div className={`${mono} ${css({ color: t.textTertiary })}`}>task: {focusedTask.id}</div>
|
||||||
|
<div className={`${mono} ${css({ color: t.textTertiary })}`}>repo: {focusedTask.repoId}</div>
|
||||||
|
<div className={`${mono} ${css({ color: t.textTertiary })}`}>branch: {focusedTask.branch ?? "-"}</div>
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<span className={css({ fontSize: "10px", color: t.textMuted })}>No task focused</span>
|
||||||
|
)}
|
||||||
|
</Section>
|
||||||
|
|
||||||
|
{/* Session — only when a task is focused */}
|
||||||
|
{focusedTask && (
|
||||||
|
<Section label="Session" t={t} css={css}>
|
||||||
|
{(focusedTask.sessions?.length ?? 0) > 0 ? (
|
||||||
|
focusedTask.sessions!.map((session) => {
|
||||||
|
const isActive = session.id === focusedTask.activeSessionId;
|
||||||
|
const thinking = thinkingLabel(session.thinkingSinceMs, now);
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
key={session.id}
|
||||||
|
className={css({
|
||||||
|
display: "flex",
|
||||||
|
flexDirection: "column",
|
||||||
|
gap: "1px",
|
||||||
|
padding: "2px 0",
|
||||||
|
fontSize: "10px",
|
||||||
|
})}
|
||||||
|
>
|
||||||
|
<div className={css({ display: "flex", alignItems: "center", gap: "6px" })}>
|
||||||
|
<span
|
||||||
|
className={css({
|
||||||
|
width: "5px",
|
||||||
|
height: "5px",
|
||||||
|
borderRadius: "50%",
|
||||||
|
backgroundColor: statusColor(session.status, t),
|
||||||
|
flexShrink: 0,
|
||||||
|
})}
|
||||||
|
/>
|
||||||
|
<span
|
||||||
|
className={css({
|
||||||
|
color: isActive ? t.textPrimary : t.textTertiary,
|
||||||
|
flex: 1,
|
||||||
|
overflow: "hidden",
|
||||||
|
textOverflow: "ellipsis",
|
||||||
|
whiteSpace: "nowrap",
|
||||||
|
})}
|
||||||
|
>
|
||||||
|
{session.sessionName || session.id.slice(0, 12)}
|
||||||
|
{isActive ? " *" : ""}
|
||||||
|
</span>
|
||||||
|
<span className={`${mono} ${css({ color: statusColor(session.status, t) })}`}>{session.status}</span>
|
||||||
|
</div>
|
||||||
|
<div className={css({ display: "flex", gap: "6px", paddingLeft: "11px" })}>
|
||||||
|
<span className={`${mono} ${css({ color: t.textMuted })}`}>{session.agent}</span>
|
||||||
|
<span className={`${mono} ${css({ color: t.textMuted })}`}>{session.model}</span>
|
||||||
|
{!session.created && <span className={`${mono} ${css({ color: t.statusWarning })}`}>not created</span>}
|
||||||
|
{session.unread && <span className={`${mono} ${css({ color: t.statusWarning })}`}>unread</span>}
|
||||||
|
{thinking && <span className={`${mono} ${css({ color: t.statusWarning })}`}>{thinking}</span>}
|
||||||
|
</div>
|
||||||
|
{session.errorMessage && (
|
||||||
|
<div className={`${mono} ${css({ color: t.statusError, paddingLeft: "11px", wordBreak: "break-word" })}`}>{session.errorMessage}</div>
|
||||||
|
)}
|
||||||
|
{session.sessionId && <div className={`${mono} ${css({ color: t.textTertiary, paddingLeft: "11px" })}`}>sid: {session.sessionId}</div>}
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
|
})
|
||||||
|
) : (
|
||||||
|
<span className={css({ fontSize: "10px", color: t.textMuted })}>No sessions</span>
|
||||||
|
)}
|
||||||
|
</Section>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Sandbox — only when a task is focused */}
|
||||||
|
{focusedTask && (
|
||||||
|
<Section label="Sandbox" t={t} css={css}>
|
||||||
|
{(focusedTask.sandboxes?.length ?? 0) > 0 ? (
|
||||||
|
focusedTask.sandboxes!.map((sandbox) => {
|
||||||
|
const isActive = sandbox.sandboxId === focusedTask.activeSandboxId;
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
key={sandbox.sandboxId}
|
||||||
|
className={css({
|
||||||
|
display: "flex",
|
||||||
|
flexDirection: "column",
|
||||||
|
gap: "1px",
|
||||||
|
padding: "2px 0",
|
||||||
|
fontSize: "10px",
|
||||||
})}
|
})}
|
||||||
|
>
|
||||||
|
<div className={css({ display: "flex", alignItems: "center", gap: "6px" })}>
|
||||||
|
<span
|
||||||
|
className={css({
|
||||||
|
width: "5px",
|
||||||
|
height: "5px",
|
||||||
|
borderRadius: "50%",
|
||||||
|
backgroundColor: isActive ? t.statusSuccess : t.textMuted,
|
||||||
|
flexShrink: 0,
|
||||||
|
})}
|
||||||
|
/>
|
||||||
|
<span
|
||||||
|
className={css({
|
||||||
|
color: isActive ? t.textPrimary : t.textTertiary,
|
||||||
|
flex: 1,
|
||||||
|
overflow: "hidden",
|
||||||
|
textOverflow: "ellipsis",
|
||||||
|
whiteSpace: "nowrap",
|
||||||
|
})}
|
||||||
|
>
|
||||||
|
{sandbox.sandboxId.slice(0, 16)}
|
||||||
|
{isActive ? " *" : ""}
|
||||||
|
</span>
|
||||||
|
<span className={`${mono} ${css({ color: t.textMuted })}`}>{sandbox.providerId}</span>
|
||||||
|
</div>
|
||||||
|
{sandbox.cwd && <div className={`${mono} ${css({ color: t.textTertiary, paddingLeft: "11px" })}`}>cwd: {sandbox.cwd}</div>}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
})
|
||||||
|
) : (
|
||||||
|
<span className={css({ fontSize: "10px", color: t.textMuted })}>No sandboxes</span>
|
||||||
|
)}
|
||||||
</Section>
|
</Section>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -40,6 +40,7 @@ import {
|
||||||
import { activeMockOrganization, useMockAppSnapshot } from "../lib/mock-app";
|
import { activeMockOrganization, useMockAppSnapshot } from "../lib/mock-app";
|
||||||
import { backendClient } from "../lib/backend";
|
import { backendClient } from "../lib/backend";
|
||||||
import { interestManager } from "../lib/interest";
|
import { interestManager } from "../lib/interest";
|
||||||
|
import { describeTaskState, isProvisioningTaskStatus } from "../features/tasks/status";
|
||||||
|
|
||||||
function firstAgentTabId(task: Task): string | null {
|
function firstAgentTabId(task: Task): string | null {
|
||||||
return task.tabs[0]?.id ?? null;
|
return task.tabs[0]?.id ?? null;
|
||||||
|
|
@ -124,10 +125,6 @@ function toLegacyTask(
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
function isProvisioningTaskStatus(status: string | null | undefined): boolean {
|
|
||||||
return status === "new" || String(status ?? "").startsWith("init_");
|
|
||||||
}
|
|
||||||
|
|
||||||
function sessionStateMessage(tab: Task["tabs"][number] | null | undefined): string | null {
|
function sessionStateMessage(tab: Task["tabs"][number] | null | undefined): string | null {
|
||||||
if (!tab) {
|
if (!tab) {
|
||||||
return null;
|
return null;
|
||||||
|
|
@ -176,6 +173,7 @@ interface WorkbenchActions {
|
||||||
const TranscriptPanel = memo(function TranscriptPanel({
|
const TranscriptPanel = memo(function TranscriptPanel({
|
||||||
taskWorkbenchClient,
|
taskWorkbenchClient,
|
||||||
task,
|
task,
|
||||||
|
hasSandbox,
|
||||||
activeTabId,
|
activeTabId,
|
||||||
lastAgentTabId,
|
lastAgentTabId,
|
||||||
openDiffs,
|
openDiffs,
|
||||||
|
|
@ -193,6 +191,7 @@ const TranscriptPanel = memo(function TranscriptPanel({
|
||||||
}: {
|
}: {
|
||||||
taskWorkbenchClient: WorkbenchActions;
|
taskWorkbenchClient: WorkbenchActions;
|
||||||
task: Task;
|
task: Task;
|
||||||
|
hasSandbox: boolean;
|
||||||
activeTabId: string | null;
|
activeTabId: string | null;
|
||||||
lastAgentTabId: string | null;
|
lastAgentTabId: string | null;
|
||||||
openDiffs: string[];
|
openDiffs: string[];
|
||||||
|
|
@ -226,8 +225,10 @@ const TranscriptPanel = memo(function TranscriptPanel({
|
||||||
const isTerminal = task.status === "archived";
|
const isTerminal = task.status === "archived";
|
||||||
const historyEvents = useMemo(() => buildHistoryEvents(task.tabs), [task.tabs]);
|
const historyEvents = useMemo(() => buildHistoryEvents(task.tabs), [task.tabs]);
|
||||||
const activeMessages = useMemo(() => buildDisplayMessages(activeAgentTab), [activeAgentTab]);
|
const activeMessages = useMemo(() => buildDisplayMessages(activeAgentTab), [activeAgentTab]);
|
||||||
const taskProvisioning = isProvisioningTaskStatus(task.runtimeStatus ?? task.status);
|
const taskRuntimeStatus = task.runtimeStatus ?? task.status;
|
||||||
const taskProvisioningMessage = task.statusMessage ?? "Provisioning sandbox...";
|
const taskState = describeTaskState(taskRuntimeStatus, task.statusMessage ?? null);
|
||||||
|
const taskProvisioning = isProvisioningTaskStatus(taskRuntimeStatus);
|
||||||
|
const taskProvisioningMessage = taskState.detail;
|
||||||
const activeSessionMessage = sessionStateMessage(activeAgentTab);
|
const activeSessionMessage = sessionStateMessage(activeAgentTab);
|
||||||
const showPendingSessionState =
|
const showPendingSessionState =
|
||||||
!activeDiff &&
|
!activeDiff &&
|
||||||
|
|
@ -574,6 +575,7 @@ const TranscriptPanel = memo(function TranscriptPanel({
|
||||||
<SPanel>
|
<SPanel>
|
||||||
<TranscriptHeader
|
<TranscriptHeader
|
||||||
task={task}
|
task={task}
|
||||||
|
hasSandbox={hasSandbox}
|
||||||
activeTab={activeAgentTab}
|
activeTab={activeAgentTab}
|
||||||
editingField={editingField}
|
editingField={editingField}
|
||||||
editValue={editValue}
|
editValue={editValue}
|
||||||
|
|
@ -657,7 +659,7 @@ const TranscriptPanel = memo(function TranscriptPanel({
|
||||||
{taskProvisioning ? (
|
{taskProvisioning ? (
|
||||||
<>
|
<>
|
||||||
<SpinnerDot size={16} />
|
<SpinnerDot size={16} />
|
||||||
<h2 style={{ margin: 0, fontSize: "20px", fontWeight: 600 }}>Provisioning task</h2>
|
<h2 style={{ margin: 0, fontSize: "20px", fontWeight: 600 }}>{taskState.title}</h2>
|
||||||
<p style={{ margin: 0, opacity: 0.75 }}>{taskProvisioningMessage}</p>
|
<p style={{ margin: 0, opacity: 0.75 }}>{taskProvisioningMessage}</p>
|
||||||
</>
|
</>
|
||||||
) : (
|
) : (
|
||||||
|
|
@ -1130,6 +1132,22 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M
|
||||||
}
|
}
|
||||||
: null,
|
: null,
|
||||||
);
|
);
|
||||||
|
const activeSandbox = useMemo(() => {
|
||||||
|
if (!taskState.data?.activeSandboxId) return null;
|
||||||
|
return taskState.data.sandboxes?.find((s) => s.sandboxId === taskState.data!.activeSandboxId) ?? null;
|
||||||
|
}, [taskState.data?.activeSandboxId, taskState.data?.sandboxes]);
|
||||||
|
const sandboxState = useInterest(
|
||||||
|
interestManager,
|
||||||
|
"sandboxProcesses",
|
||||||
|
activeSandbox
|
||||||
|
? {
|
||||||
|
workspaceId,
|
||||||
|
providerId: activeSandbox.providerId,
|
||||||
|
sandboxId: activeSandbox.sandboxId,
|
||||||
|
}
|
||||||
|
: null,
|
||||||
|
);
|
||||||
|
const hasSandbox = Boolean(activeSandbox) && sandboxState.status !== "error";
|
||||||
const tasks = useMemo(() => {
|
const tasks = useMemo(() => {
|
||||||
const sessionCache = new Map<string, { draft: Task["tabs"][number]["draft"]; transcript: Task["tabs"][number]["transcript"] }>();
|
const sessionCache = new Map<string, { draft: Task["tabs"][number]["draft"]; transcript: Task["tabs"][number]["transcript"] }>();
|
||||||
if (selectedTaskSummary && taskState.data) {
|
if (selectedTaskSummary && taskState.data) {
|
||||||
|
|
@ -1387,7 +1405,7 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M
|
||||||
const { taskId, tabId } = await taskWorkbenchClient.createTask({
|
const { taskId, tabId } = await taskWorkbenchClient.createTask({
|
||||||
repoId,
|
repoId,
|
||||||
task: "New task",
|
task: "New task",
|
||||||
model: "gpt-4o",
|
model: "gpt-5.3-codex",
|
||||||
title: "New task",
|
title: "New task",
|
||||||
});
|
});
|
||||||
await navigate({
|
await navigate({
|
||||||
|
|
@ -1787,6 +1805,7 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M
|
||||||
workspaceId={workspaceId}
|
workspaceId={workspaceId}
|
||||||
snapshot={{ workspaceId, repos: workspaceRepos, projects: rawProjects, tasks } as TaskWorkbenchSnapshot}
|
snapshot={{ workspaceId, repos: workspaceRepos, projects: rawProjects, tasks } as TaskWorkbenchSnapshot}
|
||||||
organization={activeOrg}
|
organization={activeOrg}
|
||||||
|
focusedTask={null}
|
||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
</>
|
</>
|
||||||
|
|
@ -1888,6 +1907,7 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M
|
||||||
<TranscriptPanel
|
<TranscriptPanel
|
||||||
taskWorkbenchClient={taskWorkbenchClient}
|
taskWorkbenchClient={taskWorkbenchClient}
|
||||||
task={activeTask}
|
task={activeTask}
|
||||||
|
hasSandbox={hasSandbox}
|
||||||
activeTabId={activeTabId}
|
activeTabId={activeTabId}
|
||||||
lastAgentTabId={lastAgentTabId}
|
lastAgentTabId={lastAgentTabId}
|
||||||
openDiffs={openDiffs}
|
openDiffs={openDiffs}
|
||||||
|
|
@ -1978,6 +1998,30 @@ export function MockLayout({ workspaceId, selectedTaskId, selectedSessionId }: M
|
||||||
workspaceId={workspaceId}
|
workspaceId={workspaceId}
|
||||||
snapshot={{ workspaceId, repos: workspaceRepos, projects: rawProjects, tasks } as TaskWorkbenchSnapshot}
|
snapshot={{ workspaceId, repos: workspaceRepos, projects: rawProjects, tasks } as TaskWorkbenchSnapshot}
|
||||||
organization={activeOrg}
|
organization={activeOrg}
|
||||||
|
focusedTask={{
|
||||||
|
id: activeTask.id,
|
||||||
|
repoId: activeTask.repoId,
|
||||||
|
title: activeTask.title,
|
||||||
|
status: activeTask.status,
|
||||||
|
runtimeStatus: activeTask.runtimeStatus ?? null,
|
||||||
|
statusMessage: activeTask.statusMessage ?? null,
|
||||||
|
branch: activeTask.branch ?? null,
|
||||||
|
activeSandboxId: activeTask.activeSandboxId ?? null,
|
||||||
|
activeSessionId: selectedSessionId ?? activeTask.tabs[0]?.id ?? null,
|
||||||
|
sandboxes: [],
|
||||||
|
sessions:
|
||||||
|
activeTask.tabs?.map((tab) => ({
|
||||||
|
id: tab.id,
|
||||||
|
sessionId: tab.sessionId ?? null,
|
||||||
|
sessionName: tab.sessionName ?? tab.id,
|
||||||
|
agent: tab.agent,
|
||||||
|
model: tab.model,
|
||||||
|
status: tab.status,
|
||||||
|
thinkingSinceMs: tab.thinkingSinceMs ?? null,
|
||||||
|
unread: tab.unread ?? false,
|
||||||
|
created: tab.created ?? false,
|
||||||
|
})) ?? [],
|
||||||
|
}}
|
||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
</Shell>
|
</Shell>
|
||||||
|
|
|
||||||
|
|
@ -1,14 +1,16 @@
|
||||||
import { memo } from "react";
|
import { memo, useMemo } from "react";
|
||||||
import { useStyletron } from "baseui";
|
import { useStyletron } from "baseui";
|
||||||
import { LabelSmall } from "baseui/typography";
|
import { LabelSmall } from "baseui/typography";
|
||||||
import { Clock, PanelLeft, PanelRight } from "lucide-react";
|
import { Clock, PanelLeft, PanelRight } from "lucide-react";
|
||||||
|
|
||||||
import { useFoundryTokens } from "../../app/theme";
|
import { useFoundryTokens } from "../../app/theme";
|
||||||
import { PanelHeaderBar } from "./ui";
|
import { deriveHeaderStatus } from "../../features/tasks/status";
|
||||||
|
import { HeaderStatusPill, PanelHeaderBar } from "./ui";
|
||||||
import { type AgentTab, type Task } from "./view-model";
|
import { type AgentTab, type Task } from "./view-model";
|
||||||
|
|
||||||
export const TranscriptHeader = memo(function TranscriptHeader({
|
export const TranscriptHeader = memo(function TranscriptHeader({
|
||||||
task,
|
task,
|
||||||
|
hasSandbox,
|
||||||
activeTab,
|
activeTab,
|
||||||
editingField,
|
editingField,
|
||||||
editValue,
|
editValue,
|
||||||
|
|
@ -26,6 +28,7 @@ export const TranscriptHeader = memo(function TranscriptHeader({
|
||||||
onNavigateToUsage,
|
onNavigateToUsage,
|
||||||
}: {
|
}: {
|
||||||
task: Task;
|
task: Task;
|
||||||
|
hasSandbox: boolean;
|
||||||
activeTab: AgentTab | null | undefined;
|
activeTab: AgentTab | null | undefined;
|
||||||
editingField: "title" | "branch" | null;
|
editingField: "title" | "branch" | null;
|
||||||
editValue: string;
|
editValue: string;
|
||||||
|
|
@ -46,6 +49,11 @@ export const TranscriptHeader = memo(function TranscriptHeader({
|
||||||
const t = useFoundryTokens();
|
const t = useFoundryTokens();
|
||||||
const isDesktop = !!import.meta.env.VITE_DESKTOP;
|
const isDesktop = !!import.meta.env.VITE_DESKTOP;
|
||||||
const needsTrafficLightInset = isDesktop && sidebarCollapsed;
|
const needsTrafficLightInset = isDesktop && sidebarCollapsed;
|
||||||
|
const taskStatus = task.runtimeStatus ?? task.status;
|
||||||
|
const headerStatus = useMemo(
|
||||||
|
() => deriveHeaderStatus(taskStatus, task.statusMessage ?? null, activeTab?.status ?? null, activeTab?.errorMessage ?? null, hasSandbox),
|
||||||
|
[taskStatus, task.statusMessage, activeTab?.status, activeTab?.errorMessage, hasSandbox],
|
||||||
|
);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<PanelHeaderBar $style={{ backgroundColor: t.surfaceSecondary, borderBottom: "none", paddingLeft: needsTrafficLightInset ? "74px" : "14px" }}>
|
<PanelHeaderBar $style={{ backgroundColor: t.surfaceSecondary, borderBottom: "none", paddingLeft: needsTrafficLightInset ? "74px" : "14px" }}>
|
||||||
|
|
@ -161,6 +169,7 @@ export const TranscriptHeader = memo(function TranscriptHeader({
|
||||||
</span>
|
</span>
|
||||||
)
|
)
|
||||||
) : null}
|
) : null}
|
||||||
|
<HeaderStatusPill status={headerStatus} />
|
||||||
<div className={css({ flex: 1 })} />
|
<div className={css({ flex: 1 })} />
|
||||||
<div
|
<div
|
||||||
role="button"
|
role="button"
|
||||||
|
|
|
||||||
|
|
@ -184,6 +184,73 @@ export const AgentIcon = memo(function AgentIcon({ agent, size = 14 }: { agent:
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
export type HeaderStatusVariant = "error" | "warning" | "success" | "neutral";
|
||||||
|
|
||||||
|
export interface HeaderStatusInfo {
|
||||||
|
variant: HeaderStatusVariant;
|
||||||
|
label: string;
|
||||||
|
spinning: boolean;
|
||||||
|
tooltip?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const HeaderStatusPill = memo(function HeaderStatusPill({ status }: { status: HeaderStatusInfo }) {
|
||||||
|
const [css] = useStyletron();
|
||||||
|
const t = useFoundryTokens();
|
||||||
|
|
||||||
|
const colorMap: Record<HeaderStatusVariant, { bg: string; text: string; dot: string }> = {
|
||||||
|
error: { bg: `${t.statusError}18`, text: t.statusError, dot: t.statusError },
|
||||||
|
warning: { bg: `${t.statusWarning}18`, text: t.statusWarning, dot: t.statusWarning },
|
||||||
|
success: { bg: `${t.statusSuccess}18`, text: t.statusSuccess, dot: t.statusSuccess },
|
||||||
|
neutral: { bg: t.interactiveSubtle, text: t.textTertiary, dot: t.textTertiary },
|
||||||
|
};
|
||||||
|
const colors = colorMap[status.variant];
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
title={status.tooltip}
|
||||||
|
className={css({
|
||||||
|
display: "inline-flex",
|
||||||
|
alignItems: "center",
|
||||||
|
gap: "5px",
|
||||||
|
padding: "2px 8px",
|
||||||
|
borderRadius: "999px",
|
||||||
|
backgroundColor: colors.bg,
|
||||||
|
fontSize: "11px",
|
||||||
|
fontWeight: 500,
|
||||||
|
lineHeight: 1,
|
||||||
|
color: colors.text,
|
||||||
|
whiteSpace: "nowrap",
|
||||||
|
flexShrink: 0,
|
||||||
|
})}
|
||||||
|
>
|
||||||
|
{status.spinning ? (
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
width: 8,
|
||||||
|
height: 8,
|
||||||
|
borderRadius: "50%",
|
||||||
|
border: `1.5px solid ${colors.dot}40`,
|
||||||
|
borderTopColor: colors.dot,
|
||||||
|
animation: "hf-spin 0.8s linear infinite",
|
||||||
|
flexShrink: 0,
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
) : (
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
width: 6,
|
||||||
|
height: 6,
|
||||||
|
borderRadius: "50%",
|
||||||
|
backgroundColor: colors.dot,
|
||||||
|
flexShrink: 0,
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
<span>{status.label}</span>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
export const TabAvatar = memo(function TabAvatar({ tab }: { tab: AgentTab }) {
|
export const TabAvatar = memo(function TabAvatar({ tab }: { tab: AgentTab }) {
|
||||||
if (tab.status === "running" || tab.status === "pending_provision" || tab.status === "pending_session_create") return <SpinnerDot size={8} />;
|
if (tab.status === "running" || tab.status === "pending_provision" || tab.status === "pending_session_create") return <SpinnerDot size={8} />;
|
||||||
if (tab.unread) return <UnreadDot />;
|
if (tab.unread) return <UnreadDot />;
|
||||||
|
|
|
||||||
|
|
@ -8,7 +8,7 @@ function makeTab(transcript: WorkbenchAgentTab["transcript"]): WorkbenchAgentTab
|
||||||
sessionId: "session-1",
|
sessionId: "session-1",
|
||||||
sessionName: "Session 1",
|
sessionName: "Session 1",
|
||||||
agent: "Codex",
|
agent: "Codex",
|
||||||
model: "gpt-4o",
|
model: "gpt-5.3-codex",
|
||||||
status: "idle",
|
status: "idle",
|
||||||
thinkingSinceMs: null,
|
thinkingSinceMs: null,
|
||||||
unread: false,
|
unread: false,
|
||||||
|
|
|
||||||
|
|
@ -28,8 +28,12 @@ export const MODEL_GROUPS: ModelGroup[] = [
|
||||||
{
|
{
|
||||||
provider: "OpenAI",
|
provider: "OpenAI",
|
||||||
models: [
|
models: [
|
||||||
{ id: "gpt-4o", label: "GPT-4o" },
|
{ id: "gpt-5.3-codex", label: "GPT-5.3 Codex" },
|
||||||
{ id: "o3", label: "o3" },
|
{ id: "gpt-5.4", label: "GPT-5.4" },
|
||||||
|
{ id: "gpt-5.2-codex", label: "GPT-5.2 Codex" },
|
||||||
|
{ id: "gpt-5.1-codex-max", label: "GPT-5.1 Codex Max" },
|
||||||
|
{ id: "gpt-5.2", label: "GPT-5.2" },
|
||||||
|
{ id: "gpt-5.1-codex-mini", label: "GPT-5.1 Codex Mini" },
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
];
|
];
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
import { useEffect, useMemo, useState, type ReactNode } from "react";
|
import { useEffect, useMemo, useState, type ReactNode } from "react";
|
||||||
import type { AgentType, RepoBranchRecord, RepoOverview, RepoStackAction, WorkbenchTaskStatus } from "@sandbox-agent/foundry-shared";
|
import type { AgentType, RepoBranchRecord, RepoOverview, RepoStackAction, TaskWorkbenchSnapshot, WorkbenchTaskStatus } from "@sandbox-agent/foundry-shared";
|
||||||
import { useInterest } from "@sandbox-agent/foundry-client";
|
import { useInterest } from "@sandbox-agent/foundry-client";
|
||||||
import { useMutation, useQuery } from "@tanstack/react-query";
|
import { useMutation, useQuery } from "@tanstack/react-query";
|
||||||
import { Link, useNavigate } from "@tanstack/react-router";
|
import { Link, useNavigate } from "@tanstack/react-router";
|
||||||
|
|
@ -15,9 +15,12 @@ import { styled, useStyletron } from "baseui";
|
||||||
import { HeadingSmall, HeadingXSmall, LabelSmall, LabelXSmall, MonoLabelSmall, ParagraphSmall } from "baseui/typography";
|
import { HeadingSmall, HeadingXSmall, LabelSmall, LabelXSmall, MonoLabelSmall, ParagraphSmall } from "baseui/typography";
|
||||||
import { Bot, CircleAlert, FolderGit2, GitBranch, MessageSquareText, SendHorizontal, Shuffle } from "lucide-react";
|
import { Bot, CircleAlert, FolderGit2, GitBranch, MessageSquareText, SendHorizontal, Shuffle } from "lucide-react";
|
||||||
import { formatDiffStat } from "../features/tasks/model";
|
import { formatDiffStat } from "../features/tasks/model";
|
||||||
|
import { deriveHeaderStatus, describeTaskState } from "../features/tasks/status";
|
||||||
|
import { HeaderStatusPill } from "./mock-layout/ui";
|
||||||
import { buildTranscript, resolveSessionSelection } from "../features/sessions/model";
|
import { buildTranscript, resolveSessionSelection } from "../features/sessions/model";
|
||||||
import { backendClient } from "../lib/backend";
|
import { backendClient } from "../lib/backend";
|
||||||
import { interestManager } from "../lib/interest";
|
import { interestManager } from "../lib/interest";
|
||||||
|
import { DevPanel, useDevPanel } from "./dev-panel";
|
||||||
|
|
||||||
interface WorkspaceDashboardProps {
|
interface WorkspaceDashboardProps {
|
||||||
workspaceId: string;
|
workspaceId: string;
|
||||||
|
|
@ -333,6 +336,7 @@ function MetaRow({ label, value, mono = false }: { label: string; value: string;
|
||||||
export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId }: WorkspaceDashboardProps) {
|
export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId }: WorkspaceDashboardProps) {
|
||||||
const [css, theme] = useStyletron();
|
const [css, theme] = useStyletron();
|
||||||
const navigate = useNavigate();
|
const navigate = useNavigate();
|
||||||
|
const showDevPanel = useDevPanel();
|
||||||
const repoOverviewMode = typeof selectedRepoId === "string" && selectedRepoId.length > 0;
|
const repoOverviewMode = typeof selectedRepoId === "string" && selectedRepoId.length > 0;
|
||||||
|
|
||||||
const [draft, setDraft] = useState("");
|
const [draft, setDraft] = useState("");
|
||||||
|
|
@ -468,6 +472,10 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
|
||||||
}, [selectedForSession?.id]);
|
}, [selectedForSession?.id]);
|
||||||
|
|
||||||
const sessionRows = selectedForSession?.sessionsSummary ?? [];
|
const sessionRows = selectedForSession?.sessionsSummary ?? [];
|
||||||
|
const taskRuntimeStatus = selectedForSession?.runtimeStatus ?? selectedForSession?.status ?? null;
|
||||||
|
const taskStatusState = describeTaskState(taskRuntimeStatus, selectedForSession?.statusMessage ?? null);
|
||||||
|
const taskStateSummary = `${taskStatusState.title}. ${taskStatusState.detail}`;
|
||||||
|
const shouldUseTaskStateEmptyState = Boolean(selectedForSession && taskRuntimeStatus && taskRuntimeStatus !== "running" && taskRuntimeStatus !== "idle");
|
||||||
const sessionSelection = useMemo(
|
const sessionSelection = useMemo(
|
||||||
() =>
|
() =>
|
||||||
resolveSessionSelection({
|
resolveSessionSelection({
|
||||||
|
|
@ -503,6 +511,64 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
|
||||||
const isPendingSessionCreate = selectedSessionSummary?.status === "pending_session_create";
|
const isPendingSessionCreate = selectedSessionSummary?.status === "pending_session_create";
|
||||||
const isSessionError = selectedSessionSummary?.status === "error";
|
const isSessionError = selectedSessionSummary?.status === "error";
|
||||||
const canStartSession = Boolean(selectedForSession && activeSandbox?.sandboxId);
|
const canStartSession = Boolean(selectedForSession && activeSandbox?.sandboxId);
|
||||||
|
const devPanelFocusedTask = useMemo(() => {
|
||||||
|
if (repoOverviewMode) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const task = selectedForSession ?? selectedSummary;
|
||||||
|
if (!task) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
id: task.id,
|
||||||
|
repoId: task.repoId,
|
||||||
|
title: task.title,
|
||||||
|
status: task.status,
|
||||||
|
runtimeStatus: selectedForSession?.runtimeStatus ?? null,
|
||||||
|
statusMessage: selectedForSession?.statusMessage ?? null,
|
||||||
|
branch: task.branch ?? null,
|
||||||
|
activeSandboxId: selectedForSession?.activeSandboxId ?? null,
|
||||||
|
activeSessionId: selectedForSession?.activeSessionId ?? null,
|
||||||
|
sandboxes: selectedForSession?.sandboxes ?? [],
|
||||||
|
sessions: selectedForSession?.sessionsSummary ?? [],
|
||||||
|
};
|
||||||
|
}, [repoOverviewMode, selectedForSession, selectedSummary]);
|
||||||
|
const devPanelSnapshot = useMemo(
|
||||||
|
(): TaskWorkbenchSnapshot => ({
|
||||||
|
workspaceId,
|
||||||
|
repos: repos.map((repo) => ({ id: repo.id, label: repo.label })),
|
||||||
|
projects: [],
|
||||||
|
tasks: rows.map((task) => ({
|
||||||
|
id: task.id,
|
||||||
|
repoId: task.repoId,
|
||||||
|
title: task.title,
|
||||||
|
status: task.status,
|
||||||
|
runtimeStatus: selectedForSession?.id === task.id ? selectedForSession.runtimeStatus : undefined,
|
||||||
|
statusMessage: selectedForSession?.id === task.id ? selectedForSession.statusMessage : null,
|
||||||
|
repoName: task.repoName,
|
||||||
|
updatedAtMs: task.updatedAtMs,
|
||||||
|
branch: task.branch ?? null,
|
||||||
|
pullRequest: task.pullRequest,
|
||||||
|
tabs: task.sessionsSummary.map((session) => ({
|
||||||
|
...session,
|
||||||
|
draft: {
|
||||||
|
text: "",
|
||||||
|
attachments: [],
|
||||||
|
updatedAtMs: null,
|
||||||
|
},
|
||||||
|
transcript: [],
|
||||||
|
})),
|
||||||
|
fileChanges: [],
|
||||||
|
diffs: {},
|
||||||
|
fileTree: [],
|
||||||
|
minutesUsed: 0,
|
||||||
|
activeSandboxId: selectedForSession?.id === task.id ? selectedForSession.activeSandboxId : null,
|
||||||
|
})),
|
||||||
|
}),
|
||||||
|
[repos, rows, selectedForSession, workspaceId],
|
||||||
|
);
|
||||||
|
|
||||||
const startSessionFromTask = async (): Promise<{ id: string; status: "running" | "idle" | "error" }> => {
|
const startSessionFromTask = async (): Promise<{ id: string; status: "running" | "idle" | "error" }> => {
|
||||||
if (!selectedForSession || !activeSandbox?.sandboxId) {
|
if (!selectedForSession || !activeSandbox?.sandboxId) {
|
||||||
|
|
@ -1270,7 +1336,17 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
|
||||||
<HeadingXSmall marginTop="0" marginBottom="0">
|
<HeadingXSmall marginTop="0" marginBottom="0">
|
||||||
{selectedForSession ? (selectedForSession.title ?? "Determining title...") : "No task selected"}
|
{selectedForSession ? (selectedForSession.title ?? "Determining title...") : "No task selected"}
|
||||||
</HeadingXSmall>
|
</HeadingXSmall>
|
||||||
{selectedForSession ? <StatusPill kind={statusKind(selectedForSession.status)}>{selectedForSession.status}</StatusPill> : null}
|
{selectedForSession ? (
|
||||||
|
<HeaderStatusPill
|
||||||
|
status={deriveHeaderStatus(
|
||||||
|
taskRuntimeStatus ?? selectedForSession.status,
|
||||||
|
selectedForSession.statusMessage ?? null,
|
||||||
|
selectedSessionSummary?.status ?? null,
|
||||||
|
selectedSessionSummary?.errorMessage ?? null,
|
||||||
|
Boolean(activeSandbox?.sandboxId),
|
||||||
|
)}
|
||||||
|
/>
|
||||||
|
) : null}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{selectedForSession && !resolvedSessionId ? (
|
{selectedForSession && !resolvedSessionId ? (
|
||||||
|
|
@ -1285,6 +1361,11 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
|
||||||
</Button>
|
</Button>
|
||||||
) : null}
|
) : null}
|
||||||
</div>
|
</div>
|
||||||
|
{selectedForSession ? (
|
||||||
|
<ParagraphSmall marginTop="0" marginBottom="0" color="contentSecondary" data-testid="task-runtime-state">
|
||||||
|
{taskStateSummary}
|
||||||
|
</ParagraphSmall>
|
||||||
|
) : null}
|
||||||
</PanelHeader>
|
</PanelHeader>
|
||||||
|
|
||||||
<div
|
<div
|
||||||
|
|
@ -1381,19 +1462,22 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
|
||||||
})}
|
})}
|
||||||
>
|
>
|
||||||
<LabelSmall marginTop="0" marginBottom="0">
|
<LabelSmall marginTop="0" marginBottom="0">
|
||||||
{isPendingProvision ? "Provisioning sandbox..." : "Creating session..."}
|
{shouldUseTaskStateEmptyState ? taskStatusState.title : isPendingProvision ? "Provisioning sandbox..." : "Creating session..."}
|
||||||
</LabelSmall>
|
</LabelSmall>
|
||||||
<Skeleton rows={1} height="32px" />
|
<Skeleton rows={1} height="32px" />
|
||||||
<ParagraphSmall marginTop="0" marginBottom="0" color="contentSecondary">
|
<ParagraphSmall marginTop="0" marginBottom="0" color="contentSecondary">
|
||||||
{selectedForSession?.statusMessage ?? (isPendingProvision ? "The task is still provisioning." : "The session is being created.")}
|
{shouldUseTaskStateEmptyState
|
||||||
|
? taskStateSummary
|
||||||
|
: (selectedForSession?.statusMessage ??
|
||||||
|
(isPendingProvision ? "The task is still provisioning." : "The session is being created."))}
|
||||||
</ParagraphSmall>
|
</ParagraphSmall>
|
||||||
</div>
|
</div>
|
||||||
) : null}
|
) : null}
|
||||||
|
|
||||||
{transcript.length === 0 && !(resolvedSessionId && sessionState.status === "loading") ? (
|
{transcript.length === 0 && !(resolvedSessionId && sessionState.status === "loading") ? (
|
||||||
<EmptyState testId="session-transcript-empty">
|
<EmptyState testId="session-transcript-empty">
|
||||||
{selectedForSession.runtimeStatus === "error" && selectedForSession.statusMessage
|
{shouldUseTaskStateEmptyState
|
||||||
? `Session failed: ${selectedForSession.statusMessage}`
|
? taskStateSummary
|
||||||
: isPendingProvision
|
: isPendingProvision
|
||||||
? (selectedForSession.statusMessage ?? "Provisioning sandbox...")
|
? (selectedForSession.statusMessage ?? "Provisioning sandbox...")
|
||||||
: isPendingSessionCreate
|
: isPendingSessionCreate
|
||||||
|
|
@ -1602,6 +1686,8 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
|
||||||
gap: theme.sizing.scale300,
|
gap: theme.sizing.scale300,
|
||||||
})}
|
})}
|
||||||
>
|
>
|
||||||
|
<MetaRow label="State" value={taskRuntimeStatus ?? "-"} mono />
|
||||||
|
<MetaRow label="State detail" value={taskStatusState.detail} />
|
||||||
<MetaRow label="Task" value={selectedForSession.id} mono />
|
<MetaRow label="Task" value={selectedForSession.id} mono />
|
||||||
<MetaRow label="Sandbox" value={selectedForSession.activeSandboxId ?? "-"} mono />
|
<MetaRow label="Sandbox" value={selectedForSession.activeSandboxId ?? "-"} mono />
|
||||||
<MetaRow label="Session" value={resolvedSessionId ?? "-"} mono />
|
<MetaRow label="Session" value={resolvedSessionId ?? "-"} mono />
|
||||||
|
|
@ -1646,7 +1732,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
|
||||||
</div>
|
</div>
|
||||||
</section>
|
</section>
|
||||||
|
|
||||||
{selectedForSession.runtimeStatus === "error" ? (
|
{taskRuntimeStatus === "error" ? (
|
||||||
<div
|
<div
|
||||||
className={css({
|
className={css({
|
||||||
padding: "12px",
|
padding: "12px",
|
||||||
|
|
@ -1665,11 +1751,11 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
|
||||||
>
|
>
|
||||||
<CircleAlert size={14} />
|
<CircleAlert size={14} />
|
||||||
<LabelSmall marginTop="0" marginBottom="0">
|
<LabelSmall marginTop="0" marginBottom="0">
|
||||||
Session reported an error state
|
Task reported an error state
|
||||||
</LabelSmall>
|
</LabelSmall>
|
||||||
</div>
|
</div>
|
||||||
<ParagraphSmall marginTop="0" marginBottom="0" color="contentSecondary">
|
<ParagraphSmall marginTop="0" marginBottom="0" color="contentSecondary">
|
||||||
{selectedForSession.statusMessage ? selectedForSession.statusMessage : "Open transcript in the center panel for details."}
|
{taskStatusState.detail}
|
||||||
</ParagraphSmall>
|
</ParagraphSmall>
|
||||||
</div>
|
</div>
|
||||||
) : null}
|
) : null}
|
||||||
|
|
@ -1926,6 +2012,7 @@ export function WorkspaceDashboard({ workspaceId, selectedTaskId, selectedRepoId
|
||||||
</ModalFooter>
|
</ModalFooter>
|
||||||
</Modal>
|
</Modal>
|
||||||
</DashboardGrid>
|
</DashboardGrid>
|
||||||
|
{showDevPanel ? <DevPanel workspaceId={workspaceId} snapshot={devPanelSnapshot} focusedTask={devPanelFocusedTask} /> : null}
|
||||||
</AppShell>
|
</AppShell>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -10,7 +10,7 @@ const base: TaskRecord = {
|
||||||
branchName: "feature/one",
|
branchName: "feature/one",
|
||||||
title: "Feature one",
|
title: "Feature one",
|
||||||
task: "Ship one",
|
task: "Ship one",
|
||||||
providerId: "daytona",
|
providerId: "local",
|
||||||
status: "running",
|
status: "running",
|
||||||
statusMessage: null,
|
statusMessage: null,
|
||||||
activeSandboxId: "sandbox-1",
|
activeSandboxId: "sandbox-1",
|
||||||
|
|
@ -18,9 +18,9 @@ const base: TaskRecord = {
|
||||||
sandboxes: [
|
sandboxes: [
|
||||||
{
|
{
|
||||||
sandboxId: "sandbox-1",
|
sandboxId: "sandbox-1",
|
||||||
providerId: "daytona",
|
providerId: "local",
|
||||||
sandboxActorId: null,
|
sandboxActorId: null,
|
||||||
switchTarget: "daytona://sandbox-1",
|
switchTarget: "sandbox://local/sandbox-1",
|
||||||
cwd: null,
|
cwd: null,
|
||||||
createdAt: 10,
|
createdAt: 10,
|
||||||
updatedAt: 10,
|
updatedAt: 10,
|
||||||
|
|
|
||||||
133
foundry/packages/frontend/src/features/tasks/status.test.ts
Normal file
133
foundry/packages/frontend/src/features/tasks/status.test.ts
Normal file
|
|
@ -0,0 +1,133 @@
|
||||||
|
import { describe, expect, it } from "vitest";
|
||||||
|
import { TaskStatusSchema } from "@sandbox-agent/foundry-shared";
|
||||||
|
import { defaultTaskStatusMessage, deriveHeaderStatus, describeTaskState, isProvisioningTaskStatus, resolveTaskStateDetail } from "./status";
|
||||||
|
|
||||||
|
describe("defaultTaskStatusMessage", () => {
|
||||||
|
it("covers every backend task status", () => {
|
||||||
|
for (const status of [...TaskStatusSchema.options, "new"] as const) {
|
||||||
|
expect(defaultTaskStatusMessage(status)).toMatch(/\S/);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it("returns the expected copy for init_ensure_name", () => {
|
||||||
|
expect(defaultTaskStatusMessage("init_ensure_name")).toBe("Determining title and branch.");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("resolveTaskStateDetail", () => {
|
||||||
|
it("prefers the backend status message when present", () => {
|
||||||
|
expect(resolveTaskStateDetail("init_ensure_name", "determining title and branch")).toBe("determining title and branch");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("falls back to the default copy when the backend message is empty", () => {
|
||||||
|
expect(resolveTaskStateDetail("init_complete", " ")).toBe("Finalizing task initialization.");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("describeTaskState", () => {
|
||||||
|
it("includes the raw backend status code in the title", () => {
|
||||||
|
expect(describeTaskState("kill_destroy_sandbox", null)).toEqual({
|
||||||
|
title: "Task state: kill_destroy_sandbox",
|
||||||
|
detail: "Destroying sandbox resources.",
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("isProvisioningTaskStatus", () => {
|
||||||
|
it("treats all init states as provisioning", () => {
|
||||||
|
expect(isProvisioningTaskStatus("init_bootstrap_db")).toBe(true);
|
||||||
|
expect(isProvisioningTaskStatus("init_ensure_name")).toBe(true);
|
||||||
|
expect(isProvisioningTaskStatus("init_complete")).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("does not treat steady-state or terminal states as provisioning", () => {
|
||||||
|
expect(isProvisioningTaskStatus("running")).toBe(false);
|
||||||
|
expect(isProvisioningTaskStatus("archived")).toBe(false);
|
||||||
|
expect(isProvisioningTaskStatus("killed")).toBe(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("deriveHeaderStatus", () => {
|
||||||
|
it("returns error variant when session has error", () => {
|
||||||
|
const result = deriveHeaderStatus("running", null, "error", "Sandbox crashed");
|
||||||
|
expect(result.variant).toBe("error");
|
||||||
|
expect(result.label).toBe("Session error");
|
||||||
|
expect(result.tooltip).toBe("Sandbox crashed");
|
||||||
|
expect(result.spinning).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("returns error variant when task has error", () => {
|
||||||
|
const result = deriveHeaderStatus("error", "session:error", null, null);
|
||||||
|
expect(result.variant).toBe("error");
|
||||||
|
expect(result.label).toBe("Error");
|
||||||
|
expect(result.spinning).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("returns warning variant with spinner for provisioning task", () => {
|
||||||
|
const result = deriveHeaderStatus("init_enqueue_provision", null, null, null);
|
||||||
|
expect(result.variant).toBe("warning");
|
||||||
|
expect(result.label).toBe("Provisioning");
|
||||||
|
expect(result.spinning).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("returns warning variant for pending_provision session", () => {
|
||||||
|
const result = deriveHeaderStatus("running", null, "pending_provision", null);
|
||||||
|
expect(result.variant).toBe("warning");
|
||||||
|
expect(result.label).toBe("Provisioning");
|
||||||
|
expect(result.spinning).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("returns warning variant for pending_session_create session", () => {
|
||||||
|
const result = deriveHeaderStatus("running", null, "pending_session_create", null);
|
||||||
|
expect(result.variant).toBe("warning");
|
||||||
|
expect(result.label).toBe("Creating session");
|
||||||
|
expect(result.spinning).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("returns success variant with spinner for running session", () => {
|
||||||
|
const result = deriveHeaderStatus("running", null, "running", null);
|
||||||
|
expect(result.variant).toBe("success");
|
||||||
|
expect(result.label).toBe("Running");
|
||||||
|
expect(result.spinning).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("returns success variant for idle/ready state", () => {
|
||||||
|
const result = deriveHeaderStatus("idle", null, "idle", null);
|
||||||
|
expect(result.variant).toBe("success");
|
||||||
|
expect(result.label).toBe("Ready");
|
||||||
|
expect(result.spinning).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("returns neutral variant for archived task", () => {
|
||||||
|
const result = deriveHeaderStatus("archived", null, null, null);
|
||||||
|
expect(result.variant).toBe("neutral");
|
||||||
|
expect(result.label).toBe("Archived");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("session error takes priority over task error", () => {
|
||||||
|
const result = deriveHeaderStatus("error", "session:error", "error", "Sandbox OOM");
|
||||||
|
expect(result.variant).toBe("error");
|
||||||
|
expect(result.label).toBe("Session error");
|
||||||
|
expect(result.tooltip).toBe("Sandbox OOM");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("returns warning when no sandbox is available", () => {
|
||||||
|
const result = deriveHeaderStatus("idle", null, "idle", null, false);
|
||||||
|
expect(result.variant).toBe("warning");
|
||||||
|
expect(result.label).toBe("No sandbox");
|
||||||
|
expect(result.spinning).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("still shows provisioning when no sandbox but task is provisioning", () => {
|
||||||
|
const result = deriveHeaderStatus("init_enqueue_provision", null, null, null, false);
|
||||||
|
expect(result.variant).toBe("warning");
|
||||||
|
expect(result.label).toBe("Provisioning");
|
||||||
|
expect(result.spinning).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("shows error over no-sandbox when session has error", () => {
|
||||||
|
const result = deriveHeaderStatus("idle", null, "error", "Connection lost", false);
|
||||||
|
expect(result.variant).toBe("error");
|
||||||
|
expect(result.label).toBe("Session error");
|
||||||
|
});
|
||||||
|
});
|
||||||
179
foundry/packages/frontend/src/features/tasks/status.ts
Normal file
179
foundry/packages/frontend/src/features/tasks/status.ts
Normal file
|
|
@ -0,0 +1,179 @@
|
||||||
|
import type { TaskStatus, WorkbenchSessionStatus } from "@sandbox-agent/foundry-shared";
|
||||||
|
import type { HeaderStatusInfo } from "../../components/mock-layout/ui";
|
||||||
|
|
||||||
|
export type TaskDisplayStatus = TaskStatus | "new";
|
||||||
|
|
||||||
|
export interface TaskStateDescriptor {
|
||||||
|
title: string;
|
||||||
|
detail: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isProvisioningTaskStatus(status: TaskDisplayStatus | null | undefined): boolean {
|
||||||
|
return (
|
||||||
|
status === "new" ||
|
||||||
|
status === "init_bootstrap_db" ||
|
||||||
|
status === "init_enqueue_provision" ||
|
||||||
|
status === "init_ensure_name" ||
|
||||||
|
status === "init_assert_name" ||
|
||||||
|
status === "init_complete"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function defaultTaskStatusMessage(status: TaskDisplayStatus | null | undefined): string {
|
||||||
|
switch (status) {
|
||||||
|
case "new":
|
||||||
|
return "Task created. Waiting to initialize.";
|
||||||
|
case "init_bootstrap_db":
|
||||||
|
return "Creating task records.";
|
||||||
|
case "init_enqueue_provision":
|
||||||
|
return "Queueing sandbox provisioning.";
|
||||||
|
case "init_ensure_name":
|
||||||
|
return "Determining title and branch.";
|
||||||
|
case "init_assert_name":
|
||||||
|
return "Validating title and branch.";
|
||||||
|
case "init_complete":
|
||||||
|
return "Finalizing task initialization.";
|
||||||
|
case "running":
|
||||||
|
return "Agent session is actively running.";
|
||||||
|
case "idle":
|
||||||
|
return "Sandbox is ready for the next prompt.";
|
||||||
|
case "archive_stop_status_sync":
|
||||||
|
return "Stopping sandbox status sync before archiving.";
|
||||||
|
case "archive_release_sandbox":
|
||||||
|
return "Releasing sandbox resources.";
|
||||||
|
case "archive_finalize":
|
||||||
|
return "Finalizing archive.";
|
||||||
|
case "archived":
|
||||||
|
return "Task has been archived.";
|
||||||
|
case "kill_destroy_sandbox":
|
||||||
|
return "Destroying sandbox resources.";
|
||||||
|
case "kill_finalize":
|
||||||
|
return "Finalizing task termination.";
|
||||||
|
case "killed":
|
||||||
|
return "Task has been terminated.";
|
||||||
|
case "error":
|
||||||
|
return "Task entered an error state.";
|
||||||
|
case null:
|
||||||
|
case undefined:
|
||||||
|
return "Task state unavailable.";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function resolveTaskStateDetail(status: TaskDisplayStatus | null | undefined, statusMessage: string | null | undefined): string {
|
||||||
|
const normalized = statusMessage?.trim();
|
||||||
|
return normalized && normalized.length > 0 ? normalized : defaultTaskStatusMessage(status);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function describeTaskState(status: TaskDisplayStatus | null | undefined, statusMessage: string | null | undefined): TaskStateDescriptor {
|
||||||
|
return {
|
||||||
|
title: status ? `Task state: ${status}` : "Task state unavailable",
|
||||||
|
detail: resolveTaskStateDetail(status, statusMessage),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Derives the header status pill state from the combined task + active session + sandbox state.
|
||||||
|
* Priority: session error > task error > no sandbox > provisioning > running > ready/idle > neutral.
|
||||||
|
*/
|
||||||
|
export function deriveHeaderStatus(
|
||||||
|
taskStatus: TaskDisplayStatus | null | undefined,
|
||||||
|
taskStatusMessage: string | null | undefined,
|
||||||
|
sessionStatus: WorkbenchSessionStatus | null | undefined,
|
||||||
|
sessionErrorMessage: string | null | undefined,
|
||||||
|
hasSandbox?: boolean,
|
||||||
|
): HeaderStatusInfo {
|
||||||
|
// Session error takes priority
|
||||||
|
if (sessionStatus === "error") {
|
||||||
|
return {
|
||||||
|
variant: "error",
|
||||||
|
label: "Session error",
|
||||||
|
spinning: false,
|
||||||
|
tooltip: sessionErrorMessage ?? "Session failed to start.",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Task error
|
||||||
|
if (taskStatus === "error") {
|
||||||
|
return {
|
||||||
|
variant: "error",
|
||||||
|
label: "Error",
|
||||||
|
spinning: false,
|
||||||
|
tooltip: taskStatusMessage ?? "Task entered an error state.",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// No sandbox available (not provisioning, not errored — just missing)
|
||||||
|
if (hasSandbox === false && !isProvisioningTaskStatus(taskStatus)) {
|
||||||
|
return {
|
||||||
|
variant: "warning",
|
||||||
|
label: "No sandbox",
|
||||||
|
spinning: false,
|
||||||
|
tooltip: taskStatusMessage ?? "Sandbox is not available for this task.",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Task provisioning (init_* states)
|
||||||
|
if (isProvisioningTaskStatus(taskStatus)) {
|
||||||
|
return {
|
||||||
|
variant: "warning",
|
||||||
|
label: "Provisioning",
|
||||||
|
spinning: true,
|
||||||
|
tooltip: resolveTaskStateDetail(taskStatus, taskStatusMessage),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Session pending states
|
||||||
|
if (sessionStatus === "pending_provision") {
|
||||||
|
return {
|
||||||
|
variant: "warning",
|
||||||
|
label: "Provisioning",
|
||||||
|
spinning: true,
|
||||||
|
tooltip: "Provisioning sandbox...",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (sessionStatus === "pending_session_create") {
|
||||||
|
return {
|
||||||
|
variant: "warning",
|
||||||
|
label: "Creating session",
|
||||||
|
spinning: true,
|
||||||
|
tooltip: "Creating agent session...",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Running
|
||||||
|
if (sessionStatus === "running") {
|
||||||
|
return {
|
||||||
|
variant: "success",
|
||||||
|
label: "Running",
|
||||||
|
spinning: true,
|
||||||
|
tooltip: "Agent is actively running.",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ready / idle
|
||||||
|
if (sessionStatus === "ready" || sessionStatus === "idle" || taskStatus === "idle" || taskStatus === "running") {
|
||||||
|
return {
|
||||||
|
variant: "success",
|
||||||
|
label: "Ready",
|
||||||
|
spinning: false,
|
||||||
|
tooltip: "Sandbox is ready.",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Terminal states
|
||||||
|
if (taskStatus === "archived" || taskStatus === "killed") {
|
||||||
|
return {
|
||||||
|
variant: "neutral",
|
||||||
|
label: taskStatus === "archived" ? "Archived" : "Terminated",
|
||||||
|
spinning: false,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback
|
||||||
|
return {
|
||||||
|
variant: "neutral",
|
||||||
|
label: taskStatus ?? "Unknown",
|
||||||
|
spinning: false,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
@ -1,3 +1,5 @@
|
||||||
|
import type { WorkbenchModelId } from "./workbench.js";
|
||||||
|
|
||||||
export type FoundryBillingPlanId = "free" | "team";
|
export type FoundryBillingPlanId = "free" | "team";
|
||||||
export type FoundryBillingStatus = "active" | "trialing" | "past_due" | "scheduled_cancel";
|
export type FoundryBillingStatus = "active" | "trialing" | "past_due" | "scheduled_cancel";
|
||||||
export type FoundryGithubInstallationStatus = "connected" | "install_required" | "reconnect_required";
|
export type FoundryGithubInstallationStatus = "connected" | "install_required" | "reconnect_required";
|
||||||
|
|
@ -55,7 +57,7 @@ export interface FoundryOrganizationSettings {
|
||||||
slug: string;
|
slug: string;
|
||||||
primaryDomain: string;
|
primaryDomain: string;
|
||||||
seatAccrualMode: "first_prompt";
|
seatAccrualMode: "first_prompt";
|
||||||
defaultModel: "claude-sonnet-4" | "claude-opus-4" | "gpt-4o" | "o3";
|
defaultModel: WorkbenchModelId;
|
||||||
autoImportRepos: boolean;
|
autoImportRepos: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -43,19 +43,17 @@ export const ConfigSchema = z.object({
|
||||||
.object({
|
.object({
|
||||||
local: z
|
local: z
|
||||||
.object({
|
.object({
|
||||||
rootDir: z.string().optional(),
|
image: z.string().optional(),
|
||||||
sandboxAgentPort: z.number().int().min(1).max(65535).optional(),
|
|
||||||
})
|
})
|
||||||
.default({}),
|
.default({}),
|
||||||
daytona: z
|
e2b: z
|
||||||
.object({
|
.object({
|
||||||
endpoint: z.string().optional(),
|
|
||||||
apiKey: z.string().optional(),
|
apiKey: z.string().optional(),
|
||||||
image: z.string().default("ubuntu:24.04"),
|
template: z.string().optional(),
|
||||||
})
|
})
|
||||||
.default({ image: "ubuntu:24.04" }),
|
.default({}),
|
||||||
})
|
})
|
||||||
.default({ local: {}, daytona: { image: "ubuntu:24.04" } }),
|
.default({ local: {}, e2b: {} }),
|
||||||
});
|
});
|
||||||
|
|
||||||
export type AppConfig = z.infer<typeof ConfigSchema>;
|
export type AppConfig = z.infer<typeof ConfigSchema>;
|
||||||
|
|
|
||||||
|
|
@ -7,7 +7,7 @@ export const WorkspaceIdSchema = z
|
||||||
.regex(/^[a-zA-Z0-9._-]+$/);
|
.regex(/^[a-zA-Z0-9._-]+$/);
|
||||||
export type WorkspaceId = z.infer<typeof WorkspaceIdSchema>;
|
export type WorkspaceId = z.infer<typeof WorkspaceIdSchema>;
|
||||||
|
|
||||||
export const ProviderIdSchema = z.enum(["daytona", "local"]);
|
export const ProviderIdSchema = z.enum(["e2b", "local"]);
|
||||||
export type ProviderId = z.infer<typeof ProviderIdSchema>;
|
export type ProviderId = z.infer<typeof ProviderIdSchema>;
|
||||||
|
|
||||||
export const AgentTypeSchema = z.enum(["claude", "codex"]);
|
export const AgentTypeSchema = z.enum(["claude", "codex"]);
|
||||||
|
|
@ -24,12 +24,6 @@ export const TaskStatusSchema = z.enum([
|
||||||
"init_enqueue_provision",
|
"init_enqueue_provision",
|
||||||
"init_ensure_name",
|
"init_ensure_name",
|
||||||
"init_assert_name",
|
"init_assert_name",
|
||||||
"init_create_sandbox",
|
|
||||||
"init_ensure_agent",
|
|
||||||
"init_start_sandbox_instance",
|
|
||||||
"init_create_session",
|
|
||||||
"init_write_db",
|
|
||||||
"init_start_status_sync",
|
|
||||||
"init_complete",
|
"init_complete",
|
||||||
"running",
|
"running",
|
||||||
"idle",
|
"idle",
|
||||||
|
|
|
||||||
|
|
@ -2,7 +2,15 @@ import type { AgentType, ProviderId, TaskStatus } from "./contracts.js";
|
||||||
|
|
||||||
export type WorkbenchTaskStatus = TaskStatus | "new";
|
export type WorkbenchTaskStatus = TaskStatus | "new";
|
||||||
export type WorkbenchAgentKind = "Claude" | "Codex" | "Cursor";
|
export type WorkbenchAgentKind = "Claude" | "Codex" | "Cursor";
|
||||||
export type WorkbenchModelId = "claude-sonnet-4" | "claude-opus-4" | "gpt-4o" | "o3";
|
export type WorkbenchModelId =
|
||||||
|
| "claude-sonnet-4"
|
||||||
|
| "claude-opus-4"
|
||||||
|
| "gpt-5.3-codex"
|
||||||
|
| "gpt-5.4"
|
||||||
|
| "gpt-5.2-codex"
|
||||||
|
| "gpt-5.1-codex-max"
|
||||||
|
| "gpt-5.2"
|
||||||
|
| "gpt-5.1-codex-mini";
|
||||||
export type WorkbenchSessionStatus = "pending_provision" | "pending_session_create" | "ready" | "running" | "idle" | "error";
|
export type WorkbenchSessionStatus = "pending_provision" | "pending_session_create" | "ready" | "running" | "idle" | "error";
|
||||||
|
|
||||||
export interface WorkbenchTranscriptEvent {
|
export interface WorkbenchTranscriptEvent {
|
||||||
|
|
|
||||||
|
|
@ -15,7 +15,8 @@ const cfg: AppConfig = ConfigSchema.parse({
|
||||||
backup_retention_days: 7,
|
backup_retention_days: 7,
|
||||||
},
|
},
|
||||||
providers: {
|
providers: {
|
||||||
daytona: { image: "ubuntu:24.04" },
|
local: {},
|
||||||
|
e2b: {},
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
|
||||||
283
pnpm-lock.yaml
generated
283
pnpm-lock.yaml
generated
|
|
@ -452,9 +452,9 @@ importers:
|
||||||
|
|
||||||
foundry/packages/backend:
|
foundry/packages/backend:
|
||||||
dependencies:
|
dependencies:
|
||||||
'@daytonaio/sdk':
|
'@e2b/code-interpreter':
|
||||||
specifier: 0.141.0
|
specifier: ^2.3.3
|
||||||
version: 0.141.0(ws@8.19.0)
|
version: 2.3.3
|
||||||
'@hono/node-server':
|
'@hono/node-server':
|
||||||
specifier: ^1.19.7
|
specifier: ^1.19.7
|
||||||
version: 1.19.9(hono@4.12.2)
|
version: 1.19.9(hono@4.12.2)
|
||||||
|
|
@ -473,6 +473,9 @@ importers:
|
||||||
better-auth:
|
better-auth:
|
||||||
specifier: ^1.5.5
|
specifier: ^1.5.5
|
||||||
version: 1.5.5(@cloudflare/workers-types@4.20260313.1)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(pg@8.20.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(solid-js@1.9.11)(vitest@3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2))
|
version: 1.5.5(@cloudflare/workers-types@4.20260313.1)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(pg@8.20.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(solid-js@1.9.11)(vitest@3.2.4(@types/debug@4.1.12)(@types/node@25.5.0)(jiti@1.21.7)(tsx@4.21.0)(yaml@2.8.2))
|
||||||
|
dockerode:
|
||||||
|
specifier: ^4.0.9
|
||||||
|
version: 4.0.9
|
||||||
drizzle-kit:
|
drizzle-kit:
|
||||||
specifier: ^0.31.8
|
specifier: ^0.31.8
|
||||||
version: 0.31.9
|
version: 0.31.9
|
||||||
|
|
@ -486,8 +489,8 @@ importers:
|
||||||
specifier: ^10.3.1
|
specifier: ^10.3.1
|
||||||
version: 10.3.1
|
version: 10.3.1
|
||||||
rivetkit:
|
rivetkit:
|
||||||
specifier: 2.1.6
|
specifier: https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a
|
||||||
version: 2.1.6(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0)
|
version: https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a(@e2b/code-interpreter@2.3.3)(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(dockerode@4.0.9)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0)
|
||||||
sandbox-agent:
|
sandbox-agent:
|
||||||
specifier: workspace:*
|
specifier: workspace:*
|
||||||
version: link:../../../sdks/typescript
|
version: link:../../../sdks/typescript
|
||||||
|
|
@ -1686,21 +1689,12 @@ packages:
|
||||||
moment:
|
moment:
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
'@daytonaio/api-client@0.141.0':
|
|
||||||
resolution: {integrity: sha512-DSPCurIEjfFyXCd07jkDgfsoFppVhTLyIJdvfb0LgG1EgV75BPqqzk2WM4ragBFJUuK2URF5CK7qkaHW0AXKMA==}
|
|
||||||
|
|
||||||
'@daytonaio/api-client@0.151.0':
|
'@daytonaio/api-client@0.151.0':
|
||||||
resolution: {integrity: sha512-Ahu7bjunHbJEEAEkcEFjjdazN+1hML/lLZwOyul2WFaCTh9q5dmufhr0qKAKCIs3ccTY+Is0fO5UtPpo/fig+A==}
|
resolution: {integrity: sha512-Ahu7bjunHbJEEAEkcEFjjdazN+1hML/lLZwOyul2WFaCTh9q5dmufhr0qKAKCIs3ccTY+Is0fO5UtPpo/fig+A==}
|
||||||
|
|
||||||
'@daytonaio/sdk@0.141.0':
|
|
||||||
resolution: {integrity: sha512-JUopkS9SkO7h4WN8CjparOrP9k954euOF5KG//PeCEFOxUWTPFOME70GrmHXQKa1qkdZiF/4tz9jtZ744B1I2w==}
|
|
||||||
|
|
||||||
'@daytonaio/sdk@0.151.0':
|
'@daytonaio/sdk@0.151.0':
|
||||||
resolution: {integrity: sha512-wd4x9Bipt1KmTD+0GXTVEQtgXBmyy/gAmCjdOJllwo5Ya5RbGu/CZeitBCIEKhDM8TnkxefVxdpxBCfi/Wg9xA==}
|
resolution: {integrity: sha512-wd4x9Bipt1KmTD+0GXTVEQtgXBmyy/gAmCjdOJllwo5Ya5RbGu/CZeitBCIEKhDM8TnkxefVxdpxBCfi/Wg9xA==}
|
||||||
|
|
||||||
'@daytonaio/toolbox-api-client@0.141.0':
|
|
||||||
resolution: {integrity: sha512-KGkCLDLAltd9FCic3PhSJGrTp3RwGsUwWEGp5vyWZFQGWpJV8CVp08CH5SBdo4YhuqFUVlyQcwha1HpzpVH++A==}
|
|
||||||
|
|
||||||
'@daytonaio/toolbox-api-client@0.151.0':
|
'@daytonaio/toolbox-api-client@0.151.0':
|
||||||
resolution: {integrity: sha512-63n/wBNnZh1r8dUypzwNeenoA4okWNEWzsE6kZ8b047y5zBYT0cI63cGRn25nSrepLlGKpX4MJnVjjz50+bVqA==}
|
resolution: {integrity: sha512-63n/wBNnZh1r8dUypzwNeenoA4okWNEWzsE6kZ8b047y5zBYT0cI63cGRn25nSrepLlGKpX4MJnVjjz50+bVqA==}
|
||||||
|
|
||||||
|
|
@ -3165,9 +3159,17 @@ packages:
|
||||||
'@rivetkit/engine-runner-protocol@2.1.6':
|
'@rivetkit/engine-runner-protocol@2.1.6':
|
||||||
resolution: {integrity: sha512-QwaWvAJN2KGae+UHKZbLiEWaWj9ycmwtrRtUq728CU+lidkaGv5yHxXb4gkXSD7rhGQcR98+XWZLb0F0BM/vAg==}
|
resolution: {integrity: sha512-QwaWvAJN2KGae+UHKZbLiEWaWj9ycmwtrRtUq728CU+lidkaGv5yHxXb4gkXSD7rhGQcR98+XWZLb0F0BM/vAg==}
|
||||||
|
|
||||||
|
'@rivetkit/engine-runner-protocol@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/engine-runner-protocol@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd':
|
||||||
|
resolution: {tarball: https://pkg.pr.new/rivet-dev/rivet/@rivetkit/engine-runner-protocol@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd}
|
||||||
|
version: 2.1.6
|
||||||
|
|
||||||
'@rivetkit/engine-runner@2.1.6':
|
'@rivetkit/engine-runner@2.1.6':
|
||||||
resolution: {integrity: sha512-WpiEmi/SxAVED0N/M0kvPZwq/MxMuuz/Y89ut1sTP7syPzpCauGxafOdqkTqiX1ef+N1ZlrtX+v/LwDF/jIgFw==}
|
resolution: {integrity: sha512-WpiEmi/SxAVED0N/M0kvPZwq/MxMuuz/Y89ut1sTP7syPzpCauGxafOdqkTqiX1ef+N1ZlrtX+v/LwDF/jIgFw==}
|
||||||
|
|
||||||
|
'@rivetkit/engine-runner@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/engine-runner@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd':
|
||||||
|
resolution: {tarball: https://pkg.pr.new/rivet-dev/rivet/@rivetkit/engine-runner@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd}
|
||||||
|
version: 2.1.6
|
||||||
|
|
||||||
'@rivetkit/fast-json-patch@3.1.2':
|
'@rivetkit/fast-json-patch@3.1.2':
|
||||||
resolution: {integrity: sha512-CtA50xgsSSzICQduF/NDShPRzvucnNvsW/lQO0WgMTT1XAj9Lfae4pm7r3llFwilgG+9iq76Hv1LUqNy72v6yw==}
|
resolution: {integrity: sha512-CtA50xgsSSzICQduF/NDShPRzvucnNvsW/lQO0WgMTT1XAj9Lfae4pm7r3llFwilgG+9iq76Hv1LUqNy72v6yw==}
|
||||||
|
|
||||||
|
|
@ -3179,6 +3181,11 @@ packages:
|
||||||
resolution: {integrity: sha512-jbCrigzqoygZTYdZu7izaQjr77Q4BFX1HwhW4Mf0UFIaKT72AteH/w4PcktzrKcw4Utmo0zX0C6zNBRKo0IpOA==}
|
resolution: {integrity: sha512-jbCrigzqoygZTYdZu7izaQjr77Q4BFX1HwhW4Mf0UFIaKT72AteH/w4PcktzrKcw4Utmo0zX0C6zNBRKo0IpOA==}
|
||||||
engines: {node: '>=20.0.0'}
|
engines: {node: '>=20.0.0'}
|
||||||
|
|
||||||
|
'@rivetkit/sqlite-vfs@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/sqlite-vfs@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd':
|
||||||
|
resolution: {tarball: https://pkg.pr.new/rivet-dev/rivet/@rivetkit/sqlite-vfs@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd}
|
||||||
|
version: 2.1.6
|
||||||
|
engines: {node: '>=20.0.0'}
|
||||||
|
|
||||||
'@rivetkit/sqlite@0.1.1':
|
'@rivetkit/sqlite@0.1.1':
|
||||||
resolution: {integrity: sha512-NE7ZBy/hQhOrWzMZFjkHX9SoXxf+ILcDvVV+mNbUYPgiy/fsDzlXdK0+JDTGnko5f4Xl6/KVCoCozz9gkwkq8A==}
|
resolution: {integrity: sha512-NE7ZBy/hQhOrWzMZFjkHX9SoXxf+ILcDvVV+mNbUYPgiy/fsDzlXdK0+JDTGnko5f4Xl6/KVCoCozz9gkwkq8A==}
|
||||||
|
|
||||||
|
|
@ -3186,13 +3193,27 @@ packages:
|
||||||
resolution: {integrity: sha512-wuuGWoWWdUPbqs5u+31YodSUOsYMydaa+/cxZ7I5KaUe26fK0i1E+0ytqC1JGQm6utWeuYp8cLUX3WSEfVKJhQ==}
|
resolution: {integrity: sha512-wuuGWoWWdUPbqs5u+31YodSUOsYMydaa+/cxZ7I5KaUe26fK0i1E+0ytqC1JGQm6utWeuYp8cLUX3WSEfVKJhQ==}
|
||||||
engines: {node: '>=18.0.0'}
|
engines: {node: '>=18.0.0'}
|
||||||
|
|
||||||
|
'@rivetkit/traces@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/traces@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd':
|
||||||
|
resolution: {tarball: https://pkg.pr.new/rivet-dev/rivet/@rivetkit/traces@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd}
|
||||||
|
version: 2.1.6
|
||||||
|
engines: {node: '>=18.0.0'}
|
||||||
|
|
||||||
'@rivetkit/virtual-websocket@2.0.33':
|
'@rivetkit/virtual-websocket@2.0.33':
|
||||||
resolution: {integrity: sha512-sMoHZgBy9WDW76pv+ML3LPgf7TWk5vXdu3ZpPO20j6n+rB3fLacnnmzjt5xD6tZcJ/x5qINyEywGgcxA7MTMuQ==}
|
resolution: {integrity: sha512-sMoHZgBy9WDW76pv+ML3LPgf7TWk5vXdu3ZpPO20j6n+rB3fLacnnmzjt5xD6tZcJ/x5qINyEywGgcxA7MTMuQ==}
|
||||||
|
|
||||||
|
'@rivetkit/virtual-websocket@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/virtual-websocket@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd':
|
||||||
|
resolution: {tarball: https://pkg.pr.new/rivet-dev/rivet/@rivetkit/virtual-websocket@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd}
|
||||||
|
version: 2.0.33
|
||||||
|
|
||||||
'@rivetkit/workflow-engine@2.1.6':
|
'@rivetkit/workflow-engine@2.1.6':
|
||||||
resolution: {integrity: sha512-eLVFBbhOlBQKzO5lu032tOo0OEAFFp7uNcGwvB1mBFmYsm7aKBgnJl214IV39a6fRtCL2meVxiMU1GKb006zYw==}
|
resolution: {integrity: sha512-eLVFBbhOlBQKzO5lu032tOo0OEAFFp7uNcGwvB1mBFmYsm7aKBgnJl214IV39a6fRtCL2meVxiMU1GKb006zYw==}
|
||||||
engines: {node: '>=18.0.0'}
|
engines: {node: '>=18.0.0'}
|
||||||
|
|
||||||
|
'@rivetkit/workflow-engine@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/workflow-engine@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd':
|
||||||
|
resolution: {tarball: https://pkg.pr.new/rivet-dev/rivet/@rivetkit/workflow-engine@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd}
|
||||||
|
version: 2.1.6
|
||||||
|
engines: {node: '>=18.0.0'}
|
||||||
|
|
||||||
'@rolldown/pluginutils@1.0.0-beta.27':
|
'@rolldown/pluginutils@1.0.0-beta.27':
|
||||||
resolution: {integrity: sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==}
|
resolution: {integrity: sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==}
|
||||||
|
|
||||||
|
|
@ -3336,6 +3357,38 @@ packages:
|
||||||
'@rtsao/csstype@2.6.5-forked.0':
|
'@rtsao/csstype@2.6.5-forked.0':
|
||||||
resolution: {integrity: sha512-0HwnY8uPWcCloTgdbbaJG3MbDUfNf6yKWZfCKxFv9yj2Sbp4mSKaIjC7Cr/5L4hMxvrrk85CU3wlAg7EtBBJ1Q==}
|
resolution: {integrity: sha512-0HwnY8uPWcCloTgdbbaJG3MbDUfNf6yKWZfCKxFv9yj2Sbp4mSKaIjC7Cr/5L4hMxvrrk85CU3wlAg7EtBBJ1Q==}
|
||||||
|
|
||||||
|
'@sandbox-agent/cli-darwin-arm64@0.3.2':
|
||||||
|
resolution: {integrity: sha512-w4aAr7vPj4m6Lwr25bIoXRVTJDoybP/tcGhB+unzsRUSQqCtrZpRCg7opJqds5vIOQzVxFlmVwquKWcPlDjYRA==}
|
||||||
|
cpu: [arm64]
|
||||||
|
os: [darwin]
|
||||||
|
|
||||||
|
'@sandbox-agent/cli-darwin-x64@0.3.2':
|
||||||
|
resolution: {integrity: sha512-OJkaeNGjvPNWHVnNgUXxN4H5nv75nWwRnxOVRf1UP8VvLhd3xoL6uWGgmVoRSF+Wwg3dMHbkrmMwAdcXyaSV9w==}
|
||||||
|
cpu: [x64]
|
||||||
|
os: [darwin]
|
||||||
|
|
||||||
|
'@sandbox-agent/cli-linux-arm64@0.3.2':
|
||||||
|
resolution: {integrity: sha512-WTreRie8wrvGijuqBaK7/78sfizCcex1CNBZjws7a6/tn4Ar+thQ0jbY8alrpdtBLuwzk/pBIQ2oRU9vidHtvQ==}
|
||||||
|
cpu: [arm64]
|
||||||
|
os: [linux]
|
||||||
|
|
||||||
|
'@sandbox-agent/cli-linux-x64@0.3.2':
|
||||||
|
resolution: {integrity: sha512-aas9+UdW0+j2aWOCp+EV5GA8JkmwsIg0lSRkrRijzrnewsNxlSFQ4dIsSpTkqyMWO18STqjqFtmkZI/dIASEyQ==}
|
||||||
|
cpu: [x64]
|
||||||
|
os: [linux]
|
||||||
|
|
||||||
|
'@sandbox-agent/cli-shared@0.3.2':
|
||||||
|
resolution: {integrity: sha512-4UQKczwfY+Bf83kQ9rdXv3U5Z9PqfWm2wp6EK2rzKh6iuMAtuNI8PSUWodi1LBUjch8mJv4rx7Gs/XCO92dRRw==}
|
||||||
|
|
||||||
|
'@sandbox-agent/cli-win32-x64@0.3.2':
|
||||||
|
resolution: {integrity: sha512-mJejOEp9czrMzbpN0VzC/y6UfKU/RyAJjTWHBNcCVfHP2zDuIOwT8y3gErM+q4CHt0lHR616wQYIqNpY8QqyDA==}
|
||||||
|
cpu: [x64]
|
||||||
|
os: [win32]
|
||||||
|
|
||||||
|
'@sandbox-agent/cli@0.3.2':
|
||||||
|
resolution: {integrity: sha512-nQ3bxbrr0QMdm0eK/MvBNd6Npvt1VCxQrrkpf747dVCHmKxCmcRc3t/jeeye1Lof6sP8l01mHgvSFNiez/KNHQ==}
|
||||||
|
hasBin: true
|
||||||
|
|
||||||
'@shikijs/core@3.21.0':
|
'@shikijs/core@3.21.0':
|
||||||
resolution: {integrity: sha512-AXSQu/2n1UIQekY8euBJlvFYZIw0PHY63jUzGbrOma4wPxzznJXTXkri+QcHeBNaFxiiOljKxxJkVSoB3PjbyA==}
|
resolution: {integrity: sha512-AXSQu/2n1UIQekY8euBJlvFYZIw0PHY63jUzGbrOma4wPxzznJXTXkri+QcHeBNaFxiiOljKxxJkVSoB3PjbyA==}
|
||||||
|
|
||||||
|
|
@ -3866,6 +3919,9 @@ packages:
|
||||||
engines: {node: '>=0.4.0'}
|
engines: {node: '>=0.4.0'}
|
||||||
hasBin: true
|
hasBin: true
|
||||||
|
|
||||||
|
acp-http-client@0.3.2:
|
||||||
|
resolution: {integrity: sha512-btRUDXAA9BlcTQURsJogdWthoXsKOnMeFhtYlEYQxgt0vq7H6xMfMrewlIgFjRXgRTbru4Fre2T6wS/amTTyjQ==}
|
||||||
|
|
||||||
aggregate-error@5.0.0:
|
aggregate-error@5.0.0:
|
||||||
resolution: {integrity: sha512-gOsf2YwSlleG6IjRYG2A7k0HmBMEo6qVNk9Bp/EaLgAJT5ngH6PXbqa4ItvnEwCm/velL5jAnQgsHsWnjhGmvw==}
|
resolution: {integrity: sha512-gOsf2YwSlleG6IjRYG2A7k0HmBMEo6qVNk9Bp/EaLgAJT5ngH6PXbqa4ItvnEwCm/velL5jAnQgsHsWnjhGmvw==}
|
||||||
engines: {node: '>=18'}
|
engines: {node: '>=18'}
|
||||||
|
|
@ -6503,6 +6559,40 @@ packages:
|
||||||
ws:
|
ws:
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
rivetkit@https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a:
|
||||||
|
resolution: {tarball: https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a}
|
||||||
|
version: 2.1.6
|
||||||
|
engines: {node: '>=22.0.0'}
|
||||||
|
peerDependencies:
|
||||||
|
'@daytonaio/sdk': ^0.150.0
|
||||||
|
'@e2b/code-interpreter': ^2.3.3
|
||||||
|
'@hono/node-server': ^1.14.0
|
||||||
|
'@hono/node-ws': ^1.1.1
|
||||||
|
dockerode: ^4.0.9
|
||||||
|
drizzle-kit: ^0.31.2
|
||||||
|
drizzle-orm: ^0.44.2
|
||||||
|
eventsource: ^4.0.0
|
||||||
|
ws: ^8.0.0
|
||||||
|
peerDependenciesMeta:
|
||||||
|
'@daytonaio/sdk':
|
||||||
|
optional: true
|
||||||
|
'@e2b/code-interpreter':
|
||||||
|
optional: true
|
||||||
|
'@hono/node-server':
|
||||||
|
optional: true
|
||||||
|
'@hono/node-ws':
|
||||||
|
optional: true
|
||||||
|
dockerode:
|
||||||
|
optional: true
|
||||||
|
drizzle-kit:
|
||||||
|
optional: true
|
||||||
|
drizzle-orm:
|
||||||
|
optional: true
|
||||||
|
eventsource:
|
||||||
|
optional: true
|
||||||
|
ws:
|
||||||
|
optional: true
|
||||||
|
|
||||||
robust-predicates@3.0.2:
|
robust-predicates@3.0.2:
|
||||||
resolution: {integrity: sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==}
|
resolution: {integrity: sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==}
|
||||||
|
|
||||||
|
|
@ -6534,6 +6624,9 @@ packages:
|
||||||
safer-buffer@2.1.2:
|
safer-buffer@2.1.2:
|
||||||
resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==}
|
resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==}
|
||||||
|
|
||||||
|
sandbox-agent@0.3.2:
|
||||||
|
resolution: {integrity: sha512-ic1UPLMKExjXIw4ViL0Wo07YsSqGtS25g6b6BechOX5CBC4d130tCR1xrhJ8Fuy6jlzx6I+f7gIxDedTnRUWSA==}
|
||||||
|
|
||||||
sax@1.4.4:
|
sax@1.4.4:
|
||||||
resolution: {integrity: sha512-1n3r/tGXO6b6VXMdFT54SHzT9ytu9yr7TaELowdYpMqY/Ao7EnlQGmAQ1+RatX7Tkkdm6hONI2owqNx2aZj5Sw==}
|
resolution: {integrity: sha512-1n3r/tGXO6b6VXMdFT54SHzT9ytu9yr7TaELowdYpMqY/Ao7EnlQGmAQ1+RatX7Tkkdm6hONI2owqNx2aZj5Sw==}
|
||||||
engines: {node: '>=11.0.0'}
|
engines: {node: '>=11.0.0'}
|
||||||
|
|
@ -8520,49 +8613,12 @@ snapshots:
|
||||||
optionalDependencies:
|
optionalDependencies:
|
||||||
moment: 2.30.1
|
moment: 2.30.1
|
||||||
|
|
||||||
'@daytonaio/api-client@0.141.0':
|
|
||||||
dependencies:
|
|
||||||
axios: 1.13.5
|
|
||||||
transitivePeerDependencies:
|
|
||||||
- debug
|
|
||||||
|
|
||||||
'@daytonaio/api-client@0.151.0':
|
'@daytonaio/api-client@0.151.0':
|
||||||
dependencies:
|
dependencies:
|
||||||
axios: 1.13.5
|
axios: 1.13.5
|
||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
- debug
|
- debug
|
||||||
|
|
||||||
'@daytonaio/sdk@0.141.0(ws@8.19.0)':
|
|
||||||
dependencies:
|
|
||||||
'@aws-sdk/client-s3': 3.975.0
|
|
||||||
'@aws-sdk/lib-storage': 3.975.0(@aws-sdk/client-s3@3.975.0)
|
|
||||||
'@daytonaio/api-client': 0.141.0
|
|
||||||
'@daytonaio/toolbox-api-client': 0.141.0
|
|
||||||
'@iarna/toml': 2.2.5
|
|
||||||
'@opentelemetry/api': 1.9.0
|
|
||||||
'@opentelemetry/exporter-trace-otlp-http': 0.207.0(@opentelemetry/api@1.9.0)
|
|
||||||
'@opentelemetry/instrumentation-http': 0.207.0(@opentelemetry/api@1.9.0)
|
|
||||||
'@opentelemetry/otlp-exporter-base': 0.207.0(@opentelemetry/api@1.9.0)
|
|
||||||
'@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.0)
|
|
||||||
'@opentelemetry/sdk-node': 0.207.0(@opentelemetry/api@1.9.0)
|
|
||||||
'@opentelemetry/sdk-trace-base': 2.5.0(@opentelemetry/api@1.9.0)
|
|
||||||
'@opentelemetry/semantic-conventions': 1.39.0
|
|
||||||
axios: 1.13.5
|
|
||||||
busboy: 1.6.0
|
|
||||||
dotenv: 17.2.3
|
|
||||||
expand-tilde: 2.0.2
|
|
||||||
fast-glob: 3.3.3
|
|
||||||
form-data: 4.0.5
|
|
||||||
isomorphic-ws: 5.0.0(ws@8.19.0)
|
|
||||||
pathe: 2.0.3
|
|
||||||
shell-quote: 1.8.3
|
|
||||||
tar: 7.5.7
|
|
||||||
transitivePeerDependencies:
|
|
||||||
- aws-crt
|
|
||||||
- debug
|
|
||||||
- supports-color
|
|
||||||
- ws
|
|
||||||
|
|
||||||
'@daytonaio/sdk@0.151.0(ws@8.19.0)':
|
'@daytonaio/sdk@0.151.0(ws@8.19.0)':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@aws-sdk/client-s3': 3.975.0
|
'@aws-sdk/client-s3': 3.975.0
|
||||||
|
|
@ -8594,12 +8650,6 @@ snapshots:
|
||||||
- supports-color
|
- supports-color
|
||||||
- ws
|
- ws
|
||||||
|
|
||||||
'@daytonaio/toolbox-api-client@0.141.0':
|
|
||||||
dependencies:
|
|
||||||
axios: 1.13.5
|
|
||||||
transitivePeerDependencies:
|
|
||||||
- debug
|
|
||||||
|
|
||||||
'@daytonaio/toolbox-api-client@0.151.0':
|
'@daytonaio/toolbox-api-client@0.151.0':
|
||||||
dependencies:
|
dependencies:
|
||||||
axios: 1.13.5
|
axios: 1.13.5
|
||||||
|
|
@ -9693,6 +9743,10 @@ snapshots:
|
||||||
dependencies:
|
dependencies:
|
||||||
'@rivetkit/bare-ts': 0.6.2
|
'@rivetkit/bare-ts': 0.6.2
|
||||||
|
|
||||||
|
'@rivetkit/engine-runner-protocol@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/engine-runner-protocol@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd':
|
||||||
|
dependencies:
|
||||||
|
'@rivetkit/bare-ts': 0.6.2
|
||||||
|
|
||||||
'@rivetkit/engine-runner@2.1.6':
|
'@rivetkit/engine-runner@2.1.6':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@rivetkit/engine-runner-protocol': 2.1.6
|
'@rivetkit/engine-runner-protocol': 2.1.6
|
||||||
|
|
@ -9704,6 +9758,17 @@ snapshots:
|
||||||
- bufferutil
|
- bufferutil
|
||||||
- utf-8-validate
|
- utf-8-validate
|
||||||
|
|
||||||
|
'@rivetkit/engine-runner@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/engine-runner@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd':
|
||||||
|
dependencies:
|
||||||
|
'@rivetkit/engine-runner-protocol': https://pkg.pr.new/rivet-dev/rivet/@rivetkit/engine-runner-protocol@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd
|
||||||
|
'@rivetkit/virtual-websocket': https://pkg.pr.new/rivet-dev/rivet/@rivetkit/virtual-websocket@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd
|
||||||
|
pino: 9.14.0
|
||||||
|
uuid: 12.0.0
|
||||||
|
ws: 8.19.0
|
||||||
|
transitivePeerDependencies:
|
||||||
|
- bufferutil
|
||||||
|
- utf-8-validate
|
||||||
|
|
||||||
'@rivetkit/fast-json-patch@3.1.2': {}
|
'@rivetkit/fast-json-patch@3.1.2': {}
|
||||||
|
|
||||||
'@rivetkit/on-change@6.0.2-rc.1': {}
|
'@rivetkit/on-change@6.0.2-rc.1': {}
|
||||||
|
|
@ -9714,6 +9779,12 @@ snapshots:
|
||||||
'@rivetkit/sqlite': 0.1.1
|
'@rivetkit/sqlite': 0.1.1
|
||||||
vbare: 0.0.4
|
vbare: 0.0.4
|
||||||
|
|
||||||
|
'@rivetkit/sqlite-vfs@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/sqlite-vfs@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd':
|
||||||
|
dependencies:
|
||||||
|
'@rivetkit/bare-ts': 0.6.2
|
||||||
|
'@rivetkit/sqlite': 0.1.1
|
||||||
|
vbare: 0.0.4
|
||||||
|
|
||||||
'@rivetkit/sqlite@0.1.1': {}
|
'@rivetkit/sqlite@0.1.1': {}
|
||||||
|
|
||||||
'@rivetkit/traces@2.1.6':
|
'@rivetkit/traces@2.1.6':
|
||||||
|
|
@ -9723,8 +9794,17 @@ snapshots:
|
||||||
fdb-tuple: 1.0.0
|
fdb-tuple: 1.0.0
|
||||||
vbare: 0.0.4
|
vbare: 0.0.4
|
||||||
|
|
||||||
|
'@rivetkit/traces@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/traces@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd':
|
||||||
|
dependencies:
|
||||||
|
'@rivetkit/bare-ts': 0.6.2
|
||||||
|
cbor-x: 1.6.3
|
||||||
|
fdb-tuple: 1.0.0
|
||||||
|
vbare: 0.0.4
|
||||||
|
|
||||||
'@rivetkit/virtual-websocket@2.0.33': {}
|
'@rivetkit/virtual-websocket@2.0.33': {}
|
||||||
|
|
||||||
|
'@rivetkit/virtual-websocket@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/virtual-websocket@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd': {}
|
||||||
|
|
||||||
'@rivetkit/workflow-engine@2.1.6':
|
'@rivetkit/workflow-engine@2.1.6':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@rivetkit/bare-ts': 0.6.2
|
'@rivetkit/bare-ts': 0.6.2
|
||||||
|
|
@ -9733,6 +9813,14 @@ snapshots:
|
||||||
pino: 9.14.0
|
pino: 9.14.0
|
||||||
vbare: 0.0.4
|
vbare: 0.0.4
|
||||||
|
|
||||||
|
'@rivetkit/workflow-engine@https://pkg.pr.new/rivet-dev/rivet/@rivetkit/workflow-engine@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd':
|
||||||
|
dependencies:
|
||||||
|
'@rivetkit/bare-ts': 0.6.2
|
||||||
|
cbor-x: 1.6.3
|
||||||
|
fdb-tuple: 1.0.0
|
||||||
|
pino: 9.14.0
|
||||||
|
vbare: 0.0.4
|
||||||
|
|
||||||
'@rolldown/pluginutils@1.0.0-beta.27': {}
|
'@rolldown/pluginutils@1.0.0-beta.27': {}
|
||||||
|
|
||||||
'@rolldown/pluginutils@1.0.0-rc.3': {}
|
'@rolldown/pluginutils@1.0.0-rc.3': {}
|
||||||
|
|
@ -9822,6 +9910,34 @@ snapshots:
|
||||||
|
|
||||||
'@rtsao/csstype@2.6.5-forked.0': {}
|
'@rtsao/csstype@2.6.5-forked.0': {}
|
||||||
|
|
||||||
|
'@sandbox-agent/cli-darwin-arm64@0.3.2':
|
||||||
|
optional: true
|
||||||
|
|
||||||
|
'@sandbox-agent/cli-darwin-x64@0.3.2':
|
||||||
|
optional: true
|
||||||
|
|
||||||
|
'@sandbox-agent/cli-linux-arm64@0.3.2':
|
||||||
|
optional: true
|
||||||
|
|
||||||
|
'@sandbox-agent/cli-linux-x64@0.3.2':
|
||||||
|
optional: true
|
||||||
|
|
||||||
|
'@sandbox-agent/cli-shared@0.3.2': {}
|
||||||
|
|
||||||
|
'@sandbox-agent/cli-win32-x64@0.3.2':
|
||||||
|
optional: true
|
||||||
|
|
||||||
|
'@sandbox-agent/cli@0.3.2':
|
||||||
|
dependencies:
|
||||||
|
'@sandbox-agent/cli-shared': 0.3.2
|
||||||
|
optionalDependencies:
|
||||||
|
'@sandbox-agent/cli-darwin-arm64': 0.3.2
|
||||||
|
'@sandbox-agent/cli-darwin-x64': 0.3.2
|
||||||
|
'@sandbox-agent/cli-linux-arm64': 0.3.2
|
||||||
|
'@sandbox-agent/cli-linux-x64': 0.3.2
|
||||||
|
'@sandbox-agent/cli-win32-x64': 0.3.2
|
||||||
|
optional: true
|
||||||
|
|
||||||
'@shikijs/core@3.21.0':
|
'@shikijs/core@3.21.0':
|
||||||
dependencies:
|
dependencies:
|
||||||
'@shikijs/types': 3.21.0
|
'@shikijs/types': 3.21.0
|
||||||
|
|
@ -10549,6 +10665,12 @@ snapshots:
|
||||||
|
|
||||||
acorn@8.15.0: {}
|
acorn@8.15.0: {}
|
||||||
|
|
||||||
|
acp-http-client@0.3.2(zod@4.3.6):
|
||||||
|
dependencies:
|
||||||
|
'@agentclientprotocol/sdk': 0.14.1(zod@4.3.6)
|
||||||
|
transitivePeerDependencies:
|
||||||
|
- zod
|
||||||
|
|
||||||
aggregate-error@5.0.0:
|
aggregate-error@5.0.0:
|
||||||
dependencies:
|
dependencies:
|
||||||
clean-stack: 5.3.0
|
clean-stack: 5.3.0
|
||||||
|
|
@ -13576,6 +13698,44 @@ snapshots:
|
||||||
- bufferutil
|
- bufferutil
|
||||||
- utf-8-validate
|
- utf-8-validate
|
||||||
|
|
||||||
|
rivetkit@https://pkg.pr.new/rivet-dev/rivet/rivetkit@791500a(@e2b/code-interpreter@2.3.3)(@hono/node-server@1.19.9(hono@4.12.2))(@hono/node-ws@1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2))(@standard-schema/spec@1.1.0)(dockerode@4.0.9)(drizzle-kit@0.31.9)(drizzle-orm@0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0))(ws@8.19.0):
|
||||||
|
dependencies:
|
||||||
|
'@hono/standard-validator': 0.1.5(@standard-schema/spec@1.1.0)(hono@4.12.2)
|
||||||
|
'@hono/zod-openapi': 1.2.2(hono@4.12.2)(zod@4.3.6)
|
||||||
|
'@rivetkit/bare-ts': 0.6.2
|
||||||
|
'@rivetkit/engine-runner': https://pkg.pr.new/rivet-dev/rivet/@rivetkit/engine-runner@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd
|
||||||
|
'@rivetkit/fast-json-patch': 3.1.2
|
||||||
|
'@rivetkit/on-change': 6.0.2-rc.1
|
||||||
|
'@rivetkit/sqlite': 0.1.1
|
||||||
|
'@rivetkit/sqlite-vfs': https://pkg.pr.new/rivet-dev/rivet/@rivetkit/sqlite-vfs@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd
|
||||||
|
'@rivetkit/traces': https://pkg.pr.new/rivet-dev/rivet/@rivetkit/traces@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd
|
||||||
|
'@rivetkit/virtual-websocket': https://pkg.pr.new/rivet-dev/rivet/@rivetkit/virtual-websocket@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd
|
||||||
|
'@rivetkit/workflow-engine': https://pkg.pr.new/rivet-dev/rivet/@rivetkit/workflow-engine@791500a6eb11c29133c3cbe14511f3b6ebf9d5bd
|
||||||
|
cbor-x: 1.6.3
|
||||||
|
get-port: 7.1.0
|
||||||
|
hono: 4.12.2
|
||||||
|
invariant: 2.2.4
|
||||||
|
nanoevents: 9.1.0
|
||||||
|
p-retry: 6.2.1
|
||||||
|
pino: 9.14.0
|
||||||
|
sandbox-agent: 0.3.2(zod@4.3.6)
|
||||||
|
tar: 7.5.7
|
||||||
|
uuid: 12.0.0
|
||||||
|
vbare: 0.0.4
|
||||||
|
zod: 4.3.6
|
||||||
|
optionalDependencies:
|
||||||
|
'@e2b/code-interpreter': 2.3.3
|
||||||
|
'@hono/node-server': 1.19.9(hono@4.12.2)
|
||||||
|
'@hono/node-ws': 1.3.0(@hono/node-server@1.19.9(hono@4.12.2))(hono@4.12.2)
|
||||||
|
dockerode: 4.0.9
|
||||||
|
drizzle-kit: 0.31.9
|
||||||
|
drizzle-orm: 0.44.7(@cloudflare/workers-types@4.20260313.1)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.18.0)(better-sqlite3@11.10.0)(bun-types@1.3.10)(kysely@0.28.11)(pg@8.20.0)
|
||||||
|
ws: 8.19.0
|
||||||
|
transitivePeerDependencies:
|
||||||
|
- '@standard-schema/spec'
|
||||||
|
- bufferutil
|
||||||
|
- utf-8-validate
|
||||||
|
|
||||||
robust-predicates@3.0.2: {}
|
robust-predicates@3.0.2: {}
|
||||||
|
|
||||||
rollup@4.56.0:
|
rollup@4.56.0:
|
||||||
|
|
@ -13633,6 +13793,15 @@ snapshots:
|
||||||
|
|
||||||
safer-buffer@2.1.2: {}
|
safer-buffer@2.1.2: {}
|
||||||
|
|
||||||
|
sandbox-agent@0.3.2(zod@4.3.6):
|
||||||
|
dependencies:
|
||||||
|
'@sandbox-agent/cli-shared': 0.3.2
|
||||||
|
acp-http-client: 0.3.2(zod@4.3.6)
|
||||||
|
optionalDependencies:
|
||||||
|
'@sandbox-agent/cli': 0.3.2
|
||||||
|
transitivePeerDependencies:
|
||||||
|
- zod
|
||||||
|
|
||||||
sax@1.4.4: {}
|
sax@1.4.4: {}
|
||||||
|
|
||||||
scheduler@0.23.2:
|
scheduler@0.23.2:
|
||||||
|
|
|
||||||
|
|
@ -82,6 +82,7 @@ const DEFAULT_BASE_URL = "http://sandbox-agent";
|
||||||
const DEFAULT_REPLAY_MAX_EVENTS = 50;
|
const DEFAULT_REPLAY_MAX_EVENTS = 50;
|
||||||
const DEFAULT_REPLAY_MAX_CHARS = 12_000;
|
const DEFAULT_REPLAY_MAX_CHARS = 12_000;
|
||||||
const EVENT_INDEX_SCAN_EVENTS_LIMIT = 500;
|
const EVENT_INDEX_SCAN_EVENTS_LIMIT = 500;
|
||||||
|
const MAX_EVENT_INDEX_INSERT_RETRIES = 3;
|
||||||
const SESSION_CANCEL_METHOD = "session/cancel";
|
const SESSION_CANCEL_METHOD = "session/cancel";
|
||||||
const MANUAL_CANCEL_ERROR = "Manual session/cancel calls are not allowed. Use destroySession(sessionId) instead.";
|
const MANUAL_CANCEL_ERROR = "Manual session/cancel calls are not allowed. Use destroySession(sessionId) instead.";
|
||||||
const HEALTH_WAIT_MIN_DELAY_MS = 500;
|
const HEALTH_WAIT_MIN_DELAY_MS = 500;
|
||||||
|
|
@ -841,6 +842,7 @@ export class SandboxAgent {
|
||||||
private readonly pendingPermissionRequests = new Map<string, PendingPermissionRequestState>();
|
private readonly pendingPermissionRequests = new Map<string, PendingPermissionRequestState>();
|
||||||
private readonly nextSessionEventIndexBySession = new Map<string, number>();
|
private readonly nextSessionEventIndexBySession = new Map<string, number>();
|
||||||
private readonly seedSessionEventIndexBySession = new Map<string, Promise<void>>();
|
private readonly seedSessionEventIndexBySession = new Map<string, Promise<void>>();
|
||||||
|
private readonly pendingObservedEnvelopePersistenceBySession = new Map<string, Promise<void>>();
|
||||||
|
|
||||||
constructor(options: SandboxAgentConnectOptions) {
|
constructor(options: SandboxAgentConnectOptions) {
|
||||||
const baseUrl = options.baseUrl?.trim();
|
const baseUrl = options.baseUrl?.trim();
|
||||||
|
|
@ -906,6 +908,7 @@ export class SandboxAgent {
|
||||||
this.liveConnections.clear();
|
this.liveConnections.clear();
|
||||||
const pending = [...this.pendingLiveConnections.values()];
|
const pending = [...this.pendingLiveConnections.values()];
|
||||||
this.pendingLiveConnections.clear();
|
this.pendingLiveConnections.clear();
|
||||||
|
this.pendingObservedEnvelopePersistenceBySession.clear();
|
||||||
|
|
||||||
const pendingSettled = await Promise.allSettled(pending);
|
const pendingSettled = await Promise.allSettled(pending);
|
||||||
for (const item of pendingSettled) {
|
for (const item of pendingSettled) {
|
||||||
|
|
@ -969,7 +972,6 @@ export class SandboxAgent {
|
||||||
};
|
};
|
||||||
|
|
||||||
await this.persist.updateSession(record);
|
await this.persist.updateSession(record);
|
||||||
this.nextSessionEventIndexBySession.set(record.id, 1);
|
|
||||||
live.bindSession(record.id, record.agentSessionId);
|
live.bindSession(record.id, record.agentSessionId);
|
||||||
let session = this.upsertSessionHandle(record);
|
let session = this.upsertSessionHandle(record);
|
||||||
|
|
||||||
|
|
@ -1639,7 +1641,9 @@ export class SandboxAgent {
|
||||||
agent,
|
agent,
|
||||||
serverId,
|
serverId,
|
||||||
onObservedEnvelope: (connection, envelope, direction, localSessionId) => {
|
onObservedEnvelope: (connection, envelope, direction, localSessionId) => {
|
||||||
void this.persistObservedEnvelope(connection, envelope, direction, localSessionId);
|
void this.enqueueObservedEnvelopePersistence(connection, envelope, direction, localSessionId).catch((error) => {
|
||||||
|
console.error("Failed to persist observed sandbox-agent envelope", error);
|
||||||
|
});
|
||||||
},
|
},
|
||||||
onPermissionRequest: async (connection, localSessionId, agentSessionId, request) =>
|
onPermissionRequest: async (connection, localSessionId, agentSessionId, request) =>
|
||||||
this.enqueuePermissionRequest(connection, localSessionId, agentSessionId, request),
|
this.enqueuePermissionRequest(connection, localSessionId, agentSessionId, request),
|
||||||
|
|
@ -1675,7 +1679,9 @@ export class SandboxAgent {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const event: SessionEvent = {
|
let event: SessionEvent | null = null;
|
||||||
|
for (let attempt = 0; attempt < MAX_EVENT_INDEX_INSERT_RETRIES; attempt += 1) {
|
||||||
|
event = {
|
||||||
id: randomId(),
|
id: randomId(),
|
||||||
eventIndex: await this.allocateSessionEventIndex(localSessionId),
|
eventIndex: await this.allocateSessionEventIndex(localSessionId),
|
||||||
sessionId: localSessionId,
|
sessionId: localSessionId,
|
||||||
|
|
@ -1685,7 +1691,20 @@ export class SandboxAgent {
|
||||||
payload: cloneEnvelope(envelope),
|
payload: cloneEnvelope(envelope),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
try {
|
||||||
await this.persist.insertEvent(event);
|
await this.persist.insertEvent(event);
|
||||||
|
break;
|
||||||
|
} catch (error) {
|
||||||
|
if (!isSessionEventIndexConflict(error) || attempt === MAX_EVENT_INDEX_INSERT_RETRIES - 1) {
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!event) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
await this.persistSessionStateFromEvent(localSessionId, envelope, direction);
|
await this.persistSessionStateFromEvent(localSessionId, envelope, direction);
|
||||||
|
|
||||||
const listeners = this.eventListeners.get(localSessionId);
|
const listeners = this.eventListeners.get(localSessionId);
|
||||||
|
|
@ -1698,6 +1717,34 @@ export class SandboxAgent {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private async enqueueObservedEnvelopePersistence(
|
||||||
|
connection: LiveAcpConnection,
|
||||||
|
envelope: AnyMessage,
|
||||||
|
direction: AcpEnvelopeDirection,
|
||||||
|
localSessionId: string | null,
|
||||||
|
): Promise<void> {
|
||||||
|
if (!localSessionId) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const previous = this.pendingObservedEnvelopePersistenceBySession.get(localSessionId) ?? Promise.resolve();
|
||||||
|
const current = previous
|
||||||
|
.catch(() => {
|
||||||
|
// Keep later envelope persistence moving even if an earlier write failed.
|
||||||
|
})
|
||||||
|
.then(() => this.persistObservedEnvelope(connection, envelope, direction, localSessionId));
|
||||||
|
|
||||||
|
this.pendingObservedEnvelopePersistenceBySession.set(localSessionId, current);
|
||||||
|
|
||||||
|
try {
|
||||||
|
await current;
|
||||||
|
} finally {
|
||||||
|
if (this.pendingObservedEnvelopePersistenceBySession.get(localSessionId) === current) {
|
||||||
|
this.pendingObservedEnvelopePersistenceBySession.delete(localSessionId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
private async persistSessionStateFromEvent(sessionId: string, envelope: AnyMessage, direction: AcpEnvelopeDirection): Promise<void> {
|
private async persistSessionStateFromEvent(sessionId: string, envelope: AnyMessage, direction: AcpEnvelopeDirection): Promise<void> {
|
||||||
if (direction !== "inbound") {
|
if (direction !== "inbound") {
|
||||||
return;
|
return;
|
||||||
|
|
@ -2066,6 +2113,14 @@ export class SandboxAgent {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function isSessionEventIndexConflict(error: unknown): boolean {
|
||||||
|
if (!(error instanceof Error)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return /UNIQUE constraint failed: .*session_id, .*event_index/.test(error.message);
|
||||||
|
}
|
||||||
|
|
||||||
type PendingPermissionRequestState = {
|
type PendingPermissionRequestState = {
|
||||||
id: string;
|
id: string;
|
||||||
sessionId: string;
|
sessionId: string;
|
||||||
|
|
|
||||||
|
|
@ -5,7 +5,15 @@ import { dirname, resolve } from "node:path";
|
||||||
import { join } from "node:path";
|
import { join } from "node:path";
|
||||||
import { fileURLToPath } from "node:url";
|
import { fileURLToPath } from "node:url";
|
||||||
import { tmpdir } from "node:os";
|
import { tmpdir } from "node:os";
|
||||||
import { InMemorySessionPersistDriver, SandboxAgent, type SessionEvent } from "../src/index.ts";
|
import {
|
||||||
|
InMemorySessionPersistDriver,
|
||||||
|
SandboxAgent,
|
||||||
|
type ListEventsRequest,
|
||||||
|
type ListPage,
|
||||||
|
type SessionEvent,
|
||||||
|
type SessionPersistDriver,
|
||||||
|
type SessionRecord,
|
||||||
|
} from "../src/index.ts";
|
||||||
import { spawnSandboxAgent, isNodeRuntime, type SandboxAgentSpawnHandle } from "../src/spawn.ts";
|
import { spawnSandboxAgent, isNodeRuntime, type SandboxAgentSpawnHandle } from "../src/spawn.ts";
|
||||||
import { prepareMockAgentDataHome } from "./helpers/mock-agent.ts";
|
import { prepareMockAgentDataHome } from "./helpers/mock-agent.ts";
|
||||||
import WebSocket from "ws";
|
import WebSocket from "ws";
|
||||||
|
|
@ -40,6 +48,44 @@ function sleep(ms: number): Promise<void> {
|
||||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
class StrictUniqueSessionPersistDriver implements SessionPersistDriver {
|
||||||
|
private readonly events = new InMemorySessionPersistDriver({
|
||||||
|
maxEventsPerSession: 500,
|
||||||
|
});
|
||||||
|
private readonly eventIndexesBySession = new Map<string, Set<number>>();
|
||||||
|
|
||||||
|
async getSession(id: string): Promise<SessionRecord | null> {
|
||||||
|
return this.events.getSession(id);
|
||||||
|
}
|
||||||
|
|
||||||
|
async listSessions(request?: { cursor?: string; limit?: number }): Promise<ListPage<SessionRecord>> {
|
||||||
|
return this.events.listSessions(request);
|
||||||
|
}
|
||||||
|
|
||||||
|
async updateSession(session: SessionRecord): Promise<void> {
|
||||||
|
await this.events.updateSession(session);
|
||||||
|
}
|
||||||
|
|
||||||
|
async listEvents(request: ListEventsRequest): Promise<ListPage<SessionEvent>> {
|
||||||
|
return this.events.listEvents(request);
|
||||||
|
}
|
||||||
|
|
||||||
|
async insertEvent(event: SessionEvent): Promise<void> {
|
||||||
|
await sleep(5);
|
||||||
|
|
||||||
|
const indexes = this.eventIndexesBySession.get(event.sessionId) ?? new Set<number>();
|
||||||
|
if (indexes.has(event.eventIndex)) {
|
||||||
|
throw new Error("UNIQUE constraint failed: sandbox_agent_events.session_id, sandbox_agent_events.event_index");
|
||||||
|
}
|
||||||
|
|
||||||
|
indexes.add(event.eventIndex);
|
||||||
|
this.eventIndexesBySession.set(event.sessionId, indexes);
|
||||||
|
|
||||||
|
await sleep(5);
|
||||||
|
await this.events.insertEvent(event);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
async function waitFor<T>(fn: () => T | undefined | null, timeoutMs = 6000, stepMs = 30): Promise<T> {
|
async function waitFor<T>(fn: () => T | undefined | null, timeoutMs = 6000, stepMs = 30): Promise<T> {
|
||||||
const started = Date.now();
|
const started = Date.now();
|
||||||
while (Date.now() - started < timeoutMs) {
|
while (Date.now() - started < timeoutMs) {
|
||||||
|
|
@ -207,6 +253,27 @@ describe("Integration: TypeScript SDK flat session API", () => {
|
||||||
await sdk.dispose();
|
await sdk.dispose();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("preserves observed event indexes across session creation follow-up calls", async () => {
|
||||||
|
const persist = new StrictUniqueSessionPersistDriver();
|
||||||
|
const sdk = await SandboxAgent.connect({
|
||||||
|
baseUrl,
|
||||||
|
token,
|
||||||
|
persist,
|
||||||
|
});
|
||||||
|
|
||||||
|
const session = await sdk.createSession({ agent: "mock" });
|
||||||
|
const prompt = await session.prompt([{ type: "text", text: "preserve event indexes" }]);
|
||||||
|
expect(prompt.stopReason).toBe("end_turn");
|
||||||
|
|
||||||
|
const events = await waitForAsync(async () => {
|
||||||
|
const page = await sdk.getEvents({ sessionId: session.id, limit: 200 });
|
||||||
|
return page.items.length >= 4 ? page : null;
|
||||||
|
});
|
||||||
|
expect(new Set(events.items.map((event) => event.eventIndex)).size).toBe(events.items.length);
|
||||||
|
|
||||||
|
await sdk.dispose();
|
||||||
|
});
|
||||||
|
|
||||||
it("covers agent query flags and filesystem HTTP helpers", async () => {
|
it("covers agent query flags and filesystem HTTP helpers", async () => {
|
||||||
const sdk = await SandboxAgent.connect({
|
const sdk = await SandboxAgent.connect({
|
||||||
baseUrl,
|
baseUrl,
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue