chore(foundry): workbench action responsiveness (#254)

* wip

* wip
This commit is contained in:
Nathan Flurry 2026-03-14 20:42:18 -07:00 committed by GitHub
parent 400f9a214e
commit 99abb9d42e
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
171 changed files with 7260 additions and 7342 deletions

View file

@@ -5,30 +5,29 @@
Keep the backend actor tree aligned with this shape unless we explicitly decide to change it:
```text
WorkspaceActor
├─ HistoryActor(workspace-scoped global feed)
├─ ProjectActor(repo)
│ ├─ ProjectBranchSyncActor
│ ├─ ProjectPrSyncActor
OrganizationActor
├─ HistoryActor(organization-scoped global feed)
├─ GithubDataActor
├─ RepositoryActor(repo)
│ └─ TaskActor(task)
│ ├─ TaskSessionActor(session) × N
│ │ └─ SessionStatusSyncActor(session) × 0..1
│ └─ Task-local workbench state
└─ SandboxInstanceActor(providerId, sandboxId) × N
└─ SandboxInstanceActor(sandboxProviderId, sandboxId) × N
```
## Ownership Rules
- `WorkspaceActor` is the workspace coordinator and lookup/index owner.
- `HistoryActor` is workspace-scoped. There is one workspace-level history feed.
- `ProjectActor` is the repo coordinator and owns repo-local caches/indexes.
- `OrganizationActor` is the organization coordinator and lookup/index owner.
- `HistoryActor` is organization-scoped. There is one organization-level history feed.
- `RepositoryActor` is the repo coordinator and owns repo-local caches/indexes.
- `TaskActor` is one branch. Treat `1 task = 1 branch` once branch assignment is finalized.
- `TaskActor` can have many sessions.
- `TaskActor` can reference many sandbox instances historically, but should have only one active sandbox/session at a time.
- Session unread state and draft prompts are backend-owned workbench state, not frontend-local state.
- Branch rename is a real git operation, not just metadata.
- `SandboxInstanceActor` stays separate from `TaskActor`; tasks/sessions reference it by identity.
- Sync actors are polling workers only. They feed parent actors and should not become the source of truth.
- The backend stores no local git state. No clones, no refs, no working trees, and no git-spice. Repository metadata comes from GitHub API data and webhook events. Any working-tree git operation runs inside a sandbox via `executeInSandbox()`.
- When a backend request path must aggregate multiple independent actor calls or reads, prefer bounded parallelism over sequential fan-out when correctness permits. Do not serialize independent work by default.
## Maintenance

View file

@@ -1,51 +1,51 @@
import type { TaskStatus, ProviderId } from "@sandbox-agent/foundry-shared";
import type { TaskStatus, SandboxProviderId } from "@sandbox-agent/foundry-shared";
export interface TaskCreatedEvent {
workspaceId: string;
organizationId: string;
repoId: string;
taskId: string;
providerId: ProviderId;
sandboxProviderId: SandboxProviderId;
branchName: string;
title: string;
}
export interface TaskStatusEvent {
workspaceId: string;
organizationId: string;
repoId: string;
taskId: string;
status: TaskStatus;
message: string;
}
export interface ProjectSnapshotEvent {
workspaceId: string;
export interface RepositorySnapshotEvent {
organizationId: string;
repoId: string;
updatedAt: number;
}
export interface AgentStartedEvent {
workspaceId: string;
organizationId: string;
repoId: string;
taskId: string;
sessionId: string;
}
export interface AgentIdleEvent {
workspaceId: string;
organizationId: string;
repoId: string;
taskId: string;
sessionId: string;
}
export interface AgentErrorEvent {
workspaceId: string;
organizationId: string;
repoId: string;
taskId: string;
message: string;
}
export interface PrCreatedEvent {
workspaceId: string;
organizationId: string;
repoId: string;
taskId: string;
prNumber: number;
@@ -53,7 +53,7 @@ export interface PrCreatedEvent {
}
export interface PrClosedEvent {
workspaceId: string;
organizationId: string;
repoId: string;
taskId: string;
prNumber: number;
@@ -61,7 +61,7 @@ export interface PrClosedEvent {
}
export interface PrReviewEvent {
workspaceId: string;
organizationId: string;
repoId: string;
taskId: string;
prNumber: number;
@@ -70,7 +70,7 @@ export interface PrReviewEvent {
}
export interface CiStatusChangedEvent {
workspaceId: string;
organizationId: string;
repoId: string;
taskId: string;
prNumber: number;
@@ -81,7 +81,7 @@ export type TaskStepName = "auto_commit" | "push" | "pr_submit";
export type TaskStepStatus = "started" | "completed" | "skipped" | "failed";
export interface TaskStepEvent {
workspaceId: string;
organizationId: string;
repoId: string;
taskId: string;
step: TaskStepName;
@@ -90,23 +90,15 @@ export interface TaskStepEvent {
}
export interface BranchSwitchedEvent {
workspaceId: string;
organizationId: string;
repoId: string;
taskId: string;
branchName: string;
}
export interface SessionAttachedEvent {
workspaceId: string;
organizationId: string;
repoId: string;
taskId: string;
sessionId: string;
}
export interface BranchSyncedEvent {
workspaceId: string;
repoId: string;
taskId: string;
branchName: string;
strategy: string;
}

View file

@@ -6,6 +6,18 @@ const journal = {
tag: "0000_github_data",
breakpoints: true,
},
{
idx: 1,
when: 1773810002000,
tag: "0001_default_branch",
breakpoints: true,
},
{
idx: 2,
when: 1773810300000,
tag: "0002_github_branches",
breakpoints: true,
},
],
} as const;
@@ -56,6 +68,16 @@ CREATE TABLE \`github_pull_requests\` (
\`is_draft\` integer NOT NULL,
\`updated_at\` integer NOT NULL
);
`,
m0001: `ALTER TABLE \`github_repositories\` ADD \`default_branch\` text NOT NULL DEFAULT 'main';
`,
m0002: `CREATE TABLE \`github_branches\` (
\`branch_id\` text PRIMARY KEY NOT NULL,
\`repo_id\` text NOT NULL,
\`branch_name\` text NOT NULL,
\`commit_sha\` text NOT NULL,
\`updated_at\` integer NOT NULL
);
`,
} as const,
};

View file

@@ -16,6 +16,15 @@ export const githubRepositories = sqliteTable("github_repositories", {
fullName: text("full_name").notNull(),
cloneUrl: text("clone_url").notNull(),
private: integer("private").notNull(),
defaultBranch: text("default_branch").notNull(),
updatedAt: integer("updated_at").notNull(),
});
export const githubBranches = sqliteTable("github_branches", {
branchId: text("branch_id").notNull().primaryKey(),
repoId: text("repo_id").notNull(),
branchName: text("branch_name").notNull(),
commitSha: text("commit_sha").notNull(),
updatedAt: integer("updated_at").notNull(),
});

View file

@@ -3,16 +3,16 @@ import { eq } from "drizzle-orm";
import { actor } from "rivetkit";
import type { FoundryOrganization } from "@sandbox-agent/foundry-shared";
import { getActorRuntimeContext } from "../context.js";
import { getOrCreateWorkspace, getTask } from "../handles.js";
import { getOrCreateOrganization, getTask } from "../handles.js";
import { repoIdFromRemote } from "../../services/repo.js";
import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js";
import { resolveOrganizationGithubAuth } from "../../services/github-auth.js";
import { githubDataDb } from "./db/db.js";
import { githubMembers, githubMeta, githubPullRequests, githubRepositories } from "./db/schema.js";
import { githubBranches, githubMembers, githubMeta, githubPullRequests, githubRepositories } from "./db/schema.js";
const META_ROW_ID = 1;
interface GithubDataInput {
workspaceId: string;
organizationId: string;
}
interface GithubMemberRecord {
@@ -28,6 +28,13 @@ interface GithubRepositoryRecord {
fullName: string;
cloneUrl: string;
private: boolean;
defaultBranch: string;
}
interface GithubBranchRecord {
repoId: string;
branchName: string;
commitSha: string;
}
interface GithubPullRequestRecord {
@@ -156,21 +163,21 @@ async function writeMeta(c: any, patch: Partial<Awaited<ReturnType<typeof readMe
}
async function getOrganizationContext(c: any, overrides?: FullSyncInput) {
const workspace = await getOrCreateWorkspace(c, c.state.workspaceId);
const organization = await workspace.getOrganizationShellStateIfInitialized({});
if (!organization) {
throw new Error(`Workspace ${c.state.workspaceId} is not initialized`);
const organizationHandle = await getOrCreateOrganization(c, c.state.organizationId);
const organizationState = await organizationHandle.getOrganizationShellStateIfInitialized({});
if (!organizationState) {
throw new Error(`Organization ${c.state.organizationId} is not initialized`);
}
const auth = await resolveWorkspaceGithubAuth(c, c.state.workspaceId);
const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId);
return {
kind: overrides?.kind ?? organization.snapshot.kind,
githubLogin: overrides?.githubLogin ?? organization.githubLogin,
connectedAccount: overrides?.connectedAccount ?? organization.snapshot.github.connectedAccount ?? organization.githubLogin,
installationId: overrides?.installationId ?? organization.githubInstallationId ?? null,
kind: overrides?.kind ?? organizationState.snapshot.kind,
githubLogin: overrides?.githubLogin ?? organizationState.githubLogin,
connectedAccount: overrides?.connectedAccount ?? organizationState.snapshot.github.connectedAccount ?? organizationState.githubLogin,
installationId: overrides?.installationId ?? organizationState.githubInstallationId ?? null,
installationStatus:
overrides?.installationStatus ??
organization.snapshot.github.installationStatus ??
(organization.snapshot.kind === "personal" ? "connected" : "reconnect_required"),
organizationState.snapshot.github.installationStatus ??
(organizationState.snapshot.kind === "personal" ? "connected" : "reconnect_required"),
accessToken: overrides?.accessToken ?? auth?.githubToken ?? null,
};
}
@@ -185,6 +192,23 @@ async function replaceRepositories(c: any, repositories: GithubRepositoryRecord[
fullName: repository.fullName,
cloneUrl: repository.cloneUrl,
private: repository.private ? 1 : 0,
defaultBranch: repository.defaultBranch,
updatedAt,
})
.run();
}
}
async function replaceBranches(c: any, branches: GithubBranchRecord[], updatedAt: number) {
await c.db.delete(githubBranches).run();
for (const branch of branches) {
await c.db
.insert(githubBranches)
.values({
branchId: `${branch.repoId}:${branch.branchName}`,
repoId: branch.repoId,
branchName: branch.branchName,
commitSha: branch.commitSha,
updatedAt,
})
.run();
@@ -234,12 +258,12 @@ async function replacePullRequests(c: any, pullRequests: GithubPullRequestRecord
}
async function refreshTaskSummaryForBranch(c: any, repoId: string, branchName: string) {
const workspace = await getOrCreateWorkspace(c, c.state.workspaceId);
await workspace.refreshTaskSummaryForGithubBranch({ repoId, branchName });
const organization = await getOrCreateOrganization(c, c.state.organizationId);
await organization.refreshTaskSummaryForGithubBranch({ repoId, branchName });
}
async function emitPullRequestChangeEvents(c: any, beforeRows: any[], afterRows: any[]) {
const workspace = await getOrCreateWorkspace(c, c.state.workspaceId);
const organization = await getOrCreateOrganization(c, c.state.organizationId);
const beforeById = new Map(beforeRows.map((row) => [row.prId, row]));
const afterById = new Map(afterRows.map((row) => [row.prId, row]));
@@ -258,7 +282,7 @@ async function emitPullRequestChangeEvents(c: any, beforeRows: any[], afterRows:
if (!changed) {
continue;
}
await workspace.applyOpenPullRequestUpdate({
await organization.applyOpenPullRequestUpdate({
pullRequest: pullRequestSummaryFromRow(row),
});
await refreshTaskSummaryForBranch(c, row.repoId, row.headRefName);
@@ -268,14 +292,14 @@ async function emitPullRequestChangeEvents(c: any, beforeRows: any[], afterRows:
if (afterById.has(prId)) {
continue;
}
await workspace.removeOpenPullRequest({ prId });
await organization.removeOpenPullRequest({ prId });
await refreshTaskSummaryForBranch(c, row.repoId, row.headRefName);
}
}
async function autoArchiveTaskForClosedPullRequest(c: any, row: any) {
const workspace = await getOrCreateWorkspace(c, c.state.workspaceId);
const match = await workspace.findTaskForGithubBranch({
const organization = await getOrCreateOrganization(c, c.state.organizationId);
const match = await organization.findTaskForGithubBranch({
repoId: row.repoId,
branchName: row.headRefName,
});
@@ -283,7 +307,7 @@ async function autoArchiveTaskForClosedPullRequest(c: any, row: any) {
return;
}
try {
const task = getTask(c, c.state.workspaceId, row.repoId, match.taskId);
const task = getTask(c, c.state.organizationId, row.repoId, match.taskId);
await task.archive({ reason: `PR ${String(row.state).toLowerCase()}` });
} catch {
// Best-effort only. Task summary refresh will still clear the PR state.
@@ -391,6 +415,69 @@ async function resolvePullRequests(
}));
}
async function listRepositoryBranchesForContext(
context: Awaited<ReturnType<typeof getOrganizationContext>>,
repository: GithubRepositoryRecord,
): Promise<GithubBranchRecord[]> {
const { appShell } = getActorRuntimeContext();
let branches: Array<{ name: string; commitSha: string }> = [];
if (context.installationId != null) {
try {
branches = await appShell.github.listInstallationRepositoryBranches(context.installationId, repository.fullName);
} catch (error) {
if (!context.accessToken) {
throw error;
}
}
}
if (branches.length === 0 && context.accessToken) {
branches = await appShell.github.listUserRepositoryBranches(context.accessToken, repository.fullName);
}
const repoId = repoIdFromRemote(repository.cloneUrl);
return branches.map((branch) => ({
repoId,
branchName: branch.name,
commitSha: branch.commitSha,
}));
}
async function resolveBranches(
_c: any,
context: Awaited<ReturnType<typeof getOrganizationContext>>,
repositories: GithubRepositoryRecord[],
): Promise<GithubBranchRecord[]> {
return (await Promise.all(repositories.map((repository) => listRepositoryBranchesForContext(context, repository)))).flat();
}
async function refreshRepositoryBranches(
c: any,
context: Awaited<ReturnType<typeof getOrganizationContext>>,
repository: GithubRepositoryRecord,
updatedAt: number,
): Promise<void> {
const nextBranches = await listRepositoryBranchesForContext(context, repository);
await c.db
.delete(githubBranches)
.where(eq(githubBranches.repoId, repoIdFromRemote(repository.cloneUrl)))
.run();
for (const branch of nextBranches) {
await c.db
.insert(githubBranches)
.values({
branchId: `${branch.repoId}:${branch.branchName}`,
repoId: branch.repoId,
branchName: branch.branchName,
commitSha: branch.commitSha,
updatedAt,
})
.run();
}
}
async function readAllPullRequestRows(c: any) {
return await c.db.select().from(githubPullRequests).all();
}
@@ -409,15 +496,17 @@ async function runFullSync(c: any, input: FullSyncInput = {}) {
});
const repositories = await resolveRepositories(c, context);
const branches = await resolveBranches(c, context, repositories);
const members = await resolveMembers(c, context);
const pullRequests = await resolvePullRequests(c, context, repositories);
await replaceRepositories(c, repositories, startedAt);
await replaceBranches(c, branches, startedAt);
await replaceMembers(c, members, startedAt);
await replacePullRequests(c, pullRequests);
const workspace = await getOrCreateWorkspace(c, c.state.workspaceId);
await workspace.applyGithubDataProjection({
const organization = await getOrCreateOrganization(c, c.state.organizationId);
await organization.applyGithubDataProjection({
connectedAccount: context.connectedAccount,
installationStatus: context.installationStatus,
installationId: context.installationId,
@@ -455,16 +544,18 @@ export const githubData = actor({
actionTimeout: 5 * 60_000,
},
createState: (_c, input: GithubDataInput) => ({
workspaceId: input.workspaceId,
organizationId: input.organizationId,
}),
actions: {
async getSummary(c) {
const repositories = await c.db.select().from(githubRepositories).all();
const branches = await c.db.select().from(githubBranches).all();
const members = await c.db.select().from(githubMembers).all();
const pullRequests = await c.db.select().from(githubPullRequests).all();
return {
...(await readMeta(c)),
repositoryCount: repositories.length,
branchCount: branches.length,
memberCount: members.length,
pullRequestCount: pullRequests.length,
};
@@ -477,14 +568,39 @@ export const githubData = actor({
fullName: row.fullName,
cloneUrl: row.cloneUrl,
private: Boolean(row.private),
defaultBranch: row.defaultBranch,
}));
},
async getRepository(c, input: { repoId: string }) {
const row = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, input.repoId)).get();
if (!row) {
return null;
}
return {
repoId: row.repoId,
fullName: row.fullName,
cloneUrl: row.cloneUrl,
private: Boolean(row.private),
defaultBranch: row.defaultBranch,
};
},
async listPullRequestsForRepository(c, input: { repoId: string }) {
const rows = await c.db.select().from(githubPullRequests).where(eq(githubPullRequests.repoId, input.repoId)).all();
return rows.map(pullRequestSummaryFromRow);
},
async listBranchesForRepository(c, input: { repoId: string }) {
const rows = await c.db.select().from(githubBranches).where(eq(githubBranches.repoId, input.repoId)).all();
return rows
.map((row) => ({
branchName: row.branchName,
commitSha: row.commitSha,
}))
.sort((left, right) => left.branchName.localeCompare(right.branchName));
},
async listOpenPullRequests(c) {
const rows = await c.db.select().from(githubPullRequests).all();
return rows.map(pullRequestSummaryFromRow).sort((left, right) => right.updatedAtMs - left.updatedAtMs);
@@ -539,6 +655,7 @@ export const githubData = actor({
fullName: repository.fullName,
cloneUrl: repository.cloneUrl,
private: repository.private ? 1 : 0,
defaultBranch: repository.defaultBranch,
updatedAt,
})
.onConflictDoUpdate({
@@ -547,13 +664,25 @@ export const githubData = actor({
fullName: repository.fullName,
cloneUrl: repository.cloneUrl,
private: repository.private ? 1 : 0,
defaultBranch: repository.defaultBranch,
updatedAt,
},
})
.run();
await refreshRepositoryBranches(
c,
context,
{
fullName: repository.fullName,
cloneUrl: repository.cloneUrl,
private: repository.private,
defaultBranch: repository.defaultBranch,
},
updatedAt,
);
const workspace = await getOrCreateWorkspace(c, c.state.workspaceId);
await workspace.applyGithubRepositoryProjection({
const organization = await getOrCreateOrganization(c, c.state.organizationId);
await organization.applyGithubRepositoryProjection({
repoId: input.repoId,
remoteUrl: repository.cloneUrl,
});
@@ -562,6 +691,7 @@ export const githubData = actor({
fullName: repository.fullName,
cloneUrl: repository.cloneUrl,
private: repository.private,
defaultBranch: repository.defaultBranch,
};
},
@@ -656,6 +786,7 @@ export const githubData = actor({
async clearState(c, input: ClearStateInput) {
const beforeRows = await readAllPullRequestRows(c);
await c.db.delete(githubPullRequests).run();
await c.db.delete(githubBranches).run();
await c.db.delete(githubRepositories).run();
await c.db.delete(githubMembers).run();
await writeMeta(c, {
@@ -667,8 +798,8 @@ export const githubData = actor({
lastSyncAt: null,
});
const workspace = await getOrCreateWorkspace(c, c.state.workspaceId);
await workspace.applyGithubDataProjection({
const organization = await getOrCreateOrganization(c, c.state.organizationId);
await organization.applyGithubDataProjection({
connectedAccount: input.connectedAccount,
installationStatus: input.installationStatus,
installationId: input.installationId,
@@ -683,6 +814,7 @@ export const githubData = actor({
async handlePullRequestWebhook(c, input: PullRequestWebhookInput) {
const beforeRows = await readAllPullRequestRows(c);
const repoId = repoIdFromRemote(input.repository.cloneUrl);
const currentRepository = await c.db.select().from(githubRepositories).where(eq(githubRepositories.repoId, repoId)).get();
const updatedAt = Date.now();
const state = normalizePrStatus(input.pullRequest);
const prId = `${repoId}#${input.pullRequest.number}`;
@@ -694,6 +826,7 @@ export const githubData = actor({
fullName: input.repository.fullName,
cloneUrl: input.repository.cloneUrl,
private: input.repository.private ? 1 : 0,
defaultBranch: currentRepository?.defaultBranch ?? input.pullRequest.baseRefName ?? "main",
updatedAt,
})
.onConflictDoUpdate({
@@ -702,6 +835,7 @@ export const githubData = actor({
fullName: input.repository.fullName,
cloneUrl: input.repository.cloneUrl,
private: input.repository.private ? 1 : 0,
defaultBranch: currentRepository?.defaultBranch ?? input.pullRequest.baseRefName ?? "main",
updatedAt,
},
})
@@ -753,8 +887,8 @@ export const githubData = actor({
lastSyncAt: updatedAt,
});
const workspace = await getOrCreateWorkspace(c, c.state.workspaceId);
await workspace.applyGithubRepositoryProjection({
const organization = await getOrCreateOrganization(c, c.state.organizationId);
await organization.applyGithubRepositoryProjection({
repoId,
remoteUrl: input.repository.cloneUrl,
});

View file

@@ -1,12 +1,12 @@
import { authUserKey, githubDataKey, taskKey, historyKey, projectBranchSyncKey, projectKey, taskSandboxKey, workspaceKey } from "./keys.js";
import { authUserKey, githubDataKey, historyKey, organizationKey, repositoryKey, taskKey, taskSandboxKey } from "./keys.js";
export function actorClient(c: any) {
return c.client();
}
export async function getOrCreateWorkspace(c: any, workspaceId: string) {
return await actorClient(c).workspace.getOrCreate(workspaceKey(workspaceId), {
createWithInput: workspaceId,
export async function getOrCreateOrganization(c: any, organizationId: string) {
return await actorClient(c).organization.getOrCreate(organizationKey(organizationId), {
createWithInput: organizationId,
});
}
@@ -20,76 +20,61 @@ export function getAuthUser(c: any, userId: string) {
return actorClient(c).authUser.get(authUserKey(userId));
}
export async function getOrCreateProject(c: any, workspaceId: string, repoId: string, remoteUrl: string) {
return await actorClient(c).project.getOrCreate(projectKey(workspaceId, repoId), {
export async function getOrCreateRepository(c: any, organizationId: string, repoId: string, remoteUrl: string) {
return await actorClient(c).repository.getOrCreate(repositoryKey(organizationId, repoId), {
createWithInput: {
workspaceId,
organizationId,
repoId,
remoteUrl,
},
});
}
export function getProject(c: any, workspaceId: string, repoId: string) {
return actorClient(c).project.get(projectKey(workspaceId, repoId));
export function getRepository(c: any, organizationId: string, repoId: string) {
return actorClient(c).repository.get(repositoryKey(organizationId, repoId));
}
export function getTask(c: any, workspaceId: string, repoId: string, taskId: string) {
return actorClient(c).task.get(taskKey(workspaceId, repoId, taskId));
export function getTask(c: any, organizationId: string, repoId: string, taskId: string) {
return actorClient(c).task.get(taskKey(organizationId, repoId, taskId));
}
export async function getOrCreateTask(c: any, workspaceId: string, repoId: string, taskId: string, createWithInput: Record<string, unknown>) {
return await actorClient(c).task.getOrCreate(taskKey(workspaceId, repoId, taskId), {
export async function getOrCreateTask(c: any, organizationId: string, repoId: string, taskId: string, createWithInput: Record<string, unknown>) {
return await actorClient(c).task.getOrCreate(taskKey(organizationId, repoId, taskId), {
createWithInput,
});
}
export async function getOrCreateHistory(c: any, workspaceId: string, repoId: string) {
return await actorClient(c).history.getOrCreate(historyKey(workspaceId, repoId), {
export async function getOrCreateHistory(c: any, organizationId: string, repoId: string) {
return await actorClient(c).history.getOrCreate(historyKey(organizationId, repoId), {
createWithInput: {
workspaceId,
organizationId,
repoId,
},
});
}
export async function getOrCreateGithubData(c: any, workspaceId: string) {
return await actorClient(c).githubData.getOrCreate(githubDataKey(workspaceId), {
export async function getOrCreateGithubData(c: any, organizationId: string) {
return await actorClient(c).githubData.getOrCreate(githubDataKey(organizationId), {
createWithInput: {
workspaceId,
organizationId,
},
});
}
export function getGithubData(c: any, workspaceId: string) {
return actorClient(c).githubData.get(githubDataKey(workspaceId));
export function getGithubData(c: any, organizationId: string) {
return actorClient(c).githubData.get(githubDataKey(organizationId));
}
export async function getOrCreateProjectBranchSync(c: any, workspaceId: string, repoId: string, repoPath: string, intervalMs: number) {
return await actorClient(c).projectBranchSync.getOrCreate(projectBranchSyncKey(workspaceId, repoId), {
createWithInput: {
workspaceId,
repoId,
repoPath,
intervalMs,
},
});
export function getTaskSandbox(c: any, organizationId: string, sandboxId: string) {
return actorClient(c).taskSandbox.get(taskSandboxKey(organizationId, sandboxId));
}
export function getTaskSandbox(c: any, workspaceId: string, sandboxId: string) {
return actorClient(c).taskSandbox.get(taskSandboxKey(workspaceId, sandboxId));
}
export async function getOrCreateTaskSandbox(c: any, workspaceId: string, sandboxId: string, createWithInput?: Record<string, unknown>) {
return await actorClient(c).taskSandbox.getOrCreate(taskSandboxKey(workspaceId, sandboxId), {
export async function getOrCreateTaskSandbox(c: any, organizationId: string, sandboxId: string, createWithInput?: Record<string, unknown>) {
return await actorClient(c).taskSandbox.getOrCreate(taskSandboxKey(organizationId, sandboxId), {
createWithInput,
});
}
export function selfProjectBranchSync(c: any) {
return actorClient(c).projectBranchSync.getForId(c.actorId);
}
export function selfHistory(c: any) {
return actorClient(c).history.getForId(c.actorId);
}
@@ -98,12 +83,12 @@ export function selfTask(c: any) {
return actorClient(c).task.getForId(c.actorId);
}
export function selfWorkspace(c: any) {
return actorClient(c).workspace.getForId(c.actorId);
export function selfOrganization(c: any) {
return actorClient(c).organization.getForId(c.actorId);
}
export function selfProject(c: any) {
return actorClient(c).project.getForId(c.actorId);
export function selfRepository(c: any) {
return actorClient(c).repository.getForId(c.actorId);
}
export function selfAuthUser(c: any) {

View file

@@ -8,7 +8,7 @@ import { historyDb } from "./db/db.js";
import { events } from "./db/schema.js";
export interface HistoryInput {
workspaceId: string;
organizationId: string;
repoId: string;
}
@@ -70,7 +70,7 @@ export const history = actor({
icon: "database",
},
createState: (_c, input: HistoryInput) => ({
workspaceId: input.workspaceId,
organizationId: input.organizationId,
repoId: input.repoId,
}),
actions: {
@@ -106,7 +106,7 @@ export const history = actor({
return rows.map((row) => ({
...row,
workspaceId: c.state.workspaceId,
organizationId: c.state.organizationId,
repoId: c.state.repoId,
}));
},

View file

@@ -3,10 +3,9 @@ import { setup } from "rivetkit";
import { githubData } from "./github-data/index.js";
import { task } from "./task/index.js";
import { history } from "./history/index.js";
import { projectBranchSync } from "./project-branch-sync/index.js";
import { project } from "./project/index.js";
import { repository } from "./repository/index.js";
import { taskSandbox } from "./sandbox/index.js";
import { workspace } from "./workspace/index.js";
import { organization } from "./organization/index.js";
import { logger } from "../logging.js";
const RUNNER_VERSION = Math.floor(Date.now() / 1000);
@@ -23,13 +22,12 @@ export const registry = setup({
},
use: {
authUser,
workspace,
project,
organization,
repository,
task,
taskSandbox,
history,
githubData,
projectBranchSync,
},
});
@@ -40,7 +38,6 @@ export * from "./github-data/index.js";
export * from "./task/index.js";
export * from "./history/index.js";
export * from "./keys.js";
export * from "./project-branch-sync/index.js";
export * from "./project/index.js";
export * from "./repository/index.js";
export * from "./sandbox/index.js";
export * from "./workspace/index.js";
export * from "./organization/index.js";

View file

@@ -1,33 +1,29 @@
export type ActorKey = string[];
export function workspaceKey(workspaceId: string): ActorKey {
return ["ws", workspaceId];
export function organizationKey(organizationId: string): ActorKey {
return ["org", organizationId];
}
export function authUserKey(userId: string): ActorKey {
return ["ws", "app", "user", userId];
return ["org", "app", "user", userId];
}
export function projectKey(workspaceId: string, repoId: string): ActorKey {
return ["ws", workspaceId, "project", repoId];
export function repositoryKey(organizationId: string, repoId: string): ActorKey {
return ["org", organizationId, "repository", repoId];
}
export function taskKey(workspaceId: string, repoId: string, taskId: string): ActorKey {
return ["ws", workspaceId, "project", repoId, "task", taskId];
export function taskKey(organizationId: string, repoId: string, taskId: string): ActorKey {
return ["org", organizationId, "repository", repoId, "task", taskId];
}
export function taskSandboxKey(workspaceId: string, sandboxId: string): ActorKey {
return ["ws", workspaceId, "sandbox", sandboxId];
export function taskSandboxKey(organizationId: string, sandboxId: string): ActorKey {
return ["org", organizationId, "sandbox", sandboxId];
}
export function historyKey(workspaceId: string, repoId: string): ActorKey {
return ["ws", workspaceId, "project", repoId, "history"];
export function historyKey(organizationId: string, repoId: string): ActorKey {
return ["org", organizationId, "repository", repoId, "history"];
}
export function githubDataKey(workspaceId: string): ActorKey {
return ["ws", workspaceId, "github-data"];
}
export function projectBranchSyncKey(workspaceId: string, repoId: string): ActorKey {
return ["ws", workspaceId, "project", repoId, "branch-sync"];
export function githubDataKey(organizationId: string): ActorKey {
return ["org", organizationId, "github-data"];
}

View file

@@ -2,7 +2,11 @@ import { logger } from "../logging.js";
export function resolveErrorMessage(error: unknown): string {
if (error instanceof Error) {
return error.message;
let msg = error.message;
if (error.cause) {
msg += ` [cause: ${resolveErrorMessage(error.cause)}]`;
}
return msg;
}
return String(error);
}

View file

@@ -1,18 +1,14 @@
// @ts-nocheck
import { setTimeout as delay } from "node:timers/promises";
import { desc, eq } from "drizzle-orm";
import { Loop } from "rivetkit/workflow";
import type {
AddRepoInput,
CreateTaskInput,
HistoryEvent,
HistoryQueryInput,
ListTasksInput,
ProviderId,
SandboxProviderId,
RepoOverview,
RepoRecord,
RepoStackActionInput,
RepoStackActionResult,
StarSandboxAgentRepoInput,
StarSandboxAgentRepoResult,
SwitchResult,
@@ -26,37 +22,33 @@
TaskWorkbenchSelectInput,
TaskWorkbenchSetSessionUnreadInput,
TaskWorkbenchSendMessageInput,
TaskWorkbenchTabInput,
TaskWorkbenchSessionInput,
TaskWorkbenchUpdateDraftInput,
WorkbenchOpenPrSummary,
WorkbenchRepoSummary,
WorkbenchRepositorySummary,
WorkbenchSessionSummary,
WorkbenchTaskSummary,
WorkspaceEvent,
WorkspaceSummarySnapshot,
WorkspaceUseInput,
OrganizationEvent,
OrganizationSummarySnapshot,
OrganizationUseInput,
} from "@sandbox-agent/foundry-shared";
import { getActorRuntimeContext } from "../context.js";
import { getGithubData, getOrCreateGithubData, getTask, getOrCreateHistory, getOrCreateProject, selfWorkspace } from "../handles.js";
import { getGithubData, getOrCreateGithubData, getTask, getOrCreateHistory, getOrCreateRepository, selfOrganization } from "../handles.js";
import { logActorWarning, resolveErrorMessage } from "../logging.js";
import { availableSandboxProviderIds, defaultSandboxProviderId } from "../../sandbox-config.js";
import { normalizeRemoteUrl, repoIdFromRemote } from "../../services/repo.js";
import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js";
import { organizationProfile, taskLookup, repos, providerProfiles, taskSummaries } from "./db/schema.js";
import { defaultSandboxProviderId } from "../../sandbox-config.js";
import { repoIdFromRemote } from "../../services/repo.js";
import { resolveOrganizationGithubAuth } from "../../services/github-auth.js";
import { organizationProfile, taskLookup, repos, taskSummaries } from "./db/schema.js";
import { agentTypeForModel } from "../task/workbench.js";
import { expectQueueResponse } from "../../services/queue.js";
import { workspaceAppActions } from "./app-shell.js";
import { organizationAppActions } from "./app-shell.js";
interface WorkspaceState {
workspaceId: string;
}
interface RefreshProviderProfilesCommand {
providerId?: ProviderId;
interface OrganizationState {
organizationId: string;
}
interface GetTaskInput {
workspaceId: string;
organizationId: string;
taskId: string;
}
@ -65,32 +57,30 @@ interface TaskProxyActionInput extends GetTaskInput {
}
/** Input for repo-overview reads; scoped to one repo in one organization. */
interface RepoOverviewInput {
	organizationId: string;
	repoId: string;
}
const WORKSPACE_QUEUE_NAMES = [
"workspace.command.addRepo",
"workspace.command.createTask",
"workspace.command.refreshProviderProfiles",
"workspace.command.syncGithubOrganizationRepos",
"workspace.command.syncGithubSession",
const ORGANIZATION_QUEUE_NAMES = [
"organization.command.createTask",
"organization.command.syncGithubOrganizationRepos",
"organization.command.syncGithubSession",
] as const;
const SANDBOX_AGENT_REPO = "rivet-dev/sandbox-agent";
type WorkspaceQueueName = (typeof WORKSPACE_QUEUE_NAMES)[number];
type OrganizationQueueName = (typeof ORGANIZATION_QUEUE_NAMES)[number];
export { WORKSPACE_QUEUE_NAMES };
export { ORGANIZATION_QUEUE_NAMES };
export function workspaceWorkflowQueueName(name: WorkspaceQueueName): WorkspaceQueueName {
export function organizationWorkflowQueueName(name: OrganizationQueueName): OrganizationQueueName {
return name;
}
const ORGANIZATION_PROFILE_ROW_ID = "profile";
function assertWorkspace(c: { state: WorkspaceState }, workspaceId: string): void {
if (workspaceId !== c.state.workspaceId) {
throw new Error(`Workspace actor mismatch: actor=${c.state.workspaceId} command=${workspaceId}`);
function assertOrganization(c: { state: OrganizationState }, organizationId: string): void {
if (organizationId !== c.state.organizationId) {
throw new Error(`Organization actor mismatch: actor=${c.state.organizationId} command=${organizationId}`);
}
}
@ -136,12 +126,12 @@ async function collectAllTaskSummaries(c: any): Promise<TaskSummary[]> {
const all: TaskSummary[] = [];
for (const row of repoRows) {
try {
const project = await getOrCreateProject(c, c.state.workspaceId, row.repoId, row.remoteUrl);
const snapshot = await project.listTaskSummaries({ includeArchived: true });
const repository = await getOrCreateRepository(c, c.state.organizationId, row.repoId, row.remoteUrl);
const snapshot = await repository.listTaskSummaries({ includeArchived: true });
all.push(...snapshot);
} catch (error) {
logActorWarning("workspace", "failed collecting tasks for repo", {
workspaceId: c.state.workspaceId,
logActorWarning("organization", "failed collecting tasks for repo", {
organizationId: c.state.organizationId,
repoId: row.repoId,
error: resolveErrorMessage(error),
});
@ -166,7 +156,7 @@ function repoLabelFromRemote(remoteUrl: string): string {
return remoteUrl;
}
function buildRepoSummary(repoRow: { repoId: string; remoteUrl: string; updatedAt: number }, taskRows: WorkbenchTaskSummary[]): WorkbenchRepoSummary {
function buildRepoSummary(repoRow: { repoId: string; remoteUrl: string; updatedAt: number }, taskRows: WorkbenchTaskSummary[]): WorkbenchRepositorySummary {
const repoTasks = taskRows.filter((task) => task.repoId === repoRow.repoId);
const latestActivityMs = repoTasks.reduce((latest, task) => Math.max(latest, task.updatedAtMs), repoRow.updatedAt);
@ -207,14 +197,14 @@ function taskSummaryFromRow(row: any): WorkbenchTaskSummary {
}
/**
 * Returns open PRs not currently claimed by any known task (matched by
 * `repoId:branch`). GitHub lookup failures degrade to an empty list rather
 * than failing the whole snapshot.
 */
async function listOpenPullRequestsSnapshot(c: any, taskRows: WorkbenchTaskSummary[]): Promise<WorkbenchOpenPrSummary[]> {
	const githubData = getGithubData(c, c.state.organizationId);
	const openPullRequests = await githubData.listOpenPullRequests({}).catch(() => []);
	const claimedBranches = new Set(taskRows.filter((task) => task.branch).map((task) => `${task.repoId}:${task.branch}`));
	return openPullRequests.filter((pullRequest: WorkbenchOpenPrSummary) => !claimedBranches.has(`${pullRequest.repoId}:${pullRequest.headRefName}`));
}
async function reconcileWorkbenchProjection(c: any): Promise<WorkspaceSummarySnapshot> {
async function reconcileWorkbenchProjection(c: any): Promise<OrganizationSummarySnapshot> {
const repoRows = await c.db
.select({ repoId: repos.repoId, remoteUrl: repos.remoteUrl, updatedAt: repos.updatedAt })
.from(repos)
@ -224,12 +214,12 @@ async function reconcileWorkbenchProjection(c: any): Promise<WorkspaceSummarySna
const taskRows: WorkbenchTaskSummary[] = [];
for (const row of repoRows) {
try {
const project = await getOrCreateProject(c, c.state.workspaceId, row.repoId, row.remoteUrl);
const summaries = await project.listTaskSummaries({ includeArchived: true });
const repository = await getOrCreateRepository(c, c.state.organizationId, row.repoId, row.remoteUrl);
const summaries = await repository.listTaskSummaries({ includeArchived: true });
for (const summary of summaries) {
try {
await upsertTaskLookupRow(c, summary.taskId, row.repoId);
const task = getTask(c, c.state.workspaceId, row.repoId, summary.taskId);
const task = getTask(c, c.state.organizationId, row.repoId, summary.taskId);
const taskSummary = await task.getTaskSummary({});
taskRows.push(taskSummary);
await c.db
@ -241,8 +231,8 @@ async function reconcileWorkbenchProjection(c: any): Promise<WorkspaceSummarySna
})
.run();
} catch (error) {
logActorWarning("workspace", "failed collecting task summary during reconciliation", {
workspaceId: c.state.workspaceId,
logActorWarning("organization", "failed collecting task summary during reconciliation", {
organizationId: c.state.organizationId,
repoId: row.repoId,
taskId: summary.taskId,
error: resolveErrorMessage(error),
@ -250,8 +240,8 @@ async function reconcileWorkbenchProjection(c: any): Promise<WorkspaceSummarySna
}
}
} catch (error) {
logActorWarning("workspace", "failed collecting repo during workbench reconciliation", {
workspaceId: c.state.workspaceId,
logActorWarning("organization", "failed collecting repo during workbench reconciliation", {
organizationId: c.state.organizationId,
repoId: row.repoId,
error: resolveErrorMessage(error),
});
@ -260,7 +250,7 @@ async function reconcileWorkbenchProjection(c: any): Promise<WorkspaceSummarySna
taskRows.sort((left, right) => right.updatedAtMs - left.updatedAtMs);
return {
workspaceId: c.state.workspaceId,
organizationId: c.state.organizationId,
repos: repoRows.map((row) => buildRepoSummary(row, taskRows)).sort((left, right) => right.latestActivityMs - left.latestActivityMs),
taskSummaries: taskRows,
openPullRequests: await listOpenPullRequestsSnapshot(c, taskRows),
@ -269,33 +259,15 @@ async function reconcileWorkbenchProjection(c: any): Promise<WorkspaceSummarySna
/**
 * Resolves a task actor handle for workbench actions from a taskId alone,
 * using the lookup table to recover the owning repo.
 */
async function requireWorkbenchTask(c: any, taskId: string) {
	const repoId = await resolveRepoId(c, taskId);
	// State only carries organizationId (OrganizationState); the previous
	// `c.state.workspaceId` read no longer exists after the rename.
	return getTask(c, c.state.organizationId, repoId, taskId);
}
/**
 * Polls the task actor (1s interval) until initialization has produced both a
 * branch name and a title, which is what the workbench needs before a session
 * can be attached.
 *
 * @param task - task actor handle exposing `get()`.
 * @param timeoutMs - give-up threshold, default 5 minutes.
 * @returns the ready task record.
 * @throws if the task enters `error` status or the timeout elapses.
 */
async function waitForWorkbenchTaskReady(task: any, timeoutMs = 5 * 60_000): Promise<any> {
	const startedAt = Date.now();
	for (;;) {
		const record = await task.get();
		if (record?.branchName && record?.title) {
			return record;
		}
		if (record?.status === "error") {
			throw new Error("task initialization failed before the workbench session was ready");
		}
		if (Date.now() - startedAt > timeoutMs) {
			throw new Error("timed out waiting for task initialization");
		}
		await delay(1_000);
	}
}
/**
* Reads the workspace sidebar snapshot from the workspace actor's local SQLite
* Reads the organization sidebar snapshot from the organization actor's local SQLite
* plus the org-scoped GitHub actor for open PRs. Task actors still push
* summary updates into `task_summaries`, so the hot read path stays bounded.
*/
async function getWorkspaceSummarySnapshot(c: any): Promise<WorkspaceSummarySnapshot> {
async function getOrganizationSummarySnapshot(c: any): Promise<OrganizationSummarySnapshot> {
const repoRows = await c.db
.select({
repoId: repos.repoId,
@ -309,7 +281,7 @@ async function getWorkspaceSummarySnapshot(c: any): Promise<WorkspaceSummarySnap
const summaries = taskRows.map(taskSummaryFromRow);
return {
workspaceId: c.state.workspaceId,
organizationId: c.state.organizationId,
repos: repoRows.map((row) => buildRepoSummary(row, summaries)).sort((left, right) => right.latestActivityMs - left.latestActivityMs),
taskSummaries: summaries,
openPullRequests: await listOpenPullRequestsSnapshot(c, summaries),
@ -323,61 +295,14 @@ async function broadcastRepoSummary(
): Promise<void> {
const matchingTaskRows = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, repoRow.repoId)).all();
const repo = buildRepoSummary(repoRow, matchingTaskRows.map(taskSummaryFromRow));
c.broadcast("workspaceUpdated", { type, repo } satisfies WorkspaceEvent);
}
async function addRepoMutation(c: any, input: AddRepoInput): Promise<RepoRecord> {
assertWorkspace(c, input.workspaceId);
const remoteUrl = normalizeRemoteUrl(input.remoteUrl);
if (!remoteUrl) {
throw new Error("remoteUrl is required");
}
const { driver } = getActorRuntimeContext();
const auth = await resolveWorkspaceGithubAuth(c, c.state.workspaceId);
await driver.git.validateRemote(remoteUrl, { githubToken: auth?.githubToken ?? null });
const repoId = repoIdFromRemote(remoteUrl);
const now = Date.now();
const existing = await c.db.select({ repoId: repos.repoId }).from(repos).where(eq(repos.repoId, repoId)).get();
await c.db
.insert(repos)
.values({
repoId,
remoteUrl,
createdAt: now,
updatedAt: now,
})
.onConflictDoUpdate({
target: repos.repoId,
set: {
remoteUrl,
updatedAt: now,
},
})
.run();
await broadcastRepoSummary(c, existing ? "repoUpdated" : "repoAdded", {
repoId,
remoteUrl,
updatedAt: now,
});
return {
workspaceId: c.state.workspaceId,
repoId,
remoteUrl,
createdAt: now,
updatedAt: now,
};
c.broadcast("organizationUpdated", { type, repo } satisfies OrganizationEvent);
}
async function createTaskMutation(c: any, input: CreateTaskInput): Promise<TaskRecord> {
assertWorkspace(c, input.workspaceId);
assertOrganization(c, input.organizationId);
const { config } = getActorRuntimeContext();
const providerId = input.providerId ?? defaultSandboxProviderId(config);
const sandboxProviderId = input.sandboxProviderId ?? defaultSandboxProviderId(config);
const repoId = input.repoId;
const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, repoId)).get();
@ -386,27 +311,11 @@ async function createTaskMutation(c: any, input: CreateTaskInput): Promise<TaskR
}
const remoteUrl = repoRow.remoteUrl;
await c.db
.insert(providerProfiles)
.values({
providerId,
profileJson: JSON.stringify({ providerId }),
updatedAt: Date.now(),
})
.onConflictDoUpdate({
target: providerProfiles.providerId,
set: {
profileJson: JSON.stringify({ providerId }),
updatedAt: Date.now(),
},
})
.run();
const repository = await getOrCreateRepository(c, c.state.organizationId, repoId, remoteUrl);
const project = await getOrCreateProject(c, c.state.workspaceId, repoId, remoteUrl);
const created = await project.createTask({
const created = await repository.createTask({
task: input.task,
providerId,
sandboxProviderId,
agentType: input.agentType ?? null,
explicitTitle: input.explicitTitle ?? null,
explicitBranchName: input.explicitBranchName ?? null,
@ -426,13 +335,13 @@ async function createTaskMutation(c: any, input: CreateTaskInput): Promise<TaskR
.run();
try {
const task = getTask(c, c.state.workspaceId, repoId, created.taskId);
await workspaceActions.applyTaskSummaryUpdate(c, {
const task = getTask(c, c.state.organizationId, repoId, created.taskId);
await organizationActions.applyTaskSummaryUpdate(c, {
taskSummary: await task.getTaskSummary({}),
});
} catch (error) {
logActorWarning("workspace", "failed seeding task summary after task creation", {
workspaceId: c.state.workspaceId,
logActorWarning("organization", "failed seeding task summary after task creation", {
organizationId: c.state.organizationId,
repoId,
taskId: created.taskId,
error: resolveErrorMessage(error),
@ -442,34 +351,10 @@ async function createTaskMutation(c: any, input: CreateTaskInput): Promise<TaskR
return created;
}
/**
 * Upserts a minimal profile row for each requested sandbox provider. When no
 * explicit providerId is given, every provider available in the current
 * runtime config is refreshed.
 */
async function refreshProviderProfilesMutation(c: any, command?: RefreshProviderProfilesCommand): Promise<void> {
	const { config } = getActorRuntimeContext();
	const requested = command?.providerId;
	const providerIds: ProviderId[] = requested ? [requested] : availableSandboxProviderIds(config);
	for (const providerId of providerIds) {
		const profileJson = JSON.stringify({ providerId });
		await c.db
			.insert(providerProfiles)
			.values({ providerId, profileJson, updatedAt: Date.now() })
			.onConflictDoUpdate({
				target: providerProfiles.providerId,
				set: { profileJson, updatedAt: Date.now() },
			})
			.run();
	}
}
export async function runWorkspaceWorkflow(ctx: any): Promise<void> {
await ctx.loop("workspace-command-loop", async (loopCtx: any) => {
const msg = await loopCtx.queue.next("next-workspace-command", {
names: [...WORKSPACE_QUEUE_NAMES],
export async function runOrganizationWorkflow(ctx: any): Promise<void> {
await ctx.loop("organization-command-loop", async (loopCtx: any) => {
const msg = await loopCtx.queue.next("next-organization-command", {
names: [...ORGANIZATION_QUEUE_NAMES],
completable: true,
});
if (!msg) {
@ -477,19 +362,9 @@ export async function runWorkspaceWorkflow(ctx: any): Promise<void> {
}
try {
if (msg.name === "workspace.command.addRepo") {
if (msg.name === "organization.command.createTask") {
const result = await loopCtx.step({
name: "workspace-add-repo",
timeout: 60_000,
run: async () => addRepoMutation(loopCtx, msg.body as AddRepoInput),
});
await msg.complete(result);
return Loop.continue(undefined);
}
if (msg.name === "workspace.command.createTask") {
const result = await loopCtx.step({
name: "workspace-create-task",
name: "organization-create-task",
timeout: 5 * 60_000,
run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskInput),
});
@ -497,17 +372,9 @@ export async function runWorkspaceWorkflow(ctx: any): Promise<void> {
return Loop.continue(undefined);
}
if (msg.name === "workspace.command.refreshProviderProfiles") {
await loopCtx.step("workspace-refresh-provider-profiles", async () =>
refreshProviderProfilesMutation(loopCtx, msg.body as RefreshProviderProfilesCommand),
);
await msg.complete({ ok: true });
return Loop.continue(undefined);
}
if (msg.name === "workspace.command.syncGithubSession") {
if (msg.name === "organization.command.syncGithubSession") {
await loopCtx.step({
name: "workspace-sync-github-session",
name: "organization-sync-github-session",
timeout: 60_000,
run: async () => {
const { syncGithubOrganizations } = await import("./app-shell.js");
@ -518,9 +385,9 @@ export async function runWorkspaceWorkflow(ctx: any): Promise<void> {
return Loop.continue(undefined);
}
if (msg.name === "workspace.command.syncGithubOrganizationRepos") {
if (msg.name === "organization.command.syncGithubOrganizationRepos") {
await loopCtx.step({
name: "workspace-sync-github-organization-repos",
name: "organization-sync-github-organization-repos",
timeout: 60_000,
run: async () => {
const { syncGithubOrganizationRepos } = await import("./app-shell.js");
@ -532,14 +399,12 @@ export async function runWorkspaceWorkflow(ctx: any): Promise<void> {
}
} catch (error) {
const message = resolveErrorMessage(error);
logActorWarning("workspace", "workspace workflow command failed", {
workspaceId: loopCtx.state.workspaceId,
logActorWarning("organization", "organization workflow command failed", {
queueName: msg.name,
error: message,
});
await msg.complete({ error: message }).catch((completeError: unknown) => {
logActorWarning("workspace", "workspace workflow failed completing error response", {
workspaceId: loopCtx.state.workspaceId,
logActorWarning("organization", "organization workflow failed completing error response", {
queueName: msg.name,
error: resolveErrorMessage(completeError),
});
@ -550,25 +415,15 @@ export async function runWorkspaceWorkflow(ctx: any): Promise<void> {
});
}
export const organizationActions = {
	...organizationAppActions,

	/** Cheap routing handshake: validates the id and echoes it back. */
	async useOrganization(c: any, input: OrganizationUseInput): Promise<{ organizationId: string }> {
		assertOrganization(c, input.organizationId);
		return { organizationId: c.state.organizationId };
	},
/**
 * Registers a repo by enqueueing an add-repo command on this actor's own
 * workflow queue and waiting (up to 60s) for the queued mutation's response.
 * NOTE(review): still uses the legacy workspace-* queue name and the
 * selfWorkspace handle while the rest of this file uses organization-*
 * naming — confirm this path is still wired up.
 */
async addRepo(c: any, input: AddRepoInput): Promise<RepoRecord> {
const self = selfWorkspace(c);
return expectQueueResponse<RepoRecord>(
await self.send(workspaceWorkflowQueueName("workspace.command.addRepo"), input, {
wait: true,
timeout: 60_000,
}),
);
},
async listRepos(c: any, input: WorkspaceUseInput): Promise<RepoRecord[]> {
assertWorkspace(c, input.workspaceId);
async listRepos(c: any, input: OrganizationUseInput): Promise<RepoRecord[]> {
assertOrganization(c, input.organizationId);
const rows = await c.db
.select({
@ -582,7 +437,7 @@ export const workspaceActions = {
.all();
return rows.map((row) => ({
workspaceId: c.state.workspaceId,
organizationId: c.state.organizationId,
repoId: row.repoId,
remoteUrl: row.remoteUrl,
createdAt: row.createdAt,
@ -591,19 +446,22 @@ export const workspaceActions = {
},
async createTask(c: any, input: CreateTaskInput): Promise<TaskRecord> {
const self = selfWorkspace(c);
const self = selfOrganization(c);
return expectQueueResponse<TaskRecord>(
await self.send(workspaceWorkflowQueueName("workspace.command.createTask"), input, {
await self.send(organizationWorkflowQueueName("organization.command.createTask"), input, {
wait: true,
timeout: 5 * 60_000,
timeout: 10_000,
}),
);
},
async starSandboxAgentRepo(c: any, input: StarSandboxAgentRepoInput): Promise<StarSandboxAgentRepoResult> {
assertWorkspace(c, input.workspaceId);
assertOrganization(c, input.organizationId);
const { driver } = getActorRuntimeContext();
await driver.github.starRepository(SANDBOX_AGENT_REPO);
const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId);
await driver.github.starRepository(SANDBOX_AGENT_REPO, {
githubToken: auth?.githubToken ?? null,
});
return {
repo: SANDBOX_AGENT_REPO,
starredAt: Date.now(),
@ -613,7 +471,7 @@ export const workspaceActions = {
/**
* Called by task actors when their summary-level state changes.
* This is the write path for the local materialized projection; clients read
* the projection via `getWorkspaceSummary`, but only task actors should push
* the projection via `getOrganizationSummary`, but only task actors should push
* rows into it.
*/
async applyTaskSummaryUpdate(c: any, input: { taskSummary: WorkbenchTaskSummary }): Promise<void> {
@ -625,12 +483,12 @@ export const workspaceActions = {
set: taskSummaryRowFromSummary(input.taskSummary),
})
.run();
c.broadcast("workspaceUpdated", { type: "taskSummaryUpdated", taskSummary: input.taskSummary } satisfies WorkspaceEvent);
c.broadcast("organizationUpdated", { type: "taskSummaryUpdated", taskSummary: input.taskSummary } satisfies OrganizationEvent);
},
async removeTaskSummary(c: any, input: { taskId: string }): Promise<void> {
await c.db.delete(taskSummaries).where(eq(taskSummaries.taskId, input.taskId)).run();
c.broadcast("workspaceUpdated", { type: "taskRemoved", taskId: input.taskId } satisfies WorkspaceEvent);
c.broadcast("organizationUpdated", { type: "taskRemoved", taskId: input.taskId } satisfies OrganizationEvent);
},
async findTaskForGithubBranch(c: any, input: { repoId: string; branchName: string }): Promise<{ taskId: string | null }> {
@ -645,13 +503,13 @@ export const workspaceActions = {
for (const summary of matches) {
try {
const task = getTask(c, c.state.workspaceId, input.repoId, summary.taskId);
await workspaceActions.applyTaskSummaryUpdate(c, {
const task = getTask(c, c.state.organizationId, input.repoId, summary.taskId);
await organizationActions.applyTaskSummaryUpdate(c, {
taskSummary: await task.getTaskSummary({}),
});
} catch (error) {
logActorWarning("workspace", "failed refreshing task summary for GitHub branch", {
workspaceId: c.state.workspaceId,
logActorWarning("organization", "failed refreshing task summary for GitHub branch", {
organizationId: c.state.organizationId,
repoId: input.repoId,
branchName: input.branchName,
taskId: summary.taskId,
@ -666,11 +524,11 @@ export const workspaceActions = {
if (summaries.some((summary) => summary.branch === input.pullRequest.headRefName)) {
return;
}
c.broadcast("workspaceUpdated", { type: "pullRequestUpdated", pullRequest: input.pullRequest } satisfies WorkspaceEvent);
c.broadcast("organizationUpdated", { type: "pullRequestUpdated", pullRequest: input.pullRequest } satisfies OrganizationEvent);
},
async removeOpenPullRequest(c: any, input: { prId: string }): Promise<void> {
c.broadcast("workspaceUpdated", { type: "pullRequestRemoved", prId: input.prId } satisfies WorkspaceEvent);
c.broadcast("organizationUpdated", { type: "pullRequestRemoved", prId: input.prId } satisfies OrganizationEvent);
},
async applyGithubRepositoryProjection(c: any, input: { repoId: string; remoteUrl: string }): Promise<void> {
@ -747,7 +605,7 @@ export const workspaceActions = {
continue;
}
await c.db.delete(repos).where(eq(repos.repoId, repo.repoId)).run();
c.broadcast("workspaceUpdated", { type: "repoRemoved", repoId: repo.repoId } satisfies WorkspaceEvent);
c.broadcast("organizationUpdated", { type: "repoRemoved", repoId: repo.repoId } satisfies OrganizationEvent);
}
const profile = await c.db
@ -775,13 +633,13 @@ export const workspaceActions = {
async recordGithubWebhookReceipt(
c: any,
input: {
workspaceId: string;
organizationId: string;
event: string;
action?: string | null;
receivedAt?: number;
},
): Promise<void> {
assertWorkspace(c, input.workspaceId);
assertOrganization(c, input.organizationId);
const profile = await c.db
.select({ id: organizationProfile.id })
@ -802,45 +660,38 @@ export const workspaceActions = {
.run();
},
async getWorkspaceSummary(c: any, input: WorkspaceUseInput): Promise<WorkspaceSummarySnapshot> {
assertWorkspace(c, input.workspaceId);
return await getWorkspaceSummarySnapshot(c);
async getOrganizationSummary(c: any, input: OrganizationUseInput): Promise<OrganizationSummarySnapshot> {
assertOrganization(c, input.organizationId);
return await getOrganizationSummarySnapshot(c);
},
async reconcileWorkbenchState(c: any, input: WorkspaceUseInput): Promise<WorkspaceSummarySnapshot> {
assertWorkspace(c, input.workspaceId);
async reconcileWorkbenchState(c: any, input: OrganizationUseInput): Promise<OrganizationSummarySnapshot> {
assertOrganization(c, input.organizationId);
return await reconcileWorkbenchProjection(c);
},
async createWorkbenchTask(c: any, input: TaskWorkbenchCreateTaskInput): Promise<{ taskId: string; tabId?: string }> {
const created = await workspaceActions.createTask(c, {
workspaceId: c.state.workspaceId,
async createWorkbenchTask(c: any, input: TaskWorkbenchCreateTaskInput): Promise<{ taskId: string; sessionId?: string }> {
// Step 1: Create the task record (wait: true — local state mutations only).
const created = await organizationActions.createTask(c, {
organizationId: c.state.organizationId,
repoId: input.repoId,
task: input.task,
...(input.title ? { explicitTitle: input.title } : {}),
...(input.onBranch ? { onBranch: input.onBranch } : input.branch ? { explicitBranchName: input.branch } : {}),
...(input.model ? { agentType: agentTypeForModel(input.model) } : {}),
});
// Step 2: Enqueue session creation + initial message (wait: false).
// The task workflow creates the session record and sends the message in
// the background. The client observes progress via push events on the
// task subscription topic.
const task = await requireWorkbenchTask(c, created.taskId);
await waitForWorkbenchTaskReady(task);
const session = await task.createWorkbenchSession({
taskId: created.taskId,
...(input.model ? { model: input.model } : {}),
});
await task.sendWorkbenchMessage({
taskId: created.taskId,
tabId: session.tabId,
await task.createWorkbenchSessionAndSend({
model: input.model,
text: input.task,
attachments: [],
waitForCompletion: true,
});
await task.getSessionDetail({
sessionId: session.tabId,
});
return {
taskId: created.taskId,
tabId: session.tabId,
};
return { taskId: created.taskId };
},
async markWorkbenchUnread(c: any, input: TaskWorkbenchSelectInput): Promise<void> {
@ -858,7 +709,7 @@ export const workspaceActions = {
await task.renameWorkbenchBranch(input);
},
async createWorkbenchSession(c: any, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ tabId: string }> {
async createWorkbenchSession(c: any, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }> {
const task = await requireWorkbenchTask(c, input.taskId);
return await task.createWorkbenchSession({ ...(input.model ? { model: input.model } : {}) });
},
@ -888,12 +739,12 @@ export const workspaceActions = {
await task.sendWorkbenchMessage(input);
},
async stopWorkbenchSession(c: any, input: TaskWorkbenchTabInput): Promise<void> {
async stopWorkbenchSession(c: any, input: TaskWorkbenchSessionInput): Promise<void> {
const task = await requireWorkbenchTask(c, input.taskId);
await task.stopWorkbenchSession(input);
},
async closeWorkbenchSession(c: any, input: TaskWorkbenchTabInput): Promise<void> {
async closeWorkbenchSession(c: any, input: TaskWorkbenchSessionInput): Promise<void> {
const task = await requireWorkbenchTask(c, input.taskId);
await task.closeWorkbenchSession(input);
},
@ -909,23 +760,23 @@ export const workspaceActions = {
},
async reloadGithubOrganization(c: any): Promise<void> {
await getOrCreateGithubData(c, c.state.workspaceId).reloadOrganization({});
await getOrCreateGithubData(c, c.state.organizationId).reloadOrganization({});
},
async reloadGithubPullRequests(c: any): Promise<void> {
await getOrCreateGithubData(c, c.state.workspaceId).reloadAllPullRequests({});
await getOrCreateGithubData(c, c.state.organizationId).reloadAllPullRequests({});
},
async reloadGithubRepository(c: any, input: { repoId: string }): Promise<void> {
await getOrCreateGithubData(c, c.state.workspaceId).reloadRepository(input);
await getOrCreateGithubData(c, c.state.organizationId).reloadRepository(input);
},
async reloadGithubPullRequest(c: any, input: { repoId: string; prNumber: number }): Promise<void> {
await getOrCreateGithubData(c, c.state.workspaceId).reloadPullRequest(input);
await getOrCreateGithubData(c, c.state.organizationId).reloadPullRequest(input);
},
async listTasks(c: any, input: ListTasksInput): Promise<TaskSummary[]> {
assertWorkspace(c, input.workspaceId);
assertOrganization(c, input.organizationId);
if (input.repoId) {
const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, input.repoId)).get();
@ -933,67 +784,41 @@ export const workspaceActions = {
throw new Error(`Unknown repo: ${input.repoId}`);
}
const project = await getOrCreateProject(c, c.state.workspaceId, input.repoId, repoRow.remoteUrl);
return await project.listTaskSummaries({ includeArchived: true });
const repository = await getOrCreateRepository(c, c.state.organizationId, input.repoId, repoRow.remoteUrl);
return await repository.listTaskSummaries({ includeArchived: true });
}
return await collectAllTaskSummaries(c);
},
async getRepoOverview(c: any, input: RepoOverviewInput): Promise<RepoOverview> {
assertWorkspace(c, input.workspaceId);
assertOrganization(c, input.organizationId);
const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, input.repoId)).get();
if (!repoRow) {
throw new Error(`Unknown repo: ${input.repoId}`);
}
const project = await getOrCreateProject(c, c.state.workspaceId, input.repoId, repoRow.remoteUrl);
await project.ensure({ remoteUrl: repoRow.remoteUrl });
return await project.getRepoOverview({});
},
async runRepoStackAction(c: any, input: RepoStackActionInput): Promise<RepoStackActionResult> {
assertWorkspace(c, input.workspaceId);
const repoRow = await c.db.select({ remoteUrl: repos.remoteUrl }).from(repos).where(eq(repos.repoId, input.repoId)).get();
if (!repoRow) {
throw new Error(`Unknown repo: ${input.repoId}`);
}
const project = await getOrCreateProject(c, c.state.workspaceId, input.repoId, repoRow.remoteUrl);
await project.ensure({ remoteUrl: repoRow.remoteUrl });
return await project.runRepoStackAction({
action: input.action,
branchName: input.branchName,
parentBranch: input.parentBranch,
});
const repository = await getOrCreateRepository(c, c.state.organizationId, input.repoId, repoRow.remoteUrl);
return await repository.getRepoOverview({});
},
async switchTask(c: any, taskId: string): Promise<SwitchResult> {
const repoId = await resolveRepoId(c, taskId);
const h = getTask(c, c.state.workspaceId, repoId, taskId);
const h = getTask(c, c.state.organizationId, repoId, taskId);
const record = await h.get();
const switched = await h.switch();
return {
workspaceId: c.state.workspaceId,
organizationId: c.state.organizationId,
taskId,
providerId: record.providerId,
sandboxProviderId: record.sandboxProviderId,
switchTarget: switched.switchTarget,
};
},
/**
 * Enqueues a provider-profile refresh on the workflow queue and waits (up to
 * 60s) for it to complete.
 * NOTE(review): still addressed with legacy workspace-* queue naming and the
 * selfWorkspace handle while the rest of this file uses organization-* —
 * confirm the target queue still exists.
 */
async refreshProviderProfiles(c: any, command?: RefreshProviderProfilesCommand): Promise<void> {
const self = selfWorkspace(c);
await self.send(workspaceWorkflowQueueName("workspace.command.refreshProviderProfiles"), command ?? {}, {
wait: true,
timeout: 60_000,
});
},
async history(c: any, input: HistoryQueryInput): Promise<HistoryEvent[]> {
assertWorkspace(c, input.workspaceId);
assertOrganization(c, input.organizationId);
const limit = input.limit ?? 20;
const repoRows = await c.db.select({ repoId: repos.repoId }).from(repos).all();
@ -1002,7 +827,7 @@ export const workspaceActions = {
for (const row of repoRows) {
try {
const hist = await getOrCreateHistory(c, c.state.workspaceId, row.repoId);
const hist = await getOrCreateHistory(c, c.state.organizationId, row.repoId);
const items = await hist.list({
branch: input.branch,
taskId: input.taskId,
@ -1010,8 +835,8 @@ export const workspaceActions = {
});
allEvents.push(...items);
} catch (error) {
logActorWarning("workspace", "history lookup failed for repo", {
workspaceId: c.state.workspaceId,
logActorWarning("organization", "history lookup failed for repo", {
organizationId: c.state.organizationId,
repoId: row.repoId,
error: resolveErrorMessage(error),
});
@ -1023,7 +848,7 @@ export const workspaceActions = {
},
async getTask(c: any, input: GetTaskInput): Promise<TaskRecord> {
assertWorkspace(c, input.workspaceId);
assertOrganization(c, input.organizationId);
const repoId = await resolveRepoId(c, input.taskId);
@ -1032,49 +857,49 @@ export const workspaceActions = {
throw new Error(`Unknown repo: ${repoId}`);
}
const project = await getOrCreateProject(c, c.state.workspaceId, repoId, repoRow.remoteUrl);
return await project.getTaskEnriched({ taskId: input.taskId });
const repository = await getOrCreateRepository(c, c.state.organizationId, repoId, repoRow.remoteUrl);
return await repository.getTaskEnriched({ taskId: input.taskId });
},
async attachTask(c: any, input: TaskProxyActionInput): Promise<{ target: string; sessionId: string | null }> {
assertWorkspace(c, input.workspaceId);
assertOrganization(c, input.organizationId);
const repoId = await resolveRepoId(c, input.taskId);
const h = getTask(c, c.state.workspaceId, repoId, input.taskId);
const h = getTask(c, c.state.organizationId, repoId, input.taskId);
return await h.attach({ reason: input.reason });
},
async pushTask(c: any, input: TaskProxyActionInput): Promise<void> {
assertWorkspace(c, input.workspaceId);
assertOrganization(c, input.organizationId);
const repoId = await resolveRepoId(c, input.taskId);
const h = getTask(c, c.state.workspaceId, repoId, input.taskId);
const h = getTask(c, c.state.organizationId, repoId, input.taskId);
await h.push({ reason: input.reason });
},
async syncTask(c: any, input: TaskProxyActionInput): Promise<void> {
assertWorkspace(c, input.workspaceId);
assertOrganization(c, input.organizationId);
const repoId = await resolveRepoId(c, input.taskId);
const h = getTask(c, c.state.workspaceId, repoId, input.taskId);
const h = getTask(c, c.state.organizationId, repoId, input.taskId);
await h.sync({ reason: input.reason });
},
async mergeTask(c: any, input: TaskProxyActionInput): Promise<void> {
assertWorkspace(c, input.workspaceId);
assertOrganization(c, input.organizationId);
const repoId = await resolveRepoId(c, input.taskId);
const h = getTask(c, c.state.workspaceId, repoId, input.taskId);
const h = getTask(c, c.state.organizationId, repoId, input.taskId);
await h.merge({ reason: input.reason });
},
async archiveTask(c: any, input: TaskProxyActionInput): Promise<void> {
assertWorkspace(c, input.workspaceId);
assertOrganization(c, input.organizationId);
const repoId = await resolveRepoId(c, input.taskId);
const h = getTask(c, c.state.workspaceId, repoId, input.taskId);
const h = getTask(c, c.state.organizationId, repoId, input.taskId);
await h.archive({ reason: input.reason });
},
async killTask(c: any, input: TaskProxyActionInput): Promise<void> {
assertWorkspace(c, input.workspaceId);
assertOrganization(c, input.organizationId);
const repoId = await resolveRepoId(c, input.taskId);
const h = getTask(c, c.state.workspaceId, repoId, input.taskId);
const h = getTask(c, c.state.organizationId, repoId, input.taskId);
await h.kill({ reason: input.reason });
},
};

View file

@ -2,4 +2,4 @@ import { db } from "rivetkit/db/drizzle";
import * as schema from "./schema.js";
import migrations from "./migrations.js";
export const projectDb = db({ schema, migrations });
export const organizationDb = db({ schema, migrations });

View file

@ -0,0 +1,6 @@
import { defineConfig } from "rivetkit/db/drizzle";
export default defineConfig({
out: "./src/actors/organization/db/drizzle",
schema: "./src/actors/organization/db/schema.ts",
});

View file

@ -69,12 +69,6 @@ CREATE TABLE `organization_profile` (
`updated_at` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE `provider_profiles` (
`provider_id` text PRIMARY KEY NOT NULL,
`profile_json` text NOT NULL,
`updated_at` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE `repos` (
`repo_id` text PRIMARY KEY NOT NULL,
`remote_url` text NOT NULL,

View file

@ -457,37 +457,6 @@
"uniqueConstraints": {},
"checkConstraints": {}
},
"provider_profiles": {
"name": "provider_profiles",
"columns": {
"provider_id": {
"name": "provider_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"profile_json": {
"name": "profile_json",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"repos": {
"name": "repos",
"columns": {

View file

@ -22,6 +22,12 @@ const journal = {
tag: "0002_task_summaries",
breakpoints: true,
},
{
idx: 3,
when: 1773810001000,
tag: "0003_drop_provider_profiles",
breakpoints: true,
},
],
} as const;
@ -99,12 +105,6 @@ CREATE TABLE \`organization_profile\` (
\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE \`provider_profiles\` (
\`provider_id\` text PRIMARY KEY NOT NULL,
\`profile_json\` text NOT NULL,
\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE \`repos\` (
\`repo_id\` text PRIMARY KEY NOT NULL,
\`remote_url\` text NOT NULL,
@ -170,6 +170,8 @@ CREATE TABLE IF NOT EXISTS \`auth_verification\` (
\`pull_request_json\` text,
\`sessions_summary_json\` text DEFAULT '[]' NOT NULL
);
`,
m0003: `DROP TABLE IF EXISTS \`provider_profiles\`;
`,
} as const,
};

View file

@ -1,12 +1,6 @@
import { integer, sqliteTable, text } from "rivetkit/db/drizzle";
// SQLite is per workspace actor instance, so no workspaceId column needed.
export const providerProfiles = sqliteTable("provider_profiles", {
providerId: text("provider_id").notNull().primaryKey(),
// Structured by the provider profile snapshot returned by provider integrations.
profileJson: text("profile_json").notNull(),
updatedAt: integer("updated_at").notNull(),
});
// SQLite is per organization actor instance, so no organizationId column needed.
export const repos = sqliteTable("repos", {
repoId: text("repo_id").notNull().primaryKey(),
@ -23,7 +17,7 @@ export const taskLookup = sqliteTable("task_lookup", {
/**
* Materialized sidebar projection maintained by task actors.
* The source of truth still lives on each task actor; this table exists so
* workspace reads can stay local and avoid fan-out across child actors.
* organization reads can stay local and avoid fan-out across child actors.
*/
export const taskSummaries = sqliteTable("task_summaries", {
taskId: text("task_id").notNull().primaryKey(),

View file

@ -0,0 +1,19 @@
import { actor, queue } from "rivetkit";
import { workflow } from "rivetkit/workflow";
import { organizationDb } from "./db/db.js";
import { runOrganizationWorkflow, ORGANIZATION_QUEUE_NAMES, organizationActions } from "./actions.js";

// One workflow-backed queue per organization-level command name.
const organizationQueues = Object.fromEntries(ORGANIZATION_QUEUE_NAMES.map((queueName) => [queueName, queue()]));

/**
 * Organization coordinator actor. Owns org-wide lookup/index state (via
 * organizationDb) and consumes commands from the queues above inside the
 * runOrganizationWorkflow loop.
 */
export const organization = actor({
  db: organizationDb,
  queues: organizationQueues,
  options: {
    name: "Organization",
    icon: "compass",
    // Generous ceiling so fan-out actions to repos/tasks are not cut short.
    actionTimeout: 5 * 60_000,
  },
  // The organization id is the only piece of created state.
  createState: (_c, organizationId: string) => ({
    organizationId,
  }),
  actions: organizationActions,
  run: workflow(runOrganizationWorkflow),
});

View file

@ -1,178 +0,0 @@
import { actor, queue } from "rivetkit";
import { workflow } from "rivetkit/workflow";
import type { GitDriver } from "../../driver.js";
import { getActorRuntimeContext } from "../context.js";
import { getProject, selfProjectBranchSync } from "../handles.js";
import { logActorWarning, resolveErrorMessage, resolveErrorStack } from "../logging.js";
import { type PollingControlState, runWorkflowPollingLoop } from "../polling.js";
import { parentLookupFromStack } from "../project/stack-model.js";
import { withRepoGitLock } from "../../services/repo-git-lock.js";
/** Creation input for a branch-sync polling actor (scoped to one workspace + repo checkout). */
export interface ProjectBranchSyncInput {
  workspaceId: string;
  repoId: string;
  /** Filesystem path of the local git checkout to poll. */
  repoPath: string;
  /** Polling period in milliseconds. */
  intervalMs: number;
}

/** Payload for the set-interval control command. */
interface SetIntervalCommand {
  intervalMs: number;
}

/** One remote branch plus the metadata the parent project actor caches. */
interface EnrichedBranchSnapshot {
  branchName: string;
  commitSha: string;
  /** Parent branch from the stack tool, or null when untracked/self-parented. */
  parentBranch: string | null;
  trackedInStack: boolean;
  diffStat: string | null;
  hasUnpushed: boolean;
  conflictsWithMain: boolean;
}

/** Durable actor state; `intervalMs`/`running` come from PollingControlState. */
interface ProjectBranchSyncState extends PollingControlState {
  workspaceId: string;
  repoId: string;
  repoPath: string;
}

// Queue names doubling as the control-plane command vocabulary for the
// polling workflow loop (start/stop/set-interval/force-immediate-poll).
const CONTROL = {
  start: "project.branch_sync.control.start",
  stop: "project.branch_sync.control.stop",
  setInterval: "project.branch_sync.control.set_interval",
  force: "project.branch_sync.control.force",
} as const;
/**
 * Fetch remote branches for the repo at `repoPath` and enrich each one with
 * stack-parent, diff, divergence, and conflict information.
 *
 * The entire routine runs under the per-repo git lock so concurrent actors
 * cannot interleave git commands against the same checkout. Per-branch
 * enrichment failures are logged and degrade to neutral values (null/false)
 * instead of failing the whole poll.
 */
async function enrichBranches(workspaceId: string, repoId: string, repoPath: string, git: GitDriver): Promise<EnrichedBranchSnapshot[]> {
  return await withRepoGitLock(repoPath, async () => {
    await git.fetch(repoPath);
    const branches = await git.listRemoteBranches(repoPath);
    const { driver } = getActorRuntimeContext();
    // Stack metadata is optional: an unavailable stack tool yields no parents.
    const stackEntries = await driver.stack.listStack(repoPath).catch(() => []);
    const parentByBranch = parentLookupFromStack(stackEntries);
    const enriched: EnrichedBranchSnapshot[] = [];
    const baseRef = await git.remoteDefaultBaseRef(repoPath);
    // An empty baseSha (unresolvable base ref) disables the divergence check below.
    const baseSha = await git.revParse(repoPath, baseRef).catch(() => "");
    for (const branch of branches) {
      let branchDiffStat: string | null = null;
      let branchHasUnpushed = false;
      let branchConflicts = false;
      try {
        branchDiffStat = await git.diffStatForBranch(repoPath, branch.branchName);
      } catch (error) {
        logActorWarning("project-branch-sync", "diffStatForBranch failed", {
          workspaceId,
          repoId,
          branchName: branch.branchName,
          error: resolveErrorMessage(error),
        });
        branchDiffStat = null;
      }
      try {
        // NOTE(review): this flags "remote head differs from the default base
        // ref", not literally "has unpushed local commits" — confirm intent.
        const headSha = await git.revParse(repoPath, `origin/${branch.branchName}`);
        branchHasUnpushed = Boolean(baseSha && headSha && headSha !== baseSha);
      } catch (error) {
        logActorWarning("project-branch-sync", "revParse failed", {
          workspaceId,
          repoId,
          branchName: branch.branchName,
          error: resolveErrorMessage(error),
        });
        branchHasUnpushed = false;
      }
      try {
        branchConflicts = await git.conflictsWithMain(repoPath, branch.branchName);
      } catch (error) {
        logActorWarning("project-branch-sync", "conflictsWithMain failed", {
          workspaceId,
          repoId,
          branchName: branch.branchName,
          error: resolveErrorMessage(error),
        });
        branchConflicts = false;
      }
      enriched.push({
        branchName: branch.branchName,
        commitSha: branch.commitSha,
        parentBranch: parentByBranch.get(branch.branchName) ?? null,
        trackedInStack: parentByBranch.has(branch.branchName),
        diffStat: branchDiffStat,
        hasUnpushed: branchHasUnpushed,
        conflictsWithMain: branchConflicts,
      });
    }
    return enriched;
  });
}
/**
 * One poll iteration: enrich all remote branches and push the snapshot to the
 * owning project actor, which remains the source of truth for branch state.
 */
async function pollBranches(c: { state: ProjectBranchSyncState }): Promise<void> {
  const { driver } = getActorRuntimeContext();
  const enrichedItems = await enrichBranches(c.state.workspaceId, c.state.repoId, c.state.repoPath, driver.git);
  const parent = getProject(c, c.state.workspaceId, c.state.repoId);
  // `at` timestamps the snapshot so the parent can detect stale results.
  await parent.applyBranchSyncResult({ items: enrichedItems, at: Date.now() });
}
/**
 * Polling worker actor that keeps a project's branch cache fresh. It is a
 * worker only — results are pushed to the parent project actor, which stays
 * the source of truth. It is controlled via the CONTROL queues; the public
 * actions are thin wrappers that enqueue a control command and wait for the
 * workflow loop to acknowledge it.
 */
export const projectBranchSync = actor({
  queues: {
    [CONTROL.start]: queue(),
    [CONTROL.stop]: queue(),
    [CONTROL.setInterval]: queue(),
    [CONTROL.force]: queue(),
  },
  options: {
    name: "Project Branch Sync",
    icon: "code-branch",
    // Polling actors rely on timer-based wakeups; sleeping would pause the timer and stop polling.
    noSleep: true,
  },
  createState: (_c, input: ProjectBranchSyncInput): ProjectBranchSyncState => ({
    workspaceId: input.workspaceId,
    repoId: input.repoId,
    repoPath: input.repoPath,
    intervalMs: input.intervalMs,
    // Polling is enabled immediately on creation.
    running: true,
  }),
  actions: {
    // Each action round-trips through the control queue so state changes are
    // applied by the polling loop itself rather than concurrently.
    async start(c): Promise<void> {
      const self = selfProjectBranchSync(c);
      await self.send(CONTROL.start, {}, { wait: true, timeout: 15_000 });
    },
    async stop(c): Promise<void> {
      const self = selfProjectBranchSync(c);
      await self.send(CONTROL.stop, {}, { wait: true, timeout: 15_000 });
    },
    async setIntervalMs(c, payload: SetIntervalCommand): Promise<void> {
      const self = selfProjectBranchSync(c);
      await self.send(CONTROL.setInterval, payload, { wait: true, timeout: 15_000 });
    },
    // force runs a full poll inline, so it gets a much longer wait timeout.
    async force(c): Promise<void> {
      const self = selfProjectBranchSync(c);
      await self.send(CONTROL.force, {}, { wait: true, timeout: 5 * 60_000 });
    },
  },
  run: workflow(async (ctx) => {
    await runWorkflowPollingLoop<ProjectBranchSyncState>(ctx, {
      loopName: "project-branch-sync-loop",
      control: CONTROL,
      onPoll: async (loopCtx) => {
        try {
          await pollBranches(loopCtx);
        } catch (error) {
          // A failed poll is logged and retried on the next tick.
          logActorWarning("project-branch-sync", "poll failed", {
            error: resolveErrorMessage(error),
            stack: resolveErrorStack(error),
          });
        }
      },
    });
  }),
});

File diff suppressed because it is too large Load diff

View file

@ -1,6 +0,0 @@
import { defineConfig } from "rivetkit/db/drizzle";
export default defineConfig({
out: "./src/actors/project/db/drizzle",
schema: "./src/actors/project/db/schema.ts",
});

View file

@ -1,40 +0,0 @@
CREATE TABLE `branches` (
`branch_name` text PRIMARY KEY NOT NULL,
`commit_sha` text NOT NULL,
`parent_branch` text,
`tracked_in_stack` integer DEFAULT 0 NOT NULL,
`diff_stat` text,
`has_unpushed` integer DEFAULT 0 NOT NULL,
`conflicts_with_main` integer DEFAULT 0 NOT NULL,
`first_seen_at` integer,
`last_seen_at` integer,
`updated_at` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE `pr_cache` (
`branch_name` text PRIMARY KEY NOT NULL,
`pr_number` integer NOT NULL,
`state` text NOT NULL,
`title` text NOT NULL,
`pr_url` text,
`pr_author` text,
`is_draft` integer DEFAULT 0 NOT NULL,
`ci_status` text,
`review_status` text,
`reviewer` text,
`fetched_at` integer,
`updated_at` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE `repo_meta` (
`id` integer PRIMARY KEY NOT NULL,
`remote_url` text NOT NULL,
`updated_at` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE `task_index` (
`task_id` text PRIMARY KEY NOT NULL,
`branch_name` text,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL
);

View file

@ -1,265 +0,0 @@
{
"version": "6",
"dialect": "sqlite",
"id": "6ffd6acb-e737-46ee-a8fe-fcfddcdd6ea9",
"prevId": "00000000-0000-0000-0000-000000000000",
"tables": {
"branches": {
"name": "branches",
"columns": {
"branch_name": {
"name": "branch_name",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"commit_sha": {
"name": "commit_sha",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"parent_branch": {
"name": "parent_branch",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"tracked_in_stack": {
"name": "tracked_in_stack",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": 0
},
"diff_stat": {
"name": "diff_stat",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"has_unpushed": {
"name": "has_unpushed",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": 0
},
"conflicts_with_main": {
"name": "conflicts_with_main",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": 0
},
"first_seen_at": {
"name": "first_seen_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"last_seen_at": {
"name": "last_seen_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"pr_cache": {
"name": "pr_cache",
"columns": {
"branch_name": {
"name": "branch_name",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"pr_number": {
"name": "pr_number",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"state": {
"name": "state",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"title": {
"name": "title",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"pr_url": {
"name": "pr_url",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"pr_author": {
"name": "pr_author",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"is_draft": {
"name": "is_draft",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": 0
},
"ci_status": {
"name": "ci_status",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"review_status": {
"name": "review_status",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"reviewer": {
"name": "reviewer",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"fetched_at": {
"name": "fetched_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"repo_meta": {
"name": "repo_meta",
"columns": {
"id": {
"name": "id",
"type": "integer",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"remote_url": {
"name": "remote_url",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"task_index": {
"name": "task_index",
"columns": {
"task_id": {
"name": "task_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"branch_name": {
"name": "branch_name",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
}
},
"views": {},
"enums": {},
"_meta": {
"schemas": {},
"tables": {},
"columns": {}
},
"internal": {
"indexes": {}
}
}

View file

@ -1,46 +0,0 @@
// This file is generated by src/actors/_scripts/generate-actor-migrations.ts.
// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql).
// Do not hand-edit this file.
const journal = {
entries: [
{
idx: 0,
when: 1773376221848,
tag: "0000_useful_la_nuit",
breakpoints: true,
},
],
} as const;
export default {
journal,
migrations: {
m0000: `CREATE TABLE \`branches\` (
\`branch_name\` text PRIMARY KEY NOT NULL,
\`commit_sha\` text NOT NULL,
\`parent_branch\` text,
\`tracked_in_stack\` integer DEFAULT 0 NOT NULL,
\`diff_stat\` text,
\`has_unpushed\` integer DEFAULT 0 NOT NULL,
\`conflicts_with_main\` integer DEFAULT 0 NOT NULL,
\`first_seen_at\` integer,
\`last_seen_at\` integer,
\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE \`repo_meta\` (
\`id\` integer PRIMARY KEY NOT NULL,
\`remote_url\` text NOT NULL,
\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE \`task_index\` (
\`task_id\` text PRIMARY KEY NOT NULL,
\`branch_name\` text,
\`created_at\` integer NOT NULL,
\`updated_at\` integer NOT NULL
);
`,
} as const,
};

View file

@ -1,41 +0,0 @@
import { integer, sqliteTable, text } from "rivetkit/db/drizzle";

// SQLite is per project actor instance (workspaceId+repoId), so no workspaceId/repoId columns needed.

/** Cache of enriched remote-branch state (parent, diff, divergence, conflicts). */
export const branches = sqliteTable("branches", {
  branchName: text("branch_name").notNull().primaryKey(),
  commitSha: text("commit_sha").notNull(),
  parentBranch: text("parent_branch"),
  // Boolean flags stored as 0/1 integers per SQLite convention.
  trackedInStack: integer("tracked_in_stack").notNull().default(0),
  diffStat: text("diff_stat"),
  hasUnpushed: integer("has_unpushed").notNull().default(0),
  conflictsWithMain: integer("conflicts_with_main").notNull().default(0),
  firstSeenAt: integer("first_seen_at"),
  lastSeenAt: integer("last_seen_at"),
  updatedAt: integer("updated_at").notNull(),
});

/** Repo metadata row; presumably a singleton keyed by a constant id — confirm against writers. */
export const repoMeta = sqliteTable("repo_meta", {
  id: integer("id").primaryKey(),
  remoteUrl: text("remote_url").notNull(),
  updatedAt: integer("updated_at").notNull(),
});

/** Task -> branch index so repo-local lookups avoid fanning out to task actors. */
export const taskIndex = sqliteTable("task_index", {
  taskId: text("task_id").notNull().primaryKey(),
  branchName: text("branch_name"),
  createdAt: integer("created_at").notNull(),
  updatedAt: integer("updated_at").notNull(),
});

/** Repo action jobs with their lifecycle status and progress message. */
export const repoActionJobs = sqliteTable("repo_action_jobs", {
  jobId: text("job_id").notNull().primaryKey(),
  action: text("action").notNull(),
  branchName: text("branch_name"),
  parentBranch: text("parent_branch"),
  status: text("status").notNull(),
  message: text("message").notNull(),
  createdAt: integer("created_at").notNull(),
  updatedAt: integer("updated_at").notNull(),
  completedAt: integer("completed_at"),
});

View file

@ -1,30 +0,0 @@
import { actor, queue } from "rivetkit";
import { workflow } from "rivetkit/workflow";
import { projectDb } from "./db/db.js";
import { PROJECT_QUEUE_NAMES, projectActions, runProjectWorkflow } from "./actions.js";

/** Creation input for a project (per-repo coordinator) actor. */
export interface ProjectInput {
  workspaceId: string;
  repoId: string;
  remoteUrl: string;
}

/**
 * Project actor: the repo coordinator, owning repo-local caches/indexes in
 * projectDb. Commands arrive on one queue per PROJECT_QUEUE_NAMES entry and
 * are consumed by the runProjectWorkflow loop.
 */
export const project = actor({
  db: projectDb,
  queues: Object.fromEntries(PROJECT_QUEUE_NAMES.map((name) => [name, queue()])),
  options: {
    name: "Project",
    icon: "folder",
    // Generous ceiling for slow git/network-bound actions.
    actionTimeout: 5 * 60_000,
  },
  createState: (_c, input: ProjectInput) => ({
    workspaceId: input.workspaceId,
    repoId: input.repoId,
    remoteUrl: input.remoteUrl,
    // null until a local checkout path is assigned (happens outside this file).
    localPath: null as string | null,
    // One-time startup flags maintained by the workflow/actions.
    syncActorsStarted: false,
    taskIndexHydrated: false,
  }),
  actions: projectActions,
  run: workflow(runProjectWorkflow),
});

View file

@ -1,69 +0,0 @@
/** One branch -> parent edge as reported by the stack tool. */
export interface StackEntry {
  branchName: string;
  /** Parent branch name; null when the branch has no tracked parent. */
  parentBranch: string | null;
}

/** Minimal row shape needed to order branches for the overview listing. */
export interface OrderedBranchRow {
  branchName: string;
  parentBranch: string | null;
  updatedAt: number;
}
/**
 * Collapse a raw parent-branch value to canonical form: trimmed, never empty,
 * and never the branch itself. Returns null for "no usable parent".
 */
export function normalizeParentBranch(branchName: string, parentBranch: string | null | undefined): string | null {
  const trimmed = parentBranch == null ? "" : parentBranch.trim();
  if (trimmed === "" || trimmed === branchName) {
    return null;
  }
  return trimmed;
}
/**
 * Build a branch -> parent lookup from stack-tool entries. Blank branch names
 * are skipped, later duplicates win, and parents are normalized the same way
 * as normalizeParentBranch: trimmed, null when empty or self-referential.
 */
export function parentLookupFromStack(entries: StackEntry[]): Map<string, string | null> {
  const lookup = new Map<string, string | null>();
  for (const { branchName: rawName, parentBranch } of entries) {
    const name = rawName.trim();
    if (name === "") {
      continue;
    }
    // Inlined normalization: trimmed, non-empty, non-self parent — else null.
    const parent = parentBranch?.trim() || null;
    lookup.set(name, parent === null || parent === name ? null : parent);
  }
  return lookup;
}
/**
 * Order branches for the overview: shallowest stack depth first, then most
 * recently updated, then lexicographic branch name. Depth is memoized per
 * branch; a branch already on the current resolution path (a parent cycle)
 * is treated as depth 999 without being memoized, and a resolvable parent
 * chain is capped at depth 998. Returns a new array; input is not mutated.
 */
export function sortBranchesForOverview(rows: OrderedBranchRow[]): OrderedBranchRow[] {
  const rowsByName = new Map(rows.map((row) => [row.branchName, row]));
  const memo = new Map<string, number>();
  const inProgress = new Set<string>();

  function resolveDepth(name: string): number {
    const known = memo.get(name);
    if (known != null) {
      return known;
    }
    if (inProgress.has(name)) {
      // Cycle guard: unmemoized sentinel so each path detects it independently.
      return 999;
    }
    inProgress.add(name);
    const parent = rowsByName.get(name)?.parentBranch;
    let depth = 0;
    if (parent && parent !== name && rowsByName.has(parent)) {
      depth = Math.min(998, resolveDepth(parent) + 1);
    }
    inProgress.delete(name);
    memo.set(name, depth);
    return depth;
  }

  return [...rows].sort((left, right) => {
    const leftDepth = resolveDepth(left.branchName);
    const rightDepth = resolveDepth(right.branchName);
    if (leftDepth !== rightDepth) {
      return leftDepth - rightDepth;
    }
    if (left.updatedAt !== right.updatedAt) {
      return right.updatedAt - left.updatedAt;
    }
    return left.branchName.localeCompare(right.branchName);
  });
}

View file

@ -0,0 +1,557 @@
// @ts-nocheck
import { randomUUID } from "node:crypto";
import { and, desc, eq, isNotNull, ne } from "drizzle-orm";
import { Loop } from "rivetkit/workflow";
import type { AgentType, RepoOverview, SandboxProviderId, TaskRecord, TaskSummary } from "@sandbox-agent/foundry-shared";
import { getGithubData, getOrCreateHistory, getOrCreateTask, getTask, selfRepository } from "../handles.js";
import { deriveFallbackTitle, resolveCreateFlowDecision } from "../../services/create-flow.js";
import { expectQueueResponse } from "../../services/queue.js";
import { isActorNotFoundError, logActorWarning, resolveErrorMessage } from "../logging.js";
import { repoMeta, taskIndex } from "./db/schema.js";
/** Command payload for creating a task in this repository. */
interface CreateTaskCommand {
  task: string;
  sandboxProviderId: SandboxProviderId;
  agentType: AgentType | null;
  explicitTitle: string | null;
  explicitBranchName: string | null;
  initialPrompt: string | null;
  /** Existing remote branch to attach to; null means derive a fresh branch. */
  onBranch: string | null;
}

/** Command payload for claiming a branch on behalf of a task. */
interface RegisterTaskBranchCommand {
  taskId: string;
  branchName: string;
  /** When true, fail unless the branch already exists on the GitHub remote. */
  requireExistingRemote?: boolean;
}

interface ListTaskSummariesCommand {
  includeArchived?: boolean;
}

interface GetTaskEnrichedCommand {
  taskId: string;
}

interface GetPullRequestForBranchCommand {
  branchName: string;
}

// Queue names doubling as the repository workflow's command vocabulary.
const REPOSITORY_QUEUE_NAMES = ["repository.command.createTask", "repository.command.registerTaskBranch"] as const;
type RepositoryQueueName = (typeof REPOSITORY_QUEUE_NAMES)[number];
export { REPOSITORY_QUEUE_NAMES };

/** Identity helper that type-checks a queue name against the known command set. */
export function repositoryWorkflowQueueName(name: RepositoryQueueName): RepositoryQueueName {
  return name;
}
/**
 * True when an error indicates the referenced task actor no longer exists:
 * either an actor-not-found failure or a "Task not found:" error message.
 */
function isStaleTaskReferenceError(error: unknown): boolean {
  const message = resolveErrorMessage(error);
  if (isActorNotFoundError(error)) {
    return true;
  }
  return message.startsWith("Task not found:");
}
/**
 * Record the repo's remote URL both in actor state and in the repo_meta
 * table (upserted against the fixed id 1) so it survives actor restarts.
 */
async function persistRemoteUrl(c: any, remoteUrl: string): Promise<void> {
  c.state.remoteUrl = remoteUrl;
  await c.db
    .insert(repoMeta)
    .values({
      // repo_meta is used as a singleton row keyed by the constant id 1.
      id: 1,
      remoteUrl,
      updatedAt: Date.now(),
    })
    .onConflictDoUpdate({
      target: repoMeta.id,
      set: {
        remoteUrl,
        updatedAt: Date.now(),
      },
    })
    .run();
}
/**
 * Best-effort removal of a task_index row whose task actor no longer exists.
 * Any database error is deliberately swallowed.
 */
async function deleteStaleTaskIndexRow(c: any, taskId: string): Promise<void> {
  try {
    const matchesStaleTask = eq(taskIndex.taskId, taskId);
    await c.db.delete(taskIndex).where(matchesStaleTask).run();
  } catch {
    // Swallow: this is best-effort cleanup of an index row only.
  }
}
/**
 * Upsert a task_index row (e.g. to restore one previously removed as stale).
 * On conflict only branchName and updatedAt are refreshed; createdAt is kept.
 */
async function reinsertTaskIndexRow(c: any, taskId: string, branchName: string | null, updatedAt: number): Promise<void> {
  const now = Date.now();
  await c.db
    .insert(taskIndex)
    .values({
      taskId,
      branchName,
      // NOTE(review): the `updatedAt` argument seeds createdAt here (falling
      // back to now when 0) — confirm callers pass the original creation time.
      createdAt: updatedAt || now,
      updatedAt: now,
    })
    .onConflictDoUpdate({
      target: taskIndex.taskId,
      set: {
        branchName,
        updatedAt: now,
      },
    })
    .run();
}
/** All non-blank branch names currently reserved by task_index rows. */
async function listKnownTaskBranches(c: any): Promise<string[]> {
  const rows = await c.db
    .select({ branchName: taskIndex.branchName })
    .from(taskIndex)
    .where(isNotNull(taskIndex.branchName))
    .all();
  const names: string[] = [];
  for (const row of rows) {
    const value = row.branchName;
    // Defensive: the SQL filter excludes NULL, but blank strings can remain.
    if (typeof value === "string" && value.trim().length > 0) {
      names.push(value);
    }
  }
  return names;
}
/** Repo metadata from the org-level GitHub data actor; null on any failure. */
async function resolveGitHubRepository(c: any) {
  const githubData = getGithubData(c, c.state.organizationId);
  return await githubData.getRepository({ repoId: c.state.repoId }).catch(() => null);
}

/** Remote branches from the org-level GitHub data actor; empty list on failure. */
async function listGitHubBranches(c: any): Promise<Array<{ branchName: string; commitSha: string }>> {
  const githubData = getGithubData(c, c.state.organizationId);
  return await githubData.listBranchesForRepository({ repoId: c.state.repoId }).catch(() => []);
}
/**
 * Overlay GitHub pull-request metadata onto a task record. Records without a
 * non-blank branch name are returned unchanged. The PR lookup is best-effort:
 * any GitHub-data failure degrades to null PR fields rather than throwing.
 */
async function enrichTaskRecord(c: any, record: TaskRecord): Promise<TaskRecord> {
  const branchName = record.branchName?.trim() || null;
  if (!branchName) {
    return record;
  }
  // The early return above guarantees branchName is non-null here, so the
  // previous `branchName != null ? ... : null` ternary guard was dead code
  // and has been removed; behavior is unchanged.
  const pr = await getGithubData(c, c.state.organizationId)
    .listPullRequestsForRepository({ repoId: c.state.repoId })
    .then((rows: any[]) => rows.find((row) => row.headRefName === branchName) ?? null)
    .catch(() => null);
  return {
    ...record,
    prUrl: pr?.url ?? null,
    prAuthor: pr?.authorLogin ?? null,
    // CI/review status are not sourced from GitHub data here.
    ciStatus: null,
    reviewStatus: null,
    // NOTE(review): reviewer mirrors the PR author — confirm this is intended.
    reviewer: pr?.authorLogin ?? null,
    diffStat: record.diffStat ?? null,
    hasUnpushed: record.hasUnpushed ?? null,
    conflictsWithMain: record.conflictsWithMain ?? null,
    parentBranch: record.parentBranch ?? null,
  };
}
/**
 * Create a new task in this repo. Two creation paths:
 *  - `onBranch` set: attach the task to an existing remote branch (the
 *    branch must already exist on the remote — `requireExistingRemote`).
 *  - otherwise: derive a title/branch via the create-flow rules and reserve
 *    the branch in task_index before the task actor exists.
 * If creating the task actor fails, the branch reservation is rolled back.
 * Appends a "task.created" history event after successful initialization.
 */
async function createTaskMutation(c: any, cmd: CreateTaskCommand): Promise<TaskRecord> {
  const organizationId = c.state.organizationId;
  const repoId = c.state.repoId;
  const repoRemote = c.state.remoteUrl;
  const onBranch = cmd.onBranch?.trim() || null;
  const taskId = randomUUID();
  let initialBranchName: string | null = null;
  let initialTitle: string | null = null;
  // Keep the persisted remote URL fresh alongside every mutation.
  await persistRemoteUrl(c, repoRemote);
  if (onBranch) {
    initialBranchName = onBranch;
    initialTitle = deriveFallbackTitle(cmd.task, cmd.explicitTitle ?? undefined);
    await registerTaskBranchMutation(c, {
      taskId,
      branchName: onBranch,
      requireExistingRemote: true,
    });
  } else {
    // Reserve a branch name that does not collide with other tasks' branches.
    const reservedBranches = await listKnownTaskBranches(c);
    const resolved = resolveCreateFlowDecision({
      task: cmd.task,
      explicitTitle: cmd.explicitTitle ?? undefined,
      explicitBranchName: cmd.explicitBranchName ?? undefined,
      localBranches: [],
      taskBranches: reservedBranches,
    });
    initialBranchName = resolved.branchName;
    initialTitle = resolved.title;
    const now = Date.now();
    await c.db
      .insert(taskIndex)
      .values({
        taskId,
        branchName: resolved.branchName,
        createdAt: now,
        updatedAt: now,
      })
      .onConflictDoNothing()
      .run();
  }
  let taskHandle: Awaited<ReturnType<typeof getOrCreateTask>>;
  try {
    taskHandle = await getOrCreateTask(c, organizationId, repoId, taskId, {
      organizationId,
      repoId,
      taskId,
      repoRemote,
      branchName: initialBranchName,
      title: initialTitle,
      task: cmd.task,
      sandboxProviderId: cmd.sandboxProviderId,
      agentType: cmd.agentType,
      explicitTitle: null,
      explicitBranchName: null,
      initialPrompt: cmd.initialPrompt,
    });
  } catch (error) {
    // Roll back the index reservation so the branch can be reused.
    if (initialBranchName) {
      await deleteStaleTaskIndexRow(c, taskId);
    }
    throw error;
  }
  const created = await taskHandle.initialize({ sandboxProviderId: cmd.sandboxProviderId });
  const history = await getOrCreateHistory(c, organizationId, repoId);
  await history.append({
    kind: "task.created",
    taskId,
    payload: {
      repoId,
      sandboxProviderId: cmd.sandboxProviderId,
    },
  });
  return created;
}
/**
 * Claim `branchName` for task `cmd.taskId` in the task_index.
 *
 * If a different task already owns the branch, the owner's actor is probed:
 * a stale reference frees the branch (and its index row is deleted), while a
 * live owner makes this call throw. Returns the branch plus a best-effort
 * head SHA from GitHub data, falling back to the default branch's SHA and
 * then to the empty string. Throws when `requireExistingRemote` is set and
 * the branch is missing from the remote.
 */
async function registerTaskBranchMutation(c: any, cmd: RegisterTaskBranchCommand): Promise<{ branchName: string; headSha: string }> {
  const branchName = cmd.branchName.trim();
  if (!branchName) {
    throw new Error("branchName is required");
  }
  await persistRemoteUrl(c, c.state.remoteUrl);
  // Look for a *different* task already holding this branch.
  const existingOwner = await c.db
    .select({ taskId: taskIndex.taskId })
    .from(taskIndex)
    .where(and(eq(taskIndex.branchName, branchName), ne(taskIndex.taskId, cmd.taskId)))
    .get();
  if (existingOwner) {
    let ownerMissing = false;
    try {
      // Probe the recorded owner's task actor.
      await getTask(c, c.state.organizationId, c.state.repoId, existingOwner.taskId).get();
    } catch (error) {
      if (isStaleTaskReferenceError(error)) {
        ownerMissing = true;
        await deleteStaleTaskIndexRow(c, existingOwner.taskId);
      } else {
        throw error;
      }
    }
    if (!ownerMissing) {
      throw new Error(`branch is already assigned to a different task: ${branchName}`);
    }
  }
  const branches = await listGitHubBranches(c);
  const branchMatch = branches.find((branch) => branch.branchName === branchName) ?? null;
  if (cmd.requireExistingRemote && !branchMatch) {
    throw new Error(`Remote branch not found: ${branchName}`);
  }
  const repository = await resolveGitHubRepository(c);
  const defaultBranch = repository?.defaultBranch ?? "main";
  // Best-effort head SHA: exact branch, else default branch, else "".
  const headSha = branchMatch?.commitSha ?? branches.find((branch) => branch.branchName === defaultBranch)?.commitSha ?? "";
  const now = Date.now();
  await c.db
    .insert(taskIndex)
    .values({
      taskId: cmd.taskId,
      branchName,
      createdAt: now,
      updatedAt: now,
    })
    .onConflictDoUpdate({
      target: taskIndex.taskId,
      set: {
        branchName,
        updatedAt: now,
      },
    })
    .run();
  return { branchName, headSha };
}
/**
 * Build the sidebar task-summary projection by asking each indexed task actor
 * for its record. Stale index rows (task actor gone) are deleted as they are
 * discovered; other per-row failures are logged and the row skipped. Archived
 * tasks are filtered out unless `includeArchived` is set. The final in-memory
 * sort by updatedAt descending is authoritative (task records can carry newer
 * timestamps than the index rows the SQL ORDER BY used).
 */
async function listTaskSummaries(c: any, includeArchived = false): Promise<TaskSummary[]> {
  const taskRows = await c.db.select({ taskId: taskIndex.taskId }).from(taskIndex).orderBy(desc(taskIndex.updatedAt)).all();
  const records: TaskSummary[] = [];
  for (const row of taskRows) {
    try {
      const record = await getTask(c, c.state.organizationId, c.state.repoId, row.taskId).get();
      if (!includeArchived && record.status === "archived") {
        continue;
      }
      records.push({
        organizationId: record.organizationId,
        repoId: record.repoId,
        taskId: record.taskId,
        branchName: record.branchName,
        title: record.title,
        status: record.status,
        updatedAt: record.updatedAt,
      });
    } catch (error) {
      if (isStaleTaskReferenceError(error)) {
        // Self-heal the index: drop rows whose task actor no longer exists.
        await deleteStaleTaskIndexRow(c, row.taskId);
        continue;
      }
      logActorWarning("repository", "failed loading task summary row", {
        organizationId: c.state.organizationId,
        repoId: c.state.repoId,
        taskId: row.taskId,
        error: resolveErrorMessage(error),
      });
    }
  }
  records.sort((a, b) => b.updatedAt - a.updatedAt);
  return records;
}
/**
 * Order overview branch rows for display: the default branch first, then
 * branches that have an associated task, then newest-updated, then name.
 * Returns a new array; the input is left unmodified.
 */
function sortOverviewBranches(
  branches: Array<{
    branchName: string;
    commitSha: string;
    taskId: string | null;
    taskTitle: string | null;
    taskStatus: TaskRecord["status"] | null;
    prNumber: number | null;
    prState: string | null;
    prUrl: string | null;
    ciStatus: string | null;
    reviewStatus: string | null;
    reviewer: string | null;
    updatedAt: number;
  }>,
  defaultBranch: string | null,
) {
  const compare = (a: (typeof branches)[number], b: (typeof branches)[number]): number => {
    if (defaultBranch) {
      const aIsDefault = a.branchName === defaultBranch;
      const bIsDefault = b.branchName === defaultBranch;
      if (aIsDefault !== bIsDefault) {
        return aIsDefault ? -1 : 1;
      }
    }
    const aHasTask = Boolean(a.taskId);
    const bHasTask = Boolean(b.taskId);
    if (aHasTask !== bHasTask) {
      return aHasTask ? -1 : 1;
    }
    if (a.updatedAt !== b.updatedAt) {
      return b.updatedAt - a.updatedAt;
    }
    return a.branchName.localeCompare(b.branchName);
  };
  return [...branches].sort(compare);
}
/**
 * Repository workflow: a single serialized command loop. Each iteration pulls
 * the next queued command (completable), executes it as a durable step with a
 * per-command timeout, and replies through msg.complete. Failures are logged
 * and completed with an `{ error }` payload so the loop never crashes.
 */
export async function runRepositoryWorkflow(ctx: any): Promise<void> {
  await ctx.loop("repository-command-loop", async (loopCtx: any) => {
    const msg = await loopCtx.queue.next("next-repository-command", {
      names: [...REPOSITORY_QUEUE_NAMES],
      completable: true,
    });
    if (!msg) {
      return Loop.continue(undefined);
    }
    try {
      if (msg.name === "repository.command.createTask") {
        const result = await loopCtx.step({
          name: "repository-create-task",
          // Task creation can fan out to other actors; allow a long timeout.
          timeout: 5 * 60_000,
          run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskCommand),
        });
        await msg.complete(result);
        return Loop.continue(undefined);
      }
      if (msg.name === "repository.command.registerTaskBranch") {
        const result = await loopCtx.step({
          name: "repository-register-task-branch",
          timeout: 60_000,
          run: async () => registerTaskBranchMutation(loopCtx, msg.body as RegisterTaskBranchCommand),
        });
        await msg.complete(result);
        return Loop.continue(undefined);
      }
    } catch (error) {
      const message = resolveErrorMessage(error);
      logActorWarning("repository", "repository workflow command failed", {
        queueName: msg.name,
        error: message,
      });
      // Best-effort error reply; completion failures are ignored.
      await msg.complete({ error: message }).catch(() => {});
    }
    return Loop.continue(undefined);
  });
}
/**
 * Action surface of the repository actor.
 *
 * Mutations (createTask, registerTaskBranch) are forwarded to the workflow
 * queue and executed by runRepositoryWorkflow; reads are served directly from
 * the actor-local task index and from GitHub-derived data.
 */
export const repositoryActions = {
  // Enqueue task creation and wait (up to 10s) for the workflow's response.
  // NOTE(review): the workflow step itself is allowed 5 minutes — confirm
  // callers tolerate this wait timing out while the step keeps running.
  async createTask(c: any, cmd: CreateTaskCommand): Promise<TaskRecord> {
    const self = selfRepository(c);
    return expectQueueResponse<TaskRecord>(
      await self.send(repositoryWorkflowQueueName("repository.command.createTask"), cmd, {
        wait: true,
        timeout: 10_000,
      }),
    );
  },
  // Branch names already associated with tasks in this repo.
  async listReservedBranches(c: any): Promise<string[]> {
    return await listKnownTaskBranches(c);
  },
  // Enqueue branch registration for a task and wait for the resulting head SHA.
  async registerTaskBranch(c: any, cmd: RegisterTaskBranchCommand): Promise<{ branchName: string; headSha: string }> {
    const self = selfRepository(c);
    return expectQueueResponse<{ branchName: string; headSha: string }>(
      await self.send(repositoryWorkflowQueueName("repository.command.registerTaskBranch"), cmd, {
        wait: true,
        timeout: 10_000,
      }),
    );
  },
  // Lightweight task list; archived tasks are excluded unless explicitly asked for.
  async listTaskSummaries(c: any, cmd?: ListTaskSummariesCommand): Promise<TaskSummary[]> {
    return await listTaskSummaries(c, cmd?.includeArchived === true);
  },
  // Load one task with enriched fields, self-healing the local task index:
  // a missing index row is re-inserted, a stale row (task gone) is deleted.
  async getTaskEnriched(c: any, cmd: GetTaskEnrichedCommand): Promise<TaskRecord> {
    const row = await c.db.select({ taskId: taskIndex.taskId }).from(taskIndex).where(eq(taskIndex.taskId, cmd.taskId)).get();
    if (!row) {
      // Index row missing: fetch from the task actor and repair the index.
      const record = await getTask(c, c.state.organizationId, c.state.repoId, cmd.taskId).get();
      await reinsertTaskIndexRow(c, cmd.taskId, record.branchName ?? null, record.updatedAt ?? Date.now());
      return await enrichTaskRecord(c, record);
    }
    try {
      const record = await getTask(c, c.state.organizationId, c.state.repoId, cmd.taskId).get();
      return await enrichTaskRecord(c, record);
    } catch (error) {
      if (isStaleTaskReferenceError(error)) {
        // The index pointed at a task that no longer resolves; prune it.
        await deleteStaleTaskIndexRow(c, cmd.taskId);
        throw new Error(`Unknown task in repo ${c.state.repoId}: ${cmd.taskId}`);
      }
      throw error;
    }
  },
  // GitHub-facing repo identity (default branch / full name) plus remote URL.
  async getRepositoryMetadata(c: any): Promise<{ defaultBranch: string | null; fullName: string | null; remoteUrl: string }> {
    const repository = await resolveGitHubRepository(c);
    return {
      defaultBranch: repository?.defaultBranch ?? null,
      fullName: repository?.fullName ?? null,
      remoteUrl: c.state.remoteUrl,
    };
  },
  // Build the branch-centric overview: GitHub branches merged with local task
  // metadata and open PR info, sorted for display.
  async getRepoOverview(c: any): Promise<RepoOverview> {
    await persistRemoteUrl(c, c.state.remoteUrl);
    const now = Date.now();
    const repository = await resolveGitHubRepository(c);
    // Best-effort external lookups: fall back to empty lists on failure.
    const githubBranches = await listGitHubBranches(c).catch(() => []);
    const githubData = getGithubData(c, c.state.organizationId);
    const prRows = await githubData.listPullRequestsForRepository({ repoId: c.state.repoId }).catch(() => []);
    const prByBranch = new Map(prRows.map((row) => [row.headRefName, row]));
    const taskRows = await c.db
      .select({
        taskId: taskIndex.taskId,
        branchName: taskIndex.branchName,
        updatedAt: taskIndex.updatedAt,
      })
      .from(taskIndex)
      .all();
    const taskMetaByBranch = new Map<string, { taskId: string; title: string | null; status: TaskRecord["status"] | null; updatedAt: number }>();
    for (const row of taskRows) {
      if (!row.branchName) {
        // Task has no branch yet; nothing to merge into the overview.
        continue;
      }
      try {
        const record = await getTask(c, c.state.organizationId, c.state.repoId, row.taskId).get();
        taskMetaByBranch.set(row.branchName, {
          taskId: row.taskId,
          title: record.title ?? null,
          status: record.status,
          updatedAt: record.updatedAt,
        });
      } catch (error) {
        if (isStaleTaskReferenceError(error)) {
          // Self-heal: drop index rows whose task no longer resolves.
          await deleteStaleTaskIndexRow(c, row.taskId);
          continue;
        }
        // NOTE(review): other errors are silently dropped here (the branch just
        // loses its task metadata) — confirm that is intended.
      }
    }
    // Union of branch sources: GitHub branches, task branches not (yet) on
    // GitHub, and the default branch. Unknown commit SHAs are empty strings.
    const branchMap = new Map<string, { branchName: string; commitSha: string }>();
    for (const branch of githubBranches) {
      branchMap.set(branch.branchName, branch);
    }
    for (const branchName of taskMetaByBranch.keys()) {
      if (!branchMap.has(branchName)) {
        branchMap.set(branchName, { branchName, commitSha: "" });
      }
    }
    if (repository?.defaultBranch && !branchMap.has(repository.defaultBranch)) {
      branchMap.set(repository.defaultBranch, { branchName: repository.defaultBranch, commitSha: "" });
    }
    const branches = sortOverviewBranches(
      [...branchMap.values()].map((branch) => {
        const taskMeta = taskMetaByBranch.get(branch.branchName);
        const pr = prByBranch.get(branch.branchName);
        return {
          branchName: branch.branchName,
          commitSha: branch.commitSha,
          taskId: taskMeta?.taskId ?? null,
          taskTitle: taskMeta?.title ?? null,
          taskStatus: taskMeta?.status ?? null,
          prNumber: pr?.number ?? null,
          prState: pr?.state ?? null,
          prUrl: pr?.url ?? null,
          ciStatus: null, // CI/review enrichment not populated here
          reviewStatus: null,
          reviewer: pr?.authorLogin ?? null,
          // NOTE(review): including `now` makes Math.max always yield `now`
          // for past timestamps, so updatedAt never differentiates branches
          // during sorting — confirm `now` was meant only as a fallback
          // (e.g. Math.max(task, pr) || now) rather than an operand.
          updatedAt: Math.max(taskMeta?.updatedAt ?? 0, pr?.updatedAtMs ?? 0, now),
        };
      }),
      repository?.defaultBranch ?? null,
    );
    return {
      organizationId: c.state.organizationId,
      repoId: c.state.repoId,
      remoteUrl: c.state.remoteUrl,
      baseRef: repository?.defaultBranch ?? null,
      fetchedAt: now,
      branches,
    };
  },
  // Resolve the PR (number + draft/ready) currently tracked for a branch, if any.
  async getPullRequestForBranch(c: any, cmd: GetPullRequestForBranchCommand): Promise<{ number: number; status: "draft" | "ready" } | null> {
    const branchName = cmd.branchName?.trim();
    if (!branchName) {
      return null;
    }
    const githubData = getGithubData(c, c.state.organizationId);
    return await githubData.getPullRequestForBranch({
      repoId: c.state.repoId,
      branchName,
    });
  },
};

View file

@ -2,4 +2,4 @@ import { db } from "rivetkit/db/drizzle";
import * as schema from "./schema.js";
import migrations from "./migrations.js";
export const workspaceDb = db({ schema, migrations });
export const repositoryDb = db({ schema, migrations });

View file

@ -0,0 +1,6 @@
// Drizzle-kit configuration for the repository actor's SQLite schema.
// Generated migration artifacts are written to the `out` directory.
import { defineConfig } from "rivetkit/db/drizzle";
export default defineConfig({
  out: "./src/actors/repository/db/drizzle",
  schema: "./src/actors/repository/db/schema.ts",
});

View file

@ -0,0 +1,12 @@
CREATE TABLE `repo_meta` (
`id` integer PRIMARY KEY NOT NULL,
`remote_url` text NOT NULL,
`updated_at` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE `task_index` (
`task_id` text PRIMARY KEY NOT NULL,
`branch_name` text,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL
);

View file

@ -0,0 +1,87 @@
{
"version": "6",
"dialect": "sqlite",
"id": "6ffd6acb-e737-46ee-a8fe-fcfddcdd6ea9",
"prevId": "00000000-0000-0000-0000-000000000000",
"tables": {
"repo_meta": {
"name": "repo_meta",
"columns": {
"id": {
"name": "id",
"type": "integer",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"remote_url": {
"name": "remote_url",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"task_index": {
"name": "task_index",
"columns": {
"task_id": {
"name": "task_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"branch_name": {
"name": "branch_name",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
}
},
"views": {},
"enums": {},
"_meta": {
"schemas": {},
"tables": {},
"columns": {}
},
"internal": {
"indexes": {}
}
}

View file

@ -0,0 +1,43 @@
// This file is generated by src/actors/_scripts/generate-actor-migrations.ts.
// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql).
// Do not hand-edit this file.
const journal = {
entries: [
{
idx: 0,
when: 1773376221848,
tag: "0000_useful_la_nuit",
breakpoints: true,
},
{
idx: 1,
when: 1778900000000,
tag: "0001_remove_local_git_state",
breakpoints: true,
},
],
} as const;
export default {
journal,
migrations: {
m0000: `CREATE TABLE \`repo_meta\` (
\t\`id\` integer PRIMARY KEY NOT NULL,
\t\`remote_url\` text NOT NULL,
\t\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE \`task_index\` (
\t\`task_id\` text PRIMARY KEY NOT NULL,
\t\`branch_name\` text,
\t\`created_at\` integer NOT NULL,
\t\`updated_at\` integer NOT NULL
);
`,
m0001: `DROP TABLE IF EXISTS \`branches\`;
--> statement-breakpoint
DROP TABLE IF EXISTS \`repo_action_jobs\`;
`,
} as const,
};

View file

@ -0,0 +1,16 @@
import { integer, sqliteTable, text } from "rivetkit/db/drizzle";
// SQLite is per repository actor instance (organizationId+repoId).

// Metadata about the repository this actor instance tracks.
// NOTE(review): presumably a single row keyed by a fixed id — confirm.
export const repoMeta = sqliteTable("repo_meta", {
  id: integer("id").primaryKey(),
  // Git remote URL for this repository.
  remoteUrl: text("remote_url").notNull(),
  // Last write time; presumably epoch milliseconds (Date.now()) — confirm.
  updatedAt: integer("updated_at").notNull(),
});

// Lookup index of tasks belonging to this repository. The task actor is the
// source of truth; this table only maps taskId to its branch for fast listing.
export const taskIndex = sqliteTable("task_index", {
  taskId: text("task_id").notNull().primaryKey(),
  // Null until a branch has been assigned to the task.
  branchName: text("branch_name"),
  createdAt: integer("created_at").notNull(),
  updatedAt: integer("updated_at").notNull(),
});

View file

@ -0,0 +1,27 @@
import { actor, queue } from "rivetkit";
import { workflow } from "rivetkit/workflow";
import { repositoryDb } from "./db/db.js";
import { REPOSITORY_QUEUE_NAMES, repositoryActions, runRepositoryWorkflow } from "./actions.js";

/** Creation input identifying one repository actor instance. */
export interface RepositoryInput {
  organizationId: string;
  repoId: string;
  remoteUrl: string;
}

/**
 * Repository actor definition: owns the per-repo SQLite database and routes
 * mutating commands through workflow-backed queues (see runRepositoryWorkflow).
 */
export const repository = actor({
  db: repositoryDb,
  // One queue per repository command name.
  queues: Object.fromEntries(REPOSITORY_QUEUE_NAMES.map((name) => [name, queue()])),
  options: {
    name: "Repository",
    icon: "folder",
    actionTimeout: 5 * 60_000,
  },
  // Identity captured at creation time from the actor input.
  createState: (_c, input: RepositoryInput) => ({
    organizationId: input.organizationId,
    repoId: input.repoId,
    remoteUrl: input.remoteUrl,
  }),
  actions: repositoryActions,
  run: workflow(runRepositoryWorkflow),
});

View file

@ -4,21 +4,21 @@ import { existsSync } from "node:fs";
import Dockerode from "dockerode";
import { SandboxAgent } from "sandbox-agent";
import { getActorRuntimeContext } from "../context.js";
import { workspaceKey } from "../keys.js";
import { organizationKey } from "../keys.js";
import { resolveSandboxProviderId } from "../../sandbox-config.js";
const SANDBOX_REPO_CWD = "/home/sandbox/workspace/repo";
const SANDBOX_REPO_CWD = "/home/sandbox/organization/repo";
const DEFAULT_LOCAL_SANDBOX_IMAGE = "rivetdev/sandbox-agent:full";
const DEFAULT_LOCAL_SANDBOX_PORT = 2468;
const dockerClient = new Dockerode({ socketPath: "/var/run/docker.sock" });
function parseTaskSandboxKey(key: readonly string[]): { workspaceId: string; taskId: string } {
if (key.length !== 4 || key[0] !== "ws" || key[2] !== "sandbox") {
function parseTaskSandboxKey(key: readonly string[]): { organizationId: string; taskId: string } {
if (key.length !== 4 || key[0] !== "org" || key[2] !== "sandbox") {
throw new Error(`Invalid task sandbox key: ${JSON.stringify(key)}`);
}
return {
workspaceId: key[1]!,
organizationId: key[1]!,
taskId: key[3]!,
};
}
@ -191,24 +191,24 @@ function sanitizeActorResult(value: unknown, seen = new WeakSet<object>()): unkn
const baseTaskSandbox = sandboxActor({
createProvider: async (c) => {
const { config } = getActorRuntimeContext();
const { workspaceId, taskId } = parseTaskSandboxKey(c.key);
const workspace = await c.client().workspace.getOrCreate(workspaceKey(workspaceId), {
createWithInput: workspaceId,
const { organizationId, taskId } = parseTaskSandboxKey(c.key);
const organization = await c.client().organization.getOrCreate(organizationKey(organizationId), {
createWithInput: organizationId,
});
const task = await workspace.getTask({ workspaceId, taskId });
const providerId = resolveSandboxProviderId(config, task.providerId);
const task = await organization.getTask({ organizationId, taskId });
const sandboxProviderId = resolveSandboxProviderId(config, task.sandboxProviderId);
if (providerId === "e2b") {
if (sandboxProviderId === "e2b") {
return e2b({
create: () => ({
template: config.providers.e2b.template ?? "sandbox-agent-full-0.3.x",
template: config.sandboxProviders.e2b.template ?? "sandbox-agent-full-0.3.x",
envs: sandboxEnvObject(),
}),
installAgents: ["claude", "codex"],
});
}
return createLocalSandboxProvider(config.providers.local.image ?? process.env.HF_LOCAL_SANDBOX_IMAGE ?? DEFAULT_LOCAL_SANDBOX_IMAGE);
return createLocalSandboxProvider(config.sandboxProviders.local.image ?? process.env.HF_LOCAL_SANDBOX_IMAGE ?? DEFAULT_LOCAL_SANDBOX_IMAGE);
},
});
@ -236,23 +236,23 @@ async function providerForConnection(c: any): Promise<any | null> {
const providerFactory = baseTaskSandbox.config.actions as Record<string, unknown>;
void providerFactory;
const { config } = getActorRuntimeContext();
const { workspaceId, taskId } = parseTaskSandboxKey(c.key);
const workspace = await c.client().workspace.getOrCreate(workspaceKey(workspaceId), {
createWithInput: workspaceId,
const { organizationId, taskId } = parseTaskSandboxKey(c.key);
const organization = await c.client().organization.getOrCreate(organizationKey(organizationId), {
createWithInput: organizationId,
});
const task = await workspace.getTask({ workspaceId, taskId });
const providerId = resolveSandboxProviderId(config, task.providerId);
const task = await organization.getTask({ organizationId, taskId });
const sandboxProviderId = resolveSandboxProviderId(config, task.sandboxProviderId);
const provider =
providerId === "e2b"
sandboxProviderId === "e2b"
? e2b({
create: () => ({
template: config.providers.e2b.template ?? "sandbox-agent-full-0.3.x",
template: config.sandboxProviders.e2b.template ?? "sandbox-agent-full-0.3.x",
envs: sandboxEnvObject(),
}),
installAgents: ["claude", "codex"],
})
: createLocalSandboxProvider(config.providers.local.image ?? process.env.HF_LOCAL_SANDBOX_IMAGE ?? DEFAULT_LOCAL_SANDBOX_IMAGE);
: createLocalSandboxProvider(config.sandboxProviders.local.image ?? process.env.HF_LOCAL_SANDBOX_IMAGE ?? DEFAULT_LOCAL_SANDBOX_IMAGE);
c.vars.provider = provider;
return provider;
@ -360,31 +360,31 @@ export const taskSandbox = actor({
}
},
async providerState(c: any): Promise<{ providerId: "e2b" | "local"; sandboxId: string; state: string; at: number }> {
async providerState(c: any): Promise<{ sandboxProviderId: "e2b" | "local"; sandboxId: string; state: string; at: number }> {
const { config } = getActorRuntimeContext();
const { taskId } = parseTaskSandboxKey(c.key);
const at = Date.now();
const providerId = resolveSandboxProviderId(config, c.state.providerName === "e2b" ? "e2b" : c.state.providerName === "docker" ? "local" : null);
const sandboxProviderId = resolveSandboxProviderId(config, c.state.providerName === "e2b" ? "e2b" : c.state.providerName === "docker" ? "local" : null);
if (c.state.sandboxDestroyed) {
return { providerId, sandboxId: taskId, state: "destroyed", at };
return { sandboxProviderId, sandboxId: taskId, state: "destroyed", at };
}
if (!c.state.sandboxId) {
return { providerId, sandboxId: taskId, state: "pending", at };
return { sandboxProviderId, sandboxId: taskId, state: "pending", at };
}
try {
const health = await baseActions.getHealth(c);
return {
providerId,
sandboxProviderId,
sandboxId: taskId,
state: health.status === "ok" ? "running" : "degraded",
at,
};
} catch {
return {
providerId,
sandboxProviderId,
sandboxId: taskId,
state: "error",
at,

View file

@ -10,6 +10,12 @@ const journal = {
tag: "0000_charming_maestro",
breakpoints: true,
},
{
idx: 1,
when: 1773810000000,
tag: "0001_sandbox_provider_columns",
breakpoints: true,
},
],
} as const;
@ -63,9 +69,13 @@ CREATE TABLE \`task_workbench_sessions\` (
\`created\` integer DEFAULT 1 NOT NULL,
\`closed\` integer DEFAULT 0 NOT NULL,
\`thinking_since_ms\` integer,
\`created_at\` integer NOT NULL,
\`created_at\` integer NOT NULL,
\`updated_at\` integer NOT NULL
);
`,
m0001: `ALTER TABLE \`task\` RENAME COLUMN \`provider_id\` TO \`sandbox_provider_id\`;
--> statement-breakpoint
ALTER TABLE \`task_sandboxes\` RENAME COLUMN \`provider_id\` TO \`sandbox_provider_id\`;
`,
} as const,
};

View file

@ -9,7 +9,7 @@ export const task = sqliteTable(
branchName: text("branch_name"),
title: text("title"),
task: text("task").notNull(),
providerId: text("provider_id").notNull(),
sandboxProviderId: text("sandbox_provider_id").notNull(),
status: text("status").notNull(),
agentType: text("agent_type").default("claude"),
prSubmitted: integer("pr_submitted").default(0),
@ -39,7 +39,7 @@ export const taskRuntime = sqliteTable(
export const taskSandboxes = sqliteTable("task_sandboxes", {
sandboxId: text("sandbox_id").notNull().primaryKey(),
providerId: text("provider_id").notNull(),
sandboxProviderId: text("sandbox_provider_id").notNull(),
sandboxActorId: text("sandbox_actor_id"),
switchTarget: text("switch_target").notNull(),
cwd: text("cwd"),

View file

@ -9,7 +9,7 @@ import type {
TaskWorkbenchSetSessionUnreadInput,
TaskWorkbenchSendMessageInput,
TaskWorkbenchUpdateDraftInput,
ProviderId,
SandboxProviderId,
} from "@sandbox-agent/foundry-shared";
import { expectQueueResponse } from "../../services/queue.js";
import { selfTask } from "../handles.js";
@ -37,15 +37,14 @@ import {
import { TASK_QUEUE_NAMES, taskWorkflowQueueName, runTaskWorkflow } from "./workflow/index.js";
export interface TaskInput {
workspaceId: string;
organizationId: string;
repoId: string;
taskId: string;
repoRemote: string;
repoLocalPath?: string;
branchName: string | null;
title: string | null;
task: string;
providerId: ProviderId;
sandboxProviderId: SandboxProviderId;
agentType: AgentType | null;
explicitTitle: string | null;
explicitBranchName: string | null;
@ -53,15 +52,15 @@ export interface TaskInput {
}
interface InitializeCommand {
providerId?: ProviderId;
sandboxProviderId?: SandboxProviderId;
}
interface TaskActionCommand {
reason?: string;
}
interface TaskTabCommand {
tabId: string;
interface TaskSessionCommand {
sessionId: string;
}
interface TaskStatusSyncCommand {
@ -101,14 +100,15 @@ interface TaskWorkbenchSendMessageCommand {
attachments: Array<any>;
}
interface TaskWorkbenchSendMessageActionInput extends TaskWorkbenchSendMessageInput {
waitForCompletion?: boolean;
}
interface TaskWorkbenchCreateSessionCommand {
model?: string;
}
interface TaskWorkbenchCreateSessionAndSendCommand {
model?: string;
text: string;
}
interface TaskWorkbenchSessionCommand {
sessionId: string;
}
@ -122,15 +122,14 @@ export const task = actor({
actionTimeout: 5 * 60_000,
},
createState: (_c, input: TaskInput) => ({
workspaceId: input.workspaceId,
organizationId: input.organizationId,
repoId: input.repoId,
taskId: input.taskId,
repoRemote: input.repoRemote,
repoLocalPath: input.repoLocalPath,
branchName: input.branchName,
title: input.title,
task: input.task,
providerId: input.providerId,
sandboxProviderId: input.sandboxProviderId,
agentType: input.agentType,
explicitTitle: input.explicitTitle,
explicitBranchName: input.explicitBranchName,
@ -143,7 +142,7 @@ export const task = actor({
const self = selfTask(c);
const result = await self.send(taskWorkflowQueueName("task.command.initialize"), cmd ?? {}, {
wait: true,
timeout: 5 * 60_000,
timeout: 10_000,
});
return expectQueueResponse<TaskRecord>(result);
},
@ -160,7 +159,7 @@ export const task = actor({
const self = selfTask(c);
const result = await self.send(taskWorkflowQueueName("task.command.attach"), cmd ?? {}, {
wait: true,
timeout: 20_000,
timeout: 10_000,
});
return expectQueueResponse<{ target: string; sessionId: string | null }>(result);
},
@ -172,7 +171,7 @@ export const task = actor({
{},
{
wait: true,
timeout: 20_000,
timeout: 10_000,
},
);
return expectQueueResponse<{ switchTarget: string }>(result);
@ -236,7 +235,7 @@ export const task = actor({
{},
{
wait: true,
timeout: 20_000,
timeout: 10_000,
},
);
},
@ -256,27 +255,40 @@ export const task = actor({
});
},
async createWorkbenchSession(c, input?: { model?: string }): Promise<{ tabId: string }> {
async createWorkbenchSession(c, input?: { model?: string }): Promise<{ sessionId: string }> {
const self = selfTask(c);
const result = await self.send(
taskWorkflowQueueName("task.command.workbench.create_session"),
{ ...(input?.model ? { model: input.model } : {}) } satisfies TaskWorkbenchCreateSessionCommand,
{
wait: true,
timeout: 5 * 60_000,
timeout: 10_000,
},
);
return expectQueueResponse<{ tabId: string }>(result);
return expectQueueResponse<{ sessionId: string }>(result);
},
/**
* Fire-and-forget: creates a workbench session and sends the initial message.
* Used by createWorkbenchTask so the caller doesn't block on session creation.
*/
async createWorkbenchSessionAndSend(c, input: { model?: string; text: string }): Promise<void> {
const self = selfTask(c);
await self.send(
taskWorkflowQueueName("task.command.workbench.create_session_and_send"),
{ model: input.model, text: input.text } satisfies TaskWorkbenchCreateSessionAndSendCommand,
{ wait: false },
);
},
async renameWorkbenchSession(c, input: TaskWorkbenchRenameSessionInput): Promise<void> {
const self = selfTask(c);
await self.send(
taskWorkflowQueueName("task.command.workbench.rename_session"),
{ sessionId: input.tabId, title: input.title } satisfies TaskWorkbenchSessionTitleCommand,
{ sessionId: input.sessionId, title: input.title } satisfies TaskWorkbenchSessionTitleCommand,
{
wait: true,
timeout: 20_000,
timeout: 10_000,
},
);
},
@ -285,10 +297,10 @@ export const task = actor({
const self = selfTask(c);
await self.send(
taskWorkflowQueueName("task.command.workbench.set_session_unread"),
{ sessionId: input.tabId, unread: input.unread } satisfies TaskWorkbenchSessionUnreadCommand,
{ sessionId: input.sessionId, unread: input.unread } satisfies TaskWorkbenchSessionUnreadCommand,
{
wait: true,
timeout: 20_000,
timeout: 10_000,
},
);
},
@ -298,13 +310,12 @@ export const task = actor({
await self.send(
taskWorkflowQueueName("task.command.workbench.update_draft"),
{
sessionId: input.tabId,
sessionId: input.sessionId,
text: input.text,
attachments: input.attachments,
} satisfies TaskWorkbenchUpdateDraftCommand,
{
wait: true,
timeout: 20_000,
wait: false,
},
);
},
@ -313,36 +324,32 @@ export const task = actor({
const self = selfTask(c);
await self.send(
taskWorkflowQueueName("task.command.workbench.change_model"),
{ sessionId: input.tabId, model: input.model } satisfies TaskWorkbenchChangeModelCommand,
{ sessionId: input.sessionId, model: input.model } satisfies TaskWorkbenchChangeModelCommand,
{
wait: true,
timeout: 20_000,
timeout: 10_000,
},
);
},
async sendWorkbenchMessage(c, input: TaskWorkbenchSendMessageActionInput): Promise<void> {
async sendWorkbenchMessage(c, input: TaskWorkbenchSendMessageInput): Promise<void> {
const self = selfTask(c);
const result = await self.send(
await self.send(
taskWorkflowQueueName("task.command.workbench.send_message"),
{
sessionId: input.tabId,
sessionId: input.sessionId,
text: input.text,
attachments: input.attachments,
} satisfies TaskWorkbenchSendMessageCommand,
{
wait: input.waitForCompletion === true,
...(input.waitForCompletion === true ? { timeout: 10 * 60_000 } : {}),
wait: false,
},
);
if (input.waitForCompletion === true) {
expectQueueResponse(result);
}
},
async stopWorkbenchSession(c, input: TaskTabCommand): Promise<void> {
async stopWorkbenchSession(c, input: TaskSessionCommand): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.workbench.stop_session"), { sessionId: input.tabId } satisfies TaskWorkbenchSessionCommand, {
await self.send(taskWorkflowQueueName("task.command.workbench.stop_session"), { sessionId: input.sessionId } satisfies TaskWorkbenchSessionCommand, {
wait: false,
});
},
@ -355,9 +362,9 @@ export const task = actor({
});
},
async closeWorkbenchSession(c, input: TaskTabCommand): Promise<void> {
async closeWorkbenchSession(c, input: TaskSessionCommand): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.workbench.close_session"), { sessionId: input.tabId } satisfies TaskWorkbenchSessionCommand, {
await self.send(taskWorkflowQueueName("task.command.workbench.close_session"), { sessionId: input.sessionId } satisfies TaskWorkbenchSessionCommand, {
wait: false,
});
},

View file

@ -3,10 +3,11 @@ import { randomUUID } from "node:crypto";
import { basename, dirname } from "node:path";
import { asc, eq } from "drizzle-orm";
import { getActorRuntimeContext } from "../context.js";
import { getOrCreateProject, getOrCreateTaskSandbox, getOrCreateWorkspace, getTaskSandbox, selfTask } from "../handles.js";
import { getOrCreateRepository, getOrCreateTaskSandbox, getOrCreateOrganization, getTaskSandbox, selfTask } from "../handles.js";
import { SANDBOX_REPO_CWD } from "../sandbox/index.js";
import { resolveSandboxProviderId } from "../../sandbox-config.js";
import { resolveWorkspaceGithubAuth } from "../../services/github-auth.js";
import { resolveOrganizationGithubAuth } from "../../services/github-auth.js";
import { githubRepoFullNameFromRemote } from "../../services/repo.js";
import { task as taskTable, taskRuntime, taskSandboxes, taskWorkbenchSessions } from "./db/schema.js";
import { getCurrentRecord } from "./workflow/common.js";
@ -172,8 +173,7 @@ async function listSessionMetaRows(c: any, options?: { includeClosed?: boolean }
const mapped = rows.map((row: any) => ({
...row,
id: row.sessionId,
sessionId: row.sandboxSessionId ?? null,
tabId: row.sessionId,
sessionId: row.sessionId,
sandboxSessionId: row.sandboxSessionId ?? null,
status: row.status ?? "ready",
errorMessage: row.errorMessage ?? null,
@ -209,8 +209,7 @@ async function readSessionMeta(c: any, sessionId: string): Promise<any | null> {
return {
...row,
id: row.sessionId,
sessionId: row.sandboxSessionId ?? null,
tabId: row.sessionId,
sessionId: row.sessionId,
sandboxSessionId: row.sandboxSessionId ?? null,
status: row.status ?? "ready",
errorMessage: row.errorMessage ?? null,
@ -227,7 +226,7 @@ async function readSessionMeta(c: any, sessionId: string): Promise<any | null> {
async function ensureSessionMeta(
c: any,
params: {
tabId: string;
sessionId: string;
sandboxSessionId?: string | null;
model?: string;
sessionName?: string;
@ -238,7 +237,7 @@ async function ensureSessionMeta(
},
): Promise<any> {
await ensureWorkbenchSessionTable(c);
const existing = await readSessionMeta(c, params.tabId);
const existing = await readSessionMeta(c, params.sessionId);
if (existing) {
return existing;
}
@ -251,7 +250,7 @@ async function ensureSessionMeta(
await c.db
.insert(taskWorkbenchSessions)
.values({
sessionId: params.tabId,
sessionId: params.sessionId,
sandboxSessionId: params.sandboxSessionId ?? null,
sessionName,
model,
@ -271,20 +270,20 @@ async function ensureSessionMeta(
})
.run();
return await readSessionMeta(c, params.tabId);
return await readSessionMeta(c, params.sessionId);
}
async function updateSessionMeta(c: any, tabId: string, values: Record<string, unknown>): Promise<any> {
await ensureSessionMeta(c, { tabId });
async function updateSessionMeta(c: any, sessionId: string, values: Record<string, unknown>): Promise<any> {
await ensureSessionMeta(c, { sessionId });
await c.db
.update(taskWorkbenchSessions)
.set({
...values,
updatedAt: Date.now(),
})
.where(eq(taskWorkbenchSessions.sessionId, tabId))
.where(eq(taskWorkbenchSessions.sessionId, sessionId))
.run();
return await readSessionMeta(c, tabId);
return await readSessionMeta(c, sessionId);
}
async function readSessionMetaBySandboxSessionId(c: any, sandboxSessionId: string): Promise<any | null> {
@ -296,33 +295,25 @@ async function readSessionMetaBySandboxSessionId(c: any, sandboxSessionId: strin
return await readSessionMeta(c, row.sessionId);
}
async function requireReadySessionMeta(c: any, tabId: string): Promise<any> {
const meta = await readSessionMeta(c, tabId);
async function requireReadySessionMeta(c: any, sessionId: string): Promise<any> {
const meta = await readSessionMeta(c, sessionId);
if (!meta) {
throw new Error(`Unknown workbench tab: ${tabId}`);
throw new Error(`Unknown workbench session: ${sessionId}`);
}
if (meta.status !== "ready" || !meta.sandboxSessionId) {
throw new Error(meta.errorMessage ?? "This workbench tab is still preparing");
throw new Error(meta.errorMessage ?? "This workbench session is still preparing");
}
return meta;
}
async function ensureReadySessionMeta(c: any, tabId: string): Promise<any> {
const meta = await readSessionMeta(c, tabId);
export function requireSendableSessionMeta(meta: any, sessionId: string): any {
if (!meta) {
throw new Error(`Unknown workbench tab: ${tabId}`);
throw new Error(`Unknown workbench session: ${sessionId}`);
}
if (meta.status === "ready" && meta.sandboxSessionId) {
return meta;
if (meta.status !== "ready" || !meta.sandboxSessionId) {
throw new Error(`Session is not ready (status: ${meta.status}). Wait for session provisioning to complete.`);
}
if (meta.status === "error") {
throw new Error(meta.errorMessage ?? "This workbench tab failed to prepare");
}
await ensureWorkbenchSession(c, tabId);
return await requireReadySessionMeta(c, tabId);
return meta;
}
function shellFragment(parts: string[]): string {
@ -339,23 +330,23 @@ async function getTaskSandboxRuntime(
): Promise<{
sandbox: any;
sandboxId: string;
providerId: string;
sandboxProviderId: string;
switchTarget: string;
cwd: string;
}> {
const { config } = getActorRuntimeContext();
const sandboxId = stableSandboxId(c);
const providerId = resolveSandboxProviderId(config, record.providerId ?? c.state.providerId ?? null);
const sandbox = await getOrCreateTaskSandbox(c, c.state.workspaceId, sandboxId, {});
const sandboxProviderId = resolveSandboxProviderId(config, record.sandboxProviderId ?? c.state.sandboxProviderId ?? null);
const sandbox = await getOrCreateTaskSandbox(c, c.state.organizationId, sandboxId, {});
const actorId = typeof sandbox.resolve === "function" ? await sandbox.resolve().catch(() => null) : null;
const switchTarget = providerId === "local" ? `sandbox://local/${sandboxId}` : `sandbox://e2b/${sandboxId}`;
const switchTarget = sandboxProviderId === "local" ? `sandbox://local/${sandboxId}` : `sandbox://e2b/${sandboxId}`;
const now = Date.now();
await c.db
.insert(taskSandboxes)
.values({
sandboxId,
providerId,
sandboxProviderId,
sandboxActorId: typeof actorId === "string" ? actorId : null,
switchTarget,
cwd: SANDBOX_REPO_CWD,
@ -366,7 +357,7 @@ async function getTaskSandboxRuntime(
.onConflictDoUpdate({
target: taskSandboxes.sandboxId,
set: {
providerId,
sandboxProviderId,
sandboxActorId: typeof actorId === "string" ? actorId : null,
switchTarget,
cwd: SANDBOX_REPO_CWD,
@ -389,7 +380,7 @@ async function getTaskSandboxRuntime(
return {
sandbox,
sandboxId,
providerId,
sandboxProviderId,
switchTarget,
cwd: SANDBOX_REPO_CWD,
};
@ -400,17 +391,10 @@ async function ensureSandboxRepo(c: any, sandbox: any, record: any): Promise<voi
throw new Error("cannot prepare a sandbox repo before the task branch exists");
}
const { driver } = getActorRuntimeContext();
const auth = await resolveWorkspaceGithubAuth(c, c.state.workspaceId);
let repoLocalPath = c.state.repoLocalPath;
if (!repoLocalPath) {
const project = await getOrCreateProject(c, c.state.workspaceId, c.state.repoId, c.state.repoRemote);
const ensured = await project.ensure({ remoteUrl: c.state.repoRemote });
repoLocalPath = ensured.localPath;
c.state.repoLocalPath = repoLocalPath;
}
const baseRef = await driver.git.remoteDefaultBaseRef(repoLocalPath);
const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId);
const repository = await getOrCreateRepository(c, c.state.organizationId, c.state.repoId, c.state.repoRemote);
const metadata = await repository.getRepositoryMetadata({});
const baseRef = metadata.defaultBranch ?? "main";
const sandboxRepoRoot = dirname(SANDBOX_REPO_CWD);
const script = [
"set -euo pipefail",
@ -665,7 +649,7 @@ async function readSessionTranscript(c: any, record: any, sessionId: string) {
return [];
}
const sandbox = getTaskSandbox(c, c.state.workspaceId, sandboxId);
const sandbox = getTaskSandbox(c, c.state.organizationId, sandboxId);
const page = await sandbox.getEvents({
sessionId,
limit: 100,
@ -681,8 +665,8 @@ async function readSessionTranscript(c: any, record: any, sessionId: string) {
}));
}
async function writeSessionTranscript(c: any, tabId: string, transcript: Array<any>): Promise<void> {
await updateSessionMeta(c, tabId, {
async function writeSessionTranscript(c: any, sessionId: string, transcript: Array<any>): Promise<void> {
await updateSessionMeta(c, sessionId, {
transcriptJson: JSON.stringify(transcript),
transcriptUpdatedAt: Date.now(),
});
@ -697,12 +681,12 @@ async function enqueueWorkbenchRefresh(
await self.send(command, body, { wait: false });
}
async function enqueueWorkbenchEnsureSession(c: any, tabId: string): Promise<void> {
async function enqueueWorkbenchEnsureSession(c: any, sessionId: string): Promise<void> {
const self = selfTask(c);
await self.send(
"task.command.workbench.ensure_session",
{
tabId,
sessionId,
},
{
wait: false,
@ -750,8 +734,8 @@ async function readPullRequestSummary(c: any, branchName: string | null) {
}
try {
const project = await getOrCreateProject(c, c.state.workspaceId, c.state.repoId, c.state.repoRemote);
return await project.getPullRequestForBranch({ branchName });
const repository = await getOrCreateRepository(c, c.state.organizationId, c.state.repoId, c.state.repoRemote);
return await repository.getPullRequestForBranch({ branchName });
} catch {
return null;
}
@ -762,7 +746,7 @@ export async function ensureWorkbenchSeeded(c: any): Promise<any> {
const record = await getCurrentRecord({ db: c.db, state: c.state });
if (record.activeSessionId) {
await ensureSessionMeta(c, {
tabId: record.activeSessionId,
sessionId: record.activeSessionId,
sandboxSessionId: record.activeSessionId,
model: defaultModelForAgent(record.agentType),
sessionName: "Session 1",
@ -791,7 +775,8 @@ function buildSessionSummary(record: any, meta: any): any {
return {
id: meta.id,
sessionId: derivedSandboxSessionId,
sessionId: meta.sessionId,
sandboxSessionId: derivedSandboxSessionId,
sessionName: meta.sessionName,
agent: agentKindForModel(meta.model),
model: meta.model,
@ -806,9 +791,8 @@ function buildSessionSummary(record: any, meta: any): any {
function buildSessionDetailFromMeta(record: any, meta: any): any {
const summary = buildSessionSummary(record, meta);
return {
sessionId: meta.tabId,
tabId: meta.tabId,
sandboxSessionId: summary.sessionId,
sessionId: meta.sessionId,
sandboxSessionId: summary.sandboxSessionId ?? null,
sessionName: summary.sessionName,
agent: summary.agent,
model: summary.model,
@ -828,7 +812,7 @@ function buildSessionDetailFromMeta(record: any, meta: any): any {
/**
* Builds a WorkbenchTaskSummary from local task actor state. Task actors push
* this to the parent workspace actor so workspace sidebar reads stay local.
* this to the parent organization actor so organization sidebar reads stay local.
*/
export async function buildTaskSummary(c: any): Promise<any> {
const record = await ensureWorkbenchSeeded(c);
@ -874,7 +858,7 @@ export async function buildTaskDetail(c: any): Promise<any> {
fileTree: gitState.fileTree,
minutesUsed: 0,
sandboxes: (record.sandboxes ?? []).map((sandbox: any) => ({
providerId: sandbox.providerId,
sandboxProviderId: sandbox.sandboxProviderId,
sandboxId: sandbox.sandboxId,
cwd: sandbox.cwd ?? null,
})),
@ -883,13 +867,13 @@ export async function buildTaskDetail(c: any): Promise<any> {
}
/**
* Builds a WorkbenchSessionDetail for a specific session tab.
* Builds a WorkbenchSessionDetail for a specific session.
*/
export async function buildSessionDetail(c: any, tabId: string): Promise<any> {
export async function buildSessionDetail(c: any, sessionId: string): Promise<any> {
const record = await ensureWorkbenchSeeded(c);
const meta = await readSessionMeta(c, tabId);
const meta = await readSessionMeta(c, sessionId);
if (!meta || meta.closed) {
throw new Error(`Unknown workbench session tab: ${tabId}`);
throw new Error(`Unknown workbench session: ${sessionId}`);
}
if (!meta.sandboxSessionId) {
@ -899,7 +883,7 @@ export async function buildSessionDetail(c: any, tabId: string): Promise<any> {
try {
const transcript = await readSessionTranscript(c, record, meta.sandboxSessionId);
if (JSON.stringify(meta.transcript ?? []) !== JSON.stringify(transcript)) {
await writeSessionTranscript(c, meta.tabId, transcript);
await writeSessionTranscript(c, meta.sessionId, transcript);
return buildSessionDetailFromMeta(record, {
...meta,
transcript,
@ -921,21 +905,21 @@ export async function getTaskDetail(c: any): Promise<any> {
return await buildTaskDetail(c);
}
export async function getSessionDetail(c: any, tabId: string): Promise<any> {
return await buildSessionDetail(c, tabId);
export async function getSessionDetail(c: any, sessionId: string): Promise<any> {
return await buildSessionDetail(c, sessionId);
}
/**
* Replaces the old notifyWorkbenchUpdated pattern.
*
* The task actor emits two kinds of updates:
* - Push summary state up to the parent workspace actor so the sidebar
* - Push summary state up to the parent organization actor so the sidebar
* materialized projection stays current.
* - Broadcast full detail/session payloads down to direct task subscribers.
*/
export async function broadcastTaskUpdate(c: any, options?: { sessionId?: string }): Promise<void> {
const workspace = await getOrCreateWorkspace(c, c.state.workspaceId);
await workspace.applyTaskSummaryUpdate({ taskSummary: await buildTaskSummary(c) });
const organization = await getOrCreateOrganization(c, c.state.organizationId);
await organization.applyTaskSummaryUpdate({ taskSummary: await buildTaskSummary(c) });
c.broadcast("taskUpdated", {
type: "taskDetailUpdated",
detail: await buildTaskDetail(c),
@ -964,8 +948,8 @@ export async function refreshWorkbenchSessionTranscript(c: any, sessionId: strin
}
const transcript = await readSessionTranscript(c, record, meta.sandboxSessionId);
await writeSessionTranscript(c, meta.tabId, transcript);
await broadcastTaskUpdate(c, { sessionId: meta.tabId });
await writeSessionTranscript(c, meta.sessionId, transcript);
await broadcastTaskUpdate(c, { sessionId: meta.sessionId });
}
export async function renameWorkbenchTask(c: any, value: string): Promise<void> {
@ -1029,31 +1013,31 @@ export async function renameWorkbenchBranch(c: any, value: string): Promise<void
.run();
c.state.branchName = nextBranch;
const project = await getOrCreateProject(c, c.state.workspaceId, c.state.repoId, c.state.repoRemote);
await project.registerTaskBranch({
const repository = await getOrCreateRepository(c, c.state.organizationId, c.state.repoId, c.state.repoRemote);
await repository.registerTaskBranch({
taskId: c.state.taskId,
branchName: nextBranch,
});
await broadcastTaskUpdate(c);
}
export async function createWorkbenchSession(c: any, model?: string): Promise<{ tabId: string }> {
const tabId = `tab-${randomUUID()}`;
export async function createWorkbenchSession(c: any, model?: string): Promise<{ sessionId: string }> {
const sessionId = `session-${randomUUID()}`;
const record = await ensureWorkbenchSeeded(c);
await ensureSessionMeta(c, {
tabId,
sessionId,
model: model ?? defaultModelForAgent(record.agentType),
sandboxSessionId: null,
status: pendingWorkbenchSessionStatus(record),
created: false,
});
await broadcastTaskUpdate(c, { sessionId: tabId });
await enqueueWorkbenchEnsureSession(c, tabId);
return { tabId };
await broadcastTaskUpdate(c, { sessionId: sessionId });
await enqueueWorkbenchEnsureSession(c, sessionId);
return { sessionId };
}
export async function ensureWorkbenchSession(c: any, tabId: string, model?: string): Promise<void> {
const meta = await readSessionMeta(c, tabId);
export async function ensureWorkbenchSession(c: any, sessionId: string, model?: string): Promise<void> {
const meta = await readSessionMeta(c, sessionId);
if (!meta || meta.closed) {
return;
}
@ -1063,12 +1047,12 @@ export async function ensureWorkbenchSession(c: any, tabId: string, model?: stri
await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", {
sessionId: meta.sandboxSessionId,
});
await broadcastTaskUpdate(c, { sessionId: tabId });
await broadcastTaskUpdate(c, { sessionId: sessionId });
return;
}
await updateSessionMeta(c, tabId, {
sandboxSessionId: meta.sandboxSessionId ?? tabId,
await updateSessionMeta(c, sessionId, {
sandboxSessionId: meta.sandboxSessionId ?? sessionId,
status: "pending_session_create",
errorMessage: null,
});
@ -1077,7 +1061,7 @@ export async function ensureWorkbenchSession(c: any, tabId: string, model?: stri
const runtime = await getTaskSandboxRuntime(c, record);
await ensureSandboxRepo(c, runtime.sandbox, record);
await runtime.sandbox.createSession({
id: meta.sandboxSessionId ?? tabId,
id: meta.sandboxSessionId ?? sessionId,
agent: agentTypeForModel(model ?? meta.model ?? defaultModelForAgent(record.agentType)),
model: model ?? meta.model ?? defaultModelForAgent(record.agentType),
sessionInit: {
@ -1085,22 +1069,22 @@ export async function ensureWorkbenchSession(c: any, tabId: string, model?: stri
},
});
await updateSessionMeta(c, tabId, {
sandboxSessionId: meta.sandboxSessionId ?? tabId,
await updateSessionMeta(c, sessionId, {
sandboxSessionId: meta.sandboxSessionId ?? sessionId,
status: "ready",
errorMessage: null,
});
await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", {
sessionId: meta.sandboxSessionId ?? tabId,
sessionId: meta.sandboxSessionId ?? sessionId,
});
} catch (error) {
await updateSessionMeta(c, tabId, {
await updateSessionMeta(c, sessionId, {
status: "error",
errorMessage: error instanceof Error ? error.message : String(error),
});
}
await broadcastTaskUpdate(c, { sessionId: tabId });
await broadcastTaskUpdate(c, { sessionId: sessionId });
}
export async function enqueuePendingWorkbenchSessions(c: any): Promise<void> {
@ -1113,7 +1097,7 @@ export async function enqueuePendingWorkbenchSessions(c: any): Promise<void> {
await self.send(
"task.command.workbench.ensure_session",
{
tabId: row.tabId,
sessionId: row.sessionId,
model: row.model,
},
{
@ -1167,7 +1151,7 @@ export async function changeWorkbenchModel(c: any, sessionId: string, model: str
let shouldEnsure = nextMeta.status === "pending_provision" || nextMeta.status === "pending_session_create" || nextMeta.status === "error";
if (shouldRecreateSessionForModelChange(nextMeta)) {
const sandbox = getTaskSandbox(c, c.state.workspaceId, stableSandboxId(c));
const sandbox = getTaskSandbox(c, c.state.organizationId, stableSandboxId(c));
await sandbox.destroySession(nextMeta.sandboxSessionId);
nextMeta = await updateSessionMeta(c, sessionId, {
sandboxSessionId: null,
@ -1179,7 +1163,7 @@ export async function changeWorkbenchModel(c: any, sessionId: string, model: str
});
shouldEnsure = true;
} else if (nextMeta.status === "ready" && nextMeta.sandboxSessionId) {
const sandbox = getTaskSandbox(c, c.state.workspaceId, stableSandboxId(c));
const sandbox = getTaskSandbox(c, c.state.organizationId, stableSandboxId(c));
if (typeof sandbox.rawSendSessionMethod === "function") {
try {
await sandbox.rawSendSessionMethod(nextMeta.sandboxSessionId, "session/set_config_option", {
@ -1204,7 +1188,7 @@ export async function changeWorkbenchModel(c: any, sessionId: string, model: str
}
export async function sendWorkbenchMessage(c: any, sessionId: string, text: string, attachments: Array<any>): Promise<void> {
const meta = await ensureReadySessionMeta(c, sessionId);
const meta = requireSendableSessionMeta(await readSessionMeta(c, sessionId), sessionId);
const record = await ensureWorkbenchSeeded(c);
const runtime = await getTaskSandboxRuntime(c, record);
await ensureSandboxRepo(c, runtime.sandbox, record);
@ -1253,7 +1237,7 @@ export async function sendWorkbenchMessage(c: any, sessionId: string, text: stri
export async function stopWorkbenchSession(c: any, sessionId: string): Promise<void> {
const meta = await requireReadySessionMeta(c, sessionId);
const sandbox = getTaskSandbox(c, c.state.workspaceId, stableSandboxId(c));
const sandbox = getTaskSandbox(c, c.state.organizationId, stableSandboxId(c));
await sandbox.destroySession(meta.sandboxSessionId);
await updateSessionMeta(c, sessionId, {
thinkingSinceMs: null,
@ -1263,7 +1247,7 @@ export async function stopWorkbenchSession(c: any, sessionId: string): Promise<v
export async function syncWorkbenchSessionStatus(c: any, sessionId: string, status: "running" | "idle" | "error", at: number): Promise<void> {
const record = await ensureWorkbenchSeeded(c);
const meta = (await readSessionMetaBySandboxSessionId(c, sessionId)) ?? (await ensureSessionMeta(c, { tabId: sessionId, sandboxSessionId: sessionId }));
const meta = (await readSessionMetaBySandboxSessionId(c, sessionId)) ?? (await ensureSessionMeta(c, { sessionId: sessionId, sandboxSessionId: sessionId }));
let changed = false;
if (record.activeSessionId === sessionId || record.activeSessionId === meta.sandboxSessionId) {
@ -1317,13 +1301,13 @@ export async function syncWorkbenchSessionStatus(c: any, sessionId: string, stat
}
if (changed) {
await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", {
sessionId,
});
if (status !== "running") {
await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_session_transcript", {
sessionId,
});
await enqueueWorkbenchRefresh(c, "task.command.workbench.refresh_derived", {});
}
await broadcastTaskUpdate(c, { sessionId: meta.tabId });
await broadcastTaskUpdate(c, { sessionId: meta.sessionId });
}
}
@ -1339,7 +1323,7 @@ export async function closeWorkbenchSession(c: any, sessionId: string): Promise<
return;
}
if (meta.sandboxSessionId) {
const sandbox = getTaskSandbox(c, c.state.workspaceId, stableSandboxId(c));
const sandbox = getTaskSandbox(c, c.state.organizationId, stableSandboxId(c));
await sandbox.destroySession(meta.sandboxSessionId);
}
await updateSessionMeta(c, sessionId, {
@ -1365,10 +1349,10 @@ export async function markWorkbenchUnread(c: any): Promise<void> {
if (!latest) {
return;
}
await updateSessionMeta(c, latest.tabId, {
await updateSessionMeta(c, latest.sessionId, {
unread: 1,
});
await broadcastTaskUpdate(c, { sessionId: latest.tabId });
await broadcastTaskUpdate(c, { sessionId: latest.sessionId });
}
export async function publishWorkbenchPr(c: any): Promise<void> {
@ -1376,17 +1360,17 @@ export async function publishWorkbenchPr(c: any): Promise<void> {
if (!record.branchName) {
throw new Error("cannot publish PR without a branch");
}
let repoLocalPath = c.state.repoLocalPath;
if (!repoLocalPath) {
const project = await getOrCreateProject(c, c.state.workspaceId, c.state.repoId, c.state.repoRemote);
const result = await project.ensure({ remoteUrl: c.state.repoRemote });
repoLocalPath = result.localPath;
c.state.repoLocalPath = repoLocalPath;
const repository = await getOrCreateRepository(c, c.state.organizationId, c.state.repoId, c.state.repoRemote);
const metadata = await repository.getRepositoryMetadata({});
const repoFullName = metadata.fullName ?? githubRepoFullNameFromRemote(c.state.repoRemote);
if (!repoFullName) {
throw new Error(`Unable to resolve GitHub repository for ${c.state.repoRemote}`);
}
const { driver } = getActorRuntimeContext();
const auth = await resolveWorkspaceGithubAuth(c, c.state.workspaceId);
const created = await driver.github.createPr(repoLocalPath, record.branchName, record.title ?? c.state.task, undefined, {
const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId);
await driver.github.createPr(repoFullName, record.branchName, record.title ?? c.state.task, undefined, {
githubToken: auth?.githubToken ?? null,
baseBranch: metadata.defaultBranch ?? undefined,
});
await c.db
.update(taskTable)

View file

@ -28,7 +28,7 @@ export async function handleAttachActivity(loopCtx: any, msg: any): Promise<void
if (record.activeSandboxId) {
try {
const sandbox = getTaskSandbox(loopCtx, loopCtx.state.workspaceId, record.activeSandboxId);
const sandbox = getTaskSandbox(loopCtx, loopCtx.state.organizationId, record.activeSandboxId);
const connection = await sandbox.sandboxAgentConnection();
if (typeof connection?.endpoint === "string" && connection.endpoint.length > 0) {
target = connection.endpoint;
@ -78,9 +78,9 @@ export async function handleArchiveActivity(loopCtx: any, msg: any): Promise<voi
if (record.activeSandboxId) {
await setTaskState(loopCtx, "archive_release_sandbox", "releasing sandbox");
void withTimeout(getTaskSandbox(loopCtx, loopCtx.state.workspaceId, record.activeSandboxId).destroy(), 45_000, "sandbox destroy").catch((error) => {
void withTimeout(getTaskSandbox(loopCtx, loopCtx.state.organizationId, record.activeSandboxId).destroy(), 45_000, "sandbox destroy").catch((error) => {
logActorWarning("task.commands", "failed to release sandbox during archive", {
workspaceId: loopCtx.state.workspaceId,
organizationId: loopCtx.state.organizationId,
repoId: loopCtx.state.repoId,
taskId: loopCtx.state.taskId,
sandboxId: record.activeSandboxId,
@ -106,7 +106,7 @@ export async function killDestroySandboxActivity(loopCtx: any): Promise<void> {
return;
}
await getTaskSandbox(loopCtx, loopCtx.state.workspaceId, record.activeSandboxId).destroy();
await getTaskSandbox(loopCtx, loopCtx.state.organizationId, record.activeSandboxId).destroy();
}
export async function killWriteDbActivity(loopCtx: any, msg: any): Promise<void> {

View file

@ -93,7 +93,7 @@ export async function getCurrentRecord(ctx: any): Promise<TaskRecord> {
branchName: taskTable.branchName,
title: taskTable.title,
task: taskTable.task,
providerId: taskTable.providerId,
sandboxProviderId: taskTable.sandboxProviderId,
status: taskTable.status,
statusMessage: taskRuntime.statusMessage,
activeSandboxId: taskRuntime.activeSandboxId,
@ -115,7 +115,7 @@ export async function getCurrentRecord(ctx: any): Promise<TaskRecord> {
const sandboxes = await db
.select({
sandboxId: taskSandboxes.sandboxId,
providerId: taskSandboxes.providerId,
sandboxProviderId: taskSandboxes.sandboxProviderId,
sandboxActorId: taskSandboxes.sandboxActorId,
switchTarget: taskSandboxes.switchTarget,
cwd: taskSandboxes.cwd,
@ -126,21 +126,21 @@ export async function getCurrentRecord(ctx: any): Promise<TaskRecord> {
.all();
return {
workspaceId: ctx.state.workspaceId,
organizationId: ctx.state.organizationId,
repoId: ctx.state.repoId,
repoRemote: ctx.state.repoRemote,
taskId: ctx.state.taskId,
branchName: row.branchName,
title: row.title,
task: row.task,
providerId: row.providerId,
sandboxProviderId: row.sandboxProviderId,
status: row.status,
statusMessage: row.statusMessage ?? null,
activeSandboxId: row.activeSandboxId ?? null,
activeSessionId: row.activeSessionId ?? null,
sandboxes: sandboxes.map((sb) => ({
sandboxId: sb.sandboxId,
providerId: sb.providerId,
sandboxProviderId: sb.sandboxProviderId,
sandboxActorId: sb.sandboxActorId ?? null,
switchTarget: sb.switchTarget,
cwd: sb.cwd ?? null,
@ -165,8 +165,8 @@ export async function getCurrentRecord(ctx: any): Promise<TaskRecord> {
export async function appendHistory(ctx: any, kind: string, payload: Record<string, unknown>): Promise<void> {
const client = ctx.client();
const history = await client.history.getOrCreate(historyKey(ctx.state.workspaceId, ctx.state.repoId), {
createWithInput: { workspaceId: ctx.state.workspaceId, repoId: ctx.state.repoId },
const history = await client.history.getOrCreate(historyKey(ctx.state.organizationId, ctx.state.repoId), {
createWithInput: { organizationId: ctx.state.organizationId, repoId: ctx.state.repoId },
});
await history.append({
kind,

View file

@ -1,14 +1,7 @@
import { Loop } from "rivetkit/workflow";
import { logActorWarning, resolveErrorMessage } from "../../logging.js";
import { getCurrentRecord } from "./common.js";
import {
initAssertNameActivity,
initBootstrapDbActivity,
initCompleteActivity,
initEnqueueProvisionActivity,
initEnsureNameActivity,
initFailedActivity,
} from "./init.js";
import { initBootstrapDbActivity, initCompleteActivity, initEnqueueProvisionActivity, initFailedActivity } from "./init.js";
import {
handleArchiveActivity,
handleAttachActivity,
@ -67,12 +60,8 @@ const commandHandlers: Record<TaskQueueName, WorkflowHandler> = {
await loopCtx.removed("init-failed", "step");
await loopCtx.removed("init-failed-v2", "step");
try {
await loopCtx.step({
name: "init-ensure-name",
timeout: 5 * 60_000,
run: async () => initEnsureNameActivity(loopCtx),
});
await loopCtx.step("init-assert-name", async () => initAssertNameActivity(loopCtx));
await loopCtx.removed("init-ensure-name", "step");
await loopCtx.removed("init-assert-name", "step");
await loopCtx.removed("init-create-sandbox", "step");
await loopCtx.removed("init-ensure-agent", "step");
await loopCtx.removed("init-start-sandbox-instance", "step");
@ -156,11 +145,31 @@ const commandHandlers: Record<TaskQueueName, WorkflowHandler> = {
}
},
"task.command.workbench.create_session_and_send": async (loopCtx, msg) => {
try {
const created = await loopCtx.step({
name: "workbench-create-session-for-send",
timeout: 5 * 60_000,
run: async () => createWorkbenchSession(loopCtx, msg.body?.model),
});
await loopCtx.step({
name: "workbench-send-initial-message",
timeout: 5 * 60_000,
run: async () => sendWorkbenchMessage(loopCtx, created.sessionId, msg.body.text, []),
});
} catch (error) {
logActorWarning("task.workflow", "create_session_and_send failed", {
error: resolveErrorMessage(error),
});
}
await msg.complete({ ok: true });
},
"task.command.workbench.ensure_session": async (loopCtx, msg) => {
await loopCtx.step({
name: "workbench-ensure-session",
timeout: 5 * 60_000,
run: async () => ensureWorkbenchSession(loopCtx, msg.body.tabId, msg.body?.model),
run: async () => ensureWorkbenchSession(loopCtx, msg.body.sessionId, msg.body?.model),
});
await msg.complete({ ok: true });
},
@ -269,7 +278,16 @@ export async function runTaskWorkflow(ctx: any): Promise<void> {
}
const handler = commandHandlers[msg.name as TaskQueueName];
if (handler) {
await handler(loopCtx, msg);
try {
await handler(loopCtx, msg);
} catch (error) {
const message = resolveErrorMessage(error);
logActorWarning("task.workflow", "task workflow command failed", {
queueName: msg.name,
error: message,
});
await msg.complete({ error: message }).catch(() => {});
}
}
return Loop.continue(undefined);
});

View file

@ -1,10 +1,8 @@
// @ts-nocheck
import { eq } from "drizzle-orm";
import { resolveCreateFlowDecision } from "../../../services/create-flow.js";
import { resolveWorkspaceGithubAuth } from "../../../services/github-auth.js";
import { getActorRuntimeContext } from "../../context.js";
import { getOrCreateHistory, getOrCreateProject, selfTask } from "../../handles.js";
import { logActorWarning, resolveErrorMessage } from "../../logging.js";
import { getOrCreateHistory, selfTask } from "../../handles.js";
import { resolveErrorMessage } from "../../logging.js";
import { defaultSandboxProviderId } from "../../../sandbox-config.js";
import { task as taskTable, taskRuntime } from "../db/schema.js";
import { TASK_ROW_ID, appendHistory, collectErrorMessages, resolveErrorDetail, setTaskState } from "./common.js";
@ -19,9 +17,8 @@ async function ensureTaskRuntimeCacheColumns(db: any): Promise<void> {
export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise<void> {
const { config } = getActorRuntimeContext();
const providerId = body?.providerId ?? loopCtx.state.providerId ?? defaultSandboxProviderId(config);
const sandboxProviderId = body?.sandboxProviderId ?? loopCtx.state.sandboxProviderId ?? defaultSandboxProviderId(config);
const now = Date.now();
const initialStatusMessage = loopCtx.state.branchName && loopCtx.state.title ? "provisioning" : "naming";
await ensureTaskRuntimeCacheColumns(loopCtx.db);
@ -32,7 +29,7 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise<
branchName: loopCtx.state.branchName,
title: loopCtx.state.title,
task: loopCtx.state.task,
providerId,
sandboxProviderId,
status: "init_bootstrap_db",
agentType: loopCtx.state.agentType ?? config.default_agent,
createdAt: now,
@ -44,7 +41,7 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise<
branchName: loopCtx.state.branchName,
title: loopCtx.state.title,
task: loopCtx.state.task,
providerId,
sandboxProviderId,
status: "init_bootstrap_db",
agentType: loopCtx.state.agentType ?? config.default_agent,
updatedAt: now,
@ -60,7 +57,7 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise<
activeSessionId: null,
activeSwitchTarget: null,
activeCwd: null,
statusMessage: initialStatusMessage,
statusMessage: "provisioning",
gitStateJson: null,
gitStateUpdatedAt: null,
provisionStage: "queued",
@ -74,7 +71,7 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise<
activeSessionId: null,
activeSwitchTarget: null,
activeCwd: null,
statusMessage: initialStatusMessage,
statusMessage: "provisioning",
provisionStage: "queued",
provisionStageUpdatedAt: now,
updatedAt: now,
@ -102,7 +99,7 @@ export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Pro
});
} catch (error) {
logActorWarning("task.init", "background provision command failed", {
workspaceId: loopCtx.state.workspaceId,
organizationId: loopCtx.state.organizationId,
repoId: loopCtx.state.repoId,
taskId: loopCtx.state.taskId,
error: resolveErrorMessage(error),
@ -111,106 +108,10 @@ export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Pro
}
}
export async function initEnsureNameActivity(loopCtx: any): Promise<void> {
await setTaskState(loopCtx, "init_ensure_name", "determining title and branch");
const existing = await loopCtx.db
.select({
branchName: taskTable.branchName,
title: taskTable.title,
})
.from(taskTable)
.where(eq(taskTable.id, TASK_ROW_ID))
.get();
if (existing?.branchName && existing?.title) {
loopCtx.state.branchName = existing.branchName;
loopCtx.state.title = existing.title;
return;
}
const { driver } = getActorRuntimeContext();
const auth = await resolveWorkspaceGithubAuth(loopCtx, loopCtx.state.workspaceId);
let repoLocalPath = loopCtx.state.repoLocalPath;
if (!repoLocalPath) {
const project = await getOrCreateProject(loopCtx, loopCtx.state.workspaceId, loopCtx.state.repoId, loopCtx.state.repoRemote);
const result = await project.ensure({ remoteUrl: loopCtx.state.repoRemote });
repoLocalPath = result.localPath;
loopCtx.state.repoLocalPath = repoLocalPath;
}
try {
await driver.git.fetch(repoLocalPath, { githubToken: auth?.githubToken ?? null });
} catch (error) {
logActorWarning("task.init", "fetch before naming failed", {
workspaceId: loopCtx.state.workspaceId,
repoId: loopCtx.state.repoId,
taskId: loopCtx.state.taskId,
error: resolveErrorMessage(error),
});
}
const remoteBranches = (await driver.git.listRemoteBranches(repoLocalPath, { githubToken: auth?.githubToken ?? null })).map(
(branch: any) => branch.branchName,
);
const project = await getOrCreateProject(loopCtx, loopCtx.state.workspaceId, loopCtx.state.repoId, loopCtx.state.repoRemote);
const reservedBranches = await project.listReservedBranches({});
const resolved = resolveCreateFlowDecision({
task: loopCtx.state.task,
explicitTitle: loopCtx.state.explicitTitle ?? undefined,
explicitBranchName: loopCtx.state.explicitBranchName ?? undefined,
localBranches: remoteBranches,
taskBranches: reservedBranches,
});
const now = Date.now();
await loopCtx.db
.update(taskTable)
.set({
branchName: resolved.branchName,
title: resolved.title,
updatedAt: now,
})
.where(eq(taskTable.id, TASK_ROW_ID))
.run();
loopCtx.state.branchName = resolved.branchName;
loopCtx.state.title = resolved.title;
loopCtx.state.explicitTitle = null;
loopCtx.state.explicitBranchName = null;
await loopCtx.db
.update(taskRuntime)
.set({
statusMessage: "provisioning",
provisionStage: "repo_prepared",
provisionStageUpdatedAt: now,
updatedAt: now,
})
.where(eq(taskRuntime.id, TASK_ROW_ID))
.run();
await project.registerTaskBranch({
taskId: loopCtx.state.taskId,
branchName: resolved.branchName,
});
await appendHistory(loopCtx, "task.named", {
title: resolved.title,
branchName: resolved.branchName,
});
}
export async function initAssertNameActivity(loopCtx: any): Promise<void> {
await setTaskState(loopCtx, "init_assert_name", "validating naming");
if (!loopCtx.state.branchName) {
throw new Error("task branchName is not initialized");
}
}
export async function initCompleteActivity(loopCtx: any, body: any): Promise<void> {
const now = Date.now();
const { config } = getActorRuntimeContext();
const providerId = body?.providerId ?? loopCtx.state.providerId ?? defaultSandboxProviderId(config);
const sandboxProviderId = body?.sandboxProviderId ?? loopCtx.state.sandboxProviderId ?? defaultSandboxProviderId(config);
await setTaskState(loopCtx, "init_complete", "task initialized");
await loopCtx.db
@ -224,12 +125,12 @@ export async function initCompleteActivity(loopCtx: any, body: any): Promise<voi
.where(eq(taskRuntime.id, TASK_ROW_ID))
.run();
const history = await getOrCreateHistory(loopCtx, loopCtx.state.workspaceId, loopCtx.state.repoId);
const history = await getOrCreateHistory(loopCtx, loopCtx.state.organizationId, loopCtx.state.repoId);
await history.append({
kind: "task.initialized",
taskId: loopCtx.state.taskId,
branchName: loopCtx.state.branchName,
payload: { providerId },
payload: { sandboxProviderId },
});
loopCtx.state.initialized = true;
@ -240,7 +141,7 @@ export async function initFailedActivity(loopCtx: any, error: unknown): Promise<
const detail = resolveErrorDetail(error);
const messages = collectErrorMessages(error);
const { config } = getActorRuntimeContext();
const providerId = loopCtx.state.providerId ?? defaultSandboxProviderId(config);
const sandboxProviderId = loopCtx.state.sandboxProviderId ?? defaultSandboxProviderId(config);
await loopCtx.db
.insert(taskTable)
@ -249,7 +150,7 @@ export async function initFailedActivity(loopCtx: any, error: unknown): Promise<
branchName: loopCtx.state.branchName ?? null,
title: loopCtx.state.title ?? null,
task: loopCtx.state.task,
providerId,
sandboxProviderId,
status: "error",
agentType: loopCtx.state.agentType ?? config.default_agent,
createdAt: now,
@ -261,7 +162,7 @@ export async function initFailedActivity(loopCtx: any, error: unknown): Promise<
branchName: loopCtx.state.branchName ?? null,
title: loopCtx.state.title ?? null,
task: loopCtx.state.task,
providerId,
sandboxProviderId,
status: "error",
agentType: loopCtx.state.agentType ?? config.default_agent,
updatedAt: now,

View file

@ -1,7 +1,7 @@
// @ts-nocheck
import { eq } from "drizzle-orm";
import { getTaskSandbox } from "../../handles.js";
import { resolveWorkspaceGithubAuth } from "../../../services/github-auth.js";
import { resolveOrganizationGithubAuth } from "../../../services/github-auth.js";
import { taskRuntime, taskSandboxes } from "../db/schema.js";
import { TASK_ROW_ID, appendHistory, getCurrentRecord } from "./common.js";
@ -49,8 +49,8 @@ export async function pushActiveBranchActivity(loopCtx: any, options: PushActive
`git push -u origin ${JSON.stringify(branchName)}`,
].join("; ");
const sandbox = getTaskSandbox(loopCtx, loopCtx.state.workspaceId, activeSandboxId);
const auth = await resolveWorkspaceGithubAuth(loopCtx, loopCtx.state.workspaceId);
const sandbox = getTaskSandbox(loopCtx, loopCtx.state.organizationId, activeSandboxId);
const auth = await resolveOrganizationGithubAuth(loopCtx, loopCtx.state.organizationId);
const result = await sandbox.runProcess({
command: "bash",
args: ["-lc", script],

View file

@ -13,6 +13,7 @@ export const TASK_QUEUE_NAMES = [
"task.command.workbench.rename_task",
"task.command.workbench.rename_branch",
"task.command.workbench.create_session",
"task.command.workbench.create_session_and_send",
"task.command.workbench.ensure_session",
"task.command.workbench.rename_session",
"task.command.workbench.set_session_unread",

View file

@ -1,6 +0,0 @@
// Drizzle Kit configuration for the workspace actor database:
// reads the actor's schema file and writes generated artifacts to `out`.
import { defineConfig } from "rivetkit/db/drizzle";
export default defineConfig({
  out: "./src/actors/workspace/db/drizzle",
  schema: "./src/actors/workspace/db/schema.ts",
});

View file

@ -1,19 +0,0 @@
import { actor, queue } from "rivetkit";
import { workflow } from "rivetkit/workflow";
import { workspaceDb } from "./db/db.js";
import { runWorkspaceWorkflow, WORKSPACE_QUEUE_NAMES, workspaceActions } from "./actions.js";
// Workspace coordinator actor: owns the workspace-scoped DB, exposes one named
// queue per workspace command, and runs the long-lived workspace workflow.
export const workspace = actor({
  db: workspaceDb,
  // One queue per known command name in WORKSPACE_QUEUE_NAMES.
  queues: Object.fromEntries(WORKSPACE_QUEUE_NAMES.map((name) => [name, queue()])),
  options: {
    name: "Workspace",
    icon: "compass",
    // 5 minutes — workspace actions may wrap slow external operations.
    actionTimeout: 5 * 60_000,
  },
  // The actor is created with a workspaceId input; it is the only created state.
  createState: (_c, workspaceId: string) => ({
    workspaceId,
  }),
  actions: workspaceActions,
  run: workflow(runWorkspaceWorkflow),
});

View file

@ -0,0 +1,13 @@
import type { AppConfig } from "@sandbox-agent/foundry-shared";
/** Fallback organization id from app config; "default" when the configured value is blank. */
export function defaultOrganization(config: AppConfig): string {
  const configured = config.organization.default.trim();
  if (configured.length === 0) {
    return "default";
  }
  return configured;
}
/** Prefer an explicitly passed organization flag; otherwise fall back to the configured default. */
export function resolveOrganization(flagOrganization: string | undefined, config: AppConfig): string {
  const flag = flagOrganization?.trim() ?? "";
  if (flag.length > 0) {
    return flag;
  }
  return defaultOrganization(config);
}

View file

@ -1,13 +0,0 @@
import type { AppConfig } from "@sandbox-agent/foundry-shared";
/** Fallback workspace id from app config; "default" when the configured value is blank. */
export function defaultWorkspace(config: AppConfig): string {
  const configured = config.workspace.default.trim();
  if (configured.length === 0) {
    return "default";
  }
  return configured;
}
/** Prefer an explicitly passed workspace flag; otherwise fall back to the configured default. */
export function resolveWorkspace(flagWorkspace: string | undefined, config: AppConfig): string {
  const flag = flagWorkspace?.trim() ?? "";
  if (flag.length > 0) {
    return flag;
  }
  return defaultWorkspace(config);
}

View file

@ -1,64 +1,12 @@
import type { BranchSnapshot } from "./integrations/git/index.js";
import type { PullRequestSnapshot } from "./integrations/github/index.js";
import {
validateRemote,
ensureCloned,
fetch,
listRemoteBranches,
remoteDefaultBaseRef,
revParse,
ensureRemoteBranch,
diffStatForBranch,
conflictsWithMain,
} from "./integrations/git/index.js";
import {
gitSpiceAvailable,
gitSpiceListStack,
gitSpiceRebaseBranch,
gitSpiceReparentBranch,
gitSpiceRestackRepo,
gitSpiceRestackSubtree,
gitSpiceSyncRepo,
gitSpiceTrackBranch,
} from "./integrations/git-spice/index.js";
import { listPullRequests, createPr, starRepository } from "./integrations/github/index.js";
export interface GitDriver {
validateRemote(remoteUrl: string, options?: { githubToken?: string | null }): Promise<void>;
ensureCloned(remoteUrl: string, targetPath: string, options?: { githubToken?: string | null }): Promise<void>;
fetch(repoPath: string, options?: { githubToken?: string | null }): Promise<void>;
listRemoteBranches(repoPath: string, options?: { githubToken?: string | null }): Promise<BranchSnapshot[]>;
remoteDefaultBaseRef(repoPath: string): Promise<string>;
revParse(repoPath: string, ref: string): Promise<string>;
ensureRemoteBranch(repoPath: string, branchName: string, options?: { githubToken?: string | null }): Promise<void>;
diffStatForBranch(repoPath: string, branchName: string): Promise<string>;
conflictsWithMain(repoPath: string, branchName: string): Promise<boolean>;
}
export interface StackBranchSnapshot {
branchName: string;
parentBranch: string | null;
}
export interface StackDriver {
available(repoPath: string): Promise<boolean>;
listStack(repoPath: string): Promise<StackBranchSnapshot[]>;
syncRepo(repoPath: string): Promise<void>;
restackRepo(repoPath: string): Promise<void>;
restackSubtree(repoPath: string, branchName: string): Promise<void>;
rebaseBranch(repoPath: string, branchName: string): Promise<void>;
reparentBranch(repoPath: string, branchName: string, parentBranch: string): Promise<void>;
trackBranch(repoPath: string, branchName: string, parentBranch: string): Promise<void>;
}
import { createPr, starRepository } from "./integrations/github/index.js";
export interface GithubDriver {
listPullRequests(repoPath: string, options?: { githubToken?: string | null }): Promise<PullRequestSnapshot[]>;
createPr(
repoPath: string,
repoFullName: string,
headBranch: string,
title: string,
body?: string,
options?: { githubToken?: string | null },
options?: { githubToken?: string | null; baseBranch?: string | null },
): Promise<{ number: number; url: string }>;
starRepository(repoFullName: string, options?: { githubToken?: string | null }): Promise<void>;
}
@ -68,37 +16,13 @@ export interface TmuxDriver {
}
export interface BackendDriver {
git: GitDriver;
stack: StackDriver;
github: GithubDriver;
tmux: TmuxDriver;
}
export function createDefaultDriver(): BackendDriver {
return {
git: {
validateRemote,
ensureCloned,
fetch,
listRemoteBranches,
remoteDefaultBaseRef,
revParse,
ensureRemoteBranch,
diffStatForBranch,
conflictsWithMain,
},
stack: {
available: gitSpiceAvailable,
listStack: gitSpiceListStack,
syncRepo: gitSpiceSyncRepo,
restackRepo: gitSpiceRestackRepo,
restackSubtree: gitSpiceRestackSubtree,
rebaseBranch: gitSpiceRebaseBranch,
reparentBranch: gitSpiceReparentBranch,
trackBranch: gitSpiceTrackBranch,
},
github: {
listPullRequests,
createPr,
starRepository,
},

View file

@ -3,14 +3,14 @@ import { cors } from "hono/cors";
import { randomUUID } from "node:crypto";
import { initActorRuntimeContext } from "./actors/context.js";
import { registry } from "./actors/index.js";
import { workspaceKey } from "./actors/keys.js";
import { organizationKey } from "./actors/keys.js";
import { loadConfig } from "./config/backend.js";
import { createBackends, createNotificationService } from "./notifications/index.js";
import { createDefaultDriver } from "./driver.js";
import { createClient } from "rivetkit/client";
import { initBetterAuthService } from "./services/better-auth.js";
import { createDefaultAppShellServices } from "./services/app-shell-runtime.js";
import { APP_SHELL_WORKSPACE_ID } from "./actors/workspace/app-shell.js";
import { APP_SHELL_ORGANIZATION_ID } from "./actors/organization/app-shell.js";
import { logger } from "./logging.js";
export interface BackendStartOptions {
@ -18,7 +18,7 @@ export interface BackendStartOptions {
port?: number;
}
interface AppWorkspaceLogContext {
interface AppOrganizationLogContext {
action?: string;
cfConnectingIp?: string;
cfRay?: string;
@ -68,8 +68,8 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
return undefined;
};
config.providers.e2b.apiKey = envFirst("E2B_API_KEY") ?? config.providers.e2b.apiKey;
config.providers.e2b.template = envFirst("HF_E2B_TEMPLATE", "E2B_TEMPLATE") ?? config.providers.e2b.template;
config.sandboxProviders.e2b.apiKey = envFirst("E2B_API_KEY") ?? config.sandboxProviders.e2b.apiKey;
config.sandboxProviders.e2b.template = envFirst("HF_E2B_TEMPLATE", "E2B_TEMPLATE") ?? config.sandboxProviders.e2b.template;
const driver = createDefaultDriver();
const backends = await createBackends(config.notify);
@ -85,7 +85,7 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
appUrl: appShellServices.appUrl,
});
const requestHeaderContext = (c: any): AppWorkspaceLogContext => ({
const requestHeaderContext = (c: any): AppOrganizationLogContext => ({
cfConnectingIp: c.req.header("cf-connecting-ip") ?? undefined,
cfRay: c.req.header("cf-ray") ?? undefined,
forwardedFor: c.req.header("x-forwarded-for") ?? undefined,
@ -164,27 +164,27 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
);
});
// Cache the app workspace actor handle for the lifetime of this backend process.
// The "app" workspace is a singleton coordinator for auth indexes, org state, and
// Cache the app organization actor handle for the lifetime of this backend process.
// The "app" organization is a singleton coordinator for auth indexes, org state, and
// billing. Caching avoids repeated getOrCreate round-trips on every HTTP request.
let cachedAppWorkspace: any | null = null;
let cachedAppOrganization: any | null = null;
const appWorkspace = async (context: AppWorkspaceLogContext = {}) => {
if (cachedAppWorkspace) return cachedAppWorkspace;
const appOrganization = async (context: AppOrganizationLogContext = {}) => {
if (cachedAppOrganization) return cachedAppOrganization;
const start = performance.now();
try {
const handle = await actorClient.workspace.getOrCreate(workspaceKey(APP_SHELL_WORKSPACE_ID), {
createWithInput: APP_SHELL_WORKSPACE_ID,
const handle = await actorClient.organization.getOrCreate(organizationKey(APP_SHELL_ORGANIZATION_ID), {
createWithInput: APP_SHELL_ORGANIZATION_ID,
});
cachedAppWorkspace = handle;
cachedAppOrganization = handle;
logger.info(
{
...context,
cache: "miss",
durationMs: Math.round((performance.now() - start) * 100) / 100,
},
"app_workspace_resolve",
"app_organization_resolve",
);
return handle;
} catch (error) {
@ -196,13 +196,13 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
errorMessage: error instanceof Error ? error.message : String(error),
errorStack: error instanceof Error ? error.stack : undefined,
},
"app_workspace_resolve_failed",
"app_organization_resolve_failed",
);
throw error;
}
};
const requestLogContext = (c: any, sessionId?: string): AppWorkspaceLogContext => ({
const requestLogContext = (c: any, sessionId?: string): AppOrganizationLogContext => ({
...requestHeaderContext(c),
method: c.req.method,
path: c.req.path,
@ -255,7 +255,7 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
if (!sessionId) {
return c.text("Unauthorized", 401);
}
const result = await (await appWorkspace(requestLogContext(c, sessionId))).finalizeAppCheckoutSession({
const result = await (await appOrganization(requestLogContext(c, sessionId))).finalizeAppCheckoutSession({
organizationId,
sessionId,
checkoutSessionId,
@ -265,7 +265,7 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
const handleStripeWebhook = async (c: any) => {
const payload = await c.req.text();
await (await appWorkspace(requestLogContext(c))).handleAppStripeWebhook({
await (await appOrganization(requestLogContext(c))).handleAppStripeWebhook({
payload,
signatureHeader: c.req.header("stripe-signature") ?? null,
});
@ -276,7 +276,7 @@ export async function startBackend(options: BackendStartOptions = {}): Promise<v
app.post("/v1/webhooks/github", async (c) => {
const payload = await c.req.text();
await (await appWorkspace(requestLogContext(c))).handleAppGithubWebhook({
await (await appOrganization(requestLogContext(c))).handleAppGithubWebhook({
payload,
signatureHeader: c.req.header("x-hub-signature-256") ?? null,
eventHeader: c.req.header("x-github-event") ?? null,

View file

@ -1,223 +0,0 @@
import { execFile } from "node:child_process";
import { promisify } from "node:util";
// Promisified child_process.execFile — used by every git-spice invocation below.
const execFileAsync = promisify(execFile);
// Per-invocation timeout for git-spice commands (2 minutes).
const DEFAULT_TIMEOUT_MS = 2 * 60_000;
// One concrete way to invoke git-spice: the executable plus any leading
// arguments (e.g. `git` with prefix ["spice"]).
interface SpiceCommand {
  command: string;
  prefix: string[];
}
// A single branch row parsed from `git-spice log --json`; parentBranch is
// null when the output reported no (or an empty) parent.
export interface SpiceStackEntry {
  branchName: string;
  parentBranch: string | null;
}
/**
 * Candidate git-spice invocations, most specific first: an explicit
 * HF_GIT_SPICE_BIN override (if set and non-blank), then `git-spice`,
 * then `git spice`.
 */
function spiceCommands(): SpiceCommand[] {
  const candidates: SpiceCommand[] = [];
  const override = process.env.HF_GIT_SPICE_BIN?.trim();
  if (override) {
    candidates.push({ command: override, prefix: [] });
  }
  candidates.push({ command: "git-spice", prefix: [] }, { command: "git", prefix: ["spice"] });
  return candidates;
}
/** Human-readable label for a candidate command, e.g. "git spice". */
function commandLabel(cmd: SpiceCommand): string {
  const parts = [cmd.command];
  parts.push(...cmd.prefix);
  return parts.join(" ");
}
/** Heuristic: does this error look like "the binary/subcommand does not exist"? */
function looksMissing(error: unknown): boolean {
  const text = error instanceof Error ? error.message : String(error);
  const markers = ["ENOENT", "not a git command", "command not found"];
  return markers.some((marker) => text.includes(marker));
}
// Execute one candidate git-spice command inside the repo.
// Color output is disabled so stdout stays machine-parseable.
async function tryRun(repoPath: string, cmd: SpiceCommand, args: string[]): Promise<{ stdout: string; stderr: string }> {
  return await execFileAsync(cmd.command, [...cmd.prefix, ...args], {
    cwd: repoPath,
    timeout: DEFAULT_TIMEOUT_MS,
    maxBuffer: 1024 * 1024 * 8, // 8 MiB output cap
    env: {
      ...process.env,
      NO_COLOR: "1",
      FORCE_COLOR: "0",
    },
  });
}
/**
 * Probe each candidate invocation with `--help` and return the first one that
 * works, or null when none does. Any probe failure (missing binary or
 * otherwise) just moves on to the next candidate; callers report a clear
 * error when nothing is usable.
 */
async function pickCommand(repoPath: string): Promise<SpiceCommand | null> {
  for (const candidate of spiceCommands()) {
    try {
      await tryRun(repoPath, candidate, ["--help"]);
      return candidate;
    } catch {
      // Try the next candidate.
    }
  }
  return null;
}
/** Run a git-spice command via the first available invocation; throws when git-spice is absent. */
async function runSpice(repoPath: string, args: string[]): Promise<{ stdout: string; stderr: string }> {
  const resolved = await pickCommand(repoPath);
  if (resolved === null) {
    throw new Error("git-spice is not available (set HF_GIT_SPICE_BIN or install git-spice)");
  }
  return await tryRun(repoPath, resolved, args);
}
/**
 * Parse `git-spice log ... --json` output (one JSON object per line) into
 * stack entries. Non-JSON lines and entries without a branch name are
 * skipped; duplicate branch names keep only their first occurrence.
 */
function parseLogJson(stdout: string): SpiceStackEntry[] {
  const body = stdout.trim();
  if (body.length === 0) {
    return [];
  }
  const parsed: SpiceStackEntry[] = [];
  for (const rawLine of body.split("\n")) {
    const candidate = rawLine.trim();
    if (!candidate.startsWith("{")) {
      continue;
    }
    let value: { name?: string; branch?: string; parent?: string | null; parentBranch?: string | null };
    try {
      value = JSON.parse(candidate);
    } catch {
      continue;
    }
    const branchName = (value.name ?? value.branch ?? "").trim();
    if (!branchName) {
      continue;
    }
    const parentRaw = value.parent ?? value.parentBranch ?? null;
    const parentBranch = parentRaw ? parentRaw.trim() || null : null;
    parsed.push({ branchName, parentBranch });
  }
  // De-duplicate, keeping the first entry per branch name.
  const seen = new Set<string>();
  const unique: SpiceStackEntry[] = [];
  for (const entry of parsed) {
    if (!seen.has(entry.branchName)) {
      seen.add(entry.branchName);
      unique.push(entry);
    }
  }
  return unique;
}
/**
 * Try each argv variant in order until one succeeds. If all fail, throw a
 * single error that lists every attempt and its failure detail.
 */
async function runFallbacks(repoPath: string, commands: string[][], errorContext: string): Promise<void> {
  const failures: string[] = [];
  for (const args of commands) {
    try {
      await runSpice(repoPath, args);
      return;
    } catch (error) {
      const detail = error instanceof Error ? error.message : String(error);
      failures.push(`${args.join(" ")} :: ${detail}`);
    }
  }
  throw new Error(`${errorContext}. attempts=${failures.join(" | ")}`);
}
/** Whether any git-spice invocation works inside this repo. */
export async function gitSpiceAvailable(repoPath: string): Promise<boolean> {
  const cmd = await pickCommand(repoPath);
  return cmd !== null;
}
/** List all tracked stack branches; returns [] when git-spice fails or is unavailable. */
export async function gitSpiceListStack(repoPath: string): Promise<SpiceStackEntry[]> {
  const args = ["log", "short", "--all", "--json", "--no-cr-status", "--no-prompt"];
  try {
    const result = await runSpice(repoPath, args);
    return parseLogJson(result.stdout);
  } catch {
    return [];
  }
}
// Run `repo sync`, trying progressively fewer flags in case a flag is
// unsupported by the installed git-spice version.
export async function gitSpiceSyncRepo(repoPath: string): Promise<void> {
  await runFallbacks(
    repoPath,
    [
      ["repo", "sync", "--restack", "--no-prompt"],
      ["repo", "sync", "--restack"],
      ["repo", "sync"],
    ],
    "git-spice repo sync failed",
  );
}
// Run `repo restack`, retrying without --no-prompt if that flag is rejected.
export async function gitSpiceRestackRepo(repoPath: string): Promise<void> {
  await runFallbacks(
    repoPath,
    [
      ["repo", "restack", "--no-prompt"],
      ["repo", "restack"],
    ],
    "git-spice repo restack failed",
  );
}
// Restack a branch's subtree, falling back across the `upstack restack` and
// `branch restack` spellings (and with/without --no-prompt).
export async function gitSpiceRestackSubtree(repoPath: string, branchName: string): Promise<void> {
  await runFallbacks(
    repoPath,
    [
      ["upstack", "restack", "--branch", branchName, "--no-prompt"],
      ["upstack", "restack", "--branch", branchName],
      ["branch", "restack", "--branch", branchName, "--no-prompt"],
      ["branch", "restack", "--branch", branchName],
    ],
    `git-spice restack subtree failed for ${branchName}`,
  );
}
// Restack a single branch via `branch restack`.
export async function gitSpiceRebaseBranch(repoPath: string, branchName: string): Promise<void> {
  await runFallbacks(
    repoPath,
    [
      ["branch", "restack", "--branch", branchName, "--no-prompt"],
      ["branch", "restack", "--branch", branchName],
    ],
    `git-spice branch restack failed for ${branchName}`,
  );
}
// Re-parent a branch onto `parentBranch`, falling back across `upstack onto`
// and `branch onto` spellings.
export async function gitSpiceReparentBranch(repoPath: string, branchName: string, parentBranch: string): Promise<void> {
  await runFallbacks(
    repoPath,
    [
      ["upstack", "onto", "--branch", branchName, parentBranch, "--no-prompt"],
      ["upstack", "onto", "--branch", branchName, parentBranch],
      ["branch", "onto", "--branch", branchName, parentBranch, "--no-prompt"],
      ["branch", "onto", "--branch", branchName, parentBranch],
    ],
    `git-spice reparent failed for ${branchName} -> ${parentBranch}`,
  );
}
// Start tracking `branchName` with `parentBranch` as its base.
export async function gitSpiceTrackBranch(repoPath: string, branchName: string, parentBranch: string): Promise<void> {
  await runFallbacks(
    repoPath,
    [
      ["branch", "track", branchName, "--base", parentBranch, "--no-prompt"],
      ["branch", "track", branchName, "--base", parentBranch],
    ],
    `git-spice track failed for ${branchName}`,
  );
}
/** Strip an "origin/" prefix from a ref name; blank input falls back to "main". */
export function normalizeBaseBranchName(ref: string): string {
  const name = ref.trim();
  if (name.length === 0) {
    return "main";
  }
  if (name.startsWith("origin/")) {
    return name.slice("origin/".length);
  }
  return name;
}
/** Label of the usable git-spice invocation (for log messages), or null when unavailable. */
export function describeSpiceCommandForLogs(repoPath: string): Promise<string | null> {
  return pickCommand(repoPath).then((cmd) => {
    if (!cmd) {
      return null;
    }
    return commandLabel(cmd);
  });
}

View file

@ -1,313 +0,0 @@
import { execFile } from "node:child_process";
import { chmodSync, existsSync, mkdirSync, mkdtempSync, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { dirname, resolve } from "node:path";
import { promisify } from "node:util";
// Promisified child_process.execFile used by every git invocation below.
const execFileAsync = promisify(execFile);
// Per-operation timeouts: remote validation is a quick probe; fetch and clone
// may move real data.
const DEFAULT_GIT_VALIDATE_REMOTE_TIMEOUT_MS = 15_000;
const DEFAULT_GIT_FETCH_TIMEOUT_MS = 2 * 60_000;
const DEFAULT_GIT_CLONE_TIMEOUT_MS = 5 * 60_000;
// Optional GitHub auth for git operations; when absent, token resolution
// falls back to environment variables (see resolveGithubToken).
interface GitAuthOptions {
  githubToken?: string | null;
}
/**
 * Resolve a GitHub token: an explicit option wins, then the GH_TOKEN /
 * GITHUB_TOKEN / HF_* environment variables. Returns null when nothing
 * non-blank is found.
 */
function resolveGithubToken(options?: GitAuthOptions): string | null {
  const raw =
    options?.githubToken ??
    process.env.GH_TOKEN ??
    process.env.GITHUB_TOKEN ??
    process.env.HF_GITHUB_TOKEN ??
    process.env.HF_GH_TOKEN ??
    null;
  if (raw == null) {
    return null;
  }
  const token = raw.trim();
  return token.length > 0 ? token : null;
}
// Process-lifetime cache for the generated askpass script path.
let cachedAskpassPath: string | null = null;
// Write (once per process) a small askpass shell script that answers git's
// credential prompts from environment variables, and return its path.
function ensureAskpassScript(): string {
  if (cachedAskpassPath) {
    return cachedAskpassPath;
  }
  const dir = mkdtempSync(resolve(tmpdir(), "foundry-git-askpass-"));
  const path = resolve(dir, "askpass.sh");
  // Git invokes $GIT_ASKPASS with the prompt string as argv[1]. Provide both username and password.
  // We avoid embedding the token in this file; it is read from env at runtime.
  const content = [
    "#!/bin/sh",
    'prompt="$1"',
    // Prefer GH_TOKEN/GITHUB_TOKEN but support HF_* aliases too.
    'token="${GH_TOKEN:-${GITHUB_TOKEN:-${HF_GITHUB_TOKEN:-${HF_GH_TOKEN:-}}}}"',
    'case "$prompt" in',
    ' *Username*) echo "x-access-token" ;;',
    ' *Password*) echo "$token" ;;',
    ' *) echo "" ;;',
    "esac",
    "",
  ].join("\n");
  writeFileSync(path, content, "utf8");
  // Owner-only rwx: the script is executed by git on our behalf.
  chmodSync(path, 0o700);
  cachedAskpassPath = path;
  return path;
}
/**
 * Environment for spawned git processes: inherits the current env, disables
 * interactive credential prompts, and wires in askpass-based token auth when
 * a GitHub token can be resolved.
 */
function gitEnv(options?: GitAuthOptions): Record<string, string> {
  const env: Record<string, string> = { ...(process.env as Record<string, string>) };
  env.GIT_TERMINAL_PROMPT = "0";
  const token = resolveGithubToken(options);
  if (token !== null) {
    env.GIT_ASKPASS = ensureAskpassScript();
    // Some tooling expects these vars; keep them aligned.
    env.GH_TOKEN = token;
    env.GITHUB_TOKEN = token;
  }
  return env;
}
// Persist GitHub token auth into the repo's local git config so plain `git`
// invocations in this repo authenticate via an HTTP extraheader.
// No-op when no token is available.
async function configureGithubAuth(repoPath: string, options?: GitAuthOptions): Promise<void> {
  const token = resolveGithubToken(options);
  if (!token) {
    return;
  }
  // Basic credentials in the x-access-token form GitHub accepts for tokens.
  const authHeader = Buffer.from(`x-access-token:${token}`, "utf8").toString("base64");
  // Set an empty local credential.helper so the extraheader below is used.
  await execFileAsync("git", ["-C", repoPath, "config", "--local", "credential.helper", ""], {
    env: gitEnv(options),
  });
  await execFileAsync("git", ["-C", repoPath, "config", "--local", "http.https://github.com/.extraheader", `AUTHORIZATION: basic ${authHeader}`], {
    env: gitEnv(options),
  });
}
// A remote branch and the commit SHA it pointed at when listed.
export interface BranchSnapshot {
  branchName: string;
  commitSha: string;
}
// Fetch from origin with pruning; --no-auto-gc skips post-fetch auto gc.
// NOTE: this export shadows the global `fetch` within this module.
export async function fetch(repoPath: string, options?: GitAuthOptions): Promise<void> {
  await execFileAsync("git", ["-C", repoPath, "fetch", "--prune", "--no-auto-gc"], {
    timeout: DEFAULT_GIT_FETCH_TIMEOUT_MS,
    env: gitEnv(options),
  });
}
/** Resolve a ref to its commit SHA via `git rev-parse`. */
export async function revParse(repoPath: string, ref: string): Promise<string> {
  const result = await execFileAsync("git", ["-C", repoPath, "rev-parse", ref], { env: gitEnv() });
  return result.stdout.trim();
}
// Verify that `remoteUrl` is a reachable git remote by listing its HEAD.
// Throws a wrapped error (including the underlying git detail) on failure.
export async function validateRemote(remoteUrl: string, options?: GitAuthOptions): Promise<void> {
  const remote = remoteUrl.trim();
  if (!remote) {
    throw new Error("remoteUrl is required");
  }
  try {
    await execFileAsync("git", ["ls-remote", "--exit-code", remote, "HEAD"], {
      // This command does not need repo context. Running from a neutral directory
      // avoids inheriting broken worktree .git indirection inside dev containers.
      cwd: tmpdir(),
      maxBuffer: 1024 * 1024,
      timeout: DEFAULT_GIT_VALIDATE_REMOTE_TIMEOUT_MS,
      env: gitEnv(options),
    });
  } catch (error) {
    const detail = error instanceof Error ? error.message : String(error);
    throw new Error(`git remote validation failed: ${detail}`);
  }
}
/** True when the directory contains a `.git` entry. */
function isGitRepo(path: string): boolean {
  const gitDir = resolve(path, ".git");
  return existsSync(gitDir);
}
// Make sure `targetPath` is a clone of `remoteUrl`:
// - existing repo: realign origin, refresh auth config, fetch;
// - missing path: clone, configure auth, fetch, and ensure a local base branch.
// Throws when the path exists but is not a git repo.
export async function ensureCloned(remoteUrl: string, targetPath: string, options?: GitAuthOptions): Promise<void> {
  const remote = remoteUrl.trim();
  if (!remote) {
    throw new Error("remoteUrl is required");
  }
  if (existsSync(targetPath)) {
    if (!isGitRepo(targetPath)) {
      throw new Error(`targetPath exists but is not a git repo: ${targetPath}`);
    }
    // Keep origin aligned with the configured remote URL.
    await execFileAsync("git", ["-C", targetPath, "remote", "set-url", "origin", remote], {
      maxBuffer: 1024 * 1024,
      timeout: DEFAULT_GIT_FETCH_TIMEOUT_MS,
      env: gitEnv(options),
    });
    await configureGithubAuth(targetPath, options);
    await fetch(targetPath, options);
    return;
  }
  mkdirSync(dirname(targetPath), { recursive: true });
  await execFileAsync("git", ["clone", remote, targetPath], {
    maxBuffer: 1024 * 1024 * 8,
    timeout: DEFAULT_GIT_CLONE_TIMEOUT_MS,
    env: gitEnv(options),
  });
  await configureGithubAuth(targetPath, options);
  await fetch(targetPath, options);
  await ensureLocalBaseBranch(targetPath);
}
/** True when the repo has at least one local branch; false on any git error. */
async function hasLocalBranches(repoPath: string): Promise<boolean> {
  try {
    const { stdout } = await execFileAsync("git", ["-C", repoPath, "for-each-ref", "--format=%(refname:short)", "refs/heads"], {
      env: gitEnv(),
    });
    for (const line of stdout.split("\n")) {
      if (line.trim()) {
        return true;
      }
    }
    return false;
  } catch {
    return false;
  }
}
// If the repo has no local branches yet, create one tracking the remote
// default base ref (e.g. check out a local "main" from "origin/main").
async function ensureLocalBaseBranch(repoPath: string): Promise<void> {
  if (await hasLocalBranches(repoPath)) {
    return;
  }
  const baseRef = await remoteDefaultBaseRef(repoPath);
  const localBranch = baseRef.replace(/^origin\//, "");
  await execFileAsync("git", ["-C", repoPath, "checkout", "-B", localBranch, baseRef], {
    maxBuffer: 1024 * 1024,
    timeout: DEFAULT_GIT_FETCH_TIMEOUT_MS,
    env: gitEnv(),
  });
}
// Resolve the remote's default base ref (e.g. "origin/main").
// Order: origin/HEAD symbolic ref, then common candidate refs, and finally
// a hard fallback of "origin/main".
export async function remoteDefaultBaseRef(repoPath: string): Promise<string> {
  try {
    const { stdout } = await execFileAsync("git", ["-C", repoPath, "symbolic-ref", "refs/remotes/origin/HEAD"], { env: gitEnv() });
    const ref = stdout.trim(); // refs/remotes/origin/main
    const match = ref.match(/^refs\/remotes\/(.+)$/);
    if (match?.[1]) {
      return match[1];
    }
  } catch {
    // fall through
  }
  const candidates = ["origin/main", "origin/master", "main", "master"];
  for (const ref of candidates) {
    try {
      await execFileAsync("git", ["-C", repoPath, "rev-parse", "--verify", ref], { env: gitEnv() });
      return ref;
    } catch {
      continue;
    }
  }
  return "origin/main";
}
// Fetch, then list origin branches with their commit SHAs.
// Drops blank rows, the HEAD pointer, and rows without a SHA.
export async function listRemoteBranches(repoPath: string, options?: GitAuthOptions): Promise<BranchSnapshot[]> {
  await fetch(repoPath, options);
  const { stdout } = await execFileAsync("git", ["-C", repoPath, "for-each-ref", "--format=%(refname:short) %(objectname)", "refs/remotes/origin"], {
    maxBuffer: 1024 * 1024,
    env: gitEnv(options),
  });
  return stdout
    .trim()
    .split("\n")
    .filter((line) => line.trim().length > 0)
    .map((line) => {
      // "<origin/branch> <sha>" — strip the origin/ prefix from the ref name.
      const [refName, commitSha] = line.trim().split(/\s+/, 2);
      const short = (refName ?? "").trim();
      const branchName = short.replace(/^origin\//, "");
      return { branchName, commitSha: commitSha ?? "" };
    })
    .filter((row) => row.branchName.length > 0 && row.branchName !== "HEAD" && row.branchName !== "origin" && row.commitSha.length > 0);
}
/** Whether a fetched origin ref exists for `branchName`. */
async function remoteBranchExists(repoPath: string, branchName: string): Promise<boolean> {
  const ref = `refs/remotes/origin/${branchName}`;
  try {
    await execFileAsync("git", ["-C", repoPath, "show-ref", "--verify", ref], { env: gitEnv() });
  } catch {
    return false;
  }
  return true;
}
// Ensure `branchName` exists on origin: when missing, push the default base
// ref to it, then re-fetch so the new remote ref is visible locally.
export async function ensureRemoteBranch(repoPath: string, branchName: string, options?: GitAuthOptions): Promise<void> {
  await fetch(repoPath, options);
  await ensureLocalBaseBranch(repoPath);
  if (await remoteBranchExists(repoPath, branchName)) {
    return;
  }
  const baseRef = await remoteDefaultBaseRef(repoPath);
  await execFileAsync("git", ["-C", repoPath, "push", "origin", `${baseRef}:refs/heads/${branchName}`], {
    maxBuffer: 1024 * 1024 * 2,
    env: gitEnv(options),
  });
  await fetch(repoPath, options);
}
// Summarize origin/<branch> against the default base as "+<ins>/-<del>".
// Three-dot diff compares against the merge base. Returns "+0/-0" for an
// empty diff or on any git failure.
export async function diffStatForBranch(repoPath: string, branchName: string): Promise<string> {
  try {
    const baseRef = await remoteDefaultBaseRef(repoPath);
    const headRef = `origin/${branchName}`;
    const { stdout } = await execFileAsync("git", ["-C", repoPath, "diff", "--shortstat", `${baseRef}...${headRef}`], {
      maxBuffer: 1024 * 1024,
      env: gitEnv(),
    });
    const trimmed = stdout.trim();
    if (!trimmed) {
      return "+0/-0";
    }
    // Parse "N insertions(+), M deletions(-)" from the shortstat line.
    const insertMatch = trimmed.match(/(\d+)\s+insertion/);
    const deleteMatch = trimmed.match(/(\d+)\s+deletion/);
    const insertions = insertMatch ? insertMatch[1] : "0";
    const deletions = deleteMatch ? deleteMatch[1] : "0";
    return `+${insertions}/-${deletions}`;
  } catch {
    return "+0/-0";
  }
}
// Conflict probe via `git merge-tree --write-tree` (git 2.38+): a non-zero
// exit is treated as "conflicts". NOTE(review): merge-tree can also exit
// non-zero for unrelated errors, so this may over-report conflicts — confirm.
// Any outer failure (e.g. base ref resolution) reports "no conflicts".
export async function conflictsWithMain(repoPath: string, branchName: string): Promise<boolean> {
  try {
    const baseRef = await remoteDefaultBaseRef(repoPath);
    const headRef = `origin/${branchName}`;
    // Use merge-tree (git 2.38+) for a clean conflict check.
    try {
      await execFileAsync("git", ["-C", repoPath, "merge-tree", "--write-tree", "--no-messages", baseRef, headRef], { env: gitEnv() });
      // If merge-tree exits 0, no conflicts. Non-zero exit means conflicts.
      return false;
    } catch {
      // merge-tree exits non-zero when there are conflicts
      return true;
    }
  } catch {
    return false;
  }
}
/**
 * Extract the owner segment from the repo's origin URL (SSH or HTTPS forms).
 * Returns "" when the URL cannot be read or matched.
 */
export async function getOriginOwner(repoPath: string): Promise<string> {
  let originUrl: string;
  try {
    const { stdout } = await execFileAsync("git", ["-C", repoPath, "remote", "get-url", "origin"], { env: gitEnv() });
    originUrl = stdout.trim();
  } catch {
    return "";
  }
  // Handle SSH: git@github.com:owner/repo.git (this pattern also matches
  // path-style HTTPS URLs ending in owner/repo).
  const sshMatch = originUrl.match(/[:\/]([^\/]+)\/[^\/]+(?:\.git)?$/);
  if (sshMatch) {
    return sshMatch[1] ?? "";
  }
  // Handle HTTPS: https://github.com/owner/repo.git
  const httpsMatch = originUrl.match(/\/\/[^\/]+\/([^\/]+)\//);
  if (httpsMatch) {
    return httpsMatch[1] ?? "";
  }
  return "";
}

View file

@ -1,262 +1,80 @@
import { execFile } from "node:child_process";
import { promisify } from "node:util";
const execFileAsync = promisify(execFile);
interface GithubAuthOptions {
githubToken?: string | null;
baseBranch?: string | null;
}
function ghEnv(options?: GithubAuthOptions): Record<string, string> {
const env: Record<string, string> = { ...(process.env as Record<string, string>) };
function authHeaders(options?: GithubAuthOptions): HeadersInit {
const token = options?.githubToken?.trim();
if (token) {
env.GH_TOKEN = token;
env.GITHUB_TOKEN = token;
if (!token) {
throw new Error("GitHub token is required for this operation");
}
return env;
}
export interface PullRequestSnapshot {
number: number;
headRefName: string;
state: string;
title: string;
url: string;
author: string;
isDraft: boolean;
ciStatus: string | null;
reviewStatus: string | null;
reviewer: string | null;
}
interface GhPrListItem {
number: number;
headRefName: string;
state: string;
title: string;
url?: string;
author?: { login?: string };
isDraft?: boolean;
statusCheckRollup?: Array<{
state?: string;
status?: string;
conclusion?: string;
__typename?: string;
}>;
reviews?: Array<{
state?: string;
author?: { login?: string };
}>;
}
function parseCiStatus(checks: GhPrListItem["statusCheckRollup"]): string | null {
if (!checks || checks.length === 0) return null;
let total = 0;
let successes = 0;
let hasRunning = false;
for (const check of checks) {
total++;
const conclusion = check.conclusion?.toUpperCase();
const state = check.state?.toUpperCase();
const status = check.status?.toUpperCase();
if (conclusion === "SUCCESS" || state === "SUCCESS") {
successes++;
} else if (status === "IN_PROGRESS" || status === "QUEUED" || status === "PENDING" || state === "PENDING") {
hasRunning = true;
}
}
if (hasRunning && successes < total) {
return "running";
}
return `${successes}/${total}`;
}
function parseReviewStatus(reviews: GhPrListItem["reviews"]): { status: string | null; reviewer: string | null } {
if (!reviews || reviews.length === 0) {
return { status: null, reviewer: null };
}
// Build a map of latest review per author
const latestByAuthor = new Map<string, { state: string; login: string }>();
for (const review of reviews) {
const login = review.author?.login ?? "unknown";
const state = review.state?.toUpperCase() ?? "";
if (state === "COMMENTED") continue; // Skip comments, only track actionable reviews
latestByAuthor.set(login, { state, login });
}
// Check for CHANGES_REQUESTED first (takes priority), then APPROVED
for (const [, entry] of latestByAuthor) {
if (entry.state === "CHANGES_REQUESTED") {
return { status: "CHANGES_REQUESTED", reviewer: entry.login };
}
}
for (const [, entry] of latestByAuthor) {
if (entry.state === "APPROVED") {
return { status: "APPROVED", reviewer: entry.login };
}
}
// If there are reviews but none are APPROVED or CHANGES_REQUESTED
if (latestByAuthor.size > 0) {
const first = latestByAuthor.values().next().value;
return { status: "PENDING", reviewer: first?.login ?? null };
}
return { status: null, reviewer: null };
}
function snapshotFromGhItem(item: GhPrListItem): PullRequestSnapshot {
const { status: reviewStatus, reviewer } = parseReviewStatus(item.reviews);
return {
number: item.number,
headRefName: item.headRefName,
state: item.state,
title: item.title,
url: item.url ?? "",
author: item.author?.login ?? "",
isDraft: item.isDraft ?? false,
ciStatus: parseCiStatus(item.statusCheckRollup),
reviewStatus,
reviewer,
Accept: "application/vnd.github+json",
Authorization: `Bearer ${token}`,
"X-GitHub-Api-Version": "2022-11-28",
};
}
const PR_JSON_FIELDS = "number,headRefName,state,title,url,author,isDraft,statusCheckRollup,reviews";
export async function listPullRequests(repoPath: string, options?: GithubAuthOptions): Promise<PullRequestSnapshot[]> {
try {
const { stdout } = await execFileAsync("gh", ["pr", "list", "--json", PR_JSON_FIELDS, "--limit", "200"], {
maxBuffer: 1024 * 1024 * 4,
cwd: repoPath,
env: ghEnv(options),
});
const parsed = JSON.parse(stdout) as GhPrListItem[];
return parsed.map((item) => {
// Handle fork PRs where headRefName may contain "owner:branch"
const headRefName = item.headRefName.includes(":") ? (item.headRefName.split(":").pop() ?? item.headRefName) : item.headRefName;
return snapshotFromGhItem({ ...item, headRefName });
});
} catch {
return [];
}
}
export async function getPrInfo(repoPath: string, branchName: string, options?: GithubAuthOptions): Promise<PullRequestSnapshot | null> {
try {
const { stdout } = await execFileAsync("gh", ["pr", "view", branchName, "--json", PR_JSON_FIELDS], {
maxBuffer: 1024 * 1024 * 4,
cwd: repoPath,
env: ghEnv(options),
});
const item = JSON.parse(stdout) as GhPrListItem;
return snapshotFromGhItem(item);
} catch {
return null;
}
async function githubRequest(path: string, init: RequestInit, options?: GithubAuthOptions): Promise<Response> {
return await fetch(`https://api.github.com${path}`, {
...init,
headers: {
...authHeaders(options),
...(init.headers ?? {}),
},
});
}
export async function createPr(
repoPath: string,
repoFullName: string,
headBranch: string,
title: string,
body?: string,
options?: GithubAuthOptions,
): Promise<{ number: number; url: string }> {
const args = ["pr", "create", "--title", title, "--head", headBranch];
if (body) {
args.push("--body", body);
} else {
args.push("--body", "");
const baseBranch = options?.baseBranch?.trim() || "main";
const response = await githubRequest(
`/repos/${repoFullName}/pulls`,
{
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({
title,
head: headBranch,
base: baseBranch,
body: body ?? "",
}),
},
options,
);
const payload = (await response.json()) as { number?: number; html_url?: string; message?: string };
if (!response.ok || !payload.number || !payload.html_url) {
throw new Error(payload.message ?? `Failed to create pull request for ${repoFullName}`);
}
const { stdout } = await execFileAsync("gh", args, {
maxBuffer: 1024 * 1024,
cwd: repoPath,
env: ghEnv(options),
});
// gh pr create outputs the PR URL on success
const url = stdout.trim();
// Extract PR number from URL: https://github.com/owner/repo/pull/123
const numberMatch = url.match(/\/pull\/(\d+)/);
const number = numberMatch ? parseInt(numberMatch[1]!, 10) : 0;
return { number, url };
return {
number: payload.number,
url: payload.html_url,
};
}
/**
 * Star `repoFullName` on GitHub via the `gh api` CLI.
 * Rethrows with the CLI's message, or a generic auth hint when the thrown
 * value is not an Error.
 */
export async function starRepository(repoFullName: string, options?: GithubAuthOptions): Promise<void> {
  try {
    await execFileAsync("gh", ["api", "--method", "PUT", `user/starred/${repoFullName}`], {
      maxBuffer: 1024 * 1024,
      env: ghEnv(options),
    });
  } catch (error) {
    if (error instanceof Error) {
      throw new Error(error.message);
    }
    throw new Error(`Failed to star GitHub repository ${repoFullName}. Ensure GitHub auth is configured for the backend.`);
  }
}
/**
 * Determine which merge method the repository allows, preferring
 * squash > rebase > merge. Falls back to "squash" on any failure
 * (missing gh, no repo, API error).
 */
export async function getAllowedMergeMethod(repoPath: string, options?: GithubAuthOptions): Promise<"squash" | "rebase" | "merge"> {
  try {
    // Resolve owner/name for the checkout, then query the repo settings.
    const repoView = await execFileAsync("gh", ["repo", "view", "--json", "owner,name"], { cwd: repoPath, env: ghEnv(options) });
    const repo = JSON.parse(repoView.stdout) as { owner: { login: string }; name: string };
    const repoFullName = `${repo.owner.login}/${repo.name}`;
    const flags = await execFileAsync("gh", ["api", `repos/${repoFullName}`, "--jq", ".allow_squash_merge, .allow_rebase_merge, .allow_merge_commit"], {
      maxBuffer: 1024 * 1024,
      cwd: repoPath,
      env: ghEnv(options),
    });
    // jq emits one boolean per line in the order requested above.
    const [allowSquash, allowRebase, allowMerge] = flags.stdout
      .trim()
      .split("\n")
      .map((line) => line.trim() === "true");
    if (allowSquash) return "squash";
    if (allowRebase) return "rebase";
    if (allowMerge) return "merge";
    return "squash";
  } catch {
    return "squash";
  }
}
/**
 * Merge PR `prNumber` using whichever method the repo allows, and delete
 * the source branch afterwards. Errors from `gh` propagate to the caller.
 */
export async function mergePr(repoPath: string, prNumber: number, options?: GithubAuthOptions): Promise<void> {
  const method = await getAllowedMergeMethod(repoPath, options);
  const args = ["pr", "merge", String(prNumber), `--${method}`, "--delete-branch"];
  await execFileAsync("gh", args, { cwd: repoPath, env: ghEnv(options) });
}
/**
 * True when the PR for `branchName` reports state MERGED.
 * Any `gh` failure (no PR, no auth) is treated as "not merged".
 */
export async function isPrMerged(repoPath: string, branchName: string, options?: GithubAuthOptions): Promise<boolean> {
  try {
    const result = await execFileAsync("gh", ["pr", "view", branchName, "--json", "state"], { cwd: repoPath, env: ghEnv(options) });
    const { state } = JSON.parse(result.stdout) as { state: string };
    return state.toUpperCase() === "MERGED";
  } catch {
    return false;
  }
}
export async function getPrTitle(repoPath: string, branchName: string): Promise<string | null> {
try {
const { stdout } = await execFileAsync("gh", ["pr", "view", branchName, "--json", "title"], { cwd: repoPath });
const parsed = JSON.parse(stdout) as { title: string };
return parsed.title;
} catch {
return null;
const response = await githubRequest(
`/user/starred/${repoFullName}`,
{
method: "PUT",
headers: {
"Content-Length": "0",
},
},
options,
);
if (!response.ok) {
const payload = (await response.json().catch(() => null)) as { message?: string } | null;
throw new Error(payload?.message ?? `Failed to star GitHub repository ${repoFullName}`);
}
}

View file

@ -1,140 +0,0 @@
import { execFile } from "node:child_process";
import { promisify } from "node:util";

// Promise-based execFile used by every `gt` wrapper below; rejects on
// non-zero exit, which the wrappers translate into booleans or rethrow.
const execFileAsync = promisify(execFile);
/**
 * Probe whether the Graphite CLI works in this repo: `gt trunk` only
 * succeeds when `gt` is installed and the repo is initialized for it.
 */
export async function graphiteAvailable(repoPath: string): Promise<boolean> {
  try {
    await execFileAsync("gt", ["trunk"], { cwd: repoPath });
  } catch {
    return false;
  }
  return true;
}
/**
 * Run `gt get <branchName>` and report success/failure as a boolean
 * instead of throwing.
 */
export async function graphiteGet(repoPath: string, branchName: string): Promise<boolean> {
  try {
    await execFileAsync("gt", ["get", branchName], { cwd: repoPath });
  } catch {
    return false;
  }
  return true;
}
// Run `gt create <branchName>` in the repo; errors propagate to the caller.
export async function graphiteCreateBranch(repoPath: string, branchName: string): Promise<void> {
  await execFileAsync("gt", ["create", branchName], { cwd: repoPath });
}
// Run `gt checkout <branchName>` in the repo; errors propagate to the caller.
export async function graphiteCheckout(repoPath: string, branchName: string): Promise<void> {
  await execFileAsync("gt", ["checkout", branchName], { cwd: repoPath });
}
// Run `gt submit --no-edit` (non-interactive) in the repo; errors propagate.
export async function graphiteSubmit(repoPath: string): Promise<void> {
  await execFileAsync("gt", ["submit", "--no-edit"], { cwd: repoPath });
}
// Run `gt merge <branchName>` in the repo; errors propagate to the caller.
export async function graphiteMergeBranch(repoPath: string, branchName: string): Promise<void> {
  await execFileAsync("gt", ["merge", branchName], { cwd: repoPath });
}
// Run `gt abandon <branchName>` in the repo; errors propagate to the caller.
export async function graphiteAbandon(repoPath: string, branchName: string): Promise<void> {
  await execFileAsync("gt", ["abandon", branchName], { cwd: repoPath });
}
/** One branch in a Graphite stack, with its parent (null for trunk/unknown). */
export interface GraphiteStackEntry {
  branchName: string;
  parentBranch: string | null;
}
/**
 * Read the Graphite stack for the repo.
 *
 * Strategy: prefer `gt log --json`; when that fails (older gt without
 * JSON support), fall back to parsing the indented tree output of
 * `gt log`, inferring parent/child from indentation depth. Returns []
 * when both attempts fail.
 */
export async function graphiteGetStack(repoPath: string): Promise<GraphiteStackEntry[]> {
  try {
    // Try JSON output first
    const { stdout } = await execFileAsync("gt", ["log", "--json"], {
      cwd: repoPath,
      maxBuffer: 1024 * 1024,
    });
    // Field names vary between gt versions; accept both spellings.
    const parsed = JSON.parse(stdout) as Array<{
      branch?: string;
      name?: string;
      parent?: string;
      parentBranch?: string;
    }>;
    return parsed.map((entry) => ({
      branchName: entry.branch ?? entry.name ?? "",
      parentBranch: entry.parent ?? entry.parentBranch ?? null,
    }));
  } catch {
    // Fall back to text parsing of `gt log`
    try {
      const { stdout } = await execFileAsync("gt", ["log"], {
        cwd: repoPath,
        maxBuffer: 1024 * 1024,
      });
      const entries: GraphiteStackEntry[] = [];
      const lines = stdout.split("\n").filter((l) => l.trim().length > 0);
      // Parse indented tree output: each line has tree chars (|, /, \, -, etc.)
      // followed by branch names. Build parent-child from indentation level.
      const branchStack: string[] = [];
      for (const line of lines) {
        // Strip ANSI color codes
        const clean = line.replace(/\x1b\[[0-9;]*m/g, "");
        // Extract branch name: skip tree characters and whitespace
        const branchMatch = clean.match(/[│├└─|/\\*\s]*(?:◉|○|●)?\s*(.+)/);
        if (!branchMatch) continue;
        const branchName = branchMatch[1]!.trim();
        if (!branchName || branchName.startsWith("(") || branchName === "") continue;
        // Determine indentation level by counting leading whitespace/tree chars
        // (assumes two columns per depth level — TODO confirm against gt output)
        const indent = clean.search(/[a-zA-Z0-9]/);
        const level = Math.max(0, Math.floor(indent / 2));
        // Trim stack to current level
        while (branchStack.length > level) {
          branchStack.pop();
        }
        const parentBranch = branchStack.length > 0 ? (branchStack[branchStack.length - 1] ?? null) : null;
        entries.push({ branchName, parentBranch });
        branchStack.push(branchName);
      }
      return entries;
    } catch {
      return [];
    }
  }
}
/**
 * Resolve the parent branch of `branchName`. First tries to scrape a
 * "parent: <name>" line from `gt get` output, then falls back to the
 * stack listing from graphiteGetStack. Returns null when unknown.
 */
export async function graphiteGetParent(repoPath: string, branchName: string): Promise<string | null> {
  try {
    // `gt get <branchName>` may print parent info we can scrape.
    const result = await execFileAsync("gt", ["get", branchName], {
      cwd: repoPath,
      maxBuffer: 1024 * 1024,
    });
    const match = result.stdout.match(/parent:\s*(\S+)/i);
    if (match) {
      return match[1] ?? null;
    }
  } catch {
    // Ignore and fall through to the stack-based lookup below.
  }
  try {
    const stack = await graphiteGetStack(repoPath);
    return stack.find((entry) => entry.branchName === branchName)?.parentBranch ?? null;
  } catch {
    return null;
  }
}

View file

@ -1,10 +1,10 @@
import type { AppConfig, ProviderId } from "@sandbox-agent/foundry-shared";
import type { AppConfig, SandboxProviderId } from "@sandbox-agent/foundry-shared";
function hasE2BApiKey(config: AppConfig): boolean {
return Boolean(config.providers.e2b.apiKey?.trim());
return Boolean(config.sandboxProviders.e2b.apiKey?.trim());
}
function forcedSandboxProviderId(): ProviderId | null {
function forcedSandboxProviderId(): SandboxProviderId | null {
const raw = process.env.FOUNDRY_SANDBOX_PROVIDER?.trim() ?? process.env.HF_SANDBOX_PROVIDER?.trim() ?? null;
if (raw === "local" || raw === "e2b") {
return raw;
@ -12,7 +12,7 @@ function forcedSandboxProviderId(): ProviderId | null {
return null;
}
export function defaultSandboxProviderId(config: AppConfig): ProviderId {
export function defaultSandboxProviderId(config: AppConfig): SandboxProviderId {
const forced = forcedSandboxProviderId();
if (forced === "local") {
return "local";
@ -26,11 +26,11 @@ export function defaultSandboxProviderId(config: AppConfig): ProviderId {
return hasE2BApiKey(config) ? "e2b" : "local";
}
export function availableSandboxProviderIds(config: AppConfig): ProviderId[] {
export function availableSandboxProviderIds(config: AppConfig): SandboxProviderId[] {
return hasE2BApiKey(config) ? ["e2b", "local"] : ["local"];
}
export function resolveSandboxProviderId(config: AppConfig, requested?: ProviderId | null): ProviderId {
export function resolveSandboxProviderId(config: AppConfig, requested?: SandboxProviderId | null): SandboxProviderId {
if (requested === "e2b" && !hasE2BApiKey(config)) {
throw new Error("E2B provider is not configured. Set E2B_API_KEY before selecting the e2b provider.");
}

View file

@ -38,6 +38,12 @@ export interface GitHubRepositoryRecord {
fullName: string;
cloneUrl: string;
private: boolean;
defaultBranch: string;
}
export interface GitHubBranchRecord {
name: string;
commitSha: string;
}
export interface GitHubMemberRecord {
@ -341,12 +347,14 @@ export class GitHubAppClient {
full_name: string;
clone_url: string;
private: boolean;
default_branch: string;
}>("/user/repos?per_page=100&affiliation=owner,collaborator,organization_member&sort=updated", accessToken);
return repositories.map((repository) => ({
fullName: repository.full_name,
cloneUrl: repository.clone_url,
private: repository.private,
defaultBranch: repository.default_branch,
}));
}
@ -356,12 +364,14 @@ export class GitHubAppClient {
full_name: string;
clone_url: string;
private: boolean;
default_branch: string;
}>("/installation/repositories?per_page=100", accessToken);
return repositories.map((repository) => ({
fullName: repository.full_name,
cloneUrl: repository.clone_url,
private: repository.private,
defaultBranch: repository.default_branch,
}));
}
@ -371,11 +381,13 @@ export class GitHubAppClient {
full_name: string;
clone_url: string;
private: boolean;
default_branch: string;
}>(`/repos/${fullName}`, accessToken);
return {
fullName: repository.full_name,
cloneUrl: repository.clone_url,
private: repository.private,
defaultBranch: repository.default_branch,
};
} catch (error) {
if (error instanceof GitHubAppError && error.status === 404) {
@ -390,6 +402,15 @@ export class GitHubAppClient {
return await this.getUserRepository(accessToken, fullName);
}
async listUserRepositoryBranches(accessToken: string, fullName: string): Promise<GitHubBranchRecord[]> {
return await this.listRepositoryBranches(accessToken, fullName);
}
async listInstallationRepositoryBranches(installationId: number, fullName: string): Promise<GitHubBranchRecord[]> {
const accessToken = await this.createInstallationAccessToken(installationId);
return await this.listRepositoryBranches(accessToken, fullName);
}
async listOrganizationMembers(accessToken: string, organizationLogin: string): Promise<GitHubMemberRecord[]> {
const members = await this.paginate<{
id: number;
@ -687,6 +708,20 @@ export class GitHubAppClient {
nextUrl: parseNextLink(response.headers.get("link")),
};
}
private async listRepositoryBranches(accessToken: string, fullName: string): Promise<GitHubBranchRecord[]> {
const branches = await this.paginate<{
name: string;
commit?: { sha?: string | null } | null;
}>(`/repos/${fullName}/branches?per_page=100`, accessToken);
return branches
.map((branch) => ({
name: branch.name?.trim() ?? "",
commitSha: branch.commit?.sha?.trim() ?? "",
}))
.filter((branch) => branch.name.length > 0 && branch.commitSha.length > 0);
}
}
function parseNextLink(linkHeader: string | null): string | null {

View file

@ -1,7 +1,7 @@
import { betterAuth } from "better-auth";
import { createAdapterFactory } from "better-auth/adapters";
import { APP_SHELL_WORKSPACE_ID } from "../actors/workspace/app-shell.js";
import { authUserKey, workspaceKey } from "../actors/keys.js";
import { APP_SHELL_ORGANIZATION_ID } from "../actors/organization/app-shell.js";
import { authUserKey, organizationKey } from "../actors/keys.js";
import { logger } from "../logging.js";
const AUTH_BASE_PATH = "/v1/auth";
@ -43,7 +43,7 @@ async function callAuthEndpoint(auth: any, url: string, init?: RequestInit): Pro
return await auth.handler(new Request(url, init));
}
function resolveRouteUserId(workspace: any, resolved: any): string | null {
function resolveRouteUserId(organization: any, resolved: any): string | null {
if (!resolved) {
return null;
}
@ -75,11 +75,11 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
}
// getOrCreate is intentional here: the adapter runs during Better Auth callbacks
// which can fire before any explicit create path. The app workspace and auth user
// which can fire before any explicit create path. The app organization and auth user
// actors must exist by the time the adapter needs them.
const appWorkspace = () =>
actorClient.workspace.getOrCreate(workspaceKey(APP_SHELL_WORKSPACE_ID), {
createWithInput: APP_SHELL_WORKSPACE_ID,
const appOrganization = () =>
actorClient.organization.getOrCreate(organizationKey(APP_SHELL_ORGANIZATION_ID), {
createWithInput: APP_SHELL_ORGANIZATION_ID,
});
// getOrCreate is intentional: Better Auth creates user records during OAuth
@ -109,9 +109,9 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
}
const email = direct("email");
if (typeof email === "string" && email.length > 0) {
const workspace = await appWorkspace();
const resolved = await workspace.authFindEmailIndex({ email: email.toLowerCase() });
return resolveRouteUserId(workspace, resolved);
const organization = await appOrganization();
const resolved = await organization.authFindEmailIndex({ email: email.toLowerCase() });
return resolveRouteUserId(organization, resolved);
}
return null;
}
@ -124,12 +124,12 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
const sessionId = direct("id") ?? data?.id;
const sessionToken = direct("token") ?? data?.token;
if (typeof sessionId === "string" || typeof sessionToken === "string") {
const workspace = await appWorkspace();
const resolved = await workspace.authFindSessionIndex({
const organization = await appOrganization();
const resolved = await organization.authFindSessionIndex({
...(typeof sessionId === "string" ? { sessionId } : {}),
...(typeof sessionToken === "string" ? { sessionToken } : {}),
});
return resolveRouteUserId(workspace, resolved);
return resolveRouteUserId(organization, resolved);
}
return null;
}
@ -142,14 +142,14 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
const accountRecordId = direct("id") ?? data?.id;
const providerId = direct("providerId") ?? data?.providerId;
const accountId = direct("accountId") ?? data?.accountId;
const workspace = await appWorkspace();
const organization = await appOrganization();
if (typeof accountRecordId === "string" && accountRecordId.length > 0) {
const resolved = await workspace.authFindAccountIndex({ id: accountRecordId });
return resolveRouteUserId(workspace, resolved);
const resolved = await organization.authFindAccountIndex({ id: accountRecordId });
return resolveRouteUserId(organization, resolved);
}
if (typeof providerId === "string" && providerId.length > 0 && typeof accountId === "string" && accountId.length > 0) {
const resolved = await workspace.authFindAccountIndex({ providerId, accountId });
return resolveRouteUserId(workspace, resolved);
const resolved = await organization.authFindAccountIndex({ providerId, accountId });
return resolveRouteUserId(organization, resolved);
}
return null;
}
@ -157,9 +157,9 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
return null;
};
const ensureWorkspaceVerification = async (method: string, payload: Record<string, unknown>) => {
const workspace = await appWorkspace();
return await workspace[method](payload);
const ensureOrganizationVerification = async (method: string, payload: Record<string, unknown>) => {
const organization = await appOrganization();
return await organization[method](payload);
};
return {
@ -170,7 +170,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
create: async ({ model, data }) => {
const transformed = await transformInput(data, model, "create", true);
if (model === "verification") {
return await ensureWorkspaceVerification("authCreateVerification", { data: transformed });
return await ensureOrganizationVerification("authCreateVerification", { data: transformed });
}
const userId = await resolveUserIdForQuery(model, undefined, transformed);
@ -180,17 +180,17 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
const userActor = await getAuthUser(userId);
const created = await userActor.createAuthRecord({ model, data: transformed });
const workspace = await appWorkspace();
const organization = await appOrganization();
if (model === "user" && typeof transformed.email === "string" && transformed.email.length > 0) {
await workspace.authUpsertEmailIndex({
await organization.authUpsertEmailIndex({
email: transformed.email.toLowerCase(),
userId,
});
}
if (model === "session") {
await workspace.authUpsertSessionIndex({
await organization.authUpsertSessionIndex({
sessionId: String(created.id),
sessionToken: String(created.token),
userId,
@ -198,7 +198,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
}
if (model === "account") {
await workspace.authUpsertAccountIndex({
await organization.authUpsertAccountIndex({
id: String(created.id),
providerId: String(created.providerId),
accountId: String(created.accountId),
@ -212,7 +212,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
findOne: async ({ model, where, join }) => {
const transformedWhere = transformWhereClause({ model, where, action: "findOne" });
if (model === "verification") {
return await ensureWorkspaceVerification("authFindOneVerification", { where: transformedWhere, join });
return await ensureOrganizationVerification("authFindOneVerification", { where: transformedWhere, join });
}
const userId = await resolveUserIdForQuery(model, transformedWhere);
@ -228,7 +228,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
findMany: async ({ model, where, limit, sortBy, offset, join }) => {
const transformedWhere = transformWhereClause({ model, where, action: "findMany" });
if (model === "verification") {
return await ensureWorkspaceVerification("authFindManyVerification", {
return await ensureOrganizationVerification("authFindManyVerification", {
where: transformedWhere,
limit,
sortBy,
@ -240,11 +240,11 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
if (model === "session") {
const tokenClause = transformedWhere?.find((entry: any) => entry.field === "token" && entry.operator === "in");
if (tokenClause && Array.isArray(tokenClause.value)) {
const workspace = await appWorkspace();
const organization = await appOrganization();
const resolved = await Promise.all(
(tokenClause.value as string[]).map(async (sessionToken: string) => ({
sessionToken,
route: await workspace.authFindSessionIndex({ sessionToken }),
route: await organization.authFindSessionIndex({ sessionToken }),
})),
);
const byUser = new Map<string, string[]>();
@ -284,7 +284,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
const transformedWhere = transformWhereClause({ model, where, action: "update" });
const transformedUpdate = (await transformInput(update as Record<string, unknown>, model, "update", true)) as Record<string, unknown>;
if (model === "verification") {
return await ensureWorkspaceVerification("authUpdateVerification", { where: transformedWhere, update: transformedUpdate });
return await ensureOrganizationVerification("authUpdateVerification", { where: transformedWhere, update: transformedUpdate });
}
const userId = await resolveUserIdForQuery(model, transformedWhere, transformedUpdate);
@ -302,19 +302,19 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
? await userActor.findOneAuthRecord({ model, where: transformedWhere })
: null;
const updated = await userActor.updateAuthRecord({ model, where: transformedWhere, update: transformedUpdate });
const workspace = await appWorkspace();
const organization = await appOrganization();
if (model === "user" && updated) {
if (before?.email && before.email !== updated.email) {
await workspace.authDeleteEmailIndex({ email: before.email.toLowerCase() });
await organization.authDeleteEmailIndex({ email: before.email.toLowerCase() });
}
if (updated.email) {
await workspace.authUpsertEmailIndex({ email: updated.email.toLowerCase(), userId });
await organization.authUpsertEmailIndex({ email: updated.email.toLowerCase(), userId });
}
}
if (model === "session" && updated) {
await workspace.authUpsertSessionIndex({
await organization.authUpsertSessionIndex({
sessionId: String(updated.id),
sessionToken: String(updated.token),
userId,
@ -322,7 +322,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
}
if (model === "account" && updated) {
await workspace.authUpsertAccountIndex({
await organization.authUpsertAccountIndex({
id: String(updated.id),
providerId: String(updated.providerId),
accountId: String(updated.accountId),
@ -337,7 +337,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
const transformedWhere = transformWhereClause({ model, where, action: "updateMany" });
const transformedUpdate = (await transformInput(update as Record<string, unknown>, model, "update", true)) as Record<string, unknown>;
if (model === "verification") {
return await ensureWorkspaceVerification("authUpdateManyVerification", { where: transformedWhere, update: transformedUpdate });
return await ensureOrganizationVerification("authUpdateManyVerification", { where: transformedWhere, update: transformedUpdate });
}
const userId = await resolveUserIdForQuery(model, transformedWhere, transformedUpdate);
@ -352,7 +352,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
delete: async ({ model, where }) => {
const transformedWhere = transformWhereClause({ model, where, action: "delete" });
if (model === "verification") {
await ensureWorkspaceVerification("authDeleteVerification", { where: transformedWhere });
await ensureOrganizationVerification("authDeleteVerification", { where: transformedWhere });
return;
}
@ -362,19 +362,19 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
}
const userActor = await getAuthUser(userId);
const workspace = await appWorkspace();
const organization = await appOrganization();
const before = await userActor.findOneAuthRecord({ model, where: transformedWhere });
await userActor.deleteAuthRecord({ model, where: transformedWhere });
if (model === "session" && before) {
await workspace.authDeleteSessionIndex({
await organization.authDeleteSessionIndex({
sessionId: before.id,
sessionToken: before.token,
});
}
if (model === "account" && before) {
await workspace.authDeleteAccountIndex({
await organization.authDeleteAccountIndex({
id: before.id,
providerId: before.providerId,
accountId: before.accountId,
@ -382,14 +382,14 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
}
if (model === "user" && before?.email) {
await workspace.authDeleteEmailIndex({ email: before.email.toLowerCase() });
await organization.authDeleteEmailIndex({ email: before.email.toLowerCase() });
}
},
deleteMany: async ({ model, where }) => {
const transformedWhere = transformWhereClause({ model, where, action: "deleteMany" });
if (model === "verification") {
return await ensureWorkspaceVerification("authDeleteManyVerification", { where: transformedWhere });
return await ensureOrganizationVerification("authDeleteManyVerification", { where: transformedWhere });
}
if (model === "session") {
@ -398,11 +398,11 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
return 0;
}
const userActor = await getAuthUser(userId);
const workspace = await appWorkspace();
const organization = await appOrganization();
const sessions = await userActor.findManyAuthRecords({ model, where: transformedWhere, limit: 5000 });
const deleted = await userActor.deleteManyAuthRecords({ model, where: transformedWhere });
for (const session of sessions) {
await workspace.authDeleteSessionIndex({
await organization.authDeleteSessionIndex({
sessionId: session.id,
sessionToken: session.token,
});
@ -423,7 +423,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
count: async ({ model, where }) => {
const transformedWhere = transformWhereClause({ model, where, action: "count" });
if (model === "verification") {
return await ensureWorkspaceVerification("authCountVerification", { where: transformedWhere });
return await ensureOrganizationVerification("authCountVerification", { where: transformedWhere });
}
const userId = await resolveUserIdForQuery(model, transformedWhere);
@ -476,8 +476,8 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
},
async getAuthState(sessionId: string) {
const workspace = await appWorkspace();
const route = await workspace.authFindSessionIndex({ sessionId });
const organization = await appOrganization();
const route = await organization.authFindSessionIndex({ sessionId });
if (!route?.userId) {
return null;
}

View file

@ -1,20 +0,0 @@
import type { AppConfig } from "@sandbox-agent/foundry-shared";
import { homedir } from "node:os";
import { dirname, join, resolve } from "node:path";
/**
 * Expand a leading "~/" to the user's home directory. Any other input —
 * including a bare "~" — is returned unchanged.
 */
function expandPath(input: string): string {
  const homePrefix = "~/";
  if (!input.startsWith(homePrefix)) {
    return input;
  }
  return `${homedir()}/${input.slice(homePrefix.length)}`;
}
/**
 * Root directory for Foundry data. Kept collocated with the backend DB
 * by default: the DB path's directory (after ~ expansion) is the data dir.
 */
export function foundryDataDir(config: AppConfig): string {
  const resolvedDbPath = expandPath(config.backend.dbPath);
  return resolve(dirname(resolvedDbPath));
}
/**
 * Local clone location for a repo. Layout: <dataDir>/repos/<workspaceId>/<repoId>.
 */
export function foundryRepoClonePath(config: AppConfig, workspaceId: string, repoId: string): string {
  const cloneRoot = join(foundryDataDir(config), "repos", workspaceId, repoId);
  return resolve(cloneRoot);
}

View file

@ -1,20 +1,20 @@
import { getOrCreateWorkspace } from "../actors/handles.js";
import { APP_SHELL_WORKSPACE_ID } from "../actors/workspace/app-shell.js";
import { getOrCreateOrganization } from "../actors/handles.js";
import { APP_SHELL_ORGANIZATION_ID } from "../actors/organization/app-shell.js";
export interface ResolvedGithubAuth {
githubToken: string;
scopes: string[];
}
export async function resolveWorkspaceGithubAuth(c: any, workspaceId: string): Promise<ResolvedGithubAuth | null> {
if (!workspaceId || workspaceId === APP_SHELL_WORKSPACE_ID) {
export async function resolveOrganizationGithubAuth(c: any, organizationId: string): Promise<ResolvedGithubAuth | null> {
if (!organizationId || organizationId === APP_SHELL_ORGANIZATION_ID) {
return null;
}
try {
const appWorkspace = await getOrCreateWorkspace(c, APP_SHELL_WORKSPACE_ID);
const resolved = await appWorkspace.resolveAppGithubToken({
organizationId: workspaceId,
const appOrganization = await getOrCreateOrganization(c, APP_SHELL_ORGANIZATION_ID);
const resolved = await appOrganization.resolveAppGithubToken({
organizationId: organizationId,
requireRepoScope: true,
});
if (!resolved?.accessToken) {

View file

@ -1,45 +0,0 @@
// Per-repo mutex state: whether the lock is currently held, plus FIFO
// resolvers for callers parked in acquireRepoLock.
interface RepoLockState {
  locked: boolean;
  waiters: Array<() => void>;
}

// One lock state per repo path; entries are removed once fully released
// so idle repos do not accumulate state.
const repoLocks = new Map<string, RepoLockState>();
/**
 * Acquire the per-repo mutex, waiting FIFO behind current holders.
 * Resolves to a release callback; callers must invoke it exactly once
 * (withRepoGitLock does so in a finally block).
 */
async function acquireRepoLock(repoPath: string): Promise<() => void> {
  let state = repoLocks.get(repoPath);
  if (!state) {
    state = { locked: false, waiters: [] };
    repoLocks.set(repoPath, state);
  }
  if (!state.locked) {
    // Uncontended: take the lock immediately.
    state.locked = true;
    return () => releaseRepoLock(repoPath, state);
  }
  // Contended: park until releaseRepoLock hands the lock to this waiter.
  await new Promise<void>((resolve) => {
    state!.waiters.push(resolve);
  });
  return () => releaseRepoLock(repoPath, state!);
}
/**
 * Release the repo lock: hand ownership to the next FIFO waiter if any
 * (note `locked` stays true — ownership transfers), otherwise mark it
 * free and drop the map entry.
 */
function releaseRepoLock(repoPath: string, state: RepoLockState): void {
  const next = state.waiters.shift();
  if (next) {
    next();
    return;
  }
  state.locked = false;
  repoLocks.delete(repoPath);
}
/**
 * Run `fn` while holding the per-repo git lock, serializing git operations
 * against the same checkout. The lock is always released, even when `fn`
 * throws.
 */
export async function withRepoGitLock<T>(repoPath: string, fn: () => Promise<T>): Promise<T> {
  const releaseLock = await acquireRepoLock(repoPath);
  try {
    const result = await fn();
    return result;
  } finally {
    releaseLock();
  }
}

View file

@ -82,3 +82,30 @@ export function repoLabelFromRemote(remoteUrl: string): string {
return basename(trimmed.replace(/\.git$/i, ""));
}
/**
 * Extract "owner/repo" from a git remote URL when it points at github.com
 * (a leading "www." is tolerated; other hosts — including subdomains like
 * gist.github.com — return null). Returns null for anything unparseable.
 */
export function githubRepoFullNameFromRemote(remoteUrl: string): string | null {
  const normalized = normalizeRemoteUrl(remoteUrl);
  if (!normalized) {
    return null;
  }
  let url: URL;
  try {
    url = new URL(normalized);
  } catch {
    return null;
  }
  const hostname = url.hostname.replace(/^www\./i, "").toLowerCase();
  if (hostname !== "github.com") {
    return null;
  }
  const segments = url.pathname.replace(/\/+$/, "").split("/").filter(Boolean);
  if (segments.length < 2) {
    return null;
  }
  const owner = segments[0]?.trim();
  const repo = (segments[1] ?? "").replace(/\.git$/i, "").trim();
  return owner && repo ? `${owner}/${repo}` : null;
}

View file

@ -1,129 +0,0 @@
import { chmodSync, mkdtempSync, writeFileSync, readFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { describe, expect, it } from "vitest";
import { gitSpiceAvailable, gitSpiceListStack, gitSpiceRestackSubtree } from "../src/integrations/git-spice/index.js";
/** Create a unique temp directory whose name starts with `prefix`. */
function makeTempDir(prefix: string): string {
  const template = join(tmpdir(), prefix);
  return mkdtempSync(template);
}
/** Write `contents` to `scriptPath` and mark the file executable (0o755). */
function writeScript(scriptPath: string, contents: string): void {
  writeFileSync(scriptPath, contents, "utf8");
  chmodSync(scriptPath, 0o755);
}
/**
 * Apply environment-variable overrides while `fn` runs, then restore the
 * previous values — including deleting variables that were previously
 * unset. An `undefined` value in `updates` unsets the variable for the
 * duration of `fn`.
 */
async function withEnv<T>(updates: Record<string, string | undefined>, fn: () => Promise<T>): Promise<T> {
  const saved = new Map<string, string | undefined>();
  for (const key of Object.keys(updates)) {
    saved.set(key, process.env[key]);
    const next = updates[key];
    if (next == null) {
      delete process.env[key];
    } else {
      process.env[key] = next;
    }
  }
  try {
    return await fn();
  } finally {
    for (const [key, prior] of saved) {
      if (prior == null) {
        delete process.env[key];
      } else {
        process.env[key] = prior;
      }
    }
  }
}
// Test suite for the git-spice wrappers. Each test points HF_GIT_SPICE_BIN
// at a fake shell script so no real git-spice install is required.
describe("git-spice integration", () => {
  // The fake `log` prints valid rows mixed with noise/broken JSON; the
  // parser should keep valid rows and dedupe by branch name.
  it("parses stack rows from mixed/malformed json output", async () => {
    const repoPath = makeTempDir("hf-git-spice-parse-");
    const scriptPath = join(repoPath, "fake-git-spice.sh");
    writeScript(
      scriptPath,
      [
        "#!/bin/sh",
        'if [ \"$1\" = \"--help\" ]; then',
        " exit 0",
        "fi",
        'if [ \"$1\" = \"log\" ]; then',
        " echo 'noise line'",
        ' echo \'{"branch":"feature/a","parent":"main"}\'',
        " echo '{bad json'",
        ' echo \'{"name":"feature/b","parentBranch":"feature/a"}\'',
        ' echo \'{"name":"feature/a","parent":"main"}\'',
        " exit 0",
        "fi",
        "exit 1",
      ].join("\n"),
    );
    await withEnv({ HF_GIT_SPICE_BIN: scriptPath }, async () => {
      const rows = await gitSpiceListStack(repoPath);
      expect(rows).toEqual([
        { branchName: "feature/a", parentBranch: "main" },
        { branchName: "feature/b", parentBranch: "feature/a" },
      ]);
    });
  });
  // The fake binary rejects `upstack restack` variants and only accepts
  // `branch restack ... --no-prompt`; the call log proves the fallback
  // chain tried each variant in order and stopped at the first success.
  it("falls back across versioned subtree restack command variants", async () => {
    const repoPath = makeTempDir("hf-git-spice-fallback-");
    const scriptPath = join(repoPath, "fake-git-spice.sh");
    const logPath = join(repoPath, "calls.log");
    writeScript(
      scriptPath,
      [
        "#!/bin/sh",
        'echo \"$*\" >> \"$SPICE_LOG_PATH\"',
        'if [ \"$1\" = \"--help\" ]; then',
        " exit 0",
        "fi",
        'if [ \"$1\" = \"upstack\" ] && [ \"$2\" = \"restack\" ]; then',
        " exit 1",
        "fi",
        'if [ \"$1\" = \"branch\" ] && [ \"$2\" = \"restack\" ] && [ \"$5\" = \"--no-prompt\" ]; then',
        " exit 0",
        "fi",
        "exit 1",
      ].join("\n"),
    );
    await withEnv(
      {
        HF_GIT_SPICE_BIN: scriptPath,
        SPICE_LOG_PATH: logPath,
      },
      async () => {
        await gitSpiceRestackSubtree(repoPath, "feature/a");
      },
    );
    const lines = readFileSync(logPath, "utf8")
      .trim()
      .split("\n")
      .filter((line) => line.trim().length > 0);
    expect(lines).toContain("upstack restack --branch feature/a --no-prompt");
    expect(lines).toContain("upstack restack --branch feature/a");
    expect(lines).toContain("branch restack --branch feature/a --no-prompt");
    expect(lines).not.toContain("branch restack --branch feature/a");
  });
  // With both the explicit binary and PATH pointing at nothing runnable,
  // availability detection must report false rather than throw.
  it("reports unavailable when explicit binary and PATH are missing", async () => {
    const repoPath = makeTempDir("hf-git-spice-missing-");
    await withEnv(
      {
        HF_GIT_SPICE_BIN: "/non-existent/hf-git-spice-binary",
        PATH: "/non-existent/bin",
      },
      async () => {
        const available = await gitSpiceAvailable(repoPath);
        expect(available).toBe(false);
      },
    );
  });
});

View file

@ -1,40 +0,0 @@
import { afterEach, beforeEach, describe, expect, test } from "vitest";
import { mkdtempSync, mkdirSync, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { join, resolve } from "node:path";
import { promisify } from "node:util";
import { execFile } from "node:child_process";
import { validateRemote } from "../src/integrations/git/index.js";
const execFileAsync = promisify(execFile);
describe("validateRemote", () => {
  const originalCwd = process.cwd();
  // The test deliberately changes cwd, so pin it back around every test to
  // keep later tests isolated.
  beforeEach(() => {
    process.chdir(originalCwd);
  });
  afterEach(() => {
    process.chdir(originalCwd);
  });
  test("ignores broken worktree gitdir in current directory", async () => {
    const sandboxDir = mkdtempSync(join(tmpdir(), "validate-remote-cwd-"));
    const brokenRepoDir = resolve(sandboxDir, "broken-worktree");
    const remoteRepoDir = resolve(sandboxDir, "remote");
    // Build a directory containing only a ".git" file whose gitdir points at
    // a path that does not exist — the shape left behind by a deleted
    // linked worktree.
    mkdirSync(brokenRepoDir, { recursive: true });
    writeFileSync(resolve(brokenRepoDir, ".git"), "gitdir: /definitely/missing/worktree\n", "utf8");
    // Build a healthy repo with one commit to serve as the remote under
    // validation.
    await execFileAsync("git", ["init", remoteRepoDir]);
    await execFileAsync("git", ["-C", remoteRepoDir, "config", "user.name", "Foundry Test"]);
    await execFileAsync("git", ["-C", remoteRepoDir, "config", "user.email", "test@example.com"]);
    writeFileSync(resolve(remoteRepoDir, "README.md"), "# test\n", "utf8");
    await execFileAsync("git", ["-C", remoteRepoDir, "add", "README.md"]);
    await execFileAsync("git", ["-C", remoteRepoDir, "commit", "-m", "init"]);
    // Validation must still resolve even though the cwd is a broken repo.
    process.chdir(brokenRepoDir);
    await expect(validateRemote(remoteRepoDir)).resolves.toBeUndefined();
  });
});

View file

@ -9,7 +9,7 @@ export function createTestConfig(overrides?: Partial<AppConfig>): AppConfig {
return ConfigSchema.parse({
auto_submit: true,
notify: ["terminal" as const],
workspace: { default: "default" },
organization: { default: "default" },
backend: {
host: "127.0.0.1",
port: 7741,
@ -19,7 +19,7 @@ export function createTestConfig(overrides?: Partial<AppConfig>): AppConfig {
backup_interval_secs: 3600,
backup_retention_days: 7,
},
providers: {
sandboxProviders: {
local: {},
e2b: {},
},

View file

@ -1,47 +1,15 @@
import type { BackendDriver, GitDriver, GithubDriver, StackDriver, TmuxDriver } from "../../src/driver.js";
import type { BackendDriver, GithubDriver, TmuxDriver } from "../../src/driver.js";
/**
 * Builds a fully-stubbed BackendDriver for tests. Every sub-driver defaults
 * to its inert test implementation; callers may swap in any subset via
 * `overrides`.
 */
export function createTestDriver(overrides?: Partial<BackendDriver>): BackendDriver {
  const { git, stack, github, tmux } = overrides ?? {};
  return {
    git: git ?? createTestGitDriver(),
    stack: stack ?? createTestStackDriver(),
    github: github ?? createTestGithubDriver(),
    tmux: tmux ?? createTestTmuxDriver(),
  };
}
/**
 * Builds a stub GitDriver whose operations are inert no-ops with benign
 * fixed results (empty branch lists, "origin/main" as base ref, a dummy
 * commit hash, a zero diff stat, no conflicts). Individual methods can be
 * replaced via `overrides`.
 */
export function createTestGitDriver(overrides?: Partial<GitDriver>): GitDriver {
  const defaults: GitDriver = {
    validateRemote: async () => {},
    ensureCloned: async () => {},
    fetch: async () => {},
    listRemoteBranches: async () => [],
    remoteDefaultBaseRef: async () => "origin/main",
    revParse: async () => "abc1234567890",
    ensureRemoteBranch: async () => {},
    diffStatForBranch: async () => "+0/-0",
    conflictsWithMain: async () => false,
  };
  return { ...defaults, ...overrides };
}
/**
 * Builds a stub StackDriver that reports git-spice as unavailable, returns
 * an empty stack, and treats every mutation as a successful no-op. Individual
 * methods can be replaced via `overrides`.
 */
export function createTestStackDriver(overrides?: Partial<StackDriver>): StackDriver {
  const defaults: StackDriver = {
    available: async () => false,
    listStack: async () => [],
    syncRepo: async () => {},
    restackRepo: async () => {},
    restackSubtree: async () => {},
    rebaseBranch: async () => {},
    reparentBranch: async () => {},
    trackBranch: async () => {},
  };
  return { ...defaults, ...overrides };
}
export function createTestGithubDriver(overrides?: Partial<GithubDriver>): GithubDriver {
return {
listPullRequests: async () => [],
createPr: async (_repoPath, _headBranch, _title) => ({
createPr: async (_repoFullName, _headBranch, _title) => ({
number: 1,
url: `https://github.com/test/repo/pull/1`,
}),

View file

@ -1,20 +1,19 @@
import { describe, expect, it } from "vitest";
import { githubDataKey, historyKey, projectBranchSyncKey, projectKey, taskKey, taskSandboxKey, workspaceKey } from "../src/actors/keys.js";
import { githubDataKey, historyKey, organizationKey, repositoryKey, taskKey, taskSandboxKey } from "../src/actors/keys.js";
describe("actor keys", () => {
it("prefixes every key with workspace namespace", () => {
it("prefixes every key with organization namespace", () => {
const keys = [
workspaceKey("default"),
projectKey("default", "repo"),
organizationKey("default"),
repositoryKey("default", "repo"),
taskKey("default", "repo", "task"),
taskSandboxKey("default", "sbx"),
historyKey("default", "repo"),
githubDataKey("default"),
projectBranchSyncKey("default", "repo"),
];
for (const key of keys) {
expect(key[0]).toBe("ws");
expect(key[0]).toBe("org");
expect(key[1]).toBe("default");
}
});

View file

@ -6,8 +6,9 @@ import { execFileSync } from "node:child_process";
import { setTimeout as delay } from "node:timers/promises";
import { describe, expect, it } from "vitest";
import { setupTest } from "rivetkit/test";
import { workspaceKey } from "../src/actors/keys.js";
import { organizationKey } from "../src/actors/keys.js";
import { registry } from "../src/actors/index.js";
import { repoIdFromRemote } from "../src/services/repo.js";
import { createTestDriver } from "./helpers/test-driver.js";
import { createTestRuntimeContext } from "./helpers/test-context.js";
@ -24,59 +25,60 @@ function createRepo(): { repoPath: string } {
return { repoPath };
}
async function waitForWorkspaceRows(ws: any, workspaceId: string, expectedCount: number) {
async function waitForOrganizationRows(ws: any, organizationId: string, expectedCount: number) {
for (let attempt = 0; attempt < 40; attempt += 1) {
const rows = await ws.listTasks({ workspaceId });
const rows = await ws.listTasks({ organizationId });
if (rows.length >= expectedCount) {
return rows;
}
await delay(50);
}
return ws.listTasks({ workspaceId });
return ws.listTasks({ organizationId });
}
describe("workspace isolation", () => {
it.skipIf(!runActorIntegration)("keeps task lists isolated by workspace", async (t) => {
describe("organization isolation", () => {
it.skipIf(!runActorIntegration)("keeps task lists isolated by organization", async (t) => {
const testDriver = createTestDriver();
createTestRuntimeContext(testDriver);
const { client } = await setupTest(t, registry);
const wsA = await client.workspace.getOrCreate(workspaceKey("alpha"), {
const wsA = await client.organization.getOrCreate(organizationKey("alpha"), {
createWithInput: "alpha",
});
const wsB = await client.workspace.getOrCreate(workspaceKey("beta"), {
const wsB = await client.organization.getOrCreate(organizationKey("beta"), {
createWithInput: "beta",
});
const { repoPath } = createRepo();
const repoA = await wsA.addRepo({ workspaceId: "alpha", remoteUrl: repoPath });
const repoB = await wsB.addRepo({ workspaceId: "beta", remoteUrl: repoPath });
const repoId = repoIdFromRemote(repoPath);
await wsA.applyGithubRepositoryProjection({ repoId, remoteUrl: repoPath });
await wsB.applyGithubRepositoryProjection({ repoId, remoteUrl: repoPath });
await wsA.createTask({
workspaceId: "alpha",
repoId: repoA.repoId,
organizationId: "alpha",
repoId,
task: "task A",
providerId: "local",
sandboxProviderId: "local",
explicitBranchName: "feature/a",
explicitTitle: "A",
});
await wsB.createTask({
workspaceId: "beta",
repoId: repoB.repoId,
organizationId: "beta",
repoId,
task: "task B",
providerId: "local",
sandboxProviderId: "local",
explicitBranchName: "feature/b",
explicitTitle: "B",
});
const aRows = await waitForWorkspaceRows(wsA, "alpha", 1);
const bRows = await waitForWorkspaceRows(wsB, "beta", 1);
const aRows = await waitForOrganizationRows(wsA, "alpha", 1);
const bRows = await waitForOrganizationRows(wsB, "beta", 1);
expect(aRows.length).toBe(1);
expect(bRows.length).toBe(1);
expect(aRows[0]?.workspaceId).toBe("alpha");
expect(bRows[0]?.workspaceId).toBe("beta");
expect(aRows[0]?.organizationId).toBe("alpha");
expect(bRows[0]?.organizationId).toBe("beta");
expect(aRows[0]?.taskId).not.toBe(bRows[0]?.taskId);
});
});

View file

@ -1,14 +1,14 @@
// @ts-nocheck
import { describe, expect, it } from "vitest";
import { setupTest } from "rivetkit/test";
import { workspaceKey } from "../src/actors/keys.js";
import { organizationKey } from "../src/actors/keys.js";
import { registry } from "../src/actors/index.js";
import { createTestDriver } from "./helpers/test-driver.js";
import { createTestRuntimeContext } from "./helpers/test-context.js";
const runActorIntegration = process.env.HF_ENABLE_ACTOR_INTEGRATION_TESTS === "1";
describe("workspace star sandbox agent repo", () => {
describe("organization star sandbox agent repo", () => {
it.skipIf(!runActorIntegration)("stars the sandbox agent repo through the github driver", async (t) => {
const calls: string[] = [];
const testDriver = createTestDriver({
@ -26,11 +26,11 @@ describe("workspace star sandbox agent repo", () => {
createTestRuntimeContext(testDriver);
const { client } = await setupTest(t, registry);
const ws = await client.workspace.getOrCreate(workspaceKey("alpha"), {
const ws = await client.organization.getOrCreate(organizationKey("alpha"), {
createWithInput: "alpha",
});
const result = await ws.starSandboxAgentRepo({ workspaceId: "alpha" });
const result = await ws.starSandboxAgentRepo({ organizationId: "alpha" });
expect(calls).toEqual(["rivet-dev/sandbox-agent"]);
expect(result.repo).toBe("rivet-dev/sandbox-agent");

View file

@ -6,7 +6,7 @@ function makeConfig(overrides?: Partial<AppConfig>): AppConfig {
return ConfigSchema.parse({
auto_submit: true,
notify: ["terminal"],
workspace: { default: "default" },
organization: { default: "default" },
backend: {
host: "127.0.0.1",
port: 7741,
@ -16,7 +16,7 @@ function makeConfig(overrides?: Partial<AppConfig>): AppConfig {
backup_interval_secs: 3600,
backup_retention_days: 7,
},
providers: {
sandboxProviders: {
local: {},
e2b: {},
},
@ -33,7 +33,7 @@ describe("sandbox config", () => {
it("prefers e2b when an api key is configured", () => {
const config = makeConfig({
providers: {
sandboxProviders: {
local: {},
e2b: { apiKey: "test-token" },
},

View file

@ -1,34 +0,0 @@
import { describe, expect, it } from "vitest";
import { normalizeParentBranch, parentLookupFromStack, sortBranchesForOverview } from "../src/actors/project/stack-model.js";
// Unit tests for the pure stack-graph helpers used by the repo overview.
describe("stack-model", () => {
  it("normalizes self-parent references to null", () => {
    // A branch listed as its own parent is treated as having no parent.
    expect(normalizeParentBranch("feature/a", "feature/a")).toBeNull();
    expect(normalizeParentBranch("feature/a", "main")).toBe("main");
    expect(normalizeParentBranch("feature/a", null)).toBeNull();
  });
  it("builds parent lookup with sanitized entries", () => {
    const lookup = parentLookupFromStack([
      { branchName: "feature/a", parentBranch: "main" },
      { branchName: "feature/b", parentBranch: "feature/b" },
      { branchName: " ", parentBranch: "main" },
    ]);
    expect(lookup.get("feature/a")).toBe("main");
    // Self-parent collapses to null; whitespace-only branch names are dropped.
    expect(lookup.get("feature/b")).toBeNull();
    expect(lookup.has(" ")).toBe(false);
  });
  it("orders branches by graph depth and handles cycles safely", () => {
    const rows = sortBranchesForOverview([
      { branchName: "feature/b", parentBranch: "feature/a", updatedAt: 200 },
      { branchName: "feature/a", parentBranch: "main", updatedAt: 100 },
      { branchName: "main", parentBranch: null, updatedAt: 50 },
      { branchName: "cycle-a", parentBranch: "cycle-b", updatedAt: 300 },
      { branchName: "cycle-b", parentBranch: "cycle-a", updatedAt: 250 },
    ]);
    // Roots come first and children follow their parents; the cyclic pair
    // still appears (after the acyclic branches) instead of hanging the sort.
    expect(rows.map((row) => row.branchName)).toEqual(["main", "feature/a", "feature/b", "cycle-a", "cycle-b"]);
  });
});

View file

@ -1,5 +1,5 @@
import { describe, expect, it } from "vitest";
import { shouldMarkSessionUnreadForStatus, shouldRecreateSessionForModelChange } from "../src/actors/task/workbench.js";
import { requireSendableSessionMeta, shouldMarkSessionUnreadForStatus, shouldRecreateSessionForModelChange } from "../src/actors/task/workbench.js";
describe("workbench unread status transitions", () => {
it("marks unread when a running session first becomes idle", () => {
@ -57,3 +57,30 @@ describe("workbench model changes", () => {
).toBe(false);
});
});
// Guard-rail tests for requireSendableSessionMeta, which gates prompt sends
// on the session existing and having finished provisioning.
describe("workbench send readiness", () => {
  it("rejects unknown sessions", () => {
    // No meta at all means the session id is not registered with the workbench.
    expect(() => requireSendableSessionMeta(null, "session-1")).toThrow("Unknown workbench session: session-1");
  });
  it("rejects pending sessions", () => {
    // A session that exists but is still provisioning must not accept sends.
    expect(() =>
      requireSendableSessionMeta(
        {
          status: "pending_session_create",
          sandboxSessionId: null,
        },
        "session-2",
      ),
    ).toThrow("Session is not ready (status: pending_session_create). Wait for session provisioning to complete.");
  });
  it("accepts ready sessions with a sandbox session id", () => {
    const meta = {
      status: "ready",
      sandboxSessionId: "session-1",
    };
    // The same meta object is returned so callers can use it directly.
    expect(requireSendableSessionMeta(meta, "session-3")).toBe(meta);
  });
});

View file

@ -8,7 +8,7 @@ import { ensureBackendRunning, getBackendStatus, parseBackendPort, stopBackend }
import { writeStderr, writeStdout } from "./io.js";
import { openEditorForTask } from "./task-editor.js";
import { spawnCreateTmuxWindow } from "./tmux.js";
import { loadConfig, resolveWorkspace, saveConfig } from "./workspace/config.js";
import { loadConfig, resolveOrganization, saveConfig } from "./organization/config.js";
async function ensureBunRuntime(): Promise<void> {
if (typeof (globalThis as { Bun?: unknown }).Bun !== "undefined") {
@ -41,9 +41,9 @@ async function ensureBunRuntime(): Promise<void> {
throw new Error("hf requires Bun runtime. Set HF_BUN or install Bun at ~/.bun/bin/bun.");
}
async function runTuiCommand(config: ReturnType<typeof loadConfig>, workspaceId: string): Promise<void> {
async function runTuiCommand(config: ReturnType<typeof loadConfig>, organizationId: string): Promise<void> {
const mod = await import("./tui.js");
await mod.runTui(config, workspaceId);
await mod.runTui(config, organizationId);
}
function readOption(args: string[], flag: string): string | undefined {
@ -87,6 +87,92 @@ function positionals(args: string[]): string[] {
return out;
}
/**
 * Canonicalizes a repo selector so equivalent spellings compare equal.
 *
 * Handled forms:
 * - `owner/repo` shorthand (with or without a `.git` suffix) →
 *   `https://github.com/owner/repo.git`
 * - bare `github.com/...` (optionally `www.`-prefixed) → upgraded to https
 *   and normalized as a GitHub URL
 * - full GitHub URLs → `www.` stripped, path trimmed to owner/repo, `.git`
 *   suffix enforced
 * - other http(s) URLs → query/hash removed, trailing slashes removed
 * - everything else (ssh remotes, repo ids, local paths) → trimmed as-is
 *
 * Returns "" for blank input.
 */
function normalizeRepoSelector(value: string): string {
  let normalized = value.trim();
  if (!normalized) {
    return "";
  }
  normalized = normalized.replace(/\/+$/, "");
  if (/^[A-Za-z0-9_.-]+\/[A-Za-z0-9_.-]+$/.test(normalized)) {
    // Strip any explicit ".git" before re-appending so "owner/repo.git" and
    // "owner/repo" normalize identically (previously the suffix was kept,
    // producing "….git.git" and breaking equality with the URL branch below).
    return `https://github.com/${normalized.replace(/\.git$/i, "")}.git`;
  }
  if (/^(?:www\.)?github\.com\/.+/i.test(normalized)) {
    normalized = `https://${normalized.replace(/^www\./i, "")}`;
  }
  try {
    if (/^https?:\/\//i.test(normalized)) {
      const url = new URL(normalized);
      const hostname = url.hostname.replace(/^www\./i, "");
      if (hostname.toLowerCase() === "github.com") {
        const parts = url.pathname.split("/").filter(Boolean);
        if (parts.length >= 2) {
          // Keep only owner/repo and force a single ".git" suffix.
          return `${url.protocol}//${hostname}/${parts[0]}/${(parts[1] ?? "").replace(/\.git$/i, "")}.git`;
        }
      }
      url.search = "";
      url.hash = "";
      return url.toString().replace(/\/+$/, "");
    }
  } catch {
    // Unparseable URL-ish input: fall through and match on the trimmed text.
  }
  return normalized;
}
/**
 * Extracts the `owner/repo` full name from a GitHub selector, or returns
 * null when the selector does not normalize to a parseable GitHub URL with
 * at least owner and repo path segments. A trailing ".git" is dropped from
 * the repo segment.
 */
function githubRepoFullNameFromSelector(value: string): string | null {
  let url: URL;
  try {
    url = new URL(normalizeRepoSelector(value));
  } catch {
    return null;
  }
  const host = url.hostname.replace(/^www\./i, "").toLowerCase();
  if (host !== "github.com") {
    return null;
  }
  const segments = url.pathname.replace(/\/+$/, "").split("/").filter(Boolean);
  if (segments.length < 2) {
    return null;
  }
  const repoName = (segments[1] ?? "").replace(/\.git$/i, "");
  return `${segments[0]}/${repoName}`;
}
/**
 * Resolves a user-supplied repo selector (repo id, git remote URL, or GitHub
 * owner/repo shorthand) against the repos already imported into the
 * organization. Throws when the selector is blank or matches no known repo.
 */
async function resolveImportedRepo(
  client: ReturnType<typeof createBackendClientFromConfig>,
  organizationId: string,
  repoSelector: string,
): Promise<Awaited<ReturnType<typeof client.listRepos>>[number]> {
  const selector = repoSelector.trim();
  if (!selector) {
    throw new Error("Missing required --repo <repo-id|git-remote|owner/repo>");
  }
  const normalizedSelector = normalizeRepoSelector(selector);
  const selectorFullName = githubRepoFullNameFromSelector(selector);
  const repos = await client.listRepos(organizationId);
  // Match precedence: exact repo id, then normalized remote URL, then
  // GitHub owner/repo full-name equality.
  const matchesSelector = (repo: (typeof repos)[number]): boolean => {
    if (repo.repoId === selector) {
      return true;
    }
    if (normalizeRepoSelector(repo.remoteUrl) === normalizedSelector) {
      return true;
    }
    if (!selectorFullName) {
      return false;
    }
    return githubRepoFullNameFromSelector(repo.remoteUrl) === selectorFullName;
  };
  const match = repos.find(matchesSelector);
  if (match) {
    return match;
  }
  throw new Error(
    `Repo not available in organization ${organizationId}: ${repoSelector}. Create it in GitHub first, then sync repos in Foundry before running hf create.`,
  );
}
function printUsage(): void {
writeStdout(`
Usage:
@ -94,22 +180,22 @@ Usage:
hf backend stop [--host HOST] [--port PORT]
hf backend status
hf backend inspect
hf status [--workspace WS] [--json]
hf history [--workspace WS] [--limit N] [--branch NAME] [--task ID] [--json]
hf workspace use <name>
hf tui [--workspace WS]
hf status [--organization ORG] [--json]
hf history [--organization ORG] [--limit N] [--branch NAME] [--task ID] [--json]
hf organization use <name>
hf tui [--organization ORG]
hf create [task] [--workspace WS] --repo <git-remote> [--name NAME|--branch NAME] [--title TITLE] [--agent claude|codex] [--on BRANCH]
hf list [--workspace WS] [--format table|json] [--full]
hf switch [task-id | -] [--workspace WS]
hf attach <task-id> [--workspace WS]
hf merge <task-id> [--workspace WS]
hf archive <task-id> [--workspace WS]
hf push <task-id> [--workspace WS]
hf sync <task-id> [--workspace WS]
hf kill <task-id> [--workspace WS] [--delete-branch] [--abandon]
hf prune [--workspace WS] [--dry-run] [--yes]
hf statusline [--workspace WS] [--format table|claude-code]
hf create [task] [--organization ORG] --repo <repo-id|git-remote|owner/repo> [--name NAME|--branch NAME] [--title TITLE] [--agent claude|codex] [--on BRANCH]
hf list [--organization ORG] [--format table|json] [--full]
hf switch [task-id | -] [--organization ORG]
hf attach <task-id> [--organization ORG]
hf merge <task-id> [--organization ORG]
hf archive <task-id> [--organization ORG]
hf push <task-id> [--organization ORG]
hf sync <task-id> [--organization ORG]
hf kill <task-id> [--organization ORG] [--delete-branch] [--abandon]
hf prune [--organization ORG] [--dry-run] [--yes]
hf statusline [--organization ORG] [--format table|claude-code]
hf db path
hf db nuke
@ -123,19 +209,19 @@ Tips:
function printStatusUsage(): void {
writeStdout(`
Usage:
hf status [--workspace WS] [--json]
hf status [--organization ORG] [--json]
Text Output:
workspace=<workspace-id>
organization=<organization-id>
backend running=<true|false> pid=<pid|unknown> version=<version|unknown>
tasks total=<number>
status queued=<n> running=<n> idle=<n> archived=<n> killed=<n> error=<n>
providers <provider-id>=<count> ...
providers -
sandboxProviders <provider-id>=<count> ...
sandboxProviders -
JSON Output:
{
"workspaceId": "default",
"organizationId": "default",
"backend": { ...backend status object... },
"tasks": {
"total": 4,
@ -149,7 +235,7 @@ JSON Output:
function printHistoryUsage(): void {
writeStdout(`
Usage:
hf history [--workspace WS] [--limit N] [--branch NAME] [--task ID] [--json]
hf history [--organization ORG] [--limit N] [--branch NAME] [--task ID] [--json]
Text Output:
<iso8601>\t<event-kind>\t<branch|task|repo|->\t<payload-json>
@ -164,18 +250,23 @@ JSON Output:
[
{
"id": "...",
"workspaceId": "default",
"organizationId": "default",
"kind": "task.created",
"taskId": "...",
"repoId": "...",
"branchName": "feature/foo",
"payloadJson": "{\\"providerId\\":\\"local\\"}",
"payloadJson": "{\\"sandboxProviderId\\":\\"local\\"}",
"createdAt": 1770607522229
}
]
`);
}
/** Expands the task summary rows into full task records, fetched in parallel. */
async function listDetailedTasks(client: ReturnType<typeof createBackendClientFromConfig>, organizationId: string): Promise<TaskRecord[]> {
  const summaries = await client.listTasks(organizationId);
  const detailFetches = summaries.map((summary) => client.getTask(organizationId, summary.taskId));
  return Promise.all(detailFetches);
}
async function handleBackend(args: string[]): Promise<void> {
const sub = args[0] ?? "start";
const config = loadConfig();
@ -232,38 +323,38 @@ async function handleBackend(args: string[]): Promise<void> {
throw new Error(`Unknown backend subcommand: ${sub}`);
}
async function handleWorkspace(args: string[]): Promise<void> {
async function handleOrganization(args: string[]): Promise<void> {
const sub = args[0];
if (sub !== "use") {
throw new Error("Usage: hf workspace use <name>");
throw new Error("Usage: hf organization use <name>");
}
const name = args[1];
if (!name) {
throw new Error("Missing workspace name");
throw new Error("Missing organization name");
}
const config = loadConfig();
config.workspace.default = name;
config.organization.default = name;
saveConfig(config);
const client = createBackendClientFromConfig(config);
try {
await client.useWorkspace(name);
await client.useOrganization(name);
} catch {
// Backend may not be running yet. Config is already updated.
}
writeStdout(`workspace=${name}`);
writeStdout(`organization=${name}`);
}
async function handleList(args: string[]): Promise<void> {
const config = loadConfig();
const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config);
const organizationId = resolveOrganization(readOption(args, "--organization"), config);
const format = readOption(args, "--format") ?? "table";
const full = hasFlag(args, "--full");
const client = createBackendClientFromConfig(config);
const rows = await client.listTasks(workspaceId);
const rows = await listDetailedTasks(client, organizationId);
if (format === "json") {
writeStdout(JSON.stringify(rows, null, 2));
@ -277,10 +368,10 @@ async function handleList(args: string[]): Promise<void> {
for (const row of rows) {
const age = formatRelativeAge(row.updatedAt);
let line = `${row.taskId}\t${row.branchName}\t${row.status}\t${row.providerId}\t${age}`;
let line = `${row.taskId}\t${row.branchName}\t${row.status}\t${row.sandboxProviderId}\t${age}`;
if (full) {
const task = row.task.length > 60 ? `${row.task.slice(0, 57)}...` : row.task;
line += `\t${row.title}\t${task}\t${row.activeSessionId ?? "-"}\t${row.activeSandboxId ?? "-"}`;
const preview = row.task.length > 60 ? `${row.task.slice(0, 57)}...` : row.task;
line += `\t${row.title}\t${preview}\t${row.activeSessionId ?? "-"}\t${row.activeSandboxId ?? "-"}`;
}
writeStdout(line);
}
@ -292,9 +383,9 @@ async function handlePush(args: string[]): Promise<void> {
throw new Error("Missing task id for push");
}
const config = loadConfig();
const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config);
const organizationId = resolveOrganization(readOption(args, "--organization"), config);
const client = createBackendClientFromConfig(config);
await client.runAction(workspaceId, taskId, "push");
await client.runAction(organizationId, taskId, "push");
writeStdout("ok");
}
@ -304,9 +395,9 @@ async function handleSync(args: string[]): Promise<void> {
throw new Error("Missing task id for sync");
}
const config = loadConfig();
const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config);
const organizationId = resolveOrganization(readOption(args, "--organization"), config);
const client = createBackendClientFromConfig(config);
await client.runAction(workspaceId, taskId, "sync");
await client.runAction(organizationId, taskId, "sync");
writeStdout("ok");
}
@ -316,7 +407,7 @@ async function handleKill(args: string[]): Promise<void> {
throw new Error("Missing task id for kill");
}
const config = loadConfig();
const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config);
const organizationId = resolveOrganization(readOption(args, "--organization"), config);
const deleteBranch = hasFlag(args, "--delete-branch");
const abandon = hasFlag(args, "--abandon");
@ -328,17 +419,17 @@ async function handleKill(args: string[]): Promise<void> {
}
const client = createBackendClientFromConfig(config);
await client.runAction(workspaceId, taskId, "kill");
await client.runAction(organizationId, taskId, "kill");
writeStdout("ok");
}
async function handlePrune(args: string[]): Promise<void> {
const config = loadConfig();
const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config);
const organizationId = resolveOrganization(readOption(args, "--organization"), config);
const dryRun = hasFlag(args, "--dry-run");
const yes = hasFlag(args, "--yes");
const client = createBackendClientFromConfig(config);
const rows = await client.listTasks(workspaceId);
const rows = await listDetailedTasks(client, organizationId);
const prunable = rows.filter((r) => r.status === "archived" || r.status === "killed");
if (prunable.length === 0) {
@ -366,10 +457,10 @@ async function handlePrune(args: string[]): Promise<void> {
async function handleStatusline(args: string[]): Promise<void> {
const config = loadConfig();
const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config);
const organizationId = resolveOrganization(readOption(args, "--organization"), config);
const format = readOption(args, "--format") ?? "table";
const client = createBackendClientFromConfig(config);
const rows = await client.listTasks(workspaceId);
const rows = await listDetailedTasks(client, organizationId);
const summary = summarizeTasks(rows);
const running = summary.byStatus.running;
const idle = summary.byStatus.idle;
@ -402,7 +493,7 @@ async function handleDb(args: string[]): Promise<void> {
async function waitForTaskReady(
client: ReturnType<typeof createBackendClientFromConfig>,
workspaceId: string,
organizationId: string,
taskId: string,
timeoutMs: number,
): Promise<TaskRecord> {
@ -410,7 +501,7 @@ async function waitForTaskReady(
let delayMs = 250;
for (;;) {
const record = await client.getTask(workspaceId, taskId);
const record = await client.getTask(organizationId, taskId);
const hasName = Boolean(record.branchName && record.title);
const hasSandbox = Boolean(record.activeSandboxId);
@ -432,11 +523,11 @@ async function waitForTaskReady(
async function handleCreate(args: string[]): Promise<void> {
const config = loadConfig();
const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config);
const organizationId = resolveOrganization(readOption(args, "--organization"), config);
const repoRemote = readOption(args, "--repo");
if (!repoRemote) {
throw new Error("Missing required --repo <git-remote>");
const repoSelector = readOption(args, "--repo");
if (!repoSelector) {
throw new Error("Missing required --repo <repo-id|git-remote|owner/repo>");
}
const explicitBranchName = readOption(args, "--name") ?? readOption(args, "--branch");
const explicitTitle = readOption(args, "--title");
@ -446,15 +537,15 @@ async function handleCreate(args: string[]): Promise<void> {
const onBranch = readOption(args, "--on");
const taskFromArgs = positionals(args).join(" ").trim();
const task = taskFromArgs || openEditorForTask();
const taskPrompt = taskFromArgs || openEditorForTask();
const client = createBackendClientFromConfig(config);
const repo = await client.addRepo(workspaceId, repoRemote);
const repo = await resolveImportedRepo(client, organizationId, repoSelector);
const payload = CreateTaskInputSchema.parse({
workspaceId,
organizationId,
repoId: repo.repoId,
task,
task: taskPrompt,
explicitTitle: explicitTitle || undefined,
explicitBranchName: explicitBranchName || undefined,
agentType,
@ -462,30 +553,30 @@ async function handleCreate(args: string[]): Promise<void> {
});
const created = await client.createTask(payload);
const task = await waitForTaskReady(client, workspaceId, created.taskId, 180_000);
const switched = await client.switchTask(workspaceId, task.taskId);
const attached = await client.attachTask(workspaceId, task.taskId);
const createdTask = await waitForTaskReady(client, organizationId, created.taskId, 180_000);
const switched = await client.switchTask(organizationId, createdTask.taskId);
const attached = await client.attachTask(organizationId, createdTask.taskId);
writeStdout(`Branch: ${task.branchName ?? "-"}`);
writeStdout(`Task: ${task.taskId}`);
writeStdout(`Provider: ${task.providerId}`);
writeStdout(`Branch: ${createdTask.branchName ?? "-"}`);
writeStdout(`Task: ${createdTask.taskId}`);
writeStdout(`Provider: ${createdTask.sandboxProviderId}`);
writeStdout(`Session: ${attached.sessionId ?? "none"}`);
writeStdout(`Target: ${switched.switchTarget || attached.target}`);
writeStdout(`Title: ${task.title ?? "-"}`);
writeStdout(`Title: ${createdTask.title ?? "-"}`);
const tmuxResult = spawnCreateTmuxWindow({
branchName: task.branchName ?? task.taskId,
branchName: createdTask.branchName ?? createdTask.taskId,
targetPath: switched.switchTarget || attached.target,
sessionId: attached.sessionId,
});
if (tmuxResult.created) {
writeStdout(`Window: created (${task.branchName})`);
writeStdout(`Window: created (${createdTask.branchName})`);
return;
}
writeStdout("");
writeStdout(`Run: hf switch ${task.taskId}`);
writeStdout(`Run: hf switch ${createdTask.taskId}`);
if ((switched.switchTarget || attached.target).startsWith("/")) {
writeStdout(`cd ${switched.switchTarget || attached.target}`);
}
@ -493,8 +584,8 @@ async function handleCreate(args: string[]): Promise<void> {
async function handleTui(args: string[]): Promise<void> {
const config = loadConfig();
const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config);
await runTuiCommand(config, workspaceId);
const organizationId = resolveOrganization(readOption(args, "--organization"), config);
await runTuiCommand(config, organizationId);
}
async function handleStatus(args: string[]): Promise<void> {
@ -504,17 +595,17 @@ async function handleStatus(args: string[]): Promise<void> {
}
const config = loadConfig();
const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config);
const organizationId = resolveOrganization(readOption(args, "--organization"), config);
const client = createBackendClientFromConfig(config);
const backendStatus = await getBackendStatus(config.backend.host, config.backend.port);
const rows = await client.listTasks(workspaceId);
const rows = await listDetailedTasks(client, organizationId);
const summary = summarizeTasks(rows);
if (hasFlag(args, "--json")) {
writeStdout(
JSON.stringify(
{
workspaceId,
organizationId,
backend: backendStatus,
tasks: {
total: summary.total,
@ -529,7 +620,7 @@ async function handleStatus(args: string[]): Promise<void> {
return;
}
writeStdout(`workspace=${workspaceId}`);
writeStdout(`organization=${organizationId}`);
writeStdout(`backend running=${backendStatus.running} pid=${backendStatus.pid ?? "unknown"} version=${backendStatus.version ?? "unknown"}`);
writeStdout(`tasks total=${summary.total}`);
writeStdout(
@ -538,7 +629,7 @@ async function handleStatus(args: string[]): Promise<void> {
const providerSummary = Object.entries(summary.byProvider)
.map(([provider, count]) => `${provider}=${count}`)
.join(" ");
writeStdout(`providers ${providerSummary || "-"}`);
writeStdout(`sandboxProviders ${providerSummary || "-"}`);
}
async function handleHistory(args: string[]): Promise<void> {
@ -548,13 +639,13 @@ async function handleHistory(args: string[]): Promise<void> {
}
const config = loadConfig();
const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config);
const organizationId = resolveOrganization(readOption(args, "--organization"), config);
const limit = parseIntOption(readOption(args, "--limit"), 20, "limit");
const branch = readOption(args, "--branch");
const taskId = readOption(args, "--task");
const client = createBackendClientFromConfig(config);
const rows = await client.listHistory({
workspaceId,
organizationId,
limit,
branch: branch || undefined,
taskId: taskId || undefined,
@ -593,11 +684,11 @@ async function handleSwitchLike(cmd: string, args: string[]): Promise<void> {
}
const config = loadConfig();
const workspaceId = resolveWorkspace(readOption(args, "--workspace"), config);
const organizationId = resolveOrganization(readOption(args, "--organization"), config);
const client = createBackendClientFromConfig(config);
if (cmd === "switch" && taskId === "-") {
const rows = await client.listTasks(workspaceId);
const rows = await listDetailedTasks(client, organizationId);
const active = rows.filter((r) => {
const group = groupTaskStatus(r.status);
return group === "running" || group === "idle" || group === "queued";
@ -611,19 +702,19 @@ async function handleSwitchLike(cmd: string, args: string[]): Promise<void> {
}
if (cmd === "switch") {
const result = await client.switchTask(workspaceId, taskId);
const result = await client.switchTask(organizationId, taskId);
writeStdout(`cd ${result.switchTarget}`);
return;
}
if (cmd === "attach") {
const result = await client.attachTask(workspaceId, taskId);
const result = await client.attachTask(organizationId, taskId);
writeStdout(`target=${result.target} session=${result.sessionId ?? "none"}`);
return;
}
if (cmd === "merge" || cmd === "archive") {
await client.runAction(workspaceId, taskId, cmd);
await client.runAction(organizationId, taskId, cmd);
writeStdout("ok");
return;
}
@ -656,8 +747,8 @@ async function main(): Promise<void> {
return;
}
if (cmd === "workspace") {
await handleWorkspace(rest);
if (cmd === "organization") {
await handleOrganization(rest);
return;
}

View file

@ -2,7 +2,7 @@ import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
import { dirname } from "node:path";
import { homedir } from "node:os";
import * as toml from "@iarna/toml";
import { ConfigSchema, resolveWorkspaceId, type AppConfig } from "@sandbox-agent/foundry-shared";
import { ConfigSchema, resolveOrganizationId, type AppConfig } from "@sandbox-agent/foundry-shared";
export const CONFIG_PATH = `${homedir()}/.config/foundry/config.toml`;
@ -20,6 +20,6 @@ export function saveConfig(config: AppConfig, path = CONFIG_PATH): void {
writeFileSync(path, toml.stringify(config), "utf8");
}
export function resolveWorkspace(flagWorkspace: string | undefined, config: AppConfig): string {
return resolveWorkspaceId(flagWorkspace, config);
export function resolveOrganization(flagOrganization: string | undefined, config: AppConfig): string {
return resolveOrganizationId(flagOrganization, config);
}

View file

@ -588,7 +588,7 @@ function pointer(obj: JsonObject, parts: string[]): unknown {
function opencodeConfigPaths(baseDir: string): string[] {
const paths: string[] = [];
const rootish = opencodeProjectConfigPaths(baseDir);
const rootish = opencodeRepositoryConfigPaths(baseDir);
paths.push(...rootish);
const configDir = process.env.XDG_CONFIG_HOME || join(homedir(), ".config");
@ -611,12 +611,12 @@ function opencodeThemeDirs(configDir: string | undefined, baseDir: string): stri
dirs.push(join(xdgConfig, "opencode", "themes"));
dirs.push(join(homedir(), ".opencode", "themes"));
dirs.push(...opencodeProjectThemeDirs(baseDir));
dirs.push(...opencodeRepositoryThemeDirs(baseDir));
return dirs;
}
function opencodeProjectConfigPaths(baseDir: string): string[] {
function opencodeRepositoryConfigPaths(baseDir: string): string[] {
const dirs = ancestorDirs(baseDir);
const out: string[] = [];
for (const dir of dirs) {
@ -628,7 +628,7 @@ function opencodeProjectConfigPaths(baseDir: string): string[] {
return out;
}
function opencodeProjectThemeDirs(baseDir: string): string[] {
function opencodeRepositoryThemeDirs(baseDir: string): string[] {
const dirs = ancestorDirs(baseDir);
const out: string[] = [];
for (const dir of dirs) {

View file

@ -56,6 +56,11 @@ interface RenderOptions {
height?: number;
}
async function listDetailedTasks(client: ReturnType<typeof createBackendClientFromConfig>, organizationId: string): Promise<TaskRecord[]> {
const rows = await client.listTasks(organizationId);
return await Promise.all(rows.map(async (row) => await client.getTask(organizationId, row.taskId)));
}
function pad(input: string, width: number): string {
if (width <= 0) {
return "";
@ -183,7 +188,7 @@ function helpLines(width: number): string[] {
export function formatRows(
rows: TaskRecord[],
selected: number,
workspaceId: string,
organizationId: string,
status: string,
searchQuery = "",
showHelp = false,
@ -212,7 +217,7 @@ export function formatRows(
return `${marker}${pad(display.name, branchWidth)} ${pad(display.diff, COLUMN_WIDTHS.diff)} ${pad(display.agent, COLUMN_WIDTHS.agent)} ${pad(display.pr, COLUMN_WIDTHS.pr)} ${pad(display.author, COLUMN_WIDTHS.author)} ${pad(display.ci, COLUMN_WIDTHS.ci)} ${pad(display.review, COLUMN_WIDTHS.review)} ${pad(display.age, COLUMN_WIDTHS.age)}`;
});
const footer = fitLine(buildFooterLine(totalWidth, ["Ctrl-H:cheatsheet", `workspace:${workspaceId}`, status], `v${CLI_BUILD_ID}`), totalWidth);
const footer = fitLine(buildFooterLine(totalWidth, ["Ctrl-H:cheatsheet", `organization:${organizationId}`, status], `v${CLI_BUILD_ID}`), totalWidth);
const contentHeight = totalHeight - 1;
const lines = [...header, ...body].map((line) => fitLine(line, totalWidth));
@ -309,7 +314,7 @@ function buildStyledContent(content: string, theme: TuiTheme, api: StyledTextApi
return new api.StyledText(chunks);
}
export async function runTui(config: AppConfig, workspaceId: string): Promise<void> {
export async function runTui(config: AppConfig, organizationId: string): Promise<void> {
const core = (await import("@opentui/core")) as OpenTuiLike;
const createCliRenderer = core.createCliRenderer;
const TextRenderable = core.TextRenderable;
@ -359,7 +364,7 @@ export async function runTui(config: AppConfig, workspaceId: string): Promise<vo
if (closed) {
return;
}
const output = formatRows(filteredRows, selected, workspaceId, status, searchQuery, showHelp, {
const output = formatRows(filteredRows, selected, organizationId, status, searchQuery, showHelp, {
width: renderer.width ?? process.stdout.columns,
height: renderer.height ?? process.stdout.rows,
});
@ -372,7 +377,7 @@ export async function runTui(config: AppConfig, workspaceId: string): Promise<vo
return;
}
try {
allRows = await client.listTasks(workspaceId);
allRows = await listDetailedTasks(client, organizationId);
if (closed) {
return;
}
@ -517,7 +522,7 @@ export async function runTui(config: AppConfig, workspaceId: string): Promise<vo
render();
void (async () => {
try {
const result = await client.switchTask(workspaceId, row.taskId);
const result = await client.switchTask(organizationId, row.taskId);
close(`cd ${result.switchTarget}`);
} catch (err) {
busy = false;
@ -538,7 +543,7 @@ export async function runTui(config: AppConfig, workspaceId: string): Promise<vo
render();
void (async () => {
try {
const result = await client.attachTask(workspaceId, row.taskId);
const result = await client.attachTask(organizationId, row.taskId);
close(`target=${result.target} session=${result.sessionId ?? "none"}`);
} catch (err) {
busy = false;
@ -554,7 +559,7 @@ export async function runTui(config: AppConfig, workspaceId: string): Promise<vo
if (!row) {
return;
}
void runActionWithRefresh(`archiving ${row.taskId}`, async () => client.runAction(workspaceId, row.taskId, "archive"), `archived ${row.taskId}`);
void runActionWithRefresh(`archiving ${row.taskId}`, async () => client.runAction(organizationId, row.taskId, "archive"), `archived ${row.taskId}`);
return;
}
@ -563,7 +568,7 @@ export async function runTui(config: AppConfig, workspaceId: string): Promise<vo
if (!row) {
return;
}
void runActionWithRefresh(`syncing ${row.taskId}`, async () => client.runAction(workspaceId, row.taskId, "sync"), `synced ${row.taskId}`);
void runActionWithRefresh(`syncing ${row.taskId}`, async () => client.runAction(organizationId, row.taskId, "sync"), `synced ${row.taskId}`);
return;
}
@ -575,8 +580,8 @@ export async function runTui(config: AppConfig, workspaceId: string): Promise<vo
void runActionWithRefresh(
`merging ${row.taskId}`,
async () => {
await client.runAction(workspaceId, row.taskId, "merge");
await client.runAction(workspaceId, row.taskId, "archive");
await client.runAction(organizationId, row.taskId, "merge");
await client.runAction(organizationId, row.taskId, "archive");
},
`merged+archived ${row.taskId}`,
);

View file

@ -37,7 +37,7 @@ function healthyMetadataResponse(): { ok: boolean; json: () => Promise<unknown>
json: async () => ({
runtime: "rivetkit",
actorNames: {
workspace: {},
organization: {},
},
}),
};
@ -58,7 +58,7 @@ describe("backend manager", () => {
const config: AppConfig = ConfigSchema.parse({
auto_submit: true,
notify: ["terminal"],
workspace: { default: "default" },
organization: { default: "default" },
backend: {
host: "127.0.0.1",
port: 7741,
@ -68,7 +68,7 @@ describe("backend manager", () => {
backup_interval_secs: 3600,
backup_retention_days: 7,
},
providers: {
sandboxProviders: {
local: {},
e2b: {},
},

View file

@ -1,13 +1,13 @@
import { describe, expect, it } from "vitest";
import { ConfigSchema } from "@sandbox-agent/foundry-shared";
import { resolveWorkspace } from "../src/workspace/config.js";
import { resolveOrganization } from "../src/organization/config.js";
describe("cli workspace resolution", () => {
it("uses default workspace when no flag", () => {
describe("cli organization resolution", () => {
it("uses default organization when no flag", () => {
const config = ConfigSchema.parse({
auto_submit: true as const,
notify: ["terminal" as const],
workspace: { default: "team" },
organization: { default: "team" },
backend: {
host: "127.0.0.1",
port: 7741,
@ -17,13 +17,13 @@ describe("cli workspace resolution", () => {
backup_interval_secs: 3600,
backup_retention_days: 7,
},
providers: {
sandboxProviders: {
local: {},
e2b: {},
},
});
expect(resolveWorkspace(undefined, config)).toBe("team");
expect(resolveWorkspace("alpha", config)).toBe("alpha");
expect(resolveOrganization(undefined, config)).toBe("team");
expect(resolveOrganization("alpha", config)).toBe("alpha");
});
});

View file

@ -21,7 +21,7 @@ describe("resolveTuiTheme", () => {
const baseConfig: AppConfig = ConfigSchema.parse({
auto_submit: true,
notify: ["terminal"],
workspace: { default: "default" },
organization: { default: "default" },
backend: {
host: "127.0.0.1",
port: 7741,
@ -31,7 +31,7 @@ describe("resolveTuiTheme", () => {
backup_interval_secs: 3600,
backup_retention_days: 7,
},
providers: {
sandboxProviders: {
local: {},
e2b: {},
},

View file

@ -4,14 +4,14 @@ import { filterTasks, fuzzyMatch } from "@sandbox-agent/foundry-client";
import { formatRows } from "../src/tui.js";
const sample: TaskRecord = {
workspaceId: "default",
organizationId: "default",
repoId: "repo-a",
repoRemote: "https://example.com/repo-a.git",
taskId: "task-1",
branchName: "feature/test",
title: "Test Title",
task: "Do test",
providerId: "local",
sandboxProviderId: "local",
status: "running",
statusMessage: null,
activeSandboxId: "sandbox-1",
@ -19,7 +19,7 @@ const sample: TaskRecord = {
sandboxes: [
{
sandboxId: "sandbox-1",
providerId: "local",
sandboxProviderId: "local",
switchTarget: "sandbox://local/sandbox-1",
cwd: null,
createdAt: 1,

View file

@ -24,7 +24,7 @@ export interface FoundryAppClient {
cancelScheduledRenewal(organizationId: string): Promise<void>;
resumeSubscription(organizationId: string): Promise<void>;
reconnectGithub(organizationId: string): Promise<void>;
recordSeatUsage(workspaceId: string): Promise<void>;
recordSeatUsage(organizationId: string): Promise<void>;
}
export interface CreateFoundryAppClientOptions {

File diff suppressed because it is too large Load diff

View file

@ -1,10 +1,10 @@
export * from "./app-client.js";
export * from "./backend-client.js";
export * from "./interest/manager.js";
export * from "./interest/mock-manager.js";
export * from "./interest/remote-manager.js";
export * from "./interest/topics.js";
export * from "./interest/use-interest.js";
export * from "./subscription/manager.js";
export * from "./subscription/mock-manager.js";
export * from "./subscription/remote-manager.js";
export * from "./subscription/topics.js";
export * from "./subscription/use-subscription.js";
export * from "./keys.js";
export * from "./mock-app.js";
export * from "./view-model.js";

View file

@ -1,12 +0,0 @@
import { createMockBackendClient } from "../mock/backend-client.js";
import { RemoteInterestManager } from "./remote-manager.js";
/**
* Mock implementation shares the same interest-manager harness as the remote
* path, but uses the in-memory mock backend that synthesizes actor events.
*/
export class MockInterestManager extends RemoteInterestManager {
constructor() {
super(createMockBackendClient());
}
}

View file

@ -1,29 +1,21 @@
export type ActorKey = string[];
export function workspaceKey(workspaceId: string): ActorKey {
return ["ws", workspaceId];
export function organizationKey(organizationId: string): ActorKey {
return ["org", organizationId];
}
export function projectKey(workspaceId: string, repoId: string): ActorKey {
return ["ws", workspaceId, "project", repoId];
export function repositoryKey(organizationId: string, repoId: string): ActorKey {
return ["org", organizationId, "repository", repoId];
}
export function taskKey(workspaceId: string, repoId: string, taskId: string): ActorKey {
return ["ws", workspaceId, "project", repoId, "task", taskId];
export function taskKey(organizationId: string, repoId: string, taskId: string): ActorKey {
return ["org", organizationId, "repository", repoId, "task", taskId];
}
export function taskSandboxKey(workspaceId: string, sandboxId: string): ActorKey {
return ["ws", workspaceId, "sandbox", sandboxId];
export function taskSandboxKey(organizationId: string, sandboxId: string): ActorKey {
return ["org", organizationId, "sandbox", sandboxId];
}
export function historyKey(workspaceId: string, repoId: string): ActorKey {
return ["ws", workspaceId, "project", repoId, "history"];
}
export function projectPrSyncKey(workspaceId: string, repoId: string): ActorKey {
return ["ws", workspaceId, "project", repoId, "pr-sync"];
}
export function projectBranchSyncKey(workspaceId: string, repoId: string): ActorKey {
return ["ws", workspaceId, "project", repoId, "branch-sync"];
export function historyKey(organizationId: string, repoId: string): ActorKey {
return ["org", organizationId, "repository", repoId, "history"];
}

View file

@ -67,7 +67,7 @@ export interface MockFoundryOrganizationSettings {
export interface MockFoundryOrganization {
id: string;
workspaceId: string;
organizationId: string;
kind: MockOrganizationKind;
settings: MockFoundryOrganizationSettings;
github: MockFoundryGithubState;
@ -118,7 +118,7 @@ export interface MockFoundryAppClient {
cancelScheduledRenewal(organizationId: string): Promise<void>;
resumeSubscription(organizationId: string): Promise<void>;
reconnectGithub(organizationId: string): Promise<void>;
recordSeatUsage(workspaceId: string): void;
recordSeatUsage(organizationId: string): void;
}
const STORAGE_KEY = "sandbox-agent-foundry:mock-app:v1";
@ -173,7 +173,7 @@ function buildRivetOrganization(): MockFoundryOrganization {
return {
id: "rivet",
workspaceId: "rivet",
organizationId: "rivet",
kind: "organization",
settings: {
displayName: rivetDevFixture.name ?? rivetDevFixture.login,
@ -254,7 +254,7 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot {
organizations: [
{
id: "personal-nathan",
workspaceId: "personal-nathan",
organizationId: "personal-nathan",
kind: "personal",
settings: {
displayName: "Nathan",
@ -290,7 +290,7 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot {
},
{
id: "acme",
workspaceId: "acme",
organizationId: "acme",
kind: "organization",
settings: {
displayName: "Acme",
@ -335,7 +335,7 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot {
buildRivetOrganization(),
{
id: "personal-jamie",
workspaceId: "personal-jamie",
organizationId: "personal-jamie",
kind: "personal",
settings: {
displayName: "Jamie",
@ -659,8 +659,8 @@ class MockFoundryAppStore implements MockFoundryAppClient {
}));
}
recordSeatUsage(workspaceId: string): void {
const org = this.snapshot.organizations.find((candidate) => candidate.workspaceId === workspaceId);
recordSeatUsage(organizationId: string): void {
const org = this.snapshot.organizations.find((candidate) => candidate.organizationId === organizationId);
const currentUser = currentMockUser(this.snapshot);
if (!org || !currentUser) {
return;

View file

@ -1,5 +1,4 @@
import type {
AddRepoInput,
AppEvent,
CreateTaskInput,
FoundryAppSnapshot,
@ -17,21 +16,19 @@ import type {
TaskWorkbenchSetSessionUnreadInput,
TaskWorkbenchSendMessageInput,
TaskWorkbenchSnapshot,
TaskWorkbenchTabInput,
TaskWorkbenchSessionInput,
TaskWorkbenchUpdateDraftInput,
TaskEvent,
WorkbenchSessionDetail,
WorkbenchTaskDetail,
WorkbenchTaskSummary,
WorkspaceEvent,
WorkspaceSummarySnapshot,
OrganizationEvent,
OrganizationSummarySnapshot,
HistoryEvent,
HistoryQueryInput,
ProviderId,
SandboxProviderId,
RepoOverview,
RepoRecord,
RepoStackActionInput,
RepoStackActionResult,
StarSandboxAgentRepoResult,
SwitchResult,
} from "@sandbox-agent/foundry-shared";
@ -91,7 +88,7 @@ function toTaskStatus(status: TaskRecord["status"], archived: boolean): TaskReco
return status;
}
export function createMockBackendClient(defaultWorkspaceId = "default"): BackendClient {
export function createMockBackendClient(defaultOrganizationId = "default"): BackendClient {
const workbench = getSharedMockWorkbenchClient();
const listenersBySandboxId = new Map<string, Set<() => void>>();
const processesBySandboxId = new Map<string, MockProcessRecord[]>();
@ -176,9 +173,10 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
updatedAtMs: task.updatedAtMs,
branch: task.branch,
pullRequest: task.pullRequest,
sessionsSummary: task.tabs.map((tab) => ({
sessionsSummary: task.sessions.map((tab) => ({
id: tab.id,
sessionId: tab.sessionId,
sandboxSessionId: tab.sandboxSessionId ?? tab.sessionId,
sessionName: tab.sessionName,
agent: tab.agent,
model: tab.model,
@ -192,10 +190,10 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
const buildTaskDetail = (task: TaskWorkbenchSnapshot["tasks"][number]): WorkbenchTaskDetail => ({
...buildTaskSummary(task),
task: task.title,
agentType: task.tabs[0]?.agent === "Codex" ? "codex" : "claude",
agentType: task.sessions[0]?.agent === "Codex" ? "codex" : "claude",
runtimeStatus: toTaskStatus(task.status === "archived" ? "archived" : "running", task.status === "archived"),
statusMessage: task.status === "archived" ? "archived" : "mock sandbox ready",
activeSessionId: task.tabs[0]?.sessionId ?? null,
activeSessionId: task.sessions[0]?.sessionId ?? null,
diffStat: task.fileChanges.length > 0 ? `+${task.fileChanges.length}/-${task.fileChanges.length}` : "+0/-0",
prUrl: task.pullRequest ? `https://example.test/pr/${task.pullRequest.number}` : null,
reviewStatus: null,
@ -205,7 +203,7 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
minutesUsed: task.minutesUsed,
sandboxes: [
{
providerId: "local",
sandboxProviderId: "local",
sandboxId: task.id,
cwd: mockCwd(task.repoName, task.id),
},
@ -213,15 +211,14 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
activeSandboxId: task.id,
});
const buildSessionDetail = (task: TaskWorkbenchSnapshot["tasks"][number], tabId: string): WorkbenchSessionDetail => {
const tab = task.tabs.find((candidate) => candidate.id === tabId);
const buildSessionDetail = (task: TaskWorkbenchSnapshot["tasks"][number], sessionId: string): WorkbenchSessionDetail => {
const tab = task.sessions.find((candidate) => candidate.id === sessionId);
if (!tab) {
throw new Error(`Unknown mock tab ${tabId} for task ${task.id}`);
throw new Error(`Unknown mock session ${sessionId} for task ${task.id}`);
}
return {
sessionId: tab.id,
tabId: tab.id,
sandboxSessionId: tab.sessionId,
sandboxSessionId: tab.sandboxSessionId ?? tab.sessionId,
sessionName: tab.sessionName,
agent: tab.agent,
model: tab.model,
@ -234,11 +231,11 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
};
};
const buildWorkspaceSummary = (): WorkspaceSummarySnapshot => {
const buildOrganizationSummary = (): OrganizationSummarySnapshot => {
const snapshot = workbench.getSnapshot();
const taskSummaries = snapshot.tasks.map(buildTaskSummary);
return {
workspaceId: defaultWorkspaceId,
organizationId: defaultOrganizationId,
repos: snapshot.repos.map((repo) => {
const repoTasks = taskSummaries.filter((task) => task.repoId === repo.id);
return {
@ -253,39 +250,40 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
};
};
const workspaceScope = (workspaceId: string): string => `workspace:${workspaceId}`;
const taskScope = (workspaceId: string, repoId: string, taskId: string): string => `task:${workspaceId}:${repoId}:${taskId}`;
const sandboxScope = (workspaceId: string, providerId: string, sandboxId: string): string => `sandbox:${workspaceId}:${providerId}:${sandboxId}`;
const organizationScope = (organizationId: string): string => `organization:${organizationId}`;
const taskScope = (organizationId: string, repoId: string, taskId: string): string => `task:${organizationId}:${repoId}:${taskId}`;
const sandboxScope = (organizationId: string, sandboxProviderId: string, sandboxId: string): string =>
`sandbox:${organizationId}:${sandboxProviderId}:${sandboxId}`;
const emitWorkspaceSnapshot = (): void => {
const summary = buildWorkspaceSummary();
const emitOrganizationSnapshot = (): void => {
const summary = buildOrganizationSummary();
const latestTask = [...summary.taskSummaries].sort((left, right) => right.updatedAtMs - left.updatedAtMs)[0] ?? null;
if (latestTask) {
emitConnectionEvent(workspaceScope(defaultWorkspaceId), "workspaceUpdated", {
emitConnectionEvent(organizationScope(defaultOrganizationId), "organizationUpdated", {
type: "taskSummaryUpdated",
taskSummary: latestTask,
} satisfies WorkspaceEvent);
} satisfies OrganizationEvent);
}
};
const emitTaskUpdate = (taskId: string): void => {
const task = requireTask(taskId);
emitConnectionEvent(taskScope(defaultWorkspaceId, task.repoId, task.id), "taskUpdated", {
emitConnectionEvent(taskScope(defaultOrganizationId, task.repoId, task.id), "taskUpdated", {
type: "taskDetailUpdated",
detail: buildTaskDetail(task),
} satisfies TaskEvent);
};
const emitSessionUpdate = (taskId: string, tabId: string): void => {
const emitSessionUpdate = (taskId: string, sessionId: string): void => {
const task = requireTask(taskId);
emitConnectionEvent(taskScope(defaultWorkspaceId, task.repoId, task.id), "sessionUpdated", {
emitConnectionEvent(taskScope(defaultOrganizationId, task.repoId, task.id), "sessionUpdated", {
type: "sessionUpdated",
session: buildSessionDetail(task, tabId),
session: buildSessionDetail(task, sessionId),
} satisfies SessionEvent);
};
const emitSandboxProcessesUpdate = (sandboxId: string): void => {
emitConnectionEvent(sandboxScope(defaultWorkspaceId, "local", sandboxId), "processesUpdated", {
emitConnectionEvent(sandboxScope(defaultOrganizationId, "local", sandboxId), "processesUpdated", {
type: "processesUpdated",
processes: ensureProcessList(sandboxId).map((process) => cloneProcess(process)),
} satisfies SandboxProcessesEvent);
@ -296,22 +294,22 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
const cwd = mockCwd(task.repoName, task.id);
const archived = task.status === "archived";
return {
workspaceId: defaultWorkspaceId,
organizationId: defaultOrganizationId,
repoId: task.repoId,
repoRemote: mockRepoRemote(task.repoName),
taskId: task.id,
branchName: task.branch,
title: task.title,
task: task.title,
providerId: "local",
sandboxProviderId: "local",
status: toTaskStatus(archived ? "archived" : "running", archived),
statusMessage: archived ? "archived" : "mock sandbox ready",
activeSandboxId: task.id,
activeSessionId: task.tabs[0]?.sessionId ?? null,
activeSessionId: task.sessions[0]?.sessionId ?? null,
sandboxes: [
{
sandboxId: task.id,
providerId: "local",
sandboxProviderId: "local",
sandboxActorId: "mock-sandbox",
switchTarget: `mock://${task.id}`,
cwd,
@ -319,7 +317,7 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
updatedAt: task.updatedAtMs,
},
],
agentType: task.tabs[0]?.agent === "Codex" ? "codex" : "claude",
agentType: task.sessions[0]?.agent === "Codex" ? "codex" : "claude",
prSubmitted: Boolean(task.pullRequest),
diffStat: task.fileChanges.length > 0 ? `+${task.fileChanges.length}/-${task.fileChanges.length}` : "+0/-0",
prUrl: task.pullRequest ? `https://example.test/pr/${task.pullRequest.number}` : null,
@ -366,16 +364,16 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
return unsupportedAppSnapshot();
},
async connectWorkspace(workspaceId: string): Promise<ActorConn> {
return createConn(workspaceScope(workspaceId));
async connectOrganization(organizationId: string): Promise<ActorConn> {
return createConn(organizationScope(organizationId));
},
async connectTask(workspaceId: string, repoId: string, taskId: string): Promise<ActorConn> {
return createConn(taskScope(workspaceId, repoId, taskId));
async connectTask(organizationId: string, repoId: string, taskId: string): Promise<ActorConn> {
return createConn(taskScope(organizationId, repoId, taskId));
},
async connectSandbox(workspaceId: string, providerId: ProviderId, sandboxId: string): Promise<ActorConn> {
return createConn(sandboxScope(workspaceId, providerId, sandboxId));
async connectSandbox(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise<ActorConn> {
return createConn(sandboxScope(organizationId, sandboxProviderId, sandboxId));
},
subscribeApp(): () => void {
@ -434,13 +432,9 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
return unsupportedAppSnapshot();
},
async addRepo(_workspaceId: string, _remoteUrl: string): Promise<RepoRecord> {
notSupported("addRepo");
},
async listRepos(_workspaceId: string): Promise<RepoRecord[]> {
async listRepos(_organizationId: string): Promise<RepoRecord[]> {
return workbench.getSnapshot().repos.map((repo) => ({
workspaceId: defaultWorkspaceId,
organizationId: defaultOrganizationId,
repoId: repo.id,
remoteUrl: mockRepoRemote(repo.label),
createdAt: nowMs(),
@ -452,12 +446,12 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
notSupported("createTask");
},
async listTasks(_workspaceId: string, repoId?: string): Promise<TaskSummary[]> {
async listTasks(_organizationId: string, repoId?: string): Promise<TaskSummary[]> {
return workbench
.getSnapshot()
.tasks.filter((task) => !repoId || task.repoId === repoId)
.map((task) => ({
workspaceId: defaultWorkspaceId,
organizationId: defaultOrganizationId,
repoId: task.repoId,
taskId: task.id,
branchName: task.branch,
@ -467,15 +461,10 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
}));
},
async getRepoOverview(_workspaceId: string, _repoId: string): Promise<RepoOverview> {
async getRepoOverview(_organizationId: string, _repoId: string): Promise<RepoOverview> {
notSupported("getRepoOverview");
},
async runRepoStackAction(_input: RepoStackActionInput): Promise<RepoStackActionResult> {
notSupported("runRepoStackAction");
},
async getTask(_workspaceId: string, taskId: string): Promise<TaskRecord> {
async getTask(_organizationId: string, taskId: string): Promise<TaskRecord> {
return buildTaskRecord(taskId);
},
@ -483,23 +472,23 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
return [];
},
async switchTask(_workspaceId: string, taskId: string): Promise<SwitchResult> {
async switchTask(_organizationId: string, taskId: string): Promise<SwitchResult> {
return {
workspaceId: defaultWorkspaceId,
organizationId: defaultOrganizationId,
taskId,
providerId: "local",
sandboxProviderId: "local",
switchTarget: `mock://${taskId}`,
};
},
async attachTask(_workspaceId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> {
async attachTask(_organizationId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> {
return {
target: `mock://${taskId}`,
sessionId: requireTask(taskId).tabs[0]?.sessionId ?? null,
sessionId: requireTask(taskId).sessions[0]?.sessionId ?? null,
};
},
async runAction(_workspaceId: string, _taskId: string): Promise<void> {
async runAction(_organizationId: string, _taskId: string): Promise<void> {
notSupported("runAction");
},
@ -516,8 +505,8 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
},
async createSandboxProcess(input: {
workspaceId: string;
providerId: ProviderId;
organizationId: string;
sandboxProviderId: SandboxProviderId;
sandboxId: string;
request: ProcessCreateRequest;
}): Promise<SandboxProcessRecord> {
@ -529,15 +518,15 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
return cloneProcess(created);
},
async listSandboxProcesses(_workspaceId: string, _providerId: ProviderId, sandboxId: string): Promise<{ processes: SandboxProcessRecord[] }> {
async listSandboxProcesses(_organizationId: string, _providerId: SandboxProviderId, sandboxId: string): Promise<{ processes: SandboxProcessRecord[] }> {
return {
processes: ensureProcessList(sandboxId).map((process) => cloneProcess(process)),
};
},
async getSandboxProcessLogs(
_workspaceId: string,
_providerId: ProviderId,
_organizationId: string,
_providerId: SandboxProviderId,
sandboxId: string,
processId: string,
query?: ProcessLogFollowQuery,
@ -564,8 +553,8 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
},
async stopSandboxProcess(
_workspaceId: string,
_providerId: ProviderId,
_organizationId: string,
_providerId: SandboxProviderId,
sandboxId: string,
processId: string,
_query?: ProcessSignalQuery,
@ -583,8 +572,8 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
},
async killSandboxProcess(
_workspaceId: string,
_providerId: ProviderId,
_organizationId: string,
_providerId: SandboxProviderId,
sandboxId: string,
processId: string,
_query?: ProcessSignalQuery,
@ -601,7 +590,7 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
return cloneProcess(process);
},
async deleteSandboxProcess(_workspaceId: string, _providerId: ProviderId, sandboxId: string, processId: string): Promise<void> {
async deleteSandboxProcess(_organizationId: string, _providerId: SandboxProviderId, sandboxId: string, processId: string): Promise<void> {
processesBySandboxId.set(
sandboxId,
ensureProcessList(sandboxId).filter((candidate) => candidate.id !== processId),
@ -609,7 +598,7 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
notifySandbox(sandboxId);
},
subscribeSandboxProcesses(_workspaceId: string, _providerId: ProviderId, sandboxId: string, listener: () => void): () => void {
subscribeSandboxProcesses(_organizationId: string, _providerId: SandboxProviderId, sandboxId: string, listener: () => void): () => void {
let listeners = listenersBySandboxId.get(sandboxId);
if (!listeners) {
listeners = new Set();
@ -637,26 +626,26 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
},
async sandboxProviderState(
_workspaceId: string,
_providerId: ProviderId,
_organizationId: string,
_providerId: SandboxProviderId,
sandboxId: string,
): Promise<{ providerId: ProviderId; sandboxId: string; state: string; at: number }> {
return { providerId: "local", sandboxId, state: "running", at: nowMs() };
): Promise<{ sandboxProviderId: SandboxProviderId; sandboxId: string; state: string; at: number }> {
return { sandboxProviderId: "local", sandboxId, state: "running", at: nowMs() };
},
async getSandboxAgentConnection(): Promise<{ endpoint: string; token?: string }> {
return { endpoint: "mock://terminal-unavailable" };
},
async getWorkspaceSummary(): Promise<WorkspaceSummarySnapshot> {
return buildWorkspaceSummary();
async getOrganizationSummary(): Promise<OrganizationSummarySnapshot> {
return buildOrganizationSummary();
},
async getTaskDetail(_workspaceId: string, _repoId: string, taskId: string): Promise<WorkbenchTaskDetail> {
async getTaskDetail(_organizationId: string, _repoId: string, taskId: string): Promise<WorkbenchTaskDetail> {
return buildTaskDetail(requireTask(taskId));
},
async getSessionDetail(_workspaceId: string, _repoId: string, taskId: string, sessionId: string): Promise<WorkbenchSessionDetail> {
async getSessionDetail(_organizationId: string, _repoId: string, taskId: string, sessionId: string): Promise<WorkbenchSessionDetail> {
return buildSessionDetail(requireTask(taskId), sessionId);
},
@ -664,103 +653,103 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
return workbench.getSnapshot();
},
subscribeWorkbench(_workspaceId: string, listener: () => void): () => void {
subscribeWorkbench(_organizationId: string, listener: () => void): () => void {
return workbench.subscribe(listener);
},
async createWorkbenchTask(_workspaceId: string, input: TaskWorkbenchCreateTaskInput): Promise<TaskWorkbenchCreateTaskResponse> {
async createWorkbenchTask(_organizationId: string, input: TaskWorkbenchCreateTaskInput): Promise<TaskWorkbenchCreateTaskResponse> {
const created = await workbench.createTask(input);
emitWorkspaceSnapshot();
emitOrganizationSnapshot();
emitTaskUpdate(created.taskId);
if (created.tabId) {
emitSessionUpdate(created.taskId, created.tabId);
if (created.sessionId) {
emitSessionUpdate(created.taskId, created.sessionId);
}
return created;
},
async markWorkbenchUnread(_workspaceId: string, input: TaskWorkbenchSelectInput): Promise<void> {
async markWorkbenchUnread(_organizationId: string, input: TaskWorkbenchSelectInput): Promise<void> {
await workbench.markTaskUnread(input);
emitWorkspaceSnapshot();
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
},
async renameWorkbenchTask(_workspaceId: string, input: TaskWorkbenchRenameInput): Promise<void> {
async renameWorkbenchTask(_organizationId: string, input: TaskWorkbenchRenameInput): Promise<void> {
await workbench.renameTask(input);
emitWorkspaceSnapshot();
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
},
async renameWorkbenchBranch(_workspaceId: string, input: TaskWorkbenchRenameInput): Promise<void> {
async renameWorkbenchBranch(_organizationId: string, input: TaskWorkbenchRenameInput): Promise<void> {
await workbench.renameBranch(input);
emitWorkspaceSnapshot();
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
},
async createWorkbenchSession(_workspaceId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ tabId: string }> {
const created = await workbench.addTab(input);
emitWorkspaceSnapshot();
async createWorkbenchSession(_organizationId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }> {
const created = await workbench.addSession(input);
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
emitSessionUpdate(input.taskId, created.tabId);
emitSessionUpdate(input.taskId, created.sessionId);
return created;
},
async renameWorkbenchSession(_workspaceId: string, input: TaskWorkbenchRenameSessionInput): Promise<void> {
async renameWorkbenchSession(_organizationId: string, input: TaskWorkbenchRenameSessionInput): Promise<void> {
await workbench.renameSession(input);
emitWorkspaceSnapshot();
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
emitSessionUpdate(input.taskId, input.tabId);
emitSessionUpdate(input.taskId, input.sessionId);
},
async setWorkbenchSessionUnread(_workspaceId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise<void> {
async setWorkbenchSessionUnread(_organizationId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise<void> {
await workbench.setSessionUnread(input);
emitWorkspaceSnapshot();
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
emitSessionUpdate(input.taskId, input.tabId);
emitSessionUpdate(input.taskId, input.sessionId);
},
async updateWorkbenchDraft(_workspaceId: string, input: TaskWorkbenchUpdateDraftInput): Promise<void> {
async updateWorkbenchDraft(_organizationId: string, input: TaskWorkbenchUpdateDraftInput): Promise<void> {
await workbench.updateDraft(input);
emitWorkspaceSnapshot();
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
emitSessionUpdate(input.taskId, input.tabId);
emitSessionUpdate(input.taskId, input.sessionId);
},
async changeWorkbenchModel(_workspaceId: string, input: TaskWorkbenchChangeModelInput): Promise<void> {
async changeWorkbenchModel(_organizationId: string, input: TaskWorkbenchChangeModelInput): Promise<void> {
await workbench.changeModel(input);
emitWorkspaceSnapshot();
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
emitSessionUpdate(input.taskId, input.tabId);
emitSessionUpdate(input.taskId, input.sessionId);
},
async sendWorkbenchMessage(_workspaceId: string, input: TaskWorkbenchSendMessageInput): Promise<void> {
async sendWorkbenchMessage(_organizationId: string, input: TaskWorkbenchSendMessageInput): Promise<void> {
await workbench.sendMessage(input);
emitWorkspaceSnapshot();
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
emitSessionUpdate(input.taskId, input.tabId);
emitSessionUpdate(input.taskId, input.sessionId);
},
async stopWorkbenchSession(_workspaceId: string, input: TaskWorkbenchTabInput): Promise<void> {
async stopWorkbenchSession(_organizationId: string, input: TaskWorkbenchSessionInput): Promise<void> {
await workbench.stopAgent(input);
emitWorkspaceSnapshot();
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
emitSessionUpdate(input.taskId, input.tabId);
emitSessionUpdate(input.taskId, input.sessionId);
},
async closeWorkbenchSession(_workspaceId: string, input: TaskWorkbenchTabInput): Promise<void> {
await workbench.closeTab(input);
emitWorkspaceSnapshot();
async closeWorkbenchSession(_organizationId: string, input: TaskWorkbenchSessionInput): Promise<void> {
await workbench.closeSession(input);
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
},
async publishWorkbenchPr(_workspaceId: string, input: TaskWorkbenchSelectInput): Promise<void> {
async publishWorkbenchPr(_organizationId: string, input: TaskWorkbenchSelectInput): Promise<void> {
await workbench.publishPr(input);
emitWorkspaceSnapshot();
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
},
async revertWorkbenchFile(_workspaceId: string, input: TaskWorkbenchDiffInput): Promise<void> {
async revertWorkbenchFile(_organizationId: string, input: TaskWorkbenchDiffInput): Promise<void> {
await workbench.revertFile(input);
emitWorkspaceSnapshot();
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
},
@ -776,8 +765,8 @@ export function createMockBackendClient(defaultWorkspaceId = "default"): Backend
return { ok: true };
},
async useWorkspace(workspaceId: string): Promise<{ workspaceId: string }> {
return { workspaceId };
async useOrganization(organizationId: string): Promise<{ organizationId: string }> {
return { organizationId };
},
async starSandboxAgentRepo(): Promise<StarSandboxAgentRepoResult> {

View file

@ -1,7 +1,7 @@
import {
MODEL_GROUPS,
buildInitialMockLayoutViewModel,
groupWorkbenchProjects,
groupWorkbenchRepositories,
nowMs,
providerAgent,
randomReply,
@ -10,7 +10,7 @@ import {
uid,
} from "../workbench-model.js";
import type {
TaskWorkbenchAddTabResponse,
TaskWorkbenchAddSessionResponse,
TaskWorkbenchChangeModelInput,
TaskWorkbenchCreateTaskInput,
TaskWorkbenchCreateTaskResponse,
@ -21,9 +21,9 @@ import type {
TaskWorkbenchSetSessionUnreadInput,
TaskWorkbenchSendMessageInput,
TaskWorkbenchSnapshot,
TaskWorkbenchTabInput,
TaskWorkbenchSessionInput,
TaskWorkbenchUpdateDraftInput,
WorkbenchAgentTab as AgentTab,
WorkbenchSession as AgentSession,
WorkbenchTask as Task,
WorkbenchTranscriptEvent as TranscriptEvent,
} from "@sandbox-agent/foundry-shared";
@ -65,7 +65,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
async createTask(input: TaskWorkbenchCreateTaskInput): Promise<TaskWorkbenchCreateTaskResponse> {
const id = uid();
const tabId = `session-${id}`;
const sessionId = `session-${id}`;
const repo = this.snapshot.repos.find((candidate) => candidate.id === input.repoId);
if (!repo) {
throw new Error(`Cannot create mock task for unknown repo ${input.repoId}`);
@ -79,10 +79,10 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
updatedAtMs: nowMs(),
branch: input.branch?.trim() || null,
pullRequest: null,
tabs: [
sessions: [
{
id: tabId,
sessionId: tabId,
id: sessionId,
sessionId: sessionId,
sessionName: "Session 1",
agent: providerAgent(
MODEL_GROUPS.find((group) => group.models.some((model) => model.id === (input.model ?? "claude-sonnet-4")))?.provider ?? "Claude",
@ -106,19 +106,19 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
...current,
tasks: [nextTask, ...current.tasks],
}));
return { taskId: id, tabId };
return { taskId: id, sessionId };
}
async markTaskUnread(input: TaskWorkbenchSelectInput): Promise<void> {
this.updateTask(input.taskId, (task) => {
const targetTab = task.tabs[task.tabs.length - 1] ?? null;
if (!targetTab) {
const targetSession = task.sessions[task.sessions.length - 1] ?? null;
if (!targetSession) {
return task;
}
return {
...task,
tabs: task.tabs.map((tab) => (tab.id === targetTab.id ? { ...tab, unread: true } : tab)),
sessions: task.sessions.map((session) => (session.id === targetSession.id ? { ...session, unread: true } : session)),
};
});
}
@ -168,12 +168,12 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
}
async updateDraft(input: TaskWorkbenchUpdateDraftInput): Promise<void> {
this.assertTab(input.taskId, input.tabId);
this.assertSession(input.taskId, input.sessionId);
this.updateTask(input.taskId, (task) => ({
...task,
updatedAtMs: nowMs(),
tabs: task.tabs.map((tab) =>
tab.id === input.tabId
sessions: task.sessions.map((tab) =>
tab.id === input.sessionId
? {
...tab,
draft: {
@ -193,7 +193,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
throw new Error(`Cannot send an empty mock prompt for task ${input.taskId}`);
}
this.assertTab(input.taskId, input.tabId);
this.assertSession(input.taskId, input.sessionId);
const startedAtMs = nowMs();
this.updateTask(input.taskId, (currentTask) => {
@ -202,10 +202,10 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
const newBranch = isFirstOnTask ? `feat/${slugify(newTitle)}` : currentTask.branch;
const userMessageLines = [text, ...input.attachments.map((attachment) => `@ ${attachment.filePath}:${attachment.lineNumber}`)];
const userEvent = buildTranscriptEvent({
sessionId: input.tabId,
sessionId: input.sessionId,
sender: "client",
createdAt: startedAtMs,
eventIndex: candidateEventIndex(currentTask, input.tabId),
eventIndex: candidateEventIndex(currentTask, input.sessionId),
payload: {
method: "session/prompt",
params: {
@ -220,8 +220,8 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
branch: newBranch,
status: "running",
updatedAtMs: startedAtMs,
tabs: currentTask.tabs.map((candidate) =>
candidate.id === input.tabId
sessions: currentTask.sessions.map((candidate) =>
candidate.id === input.sessionId
? {
...candidate,
created: true,
@ -236,20 +236,20 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
};
});
const existingTimer = this.pendingTimers.get(input.tabId);
const existingTimer = this.pendingTimers.get(input.sessionId);
if (existingTimer) {
clearTimeout(existingTimer);
}
const timer = setTimeout(() => {
const task = this.requireTask(input.taskId);
const replyTab = this.requireTab(task, input.tabId);
this.requireSession(task, input.sessionId);
const completedAtMs = nowMs();
const replyEvent = buildTranscriptEvent({
sessionId: input.tabId,
sessionId: input.sessionId,
sender: "agent",
createdAt: completedAtMs,
eventIndex: candidateEventIndex(task, input.tabId),
eventIndex: candidateEventIndex(task, input.sessionId),
payload: {
result: {
text: randomReply(),
@ -259,8 +259,8 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
});
this.updateTask(input.taskId, (currentTask) => {
const updatedTabs = currentTask.tabs.map((candidate) => {
if (candidate.id !== input.tabId) {
const updatedTabs = currentTask.sessions.map((candidate) => {
if (candidate.id !== input.sessionId) {
return candidate;
}
@ -277,35 +277,35 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
return {
...currentTask,
updatedAtMs: completedAtMs,
tabs: updatedTabs,
sessions: updatedTabs,
status: currentTask.status === "archived" ? "archived" : anyRunning ? "running" : "idle",
};
});
this.pendingTimers.delete(input.tabId);
this.pendingTimers.delete(input.sessionId);
}, 2_500);
this.pendingTimers.set(input.tabId, timer);
this.pendingTimers.set(input.sessionId, timer);
}
async stopAgent(input: TaskWorkbenchTabInput): Promise<void> {
this.assertTab(input.taskId, input.tabId);
const existing = this.pendingTimers.get(input.tabId);
async stopAgent(input: TaskWorkbenchSessionInput): Promise<void> {
this.assertSession(input.taskId, input.sessionId);
const existing = this.pendingTimers.get(input.sessionId);
if (existing) {
clearTimeout(existing);
this.pendingTimers.delete(input.tabId);
this.pendingTimers.delete(input.sessionId);
}
this.updateTask(input.taskId, (currentTask) => {
const updatedTabs = currentTask.tabs.map((candidate) =>
candidate.id === input.tabId ? { ...candidate, status: "idle" as const, thinkingSinceMs: null } : candidate,
const updatedTabs = currentTask.sessions.map((candidate) =>
candidate.id === input.sessionId ? { ...candidate, status: "idle" as const, thinkingSinceMs: null } : candidate,
);
const anyRunning = updatedTabs.some((candidate) => candidate.status === "running");
return {
...currentTask,
updatedAtMs: nowMs(),
tabs: updatedTabs,
sessions: updatedTabs,
status: currentTask.status === "archived" ? "archived" : anyRunning ? "running" : "idle",
};
});
@ -314,40 +314,42 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
async setSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise<void> {
this.updateTask(input.taskId, (currentTask) => ({
...currentTask,
tabs: currentTask.tabs.map((candidate) => (candidate.id === input.tabId ? { ...candidate, unread: input.unread } : candidate)),
sessions: currentTask.sessions.map((candidate) => (candidate.id === input.sessionId ? { ...candidate, unread: input.unread } : candidate)),
}));
}
async renameSession(input: TaskWorkbenchRenameSessionInput): Promise<void> {
const title = input.title.trim();
if (!title) {
throw new Error(`Cannot rename session ${input.tabId} to an empty title`);
throw new Error(`Cannot rename session ${input.sessionId} to an empty title`);
}
this.updateTask(input.taskId, (currentTask) => ({
...currentTask,
tabs: currentTask.tabs.map((candidate) => (candidate.id === input.tabId ? { ...candidate, sessionName: title } : candidate)),
sessions: currentTask.sessions.map((candidate) => (candidate.id === input.sessionId ? { ...candidate, sessionName: title } : candidate)),
}));
}
async closeTab(input: TaskWorkbenchTabInput): Promise<void> {
async closeSession(input: TaskWorkbenchSessionInput): Promise<void> {
this.updateTask(input.taskId, (currentTask) => {
if (currentTask.tabs.length <= 1) {
if (currentTask.sessions.length <= 1) {
return currentTask;
}
return {
...currentTask,
tabs: currentTask.tabs.filter((candidate) => candidate.id !== input.tabId),
sessions: currentTask.sessions.filter((candidate) => candidate.id !== input.sessionId),
};
});
}
async addTab(input: TaskWorkbenchSelectInput): Promise<TaskWorkbenchAddTabResponse> {
async addSession(input: TaskWorkbenchSelectInput): Promise<TaskWorkbenchAddSessionResponse> {
this.assertTask(input.taskId);
const nextTab: AgentTab = {
id: uid(),
sessionId: null,
sessionName: `Session ${this.requireTask(input.taskId).tabs.length + 1}`,
const nextSessionId = uid();
const nextSession: AgentSession = {
id: nextSessionId,
sessionId: nextSessionId,
sandboxSessionId: null,
sessionName: `Session ${this.requireTask(input.taskId).sessions.length + 1}`,
agent: "Claude",
model: "claude-sonnet-4",
status: "idle",
@ -361,9 +363,9 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
this.updateTask(input.taskId, (currentTask) => ({
...currentTask,
updatedAtMs: nowMs(),
tabs: [...currentTask.tabs, nextTab],
sessions: [...currentTask.sessions, nextSession],
}));
return { tabId: nextTab.id };
return { sessionId: nextSession.id };
}
async changeModel(input: TaskWorkbenchChangeModelInput): Promise<void> {
@ -374,8 +376,8 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
this.updateTask(input.taskId, (currentTask) => ({
...currentTask,
tabs: currentTask.tabs.map((candidate) =>
candidate.id === input.tabId ? { ...candidate, model: input.model, agent: providerAgent(group.provider) } : candidate,
sessions: currentTask.sessions.map((candidate) =>
candidate.id === input.sessionId ? { ...candidate, model: input.model, agent: providerAgent(group.provider) } : candidate,
),
}));
}
@ -384,7 +386,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
const nextSnapshot = updater(this.snapshot);
this.snapshot = {
...nextSnapshot,
projects: groupWorkbenchProjects(nextSnapshot.repos, nextSnapshot.tasks),
repositories: groupWorkbenchRepositories(nextSnapshot.repos, nextSnapshot.tasks),
};
this.notify();
}
@ -407,9 +409,9 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
this.requireTask(taskId);
}
private assertTab(taskId: string, tabId: string): void {
private assertSession(taskId: string, sessionId: string): void {
const task = this.requireTask(taskId);
this.requireTab(task, tabId);
this.requireSession(task, sessionId);
}
private requireTask(taskId: string): Task {
@ -420,18 +422,18 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
return task;
}
private requireTab(task: Task, tabId: string): AgentTab {
const tab = task.tabs.find((candidate) => candidate.id === tabId);
if (!tab) {
throw new Error(`Unable to find mock tab ${tabId} in task ${task.id}`);
private requireSession(task: Task, sessionId: string): AgentSession {
const session = task.sessions.find((candidate) => candidate.id === sessionId);
if (!session) {
throw new Error(`Unable to find mock session ${sessionId} in task ${task.id}`);
}
return tab;
return session;
}
}
function candidateEventIndex(task: Task, tabId: string): number {
const tab = task.tabs.find((candidate) => candidate.id === tabId);
return (tab?.transcript.length ?? 0) + 1;
function candidateEventIndex(task: Task, sessionId: string): number {
const session = task.sessions.find((candidate) => candidate.id === sessionId);
return (session?.transcript.length ?? 0) + 1;
}
let sharedMockWorkbenchClient: TaskWorkbenchClient | null = null;

View file

@ -104,8 +104,8 @@ class RemoteFoundryAppStore implements FoundryAppClient {
await this.backend.reconnectAppGithub(organizationId);
}
async recordSeatUsage(workspaceId: string): Promise<void> {
this.snapshot = await this.backend.recordAppSeatUsage(workspaceId);
async recordSeatUsage(organizationId: string): Promise<void> {
this.snapshot = await this.backend.recordAppSeatUsage(organizationId);
this.notify();
}

View file

@ -1,5 +1,5 @@
import type {
TaskWorkbenchAddTabResponse,
TaskWorkbenchAddSessionResponse,
TaskWorkbenchChangeModelInput,
TaskWorkbenchCreateTaskInput,
TaskWorkbenchCreateTaskResponse,
@ -10,21 +10,21 @@ import type {
TaskWorkbenchSetSessionUnreadInput,
TaskWorkbenchSendMessageInput,
TaskWorkbenchSnapshot,
TaskWorkbenchTabInput,
TaskWorkbenchSessionInput,
TaskWorkbenchUpdateDraftInput,
} from "@sandbox-agent/foundry-shared";
import type { BackendClient } from "../backend-client.js";
import { groupWorkbenchProjects } from "../workbench-model.js";
import { groupWorkbenchRepositories } from "../workbench-model.js";
import type { TaskWorkbenchClient } from "../workbench-client.js";
export interface RemoteWorkbenchClientOptions {
backend: BackendClient;
workspaceId: string;
organizationId: string;
}
class RemoteWorkbenchStore implements TaskWorkbenchClient {
private readonly backend: BackendClient;
private readonly workspaceId: string;
private readonly organizationId: string;
private snapshot: TaskWorkbenchSnapshot;
private readonly listeners = new Set<() => void>();
private unsubscribeWorkbench: (() => void) | null = null;
@ -33,11 +33,11 @@ class RemoteWorkbenchStore implements TaskWorkbenchClient {
constructor(options: RemoteWorkbenchClientOptions) {
this.backend = options.backend;
this.workspaceId = options.workspaceId;
this.organizationId = options.organizationId;
this.snapshot = {
workspaceId: options.workspaceId,
organizationId: options.organizationId,
repos: [],
projects: [],
repositories: [],
tasks: [],
};
}
@ -63,86 +63,86 @@ class RemoteWorkbenchStore implements TaskWorkbenchClient {
}
async createTask(input: TaskWorkbenchCreateTaskInput): Promise<TaskWorkbenchCreateTaskResponse> {
const created = await this.backend.createWorkbenchTask(this.workspaceId, input);
const created = await this.backend.createWorkbenchTask(this.organizationId, input);
await this.refresh();
return created;
}
async markTaskUnread(input: TaskWorkbenchSelectInput): Promise<void> {
await this.backend.markWorkbenchUnread(this.workspaceId, input);
await this.backend.markWorkbenchUnread(this.organizationId, input);
await this.refresh();
}
async renameTask(input: TaskWorkbenchRenameInput): Promise<void> {
await this.backend.renameWorkbenchTask(this.workspaceId, input);
await this.backend.renameWorkbenchTask(this.organizationId, input);
await this.refresh();
}
async renameBranch(input: TaskWorkbenchRenameInput): Promise<void> {
await this.backend.renameWorkbenchBranch(this.workspaceId, input);
await this.backend.renameWorkbenchBranch(this.organizationId, input);
await this.refresh();
}
async archiveTask(input: TaskWorkbenchSelectInput): Promise<void> {
await this.backend.runAction(this.workspaceId, input.taskId, "archive");
await this.backend.runAction(this.organizationId, input.taskId, "archive");
await this.refresh();
}
async publishPr(input: TaskWorkbenchSelectInput): Promise<void> {
await this.backend.publishWorkbenchPr(this.workspaceId, input);
await this.backend.publishWorkbenchPr(this.organizationId, input);
await this.refresh();
}
async revertFile(input: TaskWorkbenchDiffInput): Promise<void> {
await this.backend.revertWorkbenchFile(this.workspaceId, input);
await this.backend.revertWorkbenchFile(this.organizationId, input);
await this.refresh();
}
async updateDraft(input: TaskWorkbenchUpdateDraftInput): Promise<void> {
await this.backend.updateWorkbenchDraft(this.workspaceId, input);
await this.backend.updateWorkbenchDraft(this.organizationId, input);
// Skip refresh — the server broadcast will trigger it, and the frontend
// holds local draft state to avoid the round-trip overwriting user input.
}
async sendMessage(input: TaskWorkbenchSendMessageInput): Promise<void> {
await this.backend.sendWorkbenchMessage(this.workspaceId, input);
await this.backend.sendWorkbenchMessage(this.organizationId, input);
await this.refresh();
}
async stopAgent(input: TaskWorkbenchTabInput): Promise<void> {
await this.backend.stopWorkbenchSession(this.workspaceId, input);
async stopAgent(input: TaskWorkbenchSessionInput): Promise<void> {
await this.backend.stopWorkbenchSession(this.organizationId, input);
await this.refresh();
}
async setSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise<void> {
await this.backend.setWorkbenchSessionUnread(this.workspaceId, input);
await this.backend.setWorkbenchSessionUnread(this.organizationId, input);
await this.refresh();
}
async renameSession(input: TaskWorkbenchRenameSessionInput): Promise<void> {
await this.backend.renameWorkbenchSession(this.workspaceId, input);
await this.backend.renameWorkbenchSession(this.organizationId, input);
await this.refresh();
}
async closeTab(input: TaskWorkbenchTabInput): Promise<void> {
await this.backend.closeWorkbenchSession(this.workspaceId, input);
async closeSession(input: TaskWorkbenchSessionInput): Promise<void> {
await this.backend.closeWorkbenchSession(this.organizationId, input);
await this.refresh();
}
async addTab(input: TaskWorkbenchSelectInput): Promise<TaskWorkbenchAddTabResponse> {
const created = await this.backend.createWorkbenchSession(this.workspaceId, input);
async addSession(input: TaskWorkbenchSelectInput): Promise<TaskWorkbenchAddSessionResponse> {
const created = await this.backend.createWorkbenchSession(this.organizationId, input);
await this.refresh();
return created;
}
async changeModel(input: TaskWorkbenchChangeModelInput): Promise<void> {
await this.backend.changeWorkbenchModel(this.workspaceId, input);
await this.backend.changeWorkbenchModel(this.organizationId, input);
await this.refresh();
}
private ensureStarted(): void {
if (!this.unsubscribeWorkbench) {
this.unsubscribeWorkbench = this.backend.subscribeWorkbench(this.workspaceId, () => {
this.unsubscribeWorkbench = this.backend.subscribeWorkbench(this.organizationId, () => {
void this.refresh().catch(() => {
this.scheduleRefreshRetry();
});
@ -173,14 +173,14 @@ class RemoteWorkbenchStore implements TaskWorkbenchClient {
}
this.refreshPromise = (async () => {
const nextSnapshot = await this.backend.getWorkbench(this.workspaceId);
const nextSnapshot = await this.backend.getWorkbench(this.organizationId);
if (this.refreshRetryTimeout) {
clearTimeout(this.refreshRetryTimeout);
this.refreshRetryTimeout = null;
}
this.snapshot = {
...nextSnapshot,
projects: nextSnapshot.projects ?? groupWorkbenchProjects(nextSnapshot.repos, nextSnapshot.tasks),
repositories: nextSnapshot.repositories ?? groupWorkbenchRepositories(nextSnapshot.repos, nextSnapshot.tasks),
};
for (const listener of [...this.listeners]) {
listener();

View file

@ -2,7 +2,7 @@ import type { TopicData, TopicKey, TopicParams } from "./topics.js";
export type TopicStatus = "loading" | "connected" | "error";
export interface DebugInterestTopic {
export interface DebugSubscriptionTopic {
topicKey: TopicKey;
cacheKey: string;
listenerCount: number;
@ -17,17 +17,17 @@ export interface TopicState<K extends TopicKey> {
}
/**
* The InterestManager owns all realtime actor connections and cached state.
* The SubscriptionManager owns all realtime actor connections and cached state.
*
* Multiple subscribers to the same topic share one connection and one cache
* entry. After the last subscriber leaves, a short grace period keeps the
* connection warm so navigation does not thrash actor connections.
*/
export interface InterestManager {
export interface SubscriptionManager {
subscribe<K extends TopicKey>(topicKey: K, params: TopicParams<K>, listener: () => void): () => void;
getSnapshot<K extends TopicKey>(topicKey: K, params: TopicParams<K>): TopicData<K> | undefined;
getStatus<K extends TopicKey>(topicKey: K, params: TopicParams<K>): TopicStatus;
getError<K extends TopicKey>(topicKey: K, params: TopicParams<K>): Error | null;
listDebugTopics(): DebugInterestTopic[];
listDebugTopics(): DebugSubscriptionTopic[];
dispose(): void;
}

View file

@ -0,0 +1,12 @@
import { createMockBackendClient } from "../mock/backend-client.js";
import { RemoteSubscriptionManager } from "./remote-manager.js";
/**
* Mock implementation shares the same subscription-manager harness as the remote
* path, but uses the in-memory mock backend that synthesizes actor events.
*/
export class MockSubscriptionManager extends RemoteSubscriptionManager {
constructor() {
super(createMockBackendClient());
}
}

View file

@ -1,14 +1,14 @@
import type { BackendClient } from "../backend-client.js";
import type { DebugInterestTopic, InterestManager, TopicStatus } from "./manager.js";
import type { DebugSubscriptionTopic, SubscriptionManager, TopicStatus } from "./manager.js";
import { topicDefinitions, type TopicData, type TopicDefinition, type TopicKey, type TopicParams } from "./topics.js";
const GRACE_PERIOD_MS = 30_000;
/**
* Remote implementation of InterestManager.
* Remote implementation of SubscriptionManager.
* Each cache entry owns one actor connection plus one materialized snapshot.
*/
export class RemoteInterestManager implements InterestManager {
export class RemoteSubscriptionManager implements SubscriptionManager {
private entries = new Map<string, TopicEntry<any, any, any>>();
constructor(private readonly backend: BackendClient) {}
@ -53,7 +53,7 @@ export class RemoteInterestManager implements InterestManager {
return this.entries.get((topicDefinitions[topicKey] as any).key(params))?.error ?? null;
}
listDebugTopics(): DebugInterestTopic[] {
listDebugTopics(): DebugSubscriptionTopic[] {
return [...this.entries.values()]
.filter((entry) => entry.listenerCount > 0)
.map((entry) => entry.getDebugTopic())
@ -91,7 +91,7 @@ class TopicEntry<TData, TParams, TEvent> {
private readonly params: TParams,
) {}
getDebugTopic(): DebugInterestTopic {
getDebugTopic(): DebugSubscriptionTopic {
return {
topicKey: this.topicKey,
cacheKey: this.cacheKey,

View file

@ -1,19 +1,19 @@
import type {
AppEvent,
FoundryAppSnapshot,
ProviderId,
SandboxProviderId,
SandboxProcessesEvent,
SessionEvent,
TaskEvent,
WorkbenchSessionDetail,
WorkbenchTaskDetail,
WorkspaceEvent,
WorkspaceSummarySnapshot,
OrganizationEvent,
OrganizationSummarySnapshot,
} from "@sandbox-agent/foundry-shared";
import type { ActorConn, BackendClient, SandboxProcessRecord } from "../backend-client.js";
/**
* Topic definitions for the interest manager.
* Topic definitions for the subscription manager.
*
* Each topic describes one actor connection plus one materialized read model.
* Events always carry full replacement payloads for the changed entity so the
@ -28,23 +28,23 @@ export interface TopicDefinition<TData, TParams, TEvent> {
}
export interface AppTopicParams {}
export interface WorkspaceTopicParams {
workspaceId: string;
export interface OrganizationTopicParams {
organizationId: string;
}
export interface TaskTopicParams {
workspaceId: string;
organizationId: string;
repoId: string;
taskId: string;
}
export interface SessionTopicParams {
workspaceId: string;
organizationId: string;
repoId: string;
taskId: string;
sessionId: string;
}
export interface SandboxProcessesTopicParams {
workspaceId: string;
providerId: ProviderId;
organizationId: string;
sandboxProviderId: SandboxProviderId;
sandboxId: string;
}
@ -62,17 +62,17 @@ export const topicDefinitions = {
app: {
key: () => "app",
event: "appUpdated",
connect: (backend: BackendClient, _params: AppTopicParams) => backend.connectWorkspace("app"),
connect: (backend: BackendClient, _params: AppTopicParams) => backend.connectOrganization("app"),
fetchInitial: (backend: BackendClient, _params: AppTopicParams) => backend.getAppSnapshot(),
applyEvent: (_current: FoundryAppSnapshot, event: AppEvent) => event.snapshot,
} satisfies TopicDefinition<FoundryAppSnapshot, AppTopicParams, AppEvent>,
workspace: {
key: (params: WorkspaceTopicParams) => `workspace:${params.workspaceId}`,
event: "workspaceUpdated",
connect: (backend: BackendClient, params: WorkspaceTopicParams) => backend.connectWorkspace(params.workspaceId),
fetchInitial: (backend: BackendClient, params: WorkspaceTopicParams) => backend.getWorkspaceSummary(params.workspaceId),
applyEvent: (current: WorkspaceSummarySnapshot, event: WorkspaceEvent) => {
organization: {
key: (params: OrganizationTopicParams) => `organization:${params.organizationId}`,
event: "organizationUpdated",
connect: (backend: BackendClient, params: OrganizationTopicParams) => backend.connectOrganization(params.organizationId),
fetchInitial: (backend: BackendClient, params: OrganizationTopicParams) => backend.getOrganizationSummary(params.organizationId),
applyEvent: (current: OrganizationSummarySnapshot, event: OrganizationEvent) => {
switch (event.type) {
case "taskSummaryUpdated":
return {
@ -107,22 +107,22 @@ export const topicDefinitions = {
};
}
},
} satisfies TopicDefinition<WorkspaceSummarySnapshot, WorkspaceTopicParams, WorkspaceEvent>,
} satisfies TopicDefinition<OrganizationSummarySnapshot, OrganizationTopicParams, OrganizationEvent>,
task: {
key: (params: TaskTopicParams) => `task:${params.workspaceId}:${params.taskId}`,
key: (params: TaskTopicParams) => `task:${params.organizationId}:${params.taskId}`,
event: "taskUpdated",
connect: (backend: BackendClient, params: TaskTopicParams) => backend.connectTask(params.workspaceId, params.repoId, params.taskId),
fetchInitial: (backend: BackendClient, params: TaskTopicParams) => backend.getTaskDetail(params.workspaceId, params.repoId, params.taskId),
connect: (backend: BackendClient, params: TaskTopicParams) => backend.connectTask(params.organizationId, params.repoId, params.taskId),
fetchInitial: (backend: BackendClient, params: TaskTopicParams) => backend.getTaskDetail(params.organizationId, params.repoId, params.taskId),
applyEvent: (_current: WorkbenchTaskDetail, event: TaskEvent) => event.detail,
} satisfies TopicDefinition<WorkbenchTaskDetail, TaskTopicParams, TaskEvent>,
session: {
key: (params: SessionTopicParams) => `session:${params.workspaceId}:${params.taskId}:${params.sessionId}`,
key: (params: SessionTopicParams) => `session:${params.organizationId}:${params.taskId}:${params.sessionId}`,
event: "sessionUpdated",
connect: (backend: BackendClient, params: SessionTopicParams) => backend.connectTask(params.workspaceId, params.repoId, params.taskId),
connect: (backend: BackendClient, params: SessionTopicParams) => backend.connectTask(params.organizationId, params.repoId, params.taskId),
fetchInitial: (backend: BackendClient, params: SessionTopicParams) =>
backend.getSessionDetail(params.workspaceId, params.repoId, params.taskId, params.sessionId),
backend.getSessionDetail(params.organizationId, params.repoId, params.taskId, params.sessionId),
applyEvent: (current: WorkbenchSessionDetail, event: SessionEvent) => {
if (event.session.sessionId !== current.sessionId) {
return current;
@ -132,11 +132,12 @@ export const topicDefinitions = {
} satisfies TopicDefinition<WorkbenchSessionDetail, SessionTopicParams, SessionEvent>,
sandboxProcesses: {
key: (params: SandboxProcessesTopicParams) => `sandbox:${params.workspaceId}:${params.providerId}:${params.sandboxId}`,
key: (params: SandboxProcessesTopicParams) => `sandbox:${params.organizationId}:${params.sandboxProviderId}:${params.sandboxId}`,
event: "processesUpdated",
connect: (backend: BackendClient, params: SandboxProcessesTopicParams) => backend.connectSandbox(params.workspaceId, params.providerId, params.sandboxId),
connect: (backend: BackendClient, params: SandboxProcessesTopicParams) =>
backend.connectSandbox(params.organizationId, params.sandboxProviderId, params.sandboxId),
fetchInitial: async (backend: BackendClient, params: SandboxProcessesTopicParams) =>
(await backend.listSandboxProcesses(params.workspaceId, params.providerId, params.sandboxId)).processes,
(await backend.listSandboxProcesses(params.organizationId, params.sandboxProviderId, params.sandboxId)).processes,
applyEvent: (_current: SandboxProcessRecord[], event: SandboxProcessesEvent) => event.processes,
} satisfies TopicDefinition<SandboxProcessRecord[], SandboxProcessesTopicParams, SandboxProcessesEvent>,
} as const;

View file

@ -1,14 +1,14 @@
import { useMemo, useRef, useSyncExternalStore } from "react";
import type { InterestManager, TopicState } from "./manager.js";
import type { SubscriptionManager, TopicState } from "./manager.js";
import { topicDefinitions, type TopicKey, type TopicParams } from "./topics.js";
/**
* React bridge for the interest manager.
* React bridge for the subscription manager.
*
* `null` params disable the subscription entirely, which is how screens express
* conditional interest in task/session/sandbox topics.
* conditional subscription in task/session/sandbox topics.
*/
export function useInterest<K extends TopicKey>(manager: InterestManager, topicKey: K, params: TopicParams<K> | null): TopicState<K> {
export function useSubscription<K extends TopicKey>(manager: SubscriptionManager, topicKey: K, params: TopicParams<K> | null): TopicState<K> {
const paramsKey = params ? (topicDefinitions[topicKey] as any).key(params) : null;
const paramsRef = useRef<TopicParams<K> | null>(params);
paramsRef.current = params;

View file

@ -87,7 +87,7 @@ export function summarizeTasks(rows: TaskRecord[]): TaskSummary {
for (const row of rows) {
byStatus[groupTaskStatus(row.status)] += 1;
byProvider[row.providerId] = (byProvider[row.providerId] ?? 0) + 1;
byProvider[row.sandboxProviderId] = (byProvider[row.sandboxProviderId] ?? 0) + 1;
}
return {

Some files were not shown because too many files have changed in this diff Show more