chore(foundry): migrate to actions (#262)

* feat(foundry): checkpoint actor and workspace refactor

* docs(foundry): add agent handoff context

* wip(foundry): continue actor refactor

* wip(foundry): capture remaining local changes

* Complete Foundry refactor checklist

* Fix Foundry validation fallout

* wip

* wip: convert all actors from workflow to plain run handlers

Workaround for RivetKit bug where c.queue.iter() never yields messages
for actors created via getOrCreate from another actor's context. The
queue accepts messages (visible in inspector) but the iterator hangs.
Sleep/wake fixes it, but actors with active connections never sleep.

Converted organization, github-data, task, and user actors from
run: workflow(...) to plain run: async (c) => { for await ... }.

Also fixes:
- Missing auth tables in org migration (auth_verification etc)
- default_model NOT NULL constraint on org profile upsert
- Nested workflow step in github-data (HistoryDivergedError)
- Removed --force from frontend Dockerfile pnpm install

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>

* Convert all actors from queues/workflows to direct actions, lazy task creation

Major refactor replacing all queue-based workflow communication with direct
RivetKit action calls across all actors. This works around a RivetKit bug
where c.queue.iter() deadlocks for actors created from another actor's context.

Key changes:
- All actors (organization, task, user, audit-log, github-data) converted
  from run: workflow(...) to actions-only (no run handler, no queues)
- PR sync creates virtual task entries in org local DB instead of spawning
  task actors — prevents OOM from 200+ actors created simultaneously
- Task actors created lazily on first user interaction via getOrCreate,
  self-initialize from org's getTaskIndexEntry data
- Removed requireRepoExists cross-actor call (caused 500s), replaced with
  local resolveTaskRepoId from org's taskIndex table
- Fixed getOrganizationContext to thread overrides through all sync phases
- Fixed sandbox repo path (/home/user/repo for E2B compatibility)
- Fixed buildSessionDetail to skip transcript fetch for pending sessions
- Added process crash protection (uncaughtException/unhandledRejection)
- Fixed React infinite render loop in mock-layout useEffect dependencies
- Added sandbox listProcesses error handling for expired E2B sandboxes
- Set E2B sandbox timeout to 1 hour (was 5 min default)
- Updated CLAUDE.md with lazy task creation rules, no-silent-catch policy,
  React hook dependency safety rules

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>

* Fix E2B sandbox timeout comment, frontend stability, and create-flow improvements

- Add TEMPORARY comment on E2B timeoutMs with pointer to rivetkit sandbox
  resilience proposal for when autoPause lands
- Fix React useEffect dependency stability in mock-layout and
  organization-dashboard to prevent infinite re-render loops
- Fix terminal-pane ref handling
- Improve create-flow service and tests

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>

---------

Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Nathan Flurry 2026-03-16 15:23:59 -07:00 committed by GitHub
parent 32f3c6c3bc
commit f45a467484
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
139 changed files with 9768 additions and 7204 deletions

View file

@ -20,7 +20,9 @@
"paths": {
"/v1/acp": {
"get": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "get_v1_acp_servers",
"responses": {
"200": {
@ -38,7 +40,9 @@
},
"/v1/acp/{server_id}": {
"get": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "get_v1_acp",
"parameters": [
{
@ -88,7 +92,9 @@
}
},
"post": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "post_v1_acp",
"parameters": [
{
@ -198,7 +204,9 @@
}
},
"delete": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "delete_v1_acp",
"parameters": [
{
@ -220,7 +228,9 @@
},
"/v1/agents": {
"get": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "get_v1_agents",
"parameters": [
{
@ -270,7 +280,9 @@
},
"/v1/agents/{agent}": {
"get": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "get_v1_agent",
"parameters": [
{
@ -339,7 +351,9 @@
},
"/v1/agents/{agent}/install": {
"post": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "post_v1_agent_install",
"parameters": [
{
@ -398,7 +412,9 @@
},
"/v1/config/mcp": {
"get": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "get_v1_config_mcp",
"parameters": [
{
@ -444,7 +460,9 @@
}
},
"put": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "put_v1_config_mcp",
"parameters": [
{
@ -483,7 +501,9 @@
}
},
"delete": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "delete_v1_config_mcp",
"parameters": [
{
@ -514,7 +534,9 @@
},
"/v1/config/skills": {
"get": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "get_v1_config_skills",
"parameters": [
{
@ -560,7 +582,9 @@
}
},
"put": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "put_v1_config_skills",
"parameters": [
{
@ -599,7 +623,9 @@
}
},
"delete": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "delete_v1_config_skills",
"parameters": [
{
@ -630,7 +656,9 @@
},
"/v1/fs/entries": {
"get": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "get_v1_fs_entries",
"parameters": [
{
@ -663,7 +691,9 @@
},
"/v1/fs/entry": {
"delete": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "delete_v1_fs_entry",
"parameters": [
{
@ -702,7 +732,9 @@
},
"/v1/fs/file": {
"get": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "get_v1_fs_file",
"parameters": [
{
@ -722,7 +754,9 @@
}
},
"put": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "put_v1_fs_file",
"parameters": [
{
@ -762,7 +796,9 @@
},
"/v1/fs/mkdir": {
"post": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "post_v1_fs_mkdir",
"parameters": [
{
@ -791,7 +827,9 @@
},
"/v1/fs/move": {
"post": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "post_v1_fs_move",
"requestBody": {
"content": {
@ -819,7 +857,9 @@
},
"/v1/fs/stat": {
"get": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "get_v1_fs_stat",
"parameters": [
{
@ -848,7 +888,9 @@
},
"/v1/fs/upload-batch": {
"post": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "post_v1_fs_upload_batch",
"parameters": [
{
@ -889,7 +931,9 @@
},
"/v1/health": {
"get": {
"tags": ["v1"],
"tags": [
"v1"
],
"operationId": "get_v1_health",
"responses": {
"200": {
@ -907,7 +951,9 @@
},
"/v1/processes": {
"get": {
"tags": ["v1"],
"tags": [
"v1"
],
"summary": "List all managed processes.",
"description": "Returns a list of all processes (running and exited) currently tracked\nby the runtime, sorted by process ID.",
"operationId": "get_v1_processes",
@ -935,7 +981,9 @@
}
},
"post": {
"tags": ["v1"],
"tags": [
"v1"
],
"summary": "Create a long-lived managed process.",
"description": "Spawns a new process with the given command and arguments. Supports both\npipe-based and PTY (tty) modes. Returns the process descriptor on success.",
"operationId": "post_v1_processes",
@ -995,7 +1043,9 @@
},
"/v1/processes/config": {
"get": {
"tags": ["v1"],
"tags": [
"v1"
],
"summary": "Get process runtime configuration.",
"description": "Returns the current runtime configuration for the process management API,\nincluding limits for concurrency, timeouts, and buffer sizes.",
"operationId": "get_v1_processes_config",
@ -1023,7 +1073,9 @@
}
},
"post": {
"tags": ["v1"],
"tags": [
"v1"
],
"summary": "Update process runtime configuration.",
"description": "Replaces the runtime configuration for the process management API.\nValidates that all values are non-zero and clamps default timeout to max.",
"operationId": "post_v1_processes_config",
@ -1073,7 +1125,9 @@
},
"/v1/processes/run": {
"post": {
"tags": ["v1"],
"tags": [
"v1"
],
"summary": "Run a one-shot command.",
"description": "Executes a command to completion and returns its stdout, stderr, exit code,\nand duration. Supports configurable timeout and output size limits.",
"operationId": "post_v1_processes_run",
@ -1123,7 +1177,9 @@
},
"/v1/processes/{id}": {
"get": {
"tags": ["v1"],
"tags": [
"v1"
],
"summary": "Get a single process by ID.",
"description": "Returns the current state of a managed process including its status,\nPID, exit code, and creation/exit timestamps.",
"operationId": "get_v1_process",
@ -1172,7 +1228,9 @@
}
},
"delete": {
"tags": ["v1"],
"tags": [
"v1"
],
"summary": "Delete a process record.",
"description": "Removes a stopped process from the runtime. Returns 409 if the process\nis still running; stop or kill it first.",
"operationId": "delete_v1_process",
@ -1226,7 +1284,9 @@
},
"/v1/processes/{id}/input": {
"post": {
"tags": ["v1"],
"tags": [
"v1"
],
"summary": "Write input to a process.",
"description": "Sends data to a process's stdin (pipe mode) or PTY writer (tty mode).\nData can be encoded as base64, utf8, or text. Returns 413 if the decoded\npayload exceeds the configured `maxInputBytesPerRequest` limit.",
"operationId": "post_v1_process_input",
@ -1307,7 +1367,9 @@
},
"/v1/processes/{id}/kill": {
"post": {
"tags": ["v1"],
"tags": [
"v1"
],
"summary": "Send SIGKILL to a process.",
"description": "Sends SIGKILL to the process and optionally waits up to `waitMs`\nmilliseconds for the process to exit before returning.",
"operationId": "post_v1_process_kill",
@ -1370,7 +1432,9 @@
},
"/v1/processes/{id}/logs": {
"get": {
"tags": ["v1"],
"tags": [
"v1"
],
"summary": "Fetch process logs.",
"description": "Returns buffered log entries for a process. Supports filtering by stream\ntype, tail count, and sequence-based resumption. When `follow=true`,\nreturns an SSE stream that replays buffered entries then streams live output.",
"operationId": "get_v1_process_logs",
@ -1468,7 +1532,9 @@
},
"/v1/processes/{id}/stop": {
"post": {
"tags": ["v1"],
"tags": [
"v1"
],
"summary": "Send SIGTERM to a process.",
"description": "Sends SIGTERM to the process and optionally waits up to `waitMs`\nmilliseconds for the process to exit before returning.",
"operationId": "post_v1_process_stop",
@ -1531,7 +1597,9 @@
},
"/v1/processes/{id}/terminal/resize": {
"post": {
"tags": ["v1"],
"tags": [
"v1"
],
"summary": "Resize a process terminal.",
"description": "Sets the PTY window size (columns and rows) for a tty-mode process and\nsends SIGWINCH so the child process can adapt.",
"operationId": "post_v1_process_terminal_resize",
@ -1612,7 +1680,9 @@
},
"/v1/processes/{id}/terminal/ws": {
"get": {
"tags": ["v1"],
"tags": [
"v1"
],
"summary": "Open an interactive WebSocket terminal session.",
"description": "Upgrades the connection to a WebSocket for bidirectional PTY I/O. Accepts\n`access_token` query param for browser-based auth (WebSocket API cannot\nsend custom headers). Streams raw PTY output as binary frames and accepts\nJSON control frames for input, resize, and close.",
"operationId": "get_v1_process_terminal_ws",
@ -1689,7 +1759,9 @@
"schemas": {
"AcpEnvelope": {
"type": "object",
"required": ["jsonrpc"],
"required": [
"jsonrpc"
],
"properties": {
"error": {
"nullable": true
@ -1723,7 +1795,11 @@
},
"AcpServerInfo": {
"type": "object",
"required": ["serverId", "agent", "createdAtMs"],
"required": [
"serverId",
"agent",
"createdAtMs"
],
"properties": {
"agent": {
"type": "string"
@ -1739,7 +1815,9 @@
},
"AcpServerListResponse": {
"type": "object",
"required": ["servers"],
"required": [
"servers"
],
"properties": {
"servers": {
"type": "array",
@ -1830,7 +1908,12 @@
},
"AgentInfo": {
"type": "object",
"required": ["id", "installed", "credentialsAvailable", "capabilities"],
"required": [
"id",
"installed",
"credentialsAvailable",
"capabilities"
],
"properties": {
"capabilities": {
"$ref": "#/components/schemas/AgentCapabilities"
@ -1873,7 +1956,11 @@
},
"AgentInstallArtifact": {
"type": "object",
"required": ["kind", "path", "source"],
"required": [
"kind",
"path",
"source"
],
"properties": {
"kind": {
"type": "string"
@ -1909,7 +1996,10 @@
},
"AgentInstallResponse": {
"type": "object",
"required": ["already_installed", "artifacts"],
"required": [
"already_installed",
"artifacts"
],
"properties": {
"already_installed": {
"type": "boolean"
@ -1924,7 +2014,9 @@
},
"AgentListResponse": {
"type": "object",
"required": ["agents"],
"required": [
"agents"
],
"properties": {
"agents": {
"type": "array",
@ -1957,7 +2049,9 @@
},
"FsActionResponse": {
"type": "object",
"required": ["path"],
"required": [
"path"
],
"properties": {
"path": {
"type": "string"
@ -1966,7 +2060,9 @@
},
"FsDeleteQuery": {
"type": "object",
"required": ["path"],
"required": [
"path"
],
"properties": {
"path": {
"type": "string"
@ -1988,7 +2084,12 @@
},
"FsEntry": {
"type": "object",
"required": ["name", "path", "entryType", "size"],
"required": [
"name",
"path",
"entryType",
"size"
],
"properties": {
"entryType": {
"$ref": "#/components/schemas/FsEntryType"
@ -2012,11 +2113,17 @@
},
"FsEntryType": {
"type": "string",
"enum": ["file", "directory"]
"enum": [
"file",
"directory"
]
},
"FsMoveRequest": {
"type": "object",
"required": ["from", "to"],
"required": [
"from",
"to"
],
"properties": {
"from": {
"type": "string"
@ -2032,7 +2139,10 @@
},
"FsMoveResponse": {
"type": "object",
"required": ["from", "to"],
"required": [
"from",
"to"
],
"properties": {
"from": {
"type": "string"
@ -2044,7 +2154,9 @@
},
"FsPathQuery": {
"type": "object",
"required": ["path"],
"required": [
"path"
],
"properties": {
"path": {
"type": "string"
@ -2053,7 +2165,11 @@
},
"FsStat": {
"type": "object",
"required": ["path", "entryType", "size"],
"required": [
"path",
"entryType",
"size"
],
"properties": {
"entryType": {
"$ref": "#/components/schemas/FsEntryType"
@ -2083,7 +2199,10 @@
},
"FsUploadBatchResponse": {
"type": "object",
"required": ["paths", "truncated"],
"required": [
"paths",
"truncated"
],
"properties": {
"paths": {
"type": "array",
@ -2098,7 +2217,10 @@
},
"FsWriteResponse": {
"type": "object",
"required": ["path", "bytesWritten"],
"required": [
"path",
"bytesWritten"
],
"properties": {
"bytesWritten": {
"type": "integer",
@ -2112,7 +2234,9 @@
},
"HealthResponse": {
"type": "object",
"required": ["status"],
"required": [
"status"
],
"properties": {
"status": {
"type": "string"
@ -2121,7 +2245,10 @@
},
"McpConfigQuery": {
"type": "object",
"required": ["directory", "mcpName"],
"required": [
"directory",
"mcpName"
],
"properties": {
"directory": {
"type": "string"
@ -2135,7 +2262,10 @@
"oneOf": [
{
"type": "object",
"required": ["command", "type"],
"required": [
"command",
"type"
],
"properties": {
"args": {
"type": "array",
@ -2169,13 +2299,18 @@
},
"type": {
"type": "string",
"enum": ["local"]
"enum": [
"local"
]
}
}
},
{
"type": "object",
"required": ["url", "type"],
"required": [
"url",
"type"
],
"properties": {
"bearerTokenEnvVar": {
"type": "string",
@ -2223,7 +2358,9 @@
},
"type": {
"type": "string",
"enum": ["remote"]
"enum": [
"remote"
]
},
"url": {
"type": "string"
@ -2237,7 +2374,11 @@
},
"ProblemDetails": {
"type": "object",
"required": ["type", "title", "status"],
"required": [
"type",
"title",
"status"
],
"properties": {
"detail": {
"type": "string",
@ -2263,7 +2404,14 @@
},
"ProcessConfig": {
"type": "object",
"required": ["maxConcurrentProcesses", "defaultRunTimeoutMs", "maxRunTimeoutMs", "maxOutputBytes", "maxLogBytesPerProcess", "maxInputBytesPerRequest"],
"required": [
"maxConcurrentProcesses",
"defaultRunTimeoutMs",
"maxRunTimeoutMs",
"maxOutputBytes",
"maxLogBytesPerProcess",
"maxInputBytesPerRequest"
],
"properties": {
"defaultRunTimeoutMs": {
"type": "integer",
@ -2295,7 +2443,9 @@
},
"ProcessCreateRequest": {
"type": "object",
"required": ["command"],
"required": [
"command"
],
"properties": {
"args": {
"type": "array",
@ -2326,7 +2476,15 @@
},
"ProcessInfo": {
"type": "object",
"required": ["id", "command", "args", "tty", "interactive", "status", "createdAtMs"],
"required": [
"id",
"command",
"args",
"tty",
"interactive",
"status",
"createdAtMs"
],
"properties": {
"args": {
"type": "array",
@ -2377,7 +2535,9 @@
},
"ProcessInputRequest": {
"type": "object",
"required": ["data"],
"required": [
"data"
],
"properties": {
"data": {
"type": "string"
@ -2390,7 +2550,9 @@
},
"ProcessInputResponse": {
"type": "object",
"required": ["bytesWritten"],
"required": [
"bytesWritten"
],
"properties": {
"bytesWritten": {
"type": "integer",
@ -2400,7 +2562,9 @@
},
"ProcessListResponse": {
"type": "object",
"required": ["processes"],
"required": [
"processes"
],
"properties": {
"processes": {
"type": "array",
@ -2412,7 +2576,13 @@
},
"ProcessLogEntry": {
"type": "object",
"required": ["sequence", "stream", "timestampMs", "data", "encoding"],
"required": [
"sequence",
"stream",
"timestampMs",
"data",
"encoding"
],
"properties": {
"data": {
"type": "string"
@ -2464,7 +2634,11 @@
},
"ProcessLogsResponse": {
"type": "object",
"required": ["processId", "stream", "entries"],
"required": [
"processId",
"stream",
"entries"
],
"properties": {
"entries": {
"type": "array",
@ -2482,11 +2656,18 @@
},
"ProcessLogsStream": {
"type": "string",
"enum": ["stdout", "stderr", "combined", "pty"]
"enum": [
"stdout",
"stderr",
"combined",
"pty"
]
},
"ProcessRunRequest": {
"type": "object",
"required": ["command"],
"required": [
"command"
],
"properties": {
"args": {
"type": "array",
@ -2522,7 +2703,14 @@
},
"ProcessRunResponse": {
"type": "object",
"required": ["timedOut", "stdout", "stderr", "stdoutTruncated", "stderrTruncated", "durationMs"],
"required": [
"timedOut",
"stdout",
"stderr",
"stdoutTruncated",
"stderrTruncated",
"durationMs"
],
"properties": {
"durationMs": {
"type": "integer",
@ -2564,11 +2752,17 @@
},
"ProcessState": {
"type": "string",
"enum": ["running", "exited"]
"enum": [
"running",
"exited"
]
},
"ProcessTerminalResizeRequest": {
"type": "object",
"required": ["cols", "rows"],
"required": [
"cols",
"rows"
],
"properties": {
"cols": {
"type": "integer",
@ -2584,7 +2778,10 @@
},
"ProcessTerminalResizeResponse": {
"type": "object",
"required": ["cols", "rows"],
"required": [
"cols",
"rows"
],
"properties": {
"cols": {
"type": "integer",
@ -2600,11 +2797,16 @@
},
"ServerStatus": {
"type": "string",
"enum": ["running", "stopped"]
"enum": [
"running",
"stopped"
]
},
"ServerStatusInfo": {
"type": "object",
"required": ["status"],
"required": [
"status"
],
"properties": {
"status": {
"$ref": "#/components/schemas/ServerStatus"
@ -2619,7 +2821,10 @@
},
"SkillSource": {
"type": "object",
"required": ["type", "source"],
"required": [
"type",
"source"
],
"properties": {
"ref": {
"type": "string",
@ -2646,7 +2851,9 @@
},
"SkillsConfig": {
"type": "object",
"required": ["sources"],
"required": [
"sources"
],
"properties": {
"sources": {
"type": "array",
@ -2658,7 +2865,10 @@
},
"SkillsConfigQuery": {
"type": "object",
"required": ["directory", "skillName"],
"required": [
"directory",
"skillName"
],
"properties": {
"directory": {
"type": "string"

179
foundry/AGENT-HANDOFF.md Normal file
View file

@ -0,0 +1,179 @@
# Foundry Agent Handoff
## Baseline
- Repo: `rivet-dev/sandbox-agent`
- Branch: `columbus-v2`
- Last pushed commit: `3174fe73` (`feat(foundry): checkpoint actor and workspace refactor`)
- Progress/spec tracker: [FOUNDRY-CHANGES.md](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/FOUNDRY-CHANGES.md)
## What is already landed
These spec slices are already implemented and pushed:
- Item `1`: backend actor rename `auth-user` -> `user`
- Item `2`: Better Auth mapping comments
- Item `5`: task raw SQL cleanup into migrations
- Item `6`: `history` -> `audit-log`
- Item `7`: default model moved to user-scoped app state
- Item `20`: admin action prefixing
- Item `23`: dead `getTaskEnriched` / `enrichTaskRecord` removal
- Item `25`: `Workbench` -> `Workspace` rename across backend/shared/client/frontend
- Item `26`: branch rename deleted
- Organization realtime was already collapsed to full-snapshot `organizationUpdated`
- Task realtime was already aligned to `taskUpdated`
## Known blocker
Spec item `3` is only partially done. The singleton constraint for the Better Auth `user` table is still blocked.
- File: [foundry/packages/backend/src/actors/user/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/user/db/schema.ts)
- Reason: Better Auth still depends on external string `user.id`, so a literal singleton `CHECK (id = 1)` on that table is not a safe mechanical change.
## Important current state
There are uncommitted edits on top of the pushed checkpoint. Another agent should start from the current worktree, not just `origin/columbus-v2`.
Current dirty files:
- [foundry/packages/backend/src/actors/github-data/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/github-data/index.ts)
- [foundry/packages/backend/src/actors/organization/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/actions.ts)
- [foundry/packages/backend/src/actors/repository/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/repository/actions.ts)
- [foundry/packages/backend/src/actors/task/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workspace.ts)
- [foundry/packages/client/src/mock/backend-client.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/client/src/mock/backend-client.ts)
These files are the current hot path for the unfinished structural work.
## What is partially in place but not finished
### User-owned task UI state
The user actor already has the schema and CRUD surface for per-user task/session UI state:
- [foundry/packages/backend/src/actors/user/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/user/db/schema.ts)
`user_task_state`
- [foundry/packages/backend/src/actors/user/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/user/index.ts)
`getTaskState`, `upsertTaskState`, `deleteTaskState`
But the task actor and UI are still reading/writing the old task-global fields:
- [foundry/packages/backend/src/actors/task/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/db/schema.ts)
still contains `task_runtime.active_session_id` and session `unread` / `draft_*`
- [foundry/packages/backend/src/actors/task/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workspace.ts)
still derives unread/draft/active-session from task-local rows
- [foundry/packages/frontend/src/components/mock-layout.tsx](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/frontend/src/components/mock-layout.tsx)
still treats `activeSessionId` as frontend-local and uses task-level unread/draft state
So items `21`, `22`, `24`, and part of `19` are only half-done.
### Coordinator ownership
The current architecture still violates the intended coordinator pattern:
- Organization still owns `taskLookup` and `taskSummaries`
- [foundry/packages/backend/src/actors/organization/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/db/schema.ts)
- Organization still resolves `taskId -> repoId`
- [foundry/packages/backend/src/actors/organization/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/actions.ts)
- Task still pushes summary updates to organization instead of repository
- [foundry/packages/backend/src/actors/task/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workspace.ts)
- Repository still does not own a `tasks` projection table yet
- [foundry/packages/backend/src/actors/repository/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/repository/db/schema.ts)
So items `9`, `13`, and `15` are still open.
### Queue-only mutations
Task actor workspace commands already go through queue sends. Other actors still do not fully follow the queue-only mutation rule:
- [foundry/packages/backend/src/actors/user/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/user/index.ts)
- [foundry/packages/backend/src/actors/github-data/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/github-data/index.ts)
- [foundry/packages/backend/src/actors/organization/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/actions.ts)
- [foundry/packages/backend/src/actors/organization/app-shell.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/app-shell.ts)
So items `4`, `10`, and `11` are still open.
### Dynamic model/agent data
The frontend/client still hardcode model groups:
- [foundry/packages/frontend/src/components/mock-layout/view-model.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/frontend/src/components/mock-layout/view-model.ts)
- [foundry/packages/client/src/workspace-model.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/client/src/workspace-model.ts)
- [foundry/packages/shared/src/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/shared/src/workspace.ts)
`WorkspaceModelId` is still a hardcoded union
The repo already has the API source of truth available through the TypeScript SDK:
- [sdks/typescript/src/client.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/sdks/typescript/src/client.ts)
`SandboxAgent.listAgents({ config: true })`
- [server/packages/sandbox-agent/src/router.rs](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/server/packages/sandbox-agent/src/router.rs)
`/v1/agents`
- [server/packages/sandbox-agent/src/router/support.rs](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/server/packages/sandbox-agent/src/router/support.rs)
`fallback_config_options`
So item `8` is still open.
### GitHub sync chunking/progress
GitHub data sync is still a delete-and-replace flow:
- [foundry/packages/backend/src/actors/github-data/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/github-data/index.ts)
`replaceRepositories`, `replaceBranches`, `replaceMembers`, `replacePullRequests`, and full-sync flow
- [foundry/packages/backend/src/actors/github-data/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/github-data/db/schema.ts)
no generation/progress columns yet
- [foundry/packages/shared/src/app-shell.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/shared/src/app-shell.ts)
no structured sync progress field yet
So item `16` is still open.
## Recommended next order
If another agent picks this up, this is the safest order:
1. Finish items `21`, `22`, `24`, `19` together.
Reason: user-owned task UI state is already half-wired, and task schema cleanup depends on the same files.
2. Finish items `9`, `13`, `15` together.
Reason: coordinator ownership, repo-owned task projections, and PR/task unification are the same refactor seam.
3. Finish item `16`.
Reason: GitHub sync chunking is mostly isolated to `github-data` plus app-shell/shared snapshot wiring.
4. Finish item `8`.
Reason: dynamic model/agent data is largely independent once user default model is already user-scoped.
5. Finish items `4`, `10`, `11`, `12`, `18`, final event audit.
6. Do item `17` last.
## Concrete file hotspots for the next agent
Backend:
- [foundry/packages/backend/src/actors/task/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workspace.ts)
- [foundry/packages/backend/src/actors/task/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/db/schema.ts)
- [foundry/packages/backend/src/actors/task/workflow/common.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workflow/common.ts)
- [foundry/packages/backend/src/actors/task/workflow/commands.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workflow/commands.ts)
- [foundry/packages/backend/src/actors/task/workflow/init.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/task/workflow/init.ts)
- [foundry/packages/backend/src/actors/repository/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/repository/actions.ts)
- [foundry/packages/backend/src/actors/repository/db/schema.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/repository/db/schema.ts)
- [foundry/packages/backend/src/actors/organization/actions.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/organization/actions.ts)
- [foundry/packages/backend/src/actors/github-data/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/github-data/index.ts)
- [foundry/packages/backend/src/actors/user/index.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/backend/src/actors/user/index.ts)
Shared/client/frontend:
- [foundry/packages/shared/src/workspace.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/shared/src/workspace.ts)
- [foundry/packages/shared/src/contracts.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/shared/src/contracts.ts)
- [foundry/packages/shared/src/app-shell.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/shared/src/app-shell.ts)
- [foundry/packages/client/src/backend-client.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/client/src/backend-client.ts)
- [foundry/packages/client/src/workspace-model.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/client/src/workspace-model.ts)
- [foundry/packages/frontend/src/components/mock-layout.tsx](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/frontend/src/components/mock-layout.tsx)
- [foundry/packages/frontend/src/components/mock-layout/view-model.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/frontend/src/components/mock-layout/view-model.ts)
- [foundry/packages/frontend/src/features/tasks/status.ts](/Users/nathan/conductor/workspaces/sandbox-agent/columbus-v1/foundry/packages/frontend/src/features/tasks/status.ts)
## Notes that matter
- The pushed checkpoint is useful, but it is not the full current state. There are uncommitted edits in the hot-path backend files listed above.
- The current tree already contains a partially added `user_task_state` path. Do not duplicate that work; finish the migration by removing the old task-owned fields and rewiring readers/writers.
- The current task actor still reads mutable fields from `c.state` such as `repoRemote`, `branchName`, `title`, `task`, `sandboxProviderId`, and `agentType`. That is part of item `19`.
- The current frontend still synthesizes PR-only rows into fake tasks. That should go away as part of repo-owned task projection / PR unification.

View file

@ -56,6 +56,8 @@ Use `pnpm` workspaces and Turborepo.
- mock frontend changes: `just foundry-mock` or restart with `just foundry-mock-down && just foundry-mock`
- local frontend-only work outside Docker: restart `pnpm --filter @sandbox-agent/foundry-frontend dev` or `just foundry-dev-mock` as appropriate
- The backend does **not** hot reload. Bun's `--hot` flag causes the server to re-bind on a different port (e.g. 6421 instead of 6420), breaking all client connections while the container still exposes the original port. After backend code changes, restart the backend container: `just foundry-dev-down && just foundry-dev`.
- The dev server has debug logging enabled by default (`RIVET_LOG_LEVEL=debug`, `FOUNDRY_LOG_LEVEL=debug`) via `compose.dev.yaml`. Error stacks and timestamps are also enabled.
- The frontend client uses JSON encoding for RivetKit in development (`import.meta.env.DEV`) for easier debugging. Production uses the default encoding.
## Railway Logs
@ -73,13 +75,14 @@ Use `pnpm` workspaces and Turborepo.
- All backend interaction (actor calls, metadata/health checks, backend HTTP endpoint access) must go through the dedicated client library in `packages/client`.
- Outside `packages/client`, do not call backend endpoints directly (for example `fetch(.../v1/rivet...)`), except in black-box E2E tests that intentionally exercise raw transport behavior.
- GUI state should update in realtime (no manual refresh buttons). Prefer RivetKit push reactivity and actor-driven events; do not add polling/refetch for normal product flows.
- Keep the mock workspace types and mock client in `packages/shared` + `packages/client` up to date with the frontend contract. The mock is the UI testing reference implementation while backend functionality catches up.
- Keep frontend route/state coverage current in code and tests; there is no separate page-inventory doc to maintain.
- If Foundry uses a shared component from `@sandbox-agent/react`, make changes in `sdks/react` instead of copying or forking that component into Foundry.
- When changing shared React components in `sdks/react` for Foundry, verify they still work in the Sandbox Agent Inspector before finishing.
- When making UI changes, verify the live flow with the Chrome DevTools MCP or `agent-browser`, take screenshots of the updated UI, and offer to open those screenshots in Preview when you finish.
- When asked for screenshots, capture all relevant affected screens and modal states, not just a single viewport. Include empty, populated, success, and blocked/error states when they are part of the changed flow.
- If a screenshot catches a transition frame, blank modal, or otherwise misleading state, retake it before reporting it.
- When verifying UI in the browser, attempt to sign in by navigating to `/signin` and clicking "Continue with GitHub". If the browser lands on the GitHub login page (github.com/login) and you don't have credentials, stop and ask the user to complete the sign-in. Do not assume the session is invalid just because you see the Foundry sign-in page — always attempt the OAuth flow first.
## Realtime Data Architecture
@ -99,7 +102,7 @@ Do not use polling (`refetchInterval`), empty "go re-fetch" broadcast events, or
- **Organization actor** materializes sidebar-level data in its own SQLite: repo catalog, task summaries (title, status, branch, PR, updatedAt), repo summaries (overview/branch state), and session summaries (id, name, status, unread, model — no transcript). Task actors push summary changes to the organization actor when they mutate. The organization actor broadcasts the updated entity to connected clients. `getOrganizationSummary` reads from local tables only — no fan-out to child actors.
- **Task actor** materializes its own detail state (session summaries, sandbox info, diffs, file tree). `getTaskDetail` reads from the task actor's own SQLite. The task actor broadcasts updates directly to clients connected to it.
- **Session data** lives on the task actor but is a separate subscription topic. The task topic includes `sessions_summary` (list without content). The `session` topic provides full transcript and draft state. Clients subscribe to the `session` topic for whichever session is active, and filter `sessionUpdated` events by session ID (ignoring events for other sessions on the same actor).
- The expensive fan-out (querying every repository/task actor) only exists as a background reconciliation/rebuild path, never on the hot read path.
- There is no fan-out on the read path. The organization actor owns all task summaries locally.
### Subscription manager
@ -141,6 +144,15 @@ The client subscribes to `app` always, `organization` when entering an organizat
- Do not add backend git clone paths, `git fetch`, `git for-each-ref`, or direct backend git CLI calls. If you need git data, either read stored GitHub metadata or run the command inside a sandbox.
- The `BackendDriver` has no `GitDriver` or `StackDriver`. Only `GithubDriver` and `TmuxDriver` remain.
## React Hook Dependency Safety
- **Never use unstable references as `useEffect`/`useMemo`/`useCallback` dependencies.** React compares dependencies by reference, not value. Expressions like `?? []`, `?? {}`, `.map(...)`, `.filter(...)`, or object/array literals create new references every render, causing infinite re-render loops when used as dependencies.
- If the upstream value may be `undefined`/`null` and you need a fallback, either:
- Use the raw upstream value as the dependency and apply the fallback inside the effect body: `useEffect(() => { doThing(value ?? []); }, [value]);`
- Derive a stable primitive key: `const key = JSON.stringify(value ?? []);` then depend on `key`
- Memoize: `const stable = useMemo(() => value ?? [], [value]);`
- When reviewing code, treat any `?? []`, `?? {}`, or inline `.map()/.filter()` in a dependency array as a bug.
## UI System
- Foundry's base UI system is `BaseUI` with `Styletron`, plus Foundry-specific theme/tokens on top. Treat that as the default UI foundation.
@ -165,6 +177,7 @@ The client subscribes to `app` always, `organization` when entering an organizat
- If the system reaches an unexpected state, raise an explicit error with actionable context.
- Do not fail silently, swallow errors, or auto-ignore inconsistent data.
- Prefer fail-fast behavior over hidden degradation when correctness is uncertain.
- **Never use bare `catch {}` or `catch { }` blocks.** Every catch must at minimum log the error with `logActorWarning` or `console.warn`. Silent catches hide bugs and make debugging impossible. If a catch is intentionally degrading (e.g. returning empty data when a sandbox is expired), it must still log so operators can see what happened. Use `catch (error) { logActorWarning(..., { error: resolveErrorMessage(error) }); }` or equivalent.
## RivetKit Dependency Policy
@ -205,8 +218,9 @@ For all Rivet/RivetKit implementation:
- Do not add custom backend REST endpoints (no `/v1/*` shim layer).
- We own the sandbox-agent project; treat sandbox-agent defects as first-party bugs and fix them instead of working around them.
- Keep strict single-writer ownership: each table/row has exactly one actor writer.
- Parent actors (`organization`, `task`, `sandbox-instance`) use command-only loops with no timeout.
- Periodic syncing lives in dedicated child actors with one timeout cadence each.
- **Task actors must be created lazily** — never during sync or bulk operations. PR sync writes virtual entries to the org's local `taskIndex`/`taskSummaries` tables. The task actor is created on first user interaction via `getOrCreate`. See `packages/backend/CLAUDE.md` "Lazy Task Actor Creation" for details.
- Do not build blocking flows that wait on external systems to become ready or complete. Prefer push-based progression driven by actor messages, events, webhooks, or queue/workflow state changes.
- Use workflows/background commands for any repo sync, sandbox provisioning, agent install, branch restack/rebase, or other multi-step external work. Do not keep user-facing actions/requests open while that work runs.
- `send` policy: always `await` the `send(...)` call itself so enqueue failures surface immediately, but default to `wait: false`.
@ -227,8 +241,8 @@ Action handlers must return fast. The pattern:
Examples:
- `createTask``wait: true` (returns `{ taskId }`), then enqueue provisioning with `wait: false`. Client sees task appear immediately with pending status, observes `ready` via organization events.
- `sendWorkbenchMessage` → validate session is `ready` (throw if not), enqueue with `wait: false`. Client observes session transition to `running``idle` via session events.
- `createWorkbenchSession` → `wait: true` (returns `{ tabId }`), enqueue sandbox provisioning with `wait: false`. Client observes `pending_provision``ready` via task events.
- `sendWorkspaceMessage` → validate session is `ready` (throw if not), enqueue with `wait: false`. Client observes session transition to `running``idle` via session events.
- `createWorkspaceSession` → `wait: true` (returns `{ sessionId }`), enqueue sandbox provisioning with `wait: false`. Client observes `pending_provision``ready` via task events.
Never use `wait: true` for operations that depend on external readiness, sandbox I/O, agent responses, git network operations, polling loops, or long-running queue drains. Never hold an action open while waiting for an external system to become ready — that is a polling/retry loop in disguise.
@ -240,11 +254,11 @@ All `wait: true` sends must have an explicit `timeout`. Maximum timeout for any
### Task creation: resolve metadata before creating the actor
When creating a task, all deterministic metadata (title, branch name) must be resolved synchronously in the organization actor *before* the task actor is created. The task actor must never be created with null `branchName` or `title`.
- Title is derived from the task description via `deriveFallbackTitle()` — pure string manipulation, no external I/O.
- Branch name is derived from the title via `sanitizeBranchName()` + conflict checking against the repository's task index.
- The repository actor already has the task index and GitHub-backed default branch metadata. Resolve the branch name there without local git fetches.
- The organization actor owns the task index and reads GitHub-backed default branch metadata from the github-data actor. Resolve the branch name there without local git fetches.
- Do not defer naming to a background provision workflow. Do not poll for names to become available.
- The `onBranch` path (attaching to an existing branch) and the new-task path should both produce a fully-named task record on return.
- Actor handle policy:
@ -320,9 +334,9 @@ Each entry must include:
- Friction/issue
- Attempted fix/workaround and outcome
## History Events
## Audit Log Events
Log notable workflow changes to `events` so the audit log remains complete:
- create
- attach
@ -331,6 +345,8 @@ Log notable workflow changes to `events` so `hf history` remains complete:
- status transitions
- PR state transitions
When adding new task/workspace commands, always add a corresponding audit log event.
## Validation After Changes
Always run and fix failures:

1456
foundry/FOUNDRY-CHANGES.md Normal file

File diff suppressed because it is too large Load diff

View file

@ -8,4 +8,4 @@ RUN npm install -g pnpm@10.28.2
WORKDIR /app
CMD ["bash", "-lc", "pnpm install --force --frozen-lockfile --filter @sandbox-agent/foundry-frontend... && cd foundry/packages/frontend && exec pnpm vite --host 0.0.0.0 --port 4173"]
CMD ["bash", "-lc", "pnpm install --frozen-lockfile --filter @sandbox-agent/foundry-frontend... && cd foundry/packages/frontend && exec pnpm vite --host 0.0.0.0 --port 4173"]

View file

@ -5,14 +5,12 @@
Keep the backend actor tree aligned with this shape unless we explicitly decide to change it:
```text
OrganizationActor
├─ HistoryActor(organization-scoped global feed)
OrganizationActor (direct coordinator for tasks)
├─ AuditLogActor (organization-scoped global feed)
├─ GithubDataActor
├─ RepositoryActor(repo)
│ └─ TaskActor(task)
│ ├─ TaskSessionActor(session) × N
│ │ └─ SessionStatusSyncActor(session) × 0..1
│ └─ Task-local workbench state
├─ TaskActor(task)
│ ├─ taskSessions → session metadata/transcripts
│ └─ taskSandboxes → sandbox instance index
└─ SandboxInstanceActor(sandboxProviderId, sandboxId) × N
```
@ -28,53 +26,125 @@ Children push updates **up** to their direct coordinator only. Coordinators broa
### Coordinator hierarchy and index tables
```text
OrganizationActor (coordinator for repos + auth users)
OrganizationActor (coordinator for tasks + auth users)
│ Index tables:
│ ├─ repos → RepositoryActor index (repo catalog)
│ ├─ taskLookup → TaskActor index (taskId → repoId routing)
│ ├─ taskSummaries → TaskActor index (materialized sidebar projection)
│ ├─ authSessionIndex → AuthUserActor index (session token → userId)
│ ├─ authEmailIndex → AuthUserActor index (email → userId)
│ └─ authAccountIndex → AuthUserActor index (OAuth account → userId)
│ ├─ taskIndex → TaskActor index (taskId → repoId + branchName)
│ ├─ taskSummaries → TaskActor materialized sidebar projection
│ ├─ authSessionIndex → UserActor index (session token → userId)
│ ├─ authEmailIndex → UserActor index (email → userId)
│ └─ authAccountIndex → UserActor index (OAuth account → userId)
├─ RepositoryActor (coordinator for tasks)
├─ TaskActor (coordinator for sessions + sandboxes)
│ │
│ │ Index tables:
│ │ └─ taskIndex → TaskActor index (taskId → branchName)
│ │ ├─ taskWorkspaceSessions → Session index (session metadata + transcript)
│ │ └─ taskSandboxes → SandboxInstanceActor index (sandbox history)
│ │
│ └─ TaskActor (coordinator for sessions + sandboxes)
│ │
│ │ Index tables:
│ │ ├─ taskWorkbenchSessions → Session index (session metadata, transcript, draft)
│ │ └─ taskSandboxes → SandboxInstanceActor index (sandbox history)
│ │
│ └─ SandboxInstanceActor (leaf)
│ └─ SandboxInstanceActor (leaf)
├─ HistoryActor (organization-scoped audit log, not a coordinator)
├─ AuditLogActor (organization-scoped audit log, not a coordinator)
└─ GithubDataActor (GitHub API cache, not a coordinator)
```
When adding a new index table, annotate it in the schema file with a doc comment identifying it as a coordinator index and which child actor it indexes (see existing examples).
## Lazy Task Actor Creation — CRITICAL
**Task actors must NEVER be created during GitHub sync or bulk operations.** Creating hundreds of task actors simultaneously causes OOM crashes. An org can have 200+ PRs; spawning an actor per PR kills the process.
### The two creation points
There are exactly **two** places that may create a task actor:
1. **`createTaskMutation`** in `task-mutations.ts` — the only backend code that calls `getOrCreateTask`. Triggered by explicit user action ("New Task" button). One actor at a time.
2. **`backend-client.ts` client helper** — calls `client.task.getOrCreate(...)`. This is the lazy materialization point: when a user clicks a virtual task in the sidebar, the client creates the actor, and it self-initializes in `getCurrentRecord()` (`workflow/common.ts`) by reading branch/title from the org's `getTaskIndexEntry` action.
### The rule
**Never use `getOrCreateTask` inside a sync loop, webhook handler, or any bulk operation.** That's what caused the OOM — 186 actors spawned simultaneously during PR sync.
`getOrCreateTask` IS allowed in:
- `createTaskMutation` — explicit user "New Task" action
- `requireWorkspaceTask` — user-initiated actions (createSession, sendMessage, etc.) that may hit a virtual task
- `getTask` action on the org — called by sandbox actor and client, needs to materialize virtual tasks
- `backend-client.ts` client helper — lazy materialization when user views a task
### Virtual tasks (PR-driven)
During PR sync, `refreshTaskSummaryForBranchMutation` is called for every changed PR (via github-data's `emitPullRequestChangeEvents`). It writes **virtual task entries** to the org actor's local `taskIndex` + `taskSummaries` tables only. No task actor is spawned. No cross-actor calls to task actors.
When the user interacts with a virtual task (clicks it, creates a session):
1. Client or org actor calls `getOrCreate` on the task actor key → actor is created with empty DB
2. Any action on the actor calls `getCurrentRecord()` → sees empty DB → reads branch/title from org's `getTaskIndexEntry` → calls `initBootstrapDbActivity` + `initCompleteActivity` → task is now real
### Call sites to watch
- `refreshTaskSummaryForBranchMutation` — called in bulk during sync. Must ONLY write to org local tables. Never create task actors or call task actor actions.
- `emitPullRequestChangeEvents` in github-data — iterates all changed PRs. Must remain fire-and-forget with no actor fan-out.
## Ownership Rules
- `OrganizationActor` is the organization coordinator and lookup/index owner.
- `HistoryActor` is organization-scoped. There is one organization-level history feed.
- `RepositoryActor` is the repo coordinator and owns repo-local caches/indexes.
- `OrganizationActor` is the organization coordinator, direct coordinator for tasks, and lookup/index owner. It owns the task index, task summaries, and repo catalog.
- `AuditLogActor` is organization-scoped. There is one organization-level audit log feed.
- `TaskActor` is one branch. Treat `1 task = 1 branch` once branch assignment is finalized.
- `TaskActor` can have many sessions.
- `TaskActor` can reference many sandbox instances historically, but should have only one active sandbox/session at a time.
- Session unread state and draft prompts are backend-owned workbench state, not frontend-local state.
- Branch rename is a real git operation, not just metadata.
- Session unread state and draft prompts are backend-owned workspace state, not frontend-local state.
- Branch names are immutable after task creation. Do not implement branch-rename flows.
- `SandboxInstanceActor` stays separate from `TaskActor`; tasks/sessions reference it by identity.
- The backend stores no local git state. No clones, no refs, no working trees, and no git-spice. Repository metadata comes from GitHub API data and webhook events. Any working-tree git operation runs inside a sandbox via `executeInSandbox()`.
- When a backend request path must aggregate multiple independent actor calls or reads, prefer bounded parallelism over sequential fan-out when correctness permits. Do not serialize independent work by default.
- Only a coordinator creates/destroys its children. Do not create child actors from outside the coordinator.
- Children push state changes up to their direct coordinator only — never skip levels (e.g., task pushes to repo, not directly to org, unless org is the direct coordinator for that index).
- Children push state changes up to their direct coordinator only. Task actors push summary updates directly to the organization actor.
- Read paths must use the coordinator's local index tables. Do not fan out to child actors on the hot read path.
- Never build "enriched" read actions that chain through multiple actors (e.g., coordinator → child actor → sibling actor). If data from multiple actors is needed for a read, it should already be materialized in the coordinator's index tables via push updates. If it's not there, fix the write path to push it — do not add a fan-out read path.
## Drizzle Migration Maintenance
After changing any actor's `db/schema.ts`, you **must** regenerate the corresponding migration so the runtime creates the tables that match the schema. Forgetting this step causes `no such table` errors at runtime.
1. **Generate a new drizzle migration.** Run from `packages/backend`:
```bash
npx drizzle-kit generate --config=./src/actors/<actor>/db/drizzle.config.ts
```
If the interactive prompt is unavailable (e.g. in a non-TTY), manually create a new `.sql` file under `./src/actors/<actor>/db/drizzle/` and add the corresponding entry to `meta/_journal.json`.
2. **Regenerate the compiled `migrations.ts`.** Run from the foundry root:
```bash
npx tsx packages/backend/src/actors/_scripts/generate-actor-migrations.ts
```
3. **Verify insert/upsert calls.** Every column with `.notNull()` (and no `.default(...)`) must be provided a value in all `insert()` and `onConflictDoUpdate()` calls. Missing a NOT NULL column causes a runtime constraint violation, not a type error.
4. **Nuke RivetKit state in dev** after migration changes to start fresh:
```bash
docker compose -f compose.dev.yaml down
docker volume rm foundry_foundry_rivetkit_storage
docker compose -f compose.dev.yaml up -d
```
Actors with drizzle migrations: `organization`, `audit-log`, `task`. Other actors (`user`, `github-data`) use inline migrations without drizzle.
## Workflow Step Nesting — FORBIDDEN
**Never call `c.step()` / `ctx.step()` from inside another step's `run` callback.** RivetKit workflow steps cannot be nested. Doing so causes the runtime error: *"Cannot start a new workflow entry while another is in progress."*
This means:
- Functions called from within a step `run` callback must NOT use `c.step()`, `c.loop()`, `c.sleep()`, or `c.queue.next()`.
- If a mutation function needs to be called both from a step and standalone, it must only do plain DB/API work — no workflow primitives. The workflow step wrapping belongs in the workflow file, not in the mutation.
- Helper wrappers that conditionally call `c.step()` (like a `runSyncStep` pattern) are dangerous — if the caller is already inside a step, the nested `c.step()` will crash at runtime with no compile-time warning.
**Rule of thumb:** Workflow primitives (`step`, `loop`, `sleep`, `queue.next`) may only appear at the top level of a workflow function or inside a `loop` callback — never inside a step's `run`.
## SQLite Constraints
- Single-row tables must use an integer primary key with `CHECK (id = 1)` to enforce the singleton invariant at the database level.
- Follow the task actor pattern for metadata/profile rows and keep the fixed row id in code as `1`, not a string sentinel.
## Multiplayer Correctness
Per-user UI state must live on the user actor, not on shared task/session actors. This is critical for multiplayer — multiple users may view the same task simultaneously with different active sessions, unread states, and in-progress drafts.
@ -85,6 +155,49 @@ Per-user UI state must live on the user actor, not on shared task/session actors
Do not store per-user preferences, selections, or ephemeral UI state on shared actors. If a field's value should differ between two users looking at the same task, it belongs on the user actor.
## Audit Log Maintenance
Every new action or command handler that represents a user-visible or workflow-significant event must append to the audit log actor. The audit log must remain a comprehensive record of significant operations.
## Debugging Actors
### RivetKit Inspector UI
The RivetKit inspector UI at `http://localhost:6420/ui/` is the most reliable way to debug actor state in local development. The inspector HTTP API (`/inspector/workflow-history`) has a known bug where it returns empty `{}` even when the workflow has entries — always cross-check with the UI.
**Useful inspector URL pattern:**
```
http://localhost:6420/ui/?u=http%3A%2F%2F127.0.0.1%3A6420&ns=default&r=default&n=[%22<actor-name>%22]&actorId=<actor-id>&tab=<tab>
```
Tabs: `workflow`, `database`, `state`, `queue`, `connections`, `metadata`.
**To find actor IDs:**
```bash
curl -s 'http://127.0.0.1:6420/actors?name=organization'
```
**To query actor DB via bun (inside container):**
```bash
docker compose -f compose.dev.yaml exec -T backend bun -e '
var Database = require("bun:sqlite");
var db = new Database("/root/.local/share/foundry/rivetkit/databases/<actor-id>.db", { readonly: true });
console.log(JSON.stringify(db.query("SELECT name FROM sqlite_master WHERE type=?").all("table")));
'
```
**To call actor actions via inspector:**
```bash
curl -s -X POST 'http://127.0.0.1:6420/gateway/<actor-id>/inspector/action/<actionName>' \
-H 'Content-Type: application/json' -d '{"args":[{}]}'
```
### Known inspector API bugs
- `GET /inspector/workflow-history` may return `{"history":{}}` even when workflow has run. Use the UI's Workflow tab instead.
- `GET /inspector/queue` is reliable for checking pending messages.
- `GET /inspector/state` is reliable for checking actor state.
## Maintenance
- Keep this file up to date whenever actor ownership, hierarchy, or lifecycle responsibilities change.

View file

@ -2,4 +2,4 @@ import { db } from "rivetkit/db/drizzle";
import * as schema from "./schema.js";
import migrations from "./migrations.js";
export const authUserDb = db({ schema, migrations });
export const auditLogDb = db({ schema, migrations });

View file

@ -0,0 +1,6 @@
import { defineConfig } from "rivetkit/db/drizzle";
/**
 * drizzle-kit config for the audit-log actor's SQLite schema.
 * Run `npx drizzle-kit generate --config=./src/actors/audit-log/db/drizzle.config.ts`
 * after any change to schema.ts, then regenerate the compiled migrations.
 */
export default defineConfig({
  out: "./src/actors/audit-log/db/drizzle",
  schema: "./src/actors/audit-log/db/schema.ts",
});

View file

@ -0,0 +1 @@
ALTER TABLE `events` ADD COLUMN `repo_id` text;

View file

@ -1,48 +1,31 @@
{
"version": "6",
"dialect": "sqlite",
"id": "6ffd6acb-e737-46ee-a8fe-fcfddcdd6ea9",
"prevId": "00000000-0000-0000-0000-000000000000",
"id": "a1b2c3d4-0001-4000-8000-000000000001",
"prevId": "e592c829-141f-4740-88b7-09cf957a4405",
"tables": {
"repo_meta": {
"name": "repo_meta",
"events": {
"name": "events",
"columns": {
"id": {
"name": "id",
"type": "integer",
"primaryKey": true,
"notNull": true,
"autoincrement": false
"autoincrement": true
},
"remote_url": {
"name": "remote_url",
"repo_id": {
"name": "repo_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"notNull": false,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"task_index": {
"name": "task_index",
"columns": {
"task_id": {
"name": "task_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"branch_name": {
@ -52,15 +35,22 @@
"notNull": false,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "integer",
"kind": {
"name": "kind",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"payload_json": {
"name": "payload_json",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "integer",
"primaryKey": false,
"notNull": true,

View file

@ -8,6 +8,13 @@
"when": 1773376223815,
"tag": "0000_fluffy_kid_colt",
"breakpoints": true
},
{
"idx": 1,
"version": "6",
"when": 1773376223816,
"tag": "0001_add_repo_id",
"breakpoints": true
}
]
}

View file

@ -10,6 +10,12 @@ const journal = {
tag: "0000_fluffy_kid_colt",
breakpoints: true,
},
{
idx: 1,
when: 1773376223816,
tag: "0001_add_repo_id",
breakpoints: true,
},
],
} as const;
@ -24,6 +30,8 @@ export default {
\`payload_json\` text NOT NULL,
\`created_at\` integer NOT NULL
);
`,
m0001: `ALTER TABLE \`events\` ADD COLUMN \`repo_id\` text;
`,
} as const,
};

View file

@ -2,10 +2,11 @@ import { integer, sqliteTable, text } from "rivetkit/db/drizzle";
/**
 * Append-only audit event rows for one organization-scoped audit-log actor.
 * repoId/taskId/branchName are optional scoping columns used by `list` filters.
 */
export const events = sqliteTable("events", {
  id: integer("id").primaryKey({ autoIncrement: true }),
  repoId: text("repo_id"),
  taskId: text("task_id"),
  branchName: text("branch_name"),
  kind: text("kind").notNull(),
  // Structured by the audit-log event kind definitions in application code.
  payloadJson: text("payload_json").notNull(),
  createdAt: integer("created_at").notNull(),
});

View file

@ -0,0 +1,98 @@
// @ts-nocheck
import { and, desc, eq } from "drizzle-orm";
import { actor } from "rivetkit";
import type { AuditLogEvent } from "@sandbox-agent/foundry-shared";
import { auditLogDb } from "./db/db.js";
import { events } from "./db/schema.js";
/** Creation input for the audit-log actor — one actor instance per organization. */
export interface AuditLogInput {
  organizationId: string;
}

/** Arguments for the `append` action; becomes one row in the `events` table. */
export interface AppendAuditLogCommand {
  // Event kind; the `payload` structure is defined per kind in application code.
  kind: string;
  // Optional scoping so `list` can later filter by repo, task, or branch.
  repoId?: string;
  taskId?: string;
  branchName?: string;
  payload: Record<string, unknown>;
}

/** Filters for the `list` action; all optional and combined with AND. */
export interface ListAuditLogParams {
  repoId?: string;
  branch?: string;
  taskId?: string;
  // Max rows returned, newest first; defaults to 100 in `list`.
  limit?: number;
}
/**
 * Organization-scoped audit log. One per org, not one per repo.
 *
 * The org is the coordinator for all tasks across repos, and we frequently need
 * to query the full audit trail across repos (e.g. org-wide activity feed,
 * compliance). A per-repo audit log would require fan-out reads every time.
 * Keeping it org-scoped gives us a single queryable feed with optional repoId
 * filtering when callers want a narrower view.
 */
export const auditLog = actor({
  db: auditLogDb,
  options: {
    name: "Audit Log",
    icon: "database",
  },
  // Actor state holds only the owning organization id; event rows live in SQLite.
  createState: (_c, input: AuditLogInput) => ({
    organizationId: input.organizationId,
  }),
  actions: {
    /**
     * Append one audit event row.
     *
     * Fails fast on an empty `kind` instead of silently recording an
     * unclassifiable event (project rule: no hidden degradation).
     */
    async append(c, body: AppendAuditLogCommand): Promise<{ ok: true }> {
      if (!body.kind) {
        throw new Error("audit-log append requires a non-empty event kind");
      }
      const now = Date.now();
      await c.db
        .insert(events)
        .values({
          repoId: body.repoId ?? null,
          taskId: body.taskId ?? null,
          branchName: body.branchName ?? null,
          kind: body.kind,
          payloadJson: JSON.stringify(body.payload),
          createdAt: now,
        })
        .run();
      return { ok: true };
    },
    /**
     * List events, newest first, optionally filtered by repo/task/branch.
     * Reads only this actor's local table — no cross-actor fan-out.
     */
    async list(c, params?: ListAuditLogParams): Promise<AuditLogEvent[]> {
      // Typed via eq's return type so this is not an implicit any[] once the
      // file's `@ts-nocheck` escape hatch is removed.
      const whereParts: ReturnType<typeof eq>[] = [];
      if (params?.repoId) {
        whereParts.push(eq(events.repoId, params.repoId));
      }
      if (params?.taskId) {
        whereParts.push(eq(events.taskId, params.taskId));
      }
      if (params?.branch) {
        whereParts.push(eq(events.branchName, params.branch));
      }
      const base = c.db
        .select({
          id: events.id,
          repoId: events.repoId,
          taskId: events.taskId,
          branchName: events.branchName,
          kind: events.kind,
          payloadJson: events.payloadJson,
          createdAt: events.createdAt,
        })
        .from(events);
      const rows = await (whereParts.length > 0 ? base.where(and(...whereParts)) : base)
        .orderBy(desc(events.createdAt))
        .limit(params?.limit ?? 100)
        .all();
      // Normalize every nullable column consistently (previously only repoId
      // was coalesced) and stamp the owning organization onto each event.
      return rows.map((row) => ({
        ...row,
        organizationId: c.state.organizationId,
        repoId: row.repoId ?? null,
        taskId: row.taskId ?? null,
        branchName: row.branchName ?? null,
      }));
    },
  },
});

View file

@ -1,70 +0,0 @@
import { integer, sqliteTable, text, uniqueIndex } from "drizzle-orm/sqlite-core";
// Auth `user` table: one row per authenticated user.
export const authUsers = sqliteTable("user", {
  id: text("id").notNull().primaryKey(),
  name: text("name").notNull(),
  email: text("email").notNull(),
  // NOTE(review): presumably a 0/1 boolean flag — confirm against the auth layer.
  emailVerified: integer("email_verified").notNull(),
  image: text("image"),
  createdAt: integer("created_at").notNull(),
  updatedAt: integer("updated_at").notNull(),
});

// Auth `session` table: login sessions, resolved by their unique token.
export const authSessions = sqliteTable(
  "session",
  {
    id: text("id").notNull().primaryKey(),
    token: text("token").notNull(),
    userId: text("user_id").notNull(),
    expiresAt: integer("expires_at").notNull(),
    ipAddress: text("ip_address"),
    userAgent: text("user_agent"),
    createdAt: integer("created_at").notNull(),
    updatedAt: integer("updated_at").notNull(),
  },
  (table) => ({
    // Unique token lookup index — sessions are found by token, not by id.
    tokenIdx: uniqueIndex("session_token_idx").on(table.token),
  }),
);
export const authAccounts = sqliteTable(
"account",
{
id: text("id").notNull().primaryKey(),
accountId: text("account_id").notNull(),
providerId: text("provider_id").notNull(),
userId: text("user_id").notNull(),
accessToken: text("access_token"),
refreshToken: text("refresh_token"),
idToken: text("id_token"),
accessTokenExpiresAt: integer("access_token_expires_at"),
refreshTokenExpiresAt: integer("refresh_token_expires_at"),
scope: text("scope"),
password: text("password"),
createdAt: integer("created_at").notNull(),
updatedAt: integer("updated_at").notNull(),
},
(table) => ({
providerAccountIdx: uniqueIndex("account_provider_account_idx").on(table.providerId, table.accountId),
}),
);
export const userProfiles = sqliteTable("user_profiles", {
userId: text("user_id").notNull().primaryKey(),
githubAccountId: text("github_account_id"),
githubLogin: text("github_login"),
roleLabel: text("role_label").notNull(),
eligibleOrganizationIdsJson: text("eligible_organization_ids_json").notNull(),
starterRepoStatus: text("starter_repo_status").notNull(),
starterRepoStarredAt: integer("starter_repo_starred_at"),
starterRepoSkippedAt: integer("starter_repo_skipped_at"),
createdAt: integer("created_at").notNull(),
updatedAt: integer("updated_at").notNull(),
});
export const sessionState = sqliteTable("session_state", {
sessionId: text("session_id").notNull().primaryKey(),
activeOrganizationId: text("active_organization_id"),
createdAt: integer("created_at").notNull(),
updatedAt: integer("updated_at").notNull(),
});

View file

@ -1,353 +0,0 @@
import { and, asc, count as sqlCount, desc, eq, gt, gte, inArray, isNotNull, isNull, like, lt, lte, ne, notInArray, or } from "drizzle-orm";
import { actor } from "rivetkit";
import { authUserDb } from "./db/db.js";
import { authAccounts, authSessions, authUsers, sessionState, userProfiles } from "./db/schema.js";
// Maps better-auth adapter model names to their drizzle table objects.
// Keys must match the `model` strings the adapter sends over the wire.
const tables = {
  user: authUsers,
  session: authSessions,
  account: authAccounts,
  userProfiles,
  sessionState,
} as const;
function tableFor(model: string) {
const table = tables[model as keyof typeof tables];
if (!table) {
throw new Error(`Unsupported auth user model: ${model}`);
}
return table as any;
}
/** Resolve a field name to the table's column object, or throw for unknown fields. */
function columnFor(table: any, field: string) {
  const resolved = table[field];
  if (resolved) {
    return resolved;
  }
  throw new Error(`Unsupported auth user field: ${field}`);
}
/**
 * Normalize a better-auth clause operand for SQLite binding:
 * Dates become epoch milliseconds; arrays (e.g. `in` operands) are
 * normalized element-wise; everything else passes through unchanged.
 */
function normalizeValue(value: unknown): unknown {
  if (value instanceof Date) {
    return value.getTime();
  }
  return Array.isArray(value) ? value.map(normalizeValue) : value;
}
// Translate one better-auth where-clause into a drizzle SQL expression.
// NOTE(review): `contains`/`starts_with`/`ends_with` interpolate the raw value
// into a LIKE pattern without escaping `%`/`_`; values containing wildcard
// characters would match more broadly than intended — confirm better-auth
// never sends such values before hardening.
function clauseToExpr(table: any, clause: any) {
  const column = columnFor(table, clause.field);
  const value = normalizeValue(clause.value);
  switch (clause.operator) {
    case "ne":
      // SQL `<> NULL` never matches, so null inequality maps to IS NOT NULL.
      return value === null ? isNotNull(column) : ne(column, value as any);
    case "lt":
      return lt(column, value as any);
    case "lte":
      return lte(column, value as any);
    case "gt":
      return gt(column, value as any);
    case "gte":
      return gte(column, value as any);
    case "in":
      // Scalar operands are wrapped so a single value still produces a valid IN list.
      return inArray(column, Array.isArray(value) ? (value as any[]) : [value as any]);
    case "not_in":
      return notInArray(column, Array.isArray(value) ? (value as any[]) : [value as any]);
    case "contains":
      return like(column, `%${String(value ?? "")}%`);
    case "starts_with":
      return like(column, `${String(value ?? "")}%`);
    case "ends_with":
      return like(column, `%${String(value ?? "")}`);
    case "eq":
    default:
      // Missing/unknown operators fall back to equality (IS NULL for nulls).
      return value === null ? isNull(column) : eq(column, value as any);
  }
}
/**
 * Fold an ordered list of better-auth clauses into one drizzle predicate.
 * Each clause's `connector` ("OR" vs the default AND) joins it to everything
 * accumulated before it; an empty/missing list yields no predicate at all.
 */
function buildWhere(table: any, where: any[] | undefined) {
  if (!where || where.length === 0) {
    return undefined;
  }
  const [head, ...tail] = where;
  return tail.reduce((expr, clause) => {
    const next = clauseToExpr(table, clause);
    return clause.connector === "OR" ? or(expr, next) : and(expr, next);
  }, clauseToExpr(table, head));
}
function applyJoinToRow(c: any, model: string, row: any, join: any) {
if (!row || !join) {
return row;
}
if (model === "session" && join.user) {
return c.db
.select()
.from(authUsers)
.where(eq(authUsers.id, row.userId))
.get()
.then((user: any) => ({ ...row, user: user ?? null }));
}
if (model === "account" && join.user) {
return c.db
.select()
.from(authUsers)
.where(eq(authUsers.id, row.userId))
.get()
.then((user: any) => ({ ...row, user: user ?? null }));
}
if (model === "user" && join.account) {
return c.db
.select()
.from(authAccounts)
.where(eq(authAccounts.userId, row.id))
.all()
.then((accounts: any[]) => ({ ...row, account: accounts }));
}
return Promise.resolve(row);
}
/**
 * Batch variant of the join attachment used by findManyAuthRecords: resolves
 * the requested relation for every row with one bulk `inArray` query instead
 * of N lookups.
 *
 * Fix over the previous version: the byte-identical session/account branches
 * (both join users on row.userId) are merged into one.
 */
async function applyJoinToRows(c: any, model: string, rows: any[], join: any) {
  if (!join || rows.length === 0) {
    return rows;
  }
  if ((model === "session" || model === "account") && join.user) {
    // Dedupe user ids, fetch them in one query, then map back onto each row.
    const userIds = [...new Set(rows.map((row) => row.userId).filter(Boolean))];
    const users = userIds.length > 0 ? await c.db.select().from(authUsers).where(inArray(authUsers.id, userIds)).all() : [];
    const userMap = new Map(users.map((user: any) => [user.id, user]));
    return rows.map((row) => ({ ...row, user: userMap.get(row.userId) ?? null }));
  }
  if (model === "user" && join.account) {
    // A user may own several accounts, so group the bulk result by userId.
    const userIds = rows.map((row) => row.id);
    const accounts = userIds.length > 0 ? await c.db.select().from(authAccounts).where(inArray(authAccounts.userId, userIds)).all() : [];
    const accountsByUserId = new Map<string, any[]>();
    for (const account of accounts) {
      const entries = accountsByUserId.get(account.userId) ?? [];
      entries.push(account);
      accountsByUserId.set(account.userId, entries);
    }
    return rows.map((row) => ({ ...row, account: accountsByUserId.get(row.id) ?? [] }));
  }
  return rows;
}
// Per-user actor backing the better-auth database adapter plus app-level
// profile/session state. All record actions take a better-auth `model` name
// (resolved via tableFor) and generic where-clauses (resolved via buildWhere).
export const authUser = actor({
  db: authUserDb,
  options: {
    name: "Auth User",
    icon: "shield",
    actionTimeout: 60_000,
  },
  // State holds only the identity; all data lives in the actor's SQLite DB.
  createState: (_c, input: { userId: string }) => ({
    userId: input.userId,
  }),
  actions: {
    // Insert one record and return it re-read by id.
    // NOTE(review): assumes `data.id` is always present — better-auth appears
    // to generate ids client-side; confirm for all models.
    async createAuthRecord(c, input: { model: string; data: Record<string, unknown> }) {
      const table = tableFor(input.model);
      await c.db
        .insert(table)
        .values(input.data as any)
        .run();
      return await c.db
        .select()
        .from(table)
        .where(eq(columnFor(table, "id"), input.data.id as any))
        .get();
    },
    // Return the first matching record (or null), with the requested join attached.
    async findOneAuthRecord(c, input: { model: string; where: any[]; join?: any }) {
      const table = tableFor(input.model);
      const predicate = buildWhere(table, input.where);
      const row = predicate ? await c.db.select().from(table).where(predicate).get() : await c.db.select().from(table).get();
      return await applyJoinToRow(c, input.model, row ?? null, input.join);
    },
    // List records with optional filter, sort, and pagination; joins applied in bulk.
    async findManyAuthRecords(c, input: { model: string; where?: any[]; limit?: number; offset?: number; sortBy?: any; join?: any }) {
      const table = tableFor(input.model);
      const predicate = buildWhere(table, input.where);
      let query: any = c.db.select().from(table);
      if (predicate) {
        query = query.where(predicate);
      }
      if (input.sortBy?.field) {
        const column = columnFor(table, input.sortBy.field);
        query = query.orderBy(input.sortBy.direction === "asc" ? asc(column) : desc(column));
      }
      if (typeof input.limit === "number") {
        query = query.limit(input.limit);
      }
      if (typeof input.offset === "number") {
        query = query.offset(input.offset);
      }
      const rows = await query.all();
      return await applyJoinToRows(c, input.model, rows, input.join);
    },
    // Update matching record(s) and return one matching row after the update.
    async updateAuthRecord(c, input: { model: string; where: any[]; update: Record<string, unknown> }) {
      const table = tableFor(input.model);
      const predicate = buildWhere(table, input.where);
      if (!predicate) {
        // Refuse an unscoped update — it would rewrite the whole table.
        throw new Error("updateAuthRecord requires a where clause");
      }
      await c.db
        .update(table)
        .set(input.update as any)
        .where(predicate)
        .run();
      return await c.db.select().from(table).where(predicate).get();
    },
    // Bulk update; returns the count of rows still matching the predicate.
    // NOTE(review): if the update changes fields the predicate filters on,
    // the post-update count may differ from rows actually touched — confirm
    // this matches the adapter's contract.
    async updateManyAuthRecords(c, input: { model: string; where: any[]; update: Record<string, unknown> }) {
      const table = tableFor(input.model);
      const predicate = buildWhere(table, input.where);
      if (!predicate) {
        throw new Error("updateManyAuthRecords requires a where clause");
      }
      await c.db
        .update(table)
        .set(input.update as any)
        .where(predicate)
        .run();
      const row = await c.db.select({ value: sqlCount() }).from(table).where(predicate).get();
      return row?.value ?? 0;
    },
    // Delete matching record(s); no return value per the adapter contract.
    async deleteAuthRecord(c, input: { model: string; where: any[] }) {
      const table = tableFor(input.model);
      const predicate = buildWhere(table, input.where);
      if (!predicate) {
        throw new Error("deleteAuthRecord requires a where clause");
      }
      await c.db.delete(table).where(predicate).run();
    },
    // Bulk delete; counts matches first so the deleted-row count can be returned.
    async deleteManyAuthRecords(c, input: { model: string; where: any[] }) {
      const table = tableFor(input.model);
      const predicate = buildWhere(table, input.where);
      if (!predicate) {
        throw new Error("deleteManyAuthRecords requires a where clause");
      }
      const rows = await c.db.select().from(table).where(predicate).all();
      await c.db.delete(table).where(predicate).run();
      return rows.length;
    },
    // COUNT(*) with an optional filter.
    async countAuthRecords(c, input: { model: string; where?: any[] }) {
      const table = tableFor(input.model);
      const predicate = buildWhere(table, input.where);
      const row = predicate
        ? await c.db.select({ value: sqlCount() }).from(table).where(predicate).get()
        : await c.db.select({ value: sqlCount() }).from(table).get();
      return row?.value ?? 0;
    },
    // One-shot snapshot of everything the app shell needs for a session:
    // session row, user, profile, per-session state, and linked accounts.
    // Returns null for unknown sessions.
    async getAppAuthState(c, input: { sessionId: string }) {
      const session = await c.db.select().from(authSessions).where(eq(authSessions.id, input.sessionId)).get();
      if (!session) {
        return null;
      }
      // The four lookups are independent, so they run in parallel.
      const [user, profile, currentSessionState, accounts] = await Promise.all([
        c.db.select().from(authUsers).where(eq(authUsers.id, session.userId)).get(),
        c.db.select().from(userProfiles).where(eq(userProfiles.userId, session.userId)).get(),
        c.db.select().from(sessionState).where(eq(sessionState.sessionId, input.sessionId)).get(),
        c.db.select().from(authAccounts).where(eq(authAccounts.userId, session.userId)).all(),
      ]);
      return {
        session,
        user,
        profile: profile ?? null,
        sessionState: currentSessionState ?? null,
        accounts,
      };
    },
    // Insert-or-patch the user's profile. On insert, unspecified fields get
    // defaults; on conflict, only the fields explicitly present in the patch
    // are overwritten (undefined means "leave as-is"). Returns the fresh row.
    async upsertUserProfile(
      c,
      input: {
        userId: string;
        patch: {
          githubAccountId?: string | null;
          githubLogin?: string | null;
          roleLabel?: string;
          eligibleOrganizationIdsJson?: string;
          starterRepoStatus?: string;
          starterRepoStarredAt?: number | null;
          starterRepoSkippedAt?: number | null;
        };
      },
    ) {
      const now = Date.now();
      await c.db
        .insert(userProfiles)
        .values({
          userId: input.userId,
          githubAccountId: input.patch.githubAccountId ?? null,
          githubLogin: input.patch.githubLogin ?? null,
          roleLabel: input.patch.roleLabel ?? "GitHub user",
          eligibleOrganizationIdsJson: input.patch.eligibleOrganizationIdsJson ?? "[]",
          starterRepoStatus: input.patch.starterRepoStatus ?? "pending",
          starterRepoStarredAt: input.patch.starterRepoStarredAt ?? null,
          starterRepoSkippedAt: input.patch.starterRepoSkippedAt ?? null,
          createdAt: now,
          updatedAt: now,
        })
        .onConflictDoUpdate({
          target: userProfiles.userId,
          set: {
            ...(input.patch.githubAccountId !== undefined ? { githubAccountId: input.patch.githubAccountId } : {}),
            ...(input.patch.githubLogin !== undefined ? { githubLogin: input.patch.githubLogin } : {}),
            ...(input.patch.roleLabel !== undefined ? { roleLabel: input.patch.roleLabel } : {}),
            ...(input.patch.eligibleOrganizationIdsJson !== undefined ? { eligibleOrganizationIdsJson: input.patch.eligibleOrganizationIdsJson } : {}),
            ...(input.patch.starterRepoStatus !== undefined ? { starterRepoStatus: input.patch.starterRepoStatus } : {}),
            ...(input.patch.starterRepoStarredAt !== undefined ? { starterRepoStarredAt: input.patch.starterRepoStarredAt } : {}),
            ...(input.patch.starterRepoSkippedAt !== undefined ? { starterRepoSkippedAt: input.patch.starterRepoSkippedAt } : {}),
            updatedAt: now,
          },
        })
        .run();
      return await c.db.select().from(userProfiles).where(eq(userProfiles.userId, input.userId)).get();
    },
    // Insert-or-replace the session's app state (active org); returns the row.
    async upsertSessionState(c, input: { sessionId: string; activeOrganizationId: string | null }) {
      const now = Date.now();
      await c.db
        .insert(sessionState)
        .values({
          sessionId: input.sessionId,
          activeOrganizationId: input.activeOrganizationId,
          createdAt: now,
          updatedAt: now,
        })
        .onConflictDoUpdate({
          target: sessionState.sessionId,
          set: {
            activeOrganizationId: input.activeOrganizationId,
            updatedAt: now,
          },
        })
        .run();
      return await c.db.select().from(sessionState).where(eq(sessionState.sessionId, input.sessionId)).get();
    },
  },
});

View file

@ -1,104 +0,0 @@
import type { TaskStatus, SandboxProviderId } from "@sandbox-agent/foundry-shared";
/** Emitted when a task is created for a repo branch. */
export interface TaskCreatedEvent {
  organizationId: string;
  repoId: string;
  taskId: string;
  sandboxProviderId: SandboxProviderId;
  branchName: string;
  title: string;
}
/** Emitted when a task transitions status; `message` is a human-readable note. */
export interface TaskStatusEvent {
  organizationId: string;
  repoId: string;
  taskId: string;
  status: TaskStatus;
  message: string;
}
/** Emitted after a repository's cached snapshot is refreshed. */
export interface RepositorySnapshotEvent {
  organizationId: string;
  repoId: string;
  updatedAt: number;
}
/** Emitted when an agent session begins working on a task. */
export interface AgentStartedEvent {
  organizationId: string;
  repoId: string;
  taskId: string;
  sessionId: string;
}
/** Emitted when an agent session goes idle (no active work). */
export interface AgentIdleEvent {
  organizationId: string;
  repoId: string;
  taskId: string;
  sessionId: string;
}
/** Emitted when an agent session fails; `message` describes the error. */
export interface AgentErrorEvent {
  organizationId: string;
  repoId: string;
  taskId: string;
  message: string;
}
/** Emitted when a pull request is opened for a task. */
export interface PrCreatedEvent {
  organizationId: string;
  repoId: string;
  taskId: string;
  prNumber: number;
  url: string;
}
/** Emitted when a task's PR is closed; `merged` distinguishes merge vs. close. */
export interface PrClosedEvent {
  organizationId: string;
  repoId: string;
  taskId: string;
  prNumber: number;
  merged: boolean;
}
/** Emitted when a review lands on a task's PR. */
export interface PrReviewEvent {
  organizationId: string;
  repoId: string;
  taskId: string;
  prNumber: number;
  reviewer: string;
  status: string;
}
/** Emitted when CI status changes on a task's PR. */
export interface CiStatusChangedEvent {
  organizationId: string;
  repoId: string;
  taskId: string;
  prNumber: number;
  status: string;
}
// Names of the automated git steps a task performs, and their outcomes.
export type TaskStepName = "auto_commit" | "push" | "pr_submit";
export type TaskStepStatus = "started" | "completed" | "skipped" | "failed";
/** Emitted for each automated git step of a task (see TaskStepName). */
export interface TaskStepEvent {
  organizationId: string;
  repoId: string;
  taskId: string;
  step: TaskStepName;
  status: TaskStepStatus;
  message: string;
}
/** Emitted when a task's working branch changes. */
export interface BranchSwitchedEvent {
  organizationId: string;
  repoId: string;
  taskId: string;
  branchName: string;
}
/** Emitted when a client session attaches to a task. */
export interface SessionAttachedEvent {
  organizationId: string;
  repoId: string;
  taskId: string;
  sessionId: string;
}

View file

@ -18,6 +18,12 @@ const journal = {
tag: "0002_github_branches",
breakpoints: true,
},
{
idx: 3,
when: 1773907200000,
tag: "0003_sync_progress",
breakpoints: true,
},
],
} as const;
@ -32,7 +38,8 @@ export default {
\`installation_id\` integer,
\`last_sync_label\` text NOT NULL,
\`last_sync_at\` integer,
\`updated_at\` integer NOT NULL
\`updated_at\` integer NOT NULL,
CONSTRAINT \`github_meta_singleton_id_check\` CHECK(\`id\` = 1)
);
--> statement-breakpoint
CREATE TABLE \`github_repositories\` (
@ -78,6 +85,22 @@ CREATE TABLE \`github_pull_requests\` (
\`commit_sha\` text NOT NULL,
\`updated_at\` integer NOT NULL
);
`,
m0003: `ALTER TABLE \`github_meta\` ADD \`sync_generation\` integer NOT NULL DEFAULT 0;
--> statement-breakpoint
ALTER TABLE \`github_meta\` ADD \`sync_phase\` text;
--> statement-breakpoint
ALTER TABLE \`github_meta\` ADD \`processed_repository_count\` integer NOT NULL DEFAULT 0;
--> statement-breakpoint
ALTER TABLE \`github_meta\` ADD \`total_repository_count\` integer NOT NULL DEFAULT 0;
--> statement-breakpoint
ALTER TABLE \`github_repositories\` ADD \`sync_generation\` integer NOT NULL DEFAULT 0;
--> statement-breakpoint
ALTER TABLE \`github_members\` ADD \`sync_generation\` integer NOT NULL DEFAULT 0;
--> statement-breakpoint
ALTER TABLE \`github_pull_requests\` ADD \`sync_generation\` integer NOT NULL DEFAULT 0;
--> statement-breakpoint
ALTER TABLE \`github_branches\` ADD \`sync_generation\` integer NOT NULL DEFAULT 0;
`,
} as const,
};

View file

@ -1,15 +1,24 @@
import { integer, sqliteTable, text } from "rivetkit/db/drizzle";
import { check, integer, sqliteTable, text } from "rivetkit/db/drizzle";
import { sql } from "drizzle-orm";
export const githubMeta = sqliteTable("github_meta", {
id: integer("id").primaryKey(),
connectedAccount: text("connected_account").notNull(),
installationStatus: text("installation_status").notNull(),
syncStatus: text("sync_status").notNull(),
installationId: integer("installation_id"),
lastSyncLabel: text("last_sync_label").notNull(),
lastSyncAt: integer("last_sync_at"),
updatedAt: integer("updated_at").notNull(),
});
export const githubMeta = sqliteTable(
"github_meta",
{
id: integer("id").primaryKey(),
connectedAccount: text("connected_account").notNull(),
installationStatus: text("installation_status").notNull(),
syncStatus: text("sync_status").notNull(),
installationId: integer("installation_id"),
lastSyncLabel: text("last_sync_label").notNull(),
lastSyncAt: integer("last_sync_at"),
syncGeneration: integer("sync_generation").notNull(),
syncPhase: text("sync_phase"),
processedRepositoryCount: integer("processed_repository_count").notNull(),
totalRepositoryCount: integer("total_repository_count").notNull(),
updatedAt: integer("updated_at").notNull(),
},
(table) => [check("github_meta_singleton_id_check", sql`${table.id} = 1`)],
);
export const githubRepositories = sqliteTable("github_repositories", {
repoId: text("repo_id").notNull().primaryKey(),
@ -17,6 +26,7 @@ export const githubRepositories = sqliteTable("github_repositories", {
cloneUrl: text("clone_url").notNull(),
private: integer("private").notNull(),
defaultBranch: text("default_branch").notNull(),
syncGeneration: integer("sync_generation").notNull(),
updatedAt: integer("updated_at").notNull(),
});
@ -25,6 +35,7 @@ export const githubBranches = sqliteTable("github_branches", {
repoId: text("repo_id").notNull(),
branchName: text("branch_name").notNull(),
commitSha: text("commit_sha").notNull(),
syncGeneration: integer("sync_generation").notNull(),
updatedAt: integer("updated_at").notNull(),
});
@ -35,6 +46,7 @@ export const githubMembers = sqliteTable("github_members", {
email: text("email"),
role: text("role"),
state: text("state").notNull(),
syncGeneration: integer("sync_generation").notNull(),
updatedAt: integer("updated_at").notNull(),
});
@ -51,5 +63,6 @@ export const githubPullRequests = sqliteTable("github_pull_requests", {
baseRefName: text("base_ref_name").notNull(),
authorLogin: text("author_login"),
isDraft: integer("is_draft").notNull(),
syncGeneration: integer("sync_generation").notNull(),
updatedAt: integer("updated_at").notNull(),
});

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,81 @@
// @ts-nocheck
import { logActorWarning, resolveErrorMessage } from "../logging.js";
// Dynamic imports to break circular dependency: index.ts imports workflow.ts,
// and workflow.ts needs functions from index.ts.
// Lazily import ./index.js on each call; Node caches the module instance
// after the first load, so repeated calls are cheap.
async function getIndexModule() {
  return await import("./index.js");
}
// Queue names accepted by runGithubDataCommandLoop below; senders must use
// exactly these strings.
export const GITHUB_DATA_QUEUE_NAMES = [
  "githubData.command.syncRepos",
  "githubData.command.reloadRepository",
  "githubData.command.clearState",
  "githubData.command.handlePullRequestWebhook",
] as const;
export type GithubDataQueueName = (typeof GITHUB_DATA_QUEUE_NAMES)[number];
// Identity helper: gives callers a type-checked way to reference a queue name.
export function githubDataWorkflowQueueName(name: GithubDataQueueName): GithubDataQueueName {
  return name;
}
/**
* Plain run handler (no workflow engine). Drains the queue using `c.queue.iter()`
* with completable messages. This avoids the RivetKit bug where actors created
* from another actor's workflow context never start their `run: workflow(...)`.
*/
export async function runGithubDataCommandLoop(c: any): Promise<void> {
  for await (const msg of c.queue.iter({ names: [...GITHUB_DATA_QUEUE_NAMES], completable: true })) {
    try {
      switch (msg.name) {
        case "githubData.command.syncRepos": {
          // Full sync gets its own error path: record the failure on the
          // actor (best effort) and complete the message with the error.
          try {
            const { runFullSync } = await getIndexModule();
            await runFullSync(c, msg.body);
            await msg.complete({ ok: true });
          } catch (error) {
            const { fullSyncError } = await getIndexModule();
            try {
              await fullSyncError(c, error);
            } catch {
              /* best effort */
            }
            const reason = error instanceof Error ? error.message : String(error);
            await msg.complete({ error: reason }).catch(() => {});
          }
          break;
        }
        case "githubData.command.reloadRepository": {
          const { reloadRepositoryMutation } = await getIndexModule();
          await msg.complete(await reloadRepositoryMutation(c, msg.body));
          break;
        }
        case "githubData.command.clearState": {
          const { clearStateMutation } = await getIndexModule();
          await clearStateMutation(c, msg.body);
          await msg.complete({ ok: true });
          break;
        }
        case "githubData.command.handlePullRequestWebhook": {
          const { handlePullRequestWebhookMutation } = await getIndexModule();
          await handlePullRequestWebhookMutation(c, msg.body);
          await msg.complete({ ok: true });
          break;
        }
        default: {
          logActorWarning("githubData", "unknown queue message", { queueName: msg.name });
          await msg.complete({ error: `Unknown command: ${msg.name}` });
        }
      }
    } catch (error) {
      // Generic failure path for the non-sync commands above: log and ack
      // with the error so the waiting sender is unblocked.
      const message = resolveErrorMessage(error);
      logActorWarning("githubData", "github-data command failed", {
        queueName: msg.name,
        error: message,
      });
      await msg.complete({ error: message }).catch(() => {});
    }
  }
}

View file

@ -1,4 +1,4 @@
import { authUserKey, githubDataKey, historyKey, organizationKey, repositoryKey, taskKey, taskSandboxKey } from "./keys.js";
import { auditLogKey, githubDataKey, organizationKey, taskKey, taskSandboxKey, userKey } from "./keys.js";
export function actorClient(c: any) {
return c.client();
@ -10,28 +10,14 @@ export async function getOrCreateOrganization(c: any, organizationId: string) {
});
}
export async function getOrCreateAuthUser(c: any, userId: string) {
return await actorClient(c).authUser.getOrCreate(authUserKey(userId), {
export async function getOrCreateUser(c: any, userId: string) {
return await actorClient(c).user.getOrCreate(userKey(userId), {
createWithInput: { userId },
});
}
export function getAuthUser(c: any, userId: string) {
return actorClient(c).authUser.get(authUserKey(userId));
}
export async function getOrCreateRepository(c: any, organizationId: string, repoId: string, remoteUrl: string) {
return await actorClient(c).repository.getOrCreate(repositoryKey(organizationId, repoId), {
createWithInput: {
organizationId,
repoId,
remoteUrl,
},
});
}
export function getRepository(c: any, organizationId: string, repoId: string) {
return actorClient(c).repository.get(repositoryKey(organizationId, repoId));
export function getUser(c: any, userId: string) {
return actorClient(c).user.get(userKey(userId));
}
export function getTask(c: any, organizationId: string, repoId: string, taskId: string) {
@ -44,11 +30,10 @@ export async function getOrCreateTask(c: any, organizationId: string, repoId: st
});
}
export async function getOrCreateHistory(c: any, organizationId: string, repoId: string) {
return await actorClient(c).history.getOrCreate(historyKey(organizationId, repoId), {
export async function getOrCreateAuditLog(c: any, organizationId: string) {
return await actorClient(c).auditLog.getOrCreate(auditLogKey(organizationId), {
createWithInput: {
organizationId,
repoId,
},
});
}
@ -75,8 +60,8 @@ export async function getOrCreateTaskSandbox(c: any, organizationId: string, san
});
}
export function selfHistory(c: any) {
return actorClient(c).history.getForId(c.actorId);
export function selfAuditLog(c: any) {
return actorClient(c).auditLog.getForId(c.actorId);
}
export function selfTask(c: any) {
@ -87,12 +72,8 @@ export function selfOrganization(c: any) {
return actorClient(c).organization.getForId(c.actorId);
}
export function selfRepository(c: any) {
return actorClient(c).repository.getForId(c.actorId);
}
export function selfAuthUser(c: any) {
return actorClient(c).authUser.getForId(c.actorId);
export function selfUser(c: any) {
return actorClient(c).user.getForId(c.actorId);
}
export function selfGithubData(c: any) {

View file

@ -1,6 +0,0 @@
import { defineConfig } from "rivetkit/db/drizzle";
// drizzle-kit config for the history actor: schema source and migration output dir.
export default defineConfig({
  out: "./src/actors/history/db/drizzle",
  schema: "./src/actors/history/db/schema.ts",
});

View file

@ -1,115 +0,0 @@
// @ts-nocheck
import { and, desc, eq } from "drizzle-orm";
import { actor, queue } from "rivetkit";
import { Loop, workflow } from "rivetkit/workflow";
import type { HistoryEvent } from "@sandbox-agent/foundry-shared";
import { selfHistory } from "../handles.js";
import { historyDb } from "./db/db.js";
import { events } from "./db/schema.js";
/** Identity of a history actor: one instance per (organization, repo). */
export interface HistoryInput {
  organizationId: string;
  repoId: string;
}
/** Payload for the append queue command; optional fields scope the event. */
export interface AppendHistoryCommand {
  kind: string;
  taskId?: string;
  branchName?: string;
  payload: Record<string, unknown>;
}
/** Optional filters/limit for listing history events (limit defaults to 100). */
export interface ListHistoryParams {
  branch?: string;
  taskId?: string;
  limit?: number;
}
// The single queue drained by the workflow loop below.
const HISTORY_QUEUE_NAMES = ["history.command.append"] as const;
/** Insert one history event row; optional scope fields are stored as NULL. */
async function appendHistoryRow(loopCtx: any, body: AppendHistoryCommand): Promise<void> {
  const row = {
    taskId: body.taskId ?? null,
    branchName: body.branchName ?? null,
    kind: body.kind,
    payloadJson: JSON.stringify(body.payload),
    createdAt: Date.now(),
  };
  await loopCtx.db.insert(events).values(row).run();
}
// Workflow entry point: a durable loop that drains the history command queue.
// NOTE(review): `queue.next` and `step` are named, which suggests they are
// journaled for deterministic replay by the workflow engine — confirm against
// RivetKit workflow semantics before restructuring.
async function runHistoryWorkflow(ctx: any): Promise<void> {
  await ctx.loop("history-command-loop", async (loopCtx: any) => {
    const msg = await loopCtx.queue.next("next-history-command", {
      names: [...HISTORY_QUEUE_NAMES],
      completable: true,
    });
    if (!msg) {
      // Nothing queued; yield and poll again on the next loop turn.
      return Loop.continue(undefined);
    }
    if (msg.name === "history.command.append") {
      // Persist inside a named step, then ack so the sender's `wait` resolves.
      await loopCtx.step("append-history-row", async () => appendHistoryRow(loopCtx, msg.body as AppendHistoryCommand));
      await msg.complete({ ok: true });
    }
    return Loop.continue(undefined);
  });
}
// Per-(org, repo) append-only event log actor. Writes are serialized through
// the workflow queue; reads query the actor's SQLite DB directly.
export const history = actor({
  db: historyDb,
  queues: {
    "history.command.append": queue(),
  },
  options: {
    name: "History",
    icon: "database",
  },
  createState: (_c, input: HistoryInput) => ({
    organizationId: input.organizationId,
    repoId: input.repoId,
  }),
  actions: {
    // Enqueue an append to self and wait for the workflow loop to ack it,
    // so all writes go through the single serialized queue consumer.
    async append(c, command: AppendHistoryCommand): Promise<void> {
      const self = selfHistory(c);
      await self.send("history.command.append", command, { wait: true, timeout: 15_000 });
    },
    // List events newest-first with optional task/branch filters (default 100).
    async list(c, params?: ListHistoryParams): Promise<HistoryEvent[]> {
      const whereParts = [];
      if (params?.taskId) {
        whereParts.push(eq(events.taskId, params.taskId));
      }
      if (params?.branch) {
        whereParts.push(eq(events.branchName, params.branch));
      }
      const base = c.db
        .select({
          id: events.id,
          taskId: events.taskId,
          branchName: events.branchName,
          kind: events.kind,
          payloadJson: events.payloadJson,
          createdAt: events.createdAt,
        })
        .from(events);
      const rows = await (whereParts.length > 0 ? base.where(and(...whereParts)) : base)
        .orderBy(desc(events.createdAt))
        .limit(params?.limit ?? 100)
        .all();
      // Org/repo scope is not stored per-row; it is stamped from actor state.
      return rows.map((row) => ({
        ...row,
        organizationId: c.state.organizationId,
        repoId: c.state.repoId,
      }));
    },
  },
  run: workflow(runHistoryWorkflow),
});

View file

@ -1,9 +1,8 @@
import { authUser } from "./auth-user/index.js";
import { user } from "./user/index.js";
import { setup } from "rivetkit";
import { githubData } from "./github-data/index.js";
import { task } from "./task/index.js";
import { history } from "./history/index.js";
import { repository } from "./repository/index.js";
import { auditLog } from "./audit-log/index.js";
import { taskSandbox } from "./sandbox/index.js";
import { organization } from "./organization/index.js";
import { logger } from "../logging.js";
@ -21,23 +20,20 @@ export const registry = setup({
baseLogger: logger,
},
use: {
authUser,
user,
organization,
repository,
task,
taskSandbox,
history,
auditLog,
githubData,
},
});
export * from "./context.js";
export * from "./events.js";
export * from "./auth-user/index.js";
export * from "./audit-log/index.js";
export * from "./user/index.js";
export * from "./github-data/index.js";
export * from "./task/index.js";
export * from "./history/index.js";
export * from "./keys.js";
export * from "./repository/index.js";
export * from "./sandbox/index.js";
export * from "./organization/index.js";

View file

@ -4,24 +4,21 @@ export function organizationKey(organizationId: string): ActorKey {
return ["org", organizationId];
}
export function authUserKey(userId: string): ActorKey {
export function userKey(userId: string): ActorKey {
return ["org", "app", "user", userId];
}
export function repositoryKey(organizationId: string, repoId: string): ActorKey {
return ["org", organizationId, "repository", repoId];
}
export function taskKey(organizationId: string, repoId: string, taskId: string): ActorKey {
return ["org", organizationId, "repository", repoId, "task", taskId];
return ["org", organizationId, "task", repoId, taskId];
}
export function taskSandboxKey(organizationId: string, sandboxId: string): ActorKey {
return ["org", organizationId, "sandbox", sandboxId];
}
export function historyKey(organizationId: string, repoId: string): ActorKey {
return ["org", organizationId, "repository", repoId, "history"];
/** One audit log per org (not per repo) — see audit-log/index.ts for rationale. */
export function auditLogKey(organizationId: string): ActorKey {
return ["org", organizationId, "audit-log"];
}
export function githubDataKey(organizationId: string): ActorKey {

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1 @@
export { organizationAppActions } from "../app-shell.js";

View file

@ -0,0 +1,323 @@
import {
and,
asc,
count as sqlCount,
desc,
eq,
gt,
gte,
inArray,
isNotNull,
isNull,
like,
lt,
lte,
ne,
notInArray,
or,
} from "drizzle-orm";
import { authAccountIndex, authEmailIndex, authSessionIndex, authVerification } from "../db/schema.js";
import { APP_SHELL_ORGANIZATION_ID } from "../constants.js";
/** Guard: these auth-index actions only run on the app-shell organization actor. */
function assertAppOrganization(c: any): void {
  const actual = c.state.organizationId;
  if (actual === APP_SHELL_ORGANIZATION_ID) {
    return;
  }
  throw new Error(`App shell action requires organization ${APP_SHELL_ORGANIZATION_ID}, got ${actual}`);
}
/** Resolve a field name to the table's column object, or throw for unknown fields. */
function organizationAuthColumn(table: any, field: string): any {
  const resolved = table[field];
  if (resolved) {
    return resolved;
  }
  throw new Error(`Unknown auth table field: ${field}`);
}
/**
 * Normalize a clause operand for SQLite binding: Dates become epoch
 * milliseconds, arrays are normalized element-wise, all else passes through.
 */
function normalizeAuthValue(value: unknown): unknown {
  if (value instanceof Date) {
    return value.getTime();
  }
  return Array.isArray(value) ? value.map(normalizeAuthValue) : value;
}
// Translate one better-auth where-clause into a drizzle SQL expression.
// NOTE(review): `contains`/`starts_with`/`ends_with` interpolate the raw value
// into a LIKE pattern without escaping `%`/`_` — confirm callers never pass
// values containing wildcard characters.
function organizationAuthClause(table: any, clause: { field: string; value: unknown; operator?: string }): any {
  const column = organizationAuthColumn(table, clause.field);
  const value = normalizeAuthValue(clause.value);
  switch (clause.operator) {
    case "ne":
      // SQL `<> NULL` never matches, so null inequality maps to IS NOT NULL.
      return value === null ? isNotNull(column) : ne(column, value as any);
    case "lt":
      return lt(column, value as any);
    case "lte":
      return lte(column, value as any);
    case "gt":
      return gt(column, value as any);
    case "gte":
      return gte(column, value as any);
    case "in":
      // Scalar operands are wrapped so a single value still forms a valid IN list.
      return inArray(column, Array.isArray(value) ? (value as any[]) : [value as any]);
    case "not_in":
      return notInArray(column, Array.isArray(value) ? (value as any[]) : [value as any]);
    case "contains":
      return like(column, `%${String(value ?? "")}%`);
    case "starts_with":
      return like(column, `${String(value ?? "")}%`);
    case "ends_with":
      return like(column, `%${String(value ?? "")}`);
    case "eq":
    default:
      // Missing/unknown operators fall back to equality (IS NULL for nulls).
      return value === null ? isNull(column) : eq(column, value as any);
  }
}
/**
 * Fold an ordered clause list into a single drizzle predicate. Each clause's
 * `connector` ("OR" vs default AND) joins it to everything accumulated so
 * far; an empty or missing list yields no predicate (undefined).
 */
function organizationBetterAuthWhere(table: any, clauses: any[] | undefined): any {
  if (!clauses?.length) {
    return undefined;
  }
  const [head, ...tail] = clauses;
  return tail.reduce((expr, clause) => {
    const next = organizationAuthClause(table, clause);
    return clause.connector === "OR" ? or(expr, next) : and(expr, next);
  }, organizationAuthClause(table, head));
}
// Insert-or-update the (sessionId -> token, userId) index row used to route
// auth lookups to the owning user actor. Returns the fresh row.
export async function betterAuthUpsertSessionIndexMutation(c: any, input: { sessionId: string; sessionToken: string; userId: string }) {
  assertAppOrganization(c);
  const now = Date.now();
  await c.db
    .insert(authSessionIndex)
    .values({
      sessionId: input.sessionId,
      sessionToken: input.sessionToken,
      userId: input.userId,
      createdAt: now,
      updatedAt: now,
    })
    .onConflictDoUpdate({
      target: authSessionIndex.sessionId,
      set: {
        sessionToken: input.sessionToken,
        userId: input.userId,
        updatedAt: now,
      },
    })
    .run();
  return await c.db.select().from(authSessionIndex).where(eq(authSessionIndex.sessionId, input.sessionId)).get();
}
export async function betterAuthDeleteSessionIndexMutation(c: any, input: { sessionId?: string; sessionToken?: string }) {
  // Remove session index rows matching whichever identifiers were supplied.
  assertAppOrganization(c);
  const clauses: Array<{ field: string; value: unknown }> = [];
  if (input.sessionId) {
    clauses.push({ field: "sessionId", value: input.sessionId });
  }
  if (input.sessionToken) {
    clauses.push({ field: "sessionToken", value: input.sessionToken });
  }
  if (clauses.length === 0) {
    // No identifier given — bail out rather than delete without a predicate.
    return;
  }
  const predicate = organizationBetterAuthWhere(authSessionIndex, clauses);
  await c.db.delete(authSessionIndex).where(predicate!).run();
}
export async function betterAuthUpsertEmailIndexMutation(c: any, input: { email: string; userId: string }) {
  // Insert-or-refresh the email -> user lookup row, then return the stored row.
  assertAppOrganization(c);
  const timestamp = Date.now();
  const mutableFields = {
    userId: input.userId,
    updatedAt: timestamp,
  };
  await c.db
    .insert(authEmailIndex)
    .values({ email: input.email, ...mutableFields })
    .onConflictDoUpdate({ target: authEmailIndex.email, set: mutableFields })
    .run();
  return await c.db
    .select()
    .from(authEmailIndex)
    .where(eq(authEmailIndex.email, input.email))
    .get();
}
export async function betterAuthDeleteEmailIndexMutation(c: any, input: { email: string }) {
  // Drop the email -> user lookup row for the given address.
  assertAppOrganization(c);
  const predicate = eq(authEmailIndex.email, input.email);
  await c.db.delete(authEmailIndex).where(predicate).run();
}
export async function betterAuthUpsertAccountIndexMutation(
  c: any,
  input: { id: string; providerId: string; accountId: string; userId: string },
) {
  // Insert-or-refresh the OAuth account lookup row keyed by id, then return it.
  assertAppOrganization(c);
  const timestamp = Date.now();
  // Everything except the primary key is overwritten on conflict.
  const mutableFields = {
    providerId: input.providerId,
    accountId: input.accountId,
    userId: input.userId,
    updatedAt: timestamp,
  };
  await c.db
    .insert(authAccountIndex)
    .values({ id: input.id, ...mutableFields })
    .onConflictDoUpdate({ target: authAccountIndex.id, set: mutableFields })
    .run();
  return await c.db
    .select()
    .from(authAccountIndex)
    .where(eq(authAccountIndex.id, input.id))
    .get();
}
export async function betterAuthDeleteAccountIndexMutation(c: any, input: { id?: string; providerId?: string; accountId?: string }) {
  // Delete by primary id when given; otherwise fall back to the
  // (providerId, accountId) pair. With neither identifier this is a no-op —
  // we never delete without a predicate.
  assertAppOrganization(c);
  if (input.id) {
    await c.db.delete(authAccountIndex).where(eq(authAccountIndex.id, input.id)).run();
  } else if (input.providerId && input.accountId) {
    const pairPredicate = and(
      eq(authAccountIndex.providerId, input.providerId),
      eq(authAccountIndex.accountId, input.accountId),
    );
    await c.db.delete(authAccountIndex).where(pairPredicate).run();
  }
}
export async function betterAuthCreateVerificationMutation(c: any, input: { data: Record<string, unknown> }) {
  // Persist a new verification row and echo it back by id.
  assertAppOrganization(c);
  const record = input.data as any;
  await c.db.insert(authVerification).values(record).run();
  const idPredicate = eq(authVerification.id, input.data.id as string);
  return await c.db.select().from(authVerification).where(idPredicate).get();
}
export async function betterAuthUpdateVerificationMutation(c: any, input: { where: any[]; update: Record<string, unknown> }) {
  // Apply `update` to rows matching `where`, then return one matching row.
  // Returns null when the filter is empty (never update unfiltered).
  assertAppOrganization(c);
  const predicate = organizationBetterAuthWhere(authVerification, input.where);
  if (!predicate) {
    return null;
  }
  await c.db.update(authVerification).set(input.update as any).where(predicate).run();
  // NOTE(review): if `update` changes a field referenced by `where`, this
  // re-select can miss the updated row — confirm callers tolerate that.
  return await c.db.select().from(authVerification).where(predicate).get();
}
/**
 * Update every verification row matching `where` and return the number of
 * rows targeted. Returns 0 (and performs no update) for an empty filter.
 */
export async function betterAuthUpdateManyVerificationMutation(c: any, input: { where: any[]; update: Record<string, unknown> }) {
  assertAppOrganization(c);
  const predicate = organizationBetterAuthWhere(authVerification, input.where);
  if (!predicate) {
    return 0;
  }
  // Count BEFORE updating: if `update` mutates a field referenced by the
  // predicate, counting afterwards under-reports (possibly 0) even though
  // rows were changed.
  const row = await c.db.select({ value: sqlCount() }).from(authVerification).where(predicate).get();
  const matched = row?.value ?? 0;
  if (matched > 0) {
    await c.db.update(authVerification).set(input.update as any).where(predicate).run();
  }
  return matched;
}
export async function betterAuthDeleteVerificationMutation(c: any, input: { where: any[] }) {
  // Delete verification rows matching `where`; an empty filter deletes nothing.
  assertAppOrganization(c);
  const predicate = organizationBetterAuthWhere(authVerification, input.where);
  if (predicate) {
    await c.db.delete(authVerification).where(predicate).run();
  }
}
export async function betterAuthDeleteManyVerificationMutation(c: any, input: { where: any[] }) {
assertAppOrganization(c);
const predicate = organizationBetterAuthWhere(authVerification, input.where);
if (!predicate) {
return 0;
}
const rows = await c.db.select().from(authVerification).where(predicate).all();
await c.db.delete(authVerification).where(predicate).run();
return rows.length;
}
export const organizationBetterAuthActions = {
  /** Look up a session index row by id and/or token; null when no identifier was given. */
  async betterAuthFindSessionIndex(c: any, input: { sessionId?: string; sessionToken?: string }) {
    assertAppOrganization(c);
    const clauses: Array<{ field: string; value: unknown }> = [];
    if (input.sessionId) {
      clauses.push({ field: "sessionId", value: input.sessionId });
    }
    if (input.sessionToken) {
      clauses.push({ field: "sessionToken", value: input.sessionToken });
    }
    if (clauses.length === 0) {
      return null;
    }
    const predicate = organizationBetterAuthWhere(authSessionIndex, clauses);
    return await c.db.select().from(authSessionIndex).where(predicate!).get();
  },
  /** Resolve the lookup row for an email address. */
  async betterAuthFindEmailIndex(c: any, input: { email: string }) {
    assertAppOrganization(c);
    return await c.db.select().from(authEmailIndex).where(eq(authEmailIndex.email, input.email)).get();
  },
  /** Find an account row by primary id, or by the (providerId, accountId) pair. */
  async betterAuthFindAccountIndex(c: any, input: { id?: string; providerId?: string; accountId?: string }) {
    assertAppOrganization(c);
    if (input.id) {
      return await c.db.select().from(authAccountIndex).where(eq(authAccountIndex.id, input.id)).get();
    }
    if (!input.providerId || !input.accountId) {
      return null;
    }
    const pairPredicate = and(
      eq(authAccountIndex.providerId, input.providerId),
      eq(authAccountIndex.accountId, input.accountId),
    );
    return await c.db.select().from(authAccountIndex).where(pairPredicate).get();
  },
  /** First verification row matching `where`, or null for an empty filter. */
  async betterAuthFindOneVerification(c: any, input: { where: any[] }) {
    assertAppOrganization(c);
    const predicate = organizationBetterAuthWhere(authVerification, input.where);
    if (!predicate) {
      return null;
    }
    return await c.db.select().from(authVerification).where(predicate).get();
  },
  /** List verification rows with optional filter, ordering, and pagination. */
  async betterAuthFindManyVerification(c: any, input: { where?: any[]; limit?: number; sortBy?: any; offset?: number }) {
    assertAppOrganization(c);
    const predicate = organizationBetterAuthWhere(authVerification, input.where);
    let query = c.db.select().from(authVerification);
    if (predicate) {
      query = query.where(predicate);
    }
    const sortField = input.sortBy?.field;
    if (sortField) {
      const column = organizationAuthColumn(authVerification, sortField);
      query = query.orderBy(input.sortBy.direction === "asc" ? asc(column) : desc(column));
    }
    if (typeof input.limit === "number") {
      query = query.limit(input.limit);
    }
    if (typeof input.offset === "number") {
      query = query.offset(input.offset);
    }
    return await query.all();
  },
  /** Count verification rows, optionally restricted by `where`. */
  async betterAuthCountVerification(c: any, input: { where?: any[] }) {
    assertAppOrganization(c);
    const predicate = organizationBetterAuthWhere(authVerification, input.where);
    const base = c.db.select({ value: sqlCount() }).from(authVerification);
    const row = predicate ? await base.where(predicate).get() : await base.get();
    return row?.value ?? 0;
  },
};

View file

@ -0,0 +1,78 @@
import { desc } from "drizzle-orm";
import type { FoundryAppSnapshot } from "@sandbox-agent/foundry-shared";
import { getOrCreateGithubData, getOrCreateOrganization } from "../../handles.js";
import { authSessionIndex } from "../db/schema.js";
import {
assertAppOrganization,
buildAppSnapshot,
requireEligibleOrganization,
requireSignedInSession,
markOrganizationSyncStartedMutation,
} from "../app-shell.js";
import { getBetterAuthService } from "../../../services/better-auth.js";
import { refreshOrganizationSnapshotMutation } from "../actions.js";
export const organizationGithubActions = {
  /**
   * Find a usable GitHub access token for the organization by scanning known
   * sessions, most recently updated first. Skips sessions whose active org
   * differs, sessions without a token, and — unless requireRepoScope === false —
   * tokens whose scope list is non-empty but lacks any "repo" scope.
   * Returns null when no session qualifies.
   */
  async resolveAppGithubToken(
    c: any,
    input: { organizationId: string; requireRepoScope?: boolean },
  ): Promise<{ accessToken: string; scopes: string[] } | null> {
    assertAppOrganization(c);
    const auth = getBetterAuthService();
    const sessions = await c.db.select().from(authSessionIndex).orderBy(desc(authSessionIndex.updatedAt)).all();
    for (const session of sessions) {
      const authState = await auth.getAuthState(session.sessionId);
      if (authState?.sessionState?.activeOrganizationId !== input.organizationId) {
        continue;
      }
      const token = await auth.getAccessTokenForSession(session.sessionId);
      if (!token?.accessToken) {
        continue;
      }
      const scopes = token.scopes;
      const hasRepoScope = scopes.some((scope) => scope === "repo" || scope.startsWith("repo:"));
      // An empty scope list is accepted as-is (scope info unavailable).
      if (input.requireRepoScope !== false && scopes.length > 0 && !hasRepoScope) {
        continue;
      }
      return { accessToken: token.accessToken, scopes };
    }
    return null;
  },
  /**
   * Kick off a repository catalog import unless one is already running;
   * returns a fresh app snapshot either way.
   */
  async triggerAppRepoImport(c: any, input: { sessionId: string; organizationId: string }): Promise<FoundryAppSnapshot> {
    assertAppOrganization(c);
    const session = await requireSignedInSession(c, input.sessionId);
    requireEligibleOrganization(session, input.organizationId);
    const githubData = await getOrCreateGithubData(c, input.organizationId);
    const summary = await githubData.getSummary({});
    if (summary.syncStatus !== "syncing") {
      const organizationHandle = await getOrCreateOrganization(c, input.organizationId);
      await organizationHandle.commandMarkSyncStarted({ label: "Importing repository catalog..." });
      await organizationHandle.commandBroadcastSnapshot({});
      // Fire-and-forget: the sync runs in the background and its errors are
      // surfaced through the sync status, not this call.
      void githubData.syncRepos({ label: "Importing repository catalog..." }).catch(() => {});
    }
    return await buildAppSnapshot(c, input.sessionId);
  },
  /** Admin escape hatch: force a full resync of the org's GitHub repo catalog. */
  async adminReloadGithubOrganization(c: any): Promise<void> {
    const githubData = await getOrCreateGithubData(c, c.state.organizationId);
    await githubData.syncRepos({ label: "Reloading GitHub organization..." });
  },
  /** Admin escape hatch: refresh a single repository's cached GitHub data. */
  async adminReloadGithubRepository(c: any, input: { repoId: string }): Promise<void> {
    const githubData = await getOrCreateGithubData(c, c.state.organizationId);
    await githubData.reloadRepository(input);
  },
};

View file

@ -0,0 +1,82 @@
import { randomUUID } from "node:crypto";
import type { FoundryAppSnapshot, StarSandboxAgentRepoInput, StarSandboxAgentRepoResult } from "@sandbox-agent/foundry-shared";
import { getOrCreateGithubData, getOrCreateOrganization } from "../../handles.js";
import {
assertAppOrganization,
buildAppSnapshot,
getOrganizationState,
requireEligibleOrganization,
requireSignedInSession,
} from "../app-shell.js";
import { getBetterAuthService } from "../../../services/better-auth.js";
import { getActorRuntimeContext } from "../../context.js";
import { resolveOrganizationGithubAuth } from "../../../services/github-auth.js";
// Starter repository users are prompted to star during onboarding
// (see starAppStarterRepo / starSandboxAgentRepo below).
const SANDBOX_AGENT_REPO = "rivet-dev/sandbox-agent";
export const organizationOnboardingActions = {
  /** Record that the user dismissed the starter-repo prompt; returns a fresh snapshot. */
  async skipAppStarterRepo(c: any, input: { sessionId: string }): Promise<FoundryAppSnapshot> {
    assertAppOrganization(c);
    const session = await requireSignedInSession(c, input.sessionId);
    const auth = getBetterAuthService();
    // Skipping clears any previous "starred" timestamp.
    await auth.upsertUserProfile(session.authUserId, {
      starterRepoStatus: "skipped",
      starterRepoSkippedAt: Date.now(),
      starterRepoStarredAt: null,
    });
    return await buildAppSnapshot(c, input.sessionId);
  },
  /** Star the starter repo via the organization actor, then mark the profile as starred. */
  async starAppStarterRepo(c: any, input: { sessionId: string; organizationId: string }): Promise<FoundryAppSnapshot> {
    assertAppOrganization(c);
    const session = await requireSignedInSession(c, input.sessionId);
    requireEligibleOrganization(session, input.organizationId);
    const organization = await getOrCreateOrganization(c, input.organizationId);
    await organization.starSandboxAgentRepo({ organizationId: input.organizationId });
    // Starring clears any previous "skipped" timestamp.
    await getBetterAuthService().upsertUserProfile(session.authUserId, {
      starterRepoStatus: "starred",
      starterRepoStarredAt: Date.now(),
      starterRepoSkippedAt: null,
    });
    return await buildAppSnapshot(c, input.sessionId);
  },
  /** Switch the session's active organization and warm up its GitHub data actor. */
  async selectAppOrganization(c: any, input: { sessionId: string; organizationId: string }): Promise<FoundryAppSnapshot> {
    assertAppOrganization(c);
    const session = await requireSignedInSession(c, input.sessionId);
    requireEligibleOrganization(session, input.organizationId);
    await getBetterAuthService().setActiveOrganization(input.sessionId, input.organizationId);
    // Ensure the GitHub data actor exists so the snapshot below has data to show.
    await getOrCreateGithubData(c, input.organizationId);
    return await buildAppSnapshot(c, input.sessionId);
  },
  /** Build the GitHub App installation URL for the org, or fall back to the org page. */
  async beginAppGithubInstall(c: any, input: { sessionId: string; organizationId: string }): Promise<{ url: string }> {
    assertAppOrganization(c);
    const session = await requireSignedInSession(c, input.sessionId);
    requireEligibleOrganization(session, input.organizationId);
    const { appShell } = getActorRuntimeContext();
    const organizationHandle = await getOrCreateOrganization(c, input.organizationId);
    const organizationState = await getOrganizationState(organizationHandle);
    if (organizationState.snapshot.kind !== "organization") {
      // Not backed by a GitHub organization — send the user to the org page.
      return { url: `${appShell.appUrl}/organizations/${input.organizationId}` };
    }
    const installUrl = await appShell.github.buildInstallationUrl(organizationState.githubLogin, randomUUID());
    return { url: installUrl };
  },
  /** Star the canonical sandbox-agent repository using the org's GitHub auth, if any. */
  async starSandboxAgentRepo(c: any, input: StarSandboxAgentRepoInput): Promise<StarSandboxAgentRepoResult> {
    const { driver } = getActorRuntimeContext();
    const auth = await resolveOrganizationGithubAuth(c, c.state.organizationId);
    await driver.github.starRepository(SANDBOX_AGENT_REPO, { githubToken: auth?.githubToken ?? null });
    return { repo: SANDBOX_AGENT_REPO, starredAt: Date.now() };
  },
};

View file

@ -0,0 +1,55 @@
import type { FoundryAppSnapshot, UpdateFoundryOrganizationProfileInput, WorkspaceModelId } from "@sandbox-agent/foundry-shared";
import { getBetterAuthService } from "../../../services/better-auth.js";
import { getOrCreateOrganization } from "../../handles.js";
// actions called directly (no queue)
import {
assertAppOrganization,
assertOrganizationShell,
buildAppSnapshot,
buildOrganizationState,
buildOrganizationStateIfInitialized,
requireEligibleOrganization,
requireSignedInSession,
} from "../app-shell.js";
// org queue names removed — using direct actions
export const organizationShellActions = {
  /** Build the full app snapshot for a session. */
  async getAppSnapshot(c: any, input: { sessionId: string }): Promise<FoundryAppSnapshot> {
    return await buildAppSnapshot(c, input.sessionId);
  },
  /** Persist the user's preferred default model, then return a fresh snapshot. */
  async setAppDefaultModel(c: any, input: { sessionId: string; defaultModel: WorkspaceModelId }): Promise<FoundryAppSnapshot> {
    assertAppOrganization(c);
    const session = await requireSignedInSession(c, input.sessionId);
    const profilePatch = { defaultModel: input.defaultModel };
    await getBetterAuthService().upsertUserProfile(session.authUserId, profilePatch);
    return await buildAppSnapshot(c, input.sessionId);
  },
  /** Forward profile edits (display name / slug / domain) to the organization actor. */
  async updateAppOrganizationProfile(
    c: any,
    input: { sessionId: string; organizationId: string } & UpdateFoundryOrganizationProfileInput,
  ): Promise<FoundryAppSnapshot> {
    assertAppOrganization(c);
    const session = await requireSignedInSession(c, input.sessionId);
    requireEligibleOrganization(session, input.organizationId);
    const organization = await getOrCreateOrganization(c, input.organizationId);
    const { displayName, slug, primaryDomain } = input;
    await organization.commandUpdateShellProfile({ displayName, slug, primaryDomain });
    return await buildAppSnapshot(c, input.sessionId);
  },
  /** Return the organization shell state, building/initializing it if needed. */
  async getOrganizationShellState(c: any): Promise<any> {
    assertOrganizationShell(c);
    return await buildOrganizationState(c);
  },
  /** Return the shell state only if already initialized; null otherwise. */
  async getOrganizationShellStateIfInitialized(c: any): Promise<any | null> {
    assertOrganizationShell(c);
    return await buildOrganizationStateIfInitialized(c);
  },
};

View file

@ -0,0 +1,543 @@
// @ts-nocheck
import { randomUUID } from "node:crypto";
import { and, desc, eq, isNotNull, ne } from "drizzle-orm";
import type {
RepoOverview,
SandboxProviderId,
TaskRecord,
TaskSummary,
WorkspacePullRequestSummary,
WorkspaceSessionSummary,
WorkspaceTaskSummary,
} from "@sandbox-agent/foundry-shared";
import { getActorRuntimeContext } from "../../context.js";
import { getGithubData, getOrCreateAuditLog, getOrCreateTask, getTask } from "../../handles.js";
// task actions called directly (no queue)
import { deriveFallbackTitle, resolveCreateFlowDecision } from "../../../services/create-flow.js";
// actions return directly (no queue response unwrapping)
import { isActorNotFoundError, logActorWarning, resolveErrorMessage } from "../../logging.js";
import { defaultSandboxProviderId } from "../../../sandbox-config.js";
import { taskIndex, taskSummaries } from "../db/schema.js";
import { refreshOrganizationSnapshotMutation } from "../actions.js";
// Payload for user-initiated task creation (see createTaskMutation).
interface CreateTaskCommand {
  repoId: string;
  // Free-form task description; also the source for a derived title/branch.
  task: string;
  sandboxProviderId: SandboxProviderId;
  // Caller-chosen title; a fallback is derived from `task` when null.
  explicitTitle: string | null;
  // Caller-chosen branch name; one is generated when null.
  explicitBranchName: string | null;
  // When set, attach the task to this existing branch instead of creating one.
  onBranch: string | null;
}
// Payload for claiming a branch for a task in the org's task index
// (see registerTaskBranchMutation).
interface RegisterTaskBranchCommand {
  repoId: string;
  taskId: string;
  branchName: string;
  // When true, fail if the branch does not exist on the GitHub remote.
  requireExistingRemote?: boolean;
}
function isStaleTaskReferenceError(error: unknown): boolean {
  // Treat both "actor missing" and the task actor's own "Task not found:"
  // error as evidence that the referenced task no longer exists.
  if (isActorNotFoundError(error)) {
    return true;
  }
  return resolveErrorMessage(error).startsWith("Task not found:");
}
function parseJsonValue<T>(value: string | null | undefined, fallback: T): T {
  // Defensive JSON.parse: empty/null input or malformed JSON yields `fallback`.
  if (value == null || value === "") {
    return fallback;
  }
  try {
    return JSON.parse(value) as T;
  } catch {
    return fallback;
  }
}
function taskSummaryRowFromSummary(taskSummary: WorkspaceTaskSummary) {
  // Flatten a task summary into the taskSummaries table row shape; nested
  // structures (PR, sessions) are serialized as JSON strings.
  const { id, repoId, title, status, repoName, updatedAtMs, branch } = taskSummary;
  return {
    taskId: id,
    repoId,
    title,
    status,
    repoName,
    updatedAtMs,
    branch,
    pullRequestJson: JSON.stringify(taskSummary.pullRequest),
    sessionsSummaryJson: JSON.stringify(taskSummary.sessionsSummary),
  };
}
export function taskSummaryFromRow(repoId: string, row: any): WorkspaceTaskSummary {
  // Inverse of taskSummaryRowFromSummary: rehydrate a table row into a summary,
  // tolerating missing/corrupt JSON columns.
  const pullRequest = parseJsonValue<WorkspacePullRequestSummary | null>(row.pullRequestJson, null);
  const sessionsSummary = parseJsonValue<WorkspaceSessionSummary[]>(row.sessionsSummaryJson, []);
  return {
    id: row.taskId,
    repoId,
    title: row.title,
    status: row.status,
    repoName: row.repoName,
    updatedAtMs: row.updatedAtMs,
    branch: row.branch ?? null,
    pullRequest,
    sessionsSummary,
  };
}
export async function upsertTaskSummary(c: any, taskSummary: WorkspaceTaskSummary): Promise<void> {
  // Insert or fully overwrite the projection row for this task (keyed by taskId).
  const row = taskSummaryRowFromSummary(taskSummary);
  await c.db
    .insert(taskSummaries)
    .values(row)
    .onConflictDoUpdate({ target: taskSummaries.taskId, set: row })
    .run();
}
async function deleteStaleTaskIndexRow(c: any, taskId: string): Promise<void> {
  const predicate = eq(taskIndex.taskId, taskId);
  try {
    await c.db.delete(taskIndex).where(predicate).run();
  } catch {
    // Best-effort only: a leftover stale index row is harmless.
  }
}
async function listKnownTaskBranches(c: any, repoId: string): Promise<string[]> {
  // All non-empty branch names already claimed by tasks in this repo.
  const predicate = and(eq(taskIndex.repoId, repoId), isNotNull(taskIndex.branchName));
  const rows = await c.db.select({ branchName: taskIndex.branchName }).from(taskIndex).where(predicate).all();
  const names: string[] = [];
  for (const row of rows) {
    const name = row.branchName;
    if (typeof name === "string" && name.trim().length > 0) {
      names.push(name);
    }
  }
  return names;
}
async function resolveGitHubRepository(c: any, repoId: string) {
  // Null on any lookup failure — callers treat repo metadata as optional.
  const githubData = getGithubData(c, c.state.organizationId);
  try {
    return await githubData.getRepository({ repoId });
  } catch {
    return null;
  }
}
async function listGitHubBranches(c: any, repoId: string): Promise<Array<{ branchName: string; commitSha: string }>> {
  // Empty list on failure — branch data is advisory for overview/create flows.
  const githubData = getGithubData(c, c.state.organizationId);
  try {
    return await githubData.listBranchesForRepository({ repoId });
  } catch {
    return [];
  }
}
async function resolveRepositoryRemoteUrl(c: any, repoId: string): Promise<string> {
  // A task cannot exist without a clonable remote, so a missing URL is fatal.
  const repository = await resolveGitHubRepository(c, repoId);
  const remoteUrl = repository?.cloneUrl?.trim();
  if (remoteUrl) {
    return remoteUrl;
  }
  throw new Error(`Missing remote URL for repo ${repoId}`);
}
/**
 * The ONLY backend code path that creates a task actor via getOrCreateTask.
 * Called when a user explicitly creates a new task (not during sync/webhooks).
 *
 * All other code must use getTask (handles.ts) which calls .get() and will
 * error if the actor doesn't exist. Virtual tasks created during PR sync
 * are materialized lazily by the client's getOrCreate in backend-client.ts.
 *
 * NEVER call this from a sync loop or webhook handler.
 */
export async function createTaskMutation(c: any, cmd: CreateTaskCommand): Promise<TaskRecord> {
  const organizationId = c.state.organizationId;
  const repoId = cmd.repoId;
  // Fail fast if the repo has no clonable remote before reserving anything.
  await resolveRepositoryRemoteUrl(c, repoId);
  const onBranch = cmd.onBranch?.trim() || null;
  const taskId = randomUUID();
  let initialBranchName: string | null = null;
  let initialTitle: string | null = null;
  if (onBranch) {
    // Attach to an existing remote branch: claim it in the task index and
    // derive a title from the task text / explicit title.
    initialBranchName = onBranch;
    initialTitle = deriveFallbackTitle(cmd.task, cmd.explicitTitle ?? undefined);
    await registerTaskBranchMutation(c, {
      repoId,
      taskId,
      branchName: onBranch,
      requireExistingRemote: true,
    });
  } else {
    // Fresh branch: choose a name that avoids branches already claimed by
    // other tasks, then reserve it in the index.
    const reservedBranches = await listKnownTaskBranches(c, repoId);
    const resolved = resolveCreateFlowDecision({
      task: cmd.task,
      explicitTitle: cmd.explicitTitle ?? undefined,
      explicitBranchName: cmd.explicitBranchName ?? undefined,
      localBranches: [],
      taskBranches: reservedBranches,
    });
    initialBranchName = resolved.branchName;
    initialTitle = resolved.title;
    const now = Date.now();
    await c.db
      .insert(taskIndex)
      .values({
        taskId,
        repoId,
        branchName: resolved.branchName,
        createdAt: now,
        updatedAt: now,
      })
      .onConflictDoNothing()
      .run();
  }
  let taskHandle: Awaited<ReturnType<typeof getOrCreateTask>>;
  try {
    taskHandle = await getOrCreateTask(c, organizationId, repoId, taskId, {
      organizationId,
      repoId,
      taskId,
    });
  } catch (error) {
    // Roll back the branch reservation if the actor could not be created.
    if (initialBranchName) {
      await deleteStaleTaskIndexRow(c, taskId);
    }
    throw error;
  }
  const created = await taskHandle.initialize({
    sandboxProviderId: cmd.sandboxProviderId,
    branchName: initialBranchName,
    title: initialTitle,
    task: cmd.task,
  });
  // Seed the org-side projection and broadcast a snapshot; failures are
  // non-fatal since the task itself was created successfully.
  try {
    await upsertTaskSummary(c, await taskHandle.getTaskSummary({}));
    await refreshOrganizationSnapshotMutation(c);
  } catch (error) {
    logActorWarning("organization", "failed seeding task summary after task creation", {
      organizationId,
      repoId,
      taskId,
      error: resolveErrorMessage(error),
    });
  }
  // Fire-and-forget audit trail entry.
  const auditLog = await getOrCreateAuditLog(c, organizationId);
  void auditLog.append({
    kind: "task.created",
    repoId,
    taskId,
    payload: {
      repoId,
      sandboxProviderId: cmd.sandboxProviderId,
    },
  });
  // NOTE(review): this second summary upsert repeats the seeding above —
  // presumably to capture state changed after the audit append, but it looks
  // redundant; confirm whether one of the two can be removed.
  try {
    const taskSummary = await taskHandle.getTaskSummary({});
    await upsertTaskSummary(c, taskSummary);
  } catch (error) {
    logActorWarning("organization", "failed seeding organization task projection", {
      organizationId,
      repoId,
      taskId,
      error: resolveErrorMessage(error),
    });
  }
  return created;
}
/**
 * Claim `branchName` for `taskId` in the org's task index.
 *
 * Rejects the claim when another live task already owns the branch; a stale
 * owner (whose actor no longer exists) is evicted and the claim proceeds.
 * Returns the trimmed branch name plus the best-known head SHA (falls back
 * to the default branch's SHA, or "" when neither is known).
 *
 * @throws when branchName is blank, owned by another live task, or — with
 *   requireExistingRemote — missing from the GitHub remote.
 */
export async function registerTaskBranchMutation(c: any, cmd: RegisterTaskBranchCommand): Promise<{ branchName: string; headSha: string }> {
  const branchName = cmd.branchName.trim();
  if (!branchName) {
    throw new Error("branchName is required");
  }
  // Look for a DIFFERENT task already holding this branch in this repo.
  const existingOwner = await c.db
    .select({ taskId: taskIndex.taskId })
    .from(taskIndex)
    .where(and(eq(taskIndex.branchName, branchName), eq(taskIndex.repoId, cmd.repoId), ne(taskIndex.taskId, cmd.taskId)))
    .get();
  if (existingOwner) {
    let ownerMissing = false;
    try {
      // Probe whether the owning task actor still exists.
      await getTask(c, c.state.organizationId, cmd.repoId, existingOwner.taskId).get();
    } catch (error) {
      if (isStaleTaskReferenceError(error)) {
        // Owner is gone: evict its stale index row and allow the claim.
        ownerMissing = true;
        await deleteStaleTaskIndexRow(c, existingOwner.taskId);
      } else {
        throw error;
      }
    }
    if (!ownerMissing) {
      throw new Error(`branch is already assigned to a different task: ${branchName}`);
    }
  }
  const branches = await listGitHubBranches(c, cmd.repoId);
  const branchMatch = branches.find((branch) => branch.branchName === branchName) ?? null;
  if (cmd.requireExistingRemote && !branchMatch) {
    throw new Error(`Remote branch not found: ${branchName}`);
  }
  // Head SHA: prefer the branch's own tip, else the default branch's, else "".
  const repository = await resolveGitHubRepository(c, cmd.repoId);
  const defaultBranch = repository?.defaultBranch ?? "main";
  const headSha = branchMatch?.commitSha ?? branches.find((branch) => branch.branchName === defaultBranch)?.commitSha ?? "";
  const now = Date.now();
  // Upsert keyed by taskId: re-registering simply moves the task onto the new branch.
  await c.db
    .insert(taskIndex)
    .values({
      taskId: cmd.taskId,
      repoId: cmd.repoId,
      branchName,
      createdAt: now,
      updatedAt: now,
    })
    .onConflictDoUpdate({
      target: taskIndex.taskId,
      set: {
        branchName,
        updatedAt: now,
      },
    })
    .run();
  return { branchName, headSha };
}
export async function applyTaskSummaryUpdateMutation(c: any, input: { taskSummary: WorkspaceTaskSummary }): Promise<void> {
  // Persist the task actor's latest summary, then broadcast a fresh org snapshot.
  const { taskSummary } = input;
  await upsertTaskSummary(c, taskSummary);
  await refreshOrganizationSnapshotMutation(c);
}
export async function removeTaskSummaryMutation(c: any, input: { taskId: string }): Promise<void> {
  // Drop the projection row for a removed task and broadcast the change.
  const predicate = eq(taskSummaries.taskId, input.taskId);
  await c.db.delete(taskSummaries).where(predicate).run();
  await refreshOrganizationSnapshotMutation(c);
}
/**
 * Called for every changed PR during sync and on webhook PR events.
 * Runs in a bulk loop MUST NOT create task actors or make cross-actor calls
 * to task actors. Only writes to the org's local taskIndex/taskSummaries tables.
 * Task actors are created lazily when the user views the task.
 *
 * When no summary row exists for the branch and a PR is present, a "virtual"
 * task entry is inserted; otherwise existing rows get their PR data refreshed.
 */
export async function refreshTaskSummaryForBranchMutation(
  c: any,
  input: { repoId: string; branchName: string; pullRequest?: WorkspacePullRequestSummary | null; repoName?: string },
): Promise<void> {
  const pullRequest = input.pullRequest ?? null;
  // Existing summary rows for this (repo, branch) pair.
  let rows = await c.db
    .select({ taskId: taskSummaries.taskId })
    .from(taskSummaries)
    .where(and(eq(taskSummaries.branch, input.branchName), eq(taskSummaries.repoId, input.repoId)))
    .all();
  if (rows.length === 0 && pullRequest) {
    // Create a virtual task entry in the org's local tables only.
    // No task actor is spawned — it will be created lazily when the user
    // clicks on the task in the sidebar (the "materialize" path).
    const taskId = randomUUID();
    const now = Date.now();
    // Fall back to the branch name when the PR has no usable title.
    const title = pullRequest.title?.trim() || input.branchName;
    const repoName = input.repoName ?? `${c.state.organizationId}/${input.repoId}`;
    await c.db
      .insert(taskIndex)
      .values({ taskId, repoId: input.repoId, branchName: input.branchName, createdAt: now, updatedAt: now })
      .onConflictDoNothing()
      .run();
    await c.db
      .insert(taskSummaries)
      .values({
        taskId,
        repoId: input.repoId,
        title,
        status: "init_complete",
        repoName,
        updatedAtMs: pullRequest.updatedAtMs ?? now,
        branch: input.branchName,
        pullRequestJson: JSON.stringify(pullRequest),
        sessionsSummaryJson: "[]",
      })
      .onConflictDoNothing()
      .run();
    rows = [{ taskId }];
  } else {
    // Update PR data on existing task summaries locally.
    // If a real task actor exists, also notify it.
    for (const row of rows) {
      // Update the local summary with the new PR data
      await c.db
        .update(taskSummaries)
        .set({
          pullRequestJson: pullRequest ? JSON.stringify(pullRequest) : null,
          updatedAtMs: pullRequest?.updatedAtMs ?? Date.now(),
        })
        .where(eq(taskSummaries.taskId, row.taskId))
        .run();
      // Best-effort notify the task actor if it exists (fire-and-forget)
      try {
        const task = getTask(c, c.state.organizationId, input.repoId, row.taskId);
        void task.pullRequestSync({ pullRequest }).catch(() => {});
      } catch {
        // Task actor doesn't exist yet — that's fine, it's virtual
      }
    }
  }
  // Broadcast the updated projection to connected clients.
  await refreshOrganizationSnapshotMutation(c);
}
export function sortOverviewBranches(
  branches: Array<{
    branchName: string;
    commitSha: string;
    taskId: string | null;
    taskTitle: string | null;
    taskStatus: TaskRecord["status"] | null;
    pullRequest: WorkspacePullRequestSummary | null;
    ciStatus: string | null;
    updatedAt: number;
  }>,
  defaultBranch: string | null,
) {
  // Ordering: default branch first, then branches with an attached task,
  // then most recently updated, then alphabetical. Input is not mutated.
  type Branch = (typeof branches)[number];
  const compare = (left: Branch, right: Branch): number => {
    if (defaultBranch) {
      const leftIsDefault = left.branchName === defaultBranch;
      const rightIsDefault = right.branchName === defaultBranch;
      if (leftIsDefault !== rightIsDefault) {
        return leftIsDefault ? -1 : 1;
      }
    }
    const leftHasTask = Boolean(left.taskId);
    const rightHasTask = Boolean(right.taskId);
    if (leftHasTask !== rightHasTask) {
      return leftHasTask ? -1 : 1;
    }
    if (left.updatedAt !== right.updatedAt) {
      return right.updatedAt - left.updatedAt;
    }
    return left.branchName.localeCompare(right.branchName);
  };
  return branches.slice().sort(compare);
}
export async function listTaskSummariesForRepo(c: any, repoId: string, includeArchived = false): Promise<TaskSummary[]> {
const rows = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, repoId)).orderBy(desc(taskSummaries.updatedAtMs)).all();
return rows
.map((row) => ({
organizationId: c.state.organizationId,
repoId,
taskId: row.taskId,
branchName: row.branch ?? null,
title: row.title,
status: row.status,
updatedAt: row.updatedAtMs,
pullRequest: parseJsonValue<WorkspacePullRequestSummary | null>(row.pullRequestJson, null),
}))
.filter((row) => includeArchived || row.status !== "archived");
}
export async function listAllTaskSummaries(c: any, includeArchived = false): Promise<TaskSummary[]> {
const rows = await c.db.select().from(taskSummaries).orderBy(desc(taskSummaries.updatedAtMs)).all();
return rows
.map((row) => ({
organizationId: c.state.organizationId,
repoId: row.repoId,
taskId: row.taskId,
branchName: row.branch ?? null,
title: row.title,
status: row.status,
updatedAt: row.updatedAtMs,
pullRequest: parseJsonValue<WorkspacePullRequestSummary | null>(row.pullRequestJson, null),
}))
.filter((row) => includeArchived || row.status !== "archived");
}
export async function listWorkspaceTaskSummaries(c: any): Promise<WorkspaceTaskSummary[]> {
const rows = await c.db.select().from(taskSummaries).orderBy(desc(taskSummaries.updatedAtMs)).all();
return rows.map((row) => taskSummaryFromRow(row.repoId, row));
}
/**
 * Assemble a repository overview (branches + task/PR metadata) from the org's
 * local projections and the GitHub data actor. GitHub lookups degrade to
 * empty data rather than erroring; only a missing remote URL throws.
 */
export async function getRepoOverviewFromOrg(c: any, repoId: string): Promise<RepoOverview> {
  const now = Date.now();
  const repository = await resolveGitHubRepository(c, repoId);
  // NOTE(review): resolveRepositoryRemoteUrl re-runs the same getRepository
  // lookup performed just above — consider deriving the URL from `repository`.
  const remoteUrl = await resolveRepositoryRemoteUrl(c, repoId);
  const githubBranches = await listGitHubBranches(c, repoId).catch(() => []);
  const taskRows = await c.db.select().from(taskSummaries).where(eq(taskSummaries.repoId, repoId)).all();
  // Index task metadata by branch name; rows without a branch are skipped.
  const taskMetaByBranch = new Map<
    string,
    { taskId: string; title: string | null; status: TaskRecord["status"] | null; updatedAt: number; pullRequest: WorkspacePullRequestSummary | null }
  >();
  for (const row of taskRows) {
    if (!row.branch) {
      continue;
    }
    taskMetaByBranch.set(row.branch, {
      taskId: row.taskId,
      title: row.title ?? null,
      status: row.status,
      updatedAt: row.updatedAtMs,
      pullRequest: parseJsonValue<WorkspacePullRequestSummary | null>(row.pullRequestJson, null),
    });
  }
  // Union of remote branches, task-claimed branches, and the default branch;
  // branches known only locally get an empty commit SHA.
  const branchMap = new Map<string, { branchName: string; commitSha: string }>();
  for (const branch of githubBranches) {
    branchMap.set(branch.branchName, branch);
  }
  for (const branchName of taskMetaByBranch.keys()) {
    if (!branchMap.has(branchName)) {
      branchMap.set(branchName, { branchName, commitSha: "" });
    }
  }
  if (repository?.defaultBranch && !branchMap.has(repository.defaultBranch)) {
    branchMap.set(repository.defaultBranch, { branchName: repository.defaultBranch, commitSha: "" });
  }
  const branches = sortOverviewBranches(
    [...branchMap.values()].map((branch) => {
      const taskMeta = taskMetaByBranch.get(branch.branchName);
      const pr = taskMeta?.pullRequest ?? null;
      return {
        branchName: branch.branchName,
        commitSha: branch.commitSha,
        taskId: taskMeta?.taskId ?? null,
        taskTitle: taskMeta?.title ?? null,
        taskStatus: taskMeta?.status ?? null,
        pullRequest: pr,
        ciStatus: null,
        // NOTE(review): including `now` in the max makes every branch's
        // updatedAt the current time, so the recency tie-break in
        // sortOverviewBranches never differentiates — confirm intended.
        updatedAt: Math.max(taskMeta?.updatedAt ?? 0, pr?.updatedAtMs ?? 0, now),
      };
    }),
    repository?.defaultBranch ?? null,
  );
  return {
    organizationId: c.state.organizationId,
    repoId,
    remoteUrl,
    baseRef: repository?.defaultBranch ?? null,
    fetchedAt: now,
    branches,
  };
}
/**
 * Resolve repository metadata from the org's GitHub data actor.
 * The remote URL is mandatory and throws when missing; defaultBranch and
 * fullName are best-effort (null when repo data is unavailable).
 */
export async function getRepositoryMetadataFromOrg(
  c: any,
  repoId: string,
): Promise<{ defaultBranch: string | null; fullName: string | null; remoteUrl: string }> {
  const repository = await resolveGitHubRepository(c, repoId);
  // Derive the remote URL from the repository we already fetched instead of
  // calling resolveRepositoryRemoteUrl, which would repeat the exact same
  // cross-actor getRepository lookup.
  const remoteUrl = repository?.cloneUrl?.trim();
  if (!remoteUrl) {
    // Same contract and message as resolveRepositoryRemoteUrl.
    throw new Error(`Missing remote URL for repo ${repoId}`);
  }
  return {
    defaultBranch: repository?.defaultBranch ?? null,
    fullName: repository?.fullName ?? null,
    remoteUrl,
  };
}
export async function findTaskForBranch(c: any, repoId: string, branchName: string): Promise<{ taskId: string | null }> {
  // Resolve which task (if any) owns this branch according to the projections.
  const predicate = and(eq(taskSummaries.branch, branchName), eq(taskSummaries.repoId, repoId));
  const row = await c.db.select({ taskId: taskSummaries.taskId }).from(taskSummaries).where(predicate).get();
  return { taskId: row?.taskId ?? null };
}

View file

@ -0,0 +1,340 @@
// @ts-nocheck
import { desc, eq } from "drizzle-orm";
import type {
AuditLogEvent,
CreateTaskInput,
HistoryQueryInput,
ListTasksInput,
RepoOverview,
SwitchResult,
TaskRecord,
TaskSummary,
TaskWorkspaceChangeModelInput,
TaskWorkspaceCreateTaskInput,
TaskWorkspaceDiffInput,
TaskWorkspaceRenameInput,
TaskWorkspaceRenameSessionInput,
TaskWorkspaceSelectInput,
TaskWorkspaceSetSessionUnreadInput,
TaskWorkspaceSendMessageInput,
TaskWorkspaceSessionInput,
TaskWorkspaceUpdateDraftInput,
} from "@sandbox-agent/foundry-shared";
import { getActorRuntimeContext } from "../../context.js";
import { getOrCreateAuditLog, getOrCreateTask, getTask as getTaskHandle } from "../../handles.js";
import { defaultSandboxProviderId } from "../../../sandbox-config.js";
import { logActorWarning, resolveErrorMessage } from "../../logging.js";
import { taskIndex, taskSummaries } from "../db/schema.js";
import {
createTaskMutation,
getRepoOverviewFromOrg,
getRepositoryMetadataFromOrg,
findTaskForBranch,
listTaskSummariesForRepo,
listAllTaskSummaries,
} from "./task-mutations.js";
/**
 * Guard: every command addressed to this actor must carry the actor's own
 * organizationId; any other value indicates a mis-routed call.
 * @throws Error when the ids disagree.
 */
function assertOrganization(c: { state: { organizationId: string } }, organizationId: string): void {
  const actual = c.state.organizationId;
  if (actual === organizationId) {
    return;
  }
  throw new Error(`Organization actor mismatch: actor=${actual} command=${organizationId}`);
}
/**
 * Look up the repoId for a task from the local task index.
 * Used when callers (e.g. sandbox actor) only have taskId but need repoId
 * to construct the task actor key.
 * @throws when the task has no row in the task index.
 */
async function resolveTaskRepoId(c: any, taskId: string): Promise<string> {
  const entry = await c.db
    .select({ repoId: taskIndex.repoId })
    .from(taskIndex)
    .where(eq(taskIndex.taskId, taskId))
    .get();
  if (entry === undefined) {
    throw new Error(`Task ${taskId} not found in task index`);
  }
  return entry.repoId;
}
/**
 * Get or lazily create a task actor for a user-initiated action.
 * Uses getOrCreate because the user may be interacting with a virtual task
 * (PR-driven) that has no actor yet. The task actor self-initializes in
 * getCurrentRecord() from the org's getTaskIndexEntry data.
 *
 * This is safe because requireWorkspaceTask is only called from user-initiated
 * actions (createSession, sendMessage, etc.), never from sync loops.
 * See CLAUDE.md "Lazy Task Actor Creation".
 */
async function requireWorkspaceTask(c: any, repoId: string, taskId: string) {
  const organizationId = c.state.organizationId;
  const init = { organizationId, repoId, taskId };
  return getOrCreateTask(c, organizationId, repoId, taskId, init);
}
/** Identifies a single task within this organization actor. */
interface GetTaskInput {
  organizationId: string;
  repoId: string;
  taskId: string;
}
/** Task identity plus an optional reason string forwarded to the task actor. */
interface TaskProxyActionInput extends GetTaskInput {
  reason?: string;
}
/** Identifies a single repo within this organization actor. */
interface RepoOverviewInput {
  organizationId: string;
  repoId: string;
}
export { createTaskMutation };
/**
 * Task-related actions exposed on the organization actor.
 *
 * The organization actor coordinates all tasks in the org: it owns the local
 * task index/summary tables and routes workspace commands to per-task actors.
 * Operations whose result the caller does not need are dispatched
 * fire-and-forget (`void ...catch(() => {})`).
 *
 * NOTE(review): unlike createTask/getRepoOverview/listTasks, the workspace
 * actions below do not call assertOrganization — confirm this is intentional
 * (presumably they are only reachable through an already-scoped handle).
 */
export const organizationTaskActions = {
  /**
   * Create a new task in the given repo.
   * Falls back to the configured default sandbox provider when the input does
   * not specify one.
   */
  async createTask(c: any, input: CreateTaskInput): Promise<TaskRecord> {
    assertOrganization(c, input.organizationId);
    const { config } = getActorRuntimeContext();
    const sandboxProviderId = input.sandboxProviderId ?? defaultSandboxProviderId(config);
    // Self-call: call the mutation directly since we're inside the org actor
    return await createTaskMutation(c, {
      repoId: input.repoId,
      task: input.task,
      sandboxProviderId,
      explicitTitle: input.explicitTitle ?? null,
      explicitBranchName: input.explicitBranchName ?? null,
      onBranch: input.onBranch ?? null,
    });
  },
  /**
   * Turn a virtual (PR-driven) task into a real task record.
   * Reuses createTaskMutation with the virtual task id as the task text.
   */
  async materializeTask(c: any, input: { organizationId: string; repoId: string; virtualTaskId: string }): Promise<TaskRecord> {
    assertOrganization(c, input.organizationId);
    const { config } = getActorRuntimeContext();
    // Self-call: call the mutation directly
    return await createTaskMutation(c, {
      repoId: input.repoId,
      task: input.virtualTaskId,
      sandboxProviderId: defaultSandboxProviderId(config),
      explicitTitle: null,
      explicitBranchName: null,
      onBranch: null,
    });
  },
  /**
   * Create a task from the workspace UI and kick off its first session.
   * The initial session/message is fire-and-forget, so no sessionId is
   * returned here even though the return type allows one.
   */
  async createWorkspaceTask(c: any, input: TaskWorkspaceCreateTaskInput): Promise<{ taskId: string; sessionId?: string }> {
    const created = await organizationTaskActions.createTask(c, {
      organizationId: c.state.organizationId,
      repoId: input.repoId,
      task: input.task,
      ...(input.title ? { explicitTitle: input.title } : {}),
      // onBranch (use an existing branch) wins over an explicit new-branch name.
      ...(input.onBranch ? { onBranch: input.onBranch } : input.branch ? { explicitBranchName: input.branch } : {}),
    });
    const task = await requireWorkspaceTask(c, input.repoId, created.taskId);
    void task
      .createSessionAndSend({
        model: input.model,
        text: input.task,
        authSessionId: input.authSessionId,
      })
      .catch(() => {});
    return { taskId: created.taskId };
  },
  /** Mark a task unread for the calling auth session. */
  async markWorkspaceUnread(c: any, input: TaskWorkspaceSelectInput): Promise<void> {
    const task = await requireWorkspaceTask(c, input.repoId, input.taskId);
    await task.markUnread({ authSessionId: input.authSessionId });
  },
  /** Rename a task. */
  async renameWorkspaceTask(c: any, input: TaskWorkspaceRenameInput): Promise<void> {
    const task = await requireWorkspaceTask(c, input.repoId, input.taskId);
    await task.renameTask({ value: input.value });
  },
  /** Create a new session on a task, optionally with an explicit model. */
  async createWorkspaceSession(c: any, input: TaskWorkspaceSelectInput & { model?: string }): Promise<{ sessionId: string }> {
    const task = await requireWorkspaceTask(c, input.repoId, input.taskId);
    return await task.createSession({
      ...(input.model ? { model: input.model } : {}),
      ...(input.authSessionId ? { authSessionId: input.authSessionId } : {}),
    });
  },
  /** Rename a session on a task. */
  async renameWorkspaceSession(c: any, input: TaskWorkspaceRenameSessionInput): Promise<void> {
    const task = await requireWorkspaceTask(c, input.repoId, input.taskId);
    await task.renameSession({ sessionId: input.sessionId, title: input.title, authSessionId: input.authSessionId });
  },
  /** Select a session as the active one for the calling auth session. */
  async selectWorkspaceSession(c: any, input: TaskWorkspaceSessionInput): Promise<void> {
    const task = await requireWorkspaceTask(c, input.repoId, input.taskId);
    await task.selectSession({ sessionId: input.sessionId, authSessionId: input.authSessionId });
  },
  /** Set a session's unread flag. */
  async setWorkspaceSessionUnread(c: any, input: TaskWorkspaceSetSessionUnreadInput): Promise<void> {
    const task = await requireWorkspaceTask(c, input.repoId, input.taskId);
    await task.setSessionUnread({ sessionId: input.sessionId, unread: input.unread, authSessionId: input.authSessionId });
  },
  /** Persist a draft message (fire-and-forget; draft loss is tolerable). */
  async updateWorkspaceDraft(c: any, input: TaskWorkspaceUpdateDraftInput): Promise<void> {
    const task = await requireWorkspaceTask(c, input.repoId, input.taskId);
    void task
      .updateDraft({
        sessionId: input.sessionId,
        text: input.text,
        attachments: input.attachments,
        authSessionId: input.authSessionId,
      })
      .catch(() => {});
  },
  /** Change the model used by a session. */
  async changeWorkspaceModel(c: any, input: TaskWorkspaceChangeModelInput): Promise<void> {
    const task = await requireWorkspaceTask(c, input.repoId, input.taskId);
    await task.changeModel({ sessionId: input.sessionId, model: input.model, authSessionId: input.authSessionId });
  },
  /** Send a message into a session (fire-and-forget). */
  async sendWorkspaceMessage(c: any, input: TaskWorkspaceSendMessageInput): Promise<void> {
    const task = await requireWorkspaceTask(c, input.repoId, input.taskId);
    void task
      .sendMessage({
        sessionId: input.sessionId,
        text: input.text,
        attachments: input.attachments,
        authSessionId: input.authSessionId,
      })
      .catch(() => {});
  },
  /** Stop a running session (fire-and-forget). */
  async stopWorkspaceSession(c: any, input: TaskWorkspaceSessionInput): Promise<void> {
    const task = await requireWorkspaceTask(c, input.repoId, input.taskId);
    void task.stopSession({ sessionId: input.sessionId, authSessionId: input.authSessionId }).catch(() => {});
  },
  /** Close a session (fire-and-forget). */
  async closeWorkspaceSession(c: any, input: TaskWorkspaceSessionInput): Promise<void> {
    const task = await requireWorkspaceTask(c, input.repoId, input.taskId);
    void task.closeSession({ sessionId: input.sessionId, authSessionId: input.authSessionId }).catch(() => {});
  },
  /** Publish the task's PR (fire-and-forget). Input only identifies the task. */
  async publishWorkspacePr(c: any, input: TaskWorkspaceSelectInput): Promise<void> {
    const task = await requireWorkspaceTask(c, input.repoId, input.taskId);
    void task.publishPr({}).catch(() => {});
  },
  /** Revert a file in the task workspace (fire-and-forget). */
  async revertWorkspaceFile(c: any, input: TaskWorkspaceDiffInput): Promise<void> {
    const task = await requireWorkspaceTask(c, input.repoId, input.taskId);
    void task.revertFile(input).catch(() => {});
  },
  /** Build the repo overview (branches, tasks, PRs) from local org data. */
  async getRepoOverview(c: any, input: RepoOverviewInput): Promise<RepoOverview> {
    assertOrganization(c, input.organizationId);
    return await getRepoOverviewFromOrg(c, input.repoId);
  },
  /** List task summaries, either for one repo or across the whole org. */
  async listTasks(c: any, input: ListTasksInput): Promise<TaskSummary[]> {
    assertOrganization(c, input.organizationId);
    if (input.repoId) {
      return await listTaskSummariesForRepo(c, input.repoId, true);
    }
    return await listAllTaskSummaries(c, true);
  },
  /**
   * Switch into a task's workspace. Uses a plain handle (no getOrCreate):
   * the task actor must already exist.
   */
  async switchTask(c: any, input: { repoId: string; taskId: string }): Promise<SwitchResult> {
    const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId);
    const record = await h.get();
    const switched = await h.switchTask({});
    return {
      organizationId: c.state.organizationId,
      taskId: input.taskId,
      sandboxProviderId: record.sandboxProviderId,
      switchTarget: switched.switchTarget,
    };
  },
  /** Query recent audit-log events, optionally filtered by repo/branch/task. */
  async auditLog(c: any, input: HistoryQueryInput): Promise<AuditLogEvent[]> {
    assertOrganization(c, input.organizationId);
    const auditLog = await getOrCreateAuditLog(c, c.state.organizationId);
    return await auditLog.list({
      repoId: input.repoId,
      branch: input.branch,
      taskId: input.taskId,
      limit: input.limit ?? 20,
    });
  },
  /**
   * Fetch the full task record, creating the task actor lazily if needed.
   */
  async getTask(c: any, input: GetTaskInput): Promise<TaskRecord> {
    assertOrganization(c, input.organizationId);
    // Resolve repoId from local task index if not provided (e.g. sandbox actor only has taskId)
    const repoId = input.repoId || (await resolveTaskRepoId(c, input.taskId));
    // Use getOrCreate — the task may be virtual (PR-driven, no actor yet).
    // The task actor self-initializes in getCurrentRecord().
    const handle = await getOrCreateTask(c, c.state.organizationId, repoId, input.taskId, {
      organizationId: c.state.organizationId,
      repoId,
      taskId: input.taskId,
    });
    return await handle.get();
  },
  /** Attach to an existing task actor (returns its target + active session). */
  async attachTask(c: any, input: TaskProxyActionInput): Promise<{ target: string; sessionId: string | null }> {
    assertOrganization(c, input.organizationId);
    const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId);
    return await h.attach({ reason: input.reason });
  },
  /** Push the task branch (fire-and-forget). */
  async pushTask(c: any, input: TaskProxyActionInput): Promise<void> {
    assertOrganization(c, input.organizationId);
    const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId);
    void h.push({ reason: input.reason }).catch(() => {});
  },
  /** Sync the task with its upstream (fire-and-forget). */
  async syncTask(c: any, input: TaskProxyActionInput): Promise<void> {
    assertOrganization(c, input.organizationId);
    const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId);
    void h.sync({ reason: input.reason }).catch(() => {});
  },
  /** Merge the task (fire-and-forget). */
  async mergeTask(c: any, input: TaskProxyActionInput): Promise<void> {
    assertOrganization(c, input.organizationId);
    const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId);
    void h.merge({ reason: input.reason }).catch(() => {});
  },
  /** Archive the task (fire-and-forget). */
  async archiveTask(c: any, input: TaskProxyActionInput): Promise<void> {
    assertOrganization(c, input.organizationId);
    const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId);
    void h.archive({ reason: input.reason }).catch(() => {});
  },
  /** Kill the task's sandbox (fire-and-forget). */
  async killTask(c: any, input: TaskProxyActionInput): Promise<void> {
    assertOrganization(c, input.organizationId);
    const h = getTaskHandle(c, c.state.organizationId, input.repoId, input.taskId);
    void h.kill({ reason: input.reason }).catch(() => {});
  },
  /** Read repository metadata (default branch, full name, remote URL). */
  async getRepositoryMetadata(c: any, input: { repoId: string }): Promise<{ defaultBranch: string | null; fullName: string | null; remoteUrl: string }> {
    return await getRepositoryMetadataFromOrg(c, input.repoId);
  },
  /** Find the task bound to a branch within a repo, if any. */
  async findTaskForBranch(c: any, input: { repoId: string; branchName: string }): Promise<{ taskId: string | null }> {
    return await findTaskForBranch(c, input.repoId, input.branchName);
  },
  /**
   * Lightweight read of task index + summary data. Used by the task actor
   * to self-initialize when lazily materialized from a virtual task.
   * Does NOT trigger materialization — no circular dependency.
   */
  async getTaskIndexEntry(c: any, input: { taskId: string }): Promise<{ branchName: string | null; title: string | null } | null> {
    const idx = await c.db.select({ branchName: taskIndex.branchName }).from(taskIndex).where(eq(taskIndex.taskId, input.taskId)).get();
    const summary = await c.db.select({ title: taskSummaries.title }).from(taskSummaries).where(eq(taskSummaries.taskId, input.taskId)).get();
    if (!idx && !summary) return null;
    return {
      branchName: idx?.branchName ?? null,
      title: summary?.title ?? null,
    };
  },
};

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1 @@
/**
 * Well-known organizationId of the app-shell organization actor
 * (presumably a singleton for app-level, non-tenant state — confirm against
 * callers).
 */
export const APP_SHELL_ORGANIZATION_ID = "app";

View file

@ -56,6 +56,10 @@ CREATE TABLE `organization_profile` (
`github_last_sync_at` integer,
`github_last_webhook_at` integer,
`github_last_webhook_event` text,
`github_sync_generation` integer NOT NULL,
`github_sync_phase` text,
`github_processed_repository_count` integer NOT NULL,
`github_total_repository_count` integer NOT NULL,
`stripe_customer_id` text,
`stripe_subscription_id` text,
`stripe_price_id` text,
@ -86,8 +90,3 @@ CREATE TABLE `stripe_lookup` (
`organization_id` text NOT NULL,
`updated_at` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE `task_lookup` (
`task_id` text PRIMARY KEY NOT NULL,
`repo_id` text NOT NULL
);

View file

@ -0,0 +1,50 @@
CREATE TABLE `auth_session_index` (
`session_id` text PRIMARY KEY NOT NULL,
`session_token` text NOT NULL,
`user_id` text NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE `auth_email_index` (
`email` text PRIMARY KEY NOT NULL,
`user_id` text NOT NULL,
`updated_at` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE `auth_account_index` (
`id` text PRIMARY KEY NOT NULL,
`provider_id` text NOT NULL,
`account_id` text NOT NULL,
`user_id` text NOT NULL,
`updated_at` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE `auth_verification` (
`id` text PRIMARY KEY NOT NULL,
`identifier` text NOT NULL,
`value` text NOT NULL,
`expires_at` integer NOT NULL,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE `task_index` (
`task_id` text PRIMARY KEY NOT NULL,
`repo_id` text NOT NULL,
`branch_name` text,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE `task_summaries` (
`task_id` text PRIMARY KEY NOT NULL,
`repo_id` text NOT NULL,
`title` text NOT NULL,
`status` text NOT NULL,
`repo_name` text NOT NULL,
`updated_at_ms` integer NOT NULL,
`branch` text,
`pull_request_json` text,
`sessions_summary_json` text DEFAULT '[]' NOT NULL
);

View file

@ -373,6 +373,34 @@
"notNull": false,
"autoincrement": false
},
"github_sync_generation": {
"name": "github_sync_generation",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"github_sync_phase": {
"name": "github_sync_phase",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"github_processed_repository_count": {
"name": "github_processed_repository_count",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"github_total_repository_count": {
"name": "github_total_repository_count",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"stripe_customer_id": {
"name": "stripe_customer_id",
"type": "text",
@ -549,30 +577,6 @@
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"task_lookup": {
"name": "task_lookup",
"columns": {
"task_id": {
"name": "task_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"repo_id": {
"name": "repo_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
}
},
"views": {},

View file

@ -8,6 +8,13 @@
"when": 1773376221152,
"tag": "0000_melted_viper",
"breakpoints": true
},
{
"idx": 1,
"version": "6",
"when": 1773840000000,
"tag": "0001_add_auth_and_task_tables",
"breakpoints": true
}
]
}

View file

@ -12,20 +12,8 @@ const journal = {
},
{
idx: 1,
when: 1773638400000,
tag: "0001_auth_index_tables",
breakpoints: true,
},
{
idx: 2,
when: 1773720000000,
tag: "0002_task_summaries",
breakpoints: true,
},
{
idx: 3,
when: 1773810001000,
tag: "0003_drop_provider_profiles",
when: 1773840000000,
tag: "0001_add_auth_and_task_tables",
breakpoints: true,
},
],
@ -92,6 +80,10 @@ CREATE TABLE \`organization_profile\` (
\`github_last_sync_at\` integer,
\`github_last_webhook_at\` integer,
\`github_last_webhook_event\` text,
\`github_sync_generation\` integer NOT NULL,
\`github_sync_phase\` text,
\`github_processed_repository_count\` integer NOT NULL,
\`github_total_repository_count\` integer NOT NULL,
\`stripe_customer_id\` text,
\`stripe_subscription_id\` text,
\`stripe_price_id\` text,
@ -122,13 +114,8 @@ CREATE TABLE \`stripe_lookup\` (
\`organization_id\` text NOT NULL,
\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE \`task_lookup\` (
\`task_id\` text PRIMARY KEY NOT NULL,
\`repo_id\` text NOT NULL
);
`,
m0001: `CREATE TABLE IF NOT EXISTS \`auth_session_index\` (
m0001: `CREATE TABLE \`auth_session_index\` (
\`session_id\` text PRIMARY KEY NOT NULL,
\`session_token\` text NOT NULL,
\`user_id\` text NOT NULL,
@ -136,13 +123,13 @@ CREATE TABLE \`task_lookup\` (
\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS \`auth_email_index\` (
CREATE TABLE \`auth_email_index\` (
\`email\` text PRIMARY KEY NOT NULL,
\`user_id\` text NOT NULL,
\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS \`auth_account_index\` (
CREATE TABLE \`auth_account_index\` (
\`id\` text PRIMARY KEY NOT NULL,
\`provider_id\` text NOT NULL,
\`account_id\` text NOT NULL,
@ -150,7 +137,7 @@ CREATE TABLE IF NOT EXISTS \`auth_account_index\` (
\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS \`auth_verification\` (
CREATE TABLE \`auth_verification\` (
\`id\` text PRIMARY KEY NOT NULL,
\`identifier\` text NOT NULL,
\`value\` text NOT NULL,
@ -158,8 +145,16 @@ CREATE TABLE IF NOT EXISTS \`auth_verification\` (
\`created_at\` integer NOT NULL,
\`updated_at\` integer NOT NULL
);
`,
m0002: `CREATE TABLE IF NOT EXISTS \`task_summaries\` (
--> statement-breakpoint
CREATE TABLE \`task_index\` (
\`task_id\` text PRIMARY KEY NOT NULL,
\`repo_id\` text NOT NULL,
\`branch_name\` text,
\`created_at\` integer NOT NULL,
\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE \`task_summaries\` (
\`task_id\` text PRIMARY KEY NOT NULL,
\`repo_id\` text NOT NULL,
\`title\` text NOT NULL,
@ -170,8 +165,6 @@ CREATE TABLE IF NOT EXISTS \`auth_verification\` (
\`pull_request_json\` text,
\`sessions_summary_json\` text DEFAULT '[]' NOT NULL
);
`,
m0003: `DROP TABLE IF EXISTS \`provider_profiles\`;
`,
} as const,
};

View file

@ -1,34 +1,34 @@
import { integer, sqliteTable, text } from "rivetkit/db/drizzle";
import { check, integer, sqliteTable, text } from "rivetkit/db/drizzle";
import { sql } from "drizzle-orm";
import { DEFAULT_WORKSPACE_MODEL_ID } from "@sandbox-agent/foundry-shared";
// SQLite is per organization actor instance, so no organizationId column needed.
/**
* Coordinator index of RepositoryActor instances.
* The organization actor is the coordinator for repositories.
* Rows are created/removed when repos are added/removed from the organization.
* Coordinator index of TaskActor instances.
* The organization actor is the direct coordinator for tasks (not a per-repo
* actor) because the sidebar needs to query all tasks across all repos on
* every snapshot. With many repos, fanning out to N repo actors on the hot
* read path is too expensive — owning the index here keeps that a single
* local table scan. Each row maps a taskId to its repo and immutable branch
* name. Used for branch conflict checking (scoped by repoId) and
* task-by-branch lookups.
*/
export const repos = sqliteTable("repos", {
repoId: text("repo_id").notNull().primaryKey(),
remoteUrl: text("remote_url").notNull(),
export const taskIndex = sqliteTable("task_index", {
taskId: text("task_id").notNull().primaryKey(),
repoId: text("repo_id").notNull(),
branchName: text("branch_name"),
createdAt: integer("created_at").notNull(),
updatedAt: integer("updated_at").notNull(),
});
/**
* Coordinator index of TaskActor instances.
* Fast taskId → repoId lookup so the organization can route requests
* to the correct RepositoryActor without scanning all repos.
*/
export const taskLookup = sqliteTable("task_lookup", {
taskId: text("task_id").notNull().primaryKey(),
repoId: text("repo_id").notNull(),
});
/**
* Coordinator index of TaskActor instances — materialized sidebar projection.
* Task actors push summary updates to the organization actor via
* applyTaskSummaryUpdate(). Source of truth lives on each TaskActor;
* this table exists so organization reads stay local without fan-out.
* Organization-owned materialized task summary projection.
* Task actors push summary updates directly to the organization coordinator,
* which keeps this table local for fast list/lookups without fan-out.
* Same rationale as taskIndex: the sidebar repeatedly reads all tasks across
* all repos, so the org must own the materialized view to avoid O(repos)
* actor fan-out on the hot read path.
*/
export const taskSummaries = sqliteTable("task_summaries", {
taskId: text("task_id").notNull().primaryKey(),
@ -42,38 +42,46 @@ export const taskSummaries = sqliteTable("task_summaries", {
sessionsSummaryJson: text("sessions_summary_json").notNull().default("[]"),
});
export const organizationProfile = sqliteTable("organization_profile", {
id: text("id").notNull().primaryKey(),
kind: text("kind").notNull(),
githubAccountId: text("github_account_id").notNull(),
githubLogin: text("github_login").notNull(),
githubAccountType: text("github_account_type").notNull(),
displayName: text("display_name").notNull(),
slug: text("slug").notNull(),
primaryDomain: text("primary_domain").notNull(),
defaultModel: text("default_model").notNull(),
autoImportRepos: integer("auto_import_repos").notNull(),
repoImportStatus: text("repo_import_status").notNull(),
githubConnectedAccount: text("github_connected_account").notNull(),
githubInstallationStatus: text("github_installation_status").notNull(),
githubSyncStatus: text("github_sync_status").notNull(),
githubInstallationId: integer("github_installation_id"),
githubLastSyncLabel: text("github_last_sync_label").notNull(),
githubLastSyncAt: integer("github_last_sync_at"),
githubLastWebhookAt: integer("github_last_webhook_at"),
githubLastWebhookEvent: text("github_last_webhook_event"),
stripeCustomerId: text("stripe_customer_id"),
stripeSubscriptionId: text("stripe_subscription_id"),
stripePriceId: text("stripe_price_id"),
billingPlanId: text("billing_plan_id").notNull(),
billingStatus: text("billing_status").notNull(),
billingSeatsIncluded: integer("billing_seats_included").notNull(),
billingTrialEndsAt: text("billing_trial_ends_at"),
billingRenewalAt: text("billing_renewal_at"),
billingPaymentMethodLabel: text("billing_payment_method_label").notNull(),
createdAt: integer("created_at").notNull(),
updatedAt: integer("updated_at").notNull(),
});
export const organizationProfile = sqliteTable(
"organization_profile",
{
id: integer("id").primaryKey(),
kind: text("kind").notNull(),
githubAccountId: text("github_account_id").notNull(),
githubLogin: text("github_login").notNull(),
githubAccountType: text("github_account_type").notNull(),
displayName: text("display_name").notNull(),
slug: text("slug").notNull(),
defaultModel: text("default_model").notNull().default(DEFAULT_WORKSPACE_MODEL_ID),
primaryDomain: text("primary_domain").notNull(),
autoImportRepos: integer("auto_import_repos").notNull(),
repoImportStatus: text("repo_import_status").notNull(),
githubConnectedAccount: text("github_connected_account").notNull(),
githubInstallationStatus: text("github_installation_status").notNull(),
githubSyncStatus: text("github_sync_status").notNull(),
githubInstallationId: integer("github_installation_id"),
githubLastSyncLabel: text("github_last_sync_label").notNull(),
githubLastSyncAt: integer("github_last_sync_at"),
githubLastWebhookAt: integer("github_last_webhook_at"),
githubLastWebhookEvent: text("github_last_webhook_event"),
githubSyncGeneration: integer("github_sync_generation").notNull(),
githubSyncPhase: text("github_sync_phase"),
githubProcessedRepositoryCount: integer("github_processed_repository_count").notNull(),
githubTotalRepositoryCount: integer("github_total_repository_count").notNull(),
stripeCustomerId: text("stripe_customer_id"),
stripeSubscriptionId: text("stripe_subscription_id"),
stripePriceId: text("stripe_price_id"),
billingPlanId: text("billing_plan_id").notNull(),
billingStatus: text("billing_status").notNull(),
billingSeatsIncluded: integer("billing_seats_included").notNull(),
billingTrialEndsAt: text("billing_trial_ends_at"),
billingRenewalAt: text("billing_renewal_at"),
billingPaymentMethodLabel: text("billing_payment_method_label").notNull(),
createdAt: integer("created_at").notNull(),
updatedAt: integer("updated_at").notNull(),
},
(table) => [check("organization_profile_singleton_id_check", sql`${table.id} = 1`)],
);
export const organizationMembers = sqliteTable("organization_members", {
id: text("id").notNull().primaryKey(),
@ -133,6 +141,7 @@ export const authAccountIndex = sqliteTable("auth_account_index", {
updatedAt: integer("updated_at").notNull(),
});
/** Better Auth core model — schema defined at https://better-auth.com/docs/concepts/database */
export const authVerification = sqliteTable("auth_verification", {
id: text("id").notNull().primaryKey(),
identifier: text("identifier").notNull(),

View file

@ -1,11 +1,10 @@
import { actor, queue } from "rivetkit";
import { workflow } from "rivetkit/workflow";
import { actor } from "rivetkit";
import { organizationDb } from "./db/db.js";
import { runOrganizationWorkflow, ORGANIZATION_QUEUE_NAMES, organizationActions } from "./actions.js";
import { organizationActions } from "./actions.js";
import { organizationCommandActions } from "./workflow.js";
export const organization = actor({
db: organizationDb,
queues: Object.fromEntries(ORGANIZATION_QUEUE_NAMES.map((name) => [name, queue()])),
options: {
name: "Organization",
icon: "compass",
@ -14,6 +13,8 @@ export const organization = actor({
createState: (_c, organizationId: string) => ({
organizationId,
}),
actions: organizationActions,
run: workflow(runOrganizationWorkflow),
actions: {
...organizationActions,
...organizationCommandActions,
},
});

View file

@ -0,0 +1,39 @@
/**
 * Catalog of organization command queue names from the pre-actions
 * queue/workflow architecture.
 *
 * NOTE(review): the organization actor no longer registers these queues (its
 * `queues:` option was removed when commands became direct actions), so this
 * module appears to be dead code — confirm nothing imports it and delete.
 */
export const ORGANIZATION_QUEUE_NAMES = [
  "organization.command.createTask",
  "organization.command.materializeTask",
  "organization.command.registerTaskBranch",
  "organization.command.applyTaskSummaryUpdate",
  "organization.command.removeTaskSummary",
  "organization.command.refreshTaskSummaryForBranch",
  "organization.command.snapshot.broadcast",
  "organization.command.syncGithubSession",
  "organization.command.better_auth.session_index.upsert",
  "organization.command.better_auth.session_index.delete",
  "organization.command.better_auth.email_index.upsert",
  "organization.command.better_auth.email_index.delete",
  "organization.command.better_auth.account_index.upsert",
  "organization.command.better_auth.account_index.delete",
  "organization.command.better_auth.verification.create",
  "organization.command.better_auth.verification.update",
  "organization.command.better_auth.verification.update_many",
  "organization.command.better_auth.verification.delete",
  "organization.command.better_auth.verification.delete_many",
  "organization.command.github.sync_progress.apply",
  "organization.command.github.webhook_receipt.record",
  "organization.command.github.organization_shell.sync_from_github",
  "organization.command.shell.profile.update",
  "organization.command.shell.sync_started.mark",
  "organization.command.billing.stripe_customer.apply",
  "organization.command.billing.stripe_subscription.apply",
  "organization.command.billing.free_plan.apply",
  "organization.command.billing.payment_method.set",
  "organization.command.billing.status.set",
  "organization.command.billing.invoice.upsert",
  "organization.command.billing.seat_usage.record",
] as const;
/** Union of all organization command queue names. */
export type OrganizationQueueName = (typeof ORGANIZATION_QUEUE_NAMES)[number];
/**
 * Identity helper: returns its argument unchanged. Exists only so call sites
 * read as "queue name" and get the union type enforced by the signature.
 */
export function organizationWorkflowQueueName(name: OrganizationQueueName): OrganizationQueueName {
  return name;
}

View file

@ -0,0 +1,163 @@
// @ts-nocheck
/**
* Organization command actions converted from queue handlers to direct actions.
* Each export becomes an action on the organization actor.
*/
import { applyGithubSyncProgressMutation, recordGithubWebhookReceiptMutation, refreshOrganizationSnapshotMutation } from "./actions.js";
import {
applyTaskSummaryUpdateMutation,
createTaskMutation,
refreshTaskSummaryForBranchMutation,
registerTaskBranchMutation,
removeTaskSummaryMutation,
} from "./actions/task-mutations.js";
import {
betterAuthCreateVerificationMutation,
betterAuthDeleteAccountIndexMutation,
betterAuthDeleteEmailIndexMutation,
betterAuthDeleteManyVerificationMutation,
betterAuthDeleteSessionIndexMutation,
betterAuthDeleteVerificationMutation,
betterAuthUpdateManyVerificationMutation,
betterAuthUpdateVerificationMutation,
betterAuthUpsertAccountIndexMutation,
betterAuthUpsertEmailIndexMutation,
betterAuthUpsertSessionIndexMutation,
} from "./actions/better-auth.js";
import {
applyOrganizationFreePlanMutation,
applyOrganizationStripeCustomerMutation,
applyOrganizationStripeSubscriptionMutation,
markOrganizationSyncStartedMutation,
recordOrganizationSeatUsageMutation,
setOrganizationBillingPaymentMethodMutation,
setOrganizationBillingStatusMutation,
syncOrganizationShellFromGithubMutation,
updateOrganizationShellProfileMutation,
upsertOrganizationInvoiceMutation,
} from "./app-shell.js";
/**
 * Former queue handlers exposed as direct actions on the organization actor.
 * Each action is a thin wrapper around exactly one mutation function; the 1:1
 * mapping must be preserved. Void mutations are acknowledged with
 * `{ ok: true }`; the rest return the mutation's result unchanged.
 */
export const organizationCommandActions = {
  // Task lifecycle commands
  async commandCreateTask(c: any, body: any) {
    return await createTaskMutation(c, body);
  },
  // Materialization of a virtual task reuses the same mutation as creation.
  async commandMaterializeTask(c: any, body: any) {
    return await createTaskMutation(c, body);
  },
  async commandRegisterTaskBranch(c: any, body: any) {
    return await registerTaskBranchMutation(c, body);
  },
  async commandApplyTaskSummaryUpdate(c: any, body: any) {
    await applyTaskSummaryUpdateMutation(c, body);
    return { ok: true };
  },
  async commandRemoveTaskSummary(c: any, body: any) {
    await removeTaskSummaryMutation(c, body);
    return { ok: true };
  },
  async commandRefreshTaskSummaryForBranch(c: any, body: any) {
    await refreshTaskSummaryForBranchMutation(c, body);
    return { ok: true };
  },
  // Snapshot rebroadcast ignores the body entirely.
  async commandBroadcastSnapshot(c: any, _body: any) {
    await refreshOrganizationSnapshotMutation(c);
    return { ok: true };
  },
  async commandSyncGithubSession(c: any, body: any) {
    // NOTE(review): app-shell.js is already imported statically at the top of
    // this file, so this dynamic import resolves the cached module — confirm
    // whether it guards a module cycle or can be made a static import.
    const { syncGithubOrganizations } = await import("./app-shell.js");
    await syncGithubOrganizations(c, body);
    return { ok: true };
  },
  // Better Auth index actions
  async commandBetterAuthSessionIndexUpsert(c: any, body: any) {
    return await betterAuthUpsertSessionIndexMutation(c, body);
  },
  async commandBetterAuthSessionIndexDelete(c: any, body: any) {
    await betterAuthDeleteSessionIndexMutation(c, body);
    return { ok: true };
  },
  async commandBetterAuthEmailIndexUpsert(c: any, body: any) {
    return await betterAuthUpsertEmailIndexMutation(c, body);
  },
  async commandBetterAuthEmailIndexDelete(c: any, body: any) {
    await betterAuthDeleteEmailIndexMutation(c, body);
    return { ok: true };
  },
  async commandBetterAuthAccountIndexUpsert(c: any, body: any) {
    return await betterAuthUpsertAccountIndexMutation(c, body);
  },
  async commandBetterAuthAccountIndexDelete(c: any, body: any) {
    await betterAuthDeleteAccountIndexMutation(c, body);
    return { ok: true };
  },
  async commandBetterAuthVerificationCreate(c: any, body: any) {
    return await betterAuthCreateVerificationMutation(c, body);
  },
  async commandBetterAuthVerificationUpdate(c: any, body: any) {
    return await betterAuthUpdateVerificationMutation(c, body);
  },
  async commandBetterAuthVerificationUpdateMany(c: any, body: any) {
    return await betterAuthUpdateManyVerificationMutation(c, body);
  },
  async commandBetterAuthVerificationDelete(c: any, body: any) {
    await betterAuthDeleteVerificationMutation(c, body);
    return { ok: true };
  },
  async commandBetterAuthVerificationDeleteMany(c: any, body: any) {
    return await betterAuthDeleteManyVerificationMutation(c, body);
  },
  // GitHub sync actions
  async commandApplyGithubSyncProgress(c: any, body: any) {
    await applyGithubSyncProgressMutation(c, body);
    return { ok: true };
  },
  async commandRecordGithubWebhookReceipt(c: any, body: any) {
    await recordGithubWebhookReceiptMutation(c, body);
    return { ok: true };
  },
  async commandSyncOrganizationShellFromGithub(c: any, body: any) {
    return await syncOrganizationShellFromGithubMutation(c, body);
  },
  // Shell/profile actions
  async commandUpdateShellProfile(c: any, body: any) {
    await updateOrganizationShellProfileMutation(c, body);
    return { ok: true };
  },
  async commandMarkSyncStarted(c: any, body: any) {
    await markOrganizationSyncStartedMutation(c, body);
    return { ok: true };
  },
  // Billing actions
  async commandApplyStripeCustomer(c: any, body: any) {
    await applyOrganizationStripeCustomerMutation(c, body);
    return { ok: true };
  },
  async commandApplyStripeSubscription(c: any, body: any) {
    await applyOrganizationStripeSubscriptionMutation(c, body);
    return { ok: true };
  },
  async commandApplyFreePlan(c: any, body: any) {
    await applyOrganizationFreePlanMutation(c, body);
    return { ok: true };
  },
  async commandSetPaymentMethod(c: any, body: any) {
    await setOrganizationBillingPaymentMethodMutation(c, body);
    return { ok: true };
  },
  async commandSetBillingStatus(c: any, body: any) {
    await setOrganizationBillingStatusMutation(c, body);
    return { ok: true };
  },
  async commandUpsertInvoice(c: any, body: any) {
    await upsertOrganizationInvoiceMutation(c, body);
    return { ok: true };
  },
  async commandRecordSeatUsage(c: any, body: any) {
    await recordOrganizationSeatUsageMutation(c, body);
    return { ok: true };
  },
};

View file

@ -1,557 +0,0 @@
// @ts-nocheck
import { randomUUID } from "node:crypto";
import { and, desc, eq, isNotNull, ne } from "drizzle-orm";
import { Loop } from "rivetkit/workflow";
import type { AgentType, RepoOverview, SandboxProviderId, TaskRecord, TaskSummary } from "@sandbox-agent/foundry-shared";
import { getGithubData, getOrCreateHistory, getOrCreateTask, getTask, selfRepository } from "../handles.js";
import { deriveFallbackTitle, resolveCreateFlowDecision } from "../../services/create-flow.js";
import { expectQueueResponse } from "../../services/queue.js";
import { isActorNotFoundError, logActorWarning, resolveErrorMessage } from "../logging.js";
import { repoMeta, taskIndex } from "./db/schema.js";
/** Payload for `repository.command.createTask`. */
interface CreateTaskCommand {
  task: string;
  sandboxProviderId: SandboxProviderId;
  agentType: AgentType | null;
  explicitTitle: string | null;
  explicitBranchName: string | null;
  initialPrompt: string | null;
  // When set, attach the new task to this existing remote branch instead of
  // deriving a fresh branch name.
  onBranch: string | null;
}
/** Payload for `repository.command.registerTaskBranch`. */
interface RegisterTaskBranchCommand {
  taskId: string;
  branchName: string;
  // When true, fail unless the branch already exists on the GitHub remote.
  requireExistingRemote?: boolean;
}
/** Options for the listTaskSummaries action. */
interface ListTaskSummariesCommand {
  includeArchived?: boolean;
}
/** Lookup key for the getTaskEnriched action. */
interface GetTaskEnrichedCommand {
  taskId: string;
}
/** Lookup key for the getPullRequestForBranch action. */
interface GetPullRequestForBranchCommand {
  branchName: string;
}
// The two command queues drained by the repository workflow loop.
const REPOSITORY_QUEUE_NAMES = ["repository.command.createTask", "repository.command.registerTaskBranch"] as const;
type RepositoryQueueName = (typeof REPOSITORY_QUEUE_NAMES)[number];
export { REPOSITORY_QUEUE_NAMES };
/** Identity helper: statically checks a queue name against the known set. */
export function repositoryWorkflowQueueName(name: RepositoryQueueName): RepositoryQueueName {
  return name;
}
/**
 * True when an error indicates the referenced task actor no longer exists:
 * either the actor itself is gone, or the task actor reports it never held
 * this task ("Task not found:" prefix).
 */
function isStaleTaskReferenceError(error: unknown): boolean {
  if (isActorNotFoundError(error)) {
    return true;
  }
  return resolveErrorMessage(error).startsWith("Task not found:");
}
/**
 * Caches the repo's git remote URL in actor state and upserts it into the
 * single-row `repo_meta` table (id = 1).
 */
async function persistRemoteUrl(c: any, remoteUrl: string): Promise<void> {
  c.state.remoteUrl = remoteUrl;
  // Single timestamp so the insert values and the conflict-update set agree;
  // the original called Date.now() twice and the two could differ by a tick.
  // Also matches the `const now` convention of the sibling mutations.
  const now = Date.now();
  await c.db
    .insert(repoMeta)
    .values({
      id: 1,
      remoteUrl,
      updatedAt: now,
    })
    .onConflictDoUpdate({
      target: repoMeta.id,
      set: {
        remoteUrl,
        updatedAt: now,
      },
    })
    .run();
}
/**
 * Removes a task_index row whose task actor turned out to be gone.
 * Failures are swallowed on purpose: this is best-effort cleanup and the
 * stale row will be retried on the next lookup that trips over it.
 */
async function deleteStaleTaskIndexRow(c: any, taskId: string): Promise<void> {
  try {
    await c.db.delete(taskIndex).where(eq(taskIndex.taskId, taskId)).run();
  } catch {
    // Best effort cleanup only.
  }
}
/**
 * Re-creates (or refreshes) a task_index row — used when a task actor exists
 * but its index row is missing (self-healing in getTaskEnriched).
 *
 * NOTE(review): the `updatedAt` parameter feeds the row's createdAt (falling
 * back to now when falsy), while updatedAt is always set to now — confirm
 * the parameter name reflects the intent.
 */
async function reinsertTaskIndexRow(c: any, taskId: string, branchName: string | null, updatedAt: number): Promise<void> {
  const now = Date.now();
  await c.db
    .insert(taskIndex)
    .values({
      taskId,
      branchName,
      createdAt: updatedAt || now,
      updatedAt: now,
    })
    .onConflictDoUpdate({
      target: taskIndex.taskId,
      set: {
        branchName,
        updatedAt: now,
      },
    })
    .run();
}
/**
 * Branch names already reserved by tasks in this repo. The SQL WHERE drops
 * NULL branch names; the loop below additionally drops blank/whitespace ones.
 */
async function listKnownTaskBranches(c: any): Promise<string[]> {
  const indexed = await c.db
    .select({ branchName: taskIndex.branchName })
    .from(taskIndex)
    .where(isNotNull(taskIndex.branchName))
    .all();
  const branches: string[] = [];
  for (const entry of indexed) {
    const name = entry.branchName;
    if (typeof name === "string" && name.trim().length > 0) {
      branches.push(name);
    }
  }
  return branches;
}
/** Fetches this repo's metadata from the github-data actor; null on any lookup failure. */
async function resolveGitHubRepository(c: any) {
  const githubData = getGithubData(c, c.state.organizationId);
  return await githubData.getRepository({ repoId: c.state.repoId }).catch(() => null);
}
/** Lists remote branches from the github-data actor; empty list on any lookup failure. */
async function listGitHubBranches(c: any): Promise<Array<{ branchName: string; commitSha: string }>> {
  const githubData = getGithubData(c, c.state.organizationId);
  return await githubData.listBranchesForRepository({ repoId: c.state.repoId }).catch(() => []);
}
/**
 * Decorates a task record with pull-request details for its branch.
 * Records without a (non-blank) branch are returned unchanged; PR lookup
 * failures degrade to null fields rather than throwing.
 */
async function enrichTaskRecord(c: any, record: TaskRecord): Promise<TaskRecord> {
  const branchName = record.branchName?.trim() || null;
  if (!branchName) {
    return record;
  }
  // The early return above guarantees branchName is non-null here, so the
  // original `branchName != null ? … : null` ternary was dead — removed.
  const pr = await getGithubData(c, c.state.organizationId)
    .listPullRequestsForRepository({ repoId: c.state.repoId })
    .then((rows: any[]) => rows.find((row) => row.headRefName === branchName) ?? null)
    .catch(() => null);
  return {
    ...record,
    prUrl: pr?.url ?? null,
    prAuthor: pr?.authorLogin ?? null,
    ciStatus: null,
    reviewStatus: null,
    // NOTE(review): reviewer is populated with the PR *author* — looks like a
    // placeholder until real review data is synced; confirm intent.
    reviewer: pr?.authorLogin ?? null,
    diffStat: record.diffStat ?? null,
    hasUnpushed: record.hasUnpushed ?? null,
    conflictsWithMain: record.conflictsWithMain ?? null,
  };
}
/**
 * Creates a task end-to-end: reserves a branch in task_index, creates and
 * initializes the task actor, and appends a "task.created" history event.
 *
 * Two entry modes:
 *  - cmd.onBranch set: attach to an existing remote branch (must exist on
 *    the remote — registerTaskBranchMutation is called with
 *    requireExistingRemote).
 *  - otherwise: derive branch + title via resolveCreateFlowDecision,
 *    avoiding branch names already reserved by other tasks.
 *
 * If actor creation fails, the freshly reserved index row is rolled back
 * before rethrowing.
 */
async function createTaskMutation(c: any, cmd: CreateTaskCommand): Promise<TaskRecord> {
  const organizationId = c.state.organizationId;
  const repoId = c.state.repoId;
  const repoRemote = c.state.remoteUrl;
  const onBranch = cmd.onBranch?.trim() || null;
  const taskId = randomUUID();
  let initialBranchName: string | null = null;
  let initialTitle: string | null = null;
  // Keep the repo_meta row in sync with the in-memory remote URL.
  await persistRemoteUrl(c, repoRemote);
  if (onBranch) {
    initialBranchName = onBranch;
    initialTitle = deriveFallbackTitle(cmd.task, cmd.explicitTitle ?? undefined);
    await registerTaskBranchMutation(c, {
      taskId,
      branchName: onBranch,
      requireExistingRemote: true,
    });
  } else {
    const reservedBranches = await listKnownTaskBranches(c);
    const resolved = resolveCreateFlowDecision({
      task: cmd.task,
      explicitTitle: cmd.explicitTitle ?? undefined,
      explicitBranchName: cmd.explicitBranchName ?? undefined,
      localBranches: [],
      taskBranches: reservedBranches,
    });
    initialBranchName = resolved.branchName;
    initialTitle = resolved.title;
    const now = Date.now();
    // Reserve the derived branch name. taskId is a fresh UUID, so a conflict
    // can only mean a replayed insert — keep the existing row.
    await c.db
      .insert(taskIndex)
      .values({
        taskId,
        branchName: resolved.branchName,
        createdAt: now,
        updatedAt: now,
      })
      .onConflictDoNothing()
      .run();
  }
  let taskHandle: Awaited<ReturnType<typeof getOrCreateTask>>;
  try {
    taskHandle = await getOrCreateTask(c, organizationId, repoId, taskId, {
      organizationId,
      repoId,
      taskId,
      repoRemote,
      branchName: initialBranchName,
      title: initialTitle,
      task: cmd.task,
      sandboxProviderId: cmd.sandboxProviderId,
      agentType: cmd.agentType,
      explicitTitle: null,
      explicitBranchName: null,
      initialPrompt: cmd.initialPrompt,
    });
  } catch (error) {
    // Roll back the branch reservation so the name can be reused.
    if (initialBranchName) {
      await deleteStaleTaskIndexRow(c, taskId);
    }
    throw error;
  }
  const created = await taskHandle.initialize({ sandboxProviderId: cmd.sandboxProviderId });
  const history = await getOrCreateHistory(c, organizationId, repoId);
  await history.append({
    kind: "task.created",
    taskId,
    payload: {
      repoId,
      sandboxProviderId: cmd.sandboxProviderId,
    },
  });
  return created;
}
/**
 * Binds a branch name to a task in task_index.
 *
 * Ownership conflicts: when another task already holds the branch, that
 * owner's actor is probed. If the owner is gone (stale reference) its row is
 * purged and the bind proceeds; otherwise an error is thrown.
 *
 * @returns the trimmed branch name plus a head SHA: the remote branch's
 *   commit when it exists, else the default branch's commit, else "".
 * @throws when branchName is blank, the branch belongs to a live task, or
 *   requireExistingRemote is set and the branch is not on the remote.
 */
async function registerTaskBranchMutation(c: any, cmd: RegisterTaskBranchCommand): Promise<{ branchName: string; headSha: string }> {
  const branchName = cmd.branchName.trim();
  if (!branchName) {
    throw new Error("branchName is required");
  }
  await persistRemoteUrl(c, c.state.remoteUrl);
  // Any OTHER task currently holding this branch?
  const existingOwner = await c.db
    .select({ taskId: taskIndex.taskId })
    .from(taskIndex)
    .where(and(eq(taskIndex.branchName, branchName), ne(taskIndex.taskId, cmd.taskId)))
    .get();
  if (existingOwner) {
    let ownerMissing = false;
    try {
      await getTask(c, c.state.organizationId, c.state.repoId, existingOwner.taskId).get();
    } catch (error) {
      if (isStaleTaskReferenceError(error)) {
        ownerMissing = true;
        await deleteStaleTaskIndexRow(c, existingOwner.taskId);
      } else {
        throw error;
      }
    }
    if (!ownerMissing) {
      throw new Error(`branch is already assigned to a different task: ${branchName}`);
    }
  }
  const branches = await listGitHubBranches(c);
  const branchMatch = branches.find((branch) => branch.branchName === branchName) ?? null;
  if (cmd.requireExistingRemote && !branchMatch) {
    throw new Error(`Remote branch not found: ${branchName}`);
  }
  const repository = await resolveGitHubRepository(c);
  const defaultBranch = repository?.defaultBranch ?? "main";
  // Fall back to the default branch's head when the branch isn't remote yet.
  const headSha = branchMatch?.commitSha ?? branches.find((branch) => branch.branchName === defaultBranch)?.commitSha ?? "";
  const now = Date.now();
  await c.db
    .insert(taskIndex)
    .values({
      taskId: cmd.taskId,
      branchName,
      createdAt: now,
      updatedAt: now,
    })
    .onConflictDoUpdate({
      target: taskIndex.taskId,
      set: {
        branchName,
        updatedAt: now,
      },
    })
    .run();
  return { branchName, headSha };
}
/**
 * Builds summaries for all indexed tasks by asking each task actor for its
 * record. Stale index rows (dead task actors) are pruned as they are found;
 * other per-row failures are logged and skipped so one bad task does not
 * break the whole listing.
 */
async function listTaskSummaries(c: any, includeArchived = false): Promise<TaskSummary[]> {
  const taskRows = await c.db.select({ taskId: taskIndex.taskId }).from(taskIndex).orderBy(desc(taskIndex.updatedAt)).all();
  const records: TaskSummary[] = [];
  for (const row of taskRows) {
    try {
      const record = await getTask(c, c.state.organizationId, c.state.repoId, row.taskId).get();
      if (!includeArchived && record.status === "archived") {
        continue;
      }
      records.push({
        organizationId: record.organizationId,
        repoId: record.repoId,
        taskId: record.taskId,
        branchName: record.branchName,
        title: record.title,
        status: record.status,
        updatedAt: record.updatedAt,
      });
    } catch (error) {
      if (isStaleTaskReferenceError(error)) {
        await deleteStaleTaskIndexRow(c, row.taskId);
        continue;
      }
      logActorWarning("repository", "failed loading task summary row", {
        organizationId: c.state.organizationId,
        repoId: c.state.repoId,
        taskId: row.taskId,
        error: resolveErrorMessage(error),
      });
    }
  }
  // Re-sort by the task actors' own updatedAt — the SQL ORDER BY above used
  // the index rows' timestamps, which can lag behind the actors'.
  records.sort((a, b) => b.updatedAt - a.updatedAt);
  return records;
}
/**
 * Orders overview branches for display: the default branch first, then
 * branches with an attached task, then most-recently-updated, then
 * branch name (locale order) as the final tiebreak.
 * Returns a new array; the input is not mutated.
 */
function sortOverviewBranches(
  branches: Array<{
    branchName: string;
    commitSha: string;
    taskId: string | null;
    taskTitle: string | null;
    taskStatus: TaskRecord["status"] | null;
    prNumber: number | null;
    prState: string | null;
    prUrl: string | null;
    ciStatus: string | null;
    reviewStatus: string | null;
    reviewer: string | null;
    updatedAt: number;
  }>,
  defaultBranch: string | null,
) {
  const copy = branches.slice();
  copy.sort((a, b) => {
    if (defaultBranch) {
      const aIsDefault = a.branchName === defaultBranch;
      const bIsDefault = b.branchName === defaultBranch;
      if (aIsDefault !== bIsDefault) {
        return aIsDefault ? -1 : 1;
      }
    }
    const aHasTask = Boolean(a.taskId);
    const bHasTask = Boolean(b.taskId);
    if (aHasTask !== bHasTask) {
      return aHasTask ? -1 : 1;
    }
    if (a.updatedAt !== b.updatedAt) {
      return b.updatedAt - a.updatedAt;
    }
    return a.branchName.localeCompare(b.branchName);
  });
  return copy;
}
/**
 * Repository actor run loop: drains the two repository command queues and
 * executes each command inside a named workflow step with a per-command
 * timeout. Command failures are reported back to the waiting caller as
 * `{ error }` via msg.complete instead of crashing the loop.
 */
export async function runRepositoryWorkflow(ctx: any): Promise<void> {
  await ctx.loop("repository-command-loop", async (loopCtx: any) => {
    const msg = await loopCtx.queue.next("next-repository-command", {
      names: [...REPOSITORY_QUEUE_NAMES],
      completable: true,
    });
    if (!msg) {
      return Loop.continue(undefined);
    }
    try {
      if (msg.name === "repository.command.createTask") {
        // Task creation is the slow path — allowed up to 5 minutes.
        const result = await loopCtx.step({
          name: "repository-create-task",
          timeout: 5 * 60_000,
          run: async () => createTaskMutation(loopCtx, msg.body as CreateTaskCommand),
        });
        await msg.complete(result);
        return Loop.continue(undefined);
      }
      if (msg.name === "repository.command.registerTaskBranch") {
        const result = await loopCtx.step({
          name: "repository-register-task-branch",
          timeout: 60_000,
          run: async () => registerTaskBranchMutation(loopCtx, msg.body as RegisterTaskBranchCommand),
        });
        await msg.complete(result);
        return Loop.continue(undefined);
      }
    } catch (error) {
      const message = resolveErrorMessage(error);
      logActorWarning("repository", "repository workflow command failed", {
        queueName: msg.name,
        error: message,
      });
      // Best effort: unblock the waiting sender even if completion fails.
      await msg.complete({ error: message }).catch(() => {});
    }
    return Loop.continue(undefined);
  });
}
/**
 * Public action surface of the repository actor.
 *
 * createTask / registerTaskBranch are routed through the actor's own command
 * queues (drained by runRepositoryWorkflow) so mutations execute serially on
 * the workflow loop; the remaining actions run directly.
 */
export const repositoryActions = {
  async createTask(c: any, cmd: CreateTaskCommand): Promise<TaskRecord> {
    const self = selfRepository(c);
    // NOTE(review): the queue wait timeout is 10s while the workflow step
    // allows 5 minutes — a slow createTask may time out the caller here.
    // Confirm whether this mismatch is intentional.
    return expectQueueResponse<TaskRecord>(
      await self.send(repositoryWorkflowQueueName("repository.command.createTask"), cmd, {
        wait: true,
        timeout: 10_000,
      }),
    );
  },
  /** Branch names already reserved by tasks in this repo. */
  async listReservedBranches(c: any): Promise<string[]> {
    return await listKnownTaskBranches(c);
  },
  async registerTaskBranch(c: any, cmd: RegisterTaskBranchCommand): Promise<{ branchName: string; headSha: string }> {
    const self = selfRepository(c);
    return expectQueueResponse<{ branchName: string; headSha: string }>(
      await self.send(repositoryWorkflowQueueName("repository.command.registerTaskBranch"), cmd, {
        wait: true,
        timeout: 10_000,
      }),
    );
  },
  async listTaskSummaries(c: any, cmd?: ListTaskSummariesCommand): Promise<TaskSummary[]> {
    return await listTaskSummaries(c, cmd?.includeArchived === true);
  },
  /**
   * Loads a task record and decorates it with PR info, self-healing the
   * local index: a missing row is re-inserted when the task actor exists;
   * a stale row is deleted and surfaced as "Unknown task".
   */
  async getTaskEnriched(c: any, cmd: GetTaskEnrichedCommand): Promise<TaskRecord> {
    const row = await c.db.select({ taskId: taskIndex.taskId }).from(taskIndex).where(eq(taskIndex.taskId, cmd.taskId)).get();
    if (!row) {
      const record = await getTask(c, c.state.organizationId, c.state.repoId, cmd.taskId).get();
      await reinsertTaskIndexRow(c, cmd.taskId, record.branchName ?? null, record.updatedAt ?? Date.now());
      return await enrichTaskRecord(c, record);
    }
    try {
      const record = await getTask(c, c.state.organizationId, c.state.repoId, cmd.taskId).get();
      return await enrichTaskRecord(c, record);
    } catch (error) {
      if (isStaleTaskReferenceError(error)) {
        await deleteStaleTaskIndexRow(c, cmd.taskId);
        throw new Error(`Unknown task in repo ${c.state.repoId}: ${cmd.taskId}`);
      }
      throw error;
    }
  },
  async getRepositoryMetadata(c: any): Promise<{ defaultBranch: string | null; fullName: string | null; remoteUrl: string }> {
    const repository = await resolveGitHubRepository(c);
    return {
      defaultBranch: repository?.defaultBranch ?? null,
      fullName: repository?.fullName ?? null,
      remoteUrl: c.state.remoteUrl,
    };
  },
  /**
   * Merged branch overview: remote GitHub branches, task-reserved branches,
   * and the default branch, each annotated with task metadata and PR info.
   * All GitHub lookups degrade to empty results on failure.
   */
  async getRepoOverview(c: any): Promise<RepoOverview> {
    await persistRemoteUrl(c, c.state.remoteUrl);
    const now = Date.now();
    const repository = await resolveGitHubRepository(c);
    const githubBranches = await listGitHubBranches(c).catch(() => []);
    const githubData = getGithubData(c, c.state.organizationId);
    const prRows = await githubData.listPullRequestsForRepository({ repoId: c.state.repoId }).catch(() => []);
    const prByBranch = new Map(prRows.map((row) => [row.headRefName, row]));
    const taskRows = await c.db
      .select({
        taskId: taskIndex.taskId,
        branchName: taskIndex.branchName,
        updatedAt: taskIndex.updatedAt,
      })
      .from(taskIndex)
      .all();
    const taskMetaByBranch = new Map<string, { taskId: string; title: string | null; status: TaskRecord["status"] | null; updatedAt: number }>();
    for (const row of taskRows) {
      if (!row.branchName) {
        continue;
      }
      try {
        const record = await getTask(c, c.state.organizationId, c.state.repoId, row.taskId).get();
        taskMetaByBranch.set(row.branchName, {
          taskId: row.taskId,
          title: record.title ?? null,
          status: record.status,
          updatedAt: record.updatedAt,
        });
      } catch (error) {
        if (isStaleTaskReferenceError(error)) {
          await deleteStaleTaskIndexRow(c, row.taskId);
          continue;
        }
        // Non-stale failures: the branch is shown without task metadata.
      }
    }
    // Union of remote branches, task branches, and the default branch;
    // entries unknown to the remote get an empty commit SHA.
    const branchMap = new Map<string, { branchName: string; commitSha: string }>();
    for (const branch of githubBranches) {
      branchMap.set(branch.branchName, branch);
    }
    for (const branchName of taskMetaByBranch.keys()) {
      if (!branchMap.has(branchName)) {
        branchMap.set(branchName, { branchName, commitSha: "" });
      }
    }
    if (repository?.defaultBranch && !branchMap.has(repository.defaultBranch)) {
      branchMap.set(repository.defaultBranch, { branchName: repository.defaultBranch, commitSha: "" });
    }
    const branches = sortOverviewBranches(
      [...branchMap.values()].map((branch) => {
        const taskMeta = taskMetaByBranch.get(branch.branchName);
        const pr = prByBranch.get(branch.branchName);
        return {
          branchName: branch.branchName,
          commitSha: branch.commitSha,
          taskId: taskMeta?.taskId ?? null,
          taskTitle: taskMeta?.title ?? null,
          taskStatus: taskMeta?.status ?? null,
          prNumber: pr?.number ?? null,
          prState: pr?.state ?? null,
          prUrl: pr?.url ?? null,
          ciStatus: null,
          reviewStatus: null,
          reviewer: pr?.authorLogin ?? null,
          updatedAt: Math.max(taskMeta?.updatedAt ?? 0, pr?.updatedAtMs ?? 0, now),
        };
      }),
      repository?.defaultBranch ?? null,
    );
    return {
      organizationId: c.state.organizationId,
      repoId: c.state.repoId,
      remoteUrl: c.state.remoteUrl,
      baseRef: repository?.defaultBranch ?? null,
      fetchedAt: now,
      branches,
    };
  },
  async getPullRequestForBranch(c: any, cmd: GetPullRequestForBranchCommand): Promise<{ number: number; status: "draft" | "ready" } | null> {
    const branchName = cmd.branchName?.trim();
    if (!branchName) {
      return null;
    }
    const githubData = getGithubData(c, c.state.organizationId);
    return await githubData.getPullRequestForBranch({
      repoId: c.state.repoId,
      branchName,
    });
  },
};

View file

@ -1,5 +0,0 @@
// Per-actor SQLite wiring for the repository actor: drizzle schema plus the
// bundled migrations.
import { db } from "rivetkit/db/drizzle";
import * as schema from "./schema.js";
import migrations from "./migrations.js";
export const repositoryDb = db({ schema, migrations });

View file

@ -1,6 +0,0 @@
// drizzle-kit config for the repository actor's per-instance SQLite schema;
// generated SQL is written under the `out` directory.
import { defineConfig } from "rivetkit/db/drizzle";
export default defineConfig({
  out: "./src/actors/repository/db/drizzle",
  schema: "./src/actors/repository/db/schema.ts",
});

View file

@ -1,12 +0,0 @@
CREATE TABLE `repo_meta` (
`id` integer PRIMARY KEY NOT NULL,
`remote_url` text NOT NULL,
`updated_at` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE `task_index` (
`task_id` text PRIMARY KEY NOT NULL,
`branch_name` text,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL
);

View file

@ -1,13 +0,0 @@
{
"version": "7",
"dialect": "sqlite",
"entries": [
{
"idx": 0,
"version": "6",
"when": 1773376221848,
"tag": "0000_useful_la_nuit",
"breakpoints": true
}
]
}

View file

@ -1,43 +0,0 @@
// This file is generated by src/actors/_scripts/generate-actor-migrations.ts.
// Source of truth is drizzle-kit output under ./drizzle (meta/_journal.json + *.sql).
// Do not hand-edit this file.
const journal = {
entries: [
{
idx: 0,
when: 1773376221848,
tag: "0000_useful_la_nuit",
breakpoints: true,
},
{
idx: 1,
when: 1778900000000,
tag: "0001_remove_local_git_state",
breakpoints: true,
},
],
} as const;
export default {
journal,
migrations: {
m0000: `CREATE TABLE \`repo_meta\` (
\t\`id\` integer PRIMARY KEY NOT NULL,
\t\`remote_url\` text NOT NULL,
\t\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE \`task_index\` (
\t\`task_id\` text PRIMARY KEY NOT NULL,
\t\`branch_name\` text,
\t\`created_at\` integer NOT NULL,
\t\`updated_at\` integer NOT NULL
);
`,
m0001: `DROP TABLE IF EXISTS \`branches\`;
--> statement-breakpoint
DROP TABLE IF EXISTS \`repo_action_jobs\`;
`,
} as const,
};

View file

@ -1,23 +0,0 @@
import { integer, sqliteTable, text } from "rivetkit/db/drizzle";
// SQLite is per repository actor instance (organizationId+repoId).
/** Single-row (id = 1) cache of the repo's git remote URL. */
export const repoMeta = sqliteTable("repo_meta", {
  id: integer("id").primaryKey(),
  remoteUrl: text("remote_url").notNull(),
  updatedAt: integer("updated_at").notNull(),
});
/**
 * Coordinator index of TaskActor instances.
 * The repository actor is the coordinator for tasks. Each row maps a
 * taskId to its branch name. Used for branch conflict checking and
 * task-by-branch lookups. Rows are inserted at task creation and
 * updated on branch rename.
 */
export const taskIndex = sqliteTable("task_index", {
  taskId: text("task_id").notNull().primaryKey(),
  branchName: text("branch_name"),  // nullable: a task may not have a branch yet
  createdAt: integer("created_at").notNull(),
  updatedAt: integer("updated_at").notNull(),
});

View file

@ -1,27 +0,0 @@
import { actor, queue } from "rivetkit";
import { workflow } from "rivetkit/workflow";
import { repositoryDb } from "./db/db.js";
import { REPOSITORY_QUEUE_NAMES, repositoryActions, runRepositoryWorkflow } from "./actions.js";
export interface RepositoryInput {
organizationId: string;
repoId: string;
remoteUrl: string;
}
export const repository = actor({
db: repositoryDb,
queues: Object.fromEntries(REPOSITORY_QUEUE_NAMES.map((name) => [name, queue()])),
options: {
name: "Repository",
icon: "folder",
actionTimeout: 5 * 60_000,
},
createState: (_c, input: RepositoryInput) => ({
organizationId: input.organizationId,
repoId: input.repoId,
remoteUrl: input.remoteUrl,
}),
actions: repositoryActions,
run: workflow(runRepositoryWorkflow),
});

View file

@ -2,12 +2,14 @@ import { actor } from "rivetkit";
import { e2b, sandboxActor } from "rivetkit/sandbox";
import { existsSync } from "node:fs";
import Dockerode from "dockerode";
import { DEFAULT_WORKSPACE_MODEL_GROUPS, workspaceModelGroupsFromSandboxAgents, type WorkspaceModelGroup } from "@sandbox-agent/foundry-shared";
import { SandboxAgent } from "sandbox-agent";
import { getActorRuntimeContext } from "../context.js";
import { organizationKey } from "../keys.js";
import { logActorWarning, resolveErrorMessage } from "../logging.js";
import { resolveSandboxProviderId } from "../../sandbox-config.js";
const SANDBOX_REPO_CWD = "/home/sandbox/organization/repo";
const SANDBOX_REPO_CWD = "/home/user/repo";
const DEFAULT_LOCAL_SANDBOX_IMAGE = "rivetdev/sandbox-agent:full";
const DEFAULT_LOCAL_SANDBOX_PORT = 2468;
const dockerClient = new Dockerode({ socketPath: "/var/run/docker.sock" });
@ -203,6 +205,13 @@ const baseTaskSandbox = sandboxActor({
create: () => ({
template: config.sandboxProviders.e2b.template ?? "sandbox-agent-full-0.3.x",
envs: sandboxEnvObject(),
// TEMPORARY: Default E2B timeout is 5 minutes which is too short.
// Set to 1 hour as a stopgap. Remove this once the E2B provider in
// sandbox-agent uses betaCreate + autoPause (see
// .context/proposal-rivetkit-sandbox-resilience.md). At that point
// the provider handles timeout/pause lifecycle and this override is
// unnecessary.
timeoutMs: 60 * 60 * 1000,
}),
installAgents: ["claude", "codex"],
});
@ -219,8 +228,12 @@ async function broadcastProcesses(c: any, actions: Record<string, (...args: any[
type: "processesUpdated",
processes: listed.processes ?? [],
});
} catch {
} catch (error) {
// Process broadcasts are best-effort. Callers still receive the primary action result.
logActorWarning("taskSandbox", "broadcastProcesses failed", {
sandboxId: c.state?.sandboxId,
error: resolveErrorMessage(error),
});
}
}
@ -258,6 +271,26 @@ async function providerForConnection(c: any): Promise<any | null> {
return provider;
}
/**
 * Asks the live sandbox agent which model groups it exposes. Falls back to
 * DEFAULT_WORKSPACE_MODEL_GROUPS whenever there is no usable provider or
 * sandbox, the agent query fails, or it returns no groups.
 */
async function listWorkspaceModelGroupsForSandbox(c: any): Promise<WorkspaceModelGroup[]> {
  const provider = await providerForConnection(c);
  if (!provider || !c.state.sandboxId || typeof provider.connectAgent !== "function") {
    return DEFAULT_WORKSPACE_MODEL_GROUPS;
  }
  try {
    const client = await provider.connectAgent(c.state.sandboxId, {
      waitForHealth: {
        timeoutMs: 15_000,
      },
    });
    const listed = await client.listAgents({ config: true });
    const groups = workspaceModelGroupsFromSandboxAgents(Array.isArray(listed?.agents) ? listed.agents : []);
    return groups.length > 0 ? groups : DEFAULT_WORKSPACE_MODEL_GROUPS;
  } catch {
    // Any connect/list failure degrades to the static defaults.
    return DEFAULT_WORKSPACE_MODEL_GROUPS;
  }
}
const baseActions = baseTaskSandbox.config.actions as Record<string, (c: any, ...args: any[]) => Promise<any>>;
export const taskSandbox = actor({
@ -316,6 +349,19 @@ export const taskSandbox = actor({
return sanitizeActorResult(await session.prompt([{ type: "text", text }]));
},
async listProcesses(c: any): Promise<any> {
  // Degrades to an empty process list when the base action fails so callers
  // keep working instead of erroring on an unreachable sandbox.
  try {
    return await baseActions.listProcesses(c);
  } catch (error) {
    // Sandbox may be gone (E2B timeout, destroyed, etc.) — degrade to empty
    logActorWarning("taskSandbox", "listProcesses failed, sandbox may be expired", {
      sandboxId: c.state.sandboxId,
      error: resolveErrorMessage(error),
    });
    return { processes: [] };
  }
},
async createProcess(c: any, request: any): Promise<any> {
const created = await baseActions.createProcess(c, request);
await broadcastProcesses(c, baseActions);
@ -360,6 +406,10 @@ export const taskSandbox = actor({
}
},
async listWorkspaceModelGroups(c: any): Promise<WorkspaceModelGroup[]> {
return await listWorkspaceModelGroupsForSandbox(c);
},
async providerState(c: any): Promise<{ sandboxProviderId: "e2b" | "local"; sandboxId: string; state: string; at: number }> {
const { config } = getActorRuntimeContext();
const { taskId } = parseTaskSandboxKey(c.key);

View file

@ -3,10 +3,9 @@ CREATE TABLE `task` (
`branch_name` text,
`title` text,
`task` text NOT NULL,
`provider_id` text NOT NULL,
`sandbox_provider_id` text NOT NULL,
`status` text NOT NULL,
`agent_type` text DEFAULT 'claude',
`pr_submitted` integer DEFAULT 0,
`pull_request_json` text,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
CONSTRAINT "task_singleton_id_check" CHECK("task"."id" = 1)
@ -15,33 +14,33 @@ CREATE TABLE `task` (
CREATE TABLE `task_runtime` (
`id` integer PRIMARY KEY NOT NULL,
`active_sandbox_id` text,
`active_session_id` text,
`active_switch_target` text,
`active_cwd` text,
`status_message` text,
`git_state_json` text,
`git_state_updated_at` integer,
`updated_at` integer NOT NULL,
CONSTRAINT "task_runtime_singleton_id_check" CHECK("task_runtime"."id" = 1)
);
--> statement-breakpoint
CREATE TABLE `task_sandboxes` (
`sandbox_id` text PRIMARY KEY NOT NULL,
`provider_id` text NOT NULL,
`sandbox_provider_id` text NOT NULL,
`sandbox_actor_id` text,
`switch_target` text NOT NULL,
`cwd` text,
`status_message` text,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE `task_workbench_sessions` (
CREATE TABLE `task_workspace_sessions` (
`session_id` text PRIMARY KEY NOT NULL,
`sandbox_session_id` text,
`session_name` text NOT NULL,
`model` text NOT NULL,
`unread` integer DEFAULT 0 NOT NULL,
`draft_text` text DEFAULT '' NOT NULL,
`draft_attachments_json` text DEFAULT '[]' NOT NULL,
`draft_updated_at` integer,
`status` text DEFAULT 'ready' NOT NULL,
`error_message` text,
`transcript_json` text DEFAULT '[]' NOT NULL,
`transcript_updated_at` integer,
`created` integer DEFAULT 1 NOT NULL,
`closed` integer DEFAULT 0 NOT NULL,
`thinking_since_ms` integer,

View file

@ -35,8 +35,8 @@
"notNull": true,
"autoincrement": false
},
"provider_id": {
"name": "provider_id",
"sandbox_provider_id": {
"name": "sandbox_provider_id",
"type": "text",
"primaryKey": false,
"notNull": true,
@ -49,21 +49,12 @@
"notNull": true,
"autoincrement": false
},
"agent_type": {
"name": "agent_type",
"pull_request_json": {
"name": "pull_request_json",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": "'claude'"
},
"pr_submitted": {
"name": "pr_submitted",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false,
"default": 0
"autoincrement": false
},
"created_at": {
"name": "created_at",
@ -108,13 +99,6 @@
"notNull": false,
"autoincrement": false
},
"active_session_id": {
"name": "active_session_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"active_switch_target": {
"name": "active_switch_target",
"type": "text",
@ -129,13 +113,20 @@
"notNull": false,
"autoincrement": false
},
"status_message": {
"name": "status_message",
"git_state_json": {
"name": "git_state_json",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"git_state_updated_at": {
"name": "git_state_updated_at",
"type": "integer",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
@ -165,8 +156,8 @@
"notNull": true,
"autoincrement": false
},
"provider_id": {
"name": "provider_id",
"sandbox_provider_id": {
"name": "sandbox_provider_id",
"type": "text",
"primaryKey": false,
"notNull": true,
@ -193,13 +184,6 @@
"notNull": false,
"autoincrement": false
},
"status_message": {
"name": "status_message",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "integer",
@ -221,8 +205,8 @@
"uniqueConstraints": {},
"checkConstraints": {}
},
"task_workbench_sessions": {
"name": "task_workbench_sessions",
"task_workspace_sessions": {
"name": "task_workspace_sessions",
"columns": {
"session_id": {
"name": "session_id",
@ -231,6 +215,13 @@
"notNull": true,
"autoincrement": false
},
"sandbox_session_id": {
"name": "sandbox_session_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"session_name": {
"name": "session_name",
"type": "text",
@ -245,32 +236,31 @@
"notNull": true,
"autoincrement": false
},
"unread": {
"name": "unread",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": 0
},
"draft_text": {
"name": "draft_text",
"status": {
"name": "status",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "''"
"default": "'ready'"
},
"draft_attachments_json": {
"name": "draft_attachments_json",
"error_message": {
"name": "error_message",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"transcript_json": {
"name": "transcript_json",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false,
"default": "'[]'"
},
"draft_updated_at": {
"name": "draft_updated_at",
"transcript_updated_at": {
"name": "transcript_updated_at",
"type": "integer",
"primaryKey": false,
"notNull": false,

View file

@ -10,12 +10,6 @@ const journal = {
tag: "0000_charming_maestro",
breakpoints: true,
},
{
idx: 1,
when: 1773810000000,
tag: "0001_sandbox_provider_columns",
breakpoints: true,
},
],
} as const;
@ -27,10 +21,9 @@ export default {
\`branch_name\` text,
\`title\` text,
\`task\` text NOT NULL,
\`provider_id\` text NOT NULL,
\`sandbox_provider_id\` text NOT NULL,
\`status\` text NOT NULL,
\`agent_type\` text DEFAULT 'claude',
\`pr_submitted\` integer DEFAULT 0,
\`pull_request_json\` text,
\`created_at\` integer NOT NULL,
\`updated_at\` integer NOT NULL,
CONSTRAINT "task_singleton_id_check" CHECK("task"."id" = 1)
@ -39,43 +32,39 @@ export default {
CREATE TABLE \`task_runtime\` (
\`id\` integer PRIMARY KEY NOT NULL,
\`active_sandbox_id\` text,
\`active_session_id\` text,
\`active_switch_target\` text,
\`active_cwd\` text,
\`status_message\` text,
\`git_state_json\` text,
\`git_state_updated_at\` integer,
\`updated_at\` integer NOT NULL,
CONSTRAINT "task_runtime_singleton_id_check" CHECK("task_runtime"."id" = 1)
);
--> statement-breakpoint
CREATE TABLE \`task_sandboxes\` (
\`sandbox_id\` text PRIMARY KEY NOT NULL,
\`provider_id\` text NOT NULL,
\`sandbox_provider_id\` text NOT NULL,
\`sandbox_actor_id\` text,
\`switch_target\` text NOT NULL,
\`cwd\` text,
\`status_message\` text,
\`created_at\` integer NOT NULL,
\`updated_at\` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE \`task_workbench_sessions\` (
CREATE TABLE \`task_workspace_sessions\` (
\`session_id\` text PRIMARY KEY NOT NULL,
\`sandbox_session_id\` text,
\`session_name\` text NOT NULL,
\`model\` text NOT NULL,
\`unread\` integer DEFAULT 0 NOT NULL,
\`draft_text\` text DEFAULT '' NOT NULL,
\`draft_attachments_json\` text DEFAULT '[]' NOT NULL,
\`draft_updated_at\` integer,
\`status\` text DEFAULT 'ready' NOT NULL,
\`error_message\` text,
\`transcript_json\` text DEFAULT '[]' NOT NULL,
\`transcript_updated_at\` integer,
\`created\` integer DEFAULT 1 NOT NULL,
\`closed\` integer DEFAULT 0 NOT NULL,
\`thinking_since_ms\` integer,
\`created_at\` integer NOT NULL,
\`created_at\` integer NOT NULL,
\`updated_at\` integer NOT NULL
);
`,
m0001: `ALTER TABLE \`task\` RENAME COLUMN \`provider_id\` TO \`sandbox_provider_id\`;
--> statement-breakpoint
ALTER TABLE \`task_sandboxes\` RENAME COLUMN \`provider_id\` TO \`sandbox_provider_id\`;
`,
} as const,
};

View file

@ -11,8 +11,7 @@ export const task = sqliteTable(
task: text("task").notNull(),
sandboxProviderId: text("sandbox_provider_id").notNull(),
status: text("status").notNull(),
agentType: text("agent_type").default("claude"),
prSubmitted: integer("pr_submitted").default(0),
pullRequestJson: text("pull_request_json"),
createdAt: integer("created_at").notNull(),
updatedAt: integer("updated_at").notNull(),
},
@ -24,14 +23,10 @@ export const taskRuntime = sqliteTable(
{
id: integer("id").primaryKey(),
activeSandboxId: text("active_sandbox_id"),
activeSessionId: text("active_session_id"),
activeSwitchTarget: text("active_switch_target"),
activeCwd: text("active_cwd"),
statusMessage: text("status_message"),
gitStateJson: text("git_state_json"),
gitStateUpdatedAt: integer("git_state_updated_at"),
provisionStage: text("provision_stage"),
provisionStageUpdatedAt: integer("provision_stage_updated_at"),
updatedAt: integer("updated_at").notNull(),
},
(table) => [check("task_runtime_singleton_id_check", sql`${table.id} = 1`)],
@ -48,18 +43,17 @@ export const taskSandboxes = sqliteTable("task_sandboxes", {
sandboxActorId: text("sandbox_actor_id"),
switchTarget: text("switch_target").notNull(),
cwd: text("cwd"),
statusMessage: text("status_message"),
createdAt: integer("created_at").notNull(),
updatedAt: integer("updated_at").notNull(),
});
/**
* Coordinator index of workbench sessions within this task.
* Coordinator index of workspace sessions within this task.
* The task actor is the coordinator for sessions. Each row holds session
* metadata, model, status, transcript, and draft state. Sessions are
* sub-entities of the task no separate session actor in the DB.
*/
export const taskWorkbenchSessions = sqliteTable("task_workbench_sessions", {
export const taskWorkspaceSessions = sqliteTable("task_workspace_sessions", {
sessionId: text("session_id").notNull().primaryKey(),
sandboxSessionId: text("sandbox_session_id"),
sessionName: text("session_name").notNull(),
@ -68,11 +62,6 @@ export const taskWorkbenchSessions = sqliteTable("task_workbench_sessions", {
errorMessage: text("error_message"),
transcriptJson: text("transcript_json").notNull().default("[]"),
transcriptUpdatedAt: integer("transcript_updated_at"),
unread: integer("unread").notNull().default(0),
draftText: text("draft_text").notNull().default(""),
// Structured by the workbench composer attachment payload format.
draftAttachmentsJson: text("draft_attachments_json").notNull().default("[]"),
draftUpdatedAt: integer("draft_updated_at"),
created: integer("created").notNull().default(1),
closed: integer("closed").notNull().default(0),
thinkingSinceMs: integer("thinking_since_ms"),

View file

@ -1,393 +1,47 @@
import { actor, queue } from "rivetkit";
import { workflow } from "rivetkit/workflow";
import type {
AgentType,
TaskRecord,
TaskWorkbenchChangeModelInput,
TaskWorkbenchRenameInput,
TaskWorkbenchRenameSessionInput,
TaskWorkbenchSetSessionUnreadInput,
TaskWorkbenchSendMessageInput,
TaskWorkbenchUpdateDraftInput,
SandboxProviderId,
} from "@sandbox-agent/foundry-shared";
import { expectQueueResponse } from "../../services/queue.js";
import { selfTask } from "../handles.js";
import { actor } from "rivetkit";
import type { TaskRecord } from "@sandbox-agent/foundry-shared";
import { taskDb } from "./db/db.js";
import { getCurrentRecord } from "./workflow/common.js";
import {
changeWorkbenchModel,
closeWorkbenchSession,
createWorkbenchSession,
getSessionDetail,
getTaskDetail,
getTaskSummary,
markWorkbenchUnread,
publishWorkbenchPr,
renameWorkbenchBranch,
renameWorkbenchTask,
renameWorkbenchSession,
revertWorkbenchFile,
sendWorkbenchMessage,
syncWorkbenchSessionStatus,
setWorkbenchSessionUnread,
stopWorkbenchSession,
updateWorkbenchDraft,
} from "./workbench.js";
import { TASK_QUEUE_NAMES, taskWorkflowQueueName, runTaskWorkflow } from "./workflow/index.js";
import { getSessionDetail, getTaskDetail, getTaskSummary } from "./workspace.js";
import { taskCommandActions } from "./workflow/index.js";
export interface TaskInput {
organizationId: string;
repoId: string;
taskId: string;
repoRemote: string;
branchName: string | null;
title: string | null;
task: string;
sandboxProviderId: SandboxProviderId;
agentType: AgentType | null;
explicitTitle: string | null;
explicitBranchName: string | null;
initialPrompt: string | null;
}
interface InitializeCommand {
sandboxProviderId?: SandboxProviderId;
}
interface TaskActionCommand {
reason?: string;
}
interface TaskSessionCommand {
sessionId: string;
}
interface TaskStatusSyncCommand {
sessionId: string;
status: "running" | "idle" | "error";
at: number;
}
interface TaskWorkbenchValueCommand {
value: string;
}
interface TaskWorkbenchSessionTitleCommand {
sessionId: string;
title: string;
}
interface TaskWorkbenchSessionUnreadCommand {
sessionId: string;
unread: boolean;
}
interface TaskWorkbenchUpdateDraftCommand {
sessionId: string;
text: string;
attachments: Array<any>;
}
interface TaskWorkbenchChangeModelCommand {
sessionId: string;
model: string;
}
interface TaskWorkbenchSendMessageCommand {
sessionId: string;
text: string;
attachments: Array<any>;
}
interface TaskWorkbenchCreateSessionCommand {
model?: string;
}
interface TaskWorkbenchCreateSessionAndSendCommand {
model?: string;
text: string;
}
interface TaskWorkbenchSessionCommand {
sessionId: string;
}
export const task = actor({
db: taskDb,
queues: Object.fromEntries(TASK_QUEUE_NAMES.map((name) => [name, queue()])),
options: {
name: "Task",
icon: "wrench",
actionTimeout: 5 * 60_000,
actionTimeout: 10 * 60_000,
},
createState: (_c, input: TaskInput) => ({
organizationId: input.organizationId,
repoId: input.repoId,
taskId: input.taskId,
repoRemote: input.repoRemote,
branchName: input.branchName,
title: input.title,
task: input.task,
sandboxProviderId: input.sandboxProviderId,
agentType: input.agentType,
explicitTitle: input.explicitTitle,
explicitBranchName: input.explicitBranchName,
initialPrompt: input.initialPrompt,
initialized: false,
previousStatus: null as string | null,
}),
actions: {
async initialize(c, cmd: InitializeCommand): Promise<TaskRecord> {
const self = selfTask(c);
const result = await self.send(taskWorkflowQueueName("task.command.initialize"), cmd ?? {}, {
wait: true,
timeout: 10_000,
});
return expectQueueResponse<TaskRecord>(result);
},
async provision(c, cmd: InitializeCommand): Promise<{ ok: true }> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.provision"), cmd ?? {}, {
wait: false,
});
return { ok: true };
},
async attach(c, cmd?: TaskActionCommand): Promise<{ target: string; sessionId: string | null }> {
const self = selfTask(c);
const result = await self.send(taskWorkflowQueueName("task.command.attach"), cmd ?? {}, {
wait: true,
timeout: 10_000,
});
return expectQueueResponse<{ target: string; sessionId: string | null }>(result);
},
async switch(c): Promise<{ switchTarget: string }> {
const self = selfTask(c);
const result = await self.send(
taskWorkflowQueueName("task.command.switch"),
{},
{
wait: true,
timeout: 10_000,
},
);
return expectQueueResponse<{ switchTarget: string }>(result);
},
async push(c, cmd?: TaskActionCommand): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.push"), cmd ?? {}, {
wait: false,
});
},
async sync(c, cmd?: TaskActionCommand): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.sync"), cmd ?? {}, {
wait: false,
});
},
async merge(c, cmd?: TaskActionCommand): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.merge"), cmd ?? {}, {
wait: false,
});
},
async archive(c, cmd?: TaskActionCommand): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.archive"), cmd ?? {}, {
wait: false,
});
},
async kill(c, cmd?: TaskActionCommand): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.kill"), cmd ?? {}, {
wait: false,
});
},
async get(c): Promise<TaskRecord> {
return await getCurrentRecord({ db: c.db, state: c.state });
return await getCurrentRecord(c);
},
async getTaskSummary(c) {
return await getTaskSummary(c);
},
async getTaskDetail(c) {
return await getTaskDetail(c);
async getTaskDetail(c, input?: { authSessionId?: string }) {
return await getTaskDetail(c, input?.authSessionId);
},
async getSessionDetail(c, input: { sessionId: string }) {
return await getSessionDetail(c, input.sessionId);
async getSessionDetail(c, input: { sessionId: string; authSessionId?: string }) {
return await getSessionDetail(c, input.sessionId, input.authSessionId);
},
async markWorkbenchUnread(c): Promise<void> {
const self = selfTask(c);
await self.send(
taskWorkflowQueueName("task.command.workbench.mark_unread"),
{},
{
wait: true,
timeout: 10_000,
},
);
},
async renameWorkbenchTask(c, input: TaskWorkbenchRenameInput): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.workbench.rename_task"), { value: input.value } satisfies TaskWorkbenchValueCommand, {
wait: true,
timeout: 20_000,
});
},
async renameWorkbenchBranch(c, input: TaskWorkbenchRenameInput): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.workbench.rename_branch"), { value: input.value } satisfies TaskWorkbenchValueCommand, {
wait: false,
});
},
async createWorkbenchSession(c, input?: { model?: string }): Promise<{ sessionId: string }> {
const self = selfTask(c);
const result = await self.send(
taskWorkflowQueueName("task.command.workbench.create_session"),
{ ...(input?.model ? { model: input.model } : {}) } satisfies TaskWorkbenchCreateSessionCommand,
{
wait: true,
timeout: 10_000,
},
);
return expectQueueResponse<{ sessionId: string }>(result);
},
/**
* Fire-and-forget: creates a workbench session and sends the initial message.
* Used by createWorkbenchTask so the caller doesn't block on session creation.
*/
async createWorkbenchSessionAndSend(c, input: { model?: string; text: string }): Promise<void> {
const self = selfTask(c);
await self.send(
taskWorkflowQueueName("task.command.workbench.create_session_and_send"),
{ model: input.model, text: input.text } satisfies TaskWorkbenchCreateSessionAndSendCommand,
{ wait: false },
);
},
async renameWorkbenchSession(c, input: TaskWorkbenchRenameSessionInput): Promise<void> {
const self = selfTask(c);
await self.send(
taskWorkflowQueueName("task.command.workbench.rename_session"),
{ sessionId: input.sessionId, title: input.title } satisfies TaskWorkbenchSessionTitleCommand,
{
wait: true,
timeout: 10_000,
},
);
},
async setWorkbenchSessionUnread(c, input: TaskWorkbenchSetSessionUnreadInput): Promise<void> {
const self = selfTask(c);
await self.send(
taskWorkflowQueueName("task.command.workbench.set_session_unread"),
{ sessionId: input.sessionId, unread: input.unread } satisfies TaskWorkbenchSessionUnreadCommand,
{
wait: true,
timeout: 10_000,
},
);
},
async updateWorkbenchDraft(c, input: TaskWorkbenchUpdateDraftInput): Promise<void> {
const self = selfTask(c);
await self.send(
taskWorkflowQueueName("task.command.workbench.update_draft"),
{
sessionId: input.sessionId,
text: input.text,
attachments: input.attachments,
} satisfies TaskWorkbenchUpdateDraftCommand,
{
wait: false,
},
);
},
async changeWorkbenchModel(c, input: TaskWorkbenchChangeModelInput): Promise<void> {
const self = selfTask(c);
await self.send(
taskWorkflowQueueName("task.command.workbench.change_model"),
{ sessionId: input.sessionId, model: input.model } satisfies TaskWorkbenchChangeModelCommand,
{
wait: true,
timeout: 10_000,
},
);
},
async sendWorkbenchMessage(c, input: TaskWorkbenchSendMessageInput): Promise<void> {
const self = selfTask(c);
await self.send(
taskWorkflowQueueName("task.command.workbench.send_message"),
{
sessionId: input.sessionId,
text: input.text,
attachments: input.attachments,
} satisfies TaskWorkbenchSendMessageCommand,
{
wait: false,
},
);
},
async stopWorkbenchSession(c, input: TaskSessionCommand): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.workbench.stop_session"), { sessionId: input.sessionId } satisfies TaskWorkbenchSessionCommand, {
wait: false,
});
},
async syncWorkbenchSessionStatus(c, input: TaskStatusSyncCommand): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.workbench.sync_session_status"), input, {
wait: true,
timeout: 20_000,
});
},
async closeWorkbenchSession(c, input: TaskSessionCommand): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.workbench.close_session"), { sessionId: input.sessionId } satisfies TaskWorkbenchSessionCommand, {
wait: false,
});
},
async publishWorkbenchPr(c): Promise<void> {
const self = selfTask(c);
await self.send(
taskWorkflowQueueName("task.command.workbench.publish_pr"),
{},
{
wait: false,
},
);
},
async revertWorkbenchFile(c, input: { path: string }): Promise<void> {
const self = selfTask(c);
await self.send(taskWorkflowQueueName("task.command.workbench.revert_file"), input, {
wait: false,
});
},
...taskCommandActions,
},
run: workflow(runTaskWorkflow),
});
export { TASK_QUEUE_NAMES };
export { taskWorkflowQueueName } from "./workflow/index.js";

View file

@ -2,8 +2,8 @@
import { eq } from "drizzle-orm";
import { getTaskSandbox } from "../../handles.js";
import { logActorWarning, resolveErrorMessage } from "../../logging.js";
import { task as taskTable, taskRuntime } from "../db/schema.js";
import { TASK_ROW_ID, appendHistory, getCurrentRecord, setTaskState } from "./common.js";
import { task as taskTable } from "../db/schema.js";
import { TASK_ROW_ID, appendAuditLog, getCurrentRecord, setTaskState } from "./common.js";
import { pushActiveBranchActivity } from "./push.js";
async function withTimeout<T>(promise: Promise<T>, timeoutMs: number, label: string): Promise<T> {
@ -25,6 +25,7 @@ async function withTimeout<T>(promise: Promise<T>, timeoutMs: number, label: str
export async function handleAttachActivity(loopCtx: any, msg: any): Promise<void> {
const record = await getCurrentRecord(loopCtx);
let target = record.sandboxes.find((sandbox: any) => sandbox.sandboxId === record.activeSandboxId)?.switchTarget ?? "";
const sessionId = msg.body?.sessionId ?? null;
if (record.activeSandboxId) {
try {
@ -38,14 +39,14 @@ export async function handleAttachActivity(loopCtx: any, msg: any): Promise<void
}
}
await appendHistory(loopCtx, "task.attach", {
await appendAuditLog(loopCtx, "task.attach", {
target,
sessionId: record.activeSessionId,
sessionId,
});
await msg.complete({
target,
sessionId: record.activeSessionId,
sessionId,
});
}
@ -64,20 +65,17 @@ export async function handlePushActivity(loopCtx: any, msg: any): Promise<void>
await msg.complete({ ok: true });
}
export async function handleSimpleCommandActivity(loopCtx: any, msg: any, statusMessage: string, historyKind: string): Promise<void> {
const db = loopCtx.db;
await db.update(taskRuntime).set({ statusMessage, updatedAt: Date.now() }).where(eq(taskRuntime.id, TASK_ROW_ID)).run();
await appendHistory(loopCtx, historyKind, { reason: msg.body?.reason ?? null });
export async function handleSimpleCommandActivity(loopCtx: any, msg: any, historyKind: string): Promise<void> {
await appendAuditLog(loopCtx, historyKind, { reason: msg.body?.reason ?? null });
await msg.complete({ ok: true });
}
export async function handleArchiveActivity(loopCtx: any, msg: any): Promise<void> {
await setTaskState(loopCtx, "archive_stop_status_sync", "stopping status sync");
await setTaskState(loopCtx, "archive_stop_status_sync");
const record = await getCurrentRecord(loopCtx);
if (record.activeSandboxId) {
await setTaskState(loopCtx, "archive_release_sandbox", "releasing sandbox");
await setTaskState(loopCtx, "archive_release_sandbox");
void withTimeout(getTaskSandbox(loopCtx, loopCtx.state.organizationId, record.activeSandboxId).destroy(), 45_000, "sandbox destroy").catch((error) => {
logActorWarning("task.commands", "failed to release sandbox during archive", {
organizationId: loopCtx.state.organizationId,
@ -90,17 +88,15 @@ export async function handleArchiveActivity(loopCtx: any, msg: any): Promise<voi
}
const db = loopCtx.db;
await setTaskState(loopCtx, "archive_finalize", "finalizing archive");
await setTaskState(loopCtx, "archive_finalize");
await db.update(taskTable).set({ status: "archived", updatedAt: Date.now() }).where(eq(taskTable.id, TASK_ROW_ID)).run();
await db.update(taskRuntime).set({ activeSessionId: null, statusMessage: "archived", updatedAt: Date.now() }).where(eq(taskRuntime.id, TASK_ROW_ID)).run();
await appendHistory(loopCtx, "task.archive", { reason: msg.body?.reason ?? null });
await appendAuditLog(loopCtx, "task.archive", { reason: msg.body?.reason ?? null });
await msg.complete({ ok: true });
}
export async function killDestroySandboxActivity(loopCtx: any): Promise<void> {
await setTaskState(loopCtx, "kill_destroy_sandbox", "destroying sandbox");
await setTaskState(loopCtx, "kill_destroy_sandbox");
const record = await getCurrentRecord(loopCtx);
if (!record.activeSandboxId) {
return;
@ -110,13 +106,11 @@ export async function killDestroySandboxActivity(loopCtx: any): Promise<void> {
}
export async function killWriteDbActivity(loopCtx: any, msg: any): Promise<void> {
await setTaskState(loopCtx, "kill_finalize", "finalizing kill");
await setTaskState(loopCtx, "kill_finalize");
const db = loopCtx.db;
await db.update(taskTable).set({ status: "killed", updatedAt: Date.now() }).where(eq(taskTable.id, TASK_ROW_ID)).run();
await db.update(taskRuntime).set({ statusMessage: "killed", updatedAt: Date.now() }).where(eq(taskRuntime.id, TASK_ROW_ID)).run();
await appendHistory(loopCtx, "task.kill", { reason: msg.body?.reason ?? null });
await appendAuditLog(loopCtx, "task.kill", { reason: msg.body?.reason ?? null });
await msg.complete({ ok: true });
}

View file

@ -2,8 +2,10 @@
import { eq } from "drizzle-orm";
import type { TaskRecord, TaskStatus } from "@sandbox-agent/foundry-shared";
import { task as taskTable, taskRuntime, taskSandboxes } from "../db/schema.js";
import { historyKey } from "../../keys.js";
import { broadcastTaskUpdate } from "../workbench.js";
import { getOrCreateAuditLog, getOrCreateOrganization } from "../../handles.js";
import { broadcastTaskUpdate } from "../workspace.js";
import { getActorRuntimeContext } from "../../context.js";
import { defaultSandboxProviderId } from "../../../sandbox-config.js";
export const TASK_ROW_ID = 1;
@ -56,50 +58,32 @@ export function buildAgentPrompt(task: string): string {
return task.trim();
}
export async function setTaskState(ctx: any, status: TaskStatus, statusMessage?: string): Promise<void> {
export async function setTaskState(ctx: any, status: TaskStatus): Promise<void> {
const now = Date.now();
const db = ctx.db;
await db.update(taskTable).set({ status, updatedAt: now }).where(eq(taskTable.id, TASK_ROW_ID)).run();
if (statusMessage != null) {
await db
.insert(taskRuntime)
.values({
id: TASK_ROW_ID,
activeSandboxId: null,
activeSessionId: null,
activeSwitchTarget: null,
activeCwd: null,
statusMessage,
updatedAt: now,
})
.onConflictDoUpdate({
target: taskRuntime.id,
set: {
statusMessage,
updatedAt: now,
},
})
.run();
}
await broadcastTaskUpdate(ctx);
}
/**
* Read the task's current record from its local SQLite DB.
* If the task actor was lazily created (virtual task from PR sync) and has no
* DB rows yet, auto-initializes by reading branch/title from the org actor's
* getTaskIndexEntry. This is the self-initialization path for lazy task actors.
*/
export async function getCurrentRecord(ctx: any): Promise<TaskRecord> {
const db = ctx.db;
const row = await db
const organization = await getOrCreateOrganization(ctx, ctx.state.organizationId);
let row = await db
.select({
branchName: taskTable.branchName,
title: taskTable.title,
task: taskTable.task,
sandboxProviderId: taskTable.sandboxProviderId,
status: taskTable.status,
statusMessage: taskRuntime.statusMessage,
pullRequestJson: taskTable.pullRequestJson,
activeSandboxId: taskRuntime.activeSandboxId,
activeSessionId: taskRuntime.activeSessionId,
agentType: taskTable.agentType,
prSubmitted: taskTable.prSubmitted,
createdAt: taskTable.createdAt,
updatedAt: taskTable.updatedAt,
})
@ -109,7 +93,58 @@ export async function getCurrentRecord(ctx: any): Promise<TaskRecord> {
.get();
if (!row) {
throw new Error(`Task not found: ${ctx.state.taskId}`);
// Virtual task — auto-initialize from org actor's task index data
let branchName: string | null = null;
let title = "Untitled";
try {
const entry = await organization.getTaskIndexEntry({ taskId: ctx.state.taskId });
branchName = entry?.branchName ?? null;
title = entry?.title ?? title;
} catch {}
const { config } = getActorRuntimeContext();
const { initBootstrapDbActivity, initCompleteActivity } = await import("./init.js");
await initBootstrapDbActivity(ctx, {
sandboxProviderId: defaultSandboxProviderId(config),
branchName,
title,
task: title,
});
await initCompleteActivity(ctx, { sandboxProviderId: defaultSandboxProviderId(config) });
// Re-read the row after initialization
const initialized = await db
.select({
branchName: taskTable.branchName,
title: taskTable.title,
task: taskTable.task,
sandboxProviderId: taskTable.sandboxProviderId,
status: taskTable.status,
pullRequestJson: taskTable.pullRequestJson,
activeSandboxId: taskRuntime.activeSandboxId,
createdAt: taskTable.createdAt,
updatedAt: taskTable.updatedAt,
})
.from(taskTable)
.leftJoin(taskRuntime, eq(taskTable.id, taskRuntime.id))
.where(eq(taskTable.id, TASK_ROW_ID))
.get();
if (!initialized) {
throw new Error(`Task not found after initialization: ${ctx.state.taskId}`);
}
row = initialized;
}
const repositoryMetadata = await organization.getRepositoryMetadata({ repoId: ctx.state.repoId });
let pullRequest = null;
if (row.pullRequestJson) {
try {
pullRequest = JSON.parse(row.pullRequestJson);
} catch {
pullRequest = null;
}
}
const sandboxes = await db
@ -128,16 +163,15 @@ export async function getCurrentRecord(ctx: any): Promise<TaskRecord> {
return {
organizationId: ctx.state.organizationId,
repoId: ctx.state.repoId,
repoRemote: ctx.state.repoRemote,
repoRemote: repositoryMetadata.remoteUrl,
taskId: ctx.state.taskId,
branchName: row.branchName,
title: row.title,
task: row.task,
sandboxProviderId: row.sandboxProviderId,
status: row.status,
statusMessage: row.statusMessage ?? null,
activeSandboxId: row.activeSandboxId ?? null,
activeSessionId: row.activeSessionId ?? null,
pullRequest,
sandboxes: sandboxes.map((sb) => ({
sandboxId: sb.sandboxId,
sandboxProviderId: sb.sandboxProviderId,
@ -147,31 +181,19 @@ export async function getCurrentRecord(ctx: any): Promise<TaskRecord> {
createdAt: sb.createdAt,
updatedAt: sb.updatedAt,
})),
agentType: row.agentType ?? null,
prSubmitted: Boolean(row.prSubmitted),
diffStat: null,
hasUnpushed: null,
conflictsWithMain: null,
parentBranch: null,
prUrl: null,
prAuthor: null,
ciStatus: null,
reviewStatus: null,
reviewer: null,
createdAt: row.createdAt,
updatedAt: row.updatedAt,
} as TaskRecord;
}
export async function appendHistory(ctx: any, kind: string, payload: Record<string, unknown>): Promise<void> {
const client = ctx.client();
const history = await client.history.getOrCreate(historyKey(ctx.state.organizationId, ctx.state.repoId), {
createWithInput: { organizationId: ctx.state.organizationId, repoId: ctx.state.repoId },
});
await history.append({
export async function appendAuditLog(ctx: any, kind: string, payload: Record<string, unknown>): Promise<void> {
const row = await ctx.db.select({ branchName: taskTable.branchName }).from(taskTable).where(eq(taskTable.id, TASK_ROW_ID)).get();
const auditLog = await getOrCreateAuditLog(ctx, ctx.state.organizationId);
void auditLog.append({
kind,
repoId: ctx.state.repoId,
taskId: ctx.state.taskId,
branchName: ctx.state.branchName,
branchName: row?.branchName ?? null,
payload,
});

View file

@ -1,4 +1,3 @@
import { Loop } from "rivetkit/workflow";
import { logActorWarning, resolveErrorMessage } from "../../logging.js";
import { getCurrentRecord } from "./common.js";
import { initBootstrapDbActivity, initCompleteActivity, initEnqueueProvisionActivity, initFailedActivity } from "./init.js";
@ -12,283 +11,254 @@ import {
killDestroySandboxActivity,
killWriteDbActivity,
} from "./commands.js";
import { TASK_QUEUE_NAMES } from "./queue.js";
import {
changeWorkbenchModel,
closeWorkbenchSession,
createWorkbenchSession,
ensureWorkbenchSession,
refreshWorkbenchDerivedState,
refreshWorkbenchSessionTranscript,
markWorkbenchUnread,
publishWorkbenchPr,
renameWorkbenchBranch,
renameWorkbenchTask,
renameWorkbenchSession,
revertWorkbenchFile,
sendWorkbenchMessage,
setWorkbenchSessionUnread,
stopWorkbenchSession,
syncWorkbenchSessionStatus,
updateWorkbenchDraft,
} from "../workbench.js";
changeWorkspaceModel,
closeWorkspaceSession,
createWorkspaceSession,
ensureWorkspaceSession,
refreshWorkspaceDerivedState,
refreshWorkspaceSessionTranscript,
markWorkspaceUnread,
publishWorkspacePr,
renameWorkspaceTask,
renameWorkspaceSession,
selectWorkspaceSession,
revertWorkspaceFile,
sendWorkspaceMessage,
setWorkspaceSessionUnread,
stopWorkspaceSession,
syncTaskPullRequest,
syncWorkspaceSessionStatus,
updateWorkspaceDraft,
} from "../workspace.js";
export { TASK_QUEUE_NAMES, taskWorkflowQueueName } from "./queue.js";
export { taskWorkflowQueueName } from "./queue.js";
type TaskQueueName = (typeof TASK_QUEUE_NAMES)[number];
/**
* Task command actions converted from queue/workflow handlers to direct actions.
* Each export becomes an action on the task actor.
*/
export const taskCommandActions = {
async initialize(c: any, body: any) {
await initBootstrapDbActivity(c, body);
await initEnqueueProvisionActivity(c, body);
return await getCurrentRecord(c);
},
type WorkflowHandler = (loopCtx: any, msg: { name: TaskQueueName; body: any; complete: (response: unknown) => Promise<void> }) => Promise<void>;
const commandHandlers: Record<TaskQueueName, WorkflowHandler> = {
"task.command.initialize": async (loopCtx, msg) => {
const body = msg.body;
await loopCtx.step("init-bootstrap-db", async () => initBootstrapDbActivity(loopCtx, body));
await loopCtx.step("init-enqueue-provision", async () => initEnqueueProvisionActivity(loopCtx, body));
await loopCtx.removed("init-dispatch-provision-v2", "step");
const currentRecord = await loopCtx.step("init-read-current-record", async () => getCurrentRecord(loopCtx));
async provision(c: any, body: any) {
try {
await msg.complete(currentRecord);
await initCompleteActivity(c, body);
return { ok: true };
} catch (error) {
logActorWarning("task.workflow", "initialize completion failed", {
error: resolveErrorMessage(error),
});
await initFailedActivity(c, error, body);
return { ok: false, error: resolveErrorMessage(error) };
}
},
"task.command.provision": async (loopCtx, msg) => {
await loopCtx.removed("init-failed", "step");
await loopCtx.removed("init-failed-v2", "step");
async attach(c: any, body: any) {
// handleAttachActivity expects msg with complete — adapt
const result = { value: undefined as any };
const msg = {
name: "task.command.attach",
body,
complete: async (v: any) => {
result.value = v;
},
};
await handleAttachActivity(c, msg);
return result.value;
},
async switchTask(c: any, body: any) {
const result = { value: undefined as any };
const msg = {
name: "task.command.switch",
body,
complete: async (v: any) => {
result.value = v;
},
};
await handleSwitchActivity(c, msg);
return result.value;
},
async push(c: any, body: any) {
const result = { value: undefined as any };
const msg = {
name: "task.command.push",
body,
complete: async (v: any) => {
result.value = v;
},
};
await handlePushActivity(c, msg);
return result.value;
},
async sync(c: any, body: any) {
const result = { value: undefined as any };
const msg = {
name: "task.command.sync",
body,
complete: async (v: any) => {
result.value = v;
},
};
await handleSimpleCommandActivity(c, msg, "task.sync");
return result.value;
},
async merge(c: any, body: any) {
const result = { value: undefined as any };
const msg = {
name: "task.command.merge",
body,
complete: async (v: any) => {
result.value = v;
},
};
await handleSimpleCommandActivity(c, msg, "task.merge");
return result.value;
},
async archive(c: any, body: any) {
const result = { value: undefined as any };
const msg = {
name: "task.command.archive",
body,
complete: async (v: any) => {
result.value = v;
},
};
await handleArchiveActivity(c, msg);
return result.value;
},
async kill(c: any, body: any) {
const result = { value: undefined as any };
const msg = {
name: "task.command.kill",
body,
complete: async (v: any) => {
result.value = v;
},
};
await killDestroySandboxActivity(c);
await killWriteDbActivity(c, msg);
return result.value;
},
async getRecord(c: any, body: any) {
const result = { value: undefined as any };
const msg = {
name: "task.command.get",
body,
complete: async (v: any) => {
result.value = v;
},
};
await handleGetActivity(c, msg);
return result.value;
},
async pullRequestSync(c: any, body: any) {
await syncTaskPullRequest(c, body?.pullRequest ?? null);
return { ok: true };
},
async markUnread(c: any, body: any) {
await markWorkspaceUnread(c, body?.authSessionId);
return { ok: true };
},
async renameTask(c: any, body: any) {
await renameWorkspaceTask(c, body.value);
return { ok: true };
},
async createSession(c: any, body: any) {
return await createWorkspaceSession(c, body?.model, body?.authSessionId);
},
async createSessionAndSend(c: any, body: any) {
try {
await loopCtx.removed("init-ensure-name", "step");
await loopCtx.removed("init-assert-name", "step");
await loopCtx.removed("init-create-sandbox", "step");
await loopCtx.removed("init-ensure-agent", "step");
await loopCtx.removed("init-start-sandbox-instance", "step");
await loopCtx.removed("init-expose-sandbox", "step");
await loopCtx.removed("init-create-session", "step");
await loopCtx.removed("init-write-db", "step");
await loopCtx.removed("init-start-status-sync", "step");
await loopCtx.step("init-complete", async () => initCompleteActivity(loopCtx, msg.body));
await msg.complete({ ok: true });
} catch (error) {
await loopCtx.step("init-failed-v3", async () => initFailedActivity(loopCtx, error));
await msg.complete({
ok: false,
error: resolveErrorMessage(error),
});
}
},
"task.command.attach": async (loopCtx, msg) => {
await loopCtx.step("handle-attach", async () => handleAttachActivity(loopCtx, msg));
},
"task.command.switch": async (loopCtx, msg) => {
await loopCtx.step("handle-switch", async () => handleSwitchActivity(loopCtx, msg));
},
"task.command.push": async (loopCtx, msg) => {
await loopCtx.step("handle-push", async () => handlePushActivity(loopCtx, msg));
},
"task.command.sync": async (loopCtx, msg) => {
await loopCtx.step("handle-sync", async () => handleSimpleCommandActivity(loopCtx, msg, "sync requested", "task.sync"));
},
"task.command.merge": async (loopCtx, msg) => {
await loopCtx.step("handle-merge", async () => handleSimpleCommandActivity(loopCtx, msg, "merge requested", "task.merge"));
},
"task.command.archive": async (loopCtx, msg) => {
await loopCtx.step("handle-archive", async () => handleArchiveActivity(loopCtx, msg));
},
"task.command.kill": async (loopCtx, msg) => {
await loopCtx.step("kill-destroy-sandbox", async () => killDestroySandboxActivity(loopCtx));
await loopCtx.step("kill-write-db", async () => killWriteDbActivity(loopCtx, msg));
},
"task.command.get": async (loopCtx, msg) => {
await loopCtx.step("handle-get", async () => handleGetActivity(loopCtx, msg));
},
"task.command.workbench.mark_unread": async (loopCtx, msg) => {
await loopCtx.step("workbench-mark-unread", async () => markWorkbenchUnread(loopCtx));
await msg.complete({ ok: true });
},
"task.command.workbench.rename_task": async (loopCtx, msg) => {
await loopCtx.step("workbench-rename-task", async () => renameWorkbenchTask(loopCtx, msg.body.value));
await msg.complete({ ok: true });
},
"task.command.workbench.rename_branch": async (loopCtx, msg) => {
await loopCtx.step({
name: "workbench-rename-branch",
timeout: 5 * 60_000,
run: async () => renameWorkbenchBranch(loopCtx, msg.body.value),
});
await msg.complete({ ok: true });
},
"task.command.workbench.create_session": async (loopCtx, msg) => {
try {
const created = await loopCtx.step({
name: "workbench-create-session",
timeout: 5 * 60_000,
run: async () => createWorkbenchSession(loopCtx, msg.body?.model),
});
await msg.complete(created);
} catch (error) {
await msg.complete({ error: resolveErrorMessage(error) });
}
},
"task.command.workbench.create_session_and_send": async (loopCtx, msg) => {
try {
const created = await loopCtx.step({
name: "workbench-create-session-for-send",
timeout: 5 * 60_000,
run: async () => createWorkbenchSession(loopCtx, msg.body?.model),
});
await loopCtx.step({
name: "workbench-send-initial-message",
timeout: 5 * 60_000,
run: async () => sendWorkbenchMessage(loopCtx, created.sessionId, msg.body.text, []),
});
const created = await createWorkspaceSession(c, body?.model, body?.authSessionId);
await sendWorkspaceMessage(c, created.sessionId, body.text, [], body?.authSessionId);
} catch (error) {
logActorWarning("task.workflow", "create_session_and_send failed", {
error: resolveErrorMessage(error),
});
}
await msg.complete({ ok: true });
return { ok: true };
},
"task.command.workbench.ensure_session": async (loopCtx, msg) => {
await loopCtx.step({
name: "workbench-ensure-session",
timeout: 5 * 60_000,
run: async () => ensureWorkbenchSession(loopCtx, msg.body.sessionId, msg.body?.model),
});
await msg.complete({ ok: true });
async ensureSession(c: any, body: any) {
await ensureWorkspaceSession(c, body.sessionId, body?.model, body?.authSessionId);
return { ok: true };
},
"task.command.workbench.rename_session": async (loopCtx, msg) => {
await loopCtx.step("workbench-rename-session", async () => renameWorkbenchSession(loopCtx, msg.body.sessionId, msg.body.title));
await msg.complete({ ok: true });
async renameSession(c: any, body: any) {
await renameWorkspaceSession(c, body.sessionId, body.title);
return { ok: true };
},
"task.command.workbench.set_session_unread": async (loopCtx, msg) => {
await loopCtx.step("workbench-set-session-unread", async () => setWorkbenchSessionUnread(loopCtx, msg.body.sessionId, msg.body.unread));
await msg.complete({ ok: true });
async selectSession(c: any, body: any) {
await selectWorkspaceSession(c, body.sessionId, body?.authSessionId);
return { ok: true };
},
"task.command.workbench.update_draft": async (loopCtx, msg) => {
await loopCtx.step("workbench-update-draft", async () => updateWorkbenchDraft(loopCtx, msg.body.sessionId, msg.body.text, msg.body.attachments));
await msg.complete({ ok: true });
async setSessionUnread(c: any, body: any) {
await setWorkspaceSessionUnread(c, body.sessionId, body.unread, body?.authSessionId);
return { ok: true };
},
"task.command.workbench.change_model": async (loopCtx, msg) => {
await loopCtx.step("workbench-change-model", async () => changeWorkbenchModel(loopCtx, msg.body.sessionId, msg.body.model));
await msg.complete({ ok: true });
async updateDraft(c: any, body: any) {
await updateWorkspaceDraft(c, body.sessionId, body.text, body.attachments, body?.authSessionId);
return { ok: true };
},
"task.command.workbench.send_message": async (loopCtx, msg) => {
try {
await loopCtx.step({
name: "workbench-send-message",
timeout: 10 * 60_000,
run: async () => sendWorkbenchMessage(loopCtx, msg.body.sessionId, msg.body.text, msg.body.attachments),
});
await msg.complete({ ok: true });
} catch (error) {
await msg.complete({ error: resolveErrorMessage(error) });
}
async changeModel(c: any, body: any) {
await changeWorkspaceModel(c, body.sessionId, body.model, body?.authSessionId);
return { ok: true };
},
"task.command.workbench.stop_session": async (loopCtx, msg) => {
await loopCtx.step({
name: "workbench-stop-session",
timeout: 5 * 60_000,
run: async () => stopWorkbenchSession(loopCtx, msg.body.sessionId),
});
await msg.complete({ ok: true });
async sendMessage(c: any, body: any) {
await sendWorkspaceMessage(c, body.sessionId, body.text, body.attachments, body?.authSessionId);
return { ok: true };
},
"task.command.workbench.sync_session_status": async (loopCtx, msg) => {
await loopCtx.step("workbench-sync-session-status", async () => syncWorkbenchSessionStatus(loopCtx, msg.body.sessionId, msg.body.status, msg.body.at));
await msg.complete({ ok: true });
async stopSession(c: any, body: any) {
await stopWorkspaceSession(c, body.sessionId);
return { ok: true };
},
"task.command.workbench.refresh_derived": async (loopCtx, msg) => {
await loopCtx.step({
name: "workbench-refresh-derived",
timeout: 5 * 60_000,
run: async () => refreshWorkbenchDerivedState(loopCtx),
});
await msg.complete({ ok: true });
async syncSessionStatus(c: any, body: any) {
await syncWorkspaceSessionStatus(c, body.sessionId, body.status, body.at);
return { ok: true };
},
"task.command.workbench.refresh_session_transcript": async (loopCtx, msg) => {
await loopCtx.step({
name: "workbench-refresh-session-transcript",
timeout: 60_000,
run: async () => refreshWorkbenchSessionTranscript(loopCtx, msg.body.sessionId),
});
await msg.complete({ ok: true });
async refreshDerived(c: any, _body: any) {
await refreshWorkspaceDerivedState(c);
return { ok: true };
},
"task.command.workbench.close_session": async (loopCtx, msg) => {
await loopCtx.step({
name: "workbench-close-session",
timeout: 5 * 60_000,
run: async () => closeWorkbenchSession(loopCtx, msg.body.sessionId),
});
await msg.complete({ ok: true });
async refreshSessionTranscript(c: any, body: any) {
await refreshWorkspaceSessionTranscript(c, body.sessionId);
return { ok: true };
},
"task.command.workbench.publish_pr": async (loopCtx, msg) => {
await loopCtx.step({
name: "workbench-publish-pr",
timeout: 10 * 60_000,
run: async () => publishWorkbenchPr(loopCtx),
});
await msg.complete({ ok: true });
async closeSession(c: any, body: any) {
await closeWorkspaceSession(c, body.sessionId, body?.authSessionId);
return { ok: true };
},
"task.command.workbench.revert_file": async (loopCtx, msg) => {
await loopCtx.step({
name: "workbench-revert-file",
timeout: 5 * 60_000,
run: async () => revertWorkbenchFile(loopCtx, msg.body.path),
});
await msg.complete({ ok: true });
async publishPr(c: any, _body: any) {
await publishWorkspacePr(c);
return { ok: true };
},
async revertFile(c: any, body: any) {
await revertWorkspaceFile(c, body.path);
return { ok: true };
},
};
/**
 * Drives the task actor's command loop: pulls the next queued command,
 * dispatches it to its registered handler, and reports handler failures back
 * to the sender via msg.complete without breaking the loop.
 */
export async function runTaskWorkflow(ctx: any): Promise<void> {
  const handleNextCommand = async (loopCtx: any): Promise<any> => {
    const msg = await loopCtx.queue.next("next-command", {
      names: [...TASK_QUEUE_NAMES],
      completable: true,
    });
    if (msg) {
      const handler = commandHandlers[msg.name as TaskQueueName];
      if (handler) {
        try {
          await handler(loopCtx, msg);
        } catch (error) {
          const message = resolveErrorMessage(error);
          logActorWarning("task.workflow", "task workflow command failed", {
            queueName: msg.name,
            error: message,
          });
          // Best-effort completion: the message may already have been settled.
          await msg.complete({ error: message }).catch(() => {});
        }
      }
    }
    // Keep looping whether or not a message arrived or a handler failed.
    return Loop.continue(undefined);
  };
  await ctx.loop("task-command-loop", handleNextCommand);
}

View file

@ -1,49 +1,44 @@
// @ts-nocheck
import { eq } from "drizzle-orm";
import { getActorRuntimeContext } from "../../context.js";
import { getOrCreateHistory, selfTask } from "../../handles.js";
import { selfTask } from "../../handles.js";
import { resolveErrorMessage } from "../../logging.js";
import { defaultSandboxProviderId } from "../../../sandbox-config.js";
import { task as taskTable, taskRuntime } from "../db/schema.js";
import { TASK_ROW_ID, appendHistory, collectErrorMessages, resolveErrorDetail, setTaskState } from "./common.js";
import { taskWorkflowQueueName } from "./queue.js";
/**
 * Best-effort, idempotent mini-migration: adds cache columns to task_runtime
 * if they are missing. Errors (e.g. "duplicate column") are swallowed so the
 * call is safe to repeat on every startup.
 */
async function ensureTaskRuntimeCacheColumns(db: any): Promise<void> {
  const columnDefs = [
    "git_state_json text",
    "git_state_updated_at integer",
    "provision_stage text",
    "provision_stage_updated_at integer",
  ];
  for (const columnDef of columnDefs) {
    // Sequential, same order as before; failures are intentionally ignored.
    await db.execute(`ALTER TABLE task_runtime ADD COLUMN ${columnDef}`).catch(() => {});
  }
}
import { TASK_ROW_ID, appendAuditLog, collectErrorMessages, resolveErrorDetail, setTaskState } from "./common.js";
// task actions called directly (no queue)
export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise<void> {
const { config } = getActorRuntimeContext();
const sandboxProviderId = body?.sandboxProviderId ?? loopCtx.state.sandboxProviderId ?? defaultSandboxProviderId(config);
const sandboxProviderId = body?.sandboxProviderId ?? defaultSandboxProviderId(config);
const task = body?.task;
if (typeof task !== "string" || task.trim().length === 0) {
throw new Error("task initialize requires the task prompt");
}
const now = Date.now();
await ensureTaskRuntimeCacheColumns(loopCtx.db);
await loopCtx.db
.insert(taskTable)
.values({
id: TASK_ROW_ID,
branchName: loopCtx.state.branchName,
title: loopCtx.state.title,
task: loopCtx.state.task,
branchName: body?.branchName ?? null,
title: body?.title ?? null,
task,
sandboxProviderId,
status: "init_bootstrap_db",
agentType: loopCtx.state.agentType ?? config.default_agent,
pullRequestJson: null,
createdAt: now,
updatedAt: now,
})
.onConflictDoUpdate({
target: taskTable.id,
set: {
branchName: loopCtx.state.branchName,
title: loopCtx.state.title,
task: loopCtx.state.task,
branchName: body?.branchName ?? null,
title: body?.title ?? null,
task,
sandboxProviderId,
status: "init_bootstrap_db",
agentType: loopCtx.state.agentType ?? config.default_agent,
pullRequestJson: null,
updatedAt: now,
},
})
@ -54,26 +49,18 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise<
.values({
id: TASK_ROW_ID,
activeSandboxId: null,
activeSessionId: null,
activeSwitchTarget: null,
activeCwd: null,
statusMessage: "provisioning",
gitStateJson: null,
gitStateUpdatedAt: null,
provisionStage: "queued",
provisionStageUpdatedAt: now,
updatedAt: now,
})
.onConflictDoUpdate({
target: taskRuntime.id,
set: {
activeSandboxId: null,
activeSessionId: null,
activeSwitchTarget: null,
activeCwd: null,
statusMessage: "provisioning",
provisionStage: "queued",
provisionStageUpdatedAt: now,
updatedAt: now,
},
})
@ -81,22 +68,11 @@ export async function initBootstrapDbActivity(loopCtx: any, body: any): Promise<
}
export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Promise<void> {
await setTaskState(loopCtx, "init_enqueue_provision", "provision queued");
await loopCtx.db
.update(taskRuntime)
.set({
provisionStage: "queued",
provisionStageUpdatedAt: Date.now(),
updatedAt: Date.now(),
})
.where(eq(taskRuntime.id, TASK_ROW_ID))
.run();
await setTaskState(loopCtx, "init_enqueue_provision");
const self = selfTask(loopCtx);
try {
await self.send(taskWorkflowQueueName("task.command.provision"), body, {
wait: false,
});
void self.provision(body).catch(() => {});
} catch (error) {
logActorWarning("task.init", "background provision command failed", {
organizationId: loopCtx.state.organizationId,
@ -111,60 +87,52 @@ export async function initEnqueueProvisionActivity(loopCtx: any, body: any): Pro
export async function initCompleteActivity(loopCtx: any, body: any): Promise<void> {
const now = Date.now();
const { config } = getActorRuntimeContext();
const sandboxProviderId = body?.sandboxProviderId ?? loopCtx.state.sandboxProviderId ?? defaultSandboxProviderId(config);
const sandboxProviderId = body?.sandboxProviderId ?? defaultSandboxProviderId(config);
await setTaskState(loopCtx, "init_complete", "task initialized");
await setTaskState(loopCtx, "init_complete");
await loopCtx.db
.update(taskRuntime)
.set({
statusMessage: "ready",
provisionStage: "ready",
provisionStageUpdatedAt: now,
updatedAt: now,
})
.where(eq(taskRuntime.id, TASK_ROW_ID))
.run();
const history = await getOrCreateHistory(loopCtx, loopCtx.state.organizationId, loopCtx.state.repoId);
await history.append({
kind: "task.initialized",
taskId: loopCtx.state.taskId,
branchName: loopCtx.state.branchName,
await appendAuditLog(loopCtx, "task.initialized", {
payload: { sandboxProviderId },
});
loopCtx.state.initialized = true;
}
export async function initFailedActivity(loopCtx: any, error: unknown): Promise<void> {
export async function initFailedActivity(loopCtx: any, error: unknown, body?: any): Promise<void> {
const now = Date.now();
const detail = resolveErrorDetail(error);
const messages = collectErrorMessages(error);
const { config } = getActorRuntimeContext();
const sandboxProviderId = loopCtx.state.sandboxProviderId ?? defaultSandboxProviderId(config);
const sandboxProviderId = defaultSandboxProviderId(config);
const task = typeof body?.task === "string" ? body.task : null;
await loopCtx.db
.insert(taskTable)
.values({
id: TASK_ROW_ID,
branchName: loopCtx.state.branchName ?? null,
title: loopCtx.state.title ?? null,
task: loopCtx.state.task,
branchName: body?.branchName ?? null,
title: body?.title ?? null,
task: task ?? detail,
sandboxProviderId,
status: "error",
agentType: loopCtx.state.agentType ?? config.default_agent,
pullRequestJson: null,
createdAt: now,
updatedAt: now,
})
.onConflictDoUpdate({
target: taskTable.id,
set: {
branchName: loopCtx.state.branchName ?? null,
title: loopCtx.state.title ?? null,
task: loopCtx.state.task,
branchName: body?.branchName ?? null,
title: body?.title ?? null,
task: task ?? detail,
sandboxProviderId,
status: "error",
agentType: loopCtx.state.agentType ?? config.default_agent,
pullRequestJson: null,
updatedAt: now,
},
})
@ -175,30 +143,22 @@ export async function initFailedActivity(loopCtx: any, error: unknown): Promise<
.values({
id: TASK_ROW_ID,
activeSandboxId: null,
activeSessionId: null,
activeSwitchTarget: null,
activeCwd: null,
statusMessage: detail,
provisionStage: "error",
provisionStageUpdatedAt: now,
updatedAt: now,
})
.onConflictDoUpdate({
target: taskRuntime.id,
set: {
activeSandboxId: null,
activeSessionId: null,
activeSwitchTarget: null,
activeCwd: null,
statusMessage: detail,
provisionStage: "error",
provisionStageUpdatedAt: now,
updatedAt: now,
},
})
.run();
await appendHistory(loopCtx, "task.error", {
await appendAuditLog(loopCtx, "task.error", {
detail,
messages,
});

View file

@ -1,9 +1,7 @@
// @ts-nocheck
import { eq } from "drizzle-orm";
import { getTaskSandbox } from "../../handles.js";
import { resolveOrganizationGithubAuth } from "../../../services/github-auth.js";
import { taskRuntime, taskSandboxes } from "../db/schema.js";
import { TASK_ROW_ID, appendHistory, getCurrentRecord } from "./common.js";
import { appendAuditLog, getCurrentRecord } from "./common.js";
export interface PushActiveBranchOptions {
reason?: string | null;
@ -13,7 +11,7 @@ export interface PushActiveBranchOptions {
export async function pushActiveBranchActivity(loopCtx: any, options: PushActiveBranchOptions = {}): Promise<void> {
const record = await getCurrentRecord(loopCtx);
const activeSandboxId = record.activeSandboxId;
const branchName = loopCtx.state.branchName ?? record.branchName;
const branchName = record.branchName;
if (!activeSandboxId) {
throw new Error("cannot push: no active sandbox");
@ -28,19 +26,6 @@ export async function pushActiveBranchActivity(loopCtx: any, options: PushActive
throw new Error("cannot push: active sandbox cwd is not set");
}
const now = Date.now();
await loopCtx.db
.update(taskRuntime)
.set({ statusMessage: `pushing branch ${branchName}`, updatedAt: now })
.where(eq(taskRuntime.id, TASK_ROW_ID))
.run();
await loopCtx.db
.update(taskSandboxes)
.set({ statusMessage: `pushing branch ${branchName}`, updatedAt: now })
.where(eq(taskSandboxes.sandboxId, activeSandboxId))
.run();
const script = [
"set -euo pipefail",
`cd ${JSON.stringify(cwd)}`,
@ -68,20 +53,7 @@ export async function pushActiveBranchActivity(loopCtx: any, options: PushActive
throw new Error(`git push failed (${result.exitCode ?? 1}): ${[result.stdout, result.stderr].filter(Boolean).join("")}`);
}
const updatedAt = Date.now();
await loopCtx.db
.update(taskRuntime)
.set({ statusMessage: `push complete for ${branchName}`, updatedAt })
.where(eq(taskRuntime.id, TASK_ROW_ID))
.run();
await loopCtx.db
.update(taskSandboxes)
.set({ statusMessage: `push complete for ${branchName}`, updatedAt })
.where(eq(taskSandboxes.sandboxId, activeSandboxId))
.run();
await appendHistory(loopCtx, options.historyKind ?? "task.push", {
await appendAuditLog(loopCtx, options.historyKind ?? "task.push", {
reason: options.reason ?? null,
branchName,
sandboxId: activeSandboxId,

View file

@ -9,24 +9,25 @@ export const TASK_QUEUE_NAMES = [
"task.command.archive",
"task.command.kill",
"task.command.get",
"task.command.workbench.mark_unread",
"task.command.workbench.rename_task",
"task.command.workbench.rename_branch",
"task.command.workbench.create_session",
"task.command.workbench.create_session_and_send",
"task.command.workbench.ensure_session",
"task.command.workbench.rename_session",
"task.command.workbench.set_session_unread",
"task.command.workbench.update_draft",
"task.command.workbench.change_model",
"task.command.workbench.send_message",
"task.command.workbench.stop_session",
"task.command.workbench.sync_session_status",
"task.command.workbench.refresh_derived",
"task.command.workbench.refresh_session_transcript",
"task.command.workbench.close_session",
"task.command.workbench.publish_pr",
"task.command.workbench.revert_file",
"task.command.pull_request.sync",
"task.command.workspace.mark_unread",
"task.command.workspace.rename_task",
"task.command.workspace.create_session",
"task.command.workspace.create_session_and_send",
"task.command.workspace.ensure_session",
"task.command.workspace.rename_session",
"task.command.workspace.select_session",
"task.command.workspace.set_session_unread",
"task.command.workspace.update_draft",
"task.command.workspace.change_model",
"task.command.workspace.send_message",
"task.command.workspace.stop_session",
"task.command.workspace.sync_session_status",
"task.command.workspace.refresh_derived",
"task.command.workspace.refresh_session_transcript",
"task.command.workspace.close_session",
"task.command.workspace.publish_pr",
"task.command.workspace.revert_file",
] as const;
export function taskWorkflowQueueName(name: string): string {

View file

@ -0,0 +1,47 @@
import { asc, count as sqlCount, desc } from "drizzle-orm";
import { applyJoinToRow, applyJoinToRows, buildWhere, columnFor, tableFor } from "../query-helpers.js";
export const betterAuthActions = {
// Better Auth adapter action — called by the Better Auth adapter in better-auth.ts.
// Schema and behavior are constrained by Better Auth.
async betterAuthFindOneRecord(c, input: { model: string; where: any[]; join?: any }) {
const table = tableFor(input.model);
const predicate = buildWhere(table, input.where);
const row = predicate ? await c.db.select().from(table).where(predicate).get() : await c.db.select().from(table).get();
return await applyJoinToRow(c, input.model, row ?? null, input.join);
},
// Better Auth adapter action — called by the Better Auth adapter in better-auth.ts.
// Schema and behavior are constrained by Better Auth.
async betterAuthFindManyRecords(c, input: { model: string; where?: any[]; limit?: number; offset?: number; sortBy?: any; join?: any }) {
const table = tableFor(input.model);
const predicate = buildWhere(table, input.where);
let query: any = c.db.select().from(table);
if (predicate) {
query = query.where(predicate);
}
if (input.sortBy?.field) {
const column = columnFor(input.model, table, input.sortBy.field);
query = query.orderBy(input.sortBy.direction === "asc" ? asc(column) : desc(column));
}
if (typeof input.limit === "number") {
query = query.limit(input.limit);
}
if (typeof input.offset === "number") {
query = query.offset(input.offset);
}
const rows = await query.all();
return await applyJoinToRows(c, input.model, rows, input.join);
},
// Better Auth adapter action — called by the Better Auth adapter in better-auth.ts.
// Schema and behavior are constrained by Better Auth.
async betterAuthCountRecords(c, input: { model: string; where?: any[] }) {
const table = tableFor(input.model);
const predicate = buildWhere(table, input.where);
const row = predicate
? await c.db.select({ value: sqlCount() }).from(table).where(predicate).get()
: await c.db.select({ value: sqlCount() }).from(table).get();
return row?.value ?? 0;
},
};

View file

@ -0,0 +1,44 @@
import { eq } from "drizzle-orm";
import { authAccounts, authSessions, authUsers, sessionState, userProfiles, userTaskState } from "../db/schema.js";
import { materializeRow } from "../query-helpers.js";
/** Custom Foundry read actions for the user actor — not part of Better Auth. */
export const userActions = {
  // Custom Foundry action — not part of Better Auth.
  async getAppAuthState(c, input: { sessionId: string }) {
    const session = await c.db.select().from(authSessions).where(eq(authSessions.id, input.sessionId)).get();
    if (!session) {
      return null;
    }
    // The four lookups are independent, so run them in parallel.
    const userQuery = c.db.select().from(authUsers).where(eq(authUsers.authUserId, session.userId)).get();
    const profileQuery = c.db.select().from(userProfiles).where(eq(userProfiles.userId, session.userId)).get();
    const stateQuery = c.db.select().from(sessionState).where(eq(sessionState.sessionId, input.sessionId)).get();
    const accountsQuery = c.db.select().from(authAccounts).where(eq(authAccounts.userId, session.userId)).all();
    const [user, profile, currentSessionState, accounts] = await Promise.all([userQuery, profileQuery, stateQuery, accountsQuery]);
    return {
      session,
      // materializeRow maps the singleton user row's auth_user_id back to `id`.
      user: materializeRow("user", user),
      profile: profile ?? null,
      sessionState: currentSessionState ?? null,
      accounts,
    };
  },
  // Custom Foundry action — not part of Better Auth.
  async getTaskState(c, input: { taskId: string }) {
    const rows = await c.db.select().from(userTaskState).where(eq(userTaskState.taskId, input.taskId)).all();
    // First row with a non-empty activeSessionId wins, same as Array.find.
    let activeSessionId: string | null = null;
    for (const row of rows) {
      if (typeof row.activeSessionId === "string" && row.activeSessionId.length > 0) {
        activeSessionId = row.activeSessionId;
        break;
      }
    }
    const sessions = rows.map((row) => ({
      sessionId: row.sessionId,
      // unread is persisted as an integer 0/1 flag.
      unread: row.unread === 1,
      draftText: row.draftText,
      draftAttachmentsJson: row.draftAttachmentsJson,
      draftUpdatedAt: row.draftUpdatedAt ?? null,
      updatedAt: row.updatedAt,
    }));
    return {
      taskId: input.taskId,
      activeSessionId,
      sessions,
    };
  },
};

View file

@ -2,4 +2,4 @@ import { db } from "rivetkit/db/drizzle";
import * as schema from "./schema.js";
import migrations from "./migrations.js";
export const historyDb = db({ schema, migrations });
export const userDb = db({ schema, migrations });

View file

@ -10,6 +10,12 @@ const journal = {
tag: "0000_auth_user",
breakpoints: true,
},
{
idx: 1,
when: 1773532800000,
tag: "0001_user_task_state",
breakpoints: true,
},
],
} as const;
@ -17,15 +23,19 @@ export default {
journal,
migrations: {
m0000: `CREATE TABLE \`user\` (
\`id\` text PRIMARY KEY NOT NULL,
\`id\` integer PRIMARY KEY NOT NULL,
\`auth_user_id\` text NOT NULL,
\`name\` text NOT NULL,
\`email\` text NOT NULL,
\`email_verified\` integer NOT NULL,
\`image\` text,
\`created_at\` integer NOT NULL,
\`updated_at\` integer NOT NULL
\`updated_at\` integer NOT NULL,
CONSTRAINT \`user_singleton_id_check\` CHECK(\`id\` = 1)
);
--> statement-breakpoint
CREATE UNIQUE INDEX \`user_auth_user_id_idx\` ON \`user\` (\`auth_user_id\`);
--> statement-breakpoint
CREATE TABLE \`session\` (
\`id\` text PRIMARY KEY NOT NULL,
\`token\` text NOT NULL,
@ -58,23 +68,39 @@ CREATE TABLE \`account\` (
CREATE UNIQUE INDEX \`account_provider_account_idx\` ON \`account\` (\`provider_id\`, \`account_id\`);
--> statement-breakpoint
CREATE TABLE \`user_profiles\` (
\`user_id\` text PRIMARY KEY NOT NULL,
\`id\` integer PRIMARY KEY NOT NULL,
\`user_id\` text NOT NULL,
\`github_account_id\` text,
\`github_login\` text,
\`role_label\` text NOT NULL,
\`default_model\` text DEFAULT 'gpt-5.3-codex' NOT NULL,
\`eligible_organization_ids_json\` text NOT NULL,
\`starter_repo_status\` text NOT NULL,
\`starter_repo_starred_at\` integer,
\`starter_repo_skipped_at\` integer,
\`created_at\` integer NOT NULL,
\`updated_at\` integer NOT NULL
\`updated_at\` integer NOT NULL,
CONSTRAINT \`user_profiles_singleton_id_check\` CHECK(\`id\` = 1)
);
--> statement-breakpoint
CREATE UNIQUE INDEX \`user_profiles_user_id_idx\` ON \`user_profiles\` (\`user_id\`);
--> statement-breakpoint
CREATE TABLE \`session_state\` (
\`session_id\` text PRIMARY KEY NOT NULL,
\`active_organization_id\` text,
\`created_at\` integer NOT NULL,
\`updated_at\` integer NOT NULL
);`,
m0001: `CREATE TABLE \`user_task_state\` (
\`task_id\` text NOT NULL,
\`session_id\` text NOT NULL,
\`active_session_id\` text,
\`unread\` integer DEFAULT 0 NOT NULL,
\`draft_text\` text DEFAULT '' NOT NULL,
\`draft_attachments_json\` text DEFAULT '[]' NOT NULL,
\`draft_updated_at\` integer,
\`updated_at\` integer NOT NULL,
PRIMARY KEY(\`task_id\`, \`session_id\`)
);`,
} as const,
};

View file

@ -0,0 +1,112 @@
import { check, integer, primaryKey, sqliteTable, text, uniqueIndex } from "drizzle-orm/sqlite-core";
import { sql } from "drizzle-orm";
import { DEFAULT_WORKSPACE_MODEL_ID } from "@sandbox-agent/foundry-shared";
/**
 * Better Auth core model — schema defined at https://better-auth.com/docs/concepts/database
 *
 * Stored as a singleton row (physical `id` fixed to 1 by the CHECK below).
 * The Better Auth user id lives in `auth_user_id`; materializeRow in
 * query-helpers.ts maps it back to `id` before handing rows to Better Auth.
 */
export const authUsers = sqliteTable(
"user",
{
id: integer("id").primaryKey(),
authUserId: text("auth_user_id").notNull(),
name: text("name").notNull(),
email: text("email").notNull(),
emailVerified: integer("email_verified").notNull(),
image: text("image"),
createdAt: integer("created_at").notNull(),
updatedAt: integer("updated_at").notNull(),
},
(table) => ({
authUserIdIdx: uniqueIndex("user_auth_user_id_idx").on(table.authUserId),
// Enforces the singleton shape: no row other than id = 1 can be inserted.
singletonCheck: check("user_singleton_id_check", sql`${table.id} = 1`),
}),
);
/**
 * Better Auth core model — schema defined at https://better-auth.com/docs/concepts/database
 * One row per Better Auth session; `token` is unique so sessions can be
 * looked up by bearer token.
 */
export const authSessions = sqliteTable(
"session",
{
id: text("id").notNull().primaryKey(),
token: text("token").notNull(),
userId: text("user_id").notNull(),
expiresAt: integer("expires_at").notNull(),
ipAddress: text("ip_address"),
userAgent: text("user_agent"),
createdAt: integer("created_at").notNull(),
updatedAt: integer("updated_at").notNull(),
},
(table) => ({
tokenIdx: uniqueIndex("session_token_idx").on(table.token),
}),
);
/**
 * Better Auth core model — schema defined at https://better-auth.com/docs/concepts/database
 * Linked credential/OAuth accounts for the user; unique per
 * (provider_id, account_id) pair.
 */
export const authAccounts = sqliteTable(
"account",
{
id: text("id").notNull().primaryKey(),
accountId: text("account_id").notNull(),
providerId: text("provider_id").notNull(),
userId: text("user_id").notNull(),
accessToken: text("access_token"),
refreshToken: text("refresh_token"),
idToken: text("id_token"),
accessTokenExpiresAt: integer("access_token_expires_at"),
refreshTokenExpiresAt: integer("refresh_token_expires_at"),
scope: text("scope"),
password: text("password"),
createdAt: integer("created_at").notNull(),
updatedAt: integer("updated_at").notNull(),
},
(table) => ({
providerAccountIdx: uniqueIndex("account_provider_account_idx").on(table.providerId, table.accountId),
}),
);
/**
 * Custom Foundry table — not part of Better Auth.
 * Singleton profile row (id fixed to 1 by the CHECK below) holding Foundry
 * specific user data: GitHub identity, default model, eligible orgs, and
 * starter-repo onboarding state.
 */
export const userProfiles = sqliteTable(
"user_profiles",
{
id: integer("id").primaryKey(),
userId: text("user_id").notNull(),
githubAccountId: text("github_account_id"),
githubLogin: text("github_login"),
roleLabel: text("role_label").notNull(),
// Default comes from the shared constant so all packages agree on the model id.
defaultModel: text("default_model").notNull().default(DEFAULT_WORKSPACE_MODEL_ID),
// JSON-encoded string columns; parsed by callers.
eligibleOrganizationIdsJson: text("eligible_organization_ids_json").notNull(),
starterRepoStatus: text("starter_repo_status").notNull(),
starterRepoStarredAt: integer("starter_repo_starred_at"),
starterRepoSkippedAt: integer("starter_repo_skipped_at"),
createdAt: integer("created_at").notNull(),
updatedAt: integer("updated_at").notNull(),
},
(table) => ({
userIdIdx: uniqueIndex("user_profiles_user_id_idx").on(table.userId),
singletonCheck: check("user_profiles_singleton_id_check", sql`${table.id} = 1`),
}),
);
/**
 * Custom Foundry table — not part of Better Auth.
 * Per-auth-session state; currently tracks the session's active organization.
 */
export const sessionState = sqliteTable("session_state", {
sessionId: text("session_id").notNull().primaryKey(),
activeOrganizationId: text("active_organization_id"),
createdAt: integer("created_at").notNull(),
updatedAt: integer("updated_at").notNull(),
});
/**
 * Custom Foundry table — not part of Better Auth. Stores per-user task/session
 * UI state, keyed by (task_id, session_id). `unread` is an integer 0/1 flag
 * (userActions.getTaskState compares against 1); the draft columns hold the
 * per-session composer draft, with attachments JSON-encoded.
 */
export const userTaskState = sqliteTable(
"user_task_state",
{
taskId: text("task_id").notNull(),
sessionId: text("session_id").notNull(),
activeSessionId: text("active_session_id"),
unread: integer("unread").notNull().default(0),
draftText: text("draft_text").notNull().default(""),
draftAttachmentsJson: text("draft_attachments_json").notNull().default("[]"),
draftUpdatedAt: integer("draft_updated_at"),
updatedAt: integer("updated_at").notNull(),
},
(table) => ({
pk: primaryKey({ columns: [table.taskId, table.sessionId] }),
}),
);

View file

@ -0,0 +1,60 @@
import { actor } from "rivetkit";
import { userDb } from "./db/db.js";
import { betterAuthActions } from "./actions/better-auth.js";
import { userActions } from "./actions/user.js";
import {
createAuthRecordMutation,
updateAuthRecordMutation,
updateManyAuthRecordsMutation,
deleteAuthRecordMutation,
deleteManyAuthRecordsMutation,
upsertUserProfileMutation,
upsertSessionStateMutation,
upsertTaskStateMutation,
deleteTaskStateMutation,
} from "./workflow.js";
/**
 * Per-user actor: hosts the Better Auth tables plus custom Foundry user state
 * (profile, session state, per-task UI state) behind direct actions.
 */
export const user = actor({
  db: userDb,
  options: {
    name: "User",
    icon: "shield",
    actionTimeout: 60_000,
  },
  // Actor state only pins the owning user id; all persisted data lives in the DB.
  createState: (_c, input: { userId: string }) => ({
    userId: input.userId,
  }),
  actions: {
    // Read-side adapter actions plus custom read actions.
    ...betterAuthActions,
    ...userActions,
    // Better Auth adapter mutations, delegated to the workflow.js helpers.
    async authCreate(ctx, payload) {
      const record = await createAuthRecordMutation(ctx, payload);
      return record;
    },
    async authUpdate(ctx, payload) {
      const record = await updateAuthRecordMutation(ctx, payload);
      return record;
    },
    async authUpdateMany(ctx, payload) {
      const result = await updateManyAuthRecordsMutation(ctx, payload);
      return result;
    },
    async authDelete(ctx, payload) {
      await deleteAuthRecordMutation(ctx, payload);
      return { ok: true };
    },
    async authDeleteMany(ctx, payload) {
      const result = await deleteManyAuthRecordsMutation(ctx, payload);
      return result;
    },
    // Custom Foundry mutations.
    async profileUpsert(ctx, payload) {
      const result = await upsertUserProfileMutation(ctx, payload);
      return result;
    },
    async sessionStateUpsert(ctx, payload) {
      const result = await upsertSessionStateMutation(ctx, payload);
      return result;
    },
    async taskStateUpsert(ctx, payload) {
      const result = await upsertTaskStateMutation(ctx, payload);
      return result;
    },
    async taskStateDelete(ctx, payload) {
      await deleteTaskStateMutation(ctx, payload);
      return { ok: true };
    },
  },
});

View file

@ -0,0 +1,197 @@
import { and, eq, inArray, isNotNull, isNull, like, lt, lte, gt, gte, ne, notInArray, or } from "drizzle-orm";
import { authAccounts, authSessions, authUsers, sessionState, userProfiles, userTaskState } from "./db/schema.js";
// Maps Better Auth model names ("user", "session", "account") and custom
// Foundry table keys to their drizzle table objects; resolved by tableFor.
export const userTables = {
user: authUsers,
session: authSessions,
account: authAccounts,
userProfiles,
sessionState,
userTaskState,
} as const;
/** Resolves a model name to its drizzle table; throws for unknown models. */
export function tableFor(model: string) {
  const entry = userTables[model as keyof typeof userTables];
  if (entry) {
    return entry as any;
  }
  throw new Error(`Unsupported user model: ${model}`);
}
// Translates a Better Auth field name to the drizzle property name. The only
// special case is `user.id`, which is persisted in the `authUserId` column
// (the physical `id` column is the singleton key).
function dbFieldFor(model: string, field: string): string {
  const isSingletonUserId = model === "user" && field === "id";
  return isSingletonUserId ? "authUserId" : field;
}
/**
 * Converts a stored row into the shape Better Auth expects. Only the `user`
 * model needs translation: the singleton numeric `id` is dropped and the
 * stored `authUserId` is surfaced as `id`. All other models pass through.
 */
export function materializeRow(model: string, row: any) {
  if (model !== "user" || !row) {
    return row;
  }
  const { id: _singletonId, authUserId, ...rest } = row;
  return { id: authUserId, ...rest };
}
/**
 * Converts a Better Auth create payload into the stored shape. For the `user`
 * model, the incoming Better Auth `id` is moved into `authUserId` and the
 * physical singleton `id` is pinned to 1. Other models are stored verbatim.
 */
export function persistInput(model: string, data: Record<string, unknown>) {
  if (model !== "user") {
    return data;
  }
  const { id: betterAuthId, ...rest } = data;
  return { id: 1, authUserId: betterAuthId, ...rest };
}
/**
 * Converts a Better Auth update payload into the stored shape. For the `user`
 * model an `id` patch is redirected to `authUserId` (and omitted entirely if
 * absent); all other fields and models pass through unchanged.
 */
export function persistPatch(model: string, data: Record<string, unknown>) {
  if (model !== "user") {
    return data;
  }
  const { id, ...rest } = data;
  if (id === undefined) {
    return { ...rest };
  }
  return { authUserId: id, ...rest };
}
/** Looks up the drizzle column for a Better Auth field; throws if missing. */
export function columnFor(model: string, table: any, field: string) {
  const propertyName = dbFieldFor(model, field);
  const column = table[propertyName];
  if (column) {
    return column;
  }
  throw new Error(`Unsupported user field: ${model}.${field}`);
}
/**
 * Normalizes a Better Auth filter value for SQLite storage: Dates become
 * epoch milliseconds, arrays are normalized element-wise (recursively), and
 * everything else passes through untouched.
 */
export function normalizeValue(value: unknown): unknown {
  if (Array.isArray(value)) {
    return value.map((entry) => normalizeValue(entry));
  }
  return value instanceof Date ? value.getTime() : value;
}
/**
 * Translates a single Better Auth where-clause into a drizzle expression.
 * Null eq/ne map to IS NULL / IS NOT NULL; Date values are converted to epoch
 * millis by normalizeValue before comparison; unknown operators fall back to
 * equality.
 */
export function clauseToExpr(table: any, clause: any) {
  // Only the three Better Auth tables need a model name (for the user.id ->
  // authUserId field mapping); other tables resolve fields verbatim via "".
  let model = "";
  if (table === authUsers) {
    model = "user";
  } else if (table === authSessions) {
    model = "session";
  } else if (table === authAccounts) {
    model = "account";
  }
  const column = columnFor(model, table, clause.field);
  const normalized = normalizeValue(clause.value);
  const asList = () => (Array.isArray(normalized) ? (normalized as any[]) : [normalized as any]);
  switch (clause.operator) {
    case "ne":
      return normalized === null ? isNotNull(column) : ne(column, normalized as any);
    case "lt":
      return lt(column, normalized as any);
    case "lte":
      return lte(column, normalized as any);
    case "gt":
      return gt(column, normalized as any);
    case "gte":
      return gte(column, normalized as any);
    case "in":
      return inArray(column, asList());
    case "not_in":
      return notInArray(column, asList());
    case "contains":
      return like(column, `%${String(normalized ?? "")}%`);
    case "starts_with":
      return like(column, `${String(normalized ?? "")}%`);
    case "ends_with":
      return like(column, `%${String(normalized ?? "")}`);
    case "eq":
    default:
      return normalized === null ? isNull(column) : eq(column, normalized as any);
  }
}
/**
 * Fold a Better Auth where-clause list into one drizzle expression.
 * Clauses combine left-to-right; each clause's `connector` ("OR", or
 * anything else treated as AND) joins it to the accumulated expression.
 * Returns undefined when no clauses are given (match everything).
 */
export function buildWhere(table: any, where: any[] | undefined) {
  if (!where?.length) {
    return undefined;
  }
  return where.slice(1).reduce((combined, clause) => {
    const next = clauseToExpr(table, clause);
    return clause.connector === "OR" ? or(combined, next) : and(combined, next);
  }, clauseToExpr(table, where[0]));
}
export function applyJoinToRow(c: any, model: string, row: any, join: any) {
const materialized = materializeRow(model, row);
if (!materialized || !join) {
return materialized;
}
if (model === "session" && join.user) {
return c.db
.select()
.from(authUsers)
.where(eq(authUsers.authUserId, materialized.userId))
.get()
.then((user: any) => ({ ...materialized, user: materializeRow("user", user) ?? null }));
}
if (model === "account" && join.user) {
return c.db
.select()
.from(authUsers)
.where(eq(authUsers.authUserId, materialized.userId))
.get()
.then((user: any) => ({ ...materialized, user: materializeRow("user", user) ?? null }));
}
if (model === "user" && join.account) {
return c.db
.select()
.from(authAccounts)
.where(eq(authAccounts.userId, materialized.id))
.all()
.then((accounts: any[]) => ({ ...materialized, account: accounts }));
}
return Promise.resolve(materialized);
}
/**
 * Resolve the optional `join` relation for a batch of rows using one query
 * per relation (avoids per-row N+1 lookups).
 *
 * The original duplicated the session.user and account.user branches
 * verbatim; both row shapes carry a `userId` foreign key, so they now share
 * one implementation.
 */
export async function applyJoinToRows(c: any, model: string, rows: any[], join: any) {
  if (!join || rows.length === 0) {
    return rows.map((row) => materializeRow(model, row));
  }
  if ((model === "session" || model === "account") && join.user) {
    const userIds = [...new Set(rows.map((row) => row.userId).filter(Boolean))];
    const users = userIds.length > 0 ? await c.db.select().from(authUsers).where(inArray(authUsers.authUserId, userIds)).all() : [];
    const userMap = new Map(users.map((user: any) => [user.authUserId, materializeRow("user", user)]));
    // session/account rows need no materialization of their own fields.
    return rows.map((row) => ({ ...row, user: userMap.get(row.userId) ?? null }));
  }
  if (model === "user" && join.account) {
    const materializedRows = rows.map((row) => materializeRow("user", row));
    const userIds = materializedRows.map((row) => row.id);
    const accounts = userIds.length > 0 ? await c.db.select().from(authAccounts).where(inArray(authAccounts.userId, userIds)).all() : [];
    // Group the accounts by owning user id for O(1) attachment below.
    const accountsByUserId = new Map<string, any[]>();
    for (const account of accounts) {
      const entries = accountsByUserId.get(account.userId) ?? [];
      entries.push(account);
      accountsByUserId.set(account.userId, entries);
    }
    return materializedRows.map((row) => ({ ...row, account: accountsByUserId.get(row.id) ?? [] }));
  }
  return rows.map((row) => materializeRow(model, row));
}

View file

@ -0,0 +1,197 @@
import { eq, count as sqlCount, and } from "drizzle-orm";
import { DEFAULT_WORKSPACE_MODEL_ID } from "@sandbox-agent/foundry-shared";
import { authUsers, sessionState, userProfiles, userTaskState } from "./db/schema.js";
import { buildWhere, columnFor, materializeRow, persistInput, persistPatch, tableFor } from "./query-helpers.js";
/**
 * Insert a Better Auth record and return it re-read from the database,
 * materialized back into its logical (Better Auth) shape so database
 * defaults are reflected in the result.
 */
export async function createAuthRecordMutation(c: any, input: { model: string; data: Record<string, unknown> }) {
  const table = tableFor(input.model);
  // persistInput rewrites user rows into the physical singleton shape.
  const values = persistInput(input.model, input.data);
  await c.db.insert(table).values(values as any).run();
  // Re-select by the logical id; columnFor handles the user.id alias.
  const idColumn = columnFor(input.model, table, "id");
  const inserted = await c.db.select().from(table).where(eq(idColumn, input.data.id as any)).get();
  return materializeRow(input.model, inserted);
}
export async function updateAuthRecordMutation(c: any, input: { model: string; where: any[]; update: Record<string, unknown> }) {
const table = tableFor(input.model);
const predicate = buildWhere(table, input.where);
if (!predicate) throw new Error("updateAuthRecord requires a where clause");
await c.db
.update(table)
.set(persistPatch(input.model, input.update) as any)
.where(predicate)
.run();
return materializeRow(input.model, await c.db.select().from(table).where(predicate).get());
}
/**
 * Update every Better Auth record matching `where`; returns the number of
 * rows affected.
 *
 * Fix: the original counted AFTER the update using the same predicate, so
 * when the update wrote to a column the where clause matched on, the
 * post-update count under-reported (often 0). The count is now taken
 * before the update runs, which is the number of rows the UPDATE targets.
 */
export async function updateManyAuthRecordsMutation(c: any, input: { model: string; where: any[]; update: Record<string, unknown> }) {
  const table = tableFor(input.model);
  const predicate = buildWhere(table, input.where);
  if (!predicate) throw new Error("updateManyAuthRecords requires a where clause");
  const counted = await c.db.select({ value: sqlCount() }).from(table).where(predicate).get();
  await c.db
    .update(table)
    .set(persistPatch(input.model, input.update) as any)
    .where(predicate)
    .run();
  return counted?.value ?? 0;
}
/**
 * Delete the Better Auth record(s) matching `where`. A where clause is
 * mandatory — wiping an entire table through this path is never intended.
 */
export async function deleteAuthRecordMutation(c: any, input: { model: string; where: any[] }) {
  const table = tableFor(input.model);
  const predicate = buildWhere(table, input.where);
  if (predicate === undefined) {
    throw new Error("deleteAuthRecord requires a where clause");
  }
  await c.db.delete(table).where(predicate).run();
}
export async function deleteManyAuthRecordsMutation(c: any, input: { model: string; where: any[] }) {
const table = tableFor(input.model);
const predicate = buildWhere(table, input.where);
if (!predicate) throw new Error("deleteManyAuthRecords requires a where clause");
const rows = await c.db.select().from(table).where(predicate).all();
await c.db.delete(table).where(predicate).run();
return rows.length;
}
/**
 * Insert or update the singleton profile row for `userId`.
 *
 * On first insert, missing patch fields receive their defaults (including
 * the NOT NULL defaultModel). On conflict (row already exists), only the
 * patch fields that were explicitly provided overwrite stored values —
 * `undefined` means "leave the stored value alone". Returns the row as
 * stored after the upsert.
 */
export async function upsertUserProfileMutation(
  c: any,
  input: {
    userId: string;
    patch: {
      githubAccountId?: string | null;
      githubLogin?: string | null;
      roleLabel?: string;
      defaultModel?: string;
      eligibleOrganizationIdsJson?: string;
      starterRepoStatus?: string;
      starterRepoStarredAt?: number | null;
      starterRepoSkippedAt?: number | null;
    };
  },
) {
  const now = Date.now();
  const { patch } = input;
  // Explicit whitelist: only these keys may reach the conflict-update SET,
  // even if a caller smuggles extra properties into the patch object.
  const patchKeys = [
    "githubAccountId",
    "githubLogin",
    "roleLabel",
    "defaultModel",
    "eligibleOrganizationIdsJson",
    "starterRepoStatus",
    "starterRepoStarredAt",
    "starterRepoSkippedAt",
  ] as const;
  const conflictSet: Record<string, unknown> = {};
  for (const key of patchKeys) {
    if (patch[key] !== undefined) {
      conflictSet[key] = patch[key];
    }
  }
  conflictSet.updatedAt = now;
  await c.db
    .insert(userProfiles)
    .values({
      id: 1,
      userId: input.userId,
      githubAccountId: patch.githubAccountId ?? null,
      githubLogin: patch.githubLogin ?? null,
      roleLabel: patch.roleLabel ?? "GitHub user",
      defaultModel: patch.defaultModel ?? DEFAULT_WORKSPACE_MODEL_ID,
      eligibleOrganizationIdsJson: patch.eligibleOrganizationIdsJson ?? "[]",
      starterRepoStatus: patch.starterRepoStatus ?? "pending",
      starterRepoStarredAt: patch.starterRepoStarredAt ?? null,
      starterRepoSkippedAt: patch.starterRepoSkippedAt ?? null,
      createdAt: now,
      updatedAt: now,
    })
    .onConflictDoUpdate({
      target: userProfiles.userId,
      set: conflictSet,
    })
    .run();
  return await c.db.select().from(userProfiles).where(eq(userProfiles.userId, input.userId)).get();
}
/**
 * Insert or update the per-session state row, setting which organization is
 * active for the session (null clears it). Returns the stored row.
 */
export async function upsertSessionStateMutation(c: any, input: { sessionId: string; activeOrganizationId: string | null }) {
  const now = Date.now();
  const freshRow = {
    sessionId: input.sessionId,
    activeOrganizationId: input.activeOrganizationId,
    createdAt: now,
    updatedAt: now,
  };
  await c.db
    .insert(sessionState)
    .values(freshRow)
    .onConflictDoUpdate({
      target: sessionState.sessionId,
      // Existing rows keep their createdAt; only the active org and the
      // update timestamp change.
      set: { activeOrganizationId: input.activeOrganizationId, updatedAt: now },
    })
    .run();
  return await c.db.select().from(sessionState).where(eq(sessionState.sessionId, input.sessionId)).get();
}
/**
 * Create or update the per-(task, session) UI state row: which agent
 * session is active, the unread flag, and the draft message buffer.
 * Omitted patch fields fall back to the stored values (or to defaults if
 * no row exists yet). Returns the row as stored after the upsert.
 */
export async function upsertTaskStateMutation(
  c: any,
  input: {
    taskId: string;
    sessionId: string;
    patch: {
      activeSessionId?: string | null;
      unread?: boolean;
      draftText?: string;
      draftAttachmentsJson?: string;
      draftUpdatedAt?: number | null;
    };
  },
) {
  const now = Date.now();
  // Load the existing row (if any) so omitted patch fields can fall back to
  // their stored values when the upsert inserts a fresh row.
  const existing = await c.db
    .select()
    .from(userTaskState)
    .where(and(eq(userTaskState.taskId, input.taskId), eq(userTaskState.sessionId, input.sessionId)))
    .get();
  if (input.patch.activeSessionId !== undefined) {
    // NOTE(review): this update is keyed by taskId only (no sessionId
    // filter), so the new activeSessionId fans out to every session's row
    // for the task — presumably so all of the user's browser sessions agree
    // on the active agent session; confirm the cross-session write is
    // intentional.
    await c.db.update(userTaskState).set({ activeSessionId: input.patch.activeSessionId, updatedAt: now }).where(eq(userTaskState.taskId, input.taskId)).run();
  }
  await c.db
    .insert(userTaskState)
    .values({
      taskId: input.taskId,
      sessionId: input.sessionId,
      activeSessionId: input.patch.activeSessionId ?? existing?.activeSessionId ?? null,
      // unread is persisted as a 0/1 integer column.
      unread: input.patch.unread !== undefined ? (input.patch.unread ? 1 : 0) : (existing?.unread ?? 0),
      draftText: input.patch.draftText ?? existing?.draftText ?? "",
      draftAttachmentsJson: input.patch.draftAttachmentsJson ?? existing?.draftAttachmentsJson ?? "[]",
      // draftUpdatedAt may legitimately be patched to null, so use an
      // explicit undefined check rather than ??.
      draftUpdatedAt: input.patch.draftUpdatedAt === undefined ? (existing?.draftUpdatedAt ?? null) : input.patch.draftUpdatedAt,
      updatedAt: now,
    })
    .onConflictDoUpdate({
      target: [userTaskState.taskId, userTaskState.sessionId],
      // On conflict, only explicitly-provided patch fields overwrite the row.
      set: {
        ...(input.patch.activeSessionId !== undefined ? { activeSessionId: input.patch.activeSessionId } : {}),
        ...(input.patch.unread !== undefined ? { unread: input.patch.unread ? 1 : 0 } : {}),
        ...(input.patch.draftText !== undefined ? { draftText: input.patch.draftText } : {}),
        ...(input.patch.draftAttachmentsJson !== undefined ? { draftAttachmentsJson: input.patch.draftAttachmentsJson } : {}),
        ...(input.patch.draftUpdatedAt !== undefined ? { draftUpdatedAt: input.patch.draftUpdatedAt } : {}),
        updatedAt: now,
      },
    })
    .run();
  // Re-read so the caller gets exactly what the database now holds.
  return await c.db
    .select()
    .from(userTaskState)
    .where(and(eq(userTaskState.taskId, input.taskId), eq(userTaskState.sessionId, input.sessionId)))
    .get();
}
/**
 * Delete task UI state: when sessionId is given, only that (task, session)
 * row; otherwise every row for the task across all sessions.
 */
export async function deleteTaskStateMutation(c: any, input: { taskId: string; sessionId?: string }) {
  const byTask = eq(userTaskState.taskId, input.taskId);
  const predicate = input.sessionId ? and(byTask, eq(userTaskState.sessionId, input.sessionId)) : byTask;
  await c.db.delete(userTaskState).where(predicate).run();
}

View file

@ -10,7 +10,7 @@ import { createDefaultDriver } from "./driver.js";
import { createClient } from "rivetkit/client";
import { initBetterAuthService } from "./services/better-auth.js";
import { createDefaultAppShellServices } from "./services/app-shell-runtime.js";
import { APP_SHELL_ORGANIZATION_ID } from "./actors/organization/app-shell.js";
import { APP_SHELL_ORGANIZATION_ID } from "./actors/organization/constants.js";
import { logger } from "./logging.js";
export interface BackendStartOptions {
@ -48,6 +48,19 @@ function isRivetRequest(request: Request): boolean {
}
export async function startBackend(options: BackendStartOptions = {}): Promise<void> {
// Prevent the sandbox-agent SDK's unhandled SQLite constraint errors from
// crashing the entire process. The SDK has a bug where duplicate event
// inserts (sandbox_agent_events UNIQUE constraint) throw from an internal
// async path with no catch. Log and continue.
process.on("uncaughtException", (error) => {
logger.error({ error: error?.message ?? String(error), stack: error?.stack }, "uncaughtException (kept alive)");
});
process.on("unhandledRejection", (reason) => {
const msg = reason instanceof Error ? reason.message : String(reason);
const stack = reason instanceof Error ? reason.stack : undefined;
logger.error({ error: msg, stack }, "unhandledRejection (kept alive)");
});
// sandbox-agent agent plugins vary on which env var they read for OpenAI/Codex auth.
// Normalize to keep local dev + docker-compose simple.
if (!process.env.CODEX_API_KEY && process.env.OPENAI_API_KEY) {

View file

@ -1,8 +1,11 @@
import { betterAuth } from "better-auth";
import { createAdapterFactory } from "better-auth/adapters";
import { APP_SHELL_ORGANIZATION_ID } from "../actors/organization/app-shell.js";
import { authUserKey, organizationKey } from "../actors/keys.js";
import { APP_SHELL_ORGANIZATION_ID } from "../actors/organization/constants.js";
// organization actions are called directly (no queue)
// user actor actions are called directly (no queue)
import { organizationKey, userKey } from "../actors/keys.js";
import { logger } from "../logging.js";
// expectQueueResponse removed — actions return values directly
const AUTH_BASE_PATH = "/v1/auth";
const SESSION_COOKIE = "better-auth.session_token";
@ -59,6 +62,8 @@ function resolveRouteUserId(organization: any, resolved: any): string | null {
return null;
}
// sendOrganizationCommand removed — org actions are called directly
export interface BetterAuthService {
auth: any;
resolveSession(headers: Headers): Promise<{ session: any; user: any } | null>;
@ -75,7 +80,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
}
// getOrCreate is intentional here: the adapter runs during Better Auth callbacks
// which can fire before any explicit create path. The app organization and auth user
// which can fire before any explicit create path. The app organization and user
// actors must exist by the time the adapter needs them.
const appOrganization = () =>
actorClient.organization.getOrCreate(organizationKey(APP_SHELL_ORGANIZATION_ID), {
@ -83,9 +88,9 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
});
// getOrCreate is intentional: Better Auth creates user records during OAuth
// callbacks, so the auth-user actor must be lazily provisioned on first access.
const getAuthUser = async (userId: string) =>
await actorClient.authUser.getOrCreate(authUserKey(userId), {
// callbacks, so the user actor must be lazily provisioned on first access.
const getUser = async (userId: string) =>
await actorClient.user.getOrCreate(userKey(userId), {
createWithInput: { userId },
});
@ -110,7 +115,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
const email = direct("email");
if (typeof email === "string" && email.length > 0) {
const organization = await appOrganization();
const resolved = await organization.authFindEmailIndex({ email: email.toLowerCase() });
const resolved = await organization.betterAuthFindEmailIndex({ email: email.toLowerCase() });
return resolveRouteUserId(organization, resolved);
}
return null;
@ -125,7 +130,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
const sessionToken = direct("token") ?? data?.token;
if (typeof sessionId === "string" || typeof sessionToken === "string") {
const organization = await appOrganization();
const resolved = await organization.authFindSessionIndex({
const resolved = await organization.betterAuthFindSessionIndex({
...(typeof sessionId === "string" ? { sessionId } : {}),
...(typeof sessionToken === "string" ? { sessionToken } : {}),
});
@ -144,11 +149,11 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
const accountId = direct("accountId") ?? data?.accountId;
const organization = await appOrganization();
if (typeof accountRecordId === "string" && accountRecordId.length > 0) {
const resolved = await organization.authFindAccountIndex({ id: accountRecordId });
const resolved = await organization.betterAuthFindAccountIndex({ id: accountRecordId });
return resolveRouteUserId(organization, resolved);
}
if (typeof providerId === "string" && providerId.length > 0 && typeof accountId === "string" && accountId.length > 0) {
const resolved = await organization.authFindAccountIndex({ providerId, accountId });
const resolved = await organization.betterAuthFindAccountIndex({ providerId, accountId });
return resolveRouteUserId(organization, resolved);
}
return null;
@ -157,9 +162,9 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
return null;
};
const ensureOrganizationVerification = async (method: string, payload: Record<string, unknown>) => {
const ensureOrganizationVerification = async (actionName: string, payload: Record<string, unknown>) => {
const organization = await appOrganization();
return await organization[method](payload);
return await (organization as any)[actionName](payload);
};
return {
@ -170,7 +175,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
create: async ({ model, data }) => {
const transformed = await transformInput(data, model, "create", true);
if (model === "verification") {
return await ensureOrganizationVerification("authCreateVerification", { data: transformed });
return await ensureOrganizationVerification("commandBetterAuthVerificationCreate", { data: transformed });
}
const userId = await resolveUserIdForQuery(model, undefined, transformed);
@ -178,19 +183,19 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
throw new Error(`Unable to resolve auth actor for create(${model})`);
}
const userActor = await getAuthUser(userId);
const created = await userActor.createAuthRecord({ model, data: transformed });
const userActor = await getUser(userId);
const created = await userActor.authCreate({ model, data: transformed });
const organization = await appOrganization();
if (model === "user" && typeof transformed.email === "string" && transformed.email.length > 0) {
await organization.authUpsertEmailIndex({
await organization.commandBetterAuthEmailIndexUpsert({
email: transformed.email.toLowerCase(),
userId,
});
}
if (model === "session") {
await organization.authUpsertSessionIndex({
await organization.commandBetterAuthSessionIndexUpsert({
sessionId: String(created.id),
sessionToken: String(created.token),
userId,
@ -198,7 +203,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
}
if (model === "account") {
await organization.authUpsertAccountIndex({
await organization.commandBetterAuthAccountIndexUpsert({
id: String(created.id),
providerId: String(created.providerId),
accountId: String(created.accountId),
@ -212,7 +217,8 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
findOne: async ({ model, where, join }) => {
const transformedWhere = transformWhereClause({ model, where, action: "findOne" });
if (model === "verification") {
return await ensureOrganizationVerification("authFindOneVerification", { where: transformedWhere, join });
const organization = await appOrganization();
return await organization.betterAuthFindOneVerification({ where: transformedWhere, join });
}
const userId = await resolveUserIdForQuery(model, transformedWhere);
@ -220,15 +226,16 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
return null;
}
const userActor = await getAuthUser(userId);
const found = await userActor.findOneAuthRecord({ model, where: transformedWhere, join });
const userActor = await getUser(userId);
const found = await userActor.betterAuthFindOneRecord({ model, where: transformedWhere, join });
return found ? ((await transformOutput(found, model, undefined, join)) as any) : null;
},
findMany: async ({ model, where, limit, sortBy, offset, join }) => {
const transformedWhere = transformWhereClause({ model, where, action: "findMany" });
if (model === "verification") {
return await ensureOrganizationVerification("authFindManyVerification", {
const organization = await appOrganization();
return await organization.betterAuthFindManyVerification({
where: transformedWhere,
limit,
sortBy,
@ -244,7 +251,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
const resolved = await Promise.all(
(tokenClause.value as string[]).map(async (sessionToken: string) => ({
sessionToken,
route: await organization.authFindSessionIndex({ sessionToken }),
route: await organization.betterAuthFindSessionIndex({ sessionToken }),
})),
);
const byUser = new Map<string, string[]>();
@ -259,11 +266,11 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
const rows = [];
for (const [userId, tokens] of byUser) {
const userActor = await getAuthUser(userId);
const userActor = await getUser(userId);
const scopedWhere = transformedWhere.map((entry: any) =>
entry.field === "token" && entry.operator === "in" ? { ...entry, value: tokens } : entry,
);
const found = await userActor.findManyAuthRecords({ model, where: scopedWhere, limit, sortBy, offset, join });
const found = await userActor.betterAuthFindManyRecords({ model, where: scopedWhere, limit, sortBy, offset, join });
rows.push(...found);
}
return await Promise.all(rows.map(async (row: any) => await transformOutput(row, model, undefined, join)));
@ -275,8 +282,8 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
return [];
}
const userActor = await getAuthUser(userId);
const found = await userActor.findManyAuthRecords({ model, where: transformedWhere, limit, sortBy, offset, join });
const userActor = await getUser(userId);
const found = await userActor.betterAuthFindManyRecords({ model, where: transformedWhere, limit, sortBy, offset, join });
return await Promise.all(found.map(async (row: any) => await transformOutput(row, model, undefined, join)));
},
@ -284,7 +291,10 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
const transformedWhere = transformWhereClause({ model, where, action: "update" });
const transformedUpdate = (await transformInput(update as Record<string, unknown>, model, "update", true)) as Record<string, unknown>;
if (model === "verification") {
return await ensureOrganizationVerification("authUpdateVerification", { where: transformedWhere, update: transformedUpdate });
return await ensureOrganizationVerification("commandBetterAuthVerificationUpdate", {
where: transformedWhere,
update: transformedUpdate,
});
}
const userId = await resolveUserIdForQuery(model, transformedWhere, transformedUpdate);
@ -292,29 +302,34 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
return null;
}
const userActor = await getAuthUser(userId);
const userActor = await getUser(userId);
const before =
model === "user"
? await userActor.findOneAuthRecord({ model, where: transformedWhere })
? await userActor.betterAuthFindOneRecord({ model, where: transformedWhere })
: model === "account"
? await userActor.findOneAuthRecord({ model, where: transformedWhere })
? await userActor.betterAuthFindOneRecord({ model, where: transformedWhere })
: model === "session"
? await userActor.findOneAuthRecord({ model, where: transformedWhere })
? await userActor.betterAuthFindOneRecord({ model, where: transformedWhere })
: null;
const updated = await userActor.updateAuthRecord({ model, where: transformedWhere, update: transformedUpdate });
const updated = await userActor.authUpdate({ model, where: transformedWhere, update: transformedUpdate });
const organization = await appOrganization();
if (model === "user" && updated) {
if (before?.email && before.email !== updated.email) {
await organization.authDeleteEmailIndex({ email: before.email.toLowerCase() });
await organization.commandBetterAuthEmailIndexDelete({
email: before.email.toLowerCase(),
});
}
if (updated.email) {
await organization.authUpsertEmailIndex({ email: updated.email.toLowerCase(), userId });
await organization.commandBetterAuthEmailIndexUpsert({
email: updated.email.toLowerCase(),
userId,
});
}
}
if (model === "session" && updated) {
await organization.authUpsertSessionIndex({
await organization.commandBetterAuthSessionIndexUpsert({
sessionId: String(updated.id),
sessionToken: String(updated.token),
userId,
@ -322,7 +337,7 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
}
if (model === "account" && updated) {
await organization.authUpsertAccountIndex({
await organization.commandBetterAuthAccountIndexUpsert({
id: String(updated.id),
providerId: String(updated.providerId),
accountId: String(updated.accountId),
@ -337,7 +352,10 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
const transformedWhere = transformWhereClause({ model, where, action: "updateMany" });
const transformedUpdate = (await transformInput(update as Record<string, unknown>, model, "update", true)) as Record<string, unknown>;
if (model === "verification") {
return await ensureOrganizationVerification("authUpdateManyVerification", { where: transformedWhere, update: transformedUpdate });
return await ensureOrganizationVerification("commandBetterAuthVerificationUpdateMany", {
where: transformedWhere,
update: transformedUpdate,
});
}
const userId = await resolveUserIdForQuery(model, transformedWhere, transformedUpdate);
@ -345,14 +363,15 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
return 0;
}
const userActor = await getAuthUser(userId);
return await userActor.updateManyAuthRecords({ model, where: transformedWhere, update: transformedUpdate });
const userActor = await getUser(userId);
return await userActor.authUpdateMany({ model, where: transformedWhere, update: transformedUpdate });
},
delete: async ({ model, where }) => {
const transformedWhere = transformWhereClause({ model, where, action: "delete" });
if (model === "verification") {
await ensureOrganizationVerification("authDeleteVerification", { where: transformedWhere });
const organization = await appOrganization();
await organization.commandBetterAuthVerificationDelete({ where: transformedWhere });
return;
}
@ -361,20 +380,20 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
return;
}
const userActor = await getAuthUser(userId);
const userActor = await getUser(userId);
const organization = await appOrganization();
const before = await userActor.findOneAuthRecord({ model, where: transformedWhere });
await userActor.deleteAuthRecord({ model, where: transformedWhere });
const before = await userActor.betterAuthFindOneRecord({ model, where: transformedWhere });
await userActor.authDelete({ model, where: transformedWhere });
if (model === "session" && before) {
await organization.authDeleteSessionIndex({
await organization.commandBetterAuthSessionIndexDelete({
sessionId: before.id,
sessionToken: before.token,
});
}
if (model === "account" && before) {
await organization.authDeleteAccountIndex({
await organization.commandBetterAuthAccountIndexDelete({
id: before.id,
providerId: before.providerId,
accountId: before.accountId,
@ -382,14 +401,16 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
}
if (model === "user" && before?.email) {
await organization.authDeleteEmailIndex({ email: before.email.toLowerCase() });
await organization.commandBetterAuthEmailIndexDelete({
email: before.email.toLowerCase(),
});
}
},
deleteMany: async ({ model, where }) => {
const transformedWhere = transformWhereClause({ model, where, action: "deleteMany" });
if (model === "verification") {
return await ensureOrganizationVerification("authDeleteManyVerification", { where: transformedWhere });
return await ensureOrganizationVerification("commandBetterAuthVerificationDeleteMany", { where: transformedWhere });
}
if (model === "session") {
@ -397,12 +418,12 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
if (!userId) {
return 0;
}
const userActor = await getAuthUser(userId);
const userActor = await getUser(userId);
const organization = await appOrganization();
const sessions = await userActor.findManyAuthRecords({ model, where: transformedWhere, limit: 5000 });
const deleted = await userActor.deleteManyAuthRecords({ model, where: transformedWhere });
const sessions = await userActor.betterAuthFindManyRecords({ model, where: transformedWhere, limit: 5000 });
const deleted = await userActor.authDeleteMany({ model, where: transformedWhere });
for (const session of sessions) {
await organization.authDeleteSessionIndex({
await organization.commandBetterAuthSessionIndexDelete({
sessionId: session.id,
sessionToken: session.token,
});
@ -415,15 +436,16 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
return 0;
}
const userActor = await getAuthUser(userId);
const deleted = await userActor.deleteManyAuthRecords({ model, where: transformedWhere });
const userActor = await getUser(userId);
const deleted = await userActor.authDeleteMany({ model, where: transformedWhere });
return deleted;
},
count: async ({ model, where }) => {
const transformedWhere = transformWhereClause({ model, where, action: "count" });
if (model === "verification") {
return await ensureOrganizationVerification("authCountVerification", { where: transformedWhere });
const organization = await appOrganization();
return await organization.betterAuthCountVerification({ where: transformedWhere });
}
const userId = await resolveUserIdForQuery(model, transformedWhere);
@ -431,8 +453,8 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
return 0;
}
const userActor = await getAuthUser(userId);
return await userActor.countAuthRecords({ model, where: transformedWhere });
const userActor = await getUser(userId);
return await userActor.betterAuthCountRecords({ model, where: transformedWhere });
},
};
},
@ -477,17 +499,17 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
async getAuthState(sessionId: string) {
const organization = await appOrganization();
const route = await organization.authFindSessionIndex({ sessionId });
const route = await organization.betterAuthFindSessionIndex({ sessionId });
if (!route?.userId) {
return null;
}
const userActor = await getAuthUser(route.userId);
const userActor = await getUser(route.userId);
return await userActor.getAppAuthState({ sessionId });
},
async upsertUserProfile(userId: string, patch: Record<string, unknown>) {
const userActor = await getAuthUser(userId);
return await userActor.upsertUserProfile({ userId, patch });
const userActor = await getUser(userId);
return await userActor.profileUpsert({ userId, patch });
},
async setActiveOrganization(sessionId: string, activeOrganizationId: string | null) {
@ -495,8 +517,8 @@ export function initBetterAuthService(actorClient: any, options: { apiUrl: strin
if (!authState?.user?.id) {
throw new Error(`Unknown auth session ${sessionId}`);
}
const userActor = await getAuthUser(authState.user.id);
return await userActor.upsertSessionState({ sessionId, activeOrganizationId });
const userActor = await getUser(authState.user.id);
return await userActor.sessionStateUpsert({ sessionId, activeOrganizationId });
},
async getAccessTokenForSession(sessionId: string) {

View file

@ -0,0 +1,584 @@
// Auto-generated list of branch name prefixes used to build human-friendly
// random branch names.
// Source: McMaster-Carr product catalog category slugs.
// Entries are lowercase kebab-case, alphabetically sorted, and unique; keep
// those invariants when regenerating. (The previous generation included the
// typo entry "power-supplied", a duplicate of "power-supplies"; removed.)
// NOTE: the explicit `readonly string[]` annotation widens the type, so a
// trailing `as const` would have no effect and is intentionally omitted.
export const BRANCH_NAME_PREFIXES: readonly string[] = [
  "abrasive-blasters",
  "ac-motors",
  "access-doors",
  "adjustable-handles",
  "aerosol-paint",
  "air-cleaners",
  "air-cylinders",
  "air-filters",
  "air-hose",
  "air-knives",
  "air-nozzles",
  "air-regulators",
  "air-ride-wheels",
  "air-slides",
  "alligator-clips",
  "alloy-steel",
  "aluminum-honeycomb",
  "angle-indicators",
  "antiseize-lubricants",
  "antislip-fluid",
  "backlight-panel-kits",
  "ball-bearings",
  "ball-end-mills",
  "ball-joint-linkages",
  "ball-transfers",
  "band-clamps",
  "band-saw-blades",
  "bar-clamps",
  "bar-grating",
  "barbed-hose-fittings",
  "barbed-tube-fittings",
  "basket-strainers",
  "batch-cans",
  "battery-chargers",
  "battery-holders",
  "bead-chain",
  "beam-clamps",
  "belt-conveyors",
  "bench-scales",
  "bench-vises",
  "bin-boxes",
  "bin-storage",
  "binding-posts",
  "blank-tags",
  "blasting-cabinets",
  "blind-rivets",
  "bluetooth-padlocks",
  "boring-lathe-tools",
  "box-reducers",
  "box-wrenches",
  "braided-hose",
  "brass-pipe-fittings",
  "breather-vents",
  "butt-splices",
  "c-clamps",
  "cable-cutters",
  "cable-holders",
  "cable-tie-mounts",
  "cable-ties",
  "cam-handles",
  "cam-latches",
  "cam-locks",
  "cap-nuts",
  "captive-panel-screws",
  "carbide-burs",
  "carbide-inserts",
  "carbon-fiber",
  "carbon-steel",
  "cardstock-tags",
  "carriage-bolts",
  "cast-acrylic",
  "cast-iron",
  "cast-nylon",
  "casting-compounds",
  "ceiling-lights",
  "ceramic-adhesives",
  "chain-slings",
  "check-valves",
  "chemical-hose",
  "chemistry-meters",
  "chemistry-testing",
  "chip-clearing-tools",
  "chucking-reamers",
  "cinching-straps",
  "circuit-breakers",
  "circular-saw-blades",
  "circular-saws",
  "clamping-hangers",
  "clevis-pins",
  "clevis-rod-ends",
  "clip-on-nuts",
  "coaxial-connectors",
  "coaxial-cords",
  "coiled-spring-pins",
  "compact-connectors",
  "computer-adapters",
  "concrete-adhesives",
  "concrete-repair",
  "contour-transfers",
  "conveyor-belt-lacing",
  "conveyor-belting",
  "conveyor-brushes",
  "conveyor-rollers",
  "coolant-hose",
  "copper-tube-fittings",
  "copper-tubing",
  "cord-grips",
  "cord-reels",
  "cotter-pins",
  "coupling-nuts",
  "cpvc-pipe-fittings",
  "cup-brushes",
  "cutoff-wheels",
  "cylinder-hones",
  "cylinder-racks",
  "cylinder-trucks",
  "data-cable",
  "data-connectors",
  "dc-motors",
  "dead-blow-hammers",
  "delrin-acetal-resin",
  "desiccant-air-dryers",
  "desktop-cranes",
  "dial-calipers",
  "dial-indicators",
  "die-springs",
  "direct-heaters",
  "disconnect-switches",
  "dispensing-needles",
  "dispensing-pumps",
  "disposable-clothing",
  "disposable-gloves",
  "document-protectors",
  "door-closers",
  "door-handles",
  "door-holders",
  "dowel-pins",
  "drafting-equipment",
  "drain-cleaners",
  "drainage-mats",
  "draw-latches",
  "drawer-cabinets",
  "drawer-slides",
  "drill-bit-sets",
  "drill-bits",
  "drill-bushings",
  "drill-chucks",
  "drill-presses",
  "drilling-screws",
  "drinking-fountains",
  "drive-anchors",
  "drive-rollers",
  "drive-shafts",
  "drum-faucets",
  "drum-pumps",
  "drum-top-vacuums",
  "drum-trucks",
  "dry-box-gloves",
  "dry-erase-boards",
  "dry-film-lubricants",
  "duct-fans",
  "duct-hose",
  "duct-tape",
  "dust-collectors",
  "dustless-chalk",
  "edge-trim",
  "electric-actuators",
  "electric-drills",
  "electric-drum-pumps",
  "electric-mixers",
  "electrical-switches",
  "electrical-tape",
  "electronic-calipers",
  "enclosure-heaters",
  "enclosure-panels",
  "ethernet-cords",
  "exhaust-fans",
  "exit-lights",
  "expansion-joints",
  "expansion-plugs",
  "extension-cords",
  "extension-springs",
  "fabric-snaps",
  "fan-blades",
  "fep-tubing",
  "fiberglass-grating",
  "file-holders",
  "filter-bag-housings",
  "filter-bags",
  "filter-cartridges",
  "fire-fighting-hose",
  "first-aid-supplies",
  "fixture-clamps",
  "flange-locknuts",
  "flange-mount-seals",
  "flap-sanding-discs",
  "flap-sanding-wheels",
  "flared-tube-fittings",
  "flashing-lights",
  "flat-washers",
  "flexible-shafts",
  "flexible-shank-burs",
  "flexible-trays",
  "float-valves",
  "floor-locks",
  "floor-marking-tape",
  "floor-scales",
  "floor-squeegees",
  "flow-sights",
  "flow-switches",
  "flowmeter-totalizers",
  "foot-switches",
  "force-gauges",
  "fume-exhausters",
  "garbage-bags",
  "garden-hose",
  "gas-hose",
  "gas-regulators",
  "gas-springs",
  "gauge-blocks",
  "glass-sights",
  "gold-wire",
  "grab-latches",
  "grease-fittings",
  "grinding-bits",
  "grinding-wheels",
  "hand-brushes",
  "hand-chain-hoists",
  "hand-reamers",
  "hand-trucks",
  "hand-wheels",
  "hand-winches",
  "hanging-scales",
  "hard-hats",
  "hardened-shafts",
  "hardness-testers",
  "heat-exchangers",
  "heat-guns",
  "heat-lamps",
  "heat-sealable-bags",
  "heat-set-inserts",
  "heat-shrink-tubing",
  "heat-sinks",
  "heated-scrapers",
  "helical-inserts",
  "hex-bit-sockets",
  "hex-head-screws",
  "hex-nuts",
  "high-accuracy-rulers",
  "high-amp-relays",
  "high-vacuum-filters",
  "high-vacuum-sights",
  "hinge-adjusters",
  "hoist-rings",
  "hole-saws",
  "hose-couplings",
  "hose-reels",
  "hot-melt-glue",
  "hydraulic-cylinders",
  "hydraulic-hose",
  "hydraulic-jacks",
  "iec-connectors",
  "immersion-heaters",
  "impression-foam",
  "indicating-lights",
  "inflatable-wedges",
  "ink-markers",
  "insertion-heaters",
  "inspection-mirrors",
  "instrument-carts",
  "insulation-jacketing",
  "jam-removers",
  "jigsaw-blades",
  "key-cabinets",
  "key-locking-inserts",
  "key-stock",
  "keyed-drive-shafts",
  "keyseat-end-mills",
  "l-key-sets",
  "l-keys",
  "label-holders",
  "latching-connectors",
  "lathe-tools",
  "lavatory-partitions",
  "lead-screws",
  "leveling-lasers",
  "leveling-mounts",
  "lid-supports",
  "lift-off-hinges",
  "lift-trucks",
  "light-bulbs",
  "limit-switches",
  "linear-ball-bearings",
  "liquid-level-gauges",
  "lock-washers",
  "lockout-devices",
  "loop-clamps",
  "loop-hangers",
  "machine-brackets",
  "machine-handles",
  "machine-keys",
  "magnetic-base-drills",
  "magnetic-bumpers",
  "masking-tape",
  "masonry-drill-bits",
  "medium-amp-relays",
  "metal-cable-ties",
  "metal-panels",
  "metal-plates",
  "metal-tags",
  "metering-pumps",
  "metric-o-rings",
  "mil-spec-connectors",
  "mobile-lift-tables",
  "motor-controls",
  "motor-starters",
  "mountable-cable-ties",
  "mounting-tape",
  "neoprene-foam",
  "nickel-titanium",
  "nonmarring-hammers",
  "nonslip-bumpers",
  "nylon-rivets",
  "nylon-tubing",
  "o-rings",
  "oil-level-indicators",
  "oil-reservoirs",
  "oil-skimmers",
  "on-off-valves",
  "open-end-wrenches",
  "outlet-boxes",
  "outlet-strips",
  "packaging-tape",
  "paint-brushes",
  "paint-markers",
  "paint-sprayers",
  "pallet-racks",
  "pallet-trucks",
  "panel-air-filters",
  "parts-baskets",
  "pendant-switches",
  "perforated-sheets",
  "pest-control",
  "petroleum-hose",
  "piano-hinges",
  "pipe-couplings",
  "pipe-gaskets",
  "pipe-markers",
  "pipe-wrenches",
  "plank-grating",
  "plastic-clamps",
  "plastic-mesh",
  "plate-lifting-clamps",
  "platinum-wire",
  "plier-clamps",
  "plug-gauges",
  "portable-lights",
  "power-cords",
  "power-supplies",
  "precision-knives",
  "press-fit-nuts",
  "press-in-nuts",
  "protecting-tape",
  "protective-coatings",
  "protective-curtains",
  "protective-panels",
  "protective-wrap",
  "proximity-switches",
  "pull-handles",
  "push-brooms",
  "push-nuts",
  "push-on-seals",
  "pvc-pipe-fittings",
  "pvc-tubing",
  "quick-release-pins",
  "ratchet-pullers",
  "recycled-plastics",
  "repair-adhesives",
  "repair-clamps",
  "reusable-cable-ties",
  "ring-terminals",
  "rivet-nuts",
  "robot-base-mounts",
  "robot-bases",
  "rocker-switches",
  "rod-wipers",
  "roller-bearings",
  "roller-chain",
  "roller-conveyors",
  "roof-exhaust-fans",
  "roof-repair",
  "rotary-broaches",
  "rotary-hammers",
  "rotary-shaft-seals",
  "rotating-cranes",
  "rotating-joints",
  "router-bits",
  "rtd-probes",
  "rubber-edge-seals",
  "rubber-tread-wheels",
  "rubber-tubing",
  "safety-cabinets",
  "safety-glasses",
  "safety-mirrors",
  "sanding-belts",
  "sanding-discs",
  "sanding-guides",
  "sanding-rolls",
  "sanding-sheets",
  "screw-extractors",
  "screw-jacks",
  "scrub-brushes",
  "sealing-washers",
  "security-lights",
  "sensor-connectors",
  "set-screws",
  "setup-clamps",
  "shaft-collars",
  "shaft-couplings",
  "shaft-repair-sleeves",
  "shaft-supports",
  "sharpening-stones",
  "sheet-metal-cutters",
  "shelf-cabinets",
  "shim-stock",
  "shim-tape",
  "shipping-pails",
  "shock-absorbers",
  "shoulder-screws",
  "shower-stations",
  "silicone-foam",
  "sleeve-bearings",
  "slide-bolts",
  "slitting-saws",
  "slotted-spring-pins",
  "sludge-samplers",
  "small-parts-storage",
  "snap-acting-switches",
  "soap-dispensers",
  "socket-head-screws",
  "socket-organizers",
  "socket-wrenches",
  "soldering-irons",
  "solid-rivets",
  "solid-rod-ends",
  "sound-insulation",
  "space-heaters",
  "spacing-beads",
  "spanner-wrenches",
  "specialty-pliers",
  "specialty-vises",
  "specialty-washers",
  "speed-reducers",
  "splicing-connectors",
  "spray-bottles",
  "spray-nozzles",
  "spring-clamps",
  "spring-plungers",
  "spring-steel",
  "square-drive-sockets",
  "square-end-mills",
  "square-nuts",
  "squeeze-bottles",
  "stack-lights",
  "stainless-steel",
  "stair-treads",
  "static-control-mats",
  "steel-carts",
  "steel-pipe-fittings",
  "steel-pipe-flanges",
  "steel-stamps",
  "steel-tubing",
  "step-ladders",
  "stepper-motors",
  "storage-bags",
  "storage-boxes",
  "storage-chests",
  "straight-ladders",
  "strap-hinges",
  "stretch-wrap",
  "strip-doors",
  "strip-springs",
  "strobe-lights",
  "structural-adhesives",
  "strut-channel",
  "strut-channel-nuts",
  "strut-mount-clamps",
  "suction-cup-lifters",
  "suction-strainers",
  "super-absorbent-foam",
  "super-flexible-glass",
  "surface-fillers",
  "surface-mount-hinges",
  "t-handle-keys",
  "t-slotted-framing",
  "tamper-seals",
  "tank-level-measurers",
  "tape-dispensers",
  "tape-measures",
  "taper-pins",
  "tapping-screws",
  "teflon-ptfe",
  "terminal-blocks",
  "test-indicators",
  "test-leads",
  "test-weights",
  "tethered-knobs",
  "thermal-insulation",
  "thread-adapters",
  "thread-sealant-tape",
  "thread-sealants",
  "threaded-inserts",
  "threaded-standoffs",
  "threaded-studs",
  "thrust-ball-bearings",
  "thrust-bearings",
  "thumb-nuts",
  "thumb-screws",
  "tie-down-rings",
  "time-clocks",
  "timer-relays",
  "timer-switches",
  "toggle-clamps",
  "toggle-switches",
  "tool-holders",
  "tool-sets",
  "tool-steel",
  "torque-wrenches",
  "torsion-springs",
  "tote-boxes",
  "touch-bars",
  "track-casters",
  "track-rollers",
  "track-wheels",
  "traction-mats",
  "trolley-systems",
  "tube-brushes",
  "tube-fittings",
  "tubular-light-bulbs",
  "turn-lock-connectors",
  "twist-ties",
  "u-bolts",
  "u-joints",
  "ul-class-fuses",
  "unthreaded-spacers",
  "usb-adapters",
  "usb-cords",
  "utility-knives",
  "v-belts",
  "vacuum-cups",
  "vacuum-pumps",
  "wall-louvers",
  "wash-fountains",
  "wash-guns",
  "waste-containers",
  "water-deionizers",
  "water-filters",
  "water-hose",
  "water-removal-pumps",
  "weather-stations",
  "web-slings",
  "weld-nuts",
  "welding-clothing",
  "welding-helmets",
  "wet-dry-vacuums",
  "wet-mops",
  "wheel-brushes",
  "wing-nuts",
  "wire-cloth",
  "wire-connectors",
  "wire-cutting-pliers",
  "wire-partitions",
  "wire-rope",
  "wire-rope-clamps",
  "wire-wrap",
  "wool-felt",
  "work-platforms",
  "workbench-legs",
  "woven-wire-cloth",
];

View file

@ -1,3 +1,5 @@
import { BRANCH_NAME_PREFIXES } from "./branch-name-prefixes.js";
export interface ResolveCreateFlowDecisionInput {
task: string;
explicitTitle?: string;
@ -89,30 +91,42 @@ export function sanitizeBranchName(input: string): string {
return trimmed.slice(0, 50).replace(/-+$/g, "");
}
/**
 * Builds a random string of `length` characters drawn from the lowercase
 * alphanumeric alphabet. Used as the uniqueness suffix of generated branch
 * names. Returns "" for a non-positive length.
 */
function generateRandomSuffix(length: number): string {
  const alphabet = "abcdefghijklmnopqrstuvwxyz0123456789";
  const picked: string[] = [];
  for (let i = 0; i < length; i += 1) {
    // Math.random() is fine here: the suffix only needs uniqueness, not secrecy.
    picked.push(alphabet.charAt(Math.floor(Math.random() * alphabet.length)));
  }
  return picked.join("");
}
/**
 * Produces a McMaster-Carr-style branch name: a random catalog prefix plus a
 * 4-character random alphanumeric suffix, e.g. "ball-bearings-x7k2".
 */
function generateBranchName(): string {
  const index = Math.floor(Math.random() * BRANCH_NAME_PREFIXES.length);
  // Index is always in range, so the non-null assertion is safe.
  return [BRANCH_NAME_PREFIXES[index]!, generateRandomSuffix(4)].join("-");
}
export function resolveCreateFlowDecision(input: ResolveCreateFlowDecisionInput): ResolveCreateFlowDecisionResult {
const explicitBranch = input.explicitBranchName?.trim();
const title = deriveFallbackTitle(input.task, input.explicitTitle);
const generatedBase = sanitizeBranchName(title) || "task";
const branchBase = explicitBranch && explicitBranch.length > 0 ? explicitBranch : generatedBase;
const existingBranches = new Set(input.localBranches.map((value) => value.trim()).filter((value) => value.length > 0));
const existingTaskBranches = new Set(input.taskBranches.map((value) => value.trim()).filter((value) => value.length > 0));
const conflicts = (name: string): boolean => existingBranches.has(name) || existingTaskBranches.has(name);
if (explicitBranch && conflicts(branchBase)) {
throw new Error(`Branch '${branchBase}' already exists. Choose a different --name/--branch value.`);
if (explicitBranch && explicitBranch.length > 0) {
if (conflicts(explicitBranch)) {
throw new Error(`Branch '${explicitBranch}' already exists. Choose a different --name/--branch value.`);
}
return { title, branchName: explicitBranch };
}
if (explicitBranch) {
return { title, branchName: branchBase };
}
let candidate = branchBase;
let index = 2;
while (conflicts(candidate)) {
candidate = `${branchBase}-${index}`;
index += 1;
// Generate a random McMaster-Carr-style branch name, retrying on conflicts
let candidate = generateBranchName();
let attempts = 0;
while (conflicts(candidate) && attempts < 100) {
candidate = generateBranchName();
attempts += 1;
}
return {

View file

@ -1,5 +1,5 @@
import { getOrCreateOrganization } from "../actors/handles.js";
import { APP_SHELL_ORGANIZATION_ID } from "../actors/organization/app-shell.js";
import { APP_SHELL_ORGANIZATION_ID } from "../actors/organization/constants.js";
export interface ResolvedGithubAuth {
githubToken: string;

View file

@ -1,5 +1,6 @@
import { describe, expect, it } from "vitest";
import { deriveFallbackTitle, resolveCreateFlowDecision, sanitizeBranchName } from "../src/services/create-flow.js";
import { BRANCH_NAME_PREFIXES } from "../src/services/branch-name-prefixes.js";
describe("create flow decision", () => {
it("derives a conventional-style fallback title from task text", () => {
@ -17,15 +18,49 @@ describe("create flow decision", () => {
expect(sanitizeBranchName(" spaces everywhere ")).toBe("spaces-everywhere");
});
it("auto-increments generated branch names for conflicts", () => {
it("generates a McMaster-Carr-style branch name with random suffix", () => {
const resolved = resolveCreateFlowDecision({
task: "Add auth",
localBranches: ["feat-add-auth"],
taskBranches: ["feat-add-auth-2"],
localBranches: [],
taskBranches: [],
});
expect(resolved.title).toBe("feat: Add auth");
expect(resolved.branchName).toBe("feat-add-auth-3");
// Branch name should be "<prefix>-<4-char-suffix>" where prefix is from BRANCH_NAME_PREFIXES
const lastDash = resolved.branchName.lastIndexOf("-");
const prefix = resolved.branchName.slice(0, lastDash);
const suffix = resolved.branchName.slice(lastDash + 1);
expect(BRANCH_NAME_PREFIXES).toContain(prefix);
expect(suffix).toMatch(/^[a-z0-9]{4}$/);
});
it("avoids conflicts by generating a different random name", () => {
// Even with a conflicting branch, it should produce something different
const resolved = resolveCreateFlowDecision({
task: "Add auth",
localBranches: [],
taskBranches: [],
});
// Running again with the first result as a conflict should produce a different name
const resolved2 = resolveCreateFlowDecision({
task: "Add auth",
localBranches: [resolved.branchName],
taskBranches: [],
});
expect(resolved2.branchName).not.toBe(resolved.branchName);
});
it("uses explicit branch name when provided", () => {
const resolved = resolveCreateFlowDecision({
task: "new task",
explicitBranchName: "my-branch",
localBranches: [],
taskBranches: [],
});
expect(resolved.branchName).toBe("my-branch");
});
it("fails when explicit branch already exists", () => {

View file

@ -1,14 +1,13 @@
import { describe, expect, it } from "vitest";
import { githubDataKey, historyKey, organizationKey, repositoryKey, taskKey, taskSandboxKey } from "../src/actors/keys.js";
import { auditLogKey, githubDataKey, organizationKey, taskKey, taskSandboxKey } from "../src/actors/keys.js";
describe("actor keys", () => {
it("prefixes every key with organization namespace", () => {
const keys = [
organizationKey("default"),
repositoryKey("default", "repo"),
taskKey("default", "repo", "task"),
taskSandboxKey("default", "sbx"),
historyKey("default", "repo"),
auditLogKey("default"),
githubDataKey("default"),
];

View file

@ -8,6 +8,7 @@ import { describe, expect, it } from "vitest";
import { setupTest } from "rivetkit/test";
import { organizationKey } from "../src/actors/keys.js";
import { registry } from "../src/actors/index.js";
import { organizationWorkflowQueueName } from "../src/actors/organization/queues.js";
import { repoIdFromRemote } from "../src/services/repo.js";
import { createTestDriver } from "./helpers/test-driver.js";
import { createTestRuntimeContext } from "./helpers/test-context.js";
@ -51,8 +52,8 @@ describe("organization isolation", () => {
const { repoPath } = createRepo();
const repoId = repoIdFromRemote(repoPath);
await wsA.applyGithubRepositoryProjection({ repoId, remoteUrl: repoPath });
await wsB.applyGithubRepositoryProjection({ repoId, remoteUrl: repoPath });
await wsA.send(organizationWorkflowQueueName("organization.command.github.repository_projection.apply"), { repoId, remoteUrl: repoPath }, { wait: true });
await wsB.send(organizationWorkflowQueueName("organization.command.github.repository_projection.apply"), { repoId, remoteUrl: repoPath }, { wait: true });
await wsA.createTask({
organizationId: "alpha",

View file

@ -1,7 +1,7 @@
import { describe, expect, it } from "vitest";
import { requireSendableSessionMeta, shouldMarkSessionUnreadForStatus, shouldRecreateSessionForModelChange } from "../src/actors/task/workbench.js";
import { requireSendableSessionMeta, shouldMarkSessionUnreadForStatus, shouldRecreateSessionForModelChange } from "../src/actors/task/workspace.js";
describe("workbench unread status transitions", () => {
describe("workspace unread status transitions", () => {
it("marks unread when a running session first becomes idle", () => {
expect(shouldMarkSessionUnreadForStatus({ thinkingSinceMs: Date.now() - 1_000 }, "idle")).toBe(true);
});
@ -15,7 +15,7 @@ describe("workbench unread status transitions", () => {
});
});
describe("workbench model changes", () => {
describe("workspace model changes", () => {
it("recreates an unused ready session so the selected model takes effect", () => {
expect(
shouldRecreateSessionForModelChange({
@ -58,9 +58,9 @@ describe("workbench model changes", () => {
});
});
describe("workbench send readiness", () => {
describe("workspace send readiness", () => {
it("rejects unknown sessions", () => {
expect(() => requireSendableSessionMeta(null, "session-1")).toThrow("Unknown workbench session: session-1");
expect(() => requireSendableSessionMeta(null, "session-1")).toThrow("Unknown workspace session: session-1");
});
it("rejects pending sessions", () => {

View file

@ -1,4 +1,4 @@
import type { AppConfig, TaskRecord } from "@sandbox-agent/foundry-shared";
import type { AppConfig, TaskRecord, WorkspaceTaskDetail } from "@sandbox-agent/foundry-shared";
import { spawnSync } from "node:child_process";
import { createBackendClientFromConfig, filterTasks, formatRelativeAge, groupTaskStatus } from "@sandbox-agent/foundry-client";
import { CLI_BUILD_ID } from "./build-id.js";
@ -51,14 +51,28 @@ interface DisplayRow {
age: string;
}
type TuiTaskRow = TaskRecord & Pick<WorkspaceTaskDetail, "pullRequest"> & { activeSessionId?: string | null };
interface RenderOptions {
width?: number;
height?: number;
}
async function listDetailedTasks(client: ReturnType<typeof createBackendClientFromConfig>, organizationId: string): Promise<TaskRecord[]> {
async function listDetailedTasks(client: ReturnType<typeof createBackendClientFromConfig>, organizationId: string): Promise<TuiTaskRow[]> {
const rows = await client.listTasks(organizationId);
return await Promise.all(rows.map(async (row) => await client.getTask(organizationId, row.taskId)));
return await Promise.all(
rows.map(async (row) => {
const [task, detail] = await Promise.all([
client.getTask(organizationId, row.repoId, row.taskId),
client.getTaskDetail(organizationId, row.repoId, row.taskId).catch(() => null),
]);
return {
...task,
pullRequest: detail?.pullRequest ?? null,
activeSessionId: detail?.activeSessionId ?? null,
};
}),
);
}
function pad(input: string, width: number): string {
@ -143,29 +157,17 @@ function agentSymbol(status: TaskRecord["status"]): string {
return "-";
}
function toDisplayRow(row: TaskRecord): DisplayRow {
const conflictPrefix = row.conflictsWithMain === "true" ? "\u26A0 " : "";
const prLabel = row.prUrl ? `#${row.prUrl.match(/\/pull\/(\d+)/)?.[1] ?? "?"}` : row.prSubmitted ? "sub" : "-";
const ciLabel = row.ciStatus ?? "-";
const reviewLabel = row.reviewStatus
? row.reviewStatus === "approved"
? "ok"
: row.reviewStatus === "changes_requested"
? "chg"
: row.reviewStatus === "pending"
? "..."
: row.reviewStatus
: "-";
function toDisplayRow(row: TuiTaskRow): DisplayRow {
const prLabel = row.pullRequest ? `#${row.pullRequest.number}` : "-";
const reviewLabel = row.pullRequest ? (row.pullRequest.isDraft ? "draft" : row.pullRequest.state.toLowerCase()) : "-";
return {
name: `${conflictPrefix}${row.title || row.branchName}`,
diff: row.diffStat ?? "-",
name: row.title || row.branchName || row.taskId,
diff: "-",
agent: agentSymbol(row.status),
pr: prLabel,
author: row.prAuthor ?? "-",
ci: ciLabel,
author: row.pullRequest?.authorLogin ?? "-",
ci: "-",
review: reviewLabel,
age: formatRelativeAge(row.updatedAt),
};
@ -186,7 +188,7 @@ function helpLines(width: number): string[] {
}
export function formatRows(
rows: TaskRecord[],
rows: TuiTaskRow[],
selected: number,
organizationId: string,
status: string,
@ -336,8 +338,8 @@ export async function runTui(config: AppConfig, organizationId: string): Promise
renderer.root.add(text);
renderer.start();
let allRows: TaskRecord[] = [];
let filteredRows: TaskRecord[] = [];
let allRows: TuiTaskRow[] = [];
let filteredRows: TuiTaskRow[] = [];
let selected = 0;
let searchQuery = "";
let showHelp = false;
@ -393,7 +395,7 @@ export async function runTui(config: AppConfig, organizationId: string): Promise
render();
};
const selectedRow = (): TaskRecord | null => {
const selectedRow = (): TuiTaskRow | null => {
if (filteredRows.length === 0) {
return null;
}
@ -522,7 +524,7 @@ export async function runTui(config: AppConfig, organizationId: string): Promise
render();
void (async () => {
try {
const result = await client.switchTask(organizationId, row.taskId);
const result = await client.switchTask(organizationId, row.repoId, row.taskId);
close(`cd ${result.switchTarget}`);
} catch (err) {
busy = false;
@ -543,7 +545,7 @@ export async function runTui(config: AppConfig, organizationId: string): Promise
render();
void (async () => {
try {
const result = await client.attachTask(organizationId, row.taskId);
const result = await client.attachTask(organizationId, row.repoId, row.taskId);
close(`target=${result.target} session=${result.sessionId ?? "none"}`);
} catch (err) {
busy = false;
@ -559,7 +561,11 @@ export async function runTui(config: AppConfig, organizationId: string): Promise
if (!row) {
return;
}
void runActionWithRefresh(`archiving ${row.taskId}`, async () => client.runAction(organizationId, row.taskId, "archive"), `archived ${row.taskId}`);
void runActionWithRefresh(
`archiving ${row.taskId}`,
async () => client.runAction(organizationId, row.repoId, row.taskId, "archive"),
`archived ${row.taskId}`,
);
return;
}
@ -568,7 +574,11 @@ export async function runTui(config: AppConfig, organizationId: string): Promise
if (!row) {
return;
}
void runActionWithRefresh(`syncing ${row.taskId}`, async () => client.runAction(organizationId, row.taskId, "sync"), `synced ${row.taskId}`);
void runActionWithRefresh(
`syncing ${row.taskId}`,
async () => client.runAction(organizationId, row.repoId, row.taskId, "sync"),
`synced ${row.taskId}`,
);
return;
}
@ -580,8 +590,8 @@ export async function runTui(config: AppConfig, organizationId: string): Promise
void runActionWithRefresh(
`merging ${row.taskId}`,
async () => {
await client.runAction(organizationId, row.taskId, "merge");
await client.runAction(organizationId, row.taskId, "archive");
await client.runAction(organizationId, row.repoId, row.taskId, "merge");
await client.runAction(organizationId, row.repoId, row.taskId, "archive");
},
`merged+archived ${row.taskId}`,
);
@ -590,14 +600,15 @@ export async function runTui(config: AppConfig, organizationId: string): Promise
if (ctrl && name === "o") {
const row = selectedRow();
if (!row?.prUrl) {
const prUrl = row?.pullRequest?.url ?? null;
if (!prUrl) {
status = "no PR URL available for this task";
render();
return;
}
const openCmd = process.platform === "darwin" ? "open" : "xdg-open";
spawnSync(openCmd, [row.prUrl], { stdio: "ignore" });
status = `opened ${row.prUrl}`;
spawnSync(openCmd, [prUrl], { stdio: "ignore" });
status = `opened ${prUrl}`;
render();
return;
}

View file

@ -3,7 +3,7 @@ import type { TaskRecord } from "@sandbox-agent/foundry-shared";
import { filterTasks, fuzzyMatch } from "@sandbox-agent/foundry-client";
import { formatRows } from "../src/tui.js";
const sample: TaskRecord = {
const sample = {
organizationId: "default",
repoId: "repo-a",
repoRemote: "https://example.com/repo-a.git",
@ -13,33 +13,22 @@ const sample: TaskRecord = {
task: "Do test",
sandboxProviderId: "local",
status: "running",
statusMessage: null,
activeSandboxId: "sandbox-1",
activeSessionId: "session-1",
pullRequest: null,
sandboxes: [
{
sandboxId: "sandbox-1",
sandboxProviderId: "local",
sandboxActorId: null,
switchTarget: "sandbox://local/sandbox-1",
cwd: null,
createdAt: 1,
updatedAt: 1,
},
],
agentType: null,
prSubmitted: false,
diffStat: null,
prUrl: null,
prAuthor: null,
ciStatus: null,
reviewStatus: null,
reviewer: null,
conflictsWithMain: null,
hasUnpushed: null,
parentBranch: null,
createdAt: 1,
updatedAt: 1,
};
} satisfies TaskRecord & { pullRequest: null; activeSessionId?: null };
describe("formatRows", () => {
it("renders rust-style table header and empty state", () => {

View file

@ -10,8 +10,8 @@
"typecheck": "tsc --noEmit",
"test": "vitest run",
"test:e2e:full": "HF_ENABLE_DAEMON_FULL_E2E=1 vitest run test/e2e/full-integration-e2e.test.ts",
"test:e2e:workbench": "HF_ENABLE_DAEMON_WORKBENCH_E2E=1 vitest run test/e2e/workbench-e2e.test.ts",
"test:e2e:workbench-load": "HF_ENABLE_DAEMON_WORKBENCH_LOAD_E2E=1 vitest run test/e2e/workbench-load-e2e.test.ts"
"test:e2e:workspace": "HF_ENABLE_DAEMON_WORKBENCH_E2E=1 vitest run test/e2e/workspace-e2e.test.ts",
"test:e2e:workspace-load": "HF_ENABLE_DAEMON_WORKBENCH_LOAD_E2E=1 vitest run test/e2e/workspace-load-e2e.test.ts"
},
"dependencies": {
"@sandbox-agent/foundry-shared": "workspace:*",

View file

@ -4,6 +4,7 @@ import type {
FoundryOrganization,
FoundryUser,
UpdateFoundryOrganizationProfileInput,
WorkspaceModelId,
} from "@sandbox-agent/foundry-shared";
import type { BackendClient } from "./backend-client.js";
import { getMockFoundryAppClient } from "./mock-app.js";
@ -17,6 +18,7 @@ export interface FoundryAppClient {
skipStarterRepo(): Promise<void>;
starStarterRepo(organizationId: string): Promise<void>;
selectOrganization(organizationId: string): Promise<void>;
setDefaultModel(model: WorkspaceModelId): Promise<void>;
updateOrganizationProfile(input: UpdateFoundryOrganizationProfileInput): Promise<void>;
triggerGithubSync(organizationId: string): Promise<void>;
completeHostedCheckout(organizationId: string, planId: FoundryBillingPlanId): Promise<void>;

View file

@ -7,28 +7,29 @@ import type {
CreateTaskInput,
AppEvent,
SessionEvent,
SandboxProcessSnapshot,
SandboxProcessesEvent,
TaskRecord,
TaskSummary,
TaskWorkbenchChangeModelInput,
TaskWorkbenchCreateTaskInput,
TaskWorkbenchCreateTaskResponse,
TaskWorkbenchDiffInput,
TaskWorkbenchRenameInput,
TaskWorkbenchRenameSessionInput,
TaskWorkbenchSelectInput,
TaskWorkbenchSetSessionUnreadInput,
TaskWorkbenchSendMessageInput,
TaskWorkbenchSnapshot,
TaskWorkbenchSessionInput,
TaskWorkbenchUpdateDraftInput,
TaskWorkspaceChangeModelInput,
TaskWorkspaceCreateTaskInput,
TaskWorkspaceCreateTaskResponse,
TaskWorkspaceDiffInput,
TaskWorkspaceRenameInput,
TaskWorkspaceRenameSessionInput,
TaskWorkspaceSelectInput,
TaskWorkspaceSetSessionUnreadInput,
TaskWorkspaceSendMessageInput,
TaskWorkspaceSnapshot,
TaskWorkspaceSessionInput,
TaskWorkspaceUpdateDraftInput,
TaskEvent,
WorkbenchTaskDetail,
WorkbenchTaskSummary,
WorkbenchSessionDetail,
WorkspaceTaskDetail,
WorkspaceTaskSummary,
WorkspaceSessionDetail,
OrganizationEvent,
OrganizationSummarySnapshot,
HistoryEvent,
AuditLogEvent as HistoryEvent,
HistoryQueryInput,
SandboxProviderId,
RepoOverview,
@ -37,8 +38,10 @@ import type {
StarSandboxAgentRepoResult,
SwitchResult,
UpdateFoundryOrganizationProfileInput,
WorkspaceModelGroup,
WorkspaceModelId,
} from "@sandbox-agent/foundry-shared";
import type { ProcessCreateRequest, ProcessInfo, ProcessLogFollowQuery, ProcessLogsResponse, ProcessSignalQuery } from "sandbox-agent";
import type { ProcessCreateRequest, ProcessLogFollowQuery, ProcessLogsResponse, ProcessSignalQuery } from "sandbox-agent";
import { createMockBackendClient } from "./mock/backend-client.js";
import { taskKey, taskSandboxKey, organizationKey } from "./keys.js";
@ -64,7 +67,7 @@ export interface SandboxSessionEventRecord {
payload: unknown;
}
export type SandboxProcessRecord = ProcessInfo;
export type SandboxProcessRecord = SandboxProcessSnapshot;
export interface ActorConn {
on(event: string, listener: (payload: any) => void): () => void;
@ -72,45 +75,44 @@ export interface ActorConn {
dispose(): Promise<void>;
}
interface AuthSessionScopedInput {
authSessionId?: string;
}
interface OrganizationHandle {
connect(): ActorConn;
listRepos(input: { organizationId: string }): Promise<RepoRecord[]>;
createTask(input: CreateTaskInput): Promise<TaskRecord>;
listTasks(input: { organizationId: string; repoId?: string }): Promise<TaskSummary[]>;
getRepoOverview(input: { organizationId: string; repoId: string }): Promise<RepoOverview>;
history(input: HistoryQueryInput): Promise<HistoryEvent[]>;
switchTask(taskId: string): Promise<SwitchResult>;
getTask(input: { organizationId: string; taskId: string }): Promise<TaskRecord>;
attachTask(input: { organizationId: string; taskId: string; reason?: string }): Promise<{ target: string; sessionId: string | null }>;
pushTask(input: { organizationId: string; taskId: string; reason?: string }): Promise<void>;
syncTask(input: { organizationId: string; taskId: string; reason?: string }): Promise<void>;
mergeTask(input: { organizationId: string; taskId: string; reason?: string }): Promise<void>;
archiveTask(input: { organizationId: string; taskId: string; reason?: string }): Promise<void>;
killTask(input: { organizationId: string; taskId: string; reason?: string }): Promise<void>;
auditLog(input: HistoryQueryInput): Promise<HistoryEvent[]>;
switchTask(input: { repoId: string; taskId: string }): Promise<SwitchResult>;
getTask(input: { organizationId: string; repoId: string; taskId: string }): Promise<TaskRecord>;
attachTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise<{ target: string; sessionId: string | null }>;
pushTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise<void>;
syncTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise<void>;
mergeTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise<void>;
archiveTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise<void>;
killTask(input: { organizationId: string; repoId: string; taskId: string; reason?: string }): Promise<void>;
useOrganization(input: { organizationId: string }): Promise<{ organizationId: string }>;
starSandboxAgentRepo(input: StarSandboxAgentRepoInput): Promise<StarSandboxAgentRepoResult>;
getOrganizationSummary(input: { organizationId: string }): Promise<OrganizationSummarySnapshot>;
applyTaskSummaryUpdate(input: { taskSummary: WorkbenchTaskSummary }): Promise<void>;
removeTaskSummary(input: { taskId: string }): Promise<void>;
reconcileWorkbenchState(input: { organizationId: string }): Promise<OrganizationSummarySnapshot>;
createWorkbenchTask(input: TaskWorkbenchCreateTaskInput): Promise<TaskWorkbenchCreateTaskResponse>;
markWorkbenchUnread(input: TaskWorkbenchSelectInput): Promise<void>;
renameWorkbenchTask(input: TaskWorkbenchRenameInput): Promise<void>;
renameWorkbenchBranch(input: TaskWorkbenchRenameInput): Promise<void>;
createWorkbenchSession(input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }>;
renameWorkbenchSession(input: TaskWorkbenchRenameSessionInput): Promise<void>;
setWorkbenchSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise<void>;
updateWorkbenchDraft(input: TaskWorkbenchUpdateDraftInput): Promise<void>;
changeWorkbenchModel(input: TaskWorkbenchChangeModelInput): Promise<void>;
sendWorkbenchMessage(input: TaskWorkbenchSendMessageInput): Promise<void>;
stopWorkbenchSession(input: TaskWorkbenchSessionInput): Promise<void>;
closeWorkbenchSession(input: TaskWorkbenchSessionInput): Promise<void>;
publishWorkbenchPr(input: TaskWorkbenchSelectInput): Promise<void>;
revertWorkbenchFile(input: TaskWorkbenchDiffInput): Promise<void>;
reloadGithubOrganization(): Promise<void>;
reloadGithubPullRequests(): Promise<void>;
reloadGithubRepository(input: { repoId: string }): Promise<void>;
reloadGithubPullRequest(input: { repoId: string; prNumber: number }): Promise<void>;
createWorkspaceTask(input: TaskWorkspaceCreateTaskInput & AuthSessionScopedInput): Promise<TaskWorkspaceCreateTaskResponse>;
markWorkspaceUnread(input: TaskWorkspaceSelectInput & AuthSessionScopedInput): Promise<void>;
renameWorkspaceTask(input: TaskWorkspaceRenameInput & AuthSessionScopedInput): Promise<void>;
createWorkspaceSession(input: TaskWorkspaceSelectInput & { model?: string } & AuthSessionScopedInput): Promise<{ sessionId: string }>;
renameWorkspaceSession(input: TaskWorkspaceRenameSessionInput & AuthSessionScopedInput): Promise<void>;
selectWorkspaceSession(input: TaskWorkspaceSessionInput & AuthSessionScopedInput): Promise<void>;
setWorkspaceSessionUnread(input: TaskWorkspaceSetSessionUnreadInput & AuthSessionScopedInput): Promise<void>;
updateWorkspaceDraft(input: TaskWorkspaceUpdateDraftInput & AuthSessionScopedInput): Promise<void>;
changeWorkspaceModel(input: TaskWorkspaceChangeModelInput & AuthSessionScopedInput): Promise<void>;
sendWorkspaceMessage(input: TaskWorkspaceSendMessageInput & AuthSessionScopedInput): Promise<void>;
stopWorkspaceSession(input: TaskWorkspaceSessionInput & AuthSessionScopedInput): Promise<void>;
closeWorkspaceSession(input: TaskWorkspaceSessionInput & AuthSessionScopedInput): Promise<void>;
publishWorkspacePr(input: TaskWorkspaceSelectInput & AuthSessionScopedInput): Promise<void>;
revertWorkspaceFile(input: TaskWorkspaceDiffInput & AuthSessionScopedInput): Promise<void>;
adminReloadGithubOrganization(): Promise<void>;
adminReloadGithubRepository(input: { repoId: string }): Promise<void>;
}
interface AppOrganizationHandle {
@@ -119,6 +121,7 @@ interface AppOrganizationHandle {
skipAppStarterRepo(input: { sessionId: string }): Promise<FoundryAppSnapshot>;
starAppStarterRepo(input: { sessionId: string; organizationId: string }): Promise<FoundryAppSnapshot>;
selectAppOrganization(input: { sessionId: string; organizationId: string }): Promise<FoundryAppSnapshot>;
setAppDefaultModel(input: { sessionId: string; defaultModel: WorkspaceModelId }): Promise<FoundryAppSnapshot>;
updateAppOrganizationProfile(input: UpdateFoundryOrganizationProfileInput & { sessionId: string }): Promise<FoundryAppSnapshot>;
triggerAppRepoImport(input: { sessionId: string; organizationId: string }): Promise<FoundryAppSnapshot>;
beginAppGithubInstall(input: { sessionId: string; organizationId: string }): Promise<{ url: string }>;
@@ -130,9 +133,9 @@ interface AppOrganizationHandle {
}
interface TaskHandle {
getTaskSummary(): Promise<WorkbenchTaskSummary>;
getTaskDetail(): Promise<WorkbenchTaskDetail>;
getSessionDetail(input: { sessionId: string }): Promise<WorkbenchSessionDetail>;
getTaskSummary(): Promise<WorkspaceTaskSummary>;
getTaskDetail(input?: AuthSessionScopedInput): Promise<WorkspaceTaskDetail>;
getSessionDetail(input: { sessionId: string } & AuthSessionScopedInput): Promise<WorkspaceSessionDetail>;
connect(): ActorConn;
}
@@ -157,6 +160,7 @@ interface TaskSandboxHandle {
rawSendSessionMethod(sessionId: string, method: string, params: Record<string, unknown>): Promise<unknown>;
destroySession(sessionId: string): Promise<void>;
sandboxAgentConnection(): Promise<{ endpoint: string; token?: string }>;
listWorkspaceModelGroups(): Promise<WorkspaceModelGroup[]>;
providerState(): Promise<{ sandboxProviderId: SandboxProviderId; sandboxId: string; state: string; at: number }>;
}
@@ -179,6 +183,7 @@ export interface BackendClientOptions {
endpoint: string;
defaultOrganizationId?: string;
mode?: "remote" | "mock";
encoding?: "json" | "cbor" | "bare";
}
export interface BackendClient {
@@ -192,6 +197,7 @@ export interface BackendClient {
skipAppStarterRepo(): Promise<FoundryAppSnapshot>;
starAppStarterRepo(organizationId: string): Promise<FoundryAppSnapshot>;
selectAppOrganization(organizationId: string): Promise<FoundryAppSnapshot>;
setAppDefaultModel(defaultModel: WorkspaceModelId): Promise<FoundryAppSnapshot>;
updateAppOrganizationProfile(input: UpdateFoundryOrganizationProfileInput): Promise<FoundryAppSnapshot>;
triggerAppRepoImport(organizationId: string): Promise<FoundryAppSnapshot>;
reconnectAppGithub(organizationId: string): Promise<void>;
@@ -204,11 +210,11 @@ export interface BackendClient {
createTask(input: CreateTaskInput): Promise<TaskRecord>;
listTasks(organizationId: string, repoId?: string): Promise<TaskSummary[]>;
getRepoOverview(organizationId: string, repoId: string): Promise<RepoOverview>;
getTask(organizationId: string, taskId: string): Promise<TaskRecord>;
getTask(organizationId: string, repoId: string, taskId: string): Promise<TaskRecord>;
listHistory(input: HistoryQueryInput): Promise<HistoryEvent[]>;
switchTask(organizationId: string, taskId: string): Promise<SwitchResult>;
attachTask(organizationId: string, taskId: string): Promise<{ target: string; sessionId: string | null }>;
runAction(organizationId: string, taskId: string, action: TaskAction): Promise<void>;
switchTask(organizationId: string, repoId: string, taskId: string): Promise<SwitchResult>;
attachTask(organizationId: string, repoId: string, taskId: string): Promise<{ target: string; sessionId: string | null }>;
runAction(organizationId: string, repoId: string, taskId: string, action: TaskAction): Promise<void>;
createSandboxSession(input: {
organizationId: string;
sandboxProviderId: SandboxProviderId;
@@ -279,29 +285,28 @@ export interface BackendClient {
sandboxId: string,
): Promise<{ sandboxProviderId: SandboxProviderId; sandboxId: string; state: string; at: number }>;
getSandboxAgentConnection(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise<{ endpoint: string; token?: string }>;
getSandboxWorkspaceModelGroups(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise<WorkspaceModelGroup[]>;
getOrganizationSummary(organizationId: string): Promise<OrganizationSummarySnapshot>;
getTaskDetail(organizationId: string, repoId: string, taskId: string): Promise<WorkbenchTaskDetail>;
getSessionDetail(organizationId: string, repoId: string, taskId: string, sessionId: string): Promise<WorkbenchSessionDetail>;
getWorkbench(organizationId: string): Promise<TaskWorkbenchSnapshot>;
subscribeWorkbench(organizationId: string, listener: () => void): () => void;
createWorkbenchTask(organizationId: string, input: TaskWorkbenchCreateTaskInput): Promise<TaskWorkbenchCreateTaskResponse>;
markWorkbenchUnread(organizationId: string, input: TaskWorkbenchSelectInput): Promise<void>;
renameWorkbenchTask(organizationId: string, input: TaskWorkbenchRenameInput): Promise<void>;
renameWorkbenchBranch(organizationId: string, input: TaskWorkbenchRenameInput): Promise<void>;
createWorkbenchSession(organizationId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }>;
renameWorkbenchSession(organizationId: string, input: TaskWorkbenchRenameSessionInput): Promise<void>;
setWorkbenchSessionUnread(organizationId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise<void>;
updateWorkbenchDraft(organizationId: string, input: TaskWorkbenchUpdateDraftInput): Promise<void>;
changeWorkbenchModel(organizationId: string, input: TaskWorkbenchChangeModelInput): Promise<void>;
sendWorkbenchMessage(organizationId: string, input: TaskWorkbenchSendMessageInput): Promise<void>;
stopWorkbenchSession(organizationId: string, input: TaskWorkbenchSessionInput): Promise<void>;
closeWorkbenchSession(organizationId: string, input: TaskWorkbenchSessionInput): Promise<void>;
publishWorkbenchPr(organizationId: string, input: TaskWorkbenchSelectInput): Promise<void>;
revertWorkbenchFile(organizationId: string, input: TaskWorkbenchDiffInput): Promise<void>;
reloadGithubOrganization(organizationId: string): Promise<void>;
reloadGithubPullRequests(organizationId: string): Promise<void>;
reloadGithubRepository(organizationId: string, repoId: string): Promise<void>;
reloadGithubPullRequest(organizationId: string, repoId: string, prNumber: number): Promise<void>;
getTaskDetail(organizationId: string, repoId: string, taskId: string): Promise<WorkspaceTaskDetail>;
getSessionDetail(organizationId: string, repoId: string, taskId: string, sessionId: string): Promise<WorkspaceSessionDetail>;
getWorkspace(organizationId: string): Promise<TaskWorkspaceSnapshot>;
subscribeWorkspace(organizationId: string, listener: () => void): () => void;
createWorkspaceTask(organizationId: string, input: TaskWorkspaceCreateTaskInput): Promise<TaskWorkspaceCreateTaskResponse>;
markWorkspaceUnread(organizationId: string, input: TaskWorkspaceSelectInput): Promise<void>;
renameWorkspaceTask(organizationId: string, input: TaskWorkspaceRenameInput): Promise<void>;
createWorkspaceSession(organizationId: string, input: TaskWorkspaceSelectInput & { model?: string }): Promise<{ sessionId: string }>;
renameWorkspaceSession(organizationId: string, input: TaskWorkspaceRenameSessionInput): Promise<void>;
selectWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise<void>;
setWorkspaceSessionUnread(organizationId: string, input: TaskWorkspaceSetSessionUnreadInput): Promise<void>;
updateWorkspaceDraft(organizationId: string, input: TaskWorkspaceUpdateDraftInput): Promise<void>;
changeWorkspaceModel(organizationId: string, input: TaskWorkspaceChangeModelInput): Promise<void>;
sendWorkspaceMessage(organizationId: string, input: TaskWorkspaceSendMessageInput): Promise<void>;
stopWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise<void>;
closeWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise<void>;
publishWorkspacePr(organizationId: string, input: TaskWorkspaceSelectInput): Promise<void>;
revertWorkspaceFile(organizationId: string, input: TaskWorkspaceDiffInput): Promise<void>;
adminReloadGithubOrganization(organizationId: string): Promise<void>;
adminReloadGithubRepository(organizationId: string, repoId: string): Promise<void>;
health(): Promise<{ ok: true }>;
useOrganization(organizationId: string): Promise<{ organizationId: string }>;
starSandboxAgentRepo(organizationId: string): Promise<StarSandboxAgentRepoResult>;
@@ -409,8 +414,8 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
const endpoints = deriveBackendEndpoints(options.endpoint);
const rivetApiEndpoint = endpoints.rivetEndpoint;
const appApiEndpoint = endpoints.appEndpoint;
const client = createClient({ endpoint: rivetApiEndpoint }) as unknown as RivetClient;
const workbenchSubscriptions = new Map<
const client = createClient({ endpoint: rivetApiEndpoint, encoding: options.encoding }) as unknown as RivetClient;
const workspaceSubscriptions = new Map<
string,
{
listeners: Set<() => void>;
@@ -461,6 +466,16 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
return typeof sessionId === "string" && sessionId.length > 0 ? sessionId : null;
};
const getAuthSessionInput = async (): Promise<AuthSessionScopedInput | undefined> => {
const authSessionId = await getSessionId();
return authSessionId ? { authSessionId } : undefined;
};
const withAuthSessionInput = async <TInput extends object>(input: TInput): Promise<TInput & AuthSessionScopedInput> => {
const authSessionInput = await getAuthSessionInput();
return authSessionInput ? { ...input, ...authSessionInput } : input;
};
const organization = async (organizationId: string): Promise<OrganizationHandle> =>
client.organization.getOrCreate(organizationKey(organizationId), {
createWithInput: organizationId,
@@ -471,7 +486,15 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
createWithInput: "app",
}) as unknown as AppOrganizationHandle;
const task = async (organizationId: string, repoId: string, taskId: string): Promise<TaskHandle> => client.task.get(taskKey(organizationId, repoId, taskId));
// getOrCreate is intentional here — this is the ONLY lazy creation point for
// virtual tasks (PR-driven entries that exist in the org's local tables but
// have no task actor yet). The task actor self-initializes from org data in
// getCurrentRecord(). Backend code must NEVER use getOrCreateTask except in
// createTaskMutation. See backend/CLAUDE.md "Lazy Task Actor Creation".
const task = async (organizationId: string, repoId: string, taskId: string): Promise<TaskHandle> =>
client.task.getOrCreate(taskKey(organizationId, repoId, taskId), {
createWithInput: { organizationId, repoId, taskId },
});
const sandboxByKey = async (organizationId: string, _providerId: SandboxProviderId, sandboxId: string): Promise<TaskSandboxHandle> => {
return (client as any).taskSandbox.get(taskSandboxKey(organizationId, sandboxId));
@@ -493,17 +516,15 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
for (const row of candidates) {
try {
const detail = await ws.getTask({ organizationId, taskId: row.taskId });
const detail = await ws.getTask({ organizationId, repoId: row.repoId, taskId: row.taskId });
if (detail.sandboxProviderId !== sandboxProviderId) {
continue;
}
const sandbox = detail.sandboxes.find(
const sandboxes = detail.sandboxes as Array<(typeof detail.sandboxes)[number] & { sandboxActorId?: string }>;
const sandbox = sandboxes.find(
(sb) =>
sb.sandboxId === sandboxId &&
sb.sandboxProviderId === sandboxProviderId &&
typeof (sb as any).sandboxActorId === "string" &&
(sb as any).sandboxActorId.length > 0,
) as { sandboxActorId?: string } | undefined;
sb.sandboxId === sandboxId && sb.sandboxProviderId === sandboxProviderId && typeof sb.sandboxActorId === "string" && sb.sandboxActorId.length > 0,
);
if (sandbox?.sandboxActorId) {
return (client as any).taskSandbox.getForId(sandbox.sandboxActorId);
}
@@ -563,67 +584,81 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
}
};
const getWorkbenchCompat = async (organizationId: string): Promise<TaskWorkbenchSnapshot> => {
const getTaskDetailWithAuth = async (organizationId: string, repoId: string, taskIdValue: string): Promise<WorkspaceTaskDetail> => {
return (await task(organizationId, repoId, taskIdValue)).getTaskDetail(await getAuthSessionInput());
};
const getSessionDetailWithAuth = async (organizationId: string, repoId: string, taskIdValue: string, sessionId: string): Promise<WorkspaceSessionDetail> => {
return (await task(organizationId, repoId, taskIdValue)).getSessionDetail(await withAuthSessionInput({ sessionId }));
};
const getWorkspaceCompat = async (organizationId: string): Promise<TaskWorkspaceSnapshot> => {
const authSessionInput = await getAuthSessionInput();
const summary = await (await organization(organizationId)).getOrganizationSummary({ organizationId });
const tasks = (
await Promise.all(
summary.taskSummaries.map(async (taskSummary) => {
let detail;
try {
detail = await (await task(organizationId, taskSummary.repoId, taskSummary.id)).getTaskDetail();
} catch (error) {
if (isActorNotFoundError(error)) {
return null;
}
throw error;
const resolvedTasks = await Promise.all(
summary.taskSummaries.map(async (taskSummary) => {
let detail;
try {
const taskHandle = await task(organizationId, taskSummary.repoId, taskSummary.id);
detail = await taskHandle.getTaskDetail(authSessionInput);
} catch (error) {
if (isActorNotFoundError(error)) {
return null;
}
const sessionDetails = await Promise.all(
detail.sessionsSummary.map(async (session) => {
try {
const full = await (await task(organizationId, detail.repoId, detail.id)).getSessionDetail({ sessionId: session.id });
return [session.id, full] as const;
} catch (error) {
if (isActorNotFoundError(error)) {
return null;
}
throw error;
throw error;
}
const sessionDetails = await Promise.all(
detail.sessionsSummary.map(async (session) => {
try {
const full = await (await task(organizationId, detail.repoId, detail.id)).getSessionDetail({
sessionId: session.id,
...(authSessionInput ?? {}),
});
return [session.id, full] as const;
} catch (error) {
if (isActorNotFoundError(error)) {
return null;
}
}),
);
const sessionDetailsById = new Map(sessionDetails.filter((entry): entry is readonly [string, WorkbenchSessionDetail] => entry !== null));
return {
id: detail.id,
repoId: detail.repoId,
title: detail.title,
status: detail.status,
repoName: detail.repoName,
updatedAtMs: detail.updatedAtMs,
branch: detail.branch,
pullRequest: detail.pullRequest,
sessions: detail.sessionsSummary.map((session) => {
const full = sessionDetailsById.get(session.id);
return {
id: session.id,
sessionId: session.sessionId,
sessionName: session.sessionName,
agent: session.agent,
model: session.model,
status: session.status,
thinkingSinceMs: session.thinkingSinceMs,
unread: session.unread,
created: session.created,
draft: full?.draft ?? { text: "", attachments: [], updatedAtMs: null },
transcript: full?.transcript ?? [],
};
}),
fileChanges: detail.fileChanges,
diffs: detail.diffs,
fileTree: detail.fileTree,
minutesUsed: detail.minutesUsed,
};
}),
)
).filter((task): task is TaskWorkbenchSnapshot["tasks"][number] => task !== null);
throw error;
}
}),
);
const sessionDetailsById = new Map(sessionDetails.filter((entry): entry is readonly [string, WorkspaceSessionDetail] => entry !== null));
return {
id: detail.id,
repoId: detail.repoId,
title: detail.title,
status: detail.status,
repoName: detail.repoName,
updatedAtMs: detail.updatedAtMs,
branch: detail.branch,
pullRequest: detail.pullRequest,
activeSessionId: detail.activeSessionId ?? null,
sessions: detail.sessionsSummary.map((session) => {
const full = sessionDetailsById.get(session.id);
return {
id: session.id,
sessionId: session.sessionId,
sessionName: session.sessionName,
agent: session.agent,
model: session.model,
status: session.status,
thinkingSinceMs: session.thinkingSinceMs,
unread: session.unread,
created: session.created,
draft: full?.draft ?? { text: "", attachments: [], updatedAtMs: null },
transcript: full?.transcript ?? [],
};
}),
fileChanges: detail.fileChanges,
diffs: detail.diffs,
fileTree: detail.fileTree,
minutesUsed: detail.minutesUsed,
activeSandboxId: detail.activeSandboxId ?? null,
};
}),
);
const tasks = resolvedTasks.filter((task): task is Exclude<(typeof resolvedTasks)[number], null> => task !== null);
const repositories = summary.repos
.map((repo) => ({
@@ -642,14 +677,14 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
};
};
const subscribeWorkbench = (organizationId: string, listener: () => void): (() => void) => {
let entry = workbenchSubscriptions.get(organizationId);
const subscribeWorkspace = (organizationId: string, listener: () => void): (() => void) => {
let entry = workspaceSubscriptions.get(organizationId);
if (!entry) {
entry = {
listeners: new Set(),
disposeConnPromise: null,
};
workbenchSubscriptions.set(organizationId, entry);
workspaceSubscriptions.set(organizationId, entry);
}
entry.listeners.add(listener);
@@ -658,8 +693,8 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
entry.disposeConnPromise = (async () => {
const handle = await organization(organizationId);
const conn = (handle as any).connect();
const unsubscribeEvent = conn.on("workbenchUpdated", () => {
const current = workbenchSubscriptions.get(organizationId);
const unsubscribeEvent = conn.on("organizationUpdated", () => {
const current = workspaceSubscriptions.get(organizationId);
if (!current) {
return;
}
@@ -677,7 +712,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
}
return () => {
const current = workbenchSubscriptions.get(organizationId);
const current = workspaceSubscriptions.get(organizationId);
if (!current) {
return;
}
@@ -686,7 +721,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
return;
}
workbenchSubscriptions.delete(organizationId);
workspaceSubscriptions.delete(organizationId);
void current.disposeConnPromise?.then(async (disposeConn) => {
await disposeConn?.();
});
@@ -849,6 +884,14 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
return await (await appOrganization()).selectAppOrganization({ sessionId, organizationId });
},
async setAppDefaultModel(defaultModel: WorkspaceModelId): Promise<FoundryAppSnapshot> {
const sessionId = await getSessionId();
if (!sessionId) {
throw new Error("No active auth session");
}
return await (await appOrganization()).setAppDefaultModel({ sessionId, defaultModel });
},
async updateAppOrganizationProfile(input: UpdateFoundryOrganizationProfileInput): Promise<FoundryAppSnapshot> {
const sessionId = await getSessionId();
if (!sessionId) {
@@ -948,33 +991,36 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
return (await organization(organizationId)).getRepoOverview({ organizationId, repoId });
},
async getTask(organizationId: string, taskId: string): Promise<TaskRecord> {
async getTask(organizationId: string, repoId: string, taskId: string): Promise<TaskRecord> {
return (await organization(organizationId)).getTask({
organizationId,
repoId,
taskId,
});
},
async listHistory(input: HistoryQueryInput): Promise<HistoryEvent[]> {
return (await organization(input.organizationId)).history(input);
return (await organization(input.organizationId)).auditLog(input);
},
async switchTask(organizationId: string, taskId: string): Promise<SwitchResult> {
return (await organization(organizationId)).switchTask(taskId);
async switchTask(organizationId: string, repoId: string, taskId: string): Promise<SwitchResult> {
return (await organization(organizationId)).switchTask({ repoId, taskId });
},
async attachTask(organizationId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> {
async attachTask(organizationId: string, repoId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> {
return (await organization(organizationId)).attachTask({
organizationId,
repoId,
taskId,
reason: "cli.attach",
});
},
async runAction(organizationId: string, taskId: string, action: TaskAction): Promise<void> {
async runAction(organizationId: string, repoId: string, taskId: string, action: TaskAction): Promise<void> {
if (action === "push") {
await (await organization(organizationId)).pushTask({
organizationId,
repoId,
taskId,
reason: "cli.push",
});
@@ -983,6 +1029,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
if (action === "sync") {
await (await organization(organizationId)).syncTask({
organizationId,
repoId,
taskId,
reason: "cli.sync",
});
@@ -991,6 +1038,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
if (action === "merge") {
await (await organization(organizationId)).mergeTask({
organizationId,
repoId,
taskId,
reason: "cli.merge",
});
@@ -999,6 +1047,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
if (action === "archive") {
await (await organization(organizationId)).archiveTask({
organizationId,
repoId,
taskId,
reason: "cli.archive",
});
@@ -1006,6 +1055,7 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
}
await (await organization(organizationId)).killTask({
organizationId,
repoId,
taskId,
reason: "cli.kill",
});
@@ -1156,96 +1206,92 @@ export function createBackendClient(options: BackendClientOptions): BackendClien
return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.sandboxAgentConnection());
},
async getSandboxWorkspaceModelGroups(organizationId: string, sandboxProviderId: SandboxProviderId, sandboxId: string): Promise<WorkspaceModelGroup[]> {
return await withSandboxHandle(organizationId, sandboxProviderId, sandboxId, async (handle) => handle.listWorkspaceModelGroups());
},
async getOrganizationSummary(organizationId: string): Promise<OrganizationSummarySnapshot> {
return (await organization(organizationId)).getOrganizationSummary({ organizationId });
},
async getTaskDetail(organizationId: string, repoId: string, taskIdValue: string): Promise<WorkbenchTaskDetail> {
return (await task(organizationId, repoId, taskIdValue)).getTaskDetail();
async getTaskDetail(organizationId: string, repoId: string, taskIdValue: string): Promise<WorkspaceTaskDetail> {
return await getTaskDetailWithAuth(organizationId, repoId, taskIdValue);
},
async getSessionDetail(organizationId: string, repoId: string, taskIdValue: string, sessionId: string): Promise<WorkbenchSessionDetail> {
return (await task(organizationId, repoId, taskIdValue)).getSessionDetail({ sessionId });
async getSessionDetail(organizationId: string, repoId: string, taskIdValue: string, sessionId: string): Promise<WorkspaceSessionDetail> {
return await getSessionDetailWithAuth(organizationId, repoId, taskIdValue, sessionId);
},
async getWorkbench(organizationId: string): Promise<TaskWorkbenchSnapshot> {
return await getWorkbenchCompat(organizationId);
async getWorkspace(organizationId: string): Promise<TaskWorkspaceSnapshot> {
return await getWorkspaceCompat(organizationId);
},
subscribeWorkbench(organizationId: string, listener: () => void): () => void {
return subscribeWorkbench(organizationId, listener);
subscribeWorkspace(organizationId: string, listener: () => void): () => void {
return subscribeWorkspace(organizationId, listener);
},
async createWorkbenchTask(organizationId: string, input: TaskWorkbenchCreateTaskInput): Promise<TaskWorkbenchCreateTaskResponse> {
return (await organization(organizationId)).createWorkbenchTask(input);
async createWorkspaceTask(organizationId: string, input: TaskWorkspaceCreateTaskInput): Promise<TaskWorkspaceCreateTaskResponse> {
return (await organization(organizationId)).createWorkspaceTask(await withAuthSessionInput(input));
},
async markWorkbenchUnread(organizationId: string, input: TaskWorkbenchSelectInput): Promise<void> {
await (await organization(organizationId)).markWorkbenchUnread(input);
async markWorkspaceUnread(organizationId: string, input: TaskWorkspaceSelectInput): Promise<void> {
await (await organization(organizationId)).markWorkspaceUnread(await withAuthSessionInput(input));
},
async renameWorkbenchTask(organizationId: string, input: TaskWorkbenchRenameInput): Promise<void> {
await (await organization(organizationId)).renameWorkbenchTask(input);
async renameWorkspaceTask(organizationId: string, input: TaskWorkspaceRenameInput): Promise<void> {
await (await organization(organizationId)).renameWorkspaceTask(await withAuthSessionInput(input));
},
async renameWorkbenchBranch(organizationId: string, input: TaskWorkbenchRenameInput): Promise<void> {
await (await organization(organizationId)).renameWorkbenchBranch(input);
async createWorkspaceSession(organizationId: string, input: TaskWorkspaceSelectInput & { model?: string }): Promise<{ sessionId: string }> {
return await (await organization(organizationId)).createWorkspaceSession(await withAuthSessionInput(input));
},
async createWorkbenchSession(organizationId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }> {
return await (await organization(organizationId)).createWorkbenchSession(input);
async renameWorkspaceSession(organizationId: string, input: TaskWorkspaceRenameSessionInput): Promise<void> {
await (await organization(organizationId)).renameWorkspaceSession(await withAuthSessionInput(input));
},
async renameWorkbenchSession(organizationId: string, input: TaskWorkbenchRenameSessionInput): Promise<void> {
await (await organization(organizationId)).renameWorkbenchSession(input);
async selectWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise<void> {
await (await organization(organizationId)).selectWorkspaceSession(await withAuthSessionInput(input));
},
async setWorkbenchSessionUnread(organizationId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise<void> {
await (await organization(organizationId)).setWorkbenchSessionUnread(input);
async setWorkspaceSessionUnread(organizationId: string, input: TaskWorkspaceSetSessionUnreadInput): Promise<void> {
await (await organization(organizationId)).setWorkspaceSessionUnread(await withAuthSessionInput(input));
},
async updateWorkbenchDraft(organizationId: string, input: TaskWorkbenchUpdateDraftInput): Promise<void> {
await (await organization(organizationId)).updateWorkbenchDraft(input);
async updateWorkspaceDraft(organizationId: string, input: TaskWorkspaceUpdateDraftInput): Promise<void> {
await (await organization(organizationId)).updateWorkspaceDraft(await withAuthSessionInput(input));
},
async changeWorkbenchModel(organizationId: string, input: TaskWorkbenchChangeModelInput): Promise<void> {
await (await organization(organizationId)).changeWorkbenchModel(input);
async changeWorkspaceModel(organizationId: string, input: TaskWorkspaceChangeModelInput): Promise<void> {
await (await organization(organizationId)).changeWorkspaceModel(await withAuthSessionInput(input));
},
async sendWorkbenchMessage(organizationId: string, input: TaskWorkbenchSendMessageInput): Promise<void> {
await (await organization(organizationId)).sendWorkbenchMessage(input);
async sendWorkspaceMessage(organizationId: string, input: TaskWorkspaceSendMessageInput): Promise<void> {
await (await organization(organizationId)).sendWorkspaceMessage(await withAuthSessionInput(input));
},
async stopWorkbenchSession(organizationId: string, input: TaskWorkbenchSessionInput): Promise<void> {
await (await organization(organizationId)).stopWorkbenchSession(input);
async stopWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise<void> {
await (await organization(organizationId)).stopWorkspaceSession(await withAuthSessionInput(input));
},
async closeWorkbenchSession(organizationId: string, input: TaskWorkbenchSessionInput): Promise<void> {
await (await organization(organizationId)).closeWorkbenchSession(input);
async closeWorkspaceSession(organizationId: string, input: TaskWorkspaceSessionInput): Promise<void> {
await (await organization(organizationId)).closeWorkspaceSession(await withAuthSessionInput(input));
},
async publishWorkbenchPr(organizationId: string, input: TaskWorkbenchSelectInput): Promise<void> {
await (await organization(organizationId)).publishWorkbenchPr(input);
async publishWorkspacePr(organizationId: string, input: TaskWorkspaceSelectInput): Promise<void> {
await (await organization(organizationId)).publishWorkspacePr(await withAuthSessionInput(input));
},
async revertWorkbenchFile(organizationId: string, input: TaskWorkbenchDiffInput): Promise<void> {
await (await organization(organizationId)).revertWorkbenchFile(input);
async revertWorkspaceFile(organizationId: string, input: TaskWorkspaceDiffInput): Promise<void> {
await (await organization(organizationId)).revertWorkspaceFile(await withAuthSessionInput(input));
},
async reloadGithubOrganization(organizationId: string): Promise<void> {
await (await organization(organizationId)).reloadGithubOrganization();
async adminReloadGithubOrganization(organizationId: string): Promise<void> {
await (await organization(organizationId)).adminReloadGithubOrganization();
},
async reloadGithubPullRequests(organizationId: string): Promise<void> {
await (await organization(organizationId)).reloadGithubPullRequests();
},
async reloadGithubRepository(organizationId: string, repoId: string): Promise<void> {
await (await organization(organizationId)).reloadGithubRepository({ repoId });
},
async reloadGithubPullRequest(organizationId: string, repoId: string, prNumber: number): Promise<void> {
await (await organization(organizationId)).reloadGithubPullRequest({ repoId, prNumber });
async adminReloadGithubRepository(organizationId: string, repoId: string): Promise<void> {
await (await organization(organizationId)).adminReloadGithubRepository({ repoId });
},
async health(): Promise<{ ok: true }> {

View file

@ -8,4 +8,4 @@ export * from "./subscription/use-subscription.js";
export * from "./keys.js";
export * from "./mock-app.js";
export * from "./view-model.js";
export * from "./workbench-client.js";
export * from "./workspace-client.js";

View file

@ -4,18 +4,14 @@ export function organizationKey(organizationId: string): ActorKey {
return ["org", organizationId];
}
export function repositoryKey(organizationId: string, repoId: string): ActorKey {
return ["org", organizationId, "repository", repoId];
}
export function taskKey(organizationId: string, repoId: string, taskId: string): ActorKey {
return ["org", organizationId, "repository", repoId, "task", taskId];
return ["org", organizationId, "task", repoId, taskId];
}
export function taskSandboxKey(organizationId: string, sandboxId: string): ActorKey {
return ["org", organizationId, "sandbox", sandboxId];
}
export function historyKey(organizationId: string, repoId: string): ActorKey {
return ["org", organizationId, "repository", repoId, "history"];
export function auditLogKey(organizationId: string): ActorKey {
return ["org", organizationId, "audit-log"];
}

View file

@ -1,4 +1,8 @@
import type { WorkbenchModelId } from "@sandbox-agent/foundry-shared";
import { DEFAULT_WORKSPACE_MODEL_GROUPS, DEFAULT_WORKSPACE_MODEL_ID, type WorkspaceModelId } from "@sandbox-agent/foundry-shared";
const claudeModels = DEFAULT_WORKSPACE_MODEL_GROUPS.find((group) => group.agentKind === "Claude")?.models ?? [];
const CLAUDE_SECONDARY_MODEL_ID = claudeModels[1]?.id ?? claudeModels[0]?.id ?? DEFAULT_WORKSPACE_MODEL_ID;
const CLAUDE_TERTIARY_MODEL_ID = claudeModels[2]?.id ?? CLAUDE_SECONDARY_MODEL_ID;
import { injectMockLatency } from "./mock/latency.js";
import rivetDevFixture from "../../../scripts/data/rivet-dev.json" with { type: "json" };
@ -16,6 +20,7 @@ export interface MockFoundryUser {
githubLogin: string;
roleLabel: string;
eligibleOrganizationIds: string[];
defaultModel: WorkspaceModelId;
}
export interface MockFoundryOrganizationMember {
@ -61,7 +66,6 @@ export interface MockFoundryOrganizationSettings {
slug: string;
primaryDomain: string;
seatAccrualMode: "first_prompt";
defaultModel: WorkbenchModelId;
autoImportRepos: boolean;
}
@ -111,6 +115,7 @@ export interface MockFoundryAppClient {
skipStarterRepo(): Promise<void>;
starStarterRepo(organizationId: string): Promise<void>;
selectOrganization(organizationId: string): Promise<void>;
setDefaultModel(model: WorkspaceModelId): Promise<void>;
updateOrganizationProfile(input: UpdateMockOrganizationProfileInput): Promise<void>;
triggerGithubSync(organizationId: string): Promise<void>;
completeHostedCheckout(organizationId: string, planId: MockBillingPlanId): Promise<void>;
@ -180,7 +185,6 @@ function buildRivetOrganization(): MockFoundryOrganization {
slug: "rivet",
primaryDomain: "rivet.dev",
seatAccrualMode: "first_prompt",
defaultModel: "gpt-5.3-codex",
autoImportRepos: true,
},
github: {
@ -233,6 +237,7 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot {
githubLogin: "nathan",
roleLabel: "Founder",
eligibleOrganizationIds: ["personal-nathan", "acme", "rivet"],
defaultModel: DEFAULT_WORKSPACE_MODEL_ID,
},
{
id: "user-maya",
@ -241,6 +246,7 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot {
githubLogin: "maya",
roleLabel: "Staff Engineer",
eligibleOrganizationIds: ["acme"],
defaultModel: CLAUDE_SECONDARY_MODEL_ID,
},
{
id: "user-jamie",
@ -249,6 +255,7 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot {
githubLogin: "jamie",
roleLabel: "Platform Lead",
eligibleOrganizationIds: ["personal-jamie", "rivet"],
defaultModel: CLAUDE_TERTIARY_MODEL_ID,
},
],
organizations: [
@ -261,7 +268,6 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot {
slug: "nathan",
primaryDomain: "personal",
seatAccrualMode: "first_prompt",
defaultModel: "claude-sonnet-4",
autoImportRepos: true,
},
github: {
@ -297,7 +303,6 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot {
slug: "acme",
primaryDomain: "acme.dev",
seatAccrualMode: "first_prompt",
defaultModel: "claude-sonnet-4",
autoImportRepos: true,
},
github: {
@ -342,7 +347,6 @@ function buildDefaultSnapshot(): MockFoundryAppSnapshot {
slug: "jamie",
primaryDomain: "personal",
seatAccrualMode: "first_prompt",
defaultModel: "claude-opus-4",
autoImportRepos: true,
},
github: {
@ -538,6 +542,18 @@ class MockFoundryAppStore implements MockFoundryAppClient {
}
}
async setDefaultModel(model: WorkspaceModelId): Promise<void> {
await this.injectAsyncLatency();
const currentUserId = this.snapshot.auth.currentUserId;
if (!currentUserId) {
throw new Error("No signed-in mock user");
}
this.updateSnapshot((current) => ({
...current,
users: current.users.map((user) => (user.id === currentUserId ? { ...user, defaultModel: model } : user)),
}));
}
async updateOrganizationProfile(input: UpdateMockOrganizationProfileInput): Promise<void> {
await this.injectAsyncLatency();
this.requireOrganization(input.organizationId);

View file

@ -6,25 +6,26 @@ import type {
SessionEvent,
TaskRecord,
TaskSummary,
TaskWorkbenchChangeModelInput,
TaskWorkbenchCreateTaskInput,
TaskWorkbenchCreateTaskResponse,
TaskWorkbenchDiffInput,
TaskWorkbenchRenameInput,
TaskWorkbenchRenameSessionInput,
TaskWorkbenchSelectInput,
TaskWorkbenchSetSessionUnreadInput,
TaskWorkbenchSendMessageInput,
TaskWorkbenchSnapshot,
TaskWorkbenchSessionInput,
TaskWorkbenchUpdateDraftInput,
TaskWorkspaceChangeModelInput,
TaskWorkspaceCreateTaskInput,
TaskWorkspaceCreateTaskResponse,
TaskWorkspaceDiffInput,
TaskWorkspaceRenameInput,
TaskWorkspaceRenameSessionInput,
TaskWorkspaceSelectInput,
TaskWorkspaceSetSessionUnreadInput,
TaskWorkspaceSendMessageInput,
TaskWorkspaceSnapshot,
TaskWorkspaceSessionInput,
TaskWorkspaceUpdateDraftInput,
TaskEvent,
WorkbenchSessionDetail,
WorkbenchTaskDetail,
WorkbenchTaskSummary,
WorkspaceSessionDetail,
WorkspaceModelGroup,
WorkspaceTaskDetail,
WorkspaceTaskSummary,
OrganizationEvent,
OrganizationSummarySnapshot,
HistoryEvent,
AuditLogEvent as HistoryEvent,
HistoryQueryInput,
SandboxProviderId,
RepoOverview,
@ -32,9 +33,10 @@ import type {
StarSandboxAgentRepoResult,
SwitchResult,
} from "@sandbox-agent/foundry-shared";
import { DEFAULT_WORKSPACE_MODEL_GROUPS } from "@sandbox-agent/foundry-shared";
import type { ProcessCreateRequest, ProcessLogFollowQuery, ProcessLogsResponse, ProcessSignalQuery } from "sandbox-agent";
import type { ActorConn, BackendClient, SandboxProcessRecord, SandboxSessionEventRecord, SandboxSessionRecord } from "../backend-client.js";
import { getSharedMockWorkbenchClient } from "./workbench-client.js";
import { getSharedMockWorkspaceClient } from "./workspace-client.js";
interface MockProcessRecord extends SandboxProcessRecord {
logText: string;
@ -89,7 +91,7 @@ function toTaskStatus(status: TaskRecord["status"], archived: boolean): TaskReco
}
export function createMockBackendClient(defaultOrganizationId = "default"): BackendClient {
const workbench = getSharedMockWorkbenchClient();
const workspace = getSharedMockWorkspaceClient();
const listenersBySandboxId = new Map<string, Set<() => void>>();
const processesBySandboxId = new Map<string, MockProcessRecord[]>();
const connectionListeners = new Map<string, Set<(payload: any) => void>>();
@ -97,7 +99,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back
let nextProcessId = 1;
const requireTask = (taskId: string) => {
const task = workbench.getSnapshot().tasks.find((candidate) => candidate.id === taskId);
const task = workspace.getSnapshot().tasks.find((candidate) => candidate.id === taskId);
if (!task) {
throw new Error(`Unknown mock task ${taskId}`);
}
@ -164,7 +166,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back
async dispose(): Promise<void> {},
});
const buildTaskSummary = (task: TaskWorkbenchSnapshot["tasks"][number]): WorkbenchTaskSummary => ({
const buildTaskSummary = (task: TaskWorkspaceSnapshot["tasks"][number]): WorkspaceTaskSummary => ({
id: task.id,
repoId: task.repoId,
title: task.title,
@ -173,6 +175,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back
updatedAtMs: task.updatedAtMs,
branch: task.branch,
pullRequest: task.pullRequest,
activeSessionId: task.activeSessionId ?? task.sessions[0]?.id ?? null,
sessionsSummary: task.sessions.map((tab) => ({
id: tab.id,
sessionId: tab.sessionId,
@ -187,16 +190,9 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back
})),
});
const buildTaskDetail = (task: TaskWorkbenchSnapshot["tasks"][number]): WorkbenchTaskDetail => ({
const buildTaskDetail = (task: TaskWorkspaceSnapshot["tasks"][number]): WorkspaceTaskDetail => ({
...buildTaskSummary(task),
task: task.title,
agentType: task.sessions[0]?.agent === "Codex" ? "codex" : "claude",
runtimeStatus: toTaskStatus(task.status === "archived" ? "archived" : "running", task.status === "archived"),
statusMessage: task.status === "archived" ? "archived" : "mock sandbox ready",
activeSessionId: task.sessions[0]?.sessionId ?? null,
diffStat: task.fileChanges.length > 0 ? `+${task.fileChanges.length}/-${task.fileChanges.length}` : "+0/-0",
prUrl: task.pullRequest ? `https://example.test/pr/${task.pullRequest.number}` : null,
reviewStatus: null,
fileChanges: task.fileChanges,
diffs: task.diffs,
fileTree: task.fileTree,
@ -211,7 +207,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back
activeSandboxId: task.id,
});
const buildSessionDetail = (task: TaskWorkbenchSnapshot["tasks"][number], sessionId: string): WorkbenchSessionDetail => {
const buildSessionDetail = (task: TaskWorkspaceSnapshot["tasks"][number], sessionId: string): WorkspaceSessionDetail => {
const tab = task.sessions.find((candidate) => candidate.id === sessionId);
if (!tab) {
throw new Error(`Unknown mock session ${sessionId} for task ${task.id}`);
@ -232,10 +228,24 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back
};
const buildOrganizationSummary = (): OrganizationSummarySnapshot => {
const snapshot = workbench.getSnapshot();
const snapshot = workspace.getSnapshot();
const taskSummaries = snapshot.tasks.map(buildTaskSummary);
return {
organizationId: defaultOrganizationId,
github: {
connectedAccount: "mock",
installationStatus: "connected",
syncStatus: "synced",
importedRepoCount: snapshot.repos.length,
lastSyncLabel: "Synced just now",
lastSyncAt: nowMs(),
lastWebhookAt: null,
lastWebhookEvent: "",
syncGeneration: 1,
syncPhase: null,
processedRepositoryCount: snapshot.repos.length,
totalRepositoryCount: snapshot.repos.length,
},
repos: snapshot.repos.map((repo) => {
const repoTasks = taskSummaries.filter((task) => task.repoId === repo.id);
return {
@ -246,7 +256,6 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back
};
}),
taskSummaries,
openPullRequests: [],
};
};
@ -256,20 +265,16 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back
`sandbox:${organizationId}:${sandboxProviderId}:${sandboxId}`;
const emitOrganizationSnapshot = (): void => {
const summary = buildOrganizationSummary();
const latestTask = [...summary.taskSummaries].sort((left, right) => right.updatedAtMs - left.updatedAtMs)[0] ?? null;
if (latestTask) {
emitConnectionEvent(organizationScope(defaultOrganizationId), "organizationUpdated", {
type: "taskSummaryUpdated",
taskSummary: latestTask,
} satisfies OrganizationEvent);
}
emitConnectionEvent(organizationScope(defaultOrganizationId), "organizationUpdated", {
type: "organizationUpdated",
snapshot: buildOrganizationSummary(),
} satisfies OrganizationEvent);
};
const emitTaskUpdate = (taskId: string): void => {
const task = requireTask(taskId);
emitConnectionEvent(taskScope(defaultOrganizationId, task.repoId, task.id), "taskUpdated", {
type: "taskDetailUpdated",
type: "taskUpdated",
detail: buildTaskDetail(task),
} satisfies TaskEvent);
};
@ -303,9 +308,8 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back
task: task.title,
sandboxProviderId: "local",
status: toTaskStatus(archived ? "archived" : "running", archived),
statusMessage: archived ? "archived" : "mock sandbox ready",
pullRequest: null,
activeSandboxId: task.id,
activeSessionId: task.sessions[0]?.sessionId ?? null,
sandboxes: [
{
sandboxId: task.id,
@ -317,17 +321,6 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back
updatedAt: task.updatedAtMs,
},
],
agentType: task.sessions[0]?.agent === "Codex" ? "codex" : "claude",
prSubmitted: Boolean(task.pullRequest),
diffStat: task.fileChanges.length > 0 ? `+${task.fileChanges.length}/-${task.fileChanges.length}` : "+0/-0",
prUrl: task.pullRequest ? `https://example.test/pr/${task.pullRequest.number}` : null,
prAuthor: task.pullRequest ? "mock" : null,
ciStatus: null,
reviewStatus: null,
reviewer: null,
conflictsWithMain: "0",
hasUnpushed: task.fileChanges.length > 0 ? "1" : "0",
parentBranch: null,
createdAt: task.updatedAtMs,
updatedAt: task.updatedAtMs,
};
@ -400,6 +393,10 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back
return unsupportedAppSnapshot();
},
async setAppDefaultModel(): Promise<FoundryAppSnapshot> {
return unsupportedAppSnapshot();
},
async updateAppOrganizationProfile(): Promise<FoundryAppSnapshot> {
return unsupportedAppSnapshot();
},
@ -433,7 +430,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back
},
async listRepos(_organizationId: string): Promise<RepoRecord[]> {
return workbench.getSnapshot().repos.map((repo) => ({
return workspace.getSnapshot().repos.map((repo) => ({
organizationId: defaultOrganizationId,
repoId: repo.id,
remoteUrl: mockRepoRemote(repo.label),
@ -447,7 +444,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back
},
async listTasks(_organizationId: string, repoId?: string): Promise<TaskSummary[]> {
return workbench
return workspace
.getSnapshot()
.tasks.filter((task) => !repoId || task.repoId === repoId)
.map((task) => ({
@ -457,6 +454,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back
branchName: task.branch,
title: task.title,
status: task.status === "archived" ? "archived" : "running",
pullRequest: null,
updatedAt: task.updatedAtMs,
}));
},
@ -464,7 +462,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back
async getRepoOverview(_organizationId: string, _repoId: string): Promise<RepoOverview> {
notSupported("getRepoOverview");
},
async getTask(_organizationId: string, taskId: string): Promise<TaskRecord> {
async getTask(_organizationId: string, _repoId: string, taskId: string): Promise<TaskRecord> {
return buildTaskRecord(taskId);
},
@ -472,7 +470,7 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back
return [];
},
async switchTask(_organizationId: string, taskId: string): Promise<SwitchResult> {
async switchTask(_organizationId: string, _repoId: string, taskId: string): Promise<SwitchResult> {
return {
organizationId: defaultOrganizationId,
taskId,
@ -481,14 +479,14 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back
};
},
async attachTask(_organizationId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> {
async attachTask(_organizationId: string, _repoId: string, taskId: string): Promise<{ target: string; sessionId: string | null }> {
return {
target: `mock://${taskId}`,
sessionId: requireTask(taskId).sessions[0]?.sessionId ?? null,
};
},
async runAction(_organizationId: string, _taskId: string): Promise<void> {
async runAction(_organizationId: string, _repoId: string, _taskId: string): Promise<void> {
notSupported("runAction");
},
@ -637,28 +635,32 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back
return { endpoint: "mock://terminal-unavailable" };
},
async getSandboxWorkspaceModelGroups(_organizationId: string, _sandboxProviderId: SandboxProviderId, _sandboxId: string): Promise<WorkspaceModelGroup[]> {
return DEFAULT_WORKSPACE_MODEL_GROUPS;
},
async getOrganizationSummary(): Promise<OrganizationSummarySnapshot> {
return buildOrganizationSummary();
},
async getTaskDetail(_organizationId: string, _repoId: string, taskId: string): Promise<WorkbenchTaskDetail> {
async getTaskDetail(_organizationId: string, _repoId: string, taskId: string): Promise<WorkspaceTaskDetail> {
return buildTaskDetail(requireTask(taskId));
},
async getSessionDetail(_organizationId: string, _repoId: string, taskId: string, sessionId: string): Promise<WorkbenchSessionDetail> {
async getSessionDetail(_organizationId: string, _repoId: string, taskId: string, sessionId: string): Promise<WorkspaceSessionDetail> {
return buildSessionDetail(requireTask(taskId), sessionId);
},
async getWorkbench(): Promise<TaskWorkbenchSnapshot> {
return workbench.getSnapshot();
async getWorkspace(): Promise<TaskWorkspaceSnapshot> {
return workspace.getSnapshot();
},
subscribeWorkbench(_organizationId: string, listener: () => void): () => void {
return workbench.subscribe(listener);
subscribeWorkspace(_organizationId: string, listener: () => void): () => void {
return workspace.subscribe(listener);
},
async createWorkbenchTask(_organizationId: string, input: TaskWorkbenchCreateTaskInput): Promise<TaskWorkbenchCreateTaskResponse> {
const created = await workbench.createTask(input);
async createWorkspaceTask(_organizationId: string, input: TaskWorkspaceCreateTaskInput): Promise<TaskWorkspaceCreateTaskResponse> {
const created = await workspace.createTask(input);
emitOrganizationSnapshot();
emitTaskUpdate(created.taskId);
if (created.sessionId) {
@ -667,99 +669,95 @@ export function createMockBackendClient(defaultOrganizationId = "default"): Back
return created;
},
async markWorkbenchUnread(_organizationId: string, input: TaskWorkbenchSelectInput): Promise<void> {
await workbench.markTaskUnread(input);
async markWorkspaceUnread(_organizationId: string, input: TaskWorkspaceSelectInput): Promise<void> {
await workspace.markTaskUnread(input);
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
},
async renameWorkbenchTask(_organizationId: string, input: TaskWorkbenchRenameInput): Promise<void> {
await workbench.renameTask(input);
async renameWorkspaceTask(_organizationId: string, input: TaskWorkspaceRenameInput): Promise<void> {
await workspace.renameTask(input);
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
},
async renameWorkbenchBranch(_organizationId: string, input: TaskWorkbenchRenameInput): Promise<void> {
await workbench.renameBranch(input);
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
},
async createWorkbenchSession(_organizationId: string, input: TaskWorkbenchSelectInput & { model?: string }): Promise<{ sessionId: string }> {
const created = await workbench.addSession(input);
async createWorkspaceSession(_organizationId: string, input: TaskWorkspaceSelectInput & { model?: string }): Promise<{ sessionId: string }> {
const created = await workspace.addSession(input);
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
emitSessionUpdate(input.taskId, created.sessionId);
return created;
},
async renameWorkbenchSession(_organizationId: string, input: TaskWorkbenchRenameSessionInput): Promise<void> {
await workbench.renameSession(input);
async renameWorkspaceSession(_organizationId: string, input: TaskWorkspaceRenameSessionInput): Promise<void> {
await workspace.renameSession(input);
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
emitSessionUpdate(input.taskId, input.sessionId);
},
async setWorkbenchSessionUnread(_organizationId: string, input: TaskWorkbenchSetSessionUnreadInput): Promise<void> {
await workbench.setSessionUnread(input);
async selectWorkspaceSession(_organizationId: string, input: TaskWorkspaceSessionInput): Promise<void> {
await workspace.selectSession(input);
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
emitSessionUpdate(input.taskId, input.sessionId);
},
async updateWorkbenchDraft(_organizationId: string, input: TaskWorkbenchUpdateDraftInput): Promise<void> {
await workbench.updateDraft(input);
async setWorkspaceSessionUnread(_organizationId: string, input: TaskWorkspaceSetSessionUnreadInput): Promise<void> {
await workspace.setSessionUnread(input);
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
emitSessionUpdate(input.taskId, input.sessionId);
},
async changeWorkbenchModel(_organizationId: string, input: TaskWorkbenchChangeModelInput): Promise<void> {
await workbench.changeModel(input);
async updateWorkspaceDraft(_organizationId: string, input: TaskWorkspaceUpdateDraftInput): Promise<void> {
await workspace.updateDraft(input);
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
emitSessionUpdate(input.taskId, input.sessionId);
},
async sendWorkbenchMessage(_organizationId: string, input: TaskWorkbenchSendMessageInput): Promise<void> {
await workbench.sendMessage(input);
async changeWorkspaceModel(_organizationId: string, input: TaskWorkspaceChangeModelInput): Promise<void> {
await workspace.changeModel(input);
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
emitSessionUpdate(input.taskId, input.sessionId);
},
async stopWorkbenchSession(_organizationId: string, input: TaskWorkbenchSessionInput): Promise<void> {
await workbench.stopAgent(input);
async sendWorkspaceMessage(_organizationId: string, input: TaskWorkspaceSendMessageInput): Promise<void> {
await workspace.sendMessage(input);
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
emitSessionUpdate(input.taskId, input.sessionId);
},
async closeWorkbenchSession(_organizationId: string, input: TaskWorkbenchSessionInput): Promise<void> {
await workbench.closeSession(input);
async stopWorkspaceSession(_organizationId: string, input: TaskWorkspaceSessionInput): Promise<void> {
await workspace.stopAgent(input);
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
emitSessionUpdate(input.taskId, input.sessionId);
},
async closeWorkspaceSession(_organizationId: string, input: TaskWorkspaceSessionInput): Promise<void> {
await workspace.closeSession(input);
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
},
async publishWorkbenchPr(_organizationId: string, input: TaskWorkbenchSelectInput): Promise<void> {
await workbench.publishPr(input);
async publishWorkspacePr(_organizationId: string, input: TaskWorkspaceSelectInput): Promise<void> {
await workspace.publishPr(input);
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
},
async revertWorkbenchFile(_organizationId: string, input: TaskWorkbenchDiffInput): Promise<void> {
await workbench.revertFile(input);
async revertWorkspaceFile(_organizationId: string, input: TaskWorkspaceDiffInput): Promise<void> {
await workspace.revertFile(input);
emitOrganizationSnapshot();
emitTaskUpdate(input.taskId);
},
async reloadGithubOrganization(): Promise<void> {},
async reloadGithubPullRequests(): Promise<void> {},
async reloadGithubRepository(): Promise<void> {},
async reloadGithubPullRequest(): Promise<void> {},
async adminReloadGithubOrganization(): Promise<void> {},
async adminReloadGithubRepository(): Promise<void> {},
async health(): Promise<{ ok: true }> {
return { ok: true };

View file

@ -1,33 +1,34 @@
import {
MODEL_GROUPS,
buildInitialMockLayoutViewModel,
groupWorkbenchRepositories,
groupWorkspaceRepositories,
nowMs,
providerAgent,
randomReply,
removeFileTreePath,
slugify,
uid,
} from "../workbench-model.js";
} from "../workspace-model.js";
import { DEFAULT_WORKSPACE_MODEL_ID, workspaceAgentForModel } from "@sandbox-agent/foundry-shared";
import type {
TaskWorkbenchAddSessionResponse,
TaskWorkbenchChangeModelInput,
TaskWorkbenchCreateTaskInput,
TaskWorkbenchCreateTaskResponse,
TaskWorkbenchDiffInput,
TaskWorkbenchRenameInput,
TaskWorkbenchRenameSessionInput,
TaskWorkbenchSelectInput,
TaskWorkbenchSetSessionUnreadInput,
TaskWorkbenchSendMessageInput,
TaskWorkbenchSnapshot,
TaskWorkbenchSessionInput,
TaskWorkbenchUpdateDraftInput,
WorkbenchSession as AgentSession,
WorkbenchTask as Task,
WorkbenchTranscriptEvent as TranscriptEvent,
TaskWorkspaceAddSessionResponse,
TaskWorkspaceChangeModelInput,
TaskWorkspaceCreateTaskInput,
TaskWorkspaceCreateTaskResponse,
TaskWorkspaceDiffInput,
TaskWorkspaceRenameInput,
TaskWorkspaceRenameSessionInput,
TaskWorkspaceSelectInput,
TaskWorkspaceSetSessionUnreadInput,
TaskWorkspaceSendMessageInput,
TaskWorkspaceSnapshot,
TaskWorkspaceSessionInput,
TaskWorkspaceUpdateDraftInput,
WorkspaceSession as AgentSession,
WorkspaceTask as Task,
WorkspaceTranscriptEvent as TranscriptEvent,
} from "@sandbox-agent/foundry-shared";
import type { TaskWorkbenchClient } from "../workbench-client.js";
import type { TaskWorkspaceClient } from "../workspace-client.js";
function buildTranscriptEvent(params: {
sessionId: string;
@ -47,12 +48,12 @@ function buildTranscriptEvent(params: {
};
}
class MockWorkbenchStore implements TaskWorkbenchClient {
class MockWorkspaceStore implements TaskWorkspaceClient {
private snapshot = buildInitialMockLayoutViewModel();
private listeners = new Set<() => void>();
private pendingTimers = new Map<string, ReturnType<typeof setTimeout>>();
getSnapshot(): TaskWorkbenchSnapshot {
getSnapshot(): TaskWorkspaceSnapshot {
return this.snapshot;
}
@ -63,7 +64,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
};
}
async createTask(input: TaskWorkbenchCreateTaskInput): Promise<TaskWorkbenchCreateTaskResponse> {
async createTask(input: TaskWorkspaceCreateTaskInput): Promise<TaskWorkspaceCreateTaskResponse> {
const id = uid();
const sessionId = `session-${id}`;
const repo = this.snapshot.repos.find((candidate) => candidate.id === input.repoId);
@ -74,20 +75,19 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
id,
repoId: repo.id,
title: input.title?.trim() || "New Task",
status: "new",
status: "init_enqueue_provision",
repoName: repo.label,
updatedAtMs: nowMs(),
branch: input.branch?.trim() || null,
pullRequest: null,
activeSessionId: sessionId,
sessions: [
{
id: sessionId,
sessionId: sessionId,
sessionName: "Session 1",
agent: providerAgent(
MODEL_GROUPS.find((group) => group.models.some((model) => model.id === (input.model ?? "claude-sonnet-4")))?.provider ?? "Claude",
),
model: input.model ?? "claude-sonnet-4",
agent: workspaceAgentForModel(input.model ?? DEFAULT_WORKSPACE_MODEL_ID, MODEL_GROUPS),
model: input.model ?? DEFAULT_WORKSPACE_MODEL_ID,
status: "idle",
thinkingSinceMs: null,
unread: false,
@ -109,7 +109,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
return { taskId: id, sessionId };
}
async markTaskUnread(input: TaskWorkbenchSelectInput): Promise<void> {
async markTaskUnread(input: TaskWorkspaceSelectInput): Promise<void> {
this.updateTask(input.taskId, (task) => {
const targetSession = task.sessions[task.sessions.length - 1] ?? null;
if (!targetSession) {
@ -123,7 +123,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
});
}
async renameTask(input: TaskWorkbenchRenameInput): Promise<void> {
async renameTask(input: TaskWorkspaceRenameInput): Promise<void> {
const value = input.value.trim();
if (!value) {
throw new Error(`Cannot rename task ${input.taskId} to an empty title`);
@ -131,28 +131,32 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
this.updateTask(input.taskId, (task) => ({ ...task, title: value, updatedAtMs: nowMs() }));
}
async renameBranch(input: TaskWorkbenchRenameInput): Promise<void> {
const value = input.value.trim();
if (!value) {
throw new Error(`Cannot rename branch for task ${input.taskId} to an empty value`);
}
this.updateTask(input.taskId, (task) => ({ ...task, branch: value, updatedAtMs: nowMs() }));
}
async archiveTask(input: TaskWorkbenchSelectInput): Promise<void> {
async archiveTask(input: TaskWorkspaceSelectInput): Promise<void> {
this.updateTask(input.taskId, (task) => ({ ...task, status: "archived", updatedAtMs: nowMs() }));
}
async publishPr(input: TaskWorkbenchSelectInput): Promise<void> {
async publishPr(input: TaskWorkspaceSelectInput): Promise<void> {
const nextPrNumber = Math.max(0, ...this.snapshot.tasks.map((task) => task.pullRequest?.number ?? 0)) + 1;
this.updateTask(input.taskId, (task) => ({
...task,
updatedAtMs: nowMs(),
pullRequest: { number: nextPrNumber, status: "ready" },
pullRequest: {
number: nextPrNumber,
status: "ready",
title: task.title,
state: "open",
url: `https://example.test/pr/${nextPrNumber}`,
headRefName: task.branch ?? `task/${task.id}`,
baseRefName: "main",
repoFullName: task.repoName,
authorLogin: "mock",
isDraft: false,
updatedAtMs: nowMs(),
},
}));
}
async revertFile(input: TaskWorkbenchDiffInput): Promise<void> {
async revertFile(input: TaskWorkspaceDiffInput): Promise<void> {
this.updateTask(input.taskId, (task) => {
const file = task.fileChanges.find((entry) => entry.path === input.path);
const nextDiffs = { ...task.diffs };
@ -167,7 +171,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
});
}
async updateDraft(input: TaskWorkbenchUpdateDraftInput): Promise<void> {
async updateDraft(input: TaskWorkspaceUpdateDraftInput): Promise<void> {
this.assertSession(input.taskId, input.sessionId);
this.updateTask(input.taskId, (task) => ({
...task,
@ -187,7 +191,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
}));
}
async sendMessage(input: TaskWorkbenchSendMessageInput): Promise<void> {
async sendMessage(input: TaskWorkspaceSendMessageInput): Promise<void> {
const text = input.text.trim();
if (!text) {
throw new Error(`Cannot send an empty mock prompt for task ${input.taskId}`);
@ -197,7 +201,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
const startedAtMs = nowMs();
this.updateTask(input.taskId, (currentTask) => {
const isFirstOnTask = currentTask.status === "new";
const isFirstOnTask = String(currentTask.status).startsWith("init_");
const newTitle = isFirstOnTask ? (text.length > 50 ? `${text.slice(0, 47)}...` : text) : currentTask.title;
const newBranch = isFirstOnTask ? `feat/${slugify(newTitle)}` : currentTask.branch;
const userMessageLines = [text, ...input.attachments.map((attachment) => `@ ${attachment.filePath}:${attachment.lineNumber}`)];
@ -288,7 +292,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
this.pendingTimers.set(input.sessionId, timer);
}
async stopAgent(input: TaskWorkbenchSessionInput): Promise<void> {
async stopAgent(input: TaskWorkspaceSessionInput): Promise<void> {
this.assertSession(input.taskId, input.sessionId);
const existing = this.pendingTimers.get(input.sessionId);
if (existing) {
@ -311,14 +315,22 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
});
}
async setSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise<void> {
/** Makes the given session the task's active session (session must exist). */
async selectSession(input: TaskWorkspaceSessionInput): Promise<void> {
  const { taskId, sessionId } = input;
  this.assertSession(taskId, sessionId);
  this.updateTask(taskId, (task) => ({ ...task, activeSessionId: sessionId }));
}
/** Sets the unread flag on exactly one session of the task; other sessions are untouched. */
async setSessionUnread(input: TaskWorkspaceSetSessionUnreadInput): Promise<void> {
  this.updateTask(input.taskId, (task) => ({
    ...task,
    sessions: task.sessions.map((session) =>
      session.id === input.sessionId ? { ...session, unread: input.unread } : session,
    ),
  }));
}
async renameSession(input: TaskWorkbenchRenameSessionInput): Promise<void> {
async renameSession(input: TaskWorkspaceRenameSessionInput): Promise<void> {
const title = input.title.trim();
if (!title) {
throw new Error(`Cannot rename session ${input.sessionId} to an empty title`);
@ -329,7 +341,7 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
}));
}
async closeSession(input: TaskWorkbenchSessionInput): Promise<void> {
async closeSession(input: TaskWorkspaceSessionInput): Promise<void> {
this.updateTask(input.taskId, (currentTask) => {
if (currentTask.sessions.length <= 1) {
return currentTask;
@ -337,12 +349,13 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
return {
...currentTask,
activeSessionId: currentTask.activeSessionId === input.sessionId ? (currentTask.sessions.find((candidate) => candidate.id !== input.sessionId)?.id ?? null) : currentTask.activeSessionId,
sessions: currentTask.sessions.filter((candidate) => candidate.id !== input.sessionId),
};
});
}
async addSession(input: TaskWorkbenchSelectInput): Promise<TaskWorkbenchAddSessionResponse> {
async addSession(input: TaskWorkspaceSelectInput): Promise<TaskWorkspaceAddSessionResponse> {
this.assertTask(input.taskId);
const nextSessionId = uid();
const nextSession: AgentSession = {
@ -350,8 +363,8 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
sessionId: nextSessionId,
sandboxSessionId: null,
sessionName: `Session ${this.requireTask(input.taskId).sessions.length + 1}`,
agent: "Claude",
model: "claude-sonnet-4",
agent: workspaceAgentForModel(DEFAULT_WORKSPACE_MODEL_ID, MODEL_GROUPS),
model: DEFAULT_WORKSPACE_MODEL_ID,
status: "idle",
thinkingSinceMs: null,
unread: false,
@ -363,12 +376,13 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
this.updateTask(input.taskId, (currentTask) => ({
...currentTask,
updatedAtMs: nowMs(),
activeSessionId: nextSession.id,
sessions: [...currentTask.sessions, nextSession],
}));
return { sessionId: nextSession.id };
}
async changeModel(input: TaskWorkbenchChangeModelInput): Promise<void> {
async changeModel(input: TaskWorkspaceChangeModelInput): Promise<void> {
const group = MODEL_GROUPS.find((candidate) => candidate.models.some((entry) => entry.id === input.model));
if (!group) {
throw new Error(`Unable to resolve model provider for ${input.model}`);
@ -377,16 +391,16 @@ class MockWorkbenchStore implements TaskWorkbenchClient {
this.updateTask(input.taskId, (currentTask) => ({
...currentTask,
sessions: currentTask.sessions.map((candidate) =>
candidate.id === input.sessionId ? { ...candidate, model: input.model, agent: providerAgent(group.provider) } : candidate,
candidate.id === input.sessionId ? { ...candidate, model: input.model, agent: workspaceAgentForModel(input.model, MODEL_GROUPS) } : candidate,
),
}));
}
private updateState(updater: (current: TaskWorkbenchSnapshot) => TaskWorkbenchSnapshot): void {
private updateState(updater: (current: TaskWorkspaceSnapshot) => TaskWorkspaceSnapshot): void {
const nextSnapshot = updater(this.snapshot);
this.snapshot = {
...nextSnapshot,
repositories: groupWorkbenchRepositories(nextSnapshot.repos, nextSnapshot.tasks),
repositories: groupWorkspaceRepositories(nextSnapshot.repos, nextSnapshot.tasks),
};
this.notify();
}
@ -436,11 +450,11 @@ function candidateEventIndex(task: Task, sessionId: string): number {
return (session?.transcript.length ?? 0) + 1;
}
let sharedMockWorkbenchClient: TaskWorkbenchClient | null = null;
let sharedMockWorkspaceClient: TaskWorkspaceClient | null = null;
export function getSharedMockWorkbenchClient(): TaskWorkbenchClient {
if (!sharedMockWorkbenchClient) {
sharedMockWorkbenchClient = new MockWorkbenchStore();
export function getSharedMockWorkspaceClient(): TaskWorkspaceClient {
if (!sharedMockWorkspaceClient) {
sharedMockWorkspaceClient = new MockWorkspaceStore();
}
return sharedMockWorkbenchClient;
return sharedMockWorkspaceClient;
}

View file

@ -1,4 +1,4 @@
import type { FoundryAppSnapshot, FoundryBillingPlanId, UpdateFoundryOrganizationProfileInput } from "@sandbox-agent/foundry-shared";
import type { FoundryAppSnapshot, FoundryBillingPlanId, UpdateFoundryOrganizationProfileInput, WorkspaceModelId } from "@sandbox-agent/foundry-shared";
import type { BackendClient } from "../backend-client.js";
import type { FoundryAppClient } from "../app-client.js";
@ -72,6 +72,11 @@ class RemoteFoundryAppStore implements FoundryAppClient {
this.notify();
}
/** Persists the org-wide default model via the backend, then notifies subscribers. */
async setDefaultModel(model: WorkspaceModelId): Promise<void> {
  const nextSnapshot = await this.backend.setAppDefaultModel(model);
  this.snapshot = nextSnapshot;
  this.notify();
}
async updateOrganizationProfile(input: UpdateFoundryOrganizationProfileInput): Promise<void> {
this.snapshot = await this.backend.updateAppOrganizationProfile(input);
this.notify();

View file

@ -1,198 +0,0 @@
import type {
TaskWorkbenchAddSessionResponse,
TaskWorkbenchChangeModelInput,
TaskWorkbenchCreateTaskInput,
TaskWorkbenchCreateTaskResponse,
TaskWorkbenchDiffInput,
TaskWorkbenchRenameInput,
TaskWorkbenchRenameSessionInput,
TaskWorkbenchSelectInput,
TaskWorkbenchSetSessionUnreadInput,
TaskWorkbenchSendMessageInput,
TaskWorkbenchSnapshot,
TaskWorkbenchSessionInput,
TaskWorkbenchUpdateDraftInput,
} from "@sandbox-agent/foundry-shared";
import type { BackendClient } from "../backend-client.js";
import { groupWorkbenchRepositories } from "../workbench-model.js";
import type { TaskWorkbenchClient } from "../workbench-client.js";
/** Constructor options for the backend-backed workbench client. */
export interface RemoteWorkbenchClientOptions {
/** Transport used for all workbench RPC calls and server subscriptions. */
backend: BackendClient;
/** Organization whose workbench snapshot this client mirrors. */
organizationId: string;
}
/**
 * TaskWorkbenchClient implementation backed by the remote BackendClient.
 *
 * Keeps a locally cached TaskWorkbenchSnapshot in sync with the server:
 * every mutation triggers a refetch, server broadcasts trigger a refetch,
 * refetches are deduplicated to one in flight at a time, and a failed
 * refetch retries on a 1-second timer while at least one listener remains.
 */
class RemoteWorkbenchStore implements TaskWorkbenchClient {
  private readonly backend: BackendClient;
  private readonly organizationId: string;
  private snapshot: TaskWorkbenchSnapshot;
  private readonly listeners = new Set<() => void>();
  // Teardown handle for the server-side broadcast subscription.
  private stopServerSubscription: (() => void) | null = null;
  // Non-null while a snapshot refetch is in flight (dedup guard).
  private inflightRefresh: Promise<void> | null = null;
  // Pending retry armed after a failed refetch.
  private retryTimer: ReturnType<typeof setTimeout> | null = null;

  constructor(options: RemoteWorkbenchClientOptions) {
    this.backend = options.backend;
    this.organizationId = options.organizationId;
    // Empty snapshot until the first successful refresh.
    this.snapshot = {
      organizationId: options.organizationId,
      repos: [],
      repositories: [],
      tasks: [],
    };
  }

  /** Returns the most recently cached snapshot; never blocks. */
  getSnapshot(): TaskWorkbenchSnapshot {
    return this.snapshot;
  }

  /** Registers a change listener and starts syncing on the first subscriber. */
  subscribe(listener: () => void): () => void {
    this.listeners.add(listener);
    this.ensureStarted();
    return () => {
      this.listeners.delete(listener);
      if (this.listeners.size > 0) {
        return;
      }
      // Last subscriber left: cancel any retry and drop the server connection.
      if (this.retryTimer) {
        clearTimeout(this.retryTimer);
        this.retryTimer = null;
      }
      if (this.stopServerSubscription) {
        this.stopServerSubscription();
        this.stopServerSubscription = null;
      }
    };
  }

  async createTask(input: TaskWorkbenchCreateTaskInput): Promise<TaskWorkbenchCreateTaskResponse> {
    const response = await this.backend.createWorkbenchTask(this.organizationId, input);
    await this.refresh();
    return response;
  }

  async markTaskUnread(input: TaskWorkbenchSelectInput): Promise<void> {
    await this.backend.markWorkbenchUnread(this.organizationId, input);
    await this.refresh();
  }

  async renameTask(input: TaskWorkbenchRenameInput): Promise<void> {
    await this.backend.renameWorkbenchTask(this.organizationId, input);
    await this.refresh();
  }

  async renameBranch(input: TaskWorkbenchRenameInput): Promise<void> {
    await this.backend.renameWorkbenchBranch(this.organizationId, input);
    await this.refresh();
  }

  async archiveTask(input: TaskWorkbenchSelectInput): Promise<void> {
    await this.backend.runAction(this.organizationId, input.taskId, "archive");
    await this.refresh();
  }

  async publishPr(input: TaskWorkbenchSelectInput): Promise<void> {
    await this.backend.publishWorkbenchPr(this.organizationId, input);
    await this.refresh();
  }

  async revertFile(input: TaskWorkbenchDiffInput): Promise<void> {
    await this.backend.revertWorkbenchFile(this.organizationId, input);
    await this.refresh();
  }

  async updateDraft(input: TaskWorkbenchUpdateDraftInput): Promise<void> {
    await this.backend.updateWorkbenchDraft(this.organizationId, input);
    // Deliberately no refresh — the server broadcast will trigger one, and the
    // frontend keeps local draft state so a round-trip can't clobber user input.
  }

  async sendMessage(input: TaskWorkbenchSendMessageInput): Promise<void> {
    await this.backend.sendWorkbenchMessage(this.organizationId, input);
    await this.refresh();
  }

  async stopAgent(input: TaskWorkbenchSessionInput): Promise<void> {
    await this.backend.stopWorkbenchSession(this.organizationId, input);
    await this.refresh();
  }

  async setSessionUnread(input: TaskWorkbenchSetSessionUnreadInput): Promise<void> {
    await this.backend.setWorkbenchSessionUnread(this.organizationId, input);
    await this.refresh();
  }

  async renameSession(input: TaskWorkbenchRenameSessionInput): Promise<void> {
    await this.backend.renameWorkbenchSession(this.organizationId, input);
    await this.refresh();
  }

  async closeSession(input: TaskWorkbenchSessionInput): Promise<void> {
    await this.backend.closeWorkbenchSession(this.organizationId, input);
    await this.refresh();
  }

  async addSession(input: TaskWorkbenchSelectInput): Promise<TaskWorkbenchAddSessionResponse> {
    const response = await this.backend.createWorkbenchSession(this.organizationId, input);
    await this.refresh();
    return response;
  }

  async changeModel(input: TaskWorkbenchChangeModelInput): Promise<void> {
    await this.backend.changeWorkbenchModel(this.organizationId, input);
    await this.refresh();
  }

  /** Opens the server subscription (once) and kicks off an immediate refresh. */
  private ensureStarted(): void {
    if (!this.stopServerSubscription) {
      this.stopServerSubscription = this.backend.subscribeWorkbench(this.organizationId, () => {
        this.refreshInBackground();
      });
    }
    this.refreshInBackground();
  }

  /** Fire-and-forget refresh; a failure arms the retry timer instead of throwing. */
  private refreshInBackground(): void {
    void this.refresh().catch(() => {
      this.scheduleRefreshRetry();
    });
  }

  /** Arms a 1s retry, unless one is already pending or nobody is listening. */
  private scheduleRefreshRetry(): void {
    if (this.retryTimer || this.listeners.size === 0) {
      return;
    }
    this.retryTimer = setTimeout(() => {
      this.retryTimer = null;
      this.refreshInBackground();
    }, 1_000);
  }

  /** Refetches the snapshot, deduplicating concurrent callers onto one request. */
  private async refresh(): Promise<void> {
    if (this.inflightRefresh) {
      await this.inflightRefresh;
      return;
    }
    this.inflightRefresh = (async () => {
      const fetched = await this.backend.getWorkbench(this.organizationId);
      // A successful fetch supersedes any pending retry.
      if (this.retryTimer) {
        clearTimeout(this.retryTimer);
        this.retryTimer = null;
      }
      this.snapshot = {
        ...fetched,
        repositories: fetched.repositories ?? groupWorkbenchRepositories(fetched.repos, fetched.tasks),
      };
      // Iterate over a copy so listeners that unsubscribe mid-notify are safe.
      for (const listener of [...this.listeners]) {
        listener();
      }
    })().finally(() => {
      this.inflightRefresh = null;
    });
    await this.inflightRefresh;
  }
}
/** Creates a TaskWorkbenchClient that proxies every operation to the remote backend. */
export function createRemoteWorkbenchClient(options: RemoteWorkbenchClientOptions): TaskWorkbenchClient {
  const store = new RemoteWorkbenchStore(options);
  return store;
}

View file

@ -0,0 +1,198 @@
import type {
TaskWorkspaceAddSessionResponse,
TaskWorkspaceChangeModelInput,
TaskWorkspaceCreateTaskInput,
TaskWorkspaceCreateTaskResponse,
TaskWorkspaceDiffInput,
TaskWorkspaceRenameInput,
TaskWorkspaceRenameSessionInput,
TaskWorkspaceSelectInput,
TaskWorkspaceSetSessionUnreadInput,
TaskWorkspaceSendMessageInput,
TaskWorkspaceSnapshot,
TaskWorkspaceSessionInput,
TaskWorkspaceUpdateDraftInput,
} from "@sandbox-agent/foundry-shared";
import type { BackendClient } from "../backend-client.js";
import { groupWorkspaceRepositories } from "../workspace-model.js";
import type { TaskWorkspaceClient } from "../workspace-client.js";
/** Constructor options for the backend-backed workspace client. */
export interface RemoteWorkspaceClientOptions {
/** Transport used for all workspace RPC calls and server subscriptions. */
backend: BackendClient;
/** Organization whose workspace snapshot this client mirrors. */
organizationId: string;
}
/**
 * TaskWorkspaceClient implementation backed by the remote BackendClient.
 *
 * Mirrors the server's TaskWorkspaceSnapshot locally: each mutation refetches
 * the snapshot, server broadcasts refetch it too, concurrent refetches share
 * one in-flight request, and failures retry on a 1-second timer for as long
 * as at least one listener is subscribed.
 */
class RemoteWorkspaceStore implements TaskWorkspaceClient {
  private readonly backend: BackendClient;
  private readonly organizationId: string;
  private snapshot: TaskWorkspaceSnapshot;
  private readonly listeners = new Set<() => void>();
  // Teardown handle for the server-side broadcast subscription.
  private disposeServerSubscription: (() => void) | null = null;
  // Non-null while a snapshot refetch is in flight (dedup guard).
  private pendingRefresh: Promise<void> | null = null;
  // Retry timer armed after a failed refetch.
  private retryHandle: ReturnType<typeof setTimeout> | null = null;

  constructor(options: RemoteWorkspaceClientOptions) {
    this.backend = options.backend;
    this.organizationId = options.organizationId;
    // Empty snapshot until the first successful refresh completes.
    this.snapshot = {
      organizationId: options.organizationId,
      repos: [],
      repositories: [],
      tasks: [],
    };
  }

  /** Returns the most recently cached snapshot; never blocks. */
  getSnapshot(): TaskWorkspaceSnapshot {
    return this.snapshot;
  }

  /** Registers a change listener and starts syncing on the first subscriber. */
  subscribe(listener: () => void): () => void {
    this.listeners.add(listener);
    this.ensureStarted();
    return () => {
      this.listeners.delete(listener);
      if (this.listeners.size > 0) {
        return;
      }
      // Last subscriber left: stop retrying and close the server connection.
      if (this.retryHandle) {
        clearTimeout(this.retryHandle);
        this.retryHandle = null;
      }
      if (this.disposeServerSubscription) {
        this.disposeServerSubscription();
        this.disposeServerSubscription = null;
      }
    };
  }

  async createTask(input: TaskWorkspaceCreateTaskInput): Promise<TaskWorkspaceCreateTaskResponse> {
    const created = await this.backend.createWorkspaceTask(this.organizationId, input);
    await this.refresh();
    return created;
  }

  async markTaskUnread(input: TaskWorkspaceSelectInput): Promise<void> {
    await this.backend.markWorkspaceUnread(this.organizationId, input);
    await this.refresh();
  }

  async renameTask(input: TaskWorkspaceRenameInput): Promise<void> {
    await this.backend.renameWorkspaceTask(this.organizationId, input);
    await this.refresh();
  }

  async archiveTask(input: TaskWorkspaceSelectInput): Promise<void> {
    await this.backend.runAction(this.organizationId, input.repoId, input.taskId, "archive");
    await this.refresh();
  }

  async publishPr(input: TaskWorkspaceSelectInput): Promise<void> {
    await this.backend.publishWorkspacePr(this.organizationId, input);
    await this.refresh();
  }

  async revertFile(input: TaskWorkspaceDiffInput): Promise<void> {
    await this.backend.revertWorkspaceFile(this.organizationId, input);
    await this.refresh();
  }

  async updateDraft(input: TaskWorkspaceUpdateDraftInput): Promise<void> {
    await this.backend.updateWorkspaceDraft(this.organizationId, input);
    // Deliberately no refresh — the server broadcast will trigger one, and the
    // frontend keeps local draft state so a round-trip can't clobber user input.
  }

  async sendMessage(input: TaskWorkspaceSendMessageInput): Promise<void> {
    await this.backend.sendWorkspaceMessage(this.organizationId, input);
    await this.refresh();
  }

  async stopAgent(input: TaskWorkspaceSessionInput): Promise<void> {
    await this.backend.stopWorkspaceSession(this.organizationId, input);
    await this.refresh();
  }

  async selectSession(input: TaskWorkspaceSessionInput): Promise<void> {
    await this.backend.selectWorkspaceSession(this.organizationId, input);
    await this.refresh();
  }

  async setSessionUnread(input: TaskWorkspaceSetSessionUnreadInput): Promise<void> {
    await this.backend.setWorkspaceSessionUnread(this.organizationId, input);
    await this.refresh();
  }

  async renameSession(input: TaskWorkspaceRenameSessionInput): Promise<void> {
    await this.backend.renameWorkspaceSession(this.organizationId, input);
    await this.refresh();
  }

  async closeSession(input: TaskWorkspaceSessionInput): Promise<void> {
    await this.backend.closeWorkspaceSession(this.organizationId, input);
    await this.refresh();
  }

  async addSession(input: TaskWorkspaceSelectInput): Promise<TaskWorkspaceAddSessionResponse> {
    const created = await this.backend.createWorkspaceSession(this.organizationId, input);
    await this.refresh();
    return created;
  }

  async changeModel(input: TaskWorkspaceChangeModelInput): Promise<void> {
    await this.backend.changeWorkspaceModel(this.organizationId, input);
    await this.refresh();
  }

  /** Opens the server subscription (once) and kicks off an immediate refresh. */
  private ensureStarted(): void {
    if (!this.disposeServerSubscription) {
      this.disposeServerSubscription = this.backend.subscribeWorkspace(this.organizationId, () => {
        this.refreshInBackground();
      });
    }
    this.refreshInBackground();
  }

  /** Fire-and-forget refresh; a failure arms the retry timer instead of throwing. */
  private refreshInBackground(): void {
    void this.refresh().catch(() => {
      this.scheduleRefreshRetry();
    });
  }

  /** Arms a 1s retry, unless one is already pending or nobody is listening. */
  private scheduleRefreshRetry(): void {
    if (this.retryHandle || this.listeners.size === 0) {
      return;
    }
    this.retryHandle = setTimeout(() => {
      this.retryHandle = null;
      this.refreshInBackground();
    }, 1_000);
  }

  /** Refetches the snapshot, deduplicating concurrent callers onto one request. */
  private async refresh(): Promise<void> {
    if (this.pendingRefresh) {
      await this.pendingRefresh;
      return;
    }
    this.pendingRefresh = (async () => {
      const next = await this.backend.getWorkspace(this.organizationId);
      // A successful fetch supersedes any pending retry.
      if (this.retryHandle) {
        clearTimeout(this.retryHandle);
        this.retryHandle = null;
      }
      this.snapshot = {
        ...next,
        repositories: next.repositories ?? groupWorkspaceRepositories(next.repos, next.tasks),
      };
      // Iterate over a copy so listeners that unsubscribe mid-notify are safe.
      for (const listener of [...this.listeners]) {
        listener();
      }
    })().finally(() => {
      this.pendingRefresh = null;
    });
    await this.pendingRefresh;
  }
}
/** Creates a TaskWorkspaceClient that proxies every operation to the remote backend. */
export function createRemoteWorkspaceClient(options: RemoteWorkspaceClientOptions): TaskWorkspaceClient {
  const client = new RemoteWorkspaceStore(options);
  return client;
}

View file

@ -81,6 +81,7 @@ class TopicEntry<TData, TParams, TEvent> {
private unsubscribeError: (() => void) | null = null;
private teardownTimer: ReturnType<typeof setTimeout> | null = null;
private startPromise: Promise<void> | null = null;
private eventPromise: Promise<void> = Promise.resolve();
private started = false;
constructor(
@ -157,12 +158,7 @@ class TopicEntry<TData, TParams, TEvent> {
try {
this.conn = await this.definition.connect(this.backend, this.params);
this.unsubscribeEvent = this.conn.on(this.definition.event, (event: TEvent) => {
if (this.data === undefined) {
return;
}
this.data = this.definition.applyEvent(this.data, event);
this.lastRefreshAt = Date.now();
this.notify();
void this.applyEvent(event);
});
this.unsubscribeError = this.conn.onError((error: unknown) => {
this.status = "error";
@ -182,6 +178,33 @@ class TopicEntry<TData, TParams, TEvent> {
}
}
/**
 * Applies one broadcast event to the cached read model.
 *
 * Events are serialized through `eventPromise` so asynchronous applyEvent
 * handlers never interleave; a failing handler marks the topic errored
 * without breaking the chain for subsequent events.
 */
private applyEvent(event: TEvent): Promise<void> {
  const handle = async (): Promise<void> => {
    // Drop events that arrive before initial data exists or after teardown.
    if (!this.started || this.data === undefined) {
      return;
    }
    const nextData = await this.definition.applyEvent(this.backend, this.params, this.data, event);
    // Re-check: the topic may have been torn down while we were awaiting.
    if (!this.started) {
      return;
    }
    this.data = nextData;
    this.status = "connected";
    this.error = null;
    this.lastRefreshAt = Date.now();
    this.notify();
  };
  const onFailure = (cause: unknown): void => {
    this.status = "error";
    this.error = cause instanceof Error ? cause : new Error(String(cause));
    this.notify();
  };
  this.eventPromise = this.eventPromise.then(handle).catch(onFailure);
  return this.eventPromise;
}
private notify(): void {
for (const listener of [...this.listeners]) {
listener();

View file

@ -5,8 +5,8 @@ import type {
SandboxProcessesEvent,
SessionEvent,
TaskEvent,
WorkbenchSessionDetail,
WorkbenchTaskDetail,
WorkspaceSessionDetail,
WorkspaceTaskDetail,
OrganizationEvent,
OrganizationSummarySnapshot,
} from "@sandbox-agent/foundry-shared";
@ -16,15 +16,15 @@ import type { ActorConn, BackendClient, SandboxProcessRecord } from "../backend-
* Topic definitions for the subscription manager.
*
* Each topic describes one actor connection plus one materialized read model.
* Events always carry full replacement payloads for the changed entity so the
* client can replace cached state directly instead of reconstructing patches.
* Some topics can apply broadcast payloads directly, while others refetch
* through BackendClient so auth-scoped state stays user-specific.
*/
export interface TopicDefinition<TData, TParams, TEvent> {
key: (params: TParams) => string;
event: string;
connect: (backend: BackendClient, params: TParams) => Promise<ActorConn>;
fetchInitial: (backend: BackendClient, params: TParams) => Promise<TData>;
applyEvent: (current: TData, event: TEvent) => TData;
applyEvent: (backend: BackendClient, params: TParams, current: TData, event: TEvent) => Promise<TData> | TData;
}
export interface AppTopicParams {}
@ -48,23 +48,13 @@ export interface SandboxProcessesTopicParams {
sandboxId: string;
}
/** Returns a new array with any entry sharing `nextItem.id` replaced by `nextItem`, re-sorted with `sort`. */
function upsertById<T extends { id: string }>(items: T[], nextItem: T, sort: (left: T, right: T) => number): T[] {
  const result: T[] = [];
  for (const item of items) {
    if (item.id !== nextItem.id) {
      result.push(item);
    }
  }
  result.push(nextItem);
  result.sort(sort);
  return result;
}
/** Returns a new array with any entry sharing `nextItem.prId` replaced by `nextItem`, re-sorted with `sort`. */
function upsertByPrId<T extends { prId: string }>(items: T[], nextItem: T, sort: (left: T, right: T) => number): T[] {
  const kept = items.filter((item) => item.prId !== nextItem.prId);
  kept.push(nextItem);
  return kept.sort(sort);
}
export const topicDefinitions = {
app: {
key: () => "app",
event: "appUpdated",
connect: (backend: BackendClient, _params: AppTopicParams) => backend.connectOrganization("app"),
fetchInitial: (backend: BackendClient, _params: AppTopicParams) => backend.getAppSnapshot(),
applyEvent: (_current: FoundryAppSnapshot, event: AppEvent) => event.snapshot,
applyEvent: (_backend: BackendClient, _params: AppTopicParams, _current: FoundryAppSnapshot, event: AppEvent) => event.snapshot,
} satisfies TopicDefinition<FoundryAppSnapshot, AppTopicParams, AppEvent>,
organization: {
@ -72,41 +62,8 @@ export const topicDefinitions = {
event: "organizationUpdated",
connect: (backend: BackendClient, params: OrganizationTopicParams) => backend.connectOrganization(params.organizationId),
fetchInitial: (backend: BackendClient, params: OrganizationTopicParams) => backend.getOrganizationSummary(params.organizationId),
applyEvent: (current: OrganizationSummarySnapshot, event: OrganizationEvent) => {
switch (event.type) {
case "taskSummaryUpdated":
return {
...current,
taskSummaries: upsertById(current.taskSummaries, event.taskSummary, (left, right) => right.updatedAtMs - left.updatedAtMs),
};
case "taskRemoved":
return {
...current,
taskSummaries: current.taskSummaries.filter((task) => task.id !== event.taskId),
};
case "repoAdded":
case "repoUpdated":
return {
...current,
repos: upsertById(current.repos, event.repo, (left, right) => right.latestActivityMs - left.latestActivityMs),
};
case "repoRemoved":
return {
...current,
repos: current.repos.filter((repo) => repo.id !== event.repoId),
};
case "pullRequestUpdated":
return {
...current,
openPullRequests: upsertByPrId(current.openPullRequests, event.pullRequest, (left, right) => right.updatedAtMs - left.updatedAtMs),
};
case "pullRequestRemoved":
return {
...current,
openPullRequests: current.openPullRequests.filter((pullRequest) => pullRequest.prId !== event.prId),
};
}
},
applyEvent: (_backend: BackendClient, _params: OrganizationTopicParams, _current: OrganizationSummarySnapshot, event: OrganizationEvent) =>
event.snapshot,
} satisfies TopicDefinition<OrganizationSummarySnapshot, OrganizationTopicParams, OrganizationEvent>,
task: {
@ -114,8 +71,9 @@ export const topicDefinitions = {
event: "taskUpdated",
connect: (backend: BackendClient, params: TaskTopicParams) => backend.connectTask(params.organizationId, params.repoId, params.taskId),
fetchInitial: (backend: BackendClient, params: TaskTopicParams) => backend.getTaskDetail(params.organizationId, params.repoId, params.taskId),
applyEvent: (_current: WorkbenchTaskDetail, event: TaskEvent) => event.detail,
} satisfies TopicDefinition<WorkbenchTaskDetail, TaskTopicParams, TaskEvent>,
applyEvent: (backend: BackendClient, params: TaskTopicParams, _current: WorkspaceTaskDetail, _event: TaskEvent) =>
backend.getTaskDetail(params.organizationId, params.repoId, params.taskId),
} satisfies TopicDefinition<WorkspaceTaskDetail, TaskTopicParams, TaskEvent>,
session: {
key: (params: SessionTopicParams) => `session:${params.organizationId}:${params.taskId}:${params.sessionId}`,
@ -123,13 +81,13 @@ export const topicDefinitions = {
connect: (backend: BackendClient, params: SessionTopicParams) => backend.connectTask(params.organizationId, params.repoId, params.taskId),
fetchInitial: (backend: BackendClient, params: SessionTopicParams) =>
backend.getSessionDetail(params.organizationId, params.repoId, params.taskId, params.sessionId),
applyEvent: (current: WorkbenchSessionDetail, event: SessionEvent) => {
if (event.session.sessionId !== current.sessionId) {
applyEvent: async (backend: BackendClient, params: SessionTopicParams, current: WorkspaceSessionDetail, event: SessionEvent) => {
if (event.session.sessionId !== params.sessionId) {
return current;
}
return event.session;
return await backend.getSessionDetail(params.organizationId, params.repoId, params.taskId, params.sessionId);
},
} satisfies TopicDefinition<WorkbenchSessionDetail, SessionTopicParams, SessionEvent>,
} satisfies TopicDefinition<WorkspaceSessionDetail, SessionTopicParams, SessionEvent>,
sandboxProcesses: {
key: (params: SandboxProcessesTopicParams) => `sandbox:${params.organizationId}:${params.sandboxProviderId}:${params.sandboxId}`,
@ -138,7 +96,8 @@ export const topicDefinitions = {
backend.connectSandbox(params.organizationId, params.sandboxProviderId, params.sandboxId),
fetchInitial: async (backend: BackendClient, params: SandboxProcessesTopicParams) =>
(await backend.listSandboxProcesses(params.organizationId, params.sandboxProviderId, params.sandboxId)).processes,
applyEvent: (_current: SandboxProcessRecord[], event: SandboxProcessesEvent) => event.processes,
applyEvent: (_backend: BackendClient, _params: SandboxProcessesTopicParams, _current: SandboxProcessRecord[], event: SandboxProcessesEvent) =>
event.processes,
} satisfies TopicDefinition<SandboxProcessRecord[], SandboxProcessesTopicParams, SandboxProcessesEvent>,
} as const;

Some files were not shown because too many files have changed in this diff Show more